ctx               103 arch/alpha/kernel/osf_sys.c 	struct dir_context ctx;
ctx               111 arch/alpha/kernel/osf_sys.c osf_filldir(struct dir_context *ctx, const char *name, int namlen,
ctx               116 arch/alpha/kernel/osf_sys.c 		container_of(ctx, struct osf_dirent_callback, ctx);
ctx               156 arch/alpha/kernel/osf_sys.c 		.ctx.actor = osf_filldir,
ctx               165 arch/alpha/kernel/osf_sys.c 	error = iterate_dir(arg.file, &buf.ctx);
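Note: the osf_sys.c hits above are the kernel's standard directory-iteration idiom: a private callback struct embeds struct dir_context, iterate_dir() drives the ->actor for each entry, and the actor recovers the outer struct via container_of(). A minimal sketch of that shape (my_dirent_callback and its error field are illustrative, not the real osf_dirent_callback layout; the int-returning actor signature matches the era of these excerpts):

	struct my_dirent_callback {
		struct dir_context ctx;		/* embedded; &buf.ctx is what iterate_dir() gets */
		int error;			/* private per-walk state */
	};

	static int my_filldir(struct dir_context *ctx, const char *name,
			      int namlen, loff_t offset, u64 ino,
			      unsigned int d_type)
	{
		struct my_dirent_callback *buf =
			container_of(ctx, struct my_dirent_callback, ctx);

		/* copy one entry out; a non-zero return stops iterate_dir() */
		buf->error = 0;
		return 0;
	}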
ctx                57 arch/arm/crypto/aes-ce-glue.c static int num_rounds(struct crypto_aes_ctx *ctx)
ctx                66 arch/arm/crypto/aes-ce-glue.c 	return 6 + ctx->key_length / 4;
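Note: num_rounds() is the AES schedule arithmetic with key_length in bytes; the same expression recurs in every glue file below:

	/* 6 + key_length / 4:
	 *   AES-128: 6 + 16/4 = 10 rounds
	 *   AES-192: 6 + 24/4 = 12 rounds
	 *   AES-256: 6 + 32/4 = 14 rounds
	 */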
ctx                69 arch/arm/crypto/aes-ce-glue.c static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx                88 arch/arm/crypto/aes-ce-glue.c 	ctx->key_length = key_len;
ctx                90 arch/arm/crypto/aes-ce-glue.c 		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));
ctx                94 arch/arm/crypto/aes-ce-glue.c 		u32 *rki = ctx->key_enc + (i * kwords);
ctx               124 arch/arm/crypto/aes-ce-glue.c 	key_enc = (struct aes_block *)ctx->key_enc;
ctx               125 arch/arm/crypto/aes-ce-glue.c 	key_dec = (struct aes_block *)ctx->key_dec;
ctx               126 arch/arm/crypto/aes-ce-glue.c 	j = num_rounds(ctx);
ctx               140 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               143 arch/arm/crypto/aes-ce-glue.c 	ret = ce_aes_expandkey(ctx, in_key, key_len);
ctx               159 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               166 arch/arm/crypto/aes-ce-glue.c 	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
ctx               168 arch/arm/crypto/aes-ce-glue.c 		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
ctx               180 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               190 arch/arm/crypto/aes-ce-glue.c 				   ctx->key_enc, num_rounds(ctx), blocks);
ctx               200 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               210 arch/arm/crypto/aes-ce-glue.c 				   ctx->key_dec, num_rounds(ctx), blocks);
ctx               221 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               228 arch/arm/crypto/aes-ce-glue.c 				   ctx->key_enc, num_rounds(ctx), blocks,
ctx               251 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               258 arch/arm/crypto/aes-ce-glue.c 				   ctx->key_dec, num_rounds(ctx), blocks,
ctx               280 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               328 arch/arm/crypto/aes-ce-glue.c 			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
ctx               338 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               386 arch/arm/crypto/aes-ce-glue.c 			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
ctx               396 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               405 arch/arm/crypto/aes-ce-glue.c 				   ctx->key_enc, num_rounds(ctx), blocks,
ctx               422 arch/arm/crypto/aes-ce-glue.c 		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
ctx               433 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               442 arch/arm/crypto/aes-ce-glue.c 	aes_encrypt(ctx, dst, src);
ctx               457 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               458 arch/arm/crypto/aes-ce-glue.c 	int err, first, rounds = num_rounds(&ctx->key1);
ctx               497 arch/arm/crypto/aes-ce-glue.c 				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
ctx               498 arch/arm/crypto/aes-ce-glue.c 				   ctx->key2.key_enc, first);
ctx               519 arch/arm/crypto/aes-ce-glue.c 			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
ctx               520 arch/arm/crypto/aes-ce-glue.c 			   ctx->key2.key_enc, first);
ctx               529 arch/arm/crypto/aes-ce-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               530 arch/arm/crypto/aes-ce-glue.c 	int err, first, rounds = num_rounds(&ctx->key1);
ctx               569 arch/arm/crypto/aes-ce-glue.c 				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
ctx               570 arch/arm/crypto/aes-ce-glue.c 				   ctx->key2.key_enc, first);
ctx               591 arch/arm/crypto/aes-ce-glue.c 			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
ctx               592 arch/arm/crypto/aes-ce-glue.c 			   ctx->key2.key_enc, first);
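Note: the xts_set_key hits above show the XTS convention of two equal-size AES keys supplied back to back, the first for the data cipher and the second for the tweak. Reassembled from the excerpts as a sketch; the xts_verify_key() weak-key guard is an assumption, since the excerpts only show the two expansion calls:

	static int my_xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
				  unsigned int key_len)
	{
		struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
		int ret;

		ret = xts_verify_key(tfm, in_key, key_len);	/* assumed guard */
		if (ret)
			return ret;

		/* first half keys the data cipher, second half the tweak */
		ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
		if (!ret)
			ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
					       key_len / 2);
		return ret;
	}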
ctx                18 arch/arm/crypto/aes-cipher-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                19 arch/arm/crypto/aes-cipher-glue.c 	int rounds = 6 + ctx->key_length / 4;
ctx                21 arch/arm/crypto/aes-cipher-glue.c 	__aes_arm_encrypt(ctx->key_enc, rounds, in, out);
ctx                26 arch/arm/crypto/aes-cipher-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                27 arch/arm/crypto/aes-cipher-glue.c 	int rounds = 6 + ctx->key_length / 4;
ctx                29 arch/arm/crypto/aes-cipher-glue.c 	__aes_arm_decrypt(ctx->key_dec, rounds, in, out);
ctx                69 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                77 arch/arm/crypto/aes-neonbs-glue.c 	ctx->rounds = 6 + key_len / 4;
ctx                80 arch/arm/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
ctx                91 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               105 arch/arm/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
ctx               106 arch/arm/crypto/aes-neonbs-glue.c 		   ctx->rounds, blocks);
ctx               128 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               136 arch/arm/crypto/aes-neonbs-glue.c 	ctx->key.rounds = 6 + key_len / 4;
ctx               139 arch/arm/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
ctx               142 arch/arm/crypto/aes-neonbs-glue.c 	return crypto_cipher_setkey(ctx->enc_tfm, in_key, key_len);
ctx               147 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               149 arch/arm/crypto/aes-neonbs-glue.c 	crypto_cipher_encrypt_one(ctx->enc_tfm, dst, src);
ctx               160 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               175 arch/arm/crypto/aes-neonbs-glue.c 				  ctx->key.rk, ctx->key.rounds, blocks,
ctx               187 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               189 arch/arm/crypto/aes-neonbs-glue.c 	ctx->enc_tfm = crypto_alloc_cipher("aes", 0, 0);
ctx               191 arch/arm/crypto/aes-neonbs-glue.c 	return PTR_ERR_OR_ZERO(ctx->enc_tfm);
ctx               196 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               198 arch/arm/crypto/aes-neonbs-glue.c 	crypto_free_cipher(ctx->enc_tfm);
ctx               204 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               207 arch/arm/crypto/aes-neonbs-glue.c 	err = aes_expandkey(&ctx->fallback, in_key, key_len);
ctx               211 arch/arm/crypto/aes-neonbs-glue.c 	ctx->key.rounds = 6 + key_len / 4;
ctx               214 arch/arm/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
ctx               223 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               242 arch/arm/crypto/aes-neonbs-glue.c 				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
ctx               264 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               273 arch/arm/crypto/aes-neonbs-glue.c 	aes_encrypt(&ctx->fallback, dst, src);
ctx               288 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               296 arch/arm/crypto/aes-neonbs-glue.c 	err = crypto_cipher_setkey(ctx->cts_tfm, in_key, key_len);
ctx               299 arch/arm/crypto/aes-neonbs-glue.c 	err = crypto_cipher_setkey(ctx->tweak_tfm, in_key + key_len, key_len);
ctx               308 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               310 arch/arm/crypto/aes-neonbs-glue.c 	ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
ctx               311 arch/arm/crypto/aes-neonbs-glue.c 	if (IS_ERR(ctx->cts_tfm))
ctx               312 arch/arm/crypto/aes-neonbs-glue.c 		return PTR_ERR(ctx->cts_tfm);
ctx               314 arch/arm/crypto/aes-neonbs-glue.c 	ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
ctx               315 arch/arm/crypto/aes-neonbs-glue.c 	if (IS_ERR(ctx->tweak_tfm))
ctx               316 arch/arm/crypto/aes-neonbs-glue.c 		crypto_free_cipher(ctx->cts_tfm);
ctx               318 arch/arm/crypto/aes-neonbs-glue.c 	return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
ctx               323 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               325 arch/arm/crypto/aes-neonbs-glue.c 	crypto_free_cipher(ctx->tweak_tfm);
ctx               326 arch/arm/crypto/aes-neonbs-glue.c 	crypto_free_cipher(ctx->cts_tfm);
ctx               334 arch/arm/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               358 arch/arm/crypto/aes-neonbs-glue.c 	crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);
ctx               371 arch/arm/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
ctx               372 arch/arm/crypto/aes-neonbs-glue.c 		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
ctx               390 arch/arm/crypto/aes-neonbs-glue.c 		crypto_cipher_encrypt_one(ctx->cts_tfm, buf, buf);
ctx               392 arch/arm/crypto/aes-neonbs-glue.c 		crypto_cipher_decrypt_one(ctx->cts_tfm, buf, buf);
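Note: worth flagging in the aesbs init/exit hits above is the error-unwind pattern: the XTS path allocates two fallback "aes" ciphers, and a failure on the second allocation frees the first before the PTR_ERR_OR_ZERO() return. Reassembled directly from the excerpts:

	static int my_xts_init(struct crypto_tfm *tfm)
	{
		struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);

		ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
		if (IS_ERR(ctx->cts_tfm))
			return PTR_ERR(ctx->cts_tfm);

		ctx->tweak_tfm = crypto_alloc_cipher("aes", 0, 0);
		if (IS_ERR(ctx->tweak_tfm))
			crypto_free_cipher(ctx->cts_tfm);	/* undo partial init */

		return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
	}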
ctx                66 arch/arm/crypto/chacha-neon-glue.c 				  const struct chacha_ctx *ctx, const u8 *iv)
ctx                74 arch/arm/crypto/chacha-neon-glue.c 	crypto_chacha_init(state, ctx, iv);
ctx                84 arch/arm/crypto/chacha-neon-glue.c 			      nbytes, ctx->nrounds);
ctx                95 arch/arm/crypto/chacha-neon-glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               100 arch/arm/crypto/chacha-neon-glue.c 	return chacha_neon_stream_xor(req, ctx, req->iv);
ctx               106 arch/arm/crypto/chacha-neon-glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               114 arch/arm/crypto/chacha-neon-glue.c 	crypto_chacha_init(state, ctx, req->iv);
ctx               117 arch/arm/crypto/chacha-neon-glue.c 	hchacha_block_neon(state, subctx.key, ctx->nrounds);
ctx               119 arch/arm/crypto/chacha-neon-glue.c 	subctx.nrounds = ctx->nrounds;
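Note: the xchacha hits show the standard XChaCha construction: run HChaCha over the key and the first 128 bits of the 192-bit nonce to derive a one-shot subkey, then run ordinary ChaCha with the remainder. A sketch assembled from the excerpts; the real_iv byte shuffling is the usual kernel layout but is not visible in the lines above, so treat it as an assumption:

	static int my_xchacha_neon(struct skcipher_request *req)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
		struct chacha_ctx subctx;
		u32 state[16];
		u8 real_iv[16];

		crypto_chacha_init(state, ctx, req->iv);
		hchacha_block_neon(state, subctx.key, ctx->nrounds);	/* derive subkey */
		subctx.nrounds = ctx->nrounds;

		/* remaining 64 nonce bits plus a fresh counter form the inner IV (assumed) */
		memcpy(&real_iv[0], req->iv + 24, 8);
		memcpy(&real_iv[8], req->iv + 16, 8);
		return chacha_neon_stream_xor(req, &subctx, real_iv);
	}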
ctx                62 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                64 arch/arm/crypto/ghash-ce-glue.c 	*ctx = (struct ghash_desc_ctx){};
ctx               101 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               102 arch/arm/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx               104 arch/arm/crypto/ghash-ce-glue.c 	ctx->count += len;
ctx               113 arch/arm/crypto/ghash-ce-glue.c 			memcpy(ctx->buf + partial, src, p);
ctx               121 arch/arm/crypto/ghash-ce-glue.c 		ghash_do_update(blocks, ctx->digest, src, key,
ctx               122 arch/arm/crypto/ghash-ce-glue.c 				partial ? ctx->buf : NULL);
ctx               127 arch/arm/crypto/ghash-ce-glue.c 		memcpy(ctx->buf + partial, src, len);
ctx               133 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               134 arch/arm/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx               139 arch/arm/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
ctx               140 arch/arm/crypto/ghash-ce-glue.c 		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
ctx               142 arch/arm/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[1], dst);
ctx               143 arch/arm/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[0], dst + 8);
ctx               145 arch/arm/crypto/ghash-ce-glue.c 	*ctx = (struct ghash_desc_ctx){};
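Note: ghash_update()/ghash_final() above follow the classic shash buffering discipline: ctx->count % GHASH_BLOCK_SIZE locates the partial block, update tops it up before processing whole blocks, and final zero-pads whatever is left. In outline, where process_blocks() is a hypothetical stand-in for the ghash_do_update() call chain:

	partial = ctx->count % GHASH_BLOCK_SIZE;
	ctx->count += len;
	if (partial + len >= GHASH_BLOCK_SIZE) {
		if (partial) {			/* finish the buffered block first */
			p = GHASH_BLOCK_SIZE - partial;
			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}
		process_blocks(ctx, src, len / GHASH_BLOCK_SIZE);	/* hypothetical helper */
		src += (len / GHASH_BLOCK_SIZE) * GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);	/* stash the new tail */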
ctx               207 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               209 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               221 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               222 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               239 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               240 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               256 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               258 arch/arm/crypto/ghash-ce-glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               278 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               281 arch/arm/crypto/ghash-ce-glue.c 	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);
ctx               297 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               298 arch/arm/crypto/ghash-ce-glue.c 	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
ctx               314 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               319 arch/arm/crypto/ghash-ce-glue.c 	ctx->cryptd_tfm = cryptd_tfm;
ctx               329 arch/arm/crypto/ghash-ce-glue.c 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               331 arch/arm/crypto/ghash-ce-glue.c 	cryptd_free_ahash(ctx->cryptd_tfm);
ctx                61 arch/arm/kernel/suspend.c 	u32 *ctx = ptr;
ctx                83 arch/arm/kernel/suspend.c 	__cpuc_flush_dcache_area(ctx, ptrsz);
ctx               219 arch/arm/net/bpf_jit_32.c static inline void _emit(int cond, u32 inst, struct jit_ctx *ctx)
ctx               224 arch/arm/net/bpf_jit_32.c 	if (ctx->target != NULL)
ctx               225 arch/arm/net/bpf_jit_32.c 		ctx->target[ctx->idx] = inst;
ctx               227 arch/arm/net/bpf_jit_32.c 	ctx->idx++;
ctx               233 arch/arm/net/bpf_jit_32.c static inline void emit(u32 inst, struct jit_ctx *ctx)
ctx               235 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_AL, inst, ctx);
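Note: these two helpers carry the JIT's whole two-pass design: while ctx->target is NULL (the sizing pass) emit() only advances ctx->idx, so instruction counts and ctx->offsets[] can be recorded; the second pass stores real words at the same indices. _emit() additionally takes an ARM condition code for predicated instructions, emit() being the ARM_COND_AL shorthand, as the later excerpts use them:

	emit(ARM_MOV_R(ARM_R0, ARM_R1), ctx);		/* unconditional (AL) */
	_emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);	/* branch only if Z set */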
ctx               349 arch/arm/net/bpf_jit_32.c #define _STACK_SIZE	(ctx->prog->aux->stack_depth + SCRATCH_SIZE)
ctx               354 arch/arm/net/bpf_jit_32.c static u16 imm_offset(u32 k, struct jit_ctx *ctx)
ctx               360 arch/arm/net/bpf_jit_32.c 	if (ctx->target == NULL) {
ctx               361 arch/arm/net/bpf_jit_32.c 		ctx->imm_count++;
ctx               365 arch/arm/net/bpf_jit_32.c 	while ((i < ctx->imm_count) && ctx->imms[i]) {
ctx               366 arch/arm/net/bpf_jit_32.c 		if (ctx->imms[i] == k)
ctx               371 arch/arm/net/bpf_jit_32.c 	if (ctx->imms[i] == 0)
ctx               372 arch/arm/net/bpf_jit_32.c 		ctx->imms[i] = k;
ctx               375 arch/arm/net/bpf_jit_32.c 	offset =  ctx->offsets[ctx->prog->len - 1] * 4;
ctx               376 arch/arm/net/bpf_jit_32.c 	offset += ctx->prologue_bytes;
ctx               377 arch/arm/net/bpf_jit_32.c 	offset += ctx->epilogue_bytes;
ctx               380 arch/arm/net/bpf_jit_32.c 	ctx->target[offset / 4] = k;
ctx               383 arch/arm/net/bpf_jit_32.c 	imm = offset - (8 + ctx->idx * 4);
ctx               390 arch/arm/net/bpf_jit_32.c 		ctx->flags |= FLAG_IMM_OVERFLOW;
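Note: imm_offset() maintains a literal pool placed after the epilogue: constants that won't encode as ARM immediates are counted during the sizing pass (ctx->imm_count++), deduplicated into ctx->imms[] on the second pass, and loaded PC-relative. The fixed 8 in the displacement exists because an ARM instruction reads PC as its own address plus 8. Worked with assumed numbers:

	/* Say the pool slot lands at byte offset 96 and the LDR is being
	 * emitted at instruction index 10 (byte 40):
	 *     imm = 96 - (8 + 10 * 4) = 48
	 * so the emitted "ldr rd, [pc, #48]" reaches the slot exactly; a
	 * displacement that no longer fits the load's immediate field
	 * sets FLAG_IMM_OVERFLOW instead. */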
ctx               400 arch/arm/net/bpf_jit_32.c 				 const struct jit_ctx *ctx) {
ctx               403 arch/arm/net/bpf_jit_32.c 	if (ctx->target == NULL)
ctx               405 arch/arm/net/bpf_jit_32.c 	to = ctx->offsets[bpf_to];
ctx               406 arch/arm/net/bpf_jit_32.c 	from = ctx->offsets[bpf_from];
ctx               414 arch/arm/net/bpf_jit_32.c static inline void emit_mov_i_no8m(const u8 rd, u32 val, struct jit_ctx *ctx)
ctx               417 arch/arm/net/bpf_jit_32.c 	emit(ARM_LDR_I(rd, ARM_PC, imm_offset(val, ctx)), ctx);
ctx               419 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOVW(rd, val & 0xffff), ctx);
ctx               421 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOVT(rd, val >> 16), ctx);
ctx               425 arch/arm/net/bpf_jit_32.c static inline void emit_mov_i(const u8 rd, u32 val, struct jit_ctx *ctx)
ctx               430 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_I(rd, imm12), ctx);
ctx               432 arch/arm/net/bpf_jit_32.c 		emit_mov_i_no8m(rd, val, ctx);
ctx               435 arch/arm/net/bpf_jit_32.c static void emit_bx_r(u8 tgt_reg, struct jit_ctx *ctx)
ctx               438 arch/arm/net/bpf_jit_32.c 		emit(ARM_BX(tgt_reg), ctx);
ctx               440 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(ARM_PC, tgt_reg), ctx);
ctx               443 arch/arm/net/bpf_jit_32.c static inline void emit_blx_r(u8 tgt_reg, struct jit_ctx *ctx)
ctx               446 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(ARM_LR, ARM_PC), ctx);
ctx               447 arch/arm/net/bpf_jit_32.c 	emit_bx_r(tgt_reg, ctx);
ctx               449 arch/arm/net/bpf_jit_32.c 	emit(ARM_BLX_R(tgt_reg), ctx);
ctx               453 arch/arm/net/bpf_jit_32.c static inline int epilogue_offset(const struct jit_ctx *ctx)
ctx               457 arch/arm/net/bpf_jit_32.c 	if (ctx->target == NULL)
ctx               459 arch/arm/net/bpf_jit_32.c 	to = ctx->epilogue_offset;
ctx               460 arch/arm/net/bpf_jit_32.c 	from = ctx->idx;
ctx               465 arch/arm/net/bpf_jit_32.c static inline void emit_udivmod(u8 rd, u8 rm, u8 rn, struct jit_ctx *ctx, u8 op)
ctx               472 arch/arm/net/bpf_jit_32.c 			emit(ARM_UDIV(rd, rm, rn), ctx);
ctx               474 arch/arm/net/bpf_jit_32.c 			emit(ARM_UDIV(ARM_IP, rm, rn), ctx);
ctx               475 arch/arm/net/bpf_jit_32.c 			emit(ARM_MLS(rd, rn, ARM_IP, rm), ctx);
ctx               490 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(tmp[0], ARM_R1), ctx);
ctx               491 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(ARM_R1, rn), ctx);
ctx               494 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(tmp[1], ARM_R0), ctx);
ctx               495 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(ARM_R0, rm), ctx);
ctx               500 arch/arm/net/bpf_jit_32.c 		   (u32)jit_udiv32 : (u32)jit_mod32, ctx);
ctx               501 arch/arm/net/bpf_jit_32.c 	emit_blx_r(ARM_IP, ctx);
ctx               505 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(rd, ARM_R0), ctx);
ctx               509 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(ARM_R1, tmp[0]), ctx);
ctx               511 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(ARM_R0, tmp[1]), ctx);
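Note: emit_udivmod() has two strategies, both visible above: cores with hardware UDIV compute a remainder as UDIV plus MLS, while everything else parks the live values in temporaries, marshals the operands into r0/r1, and calls a C helper (jit_udiv32 or jit_mod32) through blx. The MLS identity:

	/* rd = rm % rn without a remainder instruction:
	 *     ip = rm / rn        (UDIV)
	 *     rd = rm - ip * rn   (MLS, multiply-and-subtract)
	 * e.g. 17 % 5: ip = 3, rd = 17 - 3*5 = 2 */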
ctx               524 arch/arm/net/bpf_jit_32.c static s8 arm_bpf_get_reg32(s8 reg, s8 tmp, struct jit_ctx *ctx)
ctx               527 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDR_I(tmp, ARM_FP, EBPF_SCRATCH_TO_ARM_FP(reg)), ctx);
ctx               534 arch/arm/net/bpf_jit_32.c 				   struct jit_ctx *ctx)
ctx               538 arch/arm/net/bpf_jit_32.c 		    ctx->cpu_architecture >= CPU_ARCH_ARMv5TE) {
ctx               540 arch/arm/net/bpf_jit_32.c 					EBPF_SCRATCH_TO_ARM_FP(reg[1])), ctx);
ctx               543 arch/arm/net/bpf_jit_32.c 				       EBPF_SCRATCH_TO_ARM_FP(reg[1])), ctx);
ctx               545 arch/arm/net/bpf_jit_32.c 				       EBPF_SCRATCH_TO_ARM_FP(reg[0])), ctx);
ctx               556 arch/arm/net/bpf_jit_32.c static void arm_bpf_put_reg32(s8 reg, s8 src, struct jit_ctx *ctx)
ctx               559 arch/arm/net/bpf_jit_32.c 		emit(ARM_STR_I(src, ARM_FP, EBPF_SCRATCH_TO_ARM_FP(reg)), ctx);
ctx               561 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(reg, src), ctx);
ctx               565 arch/arm/net/bpf_jit_32.c 			      struct jit_ctx *ctx)
ctx               569 arch/arm/net/bpf_jit_32.c 		    ctx->cpu_architecture >= CPU_ARCH_ARMv5TE) {
ctx               571 arch/arm/net/bpf_jit_32.c 				       EBPF_SCRATCH_TO_ARM_FP(reg[1])), ctx);
ctx               574 arch/arm/net/bpf_jit_32.c 				       EBPF_SCRATCH_TO_ARM_FP(reg[1])), ctx);
ctx               576 arch/arm/net/bpf_jit_32.c 				       EBPF_SCRATCH_TO_ARM_FP(reg[0])), ctx);
ctx               580 arch/arm/net/bpf_jit_32.c 			emit(ARM_MOV_R(reg[1], src[1]), ctx);
ctx               582 arch/arm/net/bpf_jit_32.c 			emit(ARM_MOV_R(reg[0], src[0]), ctx);
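Note: arm_bpf_get_reg32/64() and the put counterparts hide this JIT's central compromise: the 64-bit eBPF register file does not fit in ARM's, so some registers live in FP-relative stack scratch (EBPF_SCRATCH_TO_ARM_FP()). Get returns either the hardware register itself or the temporary it loaded the slot into, and put writes back only as needed, so callers stay uniform:

	rd = arm_bpf_get_reg32(dst, tmp[0], ctx);	/* reg, or tmp[0] loaded from scratch */
	rn = arm_bpf_get_reg32(src, tmp[1], ctx);
	emit(ARM_ADD_R(rd, rd, rn), ctx);
	arm_bpf_put_reg32(dst, rd, ctx);		/* store to scratch, or register move */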
ctx               587 arch/arm/net/bpf_jit_32.c 				  struct jit_ctx *ctx)
ctx               592 arch/arm/net/bpf_jit_32.c 		emit_mov_i(tmp[1], val, ctx);
ctx               593 arch/arm/net/bpf_jit_32.c 		arm_bpf_put_reg32(dst, tmp[1], ctx);
ctx               595 arch/arm/net/bpf_jit_32.c 		emit_mov_i(dst, val, ctx);
ctx               599 arch/arm/net/bpf_jit_32.c static void emit_a32_mov_i64(const s8 dst[], u64 val, struct jit_ctx *ctx)
ctx               604 arch/arm/net/bpf_jit_32.c 	emit_mov_i(rd[1], (u32)val, ctx);
ctx               605 arch/arm/net/bpf_jit_32.c 	emit_mov_i(rd[0], val >> 32, ctx);
ctx               607 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               612 arch/arm/net/bpf_jit_32.c 				       const u32 val, struct jit_ctx *ctx) {
ctx               617 arch/arm/net/bpf_jit_32.c 	emit_a32_mov_i64(dst, val64, ctx);
ctx               622 arch/arm/net/bpf_jit_32.c 			      struct jit_ctx *ctx) {
ctx               630 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADDS_R(dst, dst, src), ctx);
ctx               632 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADC_R(dst, dst, src), ctx);
ctx               634 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADD_R(dst, dst, src), ctx);
ctx               639 arch/arm/net/bpf_jit_32.c 				  struct jit_ctx *ctx) {
ctx               647 arch/arm/net/bpf_jit_32.c 		emit(ARM_SUBS_R(dst, dst, src), ctx);
ctx               649 arch/arm/net/bpf_jit_32.c 		emit(ARM_SBC_R(dst, dst, src), ctx);
ctx               651 arch/arm/net/bpf_jit_32.c 		emit(ARM_SUB_R(dst, dst, src), ctx);
ctx               655 arch/arm/net/bpf_jit_32.c 			      const bool hi, const u8 op, struct jit_ctx *ctx){
ctx               659 arch/arm/net/bpf_jit_32.c 		emit_a32_add_r(dst, src, is64, hi, ctx);
ctx               663 arch/arm/net/bpf_jit_32.c 		emit_a32_sub_r(dst, src, is64, hi, ctx);
ctx               667 arch/arm/net/bpf_jit_32.c 		emit(ARM_ORR_R(dst, dst, src), ctx);
ctx               671 arch/arm/net/bpf_jit_32.c 		emit(ARM_AND_R(dst, dst, src), ctx);
ctx               675 arch/arm/net/bpf_jit_32.c 		emit(ARM_EOR_R(dst, dst, src), ctx);
ctx               679 arch/arm/net/bpf_jit_32.c 		emit(ARM_MUL(dst, dst, src), ctx);
ctx               683 arch/arm/net/bpf_jit_32.c 		emit(ARM_LSL_R(dst, dst, src), ctx);
ctx               687 arch/arm/net/bpf_jit_32.c 		emit(ARM_LSR_R(dst, dst, src), ctx);
ctx               691 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SR(dst, dst, SRTYPE_ASR, src), ctx);
ctx               700 arch/arm/net/bpf_jit_32.c 				  struct jit_ctx *ctx, const bool is64,
ctx               705 arch/arm/net/bpf_jit_32.c 	rn = arm_bpf_get_reg32(src, tmp[1], ctx);
ctx               706 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg32(dst, tmp[0], ctx);
ctx               708 arch/arm/net/bpf_jit_32.c 	emit_alu_r(rd, rn, is64, hi, op, ctx);
ctx               709 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst, rd, ctx);
ctx               714 arch/arm/net/bpf_jit_32.c 				  const s8 src[], struct jit_ctx *ctx,
ctx               720 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               724 arch/arm/net/bpf_jit_32.c 		rs = arm_bpf_get_reg64(src, tmp2, ctx);
ctx               727 arch/arm/net/bpf_jit_32.c 		emit_alu_r(rd[1], rs[1], true, false, op, ctx);
ctx               728 arch/arm/net/bpf_jit_32.c 		emit_alu_r(rd[0], rs[0], true, true, op, ctx);
ctx               732 arch/arm/net/bpf_jit_32.c 		rs = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx               735 arch/arm/net/bpf_jit_32.c 		emit_alu_r(rd[1], rs, true, false, op, ctx);
ctx               736 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx               737 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(rd[0], 0, ctx);
ctx               740 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               745 arch/arm/net/bpf_jit_32.c 				  struct jit_ctx *ctx) {
ctx               749 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg32(src, tmp[0], ctx);
ctx               750 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst, rt, ctx);
ctx               756 arch/arm/net/bpf_jit_32.c 				  struct jit_ctx *ctx) {
ctx               758 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_r(dst_lo, src_lo, ctx);
ctx               759 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx               761 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(dst_hi, 0, ctx);
ctx               763 arch/arm/net/bpf_jit_32.c 		   ctx->cpu_architecture < CPU_ARCH_ARMv5TE) {
ctx               765 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_r(dst_lo, src_lo, ctx);
ctx               766 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_r(dst_hi, src_hi, ctx);
ctx               770 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDRD_I(tmp[1], ARM_FP, EBPF_SCRATCH_TO_ARM_FP(src_lo)), ctx);
ctx               771 arch/arm/net/bpf_jit_32.c 		emit(ARM_STRD_I(tmp[1], ARM_FP, EBPF_SCRATCH_TO_ARM_FP(dst_lo)), ctx);
ctx               773 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDRD_I(dst[1], ARM_FP, EBPF_SCRATCH_TO_ARM_FP(src_lo)), ctx);
ctx               775 arch/arm/net/bpf_jit_32.c 		emit(ARM_STRD_I(src[1], ARM_FP, EBPF_SCRATCH_TO_ARM_FP(dst_lo)), ctx);
ctx               777 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(dst[0], src[0]), ctx);
ctx               778 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(dst[1], src[1]), ctx);
ctx               784 arch/arm/net/bpf_jit_32.c 				struct jit_ctx *ctx, const u8 op) {
ctx               788 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg32(dst, tmp[0], ctx);
ctx               793 arch/arm/net/bpf_jit_32.c 		emit(ARM_LSL_I(rd, rd, val), ctx);
ctx               796 arch/arm/net/bpf_jit_32.c 		emit(ARM_LSR_I(rd, rd, val), ctx);
ctx               799 arch/arm/net/bpf_jit_32.c 		emit(ARM_RSB_I(rd, rd, val), ctx);
ctx               803 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst, rd, ctx);
ctx               808 arch/arm/net/bpf_jit_32.c 				struct jit_ctx *ctx){
ctx               813 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               816 arch/arm/net/bpf_jit_32.c 	emit(ARM_RSBS_I(rd[1], rd[1], 0), ctx);
ctx               817 arch/arm/net/bpf_jit_32.c 	emit(ARM_RSC_I(rd[0], rd[0], 0), ctx);
ctx               819 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               824 arch/arm/net/bpf_jit_32.c 				    struct jit_ctx *ctx) {
ctx               831 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx               832 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               835 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUB_I(ARM_IP, rt, 32), ctx);
ctx               836 arch/arm/net/bpf_jit_32.c 	emit(ARM_RSB_I(tmp2[0], rt, 32), ctx);
ctx               837 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_LR, rd[0], SRTYPE_ASL, rt), ctx);
ctx               838 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_LR, ARM_LR, rd[1], SRTYPE_ASL, ARM_IP), ctx);
ctx               839 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_IP, ARM_LR, rd[1], SRTYPE_LSR, tmp2[0]), ctx);
ctx               840 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_LR, rd[1], SRTYPE_ASL, rt), ctx);
ctx               842 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_lo, ARM_LR, ctx);
ctx               843 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_hi, ARM_IP, ctx);
ctx               848 arch/arm/net/bpf_jit_32.c 				     struct jit_ctx *ctx) {
ctx               855 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx               856 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               859 arch/arm/net/bpf_jit_32.c 	emit(ARM_RSB_I(ARM_IP, rt, 32), ctx);
ctx               860 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUBS_I(tmp2[0], rt, 32), ctx);
ctx               861 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_LR, rd[1], SRTYPE_LSR, rt), ctx);
ctx               862 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_LR, ARM_LR, rd[0], SRTYPE_ASL, ARM_IP), ctx);
ctx               863 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_MI, ARM_B(0), ctx);
ctx               864 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_LR, ARM_LR, rd[0], SRTYPE_ASR, tmp2[0]), ctx);
ctx               865 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_IP, rd[0], SRTYPE_ASR, rt), ctx);
ctx               867 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_lo, ARM_LR, ctx);
ctx               868 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_hi, ARM_IP, ctx);
ctx               873 arch/arm/net/bpf_jit_32.c 				    struct jit_ctx *ctx) {
ctx               880 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx               881 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               884 arch/arm/net/bpf_jit_32.c 	emit(ARM_RSB_I(ARM_IP, rt, 32), ctx);
ctx               885 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUBS_I(tmp2[0], rt, 32), ctx);
ctx               886 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_LR, rd[1], SRTYPE_LSR, rt), ctx);
ctx               887 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_LR, ARM_LR, rd[0], SRTYPE_ASL, ARM_IP), ctx);
ctx               888 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SR(ARM_LR, ARM_LR, rd[0], SRTYPE_LSR, tmp2[0]), ctx);
ctx               889 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SR(ARM_IP, rd[0], SRTYPE_LSR, rt), ctx);
ctx               891 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_lo, ARM_LR, ctx);
ctx               892 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_hi, ARM_IP, ctx);
ctx               897 arch/arm/net/bpf_jit_32.c 				    const u32 val, struct jit_ctx *ctx){
ctx               903 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               907 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(tmp2[0], rd[0], SRTYPE_ASL, val), ctx);
ctx               908 arch/arm/net/bpf_jit_32.c 		emit(ARM_ORR_SI(rd[0], tmp2[0], rd[1], SRTYPE_LSR, 32 - val), ctx);
ctx               909 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[1], rd[1], SRTYPE_ASL, val), ctx);
ctx               912 arch/arm/net/bpf_jit_32.c 			emit(ARM_MOV_R(rd[0], rd[1]), ctx);
ctx               914 arch/arm/net/bpf_jit_32.c 			emit(ARM_MOV_SI(rd[0], rd[1], SRTYPE_ASL, val - 32), ctx);
ctx               915 arch/arm/net/bpf_jit_32.c 		emit(ARM_EOR_R(rd[1], rd[1], rd[1]), ctx);
ctx               918 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               923 arch/arm/net/bpf_jit_32.c 				    const u32 val, struct jit_ctx *ctx) {
ctx               929 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               937 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(tmp2[1], rd[1], SRTYPE_LSR, val), ctx);
ctx               938 arch/arm/net/bpf_jit_32.c 		emit(ARM_ORR_SI(rd[1], tmp2[1], rd[0], SRTYPE_ASL, 32 - val), ctx);
ctx               939 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[0], rd[0], SRTYPE_LSR, val), ctx);
ctx               941 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(rd[1], rd[0]), ctx);
ctx               942 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_I(rd[0], 0), ctx);
ctx               944 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[1], rd[0], SRTYPE_LSR, val - 32), ctx);
ctx               945 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_I(rd[0], 0), ctx);
ctx               948 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               953 arch/arm/net/bpf_jit_32.c 				     const u32 val, struct jit_ctx *ctx){
ctx               959 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               967 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(tmp2[1], rd[1], SRTYPE_LSR, val), ctx);
ctx               968 arch/arm/net/bpf_jit_32.c 		emit(ARM_ORR_SI(rd[1], tmp2[1], rd[0], SRTYPE_ASL, 32 - val), ctx);
ctx               969 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[0], rd[0], SRTYPE_ASR, val), ctx);
ctx               971 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(rd[1], rd[0]), ctx);
ctx               972 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[0], rd[0], SRTYPE_ASR, 31), ctx);
ctx               974 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[1], rd[0], SRTYPE_ASR, val - 32), ctx);
ctx               975 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_SI(rd[0], rd[0], SRTYPE_ASR, 31), ctx);
ctx               978 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx               982 arch/arm/net/bpf_jit_32.c 				    struct jit_ctx *ctx) {
ctx               988 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx               989 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg64(src, tmp2, ctx);
ctx               992 arch/arm/net/bpf_jit_32.c 	emit(ARM_MUL(ARM_IP, rd[1], rt[0]), ctx);
ctx               993 arch/arm/net/bpf_jit_32.c 	emit(ARM_MUL(ARM_LR, rd[0], rt[1]), ctx);
ctx               994 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADD_R(ARM_LR, ARM_IP, ARM_LR), ctx);
ctx               996 arch/arm/net/bpf_jit_32.c 	emit(ARM_UMULL(ARM_IP, rd[0], rd[1], rt[1]), ctx);
ctx               997 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADD_R(rd[0], ARM_LR, rd[0]), ctx);
ctx               999 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_lo, ARM_IP, ctx);
ctx              1000 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg32(dst_hi, rd[0], ctx);
ctx              1025 arch/arm/net/bpf_jit_32.c 			      s16 off, struct jit_ctx *ctx, const u8 sz){
ctx              1029 arch/arm/net/bpf_jit_32.c 	rd = arm_bpf_get_reg32(dst, tmp[1], ctx);
ctx              1032 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_i(tmp[0], off, ctx);
ctx              1033 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADD_R(tmp[0], tmp[0], rd), ctx);
ctx              1040 arch/arm/net/bpf_jit_32.c 		emit(ARM_STRB_I(src_lo, rd, off), ctx);
ctx              1044 arch/arm/net/bpf_jit_32.c 		emit(ARM_STRH_I(src_lo, rd, off), ctx);
ctx              1048 arch/arm/net/bpf_jit_32.c 		emit(ARM_STR_I(src_lo, rd, off), ctx);
ctx              1052 arch/arm/net/bpf_jit_32.c 		emit(ARM_STR_I(src_lo, rd, off), ctx);
ctx              1053 arch/arm/net/bpf_jit_32.c 		emit(ARM_STR_I(src_hi, rd, off + 4), ctx);
ctx              1060 arch/arm/net/bpf_jit_32.c 			      s16 off, struct jit_ctx *ctx, const u8 sz){
ctx              1066 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_i(tmp[0], off, ctx);
ctx              1067 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADD_R(tmp[0], tmp[0], src), ctx);
ctx              1071 arch/arm/net/bpf_jit_32.c 		emit(ARM_MOV_R(tmp[0], rm), ctx);
ctx              1077 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDRB_I(rd[1], rm, off), ctx);
ctx              1078 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1079 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(rd[0], 0, ctx);
ctx              1083 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDRH_I(rd[1], rm, off), ctx);
ctx              1084 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1085 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(rd[0], 0, ctx);
ctx              1089 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDR_I(rd[1], rm, off), ctx);
ctx              1090 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1091 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(rd[0], 0, ctx);
ctx              1095 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDR_I(rd[1], rm, off), ctx);
ctx              1096 arch/arm/net/bpf_jit_32.c 		emit(ARM_LDR_I(rd[0], rm, off + 4), ctx);
ctx              1099 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(dst, rd, ctx);
ctx              1104 arch/arm/net/bpf_jit_32.c 			     const u8 rn, struct jit_ctx *ctx, u8 op,
ctx              1109 arch/arm/net/bpf_jit_32.c 			emit(ARM_AND_R(ARM_IP, rt, rn), ctx);
ctx              1110 arch/arm/net/bpf_jit_32.c 			emit(ARM_AND_R(ARM_LR, rd, rm), ctx);
ctx              1111 arch/arm/net/bpf_jit_32.c 			emit(ARM_ORRS_R(ARM_IP, ARM_LR, ARM_IP), ctx);
ctx              1113 arch/arm/net/bpf_jit_32.c 			emit(ARM_ANDS_R(ARM_IP, rt, rn), ctx);
ctx              1123 arch/arm/net/bpf_jit_32.c 			emit(ARM_CMP_R(rd, rm), ctx);
ctx              1125 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_EQ, ARM_CMP_R(rt, rn), ctx);
ctx              1127 arch/arm/net/bpf_jit_32.c 			emit(ARM_CMP_R(rt, rn), ctx);
ctx              1132 arch/arm/net/bpf_jit_32.c 		emit(ARM_CMP_R(rn, rt), ctx);
ctx              1134 arch/arm/net/bpf_jit_32.c 			emit(ARM_SBCS_R(ARM_IP, rm, rd), ctx);
ctx              1138 arch/arm/net/bpf_jit_32.c 		emit(ARM_CMP_R(rt, rn), ctx);
ctx              1140 arch/arm/net/bpf_jit_32.c 			emit(ARM_SBCS_R(ARM_IP, rd, rm), ctx);
ctx              1146 arch/arm/net/bpf_jit_32.c static int emit_bpf_tail_call(struct jit_ctx *ctx)
ctx              1156 arch/arm/net/bpf_jit_32.c 	const int idx0 = ctx->idx;
ctx              1157 arch/arm/net/bpf_jit_32.c #define cur_offset (ctx->idx - idx0)
ctx              1169 arch/arm/net/bpf_jit_32.c 	r_array = arm_bpf_get_reg32(r2[1], tmp2[0], ctx);
ctx              1171 arch/arm/net/bpf_jit_32.c 	r_index = arm_bpf_get_reg32(r3[1], tmp2[1], ctx);
ctx              1173 arch/arm/net/bpf_jit_32.c 	emit(ARM_LDR_I(tmp[1], r_array, off), ctx);
ctx              1175 arch/arm/net/bpf_jit_32.c 	emit(ARM_CMP_R(r_index, tmp[1]), ctx);
ctx              1176 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_CS, ARM_B(jmp_offset), ctx);
ctx              1186 arch/arm/net/bpf_jit_32.c 	tc = arm_bpf_get_reg64(tcc, tmp, ctx);
ctx              1187 arch/arm/net/bpf_jit_32.c 	emit(ARM_CMP_I(tc[0], hi), ctx);
ctx              1188 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_EQ, ARM_CMP_I(tc[1], lo), ctx);
ctx              1189 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_HI, ARM_B(jmp_offset), ctx);
ctx              1190 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADDS_I(tc[1], tc[1], 1), ctx);
ctx              1191 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADC_I(tc[0], tc[0], 0), ctx);
ctx              1192 arch/arm/net/bpf_jit_32.c 	arm_bpf_put_reg64(tcc, tmp, ctx);
ctx              1200 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADD_I(tmp[1], r_array, off), ctx);
ctx              1201 arch/arm/net/bpf_jit_32.c 	emit(ARM_LDR_R_SI(tmp[1], tmp[1], r_index, SRTYPE_ASL, 2), ctx);
ctx              1202 arch/arm/net/bpf_jit_32.c 	emit(ARM_CMP_I(tmp[1], 0), ctx);
ctx              1203 arch/arm/net/bpf_jit_32.c 	_emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);
ctx              1209 arch/arm/net/bpf_jit_32.c 	emit(ARM_LDR_I(tmp[1], tmp[1], off), ctx);
ctx              1210 arch/arm/net/bpf_jit_32.c 	emit(ARM_ADD_I(tmp[1], tmp[1], ctx->prologue_bytes), ctx);
ctx              1211 arch/arm/net/bpf_jit_32.c 	emit_bx_r(tmp[1], ctx);
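Note: emit_bpf_tail_call() above is the ARM rendering of the generic tail-call contract; the three guarded exits and the final computed jump read most easily as the C they mirror:

	/* if (index >= array->map.max_entries)
	 *         goto out;
	 * if (tail_call_cnt > MAX_TAIL_CALL_CNT)
	 *         goto out;
	 * tail_call_cnt++;
	 * prog = array->ptrs[index];
	 * if (prog == NULL)
	 *         goto out;
	 * goto *(prog->bpf_func + prologue_size);   -- skip the callee's prologue
	 */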
ctx              1227 arch/arm/net/bpf_jit_32.c static inline void emit_rev16(const u8 rd, const u8 rn, struct jit_ctx *ctx)
ctx              1232 arch/arm/net/bpf_jit_32.c 	emit(ARM_AND_I(tmp2[1], rn, 0xff), ctx);
ctx              1233 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SI(tmp2[0], rn, SRTYPE_LSR, 8), ctx);
ctx              1234 arch/arm/net/bpf_jit_32.c 	emit(ARM_AND_I(tmp2[0], tmp2[0], 0xff), ctx);
ctx              1235 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SI(rd, tmp2[0], tmp2[1], SRTYPE_LSL, 8), ctx);
ctx              1237 arch/arm/net/bpf_jit_32.c 	emit(ARM_REV16(rd, rn), ctx);
ctx              1242 arch/arm/net/bpf_jit_32.c static inline void emit_rev32(const u8 rd, const u8 rn, struct jit_ctx *ctx)
ctx              1247 arch/arm/net/bpf_jit_32.c 	emit(ARM_AND_I(tmp2[1], rn, 0xff), ctx);
ctx              1248 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SI(tmp2[0], rn, SRTYPE_LSR, 24), ctx);
ctx              1249 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SI(ARM_IP, tmp2[0], tmp2[1], SRTYPE_LSL, 24), ctx);
ctx              1251 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SI(tmp2[1], rn, SRTYPE_LSR, 8), ctx);
ctx              1252 arch/arm/net/bpf_jit_32.c 	emit(ARM_AND_I(tmp2[1], tmp2[1], 0xff), ctx);
ctx              1253 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SI(tmp2[0], rn, SRTYPE_LSR, 16), ctx);
ctx              1254 arch/arm/net/bpf_jit_32.c 	emit(ARM_AND_I(tmp2[0], tmp2[0], 0xff), ctx);
ctx              1255 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_SI(tmp2[0], tmp2[0], SRTYPE_LSL, 8), ctx);
ctx              1256 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_SI(tmp2[0], tmp2[0], tmp2[1], SRTYPE_LSL, 16), ctx);
ctx              1257 arch/arm/net/bpf_jit_32.c 	emit(ARM_ORR_R(rd, ARM_IP, tmp2[0]), ctx);
ctx              1260 arch/arm/net/bpf_jit_32.c 	emit(ARM_REV(rd, rn), ctx);
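Note: emit_rev16()/emit_rev32() pick between a single ARMv6+ REV16/REV and a pre-v6 open-coded swap built from masked shifts and ORRs; both compute the same permutation:

	/* 32-bit swap: 0x11223344 -> 0x44332211 (one REV, or four mask+shift ops)
	 * 16-bit swap, where only the low halfword matters for BPF_H:
	 *     0x3344 -> 0x4433 */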
ctx              1265 arch/arm/net/bpf_jit_32.c static inline void emit_push_r64(const s8 src[], struct jit_ctx *ctx)
ctx              1271 arch/arm/net/bpf_jit_32.c 	rt = arm_bpf_get_reg64(src, tmp2, ctx);
ctx              1274 arch/arm/net/bpf_jit_32.c 	emit(ARM_PUSH(reg_set), ctx);
ctx              1277 arch/arm/net/bpf_jit_32.c static void build_prologue(struct jit_ctx *ctx)
ctx              1290 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(ARM_IP, ARM_SP), ctx);
ctx              1291 arch/arm/net/bpf_jit_32.c 	emit(ARM_PUSH(reg_set), ctx);
ctx              1292 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUB_I(ARM_FP, ARM_IP, 4), ctx);
ctx              1294 arch/arm/net/bpf_jit_32.c 	emit(ARM_PUSH(CALLEE_PUSH_MASK), ctx);
ctx              1295 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(ARM_FP, ARM_SP), ctx);
ctx              1298 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUB_I(ARM_IP, ARM_SP, SCRATCH_SIZE), ctx);
ctx              1300 arch/arm/net/bpf_jit_32.c 	ctx->stack_size = imm8m(STACK_SIZE);
ctx              1303 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUB_I(ARM_SP, ARM_SP, ctx->stack_size), ctx);
ctx              1306 arch/arm/net/bpf_jit_32.c 	emit_a32_mov_r(fplo, ARM_IP, ctx);
ctx              1307 arch/arm/net/bpf_jit_32.c 	emit_a32_mov_i(fphi, 0, ctx);
ctx              1310 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_I(r4, 0), ctx);
ctx              1313 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(r3, r4), ctx);
ctx              1314 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(r2, r0), ctx);
ctx              1316 arch/arm/net/bpf_jit_32.c 	emit(ARM_STR_I(r4, ARM_FP, EBPF_SCRATCH_TO_ARM_FP(tcc[0])), ctx);
ctx              1317 arch/arm/net/bpf_jit_32.c 	emit(ARM_STR_I(r4, ARM_FP, EBPF_SCRATCH_TO_ARM_FP(tcc[1])), ctx);
ctx              1322 arch/arm/net/bpf_jit_32.c static void build_epilogue(struct jit_ctx *ctx)
ctx              1328 arch/arm/net/bpf_jit_32.c 	emit(ARM_SUB_I(ARM_SP, ARM_FP, hweight16(reg_set) * 4), ctx);
ctx              1329 arch/arm/net/bpf_jit_32.c 	emit(ARM_LDM(ARM_SP, reg_set), ctx);
ctx              1332 arch/arm/net/bpf_jit_32.c 	emit(ARM_MOV_R(ARM_SP, ARM_FP), ctx);
ctx              1333 arch/arm/net/bpf_jit_32.c 	emit(ARM_POP(CALLEE_POP_MASK), ctx);
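Note: build_prologue()/build_epilogue() bracket every program with a conventional ARM frame. Reading the excerpts together, the area below the saved registers is carved into scratch for the stacked eBPF registers plus the program's own stack, with BPF R10 (fplo/fphi) pointed between the two; roughly (layout inferred from the excerpts, so treat the exact ordering as an assumption):

	/* after build_prologue() (addresses descending):
	 *     saved regs (r4.., fp, ip, lr)    <- ARM_FP anchors here
	 *     eBPF register scratch            <- EBPF_SCRATCH_TO_ARM_FP(reg)
	 *     BPF stack (aux->stack_depth)     <- BPF R10 = fplo (fphi stays 0)
	 *                                      <- ARM_SP
	 * the tail-call counter tcc[] is zeroed into two scratch words. */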
ctx              1345 arch/arm/net/bpf_jit_32.c static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx)
ctx              1354 arch/arm/net/bpf_jit_32.c 	const int i = insn - ctx->prog->insnsi;
ctx              1382 arch/arm/net/bpf_jit_32.c 				emit_a32_mov_i(dst_hi, 0, ctx);
ctx              1385 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_r64(is64, dst, src, ctx);
ctx              1389 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_se_i64(is64, dst, imm, ctx);
ctx              1429 arch/arm/net/bpf_jit_32.c 			emit_a32_alu_r64(is64, dst, src, ctx, BPF_OP(code));
ctx              1438 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_se_i64(is64, tmp2, imm, ctx);
ctx              1439 arch/arm/net/bpf_jit_32.c 			emit_a32_alu_r64(is64, dst, tmp2, ctx, BPF_OP(code));
ctx              1449 arch/arm/net/bpf_jit_32.c 		rd_lo = arm_bpf_get_reg32(dst_lo, tmp2[1], ctx);
ctx              1452 arch/arm/net/bpf_jit_32.c 			rt = arm_bpf_get_reg32(src_lo, tmp2[0], ctx);
ctx              1456 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(rt, imm, ctx);
ctx              1462 arch/arm/net/bpf_jit_32.c 		emit_udivmod(rd_lo, rd_lo, rt, ctx, BPF_OP(code));
ctx              1463 arch/arm/net/bpf_jit_32.c 		arm_bpf_put_reg32(dst_lo, rd_lo, ctx);
ctx              1464 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1465 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(dst_hi, 0, ctx);
ctx              1479 arch/arm/net/bpf_jit_32.c 			emit_a32_alu_i(dst_lo, imm, ctx, BPF_OP(code));
ctx              1480 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1481 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(dst_hi, 0, ctx);
ctx              1487 arch/arm/net/bpf_jit_32.c 		emit_a32_lsh_i64(dst, imm, ctx);
ctx              1493 arch/arm/net/bpf_jit_32.c 		emit_a32_rsh_i64(dst, imm, ctx);
ctx              1497 arch/arm/net/bpf_jit_32.c 		emit_a32_lsh_r64(dst, src, ctx);
ctx              1501 arch/arm/net/bpf_jit_32.c 		emit_a32_rsh_r64(dst, src, ctx);
ctx              1505 arch/arm/net/bpf_jit_32.c 		emit_a32_arsh_r64(dst, src, ctx);
ctx              1511 arch/arm/net/bpf_jit_32.c 		emit_a32_arsh_i64(dst, imm, ctx);
ctx              1515 arch/arm/net/bpf_jit_32.c 		emit_a32_alu_i(dst_lo, 0, ctx, BPF_OP(code));
ctx              1516 arch/arm/net/bpf_jit_32.c 		if (!ctx->prog->aux->verifier_zext)
ctx              1517 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(dst_hi, 0, ctx);
ctx              1521 arch/arm/net/bpf_jit_32.c 		emit_a32_neg64(dst, ctx);
ctx              1528 arch/arm/net/bpf_jit_32.c 			emit_a32_mul_r64(dst, src, ctx);
ctx              1537 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_se_i64(is64, tmp2, imm, ctx);
ctx              1538 arch/arm/net/bpf_jit_32.c 			emit_a32_mul_r64(dst, tmp2, ctx);
ctx              1546 arch/arm/net/bpf_jit_32.c 		rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx              1551 arch/arm/net/bpf_jit_32.c 			emit_rev16(rd[1], rd[1], ctx);
ctx              1554 arch/arm/net/bpf_jit_32.c 			emit_rev32(rd[1], rd[1], ctx);
ctx              1557 arch/arm/net/bpf_jit_32.c 			emit_rev32(ARM_LR, rd[1], ctx);
ctx              1558 arch/arm/net/bpf_jit_32.c 			emit_rev32(rd[1], rd[0], ctx);
ctx              1559 arch/arm/net/bpf_jit_32.c 			emit(ARM_MOV_R(rd[0], ARM_LR), ctx);
ctx              1568 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(tmp2[1], 0xffff, ctx);
ctx              1569 arch/arm/net/bpf_jit_32.c 			emit(ARM_AND_R(rd[1], rd[1], tmp2[1]), ctx);
ctx              1571 arch/arm/net/bpf_jit_32.c 			emit(ARM_UXTH(rd[1], rd[1]), ctx);
ctx              1573 arch/arm/net/bpf_jit_32.c 			if (!ctx->prog->aux->verifier_zext)
ctx              1574 arch/arm/net/bpf_jit_32.c 				emit(ARM_EOR_R(rd[0], rd[0], rd[0]), ctx);
ctx              1578 arch/arm/net/bpf_jit_32.c 			if (!ctx->prog->aux->verifier_zext)
ctx              1579 arch/arm/net/bpf_jit_32.c 				emit(ARM_EOR_R(rd[0], rd[0], rd[0]), ctx);
ctx              1586 arch/arm/net/bpf_jit_32.c 		arm_bpf_put_reg64(dst, rd, ctx);
ctx              1593 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_i64(dst, val, ctx);
ctx              1602 arch/arm/net/bpf_jit_32.c 		rn = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx              1603 arch/arm/net/bpf_jit_32.c 		emit_ldx_r(dst, rn, off, ctx, BPF_SIZE(code));
ctx              1613 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_se_i64(true, tmp2, imm, ctx);
ctx              1618 arch/arm/net/bpf_jit_32.c 			emit_a32_mov_i(tmp2[1], imm, ctx);
ctx              1621 arch/arm/net/bpf_jit_32.c 		emit_str_r(dst_lo, tmp2, off, ctx, BPF_SIZE(code));
ctx              1633 arch/arm/net/bpf_jit_32.c 		rs = arm_bpf_get_reg64(src, tmp2, ctx);
ctx              1634 arch/arm/net/bpf_jit_32.c 		emit_str_r(dst_lo, rs, off, ctx, BPF_SIZE(code));
ctx              1670 arch/arm/net/bpf_jit_32.c 		rm = arm_bpf_get_reg32(src_hi, tmp2[0], ctx);
ctx              1671 arch/arm/net/bpf_jit_32.c 		rn = arm_bpf_get_reg32(src_lo, tmp2[1], ctx);
ctx              1711 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_se_i64(true, tmp2, imm, ctx);
ctx              1714 arch/arm/net/bpf_jit_32.c 		rd = arm_bpf_get_reg64(dst, tmp, ctx);
ctx              1717 arch/arm/net/bpf_jit_32.c 		emit_ar_r(rd[0], rd[1], rm, rn, ctx, BPF_OP(code),
ctx              1721 arch/arm/net/bpf_jit_32.c 		jmp_offset = bpf2a32_offset(i+off, i, ctx);
ctx              1725 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_NE, ARM_B(jmp_offset), ctx);
ctx              1728 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_EQ, ARM_B(jmp_offset), ctx);
ctx              1731 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_HI, ARM_B(jmp_offset), ctx);
ctx              1734 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_CS, ARM_B(jmp_offset), ctx);
ctx              1737 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_LT, ARM_B(jmp_offset), ctx);
ctx              1740 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_GE, ARM_B(jmp_offset), ctx);
ctx              1743 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_LS, ARM_B(jmp_offset), ctx);
ctx              1746 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_CC, ARM_B(jmp_offset), ctx);
ctx              1749 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_LT, ARM_B(jmp_offset), ctx);
ctx              1752 arch/arm/net/bpf_jit_32.c 			_emit(ARM_COND_GE, ARM_B(jmp_offset), ctx);
ctx              1761 arch/arm/net/bpf_jit_32.c 		jmp_offset = bpf2a32_offset(i+off, i, ctx);
ctx              1763 arch/arm/net/bpf_jit_32.c 		emit(ARM_B(jmp_offset), ctx);
ctx              1768 arch/arm/net/bpf_jit_32.c 		if (emit_bpf_tail_call(ctx))
ctx              1782 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_r64(true, r0, r1, ctx);
ctx              1783 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_r64(true, r1, r2, ctx);
ctx              1784 arch/arm/net/bpf_jit_32.c 		emit_push_r64(r5, ctx);
ctx              1785 arch/arm/net/bpf_jit_32.c 		emit_push_r64(r4, ctx);
ctx              1786 arch/arm/net/bpf_jit_32.c 		emit_push_r64(r3, ctx);
ctx              1788 arch/arm/net/bpf_jit_32.c 		emit_a32_mov_i(tmp[1], func, ctx);
ctx              1789 arch/arm/net/bpf_jit_32.c 		emit_blx_r(tmp[1], ctx);
ctx              1791 arch/arm/net/bpf_jit_32.c 		emit(ARM_ADD_I(ARM_SP, ARM_SP, imm8m(24)), ctx); // callee clean
ctx              1799 arch/arm/net/bpf_jit_32.c 		if (i == ctx->prog->len - 1)
ctx              1801 arch/arm/net/bpf_jit_32.c 		jmp_offset = epilogue_offset(ctx);
ctx              1803 arch/arm/net/bpf_jit_32.c 		emit(ARM_B(jmp_offset), ctx);
ctx              1813 arch/arm/net/bpf_jit_32.c 	if (ctx->flags & FLAG_IMM_OVERFLOW)
ctx              1823 arch/arm/net/bpf_jit_32.c static int build_body(struct jit_ctx *ctx)
ctx              1825 arch/arm/net/bpf_jit_32.c 	const struct bpf_prog *prog = ctx->prog;
ctx              1832 arch/arm/net/bpf_jit_32.c 		ret = build_insn(insn, ctx);
ctx              1837 arch/arm/net/bpf_jit_32.c 			if (ctx->target == NULL)
ctx              1838 arch/arm/net/bpf_jit_32.c 				ctx->offsets[i] = ctx->idx;
ctx              1842 arch/arm/net/bpf_jit_32.c 		if (ctx->target == NULL)
ctx              1843 arch/arm/net/bpf_jit_32.c 			ctx->offsets[i] = ctx->idx;
ctx              1852 arch/arm/net/bpf_jit_32.c static int validate_code(struct jit_ctx *ctx)
ctx              1856 arch/arm/net/bpf_jit_32.c 	for (i = 0; i < ctx->idx; i++) {
ctx              1857 arch/arm/net/bpf_jit_32.c 		if (ctx->target[i] == __opcode_to_mem_arm(ARM_INST_UDF))
ctx              1879 arch/arm/net/bpf_jit_32.c 	struct jit_ctx ctx;
ctx              1903 arch/arm/net/bpf_jit_32.c 	memset(&ctx, 0, sizeof(ctx));
ctx              1904 arch/arm/net/bpf_jit_32.c 	ctx.prog = prog;
ctx              1905 arch/arm/net/bpf_jit_32.c 	ctx.cpu_architecture = cpu_architecture();
ctx              1910 arch/arm/net/bpf_jit_32.c 	ctx.offsets = kcalloc(prog->len, sizeof(int), GFP_KERNEL);
ctx              1911 arch/arm/net/bpf_jit_32.c 	if (ctx.offsets == NULL) {
ctx              1926 arch/arm/net/bpf_jit_32.c 	if (build_body(&ctx)) {
ctx              1931 arch/arm/net/bpf_jit_32.c 	tmp_idx = ctx.idx;
ctx              1932 arch/arm/net/bpf_jit_32.c 	build_prologue(&ctx);
ctx              1933 arch/arm/net/bpf_jit_32.c 	ctx.prologue_bytes = (ctx.idx - tmp_idx) * 4;
ctx              1935 arch/arm/net/bpf_jit_32.c 	ctx.epilogue_offset = ctx.idx;
ctx              1938 arch/arm/net/bpf_jit_32.c 	tmp_idx = ctx.idx;
ctx              1939 arch/arm/net/bpf_jit_32.c 	build_epilogue(&ctx);
ctx              1940 arch/arm/net/bpf_jit_32.c 	ctx.epilogue_bytes = (ctx.idx - tmp_idx) * 4;
ctx              1942 arch/arm/net/bpf_jit_32.c 	ctx.idx += ctx.imm_count;
ctx              1943 arch/arm/net/bpf_jit_32.c 	if (ctx.imm_count) {
ctx              1944 arch/arm/net/bpf_jit_32.c 		ctx.imms = kcalloc(ctx.imm_count, sizeof(u32), GFP_KERNEL);
ctx              1945 arch/arm/net/bpf_jit_32.c 		if (ctx.imms == NULL) {
ctx              1952 arch/arm/net/bpf_jit_32.c 	build_epilogue(&ctx);
ctx              1962 arch/arm/net/bpf_jit_32.c 	image_size = sizeof(u32) * ctx.idx;
ctx              1976 arch/arm/net/bpf_jit_32.c 	ctx.target = (u32 *) image_ptr;
ctx              1977 arch/arm/net/bpf_jit_32.c 	ctx.idx = 0;
ctx              1979 arch/arm/net/bpf_jit_32.c 	build_prologue(&ctx);
ctx              1984 arch/arm/net/bpf_jit_32.c 	if (build_body(&ctx) < 0) {
ctx              1990 arch/arm/net/bpf_jit_32.c 	build_epilogue(&ctx);
ctx              1993 arch/arm/net/bpf_jit_32.c 	if (validate_code(&ctx)) {
ctx              1999 arch/arm/net/bpf_jit_32.c 	flush_icache_range((u32)header, (u32)(ctx.target + ctx.idx));
ctx              2003 arch/arm/net/bpf_jit_32.c 		bpf_jit_dump(prog->len, image_size, 2, ctx.target);
ctx              2006 arch/arm/net/bpf_jit_32.c 	prog->bpf_func = (void *)ctx.target;
ctx              2012 arch/arm/net/bpf_jit_32.c 	if (ctx.imm_count)
ctx              2013 arch/arm/net/bpf_jit_32.c 		kfree(ctx.imms);
ctx              2016 arch/arm/net/bpf_jit_32.c 	kfree(ctx.offsets);
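Note: the bpf_int_jit_compile() hits above spell out the driver for the two passes. Condensed from the excerpts (allocation, error paths, and the pre-v7 literal-pool branch elided); note that the body is measured before the prologue, so ctx.offsets[] is body-relative, which is why branch targets elsewhere add ctx->prologue_bytes:

	memset(&ctx, 0, sizeof(ctx));
	ctx.prog = prog;
	ctx.offsets = kcalloc(prog->len, sizeof(int), GFP_KERNEL);

	build_body(&ctx);			/* pass 1: size the body, fill offsets[] */
	build_prologue(&ctx);			/* measured -> ctx.prologue_bytes */
	ctx.epilogue_offset = ctx.idx;
	build_epilogue(&ctx);
	ctx.idx += ctx.imm_count;		/* reserve literal-pool words */

	image_size = sizeof(u32) * ctx.idx;

	ctx.target = (u32 *)image_ptr;		/* pass 2: emit into the image */
	ctx.idx = 0;
	build_prologue(&ctx);
	build_body(&ctx);
	build_epilogue(&ctx);

	if (validate_code(&ctx))		/* any leftover ARM_INST_UDF => reject */
		goto out_free;
	flush_icache_range((u32)header, (u32)(ctx.target + ctx.idx));
	prog->bpf_func = (void *)ctx.target;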
ctx                20 arch/arm64/crypto/aes-ce-ccm-glue.c static int num_rounds(struct crypto_aes_ctx *ctx)
ctx                29 arch/arm64/crypto/aes-ce-ccm-glue.c 	return 6 + ctx->key_length / 4;
ctx                49 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx                52 arch/arm64/crypto/aes-ce-ccm-glue.c 	ret = ce_aes_expandkey(ctx, in_key, key_len);
ctx               143 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               159 arch/arm64/crypto/aes-ce-ccm-glue.c 	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
ctx               171 arch/arm64/crypto/aes-ce-ccm-glue.c 		ccm_update_mac(ctx, mac, p, n, &macp);
ctx               181 arch/arm64/crypto/aes-ce-ccm-glue.c 			      struct crypto_aes_ctx *ctx, bool enc)
ctx               205 arch/arm64/crypto/aes-ce-ccm-glue.c 			aes_encrypt(ctx, buf, walk->iv);
ctx               206 arch/arm64/crypto/aes-ce-ccm-glue.c 			aes_encrypt(ctx, mac, mac);
ctx               221 arch/arm64/crypto/aes-ce-ccm-glue.c 		aes_encrypt(ctx, buf, iv0);
ctx               222 arch/arm64/crypto/aes-ce-ccm-glue.c 		aes_encrypt(ctx, mac, mac);
ctx               231 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               260 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.nbytes - tail, ctx->key_enc,
ctx               261 arch/arm64/crypto/aes-ce-ccm-glue.c 					   num_rounds(ctx), mac, walk.iv);
ctx               268 arch/arm64/crypto/aes-ce-ccm-glue.c 			ce_aes_ccm_final(mac, buf, ctx->key_enc,
ctx               269 arch/arm64/crypto/aes-ce-ccm-glue.c 					 num_rounds(ctx));
ctx               273 arch/arm64/crypto/aes-ce-ccm-glue.c 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
ctx               288 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               318 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.nbytes - tail, ctx->key_enc,
ctx               319 arch/arm64/crypto/aes-ce-ccm-glue.c 					   num_rounds(ctx), mac, walk.iv);
ctx               326 arch/arm64/crypto/aes-ce-ccm-glue.c 			ce_aes_ccm_final(mac, buf, ctx->key_enc,
ctx               327 arch/arm64/crypto/aes-ce-ccm-glue.c 					 num_rounds(ctx));
ctx               331 arch/arm64/crypto/aes-ce-ccm-glue.c 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
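
num_rounds() in the CCM glue maps the key length in bytes to the AES round count as 6 + key_length / 4, i.e. Nr = Nk + 6 with Nk the number of 32-bit key words. A standalone check of the three standard key sizes, assuming key_length is in bytes as in struct crypto_aes_ctx:

	#include <assert.h>

	/* Nr = Nk + 6, with Nk = key words = key_length / 4 */
	static int num_rounds(unsigned int key_length_bytes)
	{
		return 6 + key_length_bytes / 4;
	}

	int main(void)
	{
		assert(num_rounds(16) == 10);  /* AES-128 */
		assert(num_rounds(24) == 12);  /* AES-192 */
		assert(num_rounds(32) == 14);  /* AES-256 */
		return 0;
	}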
ctx                34 arch/arm64/crypto/aes-ce-glue.c static int num_rounds(struct crypto_aes_ctx *ctx)
ctx                43 arch/arm64/crypto/aes-ce-glue.c 	return 6 + ctx->key_length / 4;
ctx                48 arch/arm64/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                51 arch/arm64/crypto/aes-ce-glue.c 		aes_encrypt(ctx, dst, src);
ctx                56 arch/arm64/crypto/aes-ce-glue.c 	__aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
ctx                62 arch/arm64/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                65 arch/arm64/crypto/aes-ce-glue.c 		aes_decrypt(ctx, dst, src);
ctx                70 arch/arm64/crypto/aes-ce-glue.c 	__aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
ctx                74 arch/arm64/crypto/aes-ce-glue.c int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx                93 arch/arm64/crypto/aes-ce-glue.c 	ctx->key_length = key_len;
ctx                95 arch/arm64/crypto/aes-ce-glue.c 		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));
ctx                99 arch/arm64/crypto/aes-ce-glue.c 		u32 *rki = ctx->key_enc + (i * kwords);
ctx               128 arch/arm64/crypto/aes-ce-glue.c 	key_enc = (struct aes_block *)ctx->key_enc;
ctx               129 arch/arm64/crypto/aes-ce-glue.c 	key_dec = (struct aes_block *)ctx->key_dec;
ctx               130 arch/arm64/crypto/aes-ce-glue.c 	j = num_rounds(ctx);
ctx               145 arch/arm64/crypto/aes-ce-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               148 arch/arm64/crypto/aes-ce-glue.c 	ret = ce_aes_expandkey(ctx, in_key, key_len);
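
ce_aes_expandkey() begins by loading the user key little-endian into the first kwords entries of key_enc with get_unaligned_le32(), since the caller's key buffer carries no alignment guarantee. A portable sketch of just that load step; the schedule itself, which the kernel runs with AESE/AESMC instructions, is omitted:

	#include <stdint.h>

	/* Portable stand-in for the kernel's get_unaligned_le32(). */
	static uint32_t get_unaligned_le32(const uint8_t *p)
	{
		return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
		       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
	}

	/* Load a key_len-byte user key into round-key words 0..kwords-1,
	 * as ce_aes_expandkey() does before running the key schedule. */
	static void load_key_words(uint32_t *key_enc, const uint8_t *in_key,
				   unsigned int key_len)
	{
		unsigned int kwords = key_len / sizeof(uint32_t);

		for (unsigned int i = 0; i < kwords; i++)
			key_enc[i] = get_unaligned_le32(in_key + i * sizeof(uint32_t));
	}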
ctx                 5 arch/arm64/crypto/aes-ce-setkey.h int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx                17 arch/arm64/crypto/aes-cipher-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                18 arch/arm64/crypto/aes-cipher-glue.c 	int rounds = 6 + ctx->key_length / 4;
ctx                20 arch/arm64/crypto/aes-cipher-glue.c 	__aes_arm64_encrypt(ctx->key_enc, out, in, rounds);
ctx                25 arch/arm64/crypto/aes-cipher-glue.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                26 arch/arm64/crypto/aes-cipher-glue.c 	int rounds = 6 + ctx->key_length / 4;
ctx                28 arch/arm64/crypto/aes-cipher-glue.c 	__aes_arm64_decrypt(ctx->key_dec, out, in, rounds);
ctx               134 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               137 arch/arm64/crypto/aes-glue.c 	ret = aes_expandkey(ctx, in_key, key_len);
ctx               147 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               154 arch/arm64/crypto/aes-glue.c 	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
ctx               156 arch/arm64/crypto/aes-glue.c 		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
ctx               169 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               170 arch/arm64/crypto/aes-glue.c 	SHASH_DESC_ON_STACK(desc, ctx->hash);
ctx               174 arch/arm64/crypto/aes-glue.c 	ret = aes_expandkey(&ctx->key1, in_key, key_len);
ctx               178 arch/arm64/crypto/aes-glue.c 	desc->tfm = ctx->hash;
ctx               181 arch/arm64/crypto/aes-glue.c 	ret = aes_expandkey(&ctx->key2, digest, sizeof(digest));
ctx               194 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               195 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key_length / 4;
ctx               204 arch/arm64/crypto/aes-glue.c 				ctx->key_enc, rounds, blocks);
ctx               214 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               215 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key_length / 4;
ctx               224 arch/arm64/crypto/aes-glue.c 				ctx->key_dec, rounds, blocks);
ctx               235 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               236 arch/arm64/crypto/aes-glue.c 	int err = 0, rounds = 6 + ctx->key_length / 4;
ctx               242 arch/arm64/crypto/aes-glue.c 				ctx->key_enc, rounds, blocks, walk->iv);
ctx               264 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               265 arch/arm64/crypto/aes-glue.c 	int err = 0, rounds = 6 + ctx->key_length / 4;
ctx               271 arch/arm64/crypto/aes-glue.c 				ctx->key_dec, rounds, blocks, walk->iv);
ctx               292 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               293 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key_length / 4;
ctx               340 arch/arm64/crypto/aes-glue.c 			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
ctx               349 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               350 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key_length / 4;
ctx               397 arch/arm64/crypto/aes-glue.c 			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
ctx               405 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               407 arch/arm64/crypto/aes-glue.c 	ctx->hash = crypto_alloc_shash("sha256", 0, 0);
ctx               409 arch/arm64/crypto/aes-glue.c 	return PTR_ERR_OR_ZERO(ctx->hash);
ctx               414 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               416 arch/arm64/crypto/aes-glue.c 	crypto_free_shash(ctx->hash);
ctx               422 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               423 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key1.key_length / 4;
ctx               433 arch/arm64/crypto/aes-glue.c 				      ctx->key1.key_enc, rounds, blocks,
ctx               434 arch/arm64/crypto/aes-glue.c 				      req->iv, ctx->key2.key_enc);
ctx               444 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               445 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key1.key_length / 4;
ctx               455 arch/arm64/crypto/aes-glue.c 				      ctx->key1.key_dec, rounds, blocks,
ctx               456 arch/arm64/crypto/aes-glue.c 				      req->iv, ctx->key2.key_enc);
ctx               466 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               467 arch/arm64/crypto/aes-glue.c 	int err, rounds = 6 + ctx->key_length / 4;
ctx               476 arch/arm64/crypto/aes-glue.c 				ctx->key_enc, rounds, blocks, walk.iv);
ctx               492 arch/arm64/crypto/aes-glue.c 		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
ctx               504 arch/arm64/crypto/aes-glue.c 	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               513 arch/arm64/crypto/aes-glue.c 	aes_encrypt(ctx, dst, src);
ctx               528 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               529 arch/arm64/crypto/aes-glue.c 	int err, first, rounds = 6 + ctx->key1.key_length / 4;
ctx               568 arch/arm64/crypto/aes-glue.c 				ctx->key1.key_enc, rounds, nbytes,
ctx               569 arch/arm64/crypto/aes-glue.c 				ctx->key2.key_enc, walk.iv, first);
ctx               590 arch/arm64/crypto/aes-glue.c 			ctx->key1.key_enc, rounds, walk.nbytes,
ctx               591 arch/arm64/crypto/aes-glue.c 			ctx->key2.key_enc, walk.iv, first);
ctx               600 arch/arm64/crypto/aes-glue.c 	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               601 arch/arm64/crypto/aes-glue.c 	int err, first, rounds = 6 + ctx->key1.key_length / 4;
ctx               640 arch/arm64/crypto/aes-glue.c 				ctx->key1.key_dec, rounds, nbytes,
ctx               641 arch/arm64/crypto/aes-glue.c 				ctx->key2.key_enc, walk.iv, first);
ctx               663 arch/arm64/crypto/aes-glue.c 			ctx->key1.key_dec, rounds, walk.nbytes,
ctx               664 arch/arm64/crypto/aes-glue.c 			ctx->key2.key_enc, walk.iv, first);
ctx               793 arch/arm64/crypto/aes-glue.c 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
ctx               796 arch/arm64/crypto/aes-glue.c 	err = aes_expandkey(&ctx->key, in_key, key_len);
ctx               815 arch/arm64/crypto/aes-glue.c 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
ctx               816 arch/arm64/crypto/aes-glue.c 	be128 *consts = (be128 *)ctx->consts;
ctx               826 arch/arm64/crypto/aes-glue.c 	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
ctx               845 arch/arm64/crypto/aes-glue.c 	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
ctx               855 arch/arm64/crypto/aes-glue.c 	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
ctx               856 arch/arm64/crypto/aes-glue.c 	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
ctx               864 arch/arm64/crypto/aes-glue.c 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               866 arch/arm64/crypto/aes-glue.c 	memset(ctx->dg, 0, AES_BLOCK_SIZE);
ctx               867 arch/arm64/crypto/aes-glue.c 	ctx->len = 0;
ctx               872 arch/arm64/crypto/aes-glue.c static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
ctx               875 arch/arm64/crypto/aes-glue.c 	int rounds = 6 + ctx->key_length / 4;
ctx               879 arch/arm64/crypto/aes-glue.c 		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
ctx               884 arch/arm64/crypto/aes-glue.c 			aes_encrypt(ctx, dg, dg);
ctx               891 arch/arm64/crypto/aes-glue.c 				aes_encrypt(ctx, dg, dg);
ctx               899 arch/arm64/crypto/aes-glue.c 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               904 arch/arm64/crypto/aes-glue.c 		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
ctx               905 arch/arm64/crypto/aes-glue.c 		    (ctx->len + len) > AES_BLOCK_SIZE) {
ctx               911 arch/arm64/crypto/aes-glue.c 			mac_do_update(&tctx->key, p, blocks, ctx->dg,
ctx               912 arch/arm64/crypto/aes-glue.c 				      (ctx->len != 0), (len != 0));
ctx               917 arch/arm64/crypto/aes-glue.c 				ctx->len = AES_BLOCK_SIZE;
ctx               920 arch/arm64/crypto/aes-glue.c 			ctx->len = 0;
ctx               923 arch/arm64/crypto/aes-glue.c 		l = min(len, AES_BLOCK_SIZE - ctx->len);
ctx               926 arch/arm64/crypto/aes-glue.c 			crypto_xor(ctx->dg + ctx->len, p, l);
ctx               927 arch/arm64/crypto/aes-glue.c 			ctx->len += l;
ctx               939 arch/arm64/crypto/aes-glue.c 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               941 arch/arm64/crypto/aes-glue.c 	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);
ctx               943 arch/arm64/crypto/aes-glue.c 	memcpy(out, ctx->dg, AES_BLOCK_SIZE);
ctx               951 arch/arm64/crypto/aes-glue.c 	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               954 arch/arm64/crypto/aes-glue.c 	if (ctx->len != AES_BLOCK_SIZE) {
ctx               955 arch/arm64/crypto/aes-glue.c 		ctx->dg[ctx->len] ^= 0x80;
ctx               959 arch/arm64/crypto/aes-glue.c 	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);
ctx               961 arch/arm64/crypto/aes-glue.c 	memcpy(out, ctx->dg, AES_BLOCK_SIZE);
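
The mac_final()/cbcmac_final() entries above show CMAC's 10* padding: a partial final block gets 0x80 XORed into the running digest at offset ctx->len (the trailing zero bits are implicit), while a complete block skips the padding and uses the other derived constant. A small sketch of the padding rule alone, with the K1/K2 subkey selection and the closing encryption elided:

	#include <stdint.h>

	#define AES_BLOCK_SIZE 16

	/* CMAC-style 10* padding of a partial final block: XOR the single
	 * pad bit into the accumulated block, leave the rest untouched.
	 * Mirrors ctx->dg[ctx->len] ^= 0x80 in mac_final(); a complete
	 * block takes the other subkey and no padding at all. */
	static void cmac_pad(uint8_t dg[AES_BLOCK_SIZE], unsigned int len)
	{
		if (len != AES_BLOCK_SIZE)
			dg[len] ^= 0x80;
	}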
ctx                80 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                88 arch/arm64/crypto/aes-neonbs-glue.c 	ctx->rounds = 6 + key_len / 4;
ctx                91 arch/arm64/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
ctx               102 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               116 arch/arm64/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
ctx               117 arch/arm64/crypto/aes-neonbs-glue.c 		   ctx->rounds, blocks);
ctx               139 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               147 arch/arm64/crypto/aes-neonbs-glue.c 	ctx->key.rounds = 6 + key_len / 4;
ctx               149 arch/arm64/crypto/aes-neonbs-glue.c 	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));
ctx               152 arch/arm64/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
ctx               161 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               173 arch/arm64/crypto/aes-neonbs-glue.c 				     ctx->enc, ctx->key.rounds, blocks,
ctx               184 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               199 arch/arm64/crypto/aes-neonbs-glue.c 				  ctx->key.rk, ctx->key.rounds, blocks,
ctx               212 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               215 arch/arm64/crypto/aes-neonbs-glue.c 	err = aes_expandkey(&ctx->fallback, in_key, key_len);
ctx               219 arch/arm64/crypto/aes-neonbs-glue.c 	ctx->key.rounds = 6 + key_len / 4;
ctx               222 arch/arm64/crypto/aes-neonbs-glue.c 	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
ctx               231 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               250 arch/arm64/crypto/aes-neonbs-glue.c 				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
ctx               272 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               281 arch/arm64/crypto/aes-neonbs-glue.c 	err = aes_expandkey(&ctx->cts, in_key, key_len);
ctx               289 arch/arm64/crypto/aes-neonbs-glue.c 	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));
ctx               296 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               305 arch/arm64/crypto/aes-neonbs-glue.c 	aes_encrypt(&ctx->fallback, dst, src);
ctx               322 arch/arm64/crypto/aes-neonbs-glue.c 	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               371 arch/arm64/crypto/aes-neonbs-glue.c 						     ctx->twkey,
ctx               372 arch/arm64/crypto/aes-neonbs-glue.c 						     ctx->key.rounds, 1);
ctx               375 arch/arm64/crypto/aes-neonbs-glue.c 			fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
ctx               412 arch/arm64/crypto/aes-neonbs-glue.c 		neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
ctx               413 arch/arm64/crypto/aes-neonbs-glue.c 				     nbytes, ctx->twkey, walk.iv, first ?: 2);
ctx               415 arch/arm64/crypto/aes-neonbs-glue.c 		neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
ctx               416 arch/arm64/crypto/aes-neonbs-glue.c 				     nbytes, ctx->twkey, walk.iv, first ?: 2);
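
ctr_encrypt() in the NEON bit-sliced glue handles a trailing partial block by running the keystream generator once more into a bounce buffer and copying only the remaining bytes out (the `final` argument visible around line 250 above). A generic sketch of that tail step, with a hypothetical encrypt_block_fn standing in for the NEON keystream call:

	#include <stdint.h>

	#define AES_BLOCK_SIZE 16

	/* Hypothetical single-block hook standing in for the NEON code. */
	typedef void (*encrypt_block_fn)(uint8_t out[AES_BLOCK_SIZE],
					 const uint8_t in[AES_BLOCK_SIZE],
					 const void *key);

	/* Generic CTR tail: encrypt the counter into a keystream buffer
	 * and XOR just the nbytes that remain, as the glue code does for
	 * the final partial block. */
	static void ctr_final(encrypt_block_fn enc, const void *key,
			      uint8_t *dst, const uint8_t *src,
			      unsigned int nbytes,
			      const uint8_t iv[AES_BLOCK_SIZE])
	{
		uint8_t ks[AES_BLOCK_SIZE];

		enc(ks, iv, key);
		for (unsigned int i = 0; i < nbytes; i++)
			dst[i] = src[i] ^ ks[i];
	}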
ctx                63 arch/arm64/crypto/chacha-neon-glue.c 				  const struct chacha_ctx *ctx, const u8 *iv)
ctx                71 arch/arm64/crypto/chacha-neon-glue.c 	crypto_chacha_init(state, ctx, iv);
ctx                81 arch/arm64/crypto/chacha-neon-glue.c 			      nbytes, ctx->nrounds);
ctx                92 arch/arm64/crypto/chacha-neon-glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                97 arch/arm64/crypto/chacha-neon-glue.c 	return chacha_neon_stream_xor(req, ctx, req->iv);
ctx               103 arch/arm64/crypto/chacha-neon-glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               111 arch/arm64/crypto/chacha-neon-glue.c 	crypto_chacha_init(state, ctx, req->iv);
ctx               114 arch/arm64/crypto/chacha-neon-glue.c 	hchacha_block_neon(state, subctx.key, ctx->nrounds);
ctx               116 arch/arm64/crypto/chacha-neon-glue.c 	subctx.nrounds = ctx->nrounds;
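
Both chacha_neon_stream_xor() and xchacha_neon() start from crypto_chacha_init(), which lays out the 16-word state: the four "expand 32-byte k" constants, eight key words, then four little-endian IV words, the first of which is the block counter in the kernel's convention; XChaCha then runs hchacha_block_neon() over that state to derive subctx.key. A sketch of the layout, assuming the key is already in host-order words:

	#include <stdint.h>

	static uint32_t le32(const uint8_t *p)
	{
		return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
		       (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
	}

	/* Build the 16-word ChaCha state: constants, key, counter+nonce. */
	static void chacha_init_state(uint32_t state[16],
				      const uint32_t key[8],
				      const uint8_t iv[16])
	{
		static const uint32_t expand[4] = {  /* "expand 32-byte k" */
			0x61707865, 0x3320646e, 0x79622d32, 0x6b206574
		};
		int i;

		for (i = 0; i < 4; i++)
			state[i] = expand[i];
		for (i = 0; i < 8; i++)
			state[4 + i] = key[i];
		for (i = 0; i < 4; i++)  /* counter word first, then nonce */
			state[12 + i] = le32(iv + 4 * i);
	}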
ctx                75 arch/arm64/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                77 arch/arm64/crypto/ghash-ce-glue.c 	*ctx = (struct ghash_desc_ctx){};
ctx               125 arch/arm64/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               126 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx               128 arch/arm64/crypto/ghash-ce-glue.c 	ctx->count += len;
ctx               137 arch/arm64/crypto/ghash-ce-glue.c 			memcpy(ctx->buf + partial, src, p);
ctx               148 arch/arm64/crypto/ghash-ce-glue.c 			ghash_do_update(chunk, ctx->digest, src, key,
ctx               149 arch/arm64/crypto/ghash-ce-glue.c 					partial ? ctx->buf : NULL,
ctx               158 arch/arm64/crypto/ghash-ce-glue.c 		memcpy(ctx->buf + partial, src, len);
ctx               176 arch/arm64/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               177 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx               182 arch/arm64/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
ctx               184 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
ctx               187 arch/arm64/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[1], dst);
ctx               188 arch/arm64/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[0], dst + 8);
ctx               190 arch/arm64/crypto/ghash-ce-glue.c 	*ctx = (struct ghash_desc_ctx){};
ctx               196 arch/arm64/crypto/ghash-ce-glue.c 	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               197 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
ctx               202 arch/arm64/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
ctx               204 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL,
ctx               207 arch/arm64/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[1], dst);
ctx               208 arch/arm64/crypto/ghash-ce-glue.c 	put_unaligned_be64(ctx->digest[0], dst + 8);
ctx               210 arch/arm64/crypto/ghash-ce-glue.c 	*ctx = (struct ghash_desc_ctx){};
ctx               291 arch/arm64/crypto/ghash-ce-glue.c static int num_rounds(struct crypto_aes_ctx *ctx)
ctx               300 arch/arm64/crypto/ghash-ce-glue.c 	return 6 + ctx->key_length / 4;
ctx               306 arch/arm64/crypto/ghash-ce-glue.c 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               310 arch/arm64/crypto/ghash-ce-glue.c 	ret = aes_expandkey(&ctx->aes_key, inkey, keylen);
ctx               316 arch/arm64/crypto/ghash-ce-glue.c 	aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});
ctx               318 arch/arm64/crypto/ghash-ce-glue.c 	return __ghash_setkey(&ctx->ghash_key, key, sizeof(be128));
ctx               335 arch/arm64/crypto/ghash-ce-glue.c 			   int *buf_count, struct gcm_aes_ctx *ctx)
ctx               350 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(blocks, dg, src, &ctx->ghash_key,
ctx               368 arch/arm64/crypto/ghash-ce-glue.c 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               386 arch/arm64/crypto/ghash-ce-glue.c 		gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
ctx               396 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL,
ctx               401 arch/arm64/crypto/ghash-ce-glue.c static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
ctx               410 arch/arm64/crypto/ghash-ce-glue.c 	ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL,
ctx               422 arch/arm64/crypto/ghash-ce-glue.c 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               428 arch/arm64/crypto/ghash-ce-glue.c 	int nrounds = num_rounds(&ctx->aes_key);
ctx               443 arch/arm64/crypto/ghash-ce-glue.c 		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
ctx               457 arch/arm64/crypto/ghash-ce-glue.c 					  walk.src.virt.addr, &ctx->ghash_key,
ctx               464 arch/arm64/crypto/ghash-ce-glue.c 			rk = ctx->aes_key.key_enc;
ctx               467 arch/arm64/crypto/ghash-ce-glue.c 		aes_encrypt(&ctx->aes_key, tag, iv);
ctx               478 arch/arm64/crypto/ghash-ce-glue.c 				aes_encrypt(&ctx->aes_key, ks, iv);
ctx               487 arch/arm64/crypto/ghash-ce-glue.c 					walk.dst.virt.addr, &ctx->ghash_key,
ctx               494 arch/arm64/crypto/ghash-ce-glue.c 			aes_encrypt(&ctx->aes_key, ks, iv);
ctx               497 arch/arm64/crypto/ghash-ce-glue.c 				aes_encrypt(&ctx->aes_key, ks + AES_BLOCK_SIZE, iv);
ctx               520 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
ctx               529 arch/arm64/crypto/ghash-ce-glue.c 	gcm_final(req, ctx, dg, tag, req->cryptlen);
ctx               541 arch/arm64/crypto/ghash-ce-glue.c 	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
ctx               548 arch/arm64/crypto/ghash-ce-glue.c 	int nrounds = num_rounds(&ctx->aes_key);
ctx               563 arch/arm64/crypto/ghash-ce-glue.c 		pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc, nrounds);
ctx               574 arch/arm64/crypto/ghash-ce-glue.c 					  walk.src.virt.addr, &ctx->ghash_key,
ctx               598 arch/arm64/crypto/ghash-ce-glue.c 			rk = ctx->aes_key.key_enc;
ctx               601 arch/arm64/crypto/ghash-ce-glue.c 		aes_encrypt(&ctx->aes_key, tag, iv);
ctx               610 arch/arm64/crypto/ghash-ce-glue.c 					&ctx->ghash_key, NULL,
ctx               614 arch/arm64/crypto/ghash-ce-glue.c 				aes_encrypt(&ctx->aes_key, buf, iv);
ctx               632 arch/arm64/crypto/ghash-ce-glue.c 				aes_encrypt(&ctx->aes_key, iv2, iv2);
ctx               634 arch/arm64/crypto/ghash-ce-glue.c 			aes_encrypt(&ctx->aes_key, iv, iv);
ctx               652 arch/arm64/crypto/ghash-ce-glue.c 		ghash_do_update(!!nbytes, dg, buf, &ctx->ghash_key, head,
ctx               664 arch/arm64/crypto/ghash-ce-glue.c 	gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);
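
gcm_final() closes the hash with GCM's length block: the AAD length and the ciphertext length, each expressed in bits as a big-endian 64-bit value, packed into one 16-byte block and fed through ghash_do_update() one last time before the tag is encrypted. A standalone sketch of building that block:

	#include <stdint.h>

	/* The closing GHASH block in GCM: 64-bit big-endian bit counts of
	 * the AAD and the ciphertext, packed into a single 16-byte block,
	 * as gcm_final() assembles before its last ghash_do_update(). */
	static void gcm_length_block(uint8_t block[16],
				     uint64_t aad_bytes, uint64_t ct_bytes)
	{
		uint64_t abits = aad_bytes * 8, cbits = ct_bytes * 8;

		for (int i = 0; i < 8; i++) {
			block[i]     = (uint8_t)(abits >> (56 - 8 * i));
			block[8 + i] = (uint8_t)(cbits >> (56 - 8 * i));
		}
	}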
ctx                22 arch/arm64/crypto/sm4-ce-glue.c 	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                28 arch/arm64/crypto/sm4-ce-glue.c 		sm4_ce_do_crypt(ctx->rkey_enc, out, in);
ctx                35 arch/arm64/crypto/sm4-ce-glue.c 	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                41 arch/arm64/crypto/sm4-ce-glue.c 		sm4_ce_do_crypt(ctx->rkey_dec, out, in);
ctx               133 arch/arm64/kernel/probes/uprobes.c bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
ctx               143 arch/arm64/kernel/probes/uprobes.c 	if (ctx == RP_CHECK_CHAIN_CALL)
ctx               170 arch/arm64/kernel/signal.c static int preserve_fpsimd_context(struct fpsimd_context __user *ctx)
ctx               177 arch/arm64/kernel/signal.c 	err = __copy_to_user(ctx->vregs, fpsimd->vregs, sizeof(fpsimd->vregs));
ctx               178 arch/arm64/kernel/signal.c 	__put_user_error(fpsimd->fpsr, &ctx->fpsr, err);
ctx               179 arch/arm64/kernel/signal.c 	__put_user_error(fpsimd->fpcr, &ctx->fpcr, err);
ctx               182 arch/arm64/kernel/signal.c 	__put_user_error(FPSIMD_MAGIC, &ctx->head.magic, err);
ctx               183 arch/arm64/kernel/signal.c 	__put_user_error(sizeof(struct fpsimd_context), &ctx->head.size, err);
ctx               188 arch/arm64/kernel/signal.c static int restore_fpsimd_context(struct fpsimd_context __user *ctx)
ctx               195 arch/arm64/kernel/signal.c 	__get_user_error(magic, &ctx->head.magic, err);
ctx               196 arch/arm64/kernel/signal.c 	__get_user_error(size, &ctx->head.size, err);
ctx               203 arch/arm64/kernel/signal.c 	err = __copy_from_user(fpsimd.vregs, ctx->vregs,
ctx               205 arch/arm64/kernel/signal.c 	__get_user_error(fpsimd.fpsr, &ctx->fpsr, err);
ctx               206 arch/arm64/kernel/signal.c 	__get_user_error(fpsimd.fpcr, &ctx->fpcr, err);
ctx               225 arch/arm64/kernel/signal.c static int preserve_sve_context(struct sve_context __user *ctx)
ctx               228 arch/arm64/kernel/signal.c 	u16 reserved[ARRAY_SIZE(ctx->__reserved)];
ctx               237 arch/arm64/kernel/signal.c 	__put_user_error(SVE_MAGIC, &ctx->head.magic, err);
ctx               239 arch/arm64/kernel/signal.c 			 &ctx->head.size, err);
ctx               240 arch/arm64/kernel/signal.c 	__put_user_error(vl, &ctx->vl, err);
ctx               241 arch/arm64/kernel/signal.c 	BUILD_BUG_ON(sizeof(ctx->__reserved) != sizeof(reserved));
ctx               242 arch/arm64/kernel/signal.c 	err |= __copy_to_user(&ctx->__reserved, reserved, sizeof(reserved));
ctx               249 arch/arm64/kernel/signal.c 		err |= __copy_to_user((char __user *)ctx + SVE_SIG_REGS_OFFSET,
ctx               318 arch/arm64/kernel/signal.c extern int preserve_sve_context(void __user *ctx);
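
preserve_fpsimd_context()/restore_fpsimd_context() frame each record in the arm64 signal frame with a {magic, size} header; restore refuses to touch the payload unless both fields match what it expects (FPSIMD_MAGIC is 0x46508001 in the arm64 uapi headers). A user-space-flavoured sketch of that validation, with the error code reduced to -1:

	#include <stdint.h>

	/* Every record in the signal frame starts with this header;
	 * restore_fpsimd_context() checks magic and size before copying
	 * any register payload from user memory. */
	struct record_head {
		uint32_t magic;
		uint32_t size;
	};

	static int check_record(const struct record_head *head,
				uint32_t want_magic, uint32_t want_size)
	{
		if (head->magic != want_magic || head->size != want_size)
			return -1;   /* stand-in for -EINVAL */
		return 0;
	}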
ctx                34 arch/arm64/kvm/pmu.c 	struct kvm_host_data *ctx = this_cpu_ptr(&kvm_host_data);
ctx                40 arch/arm64/kvm/pmu.c 		ctx->pmu_events.events_host |= set;
ctx                42 arch/arm64/kvm/pmu.c 		ctx->pmu_events.events_guest |= set;
ctx                50 arch/arm64/kvm/pmu.c 	struct kvm_host_data *ctx = this_cpu_ptr(&kvm_host_data);
ctx                52 arch/arm64/kvm/pmu.c 	ctx->pmu_events.events_host &= ~clr;
ctx                53 arch/arm64/kvm/pmu.c 	ctx->pmu_events.events_guest &= ~clr;
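
kvm_set_pmu_events()/kvm_clr_pmu_events() keep two per-CPU bitmasks so a counter event can be enabled for host-only or guest-only execution: set targets one mask depending on where the event should count, clear scrubs both. A trivial model of that split, with the per-CPU plumbing and the vcpu-mode test elided:

	#include <stdint.h>

	/* One bit per PMU event, tracked separately for host and guest. */
	struct pmu_events {
		uint32_t events_host;
		uint32_t events_guest;
	};

	static void set_events(struct pmu_events *p, uint32_t set, int host)
	{
		if (host)
			p->events_host |= set;
		else
			p->events_guest |= set;
	}

	static void clr_events(struct pmu_events *p, uint32_t clr)
	{
		p->events_host &= ~clr;
		p->events_guest &= ~clr;
	}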
ctx                63 arch/arm64/net/bpf_jit_comp.c static inline void emit(const u32 insn, struct jit_ctx *ctx)
ctx                65 arch/arm64/net/bpf_jit_comp.c 	if (ctx->image != NULL)
ctx                66 arch/arm64/net/bpf_jit_comp.c 		ctx->image[ctx->idx] = cpu_to_le32(insn);
ctx                68 arch/arm64/net/bpf_jit_comp.c 	ctx->idx++;
ctx                72 arch/arm64/net/bpf_jit_comp.c 				  const s32 val, struct jit_ctx *ctx)
ctx                79 arch/arm64/net/bpf_jit_comp.c 			emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx);
ctx                81 arch/arm64/net/bpf_jit_comp.c 			emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx);
ctx                83 arch/arm64/net/bpf_jit_comp.c 				emit(A64_MOVK(is64, reg, lo, 0), ctx);
ctx                86 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOVZ(is64, reg, lo, 0), ctx);
ctx                88 arch/arm64/net/bpf_jit_comp.c 			emit(A64_MOVK(is64, reg, hi, 16), ctx);
ctx               101 arch/arm64/net/bpf_jit_comp.c 				    struct jit_ctx *ctx)
ctx               108 arch/arm64/net/bpf_jit_comp.c 		return emit_a64_mov_i(0, reg, (u32)val, ctx);
ctx               114 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx);
ctx               116 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
ctx               120 arch/arm64/net/bpf_jit_comp.c 			emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx);
ctx               131 arch/arm64/net/bpf_jit_comp.c 				     struct jit_ctx *ctx)
ctx               136 arch/arm64/net/bpf_jit_comp.c 	emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx);
ctx               140 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx);
ctx               145 arch/arm64/net/bpf_jit_comp.c 				 const struct jit_ctx *ctx)
ctx               147 arch/arm64/net/bpf_jit_comp.c 	int to = ctx->offset[bpf_to];
ctx               149 arch/arm64/net/bpf_jit_comp.c 	int from = ctx->offset[bpf_from] - 1;
ctx               162 arch/arm64/net/bpf_jit_comp.c static inline int epilogue_offset(const struct jit_ctx *ctx)
ctx               164 arch/arm64/net/bpf_jit_comp.c 	int to = ctx->epilogue_offset;
ctx               165 arch/arm64/net/bpf_jit_comp.c 	int from = ctx->idx;
ctx               176 arch/arm64/net/bpf_jit_comp.c static int build_prologue(struct jit_ctx *ctx, bool ebpf_from_cbpf)
ctx               178 arch/arm64/net/bpf_jit_comp.c 	const struct bpf_prog *prog = ctx->prog;
ctx               185 arch/arm64/net/bpf_jit_comp.c 	const int idx0 = ctx->idx;
ctx               212 arch/arm64/net/bpf_jit_comp.c 	emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx);
ctx               213 arch/arm64/net/bpf_jit_comp.c 	emit(A64_MOV(1, A64_FP, A64_SP), ctx);
ctx               216 arch/arm64/net/bpf_jit_comp.c 	emit(A64_PUSH(r6, r7, A64_SP), ctx);
ctx               217 arch/arm64/net/bpf_jit_comp.c 	emit(A64_PUSH(r8, r9, A64_SP), ctx);
ctx               218 arch/arm64/net/bpf_jit_comp.c 	emit(A64_PUSH(fp, tcc, A64_SP), ctx);
ctx               221 arch/arm64/net/bpf_jit_comp.c 	emit(A64_MOV(1, fp, A64_SP), ctx);
ctx               225 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOVZ(1, tcc, 0, 0), ctx);
ctx               227 arch/arm64/net/bpf_jit_comp.c 		cur_offset = ctx->idx - idx0;
ctx               235 arch/arm64/net/bpf_jit_comp.c 	ctx->stack_size = STACK_ALIGN(prog->aux->stack_depth);
ctx               238 arch/arm64/net/bpf_jit_comp.c 	emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
ctx               243 arch/arm64/net/bpf_jit_comp.c static int emit_bpf_tail_call(struct jit_ctx *ctx)
ctx               252 arch/arm64/net/bpf_jit_comp.c 	const int idx0 = ctx->idx;
ctx               253 arch/arm64/net/bpf_jit_comp.c #define cur_offset (ctx->idx - idx0)
ctx               261 arch/arm64/net/bpf_jit_comp.c 	emit_a64_mov_i64(tmp, off, ctx);
ctx               262 arch/arm64/net/bpf_jit_comp.c 	emit(A64_LDR32(tmp, r2, tmp), ctx);
ctx               263 arch/arm64/net/bpf_jit_comp.c 	emit(A64_MOV(0, r3, r3), ctx);
ctx               264 arch/arm64/net/bpf_jit_comp.c 	emit(A64_CMP(0, r3, tmp), ctx);
ctx               265 arch/arm64/net/bpf_jit_comp.c 	emit(A64_B_(A64_COND_CS, jmp_offset), ctx);
ctx               271 arch/arm64/net/bpf_jit_comp.c 	emit_a64_mov_i64(tmp, MAX_TAIL_CALL_CNT, ctx);
ctx               272 arch/arm64/net/bpf_jit_comp.c 	emit(A64_CMP(1, tcc, tmp), ctx);
ctx               273 arch/arm64/net/bpf_jit_comp.c 	emit(A64_B_(A64_COND_HI, jmp_offset), ctx);
ctx               274 arch/arm64/net/bpf_jit_comp.c 	emit(A64_ADD_I(1, tcc, tcc, 1), ctx);
ctx               281 arch/arm64/net/bpf_jit_comp.c 	emit_a64_mov_i64(tmp, off, ctx);
ctx               282 arch/arm64/net/bpf_jit_comp.c 	emit(A64_ADD(1, tmp, r2, tmp), ctx);
ctx               283 arch/arm64/net/bpf_jit_comp.c 	emit(A64_LSL(1, prg, r3, 3), ctx);
ctx               284 arch/arm64/net/bpf_jit_comp.c 	emit(A64_LDR64(prg, tmp, prg), ctx);
ctx               285 arch/arm64/net/bpf_jit_comp.c 	emit(A64_CBZ(1, prg, jmp_offset), ctx);
ctx               289 arch/arm64/net/bpf_jit_comp.c 	emit_a64_mov_i64(tmp, off, ctx);
ctx               290 arch/arm64/net/bpf_jit_comp.c 	emit(A64_LDR64(tmp, prg, tmp), ctx);
ctx               291 arch/arm64/net/bpf_jit_comp.c 	emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx);
ctx               292 arch/arm64/net/bpf_jit_comp.c 	emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
ctx               293 arch/arm64/net/bpf_jit_comp.c 	emit(A64_BR(tmp), ctx);
ctx               308 arch/arm64/net/bpf_jit_comp.c static void build_epilogue(struct jit_ctx *ctx)
ctx               318 arch/arm64/net/bpf_jit_comp.c 	emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
ctx               321 arch/arm64/net/bpf_jit_comp.c 	emit(A64_POP(fp, A64_R(26), A64_SP), ctx);
ctx               324 arch/arm64/net/bpf_jit_comp.c 	emit(A64_POP(r8, r9, A64_SP), ctx);
ctx               325 arch/arm64/net/bpf_jit_comp.c 	emit(A64_POP(r6, r7, A64_SP), ctx);
ctx               328 arch/arm64/net/bpf_jit_comp.c 	emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);
ctx               331 arch/arm64/net/bpf_jit_comp.c 	emit(A64_MOV(1, A64_R(0), r0), ctx);
ctx               333 arch/arm64/net/bpf_jit_comp.c 	emit(A64_RET(A64_LR), ctx);
ctx               342 arch/arm64/net/bpf_jit_comp.c static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
ctx               353 arch/arm64/net/bpf_jit_comp.c 	const int i = insn - ctx->prog->insnsi;
ctx               375 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOV(is64, dst, src), ctx);
ctx               380 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ADD(is64, dst, dst, src), ctx);
ctx               384 arch/arm64/net/bpf_jit_comp.c 		emit(A64_SUB(is64, dst, dst, src), ctx);
ctx               388 arch/arm64/net/bpf_jit_comp.c 		emit(A64_AND(is64, dst, dst, src), ctx);
ctx               392 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ORR(is64, dst, dst, src), ctx);
ctx               396 arch/arm64/net/bpf_jit_comp.c 		emit(A64_EOR(is64, dst, dst, src), ctx);
ctx               400 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MUL(is64, dst, dst, src), ctx);
ctx               408 arch/arm64/net/bpf_jit_comp.c 			emit(A64_UDIV(is64, dst, dst, src), ctx);
ctx               411 arch/arm64/net/bpf_jit_comp.c 			emit(A64_UDIV(is64, tmp, dst, src), ctx);
ctx               412 arch/arm64/net/bpf_jit_comp.c 			emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);
ctx               418 arch/arm64/net/bpf_jit_comp.c 		emit(A64_LSLV(is64, dst, dst, src), ctx);
ctx               422 arch/arm64/net/bpf_jit_comp.c 		emit(A64_LSRV(is64, dst, dst, src), ctx);
ctx               426 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ASRV(is64, dst, dst, src), ctx);
ctx               431 arch/arm64/net/bpf_jit_comp.c 		emit(A64_NEG(is64, dst, dst), ctx);
ctx               445 arch/arm64/net/bpf_jit_comp.c 			emit(A64_REV16(is64, dst, dst), ctx);
ctx               447 arch/arm64/net/bpf_jit_comp.c 			emit(A64_UXTH(is64, dst, dst), ctx);
ctx               450 arch/arm64/net/bpf_jit_comp.c 			emit(A64_REV32(is64, dst, dst), ctx);
ctx               454 arch/arm64/net/bpf_jit_comp.c 			emit(A64_REV64(dst, dst), ctx);
ctx               462 arch/arm64/net/bpf_jit_comp.c 			emit(A64_UXTH(is64, dst, dst), ctx);
ctx               466 arch/arm64/net/bpf_jit_comp.c 			emit(A64_UXTW(is64, dst, dst), ctx);
ctx               476 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, dst, imm, ctx);
ctx               481 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               482 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ADD(is64, dst, dst, tmp), ctx);
ctx               486 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               487 arch/arm64/net/bpf_jit_comp.c 		emit(A64_SUB(is64, dst, dst, tmp), ctx);
ctx               491 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               492 arch/arm64/net/bpf_jit_comp.c 		emit(A64_AND(is64, dst, dst, tmp), ctx);
ctx               496 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               497 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ORR(is64, dst, dst, tmp), ctx);
ctx               501 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               502 arch/arm64/net/bpf_jit_comp.c 		emit(A64_EOR(is64, dst, dst, tmp), ctx);
ctx               506 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               507 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MUL(is64, dst, dst, tmp), ctx);
ctx               511 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               512 arch/arm64/net/bpf_jit_comp.c 		emit(A64_UDIV(is64, dst, dst, tmp), ctx);
ctx               516 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp2, imm, ctx);
ctx               517 arch/arm64/net/bpf_jit_comp.c 		emit(A64_UDIV(is64, tmp, dst, tmp2), ctx);
ctx               518 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx);
ctx               522 arch/arm64/net/bpf_jit_comp.c 		emit(A64_LSL(is64, dst, dst, imm), ctx);
ctx               526 arch/arm64/net/bpf_jit_comp.c 		emit(A64_LSR(is64, dst, dst, imm), ctx);
ctx               530 arch/arm64/net/bpf_jit_comp.c 		emit(A64_ASR(is64, dst, dst, imm), ctx);
ctx               535 arch/arm64/net/bpf_jit_comp.c 		jmp_offset = bpf2a64_offset(i + off, i, ctx);
ctx               537 arch/arm64/net/bpf_jit_comp.c 		emit(A64_B(jmp_offset), ctx);
ctx               560 arch/arm64/net/bpf_jit_comp.c 		emit(A64_CMP(is64, dst, src), ctx);
ctx               562 arch/arm64/net/bpf_jit_comp.c 		jmp_offset = bpf2a64_offset(i + off, i, ctx);
ctx               599 arch/arm64/net/bpf_jit_comp.c 		emit(A64_B_(jmp_cond, jmp_offset), ctx);
ctx               603 arch/arm64/net/bpf_jit_comp.c 		emit(A64_TST(is64, dst, src), ctx);
ctx               626 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               627 arch/arm64/net/bpf_jit_comp.c 		emit(A64_CMP(is64, dst, tmp), ctx);
ctx               631 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(is64, tmp, imm, ctx);
ctx               632 arch/arm64/net/bpf_jit_comp.c 		emit(A64_TST(is64, dst, tmp), ctx);
ctx               642 arch/arm64/net/bpf_jit_comp.c 		ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass,
ctx               646 arch/arm64/net/bpf_jit_comp.c 		emit_addr_mov_i64(tmp, func_addr, ctx);
ctx               647 arch/arm64/net/bpf_jit_comp.c 		emit(A64_BLR(tmp), ctx);
ctx               648 arch/arm64/net/bpf_jit_comp.c 		emit(A64_MOV(1, r0, A64_R(0)), ctx);
ctx               653 arch/arm64/net/bpf_jit_comp.c 		if (emit_bpf_tail_call(ctx))
ctx               660 arch/arm64/net/bpf_jit_comp.c 		if (i == ctx->prog->len - 1)
ctx               662 arch/arm64/net/bpf_jit_comp.c 		jmp_offset = epilogue_offset(ctx);
ctx               664 arch/arm64/net/bpf_jit_comp.c 		emit(A64_B(jmp_offset), ctx);
ctx               674 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i64(dst, imm64, ctx);
ctx               684 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(1, tmp, off, ctx);
ctx               687 arch/arm64/net/bpf_jit_comp.c 			emit(A64_LDR32(dst, src, tmp), ctx);
ctx               690 arch/arm64/net/bpf_jit_comp.c 			emit(A64_LDRH(dst, src, tmp), ctx);
ctx               693 arch/arm64/net/bpf_jit_comp.c 			emit(A64_LDRB(dst, src, tmp), ctx);
ctx               696 arch/arm64/net/bpf_jit_comp.c 			emit(A64_LDR64(dst, src, tmp), ctx);
ctx               707 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(1, tmp2, off, ctx);
ctx               708 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(1, tmp, imm, ctx);
ctx               711 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STR32(tmp, dst, tmp2), ctx);
ctx               714 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STRH(tmp, dst, tmp2), ctx);
ctx               717 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STRB(tmp, dst, tmp2), ctx);
ctx               720 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STR64(tmp, dst, tmp2), ctx);
ctx               730 arch/arm64/net/bpf_jit_comp.c 		emit_a64_mov_i(1, tmp, off, ctx);
ctx               733 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STR32(src, dst, tmp), ctx);
ctx               736 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STRH(src, dst, tmp), ctx);
ctx               739 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STRB(src, dst, tmp), ctx);
ctx               742 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STR64(src, dst, tmp), ctx);
ctx               754 arch/arm64/net/bpf_jit_comp.c 			emit_a64_mov_i(1, tmp, off, ctx);
ctx               755 arch/arm64/net/bpf_jit_comp.c 			emit(A64_ADD(1, tmp, tmp, dst), ctx);
ctx               759 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STADD(isdw, reg, src), ctx);
ctx               761 arch/arm64/net/bpf_jit_comp.c 			emit(A64_LDXR(isdw, tmp2, reg), ctx);
ctx               762 arch/arm64/net/bpf_jit_comp.c 			emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);
ctx               763 arch/arm64/net/bpf_jit_comp.c 			emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx);
ctx               766 arch/arm64/net/bpf_jit_comp.c 			emit(A64_CBNZ(0, tmp3, jmp_offset), ctx);
ctx               778 arch/arm64/net/bpf_jit_comp.c static int build_body(struct jit_ctx *ctx, bool extra_pass)
ctx               780 arch/arm64/net/bpf_jit_comp.c 	const struct bpf_prog *prog = ctx->prog;
ctx               787 arch/arm64/net/bpf_jit_comp.c 		ret = build_insn(insn, ctx, extra_pass);
ctx               790 arch/arm64/net/bpf_jit_comp.c 			if (ctx->image == NULL)
ctx               791 arch/arm64/net/bpf_jit_comp.c 				ctx->offset[i] = ctx->idx;
ctx               794 arch/arm64/net/bpf_jit_comp.c 		if (ctx->image == NULL)
ctx               795 arch/arm64/net/bpf_jit_comp.c 			ctx->offset[i] = ctx->idx;
ctx               803 arch/arm64/net/bpf_jit_comp.c static int validate_code(struct jit_ctx *ctx)
ctx               807 arch/arm64/net/bpf_jit_comp.c 	for (i = 0; i < ctx->idx; i++) {
ctx               808 arch/arm64/net/bpf_jit_comp.c 		u32 a64_insn = le32_to_cpu(ctx->image[i]);
ctx               825 arch/arm64/net/bpf_jit_comp.c 	struct jit_ctx ctx;
ctx               836 arch/arm64/net/bpf_jit_comp.c 	struct jit_ctx ctx;
ctx               863 arch/arm64/net/bpf_jit_comp.c 	if (jit_data->ctx.offset) {
ctx               864 arch/arm64/net/bpf_jit_comp.c 		ctx = jit_data->ctx;
ctx               868 arch/arm64/net/bpf_jit_comp.c 		image_size = sizeof(u32) * ctx.idx;
ctx               871 arch/arm64/net/bpf_jit_comp.c 	memset(&ctx, 0, sizeof(ctx));
ctx               872 arch/arm64/net/bpf_jit_comp.c 	ctx.prog = prog;
ctx               874 arch/arm64/net/bpf_jit_comp.c 	ctx.offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL);
ctx               875 arch/arm64/net/bpf_jit_comp.c 	if (ctx.offset == NULL) {
ctx               883 arch/arm64/net/bpf_jit_comp.c 	if (build_body(&ctx, extra_pass)) {
ctx               888 arch/arm64/net/bpf_jit_comp.c 	if (build_prologue(&ctx, was_classic)) {
ctx               893 arch/arm64/net/bpf_jit_comp.c 	ctx.epilogue_offset = ctx.idx;
ctx               894 arch/arm64/net/bpf_jit_comp.c 	build_epilogue(&ctx);
ctx               897 arch/arm64/net/bpf_jit_comp.c 	image_size = sizeof(u32) * ctx.idx;
ctx               907 arch/arm64/net/bpf_jit_comp.c 	ctx.image = (__le32 *)image_ptr;
ctx               909 arch/arm64/net/bpf_jit_comp.c 	ctx.idx = 0;
ctx               911 arch/arm64/net/bpf_jit_comp.c 	build_prologue(&ctx, was_classic);
ctx               913 arch/arm64/net/bpf_jit_comp.c 	if (build_body(&ctx, extra_pass)) {
ctx               919 arch/arm64/net/bpf_jit_comp.c 	build_epilogue(&ctx);
ctx               922 arch/arm64/net/bpf_jit_comp.c 	if (validate_code(&ctx)) {
ctx               930 arch/arm64/net/bpf_jit_comp.c 		bpf_jit_dump(prog->len, image_size, 2, ctx.image);
ctx               932 arch/arm64/net/bpf_jit_comp.c 	bpf_flush_icache(header, ctx.image + ctx.idx);
ctx               935 arch/arm64/net/bpf_jit_comp.c 		if (extra_pass && ctx.idx != jit_data->ctx.idx) {
ctx               937 arch/arm64/net/bpf_jit_comp.c 				    ctx.idx, jit_data->ctx.idx);
ctx               945 arch/arm64/net/bpf_jit_comp.c 		jit_data->ctx = ctx;
ctx               949 arch/arm64/net/bpf_jit_comp.c 	prog->bpf_func = (void *)ctx.image;
ctx               954 arch/arm64/net/bpf_jit_comp.c 		bpf_prog_fill_jited_linfo(prog, ctx.offset);
ctx               956 arch/arm64/net/bpf_jit_comp.c 		kfree(ctx.offset);
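
emit_a64_mov_i() (the entries around line 72 above) materialises a 32-bit immediate in at most two instructions: a value whose high half looks negative starts from MOVN, which writes the bitwise NOT of its shifted operand, everything else starts from MOVZ, and MOVK patches the other 16-bit half only when needed. The same selection logic with the instruction encoder swapped for a trace:

	#include <stdint.h>
	#include <stdio.h>

	/* Mirrors the MOVN/MOVZ/MOVK choice in emit_a64_mov_i(); only the
	 * emission is replaced by printing the would-be instruction. */
	static void mov_i32(int32_t val)
	{
		uint16_t hi = (uint16_t)(val >> 16);
		uint16_t lo = (uint16_t)(val & 0xffff);

		if (hi & 0x8000) {
			if (hi == 0xffff) {
				/* one MOVN of the inverted low half */
				printf("movn w0, #0x%x\n", (uint16_t)~lo);
			} else {
				printf("movn w0, #0x%x, lsl #16\n",
				       (uint16_t)~hi);
				if (lo != 0xffff)
					printf("movk w0, #0x%x\n", lo);
			}
		} else {
			printf("movz w0, #0x%x\n", lo);
			if (hi)
				printf("movk w0, #0x%x, lsl #16\n", hi);
		}
	}

For example, mov_i32(-1) needs a single MOVN, while mov_i32(0x12345678) needs a MOVZ plus a MOVK.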
ctx                22 arch/ia64/include/asm/mmu_context.h #define ia64_rid(ctx,addr)	(((ctx) << 3) | (addr >> 61))
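
ia64_rid() packs a TLB region ID from the mm context number and the 3-bit virtual region number, which is simply the top three bits of the 64-bit address. As a plain function, assuming unsigned 64-bit inputs:

	#include <stdint.h>

	/* Region ID = context number with the 3-bit region number (the
	 * top three address bits) appended in the low bits. */
	static uint64_t ia64_rid(uint64_t ctx, uint64_t addr)
	{
		return (ctx << 3) | (addr >> 61);
	}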
ctx               103 arch/ia64/kernel/perfmon.c #define PMC_OVFL_NOTIFY(ctx, i)	((ctx)->ctx_pmds[i].flags &  PFM_REGFL_OVFL_NOTIFY)
ctx               130 arch/ia64/kernel/perfmon.c #define CTX_USED_PMD(ctx, mask) (ctx)->ctx_used_pmds[0] |= (mask)
ctx               131 arch/ia64/kernel/perfmon.c #define CTX_IS_USED_PMD(ctx, c) (((ctx)->ctx_used_pmds[0] & (1UL << (c))) != 0UL)
ctx               133 arch/ia64/kernel/perfmon.c #define CTX_USED_MONITOR(ctx, mask) (ctx)->ctx_used_monitors[0] |= (mask)
ctx               135 arch/ia64/kernel/perfmon.c #define CTX_USED_IBR(ctx,n) 	(ctx)->ctx_used_ibrs[(n)>>6] |= 1UL<< ((n) % 64)
ctx               136 arch/ia64/kernel/perfmon.c #define CTX_USED_DBR(ctx,n) 	(ctx)->ctx_used_dbrs[(n)>>6] |= 1UL<< ((n) % 64)
ctx               137 arch/ia64/kernel/perfmon.c #define CTX_USES_DBREGS(ctx)	(((pfm_context_t *)(ctx))->ctx_fl_using_dbreg==1)
ctx               352 arch/ia64/kernel/perfmon.c #define SET_LAST_CPU(ctx, v)	(ctx)->ctx_last_cpu = (v)
ctx               353 arch/ia64/kernel/perfmon.c #define GET_LAST_CPU(ctx)	(ctx)->ctx_last_cpu
ctx               355 arch/ia64/kernel/perfmon.c #define SET_LAST_CPU(ctx, v)	do {} while(0)
ctx               356 arch/ia64/kernel/perfmon.c #define GET_LAST_CPU(ctx)	do {} while(0)
ctx               392 arch/ia64/kernel/perfmon.c typedef int (*pfm_reg_check_t)(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
ctx               473 arch/ia64/kernel/perfmon.c 	int		(*cmd_func)(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs);
ctx               577 arch/ia64/kernel/perfmon.c static int pfm_context_unload(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs);
ctx               606 arch/ia64/kernel/perfmon.c 	struct pseudo_fs_context *ctx = init_pseudo(fc, PFMFS_MAGIC);
ctx               607 arch/ia64/kernel/perfmon.c 	if (!ctx)
ctx               609 arch/ia64/kernel/perfmon.c 	ctx->dops = &pfmfs_dentry_operations;
ctx               638 arch/ia64/kernel/perfmon.c static int pfm_write_ibr_dbr(int mode, pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs);
ctx               654 arch/ia64/kernel/perfmon.c static int pfm_end_notify_user(pfm_context_t *ctx);
ctx               742 arch/ia64/kernel/perfmon.c pfm_read_soft_counter(pfm_context_t *ctx, int i)
ctx               744 arch/ia64/kernel/perfmon.c 	return ctx->ctx_pmds[i].val + (ia64_get_pmd(i) & pmu_conf->ovfl_val);
ctx               751 arch/ia64/kernel/perfmon.c pfm_write_soft_counter(pfm_context_t *ctx, int i, unsigned long val)
ctx               755 arch/ia64/kernel/perfmon.c 	ctx->ctx_pmds[i].val = val  & ~ovfl_val;
ctx               764 arch/ia64/kernel/perfmon.c pfm_get_new_msg(pfm_context_t *ctx)
ctx               768 arch/ia64/kernel/perfmon.c 	next = (ctx->ctx_msgq_tail+1) % PFM_MAX_MSGS;
ctx               770 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx_fd=%p head=%d tail=%d\n", ctx, ctx->ctx_msgq_head, ctx->ctx_msgq_tail));
ctx               771 arch/ia64/kernel/perfmon.c 	if (next == ctx->ctx_msgq_head) return NULL;
ctx               773 arch/ia64/kernel/perfmon.c  	idx = 	ctx->ctx_msgq_tail;
ctx               774 arch/ia64/kernel/perfmon.c 	ctx->ctx_msgq_tail = next;
ctx               776 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx=%p head=%d tail=%d msg=%d\n", ctx, ctx->ctx_msgq_head, ctx->ctx_msgq_tail, idx));
ctx               778 arch/ia64/kernel/perfmon.c 	return ctx->ctx_msgq+idx;
ctx               782 arch/ia64/kernel/perfmon.c pfm_get_next_msg(pfm_context_t *ctx)
ctx               786 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx=%p head=%d tail=%d\n", ctx, ctx->ctx_msgq_head, ctx->ctx_msgq_tail));
ctx               788 arch/ia64/kernel/perfmon.c 	if (PFM_CTXQ_EMPTY(ctx)) return NULL;
ctx               793 arch/ia64/kernel/perfmon.c 	msg = ctx->ctx_msgq+ctx->ctx_msgq_head;
ctx               798 arch/ia64/kernel/perfmon.c 	ctx->ctx_msgq_head = (ctx->ctx_msgq_head+1) % PFM_MAX_MSGS;
ctx               800 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx=%p head=%d tail=%d type=%d\n", ctx, ctx->ctx_msgq_head, ctx->ctx_msgq_tail, msg->pfm_gen_msg.msg_type));
ctx               806 arch/ia64/kernel/perfmon.c pfm_reset_msgq(pfm_context_t *ctx)
ctx               808 arch/ia64/kernel/perfmon.c 	ctx->ctx_msgq_head = ctx->ctx_msgq_tail = 0;
ctx               809 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx=%p msgq reset\n", ctx));
ctx               815 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx               821 arch/ia64/kernel/perfmon.c 	ctx = kzalloc(sizeof(pfm_context_t), GFP_KERNEL);
ctx               822 arch/ia64/kernel/perfmon.c 	if (ctx) {
ctx               823 arch/ia64/kernel/perfmon.c 		DPRINT(("alloc ctx @%p\n", ctx));
ctx               828 arch/ia64/kernel/perfmon.c 		spin_lock_init(&ctx->ctx_lock);
ctx               833 arch/ia64/kernel/perfmon.c 		ctx->ctx_state = PFM_CTX_UNLOADED;
ctx               838 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_block       = (ctx_flags & PFM_FL_NOTIFY_BLOCK) ? 1 : 0;
ctx               839 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_system      = (ctx_flags & PFM_FL_SYSTEM_WIDE) ? 1: 0;
ctx               840 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_no_msg      = (ctx_flags & PFM_FL_OVFL_NO_MSG) ? 1: 0;
ctx               849 arch/ia64/kernel/perfmon.c 		init_completion(&ctx->ctx_restart_done);
ctx               854 arch/ia64/kernel/perfmon.c 		ctx->ctx_last_activation = PFM_INVALID_ACTIVATION;
ctx               855 arch/ia64/kernel/perfmon.c 		SET_LAST_CPU(ctx, -1);
ctx               860 arch/ia64/kernel/perfmon.c 		ctx->ctx_msgq_head = ctx->ctx_msgq_tail = 0;
ctx               861 arch/ia64/kernel/perfmon.c 		init_waitqueue_head(&ctx->ctx_msgq_wait);
ctx               862 arch/ia64/kernel/perfmon.c 		init_waitqueue_head(&ctx->ctx_zombieq);
ctx               865 arch/ia64/kernel/perfmon.c 	return ctx;
ctx               869 arch/ia64/kernel/perfmon.c pfm_context_free(pfm_context_t *ctx)
ctx               871 arch/ia64/kernel/perfmon.c 	if (ctx) {
ctx               872 arch/ia64/kernel/perfmon.c 		DPRINT(("free ctx @%p\n", ctx));
ctx               873 arch/ia64/kernel/perfmon.c 		kfree(ctx);
ctx               880 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx = PFM_GET_CTX(task);
ctx               906 arch/ia64/kernel/perfmon.c 	mask = ctx->ctx_used_pmds[0];
ctx               916 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val += (val & ovfl_mask);
ctx               918 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val = val;
ctx               922 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val,
ctx               933 arch/ia64/kernel/perfmon.c 	mask = ctx->ctx_used_monitors[0] >> PMU_FIRST_COUNTER;
ctx               936 arch/ia64/kernel/perfmon.c 		ia64_set_pmc(i, ctx->th_pmcs[i] & ~0xfUL);
ctx               937 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[i] &= ~0xfUL;
ctx               938 arch/ia64/kernel/perfmon.c 		DPRINT_ovfl(("pmc[%d]=0x%lx\n", i, ctx->th_pmcs[i]));
ctx               954 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx = PFM_GET_CTX(task);
ctx               959 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx               966 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_state != PFM_CTX_MASKED) {
ctx               968 arch/ia64/kernel/perfmon.c 			task_pid_nr(task), task_pid_nr(current), ctx->ctx_state);
ctx               992 arch/ia64/kernel/perfmon.c 	mask = ctx->ctx_used_pmds[0];
ctx              1002 arch/ia64/kernel/perfmon.c 			val = ctx->ctx_pmds[i].val & ovfl_mask;
ctx              1003 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val &= ~ovfl_mask;
ctx              1005 arch/ia64/kernel/perfmon.c 			val = ctx->ctx_pmds[i].val;
ctx              1011 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val,
ctx              1017 arch/ia64/kernel/perfmon.c 	mask = ctx->ctx_used_monitors[0] >> PMU_FIRST_COUNTER;
ctx              1020 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[i] = ctx->ctx_pmcs[i];
ctx              1021 arch/ia64/kernel/perfmon.c 		ia64_set_pmc(i, ctx->th_pmcs[i]);
ctx              1023 arch/ia64/kernel/perfmon.c 					task_pid_nr(task), i, ctx->th_pmcs[i]));
ctx              1031 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_using_dbreg) {
ctx              1032 arch/ia64/kernel/perfmon.c 		pfm_restore_ibrs(ctx->ctx_ibrs, pmu_conf->num_ibrs);
ctx              1033 arch/ia64/kernel/perfmon.c 		pfm_restore_dbrs(ctx->ctx_dbrs, pmu_conf->num_dbrs);
ctx              1080 arch/ia64/kernel/perfmon.c pfm_copy_pmds(struct task_struct *task, pfm_context_t *ctx)
ctx              1083 arch/ia64/kernel/perfmon.c 	unsigned long mask = ctx->ctx_all_pmds[0];
ctx              1091 arch/ia64/kernel/perfmon.c 		val = ctx->ctx_pmds[i].val;
ctx              1100 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val = val & ~ovfl_val;
ctx              1103 arch/ia64/kernel/perfmon.c 		ctx->th_pmds[i] = val;
ctx              1107 arch/ia64/kernel/perfmon.c 			ctx->th_pmds[i],
ctx              1108 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[i].val));
ctx              1116 arch/ia64/kernel/perfmon.c pfm_copy_pmcs(struct task_struct *task, pfm_context_t *ctx)
ctx              1118 arch/ia64/kernel/perfmon.c 	unsigned long mask = ctx->ctx_all_pmcs[0];
ctx              1125 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[i] = ctx->ctx_pmcs[i];
ctx              1126 arch/ia64/kernel/perfmon.c 		DPRINT(("pmc[%d]=0x%lx\n", i, ctx->th_pmcs[i]));
ctx              1350 arch/ia64/kernel/perfmon.c pfm_unreserve_session(pfm_context_t *ctx, int is_syswide, unsigned int cpu)
ctx              1371 arch/ia64/kernel/perfmon.c 		if (ctx && ctx->ctx_fl_using_dbreg) {
ctx              1373 arch/ia64/kernel/perfmon.c 				printk(KERN_ERR "perfmon: invalid release for ctx %p sys_use_dbregs=0\n", ctx);
ctx              1435 arch/ia64/kernel/perfmon.c pfm_free_smpl_buffer(pfm_context_t *ctx)
ctx              1439 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_smpl_hdr == NULL) goto invalid_free;
ctx              1444 arch/ia64/kernel/perfmon.c 	fmt = ctx->ctx_buf_fmt;
ctx              1447 arch/ia64/kernel/perfmon.c 		ctx->ctx_smpl_hdr,
ctx              1448 arch/ia64/kernel/perfmon.c 		ctx->ctx_smpl_size,
ctx              1449 arch/ia64/kernel/perfmon.c 		ctx->ctx_smpl_vaddr));
ctx              1456 arch/ia64/kernel/perfmon.c 	vfree(ctx->ctx_smpl_hdr);
ctx              1458 arch/ia64/kernel/perfmon.c 	ctx->ctx_smpl_hdr  = NULL;
ctx              1459 arch/ia64/kernel/perfmon.c 	ctx->ctx_smpl_size = 0UL;
ctx              1504 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              1514 arch/ia64/kernel/perfmon.c 	ctx = filp->private_data;
ctx              1515 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              1524 arch/ia64/kernel/perfmon.c 		DPRINT(("message is too small ctx=%p (>=%ld)\n", ctx, sizeof(pfm_msg_t)));
ctx              1528 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              1533 arch/ia64/kernel/perfmon.c   	add_wait_queue(&ctx->ctx_msgq_wait, &wait);
ctx              1543 arch/ia64/kernel/perfmon.c 		DPRINT(("head=%d tail=%d\n", ctx->ctx_msgq_head, ctx->ctx_msgq_tail));
ctx              1546 arch/ia64/kernel/perfmon.c 		if(PFM_CTXQ_EMPTY(ctx) == 0) break;
ctx              1548 arch/ia64/kernel/perfmon.c 		UNPROTECT_CTX(ctx, flags);
ctx              1568 arch/ia64/kernel/perfmon.c 		PROTECT_CTX(ctx, flags);
ctx              1572 arch/ia64/kernel/perfmon.c 	remove_wait_queue(&ctx->ctx_msgq_wait, &wait);
ctx              1577 arch/ia64/kernel/perfmon.c 	msg = pfm_get_next_msg(ctx);
ctx              1579 arch/ia64/kernel/perfmon.c 		printk(KERN_ERR "perfmon: pfm_read no msg for ctx=%p [%d]\n", ctx, task_pid_nr(current));
ctx              1589 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              1605 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              1614 arch/ia64/kernel/perfmon.c 	ctx = filp->private_data;
ctx              1615 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              1621 arch/ia64/kernel/perfmon.c 	DPRINT(("pfm_poll ctx_fd=%d before poll_wait\n", ctx->ctx_fd));
ctx              1623 arch/ia64/kernel/perfmon.c 	poll_wait(filp, &ctx->ctx_msgq_wait, wait);
ctx              1625 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              1627 arch/ia64/kernel/perfmon.c 	if (PFM_CTXQ_EMPTY(ctx) == 0)
ctx              1630 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              1632 arch/ia64/kernel/perfmon.c 	DPRINT(("pfm_poll ctx_fd=%d mask=0x%x\n", ctx->ctx_fd, mask));
ctx              1648 arch/ia64/kernel/perfmon.c pfm_do_fasync(int fd, struct file *filp, pfm_context_t *ctx, int on)
ctx              1652 arch/ia64/kernel/perfmon.c 	ret = fasync_helper (fd, filp, on, &ctx->ctx_async_queue);
ctx              1658 arch/ia64/kernel/perfmon.c 		ctx->ctx_async_queue, ret));
ctx              1666 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              1674 arch/ia64/kernel/perfmon.c 	ctx = filp->private_data;
ctx              1675 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              1686 arch/ia64/kernel/perfmon.c 	ret = pfm_do_fasync(fd, filp, ctx, on);
ctx              1692 arch/ia64/kernel/perfmon.c 		ctx->ctx_async_queue, ret));
ctx              1706 arch/ia64/kernel/perfmon.c 	pfm_context_t   *ctx = (pfm_context_t *)info;
ctx              1712 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_cpu != smp_processor_id()) {
ctx              1714 arch/ia64/kernel/perfmon.c 			ctx->ctx_cpu,
ctx              1719 arch/ia64/kernel/perfmon.c 	if (owner != ctx->ctx_task) {
ctx              1722 arch/ia64/kernel/perfmon.c 			task_pid_nr(owner), task_pid_nr(ctx->ctx_task));
ctx              1725 arch/ia64/kernel/perfmon.c 	if (GET_PMU_CTX() != ctx) {
ctx              1728 arch/ia64/kernel/perfmon.c 			GET_PMU_CTX(), ctx);
ctx              1732 arch/ia64/kernel/perfmon.c 	DPRINT(("on CPU%d forcing system wide stop for [%d]\n", smp_processor_id(), task_pid_nr(ctx->ctx_task)));
ctx              1740 arch/ia64/kernel/perfmon.c 	ret = pfm_context_unload(ctx, NULL, 0, regs);
ctx              1752 arch/ia64/kernel/perfmon.c pfm_syswide_cleanup_other_cpu(pfm_context_t *ctx)
ctx              1756 arch/ia64/kernel/perfmon.c 	DPRINT(("calling CPU%d for cleanup\n", ctx->ctx_cpu));
ctx              1757 arch/ia64/kernel/perfmon.c 	ret = smp_call_function_single(ctx->ctx_cpu, pfm_syswide_force_stop, ctx, 1);
ctx              1758 arch/ia64/kernel/perfmon.c 	DPRINT(("called CPU%d for cleanup ret=%d\n", ctx->ctx_cpu, ret));
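
pfm_syswide_cleanup_other_cpu (entries 1752-1758) shows the standard way to run teardown on the CPU that owns a system-wide session: smp_call_function_single() with wait=1 executes the handler on that CPU and only returns once it has completed. A stripped-down sketch, treating the context as an opaque pointer:

	#include <linux/smp.h>

	static void my_remote_stop(void *info)
	{
		/* runs on the target CPU with IRQs disabled; info is the
		 * context being torn down */
		(void)info;
	}

	static int my_cleanup_other_cpu(void *ctx, int ctx_cpu)
	{
		/* last argument wait=1: block until the handler has run */
		return smp_call_function_single(ctx_cpu, my_remote_stop, ctx, 1);
	}
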
ctx              1769 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              1782 arch/ia64/kernel/perfmon.c 	ctx = filp->private_data;
ctx              1783 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              1801 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              1803 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              1804 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              1806 arch/ia64/kernel/perfmon.c 	task = PFM_CTX_TASK(ctx);
ctx              1829 arch/ia64/kernel/perfmon.c 		if (is_system && ctx->ctx_cpu != smp_processor_id()) {
ctx              1831 arch/ia64/kernel/perfmon.c 			DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              1837 arch/ia64/kernel/perfmon.c 			pfm_syswide_cleanup_other_cpu(ctx);
ctx              1856 arch/ia64/kernel/perfmon.c 			pfm_context_unload(ctx, NULL, 0, regs);
ctx              1858 arch/ia64/kernel/perfmon.c 			DPRINT(("ctx_state=%d\n", ctx->ctx_state));
ctx              1873 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_smpl_vaddr && current->mm) {
ctx              1874 arch/ia64/kernel/perfmon.c 		smpl_buf_vaddr = ctx->ctx_smpl_vaddr;
ctx              1875 arch/ia64/kernel/perfmon.c 		smpl_buf_size  = ctx->ctx_smpl_size;
ctx              1878 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              1908 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              1925 arch/ia64/kernel/perfmon.c 	ctx = filp->private_data;
ctx              1926 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              1931 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              1933 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              1934 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              1936 arch/ia64/kernel/perfmon.c 	task = PFM_CTX_TASK(ctx);
ctx              1960 arch/ia64/kernel/perfmon.c 	if (state == PFM_CTX_MASKED && CTX_OVFL_NOBLOCK(ctx) == 0) {
ctx              1976 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_going_zombie = 1;
ctx              1981 arch/ia64/kernel/perfmon.c 		complete(&ctx->ctx_restart_done);
ctx              1994 arch/ia64/kernel/perfmon.c   		add_wait_queue(&ctx->ctx_zombieq, &wait);
ctx              1996 arch/ia64/kernel/perfmon.c 		UNPROTECT_CTX(ctx, flags);
ctx              2006 arch/ia64/kernel/perfmon.c 		PROTECT_CTX(ctx, flags);
ctx              2009 arch/ia64/kernel/perfmon.c 		remove_wait_queue(&ctx->ctx_zombieq, &wait);
ctx              2022 arch/ia64/kernel/perfmon.c 		ctx->ctx_state = PFM_CTX_ZOMBIE;
ctx              2031 arch/ia64/kernel/perfmon.c 		pfm_context_unload(ctx, NULL, 0, regs);
ctx              2037 arch/ia64/kernel/perfmon.c 	state = ctx->ctx_state;
ctx              2053 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_smpl_hdr) {
ctx              2054 arch/ia64/kernel/perfmon.c 		smpl_buf_addr = ctx->ctx_smpl_hdr;
ctx              2055 arch/ia64/kernel/perfmon.c 		smpl_buf_size = ctx->ctx_smpl_size;
ctx              2057 arch/ia64/kernel/perfmon.c 		ctx->ctx_smpl_hdr = NULL;
ctx              2058 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_is_sampling = 0;
ctx              2067 arch/ia64/kernel/perfmon.c 	if (smpl_buf_addr) pfm_exit_smpl_buffer(ctx->ctx_buf_fmt);
ctx              2073 arch/ia64/kernel/perfmon.c 		pfm_unreserve_session(ctx, ctx->ctx_fl_system , ctx->ctx_cpu);
ctx              2089 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              2100 arch/ia64/kernel/perfmon.c 	if (free_possible) pfm_context_free(ctx);
ctx              2129 arch/ia64/kernel/perfmon.c pfm_alloc_file(pfm_context_t *ctx)
ctx              2168 arch/ia64/kernel/perfmon.c 	file->private_data = ctx;
ctx              2196 arch/ia64/kernel/perfmon.c pfm_smpl_buffer_alloc(struct task_struct *task, struct file *filp, pfm_context_t *ctx, unsigned long rsize, void **user_vaddr)
ctx              2252 arch/ia64/kernel/perfmon.c 	ctx->ctx_smpl_hdr   = smpl_buf;
ctx              2253 arch/ia64/kernel/perfmon.c 	ctx->ctx_smpl_size  = size; /* aligned size */
ctx              2273 arch/ia64/kernel/perfmon.c 	DPRINT(("aligned size=%ld, hdr=%p mapped @0x%lx\n", size, ctx->ctx_smpl_hdr, vma->vm_start));
ctx              2294 arch/ia64/kernel/perfmon.c 	ctx->ctx_smpl_vaddr = (void *)vma->vm_start;
ctx              2368 arch/ia64/kernel/perfmon.c pfm_setup_buffer_fmt(struct task_struct *task, struct file *filp, pfm_context_t *ctx, unsigned int ctx_flags,
ctx              2397 arch/ia64/kernel/perfmon.c 	ctx->ctx_buf_fmt = fmt;
ctx              2398 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_is_sampling = 1; /* assume record() is defined */
ctx              2410 arch/ia64/kernel/perfmon.c 		ret = pfm_smpl_buffer_alloc(current, filp, ctx, size, &uaddr);
ctx              2416 arch/ia64/kernel/perfmon.c 	ret = pfm_buf_fmt_init(fmt, task, ctx->ctx_smpl_hdr, ctx_flags, cpu, fmt_arg);
ctx              2423 arch/ia64/kernel/perfmon.c pfm_reset_pmu_state(pfm_context_t *ctx)
ctx              2432 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmcs[i] = PMC_DFL_VAL(i);
ctx              2433 arch/ia64/kernel/perfmon.c 		DPRINT(("pmc[%d]=0x%lx\n", i, ctx->ctx_pmcs[i]));
ctx              2462 arch/ia64/kernel/perfmon.c 	ctx->ctx_all_pmcs[0] = pmu_conf->impl_pmcs[0] & ~0x1;
ctx              2467 arch/ia64/kernel/perfmon.c 	ctx->ctx_all_pmds[0] = pmu_conf->impl_pmds[0];
ctx              2469 arch/ia64/kernel/perfmon.c 	DPRINT(("<%d> all_pmcs=0x%lx all_pmds=0x%lx\n", ctx->ctx_fd, ctx->ctx_all_pmcs[0],ctx->ctx_all_pmds[0]));
ctx              2474 arch/ia64/kernel/perfmon.c 	ctx->ctx_used_ibrs[0] = 0UL;
ctx              2475 arch/ia64/kernel/perfmon.c 	ctx->ctx_used_dbrs[0] = 0UL;
ctx              2509 arch/ia64/kernel/perfmon.c pfm_task_incompatible(pfm_context_t *ctx, struct task_struct *task)
ctx              2525 arch/ia64/kernel/perfmon.c 	if (CTX_OVFL_NOBLOCK(ctx) == 0 && task == current) {
ctx              2555 arch/ia64/kernel/perfmon.c pfm_get_task(pfm_context_t *ctx, pid_t pid, struct task_struct **task)
ctx              2570 arch/ia64/kernel/perfmon.c 	ret = pfm_task_incompatible(ctx, p);
ctx              2582 arch/ia64/kernel/perfmon.c pfm_context_create(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              2604 arch/ia64/kernel/perfmon.c 	ctx = pfm_context_alloc(ctx_flags);
ctx              2605 arch/ia64/kernel/perfmon.c 	if (!ctx)
ctx              2608 arch/ia64/kernel/perfmon.c 	filp = pfm_alloc_file(ctx);
ctx              2614 arch/ia64/kernel/perfmon.c 	req->ctx_fd = ctx->ctx_fd = fd;
ctx              2620 arch/ia64/kernel/perfmon.c 		ret = pfm_setup_buffer_fmt(current, filp, ctx, ctx_flags, 0, req);
ctx              2626 arch/ia64/kernel/perfmon.c 		ctx,
ctx              2628 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_system,
ctx              2629 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_block,
ctx              2630 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_excl_idle,
ctx              2631 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_no_msg,
ctx              2632 arch/ia64/kernel/perfmon.c 		ctx->ctx_fd));
ctx              2637 arch/ia64/kernel/perfmon.c 	pfm_reset_pmu_state(ctx);
ctx              2648 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_buf_fmt) {
ctx              2649 arch/ia64/kernel/perfmon.c 		pfm_buf_fmt_exit(ctx->ctx_buf_fmt, current, NULL, regs);
ctx              2652 arch/ia64/kernel/perfmon.c 	pfm_context_free(ctx);
ctx              2679 arch/ia64/kernel/perfmon.c pfm_reset_regs_masked(pfm_context_t *ctx, unsigned long *ovfl_regs, int is_long_reset)
ctx              2694 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[i].val = val = pfm_new_counter_value(ctx->ctx_pmds+ i, is_long_reset);
ctx              2695 arch/ia64/kernel/perfmon.c 		reset_others        |= ctx->ctx_pmds[i].reset_pmds[0];
ctx              2707 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[i].val = val = pfm_new_counter_value(ctx->ctx_pmds + i, is_long_reset);
ctx              2715 arch/ia64/kernel/perfmon.c pfm_reset_regs(pfm_context_t *ctx, unsigned long *ovfl_regs, int is_long_reset)
ctx              2724 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_state == PFM_CTX_MASKED) {
ctx              2725 arch/ia64/kernel/perfmon.c 		pfm_reset_regs_masked(ctx, ovfl_regs, is_long_reset);
ctx              2737 arch/ia64/kernel/perfmon.c 		val           = pfm_new_counter_value(ctx->ctx_pmds+ i, is_long_reset);
ctx              2738 arch/ia64/kernel/perfmon.c 		reset_others |= ctx->ctx_pmds[i].reset_pmds[0];
ctx              2742 arch/ia64/kernel/perfmon.c 		pfm_write_soft_counter(ctx, i, val);
ctx              2752 arch/ia64/kernel/perfmon.c 		val = pfm_new_counter_value(ctx->ctx_pmds + i, is_long_reset);
ctx              2755 arch/ia64/kernel/perfmon.c 			pfm_write_soft_counter(ctx, i, val);
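
pfm_reset_regs and pfm_reset_regs_masked (entries 2679-2755) reload each overflowed counter from one of two stored values: the long reset after an explicit user restart, the short reset after an in-kernel reset, optionally perturbed by a per-counter randomization seed/mask. The selection, reduced to a sketch with illustrative field names:

	struct my_pmd {
		unsigned long long_reset;	/* reload on user restart */
		unsigned long short_reset;	/* reload on kernel-side reset */
		unsigned long seed, mask;	/* optional randomization */
	};

	static unsigned long my_new_counter_value(const struct my_pmd *pmd,
						  int is_long_reset)
	{
		/* perfmon additionally randomizes the value within
		 * seed/mask when the counter asked for it; omitted here */
		return is_long_reset ? pmd->long_reset : pmd->short_reset;
	}
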
ctx              2766 arch/ia64/kernel/perfmon.c pfm_write_pmcs(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              2779 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              2781 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              2782 arch/ia64/kernel/perfmon.c 	task      = ctx->ctx_task;
ctx              2793 arch/ia64/kernel/perfmon.c 		if (is_system && ctx->ctx_cpu != smp_processor_id()) {
ctx              2794 arch/ia64/kernel/perfmon.c 			DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              2880 arch/ia64/kernel/perfmon.c 			ret = (*wr_func)(task, ctx, cnum, &value, regs);
ctx              2901 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].flags = flags;
ctx              2903 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].reset_pmds[0] = reset_pmds;
ctx              2904 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].smpl_pmds[0]  = smpl_pmds;
ctx              2905 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].eventid       = req->reg_smpl_eventid;
ctx              2918 arch/ia64/kernel/perfmon.c 			CTX_USED_PMD(ctx, reset_pmds);
ctx              2919 arch/ia64/kernel/perfmon.c 			CTX_USED_PMD(ctx, smpl_pmds);
ctx              2924 arch/ia64/kernel/perfmon.c 			if (state == PFM_CTX_MASKED) ctx->ctx_ovfl_regs[0] &= ~1UL << cnum;
ctx              2931 arch/ia64/kernel/perfmon.c 		CTX_USED_PMD(ctx, pmu_conf->pmc_desc[cnum].dep_pmd[0]);
ctx              2945 arch/ia64/kernel/perfmon.c 		if (is_monitor) CTX_USED_MONITOR(ctx, 1UL << cnum);
ctx              2950 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmcs[cnum] = value;
ctx              2956 arch/ia64/kernel/perfmon.c 			if (is_system == 0) ctx->th_pmcs[cnum] = value;
ctx              2973 arch/ia64/kernel/perfmon.c 				ctx->ctx_reload_pmcs[0] |= 1UL << cnum;
ctx              2984 arch/ia64/kernel/perfmon.c 			  ctx->ctx_all_pmcs[0],
ctx              2985 arch/ia64/kernel/perfmon.c 			  ctx->ctx_used_pmds[0],
ctx              2986 arch/ia64/kernel/perfmon.c 			  ctx->ctx_pmds[cnum].eventid,
ctx              2989 arch/ia64/kernel/perfmon.c 			  ctx->ctx_reload_pmcs[0],
ctx              2990 arch/ia64/kernel/perfmon.c 			  ctx->ctx_used_monitors[0],
ctx              2991 arch/ia64/kernel/perfmon.c 			  ctx->ctx_ovfl_regs[0]));
ctx              3006 arch/ia64/kernel/perfmon.c pfm_write_pmds(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3018 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3020 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3022 arch/ia64/kernel/perfmon.c 	task      = ctx->ctx_task;
ctx              3036 arch/ia64/kernel/perfmon.c 		if (unlikely(is_system && ctx->ctx_cpu != smp_processor_id())) {
ctx              3037 arch/ia64/kernel/perfmon.c 			DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              3062 arch/ia64/kernel/perfmon.c 			ret = (*wr_func)(task, ctx, cnum, &v, regs);
ctx              3086 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].lval = value;
ctx              3099 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[cnum].long_reset  = req->reg_long_reset;
ctx              3100 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[cnum].short_reset = req->reg_short_reset;
ctx              3105 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[cnum].seed = req->reg_random_seed;
ctx              3106 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[cnum].mask = req->reg_random_mask;
ctx              3111 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[cnum].val  = value;
ctx              3119 arch/ia64/kernel/perfmon.c 		CTX_USED_PMD(ctx, PMD_PMD_DEP(cnum));
ctx              3124 arch/ia64/kernel/perfmon.c 		CTX_USED_PMD(ctx, RDEP(cnum));
ctx              3131 arch/ia64/kernel/perfmon.c 			ctx->ctx_ovfl_regs[0] &= ~1UL << cnum;
ctx              3138 arch/ia64/kernel/perfmon.c 			if (is_system == 0) ctx->th_pmds[cnum] = hw_value;
ctx              3152 arch/ia64/kernel/perfmon.c 				ctx->ctx_reload_pmds[0] |= 1UL << cnum;
ctx              3164 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].val,
ctx              3165 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].short_reset,
ctx              3166 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].long_reset,
ctx              3167 arch/ia64/kernel/perfmon.c 			PMC_OVFL_NOTIFY(ctx, cnum) ? 'Y':'N',
ctx              3168 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].seed,
ctx              3169 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].mask,
ctx              3170 arch/ia64/kernel/perfmon.c 			ctx->ctx_used_pmds[0],
ctx              3171 arch/ia64/kernel/perfmon.c 			ctx->ctx_pmds[cnum].reset_pmds[0],
ctx              3172 arch/ia64/kernel/perfmon.c 			ctx->ctx_reload_pmds[0],
ctx              3173 arch/ia64/kernel/perfmon.c 			ctx->ctx_all_pmds[0],
ctx              3174 arch/ia64/kernel/perfmon.c 			ctx->ctx_ovfl_regs[0]));
ctx              3202 arch/ia64/kernel/perfmon.c pfm_read_pmds(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3218 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3220 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3222 arch/ia64/kernel/perfmon.c 	task      = ctx->ctx_task;
ctx              3232 arch/ia64/kernel/perfmon.c 		if (unlikely(is_system && ctx->ctx_cpu != smp_processor_id())) {
ctx              3233 arch/ia64/kernel/perfmon.c 			DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              3269 arch/ia64/kernel/perfmon.c 		if (unlikely(!CTX_IS_USED_PMD(ctx, cnum))) goto error;
ctx              3271 arch/ia64/kernel/perfmon.c 		sval        = ctx->ctx_pmds[cnum].val;
ctx              3272 arch/ia64/kernel/perfmon.c 		lval        = ctx->ctx_pmds[cnum].lval;
ctx              3288 arch/ia64/kernel/perfmon.c 			val = is_loaded ? ctx->th_pmds[cnum] : 0UL;
ctx              3305 arch/ia64/kernel/perfmon.c 			ret = (*rd_func)(ctx->ctx_task, ctx, cnum, &v, regs);
ctx              3335 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              3339 arch/ia64/kernel/perfmon.c  	ctx = GET_PMU_CTX();
ctx              3341 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return -EINVAL;
ctx              3347 arch/ia64/kernel/perfmon.c 	if (task != current && ctx->ctx_fl_system == 0) return -EBUSY;
ctx              3349 arch/ia64/kernel/perfmon.c 	return pfm_write_pmcs(ctx, req, nreq, regs);
ctx              3356 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              3360 arch/ia64/kernel/perfmon.c  	ctx = GET_PMU_CTX();
ctx              3362 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return -EINVAL;
ctx              3368 arch/ia64/kernel/perfmon.c 	if (task != current && ctx->ctx_fl_system == 0) return -EBUSY;
ctx              3370 arch/ia64/kernel/perfmon.c 	return pfm_read_pmds(ctx, req, nreq, regs);
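
The pfm_mod_write_pmcs/pfm_mod_read_pmds wrappers (entries 3335-3370) all repeat the same guard before delegating: there must be a context loaded on this CPU, and touching live PMU state is only safe from the owning task unless the context is system-wide. A hedged sketch of that guard, with my_get_pmu_ctx and my_read_pmds as hypothetical stand-ins:

	struct pt_regs;
	struct my_ctx { int fl_system; };
	extern struct my_ctx *my_get_pmu_ctx(void);
	extern long my_read_pmds(struct my_ctx *ctx, void *req,
				 unsigned int nreq, struct pt_regs *regs);

	static long my_mod_read_pmds(struct task_struct *task, void *req,
				     unsigned int nreq, struct pt_regs *regs)
	{
		struct my_ctx *ctx = my_get_pmu_ctx();	/* per-CPU owner */

		if (ctx == NULL)
			return -EINVAL;		/* nothing loaded here */
		if (task != current && !ctx->fl_system)
			return -EBUSY;		/* would race the owner */
		return my_read_pmds(ctx, req, nreq, regs);
	}
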
ctx              3381 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx = task->thread.pfm_context;
ctx              3402 arch/ia64/kernel/perfmon.c 	if (ctx && ctx->ctx_fl_using_dbreg == 1) return -1;
ctx              3455 arch/ia64/kernel/perfmon.c pfm_restart(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3463 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3464 arch/ia64/kernel/perfmon.c 	fmt       = ctx->ctx_buf_fmt;
ctx              3465 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3466 arch/ia64/kernel/perfmon.c 	task      = PFM_CTX_TASK(ctx);
ctx              3472 arch/ia64/kernel/perfmon.c 			if (CTX_HAS_SMPL(ctx) && fmt->fmt_restart_active) break;
ctx              3488 arch/ia64/kernel/perfmon.c 	if (is_system && ctx->ctx_cpu != smp_processor_id()) {
ctx              3489 arch/ia64/kernel/perfmon.c 		DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              3501 arch/ia64/kernel/perfmon.c 		fmt = ctx->ctx_buf_fmt;
ctx              3505 arch/ia64/kernel/perfmon.c 			ctx->ctx_ovfl_regs[0]));
ctx              3507 arch/ia64/kernel/perfmon.c 		if (CTX_HAS_SMPL(ctx)) {
ctx              3509 arch/ia64/kernel/perfmon.c 			prefetch(ctx->ctx_smpl_hdr);
ctx              3515 arch/ia64/kernel/perfmon.c 				ret = pfm_buf_fmt_restart_active(fmt, task, &rst_ctrl, ctx->ctx_smpl_hdr, regs);
ctx              3517 arch/ia64/kernel/perfmon.c 				ret = pfm_buf_fmt_restart(fmt, task, &rst_ctrl, ctx->ctx_smpl_hdr, regs);
ctx              3525 arch/ia64/kernel/perfmon.c 				pfm_reset_regs(ctx, ctx->ctx_ovfl_regs, PFM_PMD_LONG_RESET);
ctx              3540 arch/ia64/kernel/perfmon.c 		ctx->ctx_ovfl_regs[0] = 0UL;
ctx              3545 arch/ia64/kernel/perfmon.c 		ctx->ctx_state = PFM_CTX_LOADED;
ctx              3550 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_can_restart = 0;
ctx              3564 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_fl_can_restart == 0) return -EINVAL;
ctx              3569 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_can_restart = 0;
ctx              3588 arch/ia64/kernel/perfmon.c 	if (CTX_OVFL_NOBLOCK(ctx) == 0 && state == PFM_CTX_MASKED) {
ctx              3590 arch/ia64/kernel/perfmon.c 		complete(&ctx->ctx_restart_done);
ctx              3594 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_trap_reason = PFM_TRAP_REASON_RESET;
ctx              3608 arch/ia64/kernel/perfmon.c pfm_debug(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3627 arch/ia64/kernel/perfmon.c pfm_write_ibr_dbr(int mode, pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3642 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3644 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3645 arch/ia64/kernel/perfmon.c 	task      = ctx->ctx_task;
ctx              3660 arch/ia64/kernel/perfmon.c 		if (unlikely(is_system && ctx->ctx_cpu != smp_processor_id())) {
ctx              3661 arch/ia64/kernel/perfmon.c 			DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              3674 arch/ia64/kernel/perfmon.c 	first_time = ctx->ctx_fl_using_dbreg == 0;
ctx              3709 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_using_dbreg = 1;
ctx              3774 arch/ia64/kernel/perfmon.c 			CTX_USED_IBR(ctx, rnum);
ctx              3781 arch/ia64/kernel/perfmon.c 			ctx->ctx_ibrs[rnum] = dbreg.val;
ctx              3784 arch/ia64/kernel/perfmon.c 				rnum, dbreg.val, ctx->ctx_used_ibrs[0], is_loaded, can_access_pmu));
ctx              3786 arch/ia64/kernel/perfmon.c 			CTX_USED_DBR(ctx, rnum);
ctx              3792 arch/ia64/kernel/perfmon.c 			ctx->ctx_dbrs[rnum] = dbreg.val;
ctx              3795 arch/ia64/kernel/perfmon.c 				rnum, dbreg.val, ctx->ctx_used_dbrs[0], is_loaded, can_access_pmu));
ctx              3807 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_fl_system) {
ctx              3811 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_using_dbreg = 0;
ctx              3822 arch/ia64/kernel/perfmon.c pfm_write_ibrs(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3824 arch/ia64/kernel/perfmon.c 	return pfm_write_ibr_dbr(PFM_CODE_RR, ctx, arg, count, regs);
ctx              3828 arch/ia64/kernel/perfmon.c pfm_write_dbrs(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3830 arch/ia64/kernel/perfmon.c 	return pfm_write_ibr_dbr(PFM_DATA_RR, ctx, arg, count, regs);
ctx              3836 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              3840 arch/ia64/kernel/perfmon.c  	ctx = GET_PMU_CTX();
ctx              3842 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return -EINVAL;
ctx              3848 arch/ia64/kernel/perfmon.c 	if (task != current && ctx->ctx_fl_system == 0) return -EBUSY;
ctx              3850 arch/ia64/kernel/perfmon.c 	return pfm_write_ibrs(ctx, req, nreq, regs);
ctx              3857 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              3861 arch/ia64/kernel/perfmon.c  	ctx = GET_PMU_CTX();
ctx              3863 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return -EINVAL;
ctx              3869 arch/ia64/kernel/perfmon.c 	if (task != current && ctx->ctx_fl_system == 0) return -EBUSY;
ctx              3871 arch/ia64/kernel/perfmon.c 	return pfm_write_dbrs(ctx, req, nreq, regs);
ctx              3877 arch/ia64/kernel/perfmon.c pfm_get_features(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3886 arch/ia64/kernel/perfmon.c pfm_stop(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3889 arch/ia64/kernel/perfmon.c 	struct task_struct *task = PFM_CTX_TASK(ctx);
ctx              3892 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3893 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3905 arch/ia64/kernel/perfmon.c 	if (is_system && ctx->ctx_cpu != smp_processor_id()) {
ctx              3906 arch/ia64/kernel/perfmon.c 		DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              3910 arch/ia64/kernel/perfmon.c 		task_pid_nr(PFM_CTX_TASK(ctx)),
ctx              3967 arch/ia64/kernel/perfmon.c 		ctx->ctx_saved_psr_up = 0;
ctx              3975 arch/ia64/kernel/perfmon.c pfm_start(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              3980 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              3981 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              3990 arch/ia64/kernel/perfmon.c 	if (is_system && ctx->ctx_cpu != smp_processor_id()) {
ctx              3991 arch/ia64/kernel/perfmon.c 		DPRINT(("should be running on CPU%d\n", ctx->ctx_cpu));
ctx              4028 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_task == current) {
ctx              4039 arch/ia64/kernel/perfmon.c 		tregs = task_pt_regs(ctx->ctx_task);
ctx              4045 arch/ia64/kernel/perfmon.c 		ctx->ctx_saved_psr_up = IA64_PSR_UP;
ctx              4056 arch/ia64/kernel/perfmon.c pfm_get_pmc_reset(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              4083 arch/ia64/kernel/perfmon.c pfm_check_task_exist(pfm_context_t *ctx)
ctx              4091 arch/ia64/kernel/perfmon.c 		if (t->thread.pfm_context == ctx) {
ctx              4099 arch/ia64/kernel/perfmon.c 	DPRINT(("pfm_check_task_exist: ret=%d ctx=%p\n", ret, ctx));
ctx              4105 arch/ia64/kernel/perfmon.c pfm_context_load(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              4120 arch/ia64/kernel/perfmon.c 	state     = ctx->ctx_state;
ctx              4121 arch/ia64/kernel/perfmon.c 	is_system = ctx->ctx_fl_system;
ctx              4128 arch/ia64/kernel/perfmon.c 			ctx->ctx_state));
ctx              4132 arch/ia64/kernel/perfmon.c 	DPRINT(("load_pid [%d] using_dbreg=%d\n", req->load_pid, ctx->ctx_fl_using_dbreg));
ctx              4134 arch/ia64/kernel/perfmon.c 	if (CTX_OVFL_NOBLOCK(ctx) == 0 && req->load_pid == current->pid) {
ctx              4139 arch/ia64/kernel/perfmon.c 	ret = pfm_get_task(ctx, req->load_pid, &task);
ctx              4163 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_using_dbreg) {
ctx              4203 arch/ia64/kernel/perfmon.c 	the_cpu = ctx->ctx_cpu = smp_processor_id();
ctx              4222 arch/ia64/kernel/perfmon.c 		thread->pfm_context, ctx));
ctx              4225 arch/ia64/kernel/perfmon.c 	old = ia64_cmpxchg(acq, &thread->pfm_context, NULL, ctx, sizeof(pfm_context_t *));
ctx              4231 arch/ia64/kernel/perfmon.c 	pfm_reset_msgq(ctx);
ctx              4233 arch/ia64/kernel/perfmon.c 	ctx->ctx_state = PFM_CTX_LOADED;
ctx              4238 arch/ia64/kernel/perfmon.c 	ctx->ctx_task = task;
ctx              4247 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_fl_excl_idle) PFM_CPUINFO_SET(PFM_CPUINFO_EXCL_IDLE);
ctx              4255 arch/ia64/kernel/perfmon.c 	pfm_copy_pmds(task, ctx);
ctx              4256 arch/ia64/kernel/perfmon.c 	pfm_copy_pmcs(task, ctx);
ctx              4258 arch/ia64/kernel/perfmon.c 	pmcs_source = ctx->th_pmcs;
ctx              4259 arch/ia64/kernel/perfmon.c 	pmds_source = ctx->th_pmds;
ctx              4272 arch/ia64/kernel/perfmon.c 			SET_LAST_CPU(ctx, smp_processor_id());
ctx              4274 arch/ia64/kernel/perfmon.c 			SET_ACTIVATION(ctx);
ctx              4287 arch/ia64/kernel/perfmon.c 		pfm_restore_pmds(pmds_source, ctx->ctx_all_pmds[0]);
ctx              4288 arch/ia64/kernel/perfmon.c 		pfm_restore_pmcs(pmcs_source, ctx->ctx_all_pmcs[0]);
ctx              4290 arch/ia64/kernel/perfmon.c 		ctx->ctx_reload_pmcs[0] = 0UL;
ctx              4291 arch/ia64/kernel/perfmon.c 		ctx->ctx_reload_pmds[0] = 0UL;
ctx              4296 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_fl_using_dbreg) {
ctx              4297 arch/ia64/kernel/perfmon.c 			pfm_restore_ibrs(ctx->ctx_ibrs, pmu_conf->num_ibrs);
ctx              4298 arch/ia64/kernel/perfmon.c 			pfm_restore_dbrs(ctx->ctx_dbrs, pmu_conf->num_dbrs);
ctx              4303 arch/ia64/kernel/perfmon.c 		SET_PMU_OWNER(task, ctx);
ctx              4313 arch/ia64/kernel/perfmon.c 		ctx->ctx_last_activation = PFM_INVALID_ACTIVATION;
ctx              4314 arch/ia64/kernel/perfmon.c 		SET_LAST_CPU(ctx, -1);
ctx              4317 arch/ia64/kernel/perfmon.c 		ctx->ctx_saved_psr_up = 0UL;
ctx              4324 arch/ia64/kernel/perfmon.c 	if (ret) pfm_unreserve_session(ctx, ctx->ctx_fl_system, the_cpu);
ctx              4341 arch/ia64/kernel/perfmon.c 			ret = pfm_check_task_exist(ctx);
ctx              4343 arch/ia64/kernel/perfmon.c 				ctx->ctx_state = PFM_CTX_UNLOADED;
ctx              4344 arch/ia64/kernel/perfmon.c 				ctx->ctx_task  = NULL;
ctx              4359 arch/ia64/kernel/perfmon.c static void pfm_flush_pmds(struct task_struct *, pfm_context_t *ctx);
ctx              4362 arch/ia64/kernel/perfmon.c pfm_context_unload(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs)
ctx              4364 arch/ia64/kernel/perfmon.c 	struct task_struct *task = PFM_CTX_TASK(ctx);
ctx              4369 arch/ia64/kernel/perfmon.c 	DPRINT(("ctx_state=%d task [%d]\n", ctx->ctx_state, task ? task_pid_nr(task) : -1));
ctx              4371 arch/ia64/kernel/perfmon.c 	prev_state = ctx->ctx_state;
ctx              4372 arch/ia64/kernel/perfmon.c 	is_system  = ctx->ctx_fl_system;
ctx              4385 arch/ia64/kernel/perfmon.c 	ret = pfm_stop(ctx, NULL, 0, regs);
ctx              4388 arch/ia64/kernel/perfmon.c 	ctx->ctx_state = PFM_CTX_UNLOADED;
ctx              4409 arch/ia64/kernel/perfmon.c 		pfm_flush_pmds(current, ctx);
ctx              4416 arch/ia64/kernel/perfmon.c 			pfm_unreserve_session(ctx, 1 , ctx->ctx_cpu);
ctx              4425 arch/ia64/kernel/perfmon.c 		ctx->ctx_task = NULL;
ctx              4450 arch/ia64/kernel/perfmon.c 	pfm_flush_pmds(task, ctx);
ctx              4459 arch/ia64/kernel/perfmon.c 		pfm_unreserve_session(ctx, 0 , ctx->ctx_cpu);
ctx              4464 arch/ia64/kernel/perfmon.c 	ctx->ctx_last_activation = PFM_INVALID_ACTIVATION;
ctx              4465 arch/ia64/kernel/perfmon.c 	SET_LAST_CPU(ctx, -1);
ctx              4476 arch/ia64/kernel/perfmon.c 	ctx->ctx_task             = NULL;
ctx              4480 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_trap_reason  = PFM_TRAP_REASON_NONE;
ctx              4481 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_can_restart  = 0;
ctx              4482 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_going_zombie = 0;
ctx              4497 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              4503 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(task);
ctx              4505 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              4507 arch/ia64/kernel/perfmon.c 	DPRINT(("state=%d task [%d]\n", ctx->ctx_state, task_pid_nr(task)));
ctx              4509 arch/ia64/kernel/perfmon.c 	state = ctx->ctx_state;
ctx              4520 arch/ia64/kernel/perfmon.c 			ret = pfm_context_unload(ctx, NULL, 0, regs);
ctx              4526 arch/ia64/kernel/perfmon.c 			pfm_end_notify_user(ctx);
ctx              4529 arch/ia64/kernel/perfmon.c 			ret = pfm_context_unload(ctx, NULL, 0, regs);
ctx              4539 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              4552 arch/ia64/kernel/perfmon.c 	if (free_ok) pfm_context_free(ctx);
ctx              4603 arch/ia64/kernel/perfmon.c pfm_check_task_state(pfm_context_t *ctx, int cmd, unsigned long flags)
ctx              4609 arch/ia64/kernel/perfmon.c 	state = ctx->ctx_state;
ctx              4610 arch/ia64/kernel/perfmon.c 	task  = ctx->ctx_task;
ctx              4613 arch/ia64/kernel/perfmon.c 		DPRINT(("context %d no task, state=%d\n", ctx->ctx_fd, state));
ctx              4618 arch/ia64/kernel/perfmon.c 		ctx->ctx_fd,
ctx              4630 arch/ia64/kernel/perfmon.c 	if (task == current || ctx->ctx_fl_system) return 0;
ctx              4686 arch/ia64/kernel/perfmon.c 		UNPROTECT_CTX(ctx, flags);
ctx              4690 arch/ia64/kernel/perfmon.c 		PROTECT_CTX(ctx, flags);
ctx              4695 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_state != old_state) {
ctx              4696 arch/ia64/kernel/perfmon.c 			DPRINT(("old_state=%d new_state=%d\n", old_state, ctx->ctx_state));
ctx              4710 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx = NULL;
ctx              4716 arch/ia64/kernel/perfmon.c 	int (*func)(pfm_context_t *ctx, void *arg, int count, struct pt_regs *regs);
ctx              4816 arch/ia64/kernel/perfmon.c 	ctx = f.file->private_data;
ctx              4817 arch/ia64/kernel/perfmon.c 	if (unlikely(ctx == NULL)) {
ctx              4821 arch/ia64/kernel/perfmon.c 	prefetch(&ctx->ctx_state);
ctx              4823 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              4828 arch/ia64/kernel/perfmon.c 	ret = pfm_check_task_state(ctx, cmd, flags);
ctx              4832 arch/ia64/kernel/perfmon.c 	ret = (*func)(ctx, args_k, count, task_pt_regs(current));
ctx              4837 arch/ia64/kernel/perfmon.c 	if (likely(ctx)) {
ctx              4839 arch/ia64/kernel/perfmon.c 		UNPROTECT_CTX(ctx, flags);
ctx              4857 arch/ia64/kernel/perfmon.c pfm_resume_after_ovfl(pfm_context_t *ctx, unsigned long ovfl_regs, struct pt_regs *regs)
ctx              4859 arch/ia64/kernel/perfmon.c 	pfm_buffer_fmt_t *fmt = ctx->ctx_buf_fmt;
ctx              4864 arch/ia64/kernel/perfmon.c 	state = ctx->ctx_state;
ctx              4869 arch/ia64/kernel/perfmon.c 	if (CTX_HAS_SMPL(ctx)) {
ctx              4875 arch/ia64/kernel/perfmon.c 			ret = pfm_buf_fmt_restart_active(fmt, current, &rst_ctrl, ctx->ctx_smpl_hdr, regs);
ctx              4877 arch/ia64/kernel/perfmon.c 			ret = pfm_buf_fmt_restart(fmt, current, &rst_ctrl, ctx->ctx_smpl_hdr, regs);
ctx              4885 arch/ia64/kernel/perfmon.c 			pfm_reset_regs(ctx, &ovfl_regs, PFM_PMD_LONG_RESET);
ctx              4889 arch/ia64/kernel/perfmon.c 			if (ctx->ctx_state == PFM_CTX_MASKED) pfm_restore_monitoring(current);
ctx              4894 arch/ia64/kernel/perfmon.c 		ctx->ctx_state = PFM_CTX_LOADED;
ctx              4903 arch/ia64/kernel/perfmon.c pfm_context_force_terminate(pfm_context_t *ctx, struct pt_regs *regs)
ctx              4909 arch/ia64/kernel/perfmon.c 	ret = pfm_context_unload(ctx, NULL, 0, regs);
ctx              4917 arch/ia64/kernel/perfmon.c 	wake_up_interruptible(&ctx->ctx_zombieq);
ctx              4926 arch/ia64/kernel/perfmon.c static int pfm_ovfl_notify_user(pfm_context_t *ctx, unsigned long ovfl_pmds);
ctx              4940 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              4947 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(current);
ctx              4948 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) {
ctx              4954 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, flags);
ctx              4963 arch/ia64/kernel/perfmon.c 	reason = ctx->ctx_fl_trap_reason;
ctx              4964 arch/ia64/kernel/perfmon.c 	ctx->ctx_fl_trap_reason = PFM_TRAP_REASON_NONE;
ctx              4965 arch/ia64/kernel/perfmon.c 	ovfl_regs = ctx->ctx_ovfl_regs[0];
ctx              4967 arch/ia64/kernel/perfmon.c 	DPRINT(("reason=%d state=%d\n", reason, ctx->ctx_state));
ctx              4972 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_going_zombie || ctx->ctx_state == PFM_CTX_ZOMBIE)
ctx              4983 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              4996 arch/ia64/kernel/perfmon.c 	ret = wait_for_completion_interruptible(&ctx->ctx_restart_done);
ctx              5006 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx, dummy_flags);
ctx              5014 arch/ia64/kernel/perfmon.c 	ovfl_regs = ctx->ctx_ovfl_regs[0];
ctx              5016 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_going_zombie) {
ctx              5019 arch/ia64/kernel/perfmon.c 		pfm_context_force_terminate(ctx, regs);
ctx              5029 arch/ia64/kernel/perfmon.c 	pfm_resume_after_ovfl(ctx, ovfl_regs, regs);
ctx              5030 arch/ia64/kernel/perfmon.c 	ctx->ctx_ovfl_regs[0] = 0UL;
ctx              5036 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx, flags);
ctx              5040 arch/ia64/kernel/perfmon.c pfm_notify_user(pfm_context_t *ctx, pfm_msg_t *msg)
ctx              5042 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_state == PFM_CTX_ZOMBIE) {
ctx              5049 arch/ia64/kernel/perfmon.c 	if (msg) wake_up_interruptible(&ctx->ctx_msgq_wait);
ctx              5055 arch/ia64/kernel/perfmon.c 	kill_fasync (&ctx->ctx_async_queue, SIGIO, POLL_IN);
ctx              5061 arch/ia64/kernel/perfmon.c pfm_ovfl_notify_user(pfm_context_t *ctx, unsigned long ovfl_pmds)
ctx              5065 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_no_msg == 0) {
ctx              5066 arch/ia64/kernel/perfmon.c 		msg = pfm_get_new_msg(ctx);
ctx              5073 arch/ia64/kernel/perfmon.c 		msg->pfm_ovfl_msg.msg_ctx_fd       = ctx->ctx_fd;
ctx              5084 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_no_msg,
ctx              5085 arch/ia64/kernel/perfmon.c 		ctx->ctx_fd,
ctx              5088 arch/ia64/kernel/perfmon.c 	return pfm_notify_user(ctx, msg);
ctx              5092 arch/ia64/kernel/perfmon.c pfm_end_notify_user(pfm_context_t *ctx)
ctx              5096 arch/ia64/kernel/perfmon.c 	msg = pfm_get_new_msg(ctx);
ctx              5105 arch/ia64/kernel/perfmon.c 	msg->pfm_end_msg.msg_ctx_fd  = ctx->ctx_fd;
ctx              5110 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_no_msg,
ctx              5111 arch/ia64/kernel/perfmon.c 		ctx->ctx_fd));
ctx              5113 arch/ia64/kernel/perfmon.c 	return pfm_notify_user(ctx, msg);
ctx              5120 arch/ia64/kernel/perfmon.c static void pfm_overflow_handler(struct task_struct *task, pfm_context_t *ctx,
ctx              5132 arch/ia64/kernel/perfmon.c 	if (unlikely(ctx->ctx_state == PFM_CTX_ZOMBIE)) goto stop_monitoring;
ctx              5142 arch/ia64/kernel/perfmon.c 	has_smpl = CTX_HAS_SMPL(ctx);
ctx              5149 arch/ia64/kernel/perfmon.c 			CTX_OVFL_NOBLOCK(ctx) ? "nonblocking" : "blocking",
ctx              5150 arch/ia64/kernel/perfmon.c 			ctx->ctx_used_pmds[0]));
ctx              5168 arch/ia64/kernel/perfmon.c 		old_val              = new_val = ctx->ctx_pmds[i].val;
ctx              5170 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[i].val = new_val;
ctx              5177 arch/ia64/kernel/perfmon.c 			if (PMC_OVFL_NOTIFY(ctx, i)) ovfl_notify |= 1UL << i;
ctx              5211 arch/ia64/kernel/perfmon.c 		ovfl_arg = &ctx->ctx_ovfl_arg;
ctx              5213 arch/ia64/kernel/perfmon.c 		prefetch(ctx->ctx_smpl_hdr);
ctx              5225 arch/ia64/kernel/perfmon.c 			ovfl_arg->smpl_pmds[0]  = smpl_pmds = ctx->ctx_pmds[i].smpl_pmds[0];
ctx              5227 arch/ia64/kernel/perfmon.c 			ovfl_arg->pmd_value      = ctx->ctx_pmds[i].val;
ctx              5228 arch/ia64/kernel/perfmon.c 			ovfl_arg->pmd_last_reset = ctx->ctx_pmds[i].lval;
ctx              5229 arch/ia64/kernel/perfmon.c 			ovfl_arg->pmd_eventid    = ctx->ctx_pmds[i].eventid;
ctx              5238 arch/ia64/kernel/perfmon.c 					ovfl_arg->smpl_pmds_values[k++] = PMD_IS_COUNTING(j) ?  pfm_read_soft_counter(ctx, j) : ia64_get_pmd(j);
ctx              5250 arch/ia64/kernel/perfmon.c 			ret = (*ctx->ctx_buf_fmt->fmt_handler)(task, ctx->ctx_smpl_hdr, ovfl_arg, regs, tstamp);
ctx              5301 arch/ia64/kernel/perfmon.c 		pfm_reset_regs(ctx, &bm, PFM_PMD_SHORT_RESET);
ctx              5308 arch/ia64/kernel/perfmon.c 		ctx->ctx_ovfl_regs[0] = ovfl_pmds;
ctx              5313 arch/ia64/kernel/perfmon.c 		if (CTX_OVFL_NOBLOCK(ctx) == 0 && ovfl_ctrl.bits.block_task) {
ctx              5315 arch/ia64/kernel/perfmon.c 			ctx->ctx_fl_trap_reason = PFM_TRAP_REASON_BLOCK;
ctx              5338 arch/ia64/kernel/perfmon.c 			ctx->ctx_fl_trap_reason,
ctx              5347 arch/ia64/kernel/perfmon.c 		ctx->ctx_state = PFM_CTX_MASKED;
ctx              5348 arch/ia64/kernel/perfmon.c 		ctx->ctx_fl_can_restart = 1;
ctx              5354 arch/ia64/kernel/perfmon.c 	if (must_notify) pfm_ovfl_notify_user(ctx, ovfl_notify);
ctx              5405 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              5419 arch/ia64/kernel/perfmon.c 	ctx  = GET_PMU_CTX();
ctx              5431 arch/ia64/kernel/perfmon.c 		if (!ctx) goto report_spurious1;
ctx              5433 arch/ia64/kernel/perfmon.c 		if (ctx->ctx_fl_system == 0 && (task->thread.flags & IA64_THREAD_PM_VALID) == 0) 
ctx              5436 arch/ia64/kernel/perfmon.c 		PROTECT_CTX_NOPRINT(ctx, flags);
ctx              5438 arch/ia64/kernel/perfmon.c 		pfm_overflow_handler(task, ctx, pmc0, regs);
ctx              5440 arch/ia64/kernel/perfmon.c 		UNPROTECT_CTX_NOPRINT(ctx, flags);
ctx              5717 arch/ia64/kernel/perfmon.c pfm_force_cleanup(pfm_context_t *ctx, struct pt_regs *regs)
ctx              5719 arch/ia64/kernel/perfmon.c 	struct task_struct *task = ctx->ctx_task;
ctx              5726 arch/ia64/kernel/perfmon.c 					task_pid_nr(ctx->ctx_task)));
ctx              5748 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              5753 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(task);
ctx              5754 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return;
ctx              5761 arch/ia64/kernel/perfmon.c 	flags = pfm_protect_ctx_ctxsw(ctx);
ctx              5763 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_state == PFM_CTX_ZOMBIE) {
ctx              5768 arch/ia64/kernel/perfmon.c 		pfm_force_cleanup(ctx, regs);
ctx              5770 arch/ia64/kernel/perfmon.c 		BUG_ON(ctx->ctx_smpl_hdr);
ctx              5772 arch/ia64/kernel/perfmon.c 		pfm_unprotect_ctx_ctxsw(ctx, flags);
ctx              5774 arch/ia64/kernel/perfmon.c 		pfm_context_free(ctx);
ctx              5798 arch/ia64/kernel/perfmon.c 	ctx->ctx_saved_psr_up = psr & IA64_PSR_UP;
ctx              5812 arch/ia64/kernel/perfmon.c 	pfm_save_pmds(ctx->th_pmds, ctx->ctx_used_pmds[0]);
ctx              5819 arch/ia64/kernel/perfmon.c 	ctx->th_pmcs[0] = ia64_get_pmc(0);
ctx              5824 arch/ia64/kernel/perfmon.c 	if (ctx->th_pmcs[0] & ~0x1UL) pfm_unfreeze_pmu();
ctx              5830 arch/ia64/kernel/perfmon.c 	pfm_unprotect_ctx_ctxsw(ctx, flags);
ctx              5837 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              5840 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(task);
ctx              5841 arch/ia64/kernel/perfmon.c 	if (ctx == NULL) return;
ctx              5862 arch/ia64/kernel/perfmon.c 	ctx->ctx_saved_psr_up = psr & IA64_PSR_UP;
ctx              5868 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              5875 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(task);
ctx              5886 arch/ia64/kernel/perfmon.c 	PROTECT_CTX(ctx,flags);
ctx              5900 arch/ia64/kernel/perfmon.c 	pfm_save_pmds(ctx->th_pmds, ctx->ctx_used_pmds[0]);
ctx              5907 arch/ia64/kernel/perfmon.c 	ctx->th_pmcs[0] = ia64_get_pmc(0);
ctx              5912 arch/ia64/kernel/perfmon.c 	if (ctx->th_pmcs[0] & ~0x1UL) pfm_unfreeze_pmu();
ctx              5919 arch/ia64/kernel/perfmon.c 	UNPROTECT_CTX(ctx,flags);
ctx              5930 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              5936 arch/ia64/kernel/perfmon.c 	ctx = PFM_GET_CTX(task);
ctx              5937 arch/ia64/kernel/perfmon.c 	if (unlikely(ctx == NULL)) return;
ctx              5951 arch/ia64/kernel/perfmon.c 	flags = pfm_protect_ctx_ctxsw(ctx);
ctx              5959 arch/ia64/kernel/perfmon.c 	if (unlikely(ctx->ctx_state == PFM_CTX_ZOMBIE)) {
ctx              5962 arch/ia64/kernel/perfmon.c 		BUG_ON(ctx->ctx_smpl_hdr);
ctx              5964 arch/ia64/kernel/perfmon.c 		pfm_force_cleanup(ctx, regs);
ctx              5966 arch/ia64/kernel/perfmon.c 		pfm_unprotect_ctx_ctxsw(ctx, flags);
ctx              5971 arch/ia64/kernel/perfmon.c 		pfm_context_free(ctx);
ctx              5980 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_using_dbreg) {
ctx              5981 arch/ia64/kernel/perfmon.c 		pfm_restore_ibrs(ctx->ctx_ibrs, pmu_conf->num_ibrs);
ctx              5982 arch/ia64/kernel/perfmon.c 		pfm_restore_dbrs(ctx->ctx_dbrs, pmu_conf->num_dbrs);
ctx              5987 arch/ia64/kernel/perfmon.c 	psr_up = ctx->ctx_saved_psr_up;
ctx              5993 arch/ia64/kernel/perfmon.c 	if (GET_LAST_CPU(ctx) == smp_processor_id() && ctx->ctx_last_activation == GET_ACTIVATION()) {
ctx              5998 arch/ia64/kernel/perfmon.c 		pmc_mask = ctx->ctx_reload_pmcs[0];
ctx              5999 arch/ia64/kernel/perfmon.c 		pmd_mask = ctx->ctx_reload_pmds[0];
ctx              6008 arch/ia64/kernel/perfmon.c 		pmd_mask = pfm_sysctl.fastctxsw ?  ctx->ctx_used_pmds[0] : ctx->ctx_all_pmds[0];
ctx              6017 arch/ia64/kernel/perfmon.c 		pmc_mask = ctx->ctx_all_pmcs[0];
ctx              6026 arch/ia64/kernel/perfmon.c 	if (pmd_mask) pfm_restore_pmds(ctx->th_pmds, pmd_mask);
ctx              6027 arch/ia64/kernel/perfmon.c 	if (pmc_mask) pfm_restore_pmcs(ctx->th_pmcs, pmc_mask);
ctx              6033 arch/ia64/kernel/perfmon.c 	if (unlikely(PMC0_HAS_OVFL(ctx->th_pmcs[0]))) {
ctx              6038 arch/ia64/kernel/perfmon.c 		ia64_set_pmc(0, ctx->th_pmcs[0]);
ctx              6040 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[0] = 0UL;
ctx              6053 arch/ia64/kernel/perfmon.c 	ctx->ctx_reload_pmcs[0] = 0UL;
ctx              6054 arch/ia64/kernel/perfmon.c 	ctx->ctx_reload_pmds[0] = 0UL;
ctx              6056 arch/ia64/kernel/perfmon.c 	SET_LAST_CPU(ctx, smp_processor_id());
ctx              6065 arch/ia64/kernel/perfmon.c 	SET_ACTIVATION(ctx);
ctx              6070 arch/ia64/kernel/perfmon.c 	SET_PMU_OWNER(task, ctx);
ctx              6083 arch/ia64/kernel/perfmon.c 	pfm_unprotect_ctx_ctxsw(ctx, flags);
ctx              6093 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              6100 arch/ia64/kernel/perfmon.c 	ctx   = PFM_GET_CTX(task);
ctx              6114 arch/ia64/kernel/perfmon.c 	if (ctx->ctx_fl_using_dbreg) {
ctx              6115 arch/ia64/kernel/perfmon.c 		pfm_restore_ibrs(ctx->ctx_ibrs, pmu_conf->num_ibrs);
ctx              6116 arch/ia64/kernel/perfmon.c 		pfm_restore_dbrs(ctx->ctx_dbrs, pmu_conf->num_dbrs);
ctx              6122 arch/ia64/kernel/perfmon.c 	psr_up = ctx->ctx_saved_psr_up;
ctx              6152 arch/ia64/kernel/perfmon.c 	pmd_mask = pfm_sysctl.fastctxsw ?  ctx->ctx_used_pmds[0] : ctx->ctx_all_pmds[0];
ctx              6161 arch/ia64/kernel/perfmon.c 	pmc_mask = ctx->ctx_all_pmcs[0];
ctx              6163 arch/ia64/kernel/perfmon.c 	pfm_restore_pmds(ctx->th_pmds, pmd_mask);
ctx              6164 arch/ia64/kernel/perfmon.c 	pfm_restore_pmcs(ctx->th_pmcs, pmc_mask);
ctx              6170 arch/ia64/kernel/perfmon.c 	if (unlikely(PMC0_HAS_OVFL(ctx->th_pmcs[0]))) {
ctx              6175 arch/ia64/kernel/perfmon.c 		ia64_set_pmc(0, ctx->th_pmcs[0]);
ctx              6178 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[0] = 0UL;
ctx              6191 arch/ia64/kernel/perfmon.c 	SET_PMU_OWNER(task, ctx);
ctx              6207 arch/ia64/kernel/perfmon.c pfm_flush_pmds(struct task_struct *task, pfm_context_t *ctx)
ctx              6218 arch/ia64/kernel/perfmon.c 	is_self = ctx->ctx_task == task ? 1 : 0;
ctx              6227 arch/ia64/kernel/perfmon.c 	can_access_pmu = (GET_PMU_OWNER() == task) || (ctx->ctx_fl_system && ctx->ctx_cpu == smp_processor_id());
ctx              6253 arch/ia64/kernel/perfmon.c 		pmc0 = ctx->th_pmcs[0];
ctx              6257 arch/ia64/kernel/perfmon.c 		ctx->th_pmcs[0] = 0;
ctx              6266 arch/ia64/kernel/perfmon.c 	mask2 = ctx->ctx_used_pmds[0];
ctx              6278 arch/ia64/kernel/perfmon.c 		val = pmd_val = can_access_pmu ? ia64_get_pmd(i) : ctx->th_pmds[i];
ctx              6284 arch/ia64/kernel/perfmon.c 				ctx->ctx_pmds[i].val,
ctx              6290 arch/ia64/kernel/perfmon.c 			val = ctx->ctx_pmds[i].val + (val & ovfl_val);
ctx              6310 arch/ia64/kernel/perfmon.c 		if (is_self) ctx->th_pmds[i] = pmd_val;
ctx              6312 arch/ia64/kernel/perfmon.c 		ctx->ctx_pmds[i].val = val;
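
The heart of pfm_flush_pmds (entries 6207-6312) is the counter-widening step at 6290: hardware PMDs implement fewer than 64 bits, so the bits above the hardware width live in the software state ctx_pmds[i].val and the live hardware bits are merged in under the overflow mask when the context is torn down. In isolation:

	#include <linux/types.h>

	/* soft_val carries the software upper bits (and past overflows);
	 * ovfl_mask selects the bits the hardware counter implements. */
	static inline u64 my_flush_counter(u64 soft_val, u64 hw_val,
					   u64 ovfl_mask)
	{
		return soft_val + (hw_val & ovfl_mask);
	}
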
ctx              6610 arch/ia64/kernel/perfmon.c 	pfm_context_t *ctx;
ctx              6634 arch/ia64/kernel/perfmon.c 	ctx  = GET_PMU_CTX();
ctx              6636 arch/ia64/kernel/perfmon.c 	printk("->CPU%d owner [%d] ctx=%p\n", this_cpu, task ? task_pid_nr(task) : -1, ctx);
ctx              6655 arch/ia64/kernel/perfmon.c 		printk("->CPU%d pmc[%d]=0x%lx thread_pmc[%d]=0x%lx\n", this_cpu, i, ia64_get_pmc(i), i, ctx->th_pmcs[i]);
ctx              6660 arch/ia64/kernel/perfmon.c 		printk("->CPU%d pmd[%d]=0x%lx thread_pmd[%d]=0x%lx\n", this_cpu, i, ia64_get_pmd(i), i, ctx->th_pmds[i]);
ctx              6663 arch/ia64/kernel/perfmon.c 	if (ctx) {
ctx              6666 arch/ia64/kernel/perfmon.c 				ctx->ctx_state,
ctx              6667 arch/ia64/kernel/perfmon.c 				ctx->ctx_smpl_vaddr,
ctx              6668 arch/ia64/kernel/perfmon.c 				ctx->ctx_smpl_hdr,
ctx              6669 arch/ia64/kernel/perfmon.c 				ctx->ctx_msgq_head,
ctx              6670 arch/ia64/kernel/perfmon.c 				ctx->ctx_msgq_tail,
ctx              6671 arch/ia64/kernel/perfmon.c 				ctx->ctx_saved_psr_up);
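
Throughout the perfmon.c entries, PROTECT_CTX()/UNPROTECT_CTX() bracket every access to the context. At bottom they are an irqsave spinlock on the context (the real macros also emit debug tracing around the lock); a minimal rendering:

	#define MY_PROTECT_CTX(c, f)	spin_lock_irqsave(&(c)->ctx_lock, (f))
	#define MY_UNPROTECT_CTX(c, f)	spin_unlock_irqrestore(&(c)->ctx_lock, (f))
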
ctx                 9 arch/ia64/kernel/perfmon_itanium.h static int pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
ctx                52 arch/ia64/kernel/perfmon_itanium.h pfm_ita_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs)
ctx                58 arch/ia64/kernel/perfmon_itanium.h 	if (ctx == NULL) return -EINVAL;
ctx                60 arch/ia64/kernel/perfmon_itanium.h 	is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
ctx                66 arch/ia64/kernel/perfmon_itanium.h 	if (cnum == 13 && is_loaded && ((*val & 0x1) == 0UL) && ctx->ctx_fl_using_dbreg == 0) {
ctx                77 arch/ia64/kernel/perfmon_itanium.h 		ret = pfm_write_ibr_dbr(1, ctx, NULL, 0, regs);
ctx                85 arch/ia64/kernel/perfmon_itanium.h 	if (cnum == 11 && is_loaded && ((*val >> 28)& 0x1) == 0 && ctx->ctx_fl_using_dbreg == 0) {
ctx                96 arch/ia64/kernel/perfmon_itanium.h 		ret = pfm_write_ibr_dbr(0, ctx, NULL, 0, regs);
ctx                 9 arch/ia64/kernel/perfmon_mckinley.h static int pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
ctx                78 arch/ia64/kernel/perfmon_mckinley.h pfm_mck_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs)
ctx                88 arch/ia64/kernel/perfmon_mckinley.h 	if (ctx == NULL) return -EINVAL;
ctx                90 arch/ia64/kernel/perfmon_mckinley.h 	is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
ctx               102 arch/ia64/kernel/perfmon_mckinley.h 	DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, *val, ctx->ctx_fl_using_dbreg, is_loaded));
ctx               105 arch/ia64/kernel/perfmon_mckinley.h 	    && (*val & 0x1e00000000000UL) && (*val & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
ctx               116 arch/ia64/kernel/perfmon_mckinley.h 		ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
ctx               123 arch/ia64/kernel/perfmon_mckinley.h 	if (cnum == 14 && is_loaded && ((*val & 0x2222UL) != 0x2222UL) && ctx->ctx_fl_using_dbreg == 0) {
ctx               134 arch/ia64/kernel/perfmon_mckinley.h 		ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NULL, 0, regs);
ctx               143 arch/ia64/kernel/perfmon_mckinley.h 			 val13 = ctx->ctx_pmcs[13];
ctx               144 arch/ia64/kernel/perfmon_mckinley.h 			 val14 = ctx->ctx_pmcs[14];
ctx               147 arch/ia64/kernel/perfmon_mckinley.h 		case 13: val8  = ctx->ctx_pmcs[8];
ctx               149 arch/ia64/kernel/perfmon_mckinley.h 			 val14 = ctx->ctx_pmcs[14];
ctx               152 arch/ia64/kernel/perfmon_mckinley.h 		case 14: val8  = ctx->ctx_pmcs[8];
ctx               153 arch/ia64/kernel/perfmon_mckinley.h 			 val13 = ctx->ctx_pmcs[13];
ctx                 9 arch/ia64/kernel/perfmon_montecito.h static int pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs);
ctx               156 arch/ia64/kernel/perfmon_montecito.h pfm_mont_pmc_check(struct task_struct *task, pfm_context_t *ctx, unsigned int cnum, unsigned long *val, struct pt_regs *regs)
ctx               170 arch/ia64/kernel/perfmon_montecito.h 	if (ctx == NULL) return -EINVAL;
ctx               172 arch/ia64/kernel/perfmon_montecito.h 	is_loaded = ctx->ctx_state == PFM_CTX_LOADED || ctx->ctx_state == PFM_CTX_MASKED;
ctx               186 arch/ia64/kernel/perfmon_montecito.h 	DPRINT(("cnum=%u val=0x%lx, using_dbreg=%d loaded=%d\n", cnum, tmpval, ctx->ctx_fl_using_dbreg, is_loaded));
ctx               189 arch/ia64/kernel/perfmon_montecito.h 	    && (tmpval & 0x1e00000000000UL) && (tmpval & 0x18181818UL) != 0x18181818UL && ctx->ctx_fl_using_dbreg == 0) {
ctx               200 arch/ia64/kernel/perfmon_montecito.h 		ret = pfm_write_ibr_dbr(PFM_DATA_RR, ctx, NULL, 0, regs);
ctx               209 arch/ia64/kernel/perfmon_montecito.h 	if (cnum == 38 && is_loaded && ((tmpval & 0x492UL) != 0x492UL) && ctx->ctx_fl_using_dbreg == 0) {
ctx               220 arch/ia64/kernel/perfmon_montecito.h 		ret = pfm_write_ibr_dbr(PFM_CODE_RR, ctx, NULL, 0, regs);
ctx               226 arch/ia64/kernel/perfmon_montecito.h 			  val38 = ctx->ctx_pmcs[38];
ctx               227 arch/ia64/kernel/perfmon_montecito.h 			  val41 = ctx->ctx_pmcs[41];
ctx               231 arch/ia64/kernel/perfmon_montecito.h 			  val32 = ctx->ctx_pmcs[32];
ctx               232 arch/ia64/kernel/perfmon_montecito.h 			  val41 = ctx->ctx_pmcs[41];
ctx               236 arch/ia64/kernel/perfmon_montecito.h 			  val32 = ctx->ctx_pmcs[32];
ctx               237 arch/ia64/kernel/perfmon_montecito.h 			  val38 = ctx->ctx_pmcs[38];
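
The three PMU-model headers above (perfmon_itanium.h, perfmon_mckinley.h, perfmon_montecito.h) implement the same per-model hook with different register numbers: when a PMC write enables a feature that depends on the debug registers (the cnum/bit tests in each file), and the context has not used them yet, the hook initializes them once through pfm_write_ibr_dbr(). A generic sketch of that shape, using Itanium's cnum 13 rule as the example; all my_* names are hypothetical stand-ins:

	struct pt_regs;
	enum { MY_CTX_LOADED = 1, MY_CTX_MASKED };
	struct my_ctx { int state; int fl_using_dbreg; };
	#define MY_DATA_RR 0
	extern int my_write_ibr_dbr(int mode, struct my_ctx *ctx, void *arg,
				    int count, struct pt_regs *regs);

	static int my_pmc_check(struct my_ctx *ctx, unsigned int cnum,
				unsigned long *val, struct pt_regs *regs)
	{
		int is_loaded;

		if (ctx == NULL)
			return -EINVAL;
		is_loaded = ctx->state == MY_CTX_LOADED ||
			    ctx->state == MY_CTX_MASKED;

		/* first use of a dbreg-dependent feature: clear/claim the
		 * debug registers before monitoring can rely on them */
		if (cnum == 13 && is_loaded && (*val & 0x1) == 0 &&
		    ctx->fl_using_dbreg == 0)
			return my_write_ibr_dbr(MY_DATA_RR, ctx, NULL, 0, regs);
		return 0;
	}
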
ctx                34 arch/m68k/include/asm/mmu_context.h 	mm_context_t ctx;
ctx                42 arch/m68k/include/asm/mmu_context.h 	ctx = next_mmu_context;
ctx                43 arch/m68k/include/asm/mmu_context.h 	while (test_and_set_bit(ctx, context_map)) {
ctx                44 arch/m68k/include/asm/mmu_context.h 		ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
ctx                45 arch/m68k/include/asm/mmu_context.h 		if (ctx > LAST_CONTEXT)
ctx                46 arch/m68k/include/asm/mmu_context.h 			ctx = 0;
ctx                48 arch/m68k/include/asm/mmu_context.h 	next_mmu_context = (ctx + 1) & LAST_CONTEXT;
ctx                49 arch/m68k/include/asm/mmu_context.h 	mm->context = ctx;
ctx                50 arch/m68k/include/asm/mmu_context.h 	context_mm[ctx] = mm;
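
The m68k get_mmu_context entries (mmu_context.h:34-50) contain an entire round-robin MMU-context allocator: scan a context bitmap from a moving cursor, claim the first free bit with test_and_set_bit(), wrap at LAST_CONTEXT, and remember the owning mm. The same loop as a self-contained sketch (the LAST_CONTEXT value here is assumed for illustration; it must be 2^n - 1 for the wrap mask to work):

	#include <linux/bitops.h>

	#define MY_LAST_CONTEXT 255UL

	static unsigned long my_next_ctx;
	static unsigned long my_ctx_map[BITS_TO_LONGS(MY_LAST_CONTEXT + 1)];

	static unsigned long my_get_context(void)
	{
		unsigned long ctx = my_next_ctx;

		/* claim the first free slot at or after the cursor */
		while (test_and_set_bit(ctx, my_ctx_map)) {
			ctx = find_next_zero_bit(my_ctx_map,
						 MY_LAST_CONTEXT + 1, ctx);
			if (ctx > MY_LAST_CONTEXT)
				ctx = 0;	/* wrap and rescan */
		}
		my_next_ctx = (ctx + 1) & MY_LAST_CONTEXT;
		return ctx;
	}
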
ctx               121 arch/m68k/include/asm/tlbflush.h        unsigned char ctx, oldctx;
ctx               125 arch/m68k/include/asm/tlbflush.h 	       for(ctx = 0; ctx < 8; ctx++) {
ctx               126 arch/m68k/include/asm/tlbflush.h 		       sun3_put_context(ctx);
ctx                29 arch/microblaze/include/asm/mmu_context_mm.h # define CTX_TO_VSID(ctx, va)	(((ctx) * (897 * 16) + ((va) >> 28) * 0x111) \
ctx                86 arch/microblaze/include/asm/mmu_context_mm.h 	mm_context_t ctx;
ctx                92 arch/microblaze/include/asm/mmu_context_mm.h 	ctx = next_mmu_context;
ctx                93 arch/microblaze/include/asm/mmu_context_mm.h 	while (test_and_set_bit(ctx, context_map)) {
ctx                94 arch/microblaze/include/asm/mmu_context_mm.h 		ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
ctx                95 arch/microblaze/include/asm/mmu_context_mm.h 		if (ctx > LAST_CONTEXT)
ctx                96 arch/microblaze/include/asm/mmu_context_mm.h 			ctx = 0;
ctx                98 arch/microblaze/include/asm/mmu_context_mm.h 	next_mmu_context = (ctx + 1) & LAST_CONTEXT;
ctx                99 arch/microblaze/include/asm/mmu_context_mm.h 	mm->context = ctx;
ctx               100 arch/microblaze/include/asm/mmu_context_mm.h 	context_mm[ctx] = mm;
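
The microblaze variant repeats the same allocator and adds CTX_TO_VSID, a multiplicative hash from (context, virtual address) to a segment VSID. The entry above is cut off at the line continuation; the trailing 24-bit mask in this sketch is an assumption carried over from the PowerPC code this header mirrors:

	static inline unsigned long my_ctx_to_vsid(unsigned long ctx,
						   unsigned long va)
	{
		/* constants from the CTX_TO_VSID entry; & 0xffffff assumed */
		return (ctx * (897 * 16) + (va >> 28) * 0x111) & 0xffffff;
	}
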
ctx                38 arch/mips/cavium-octeon/crypto/octeon-md5.c static void octeon_md5_store_hash(struct md5_state *ctx)
ctx                40 arch/mips/cavium-octeon/crypto/octeon-md5.c 	u64 *hash = (u64 *)ctx->hash;
ctx                46 arch/mips/cavium-octeon/crypto/octeon-md5.c static void octeon_md5_read_hash(struct md5_state *ctx)
ctx                48 arch/mips/cavium-octeon/crypto/octeon-md5.c 	u64 *hash = (u64 *)ctx->hash;
ctx               158 arch/mips/cavium-octeon/crypto/octeon-md5.c 	struct md5_state *ctx = shash_desc_ctx(desc);
ctx               160 arch/mips/cavium-octeon/crypto/octeon-md5.c 	memcpy(out, ctx, sizeof(*ctx));
ctx               166 arch/mips/cavium-octeon/crypto/octeon-md5.c 	struct md5_state *ctx = shash_desc_ctx(desc);
ctx               168 arch/mips/cavium-octeon/crypto/octeon-md5.c 	memcpy(ctx, in, sizeof(*ctx));
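
The octeon-md5 export/import entries (octeon-md5.c:158-168) show the simplest possible shash state (de)serialization: struct md5_state is plain old data, so a memcpy in each direction is the whole implementation. Sketched in the same spirit:

	#include <crypto/internal/hash.h>
	#include <crypto/md5.h>
	#include <linux/string.h>

	static int my_md5_export(struct shash_desc *desc, void *out)
	{
		struct md5_state *ctx = shash_desc_ctx(desc);

		memcpy(out, ctx, sizeof(*ctx));	/* POD state: bytes == state */
		return 0;
	}

	static int my_md5_import(struct shash_desc *desc, const void *in)
	{
		struct md5_state *ctx = shash_desc_ctx(desc);

		memcpy(ctx, in, sizeof(*ctx));
		return 0;
	}
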
ctx               163 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               165 arch/mips/crypto/crc32-mips.c 	ctx->crc = mctx->key;
ctx               191 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               193 arch/mips/crypto/crc32-mips.c 	ctx->crc = crc32_mips_le_hw(ctx->crc, data, length);
ctx               200 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               202 arch/mips/crypto/crc32-mips.c 	ctx->crc = crc32c_mips_le_hw(ctx->crc, data, length);
ctx               208 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               210 arch/mips/crypto/crc32-mips.c 	put_unaligned_le32(ctx->crc, out);
ctx               216 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               218 arch/mips/crypto/crc32-mips.c 	put_unaligned_le32(~ctx->crc, out);
ctx               237 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               239 arch/mips/crypto/crc32-mips.c 	return __chksum_finup(ctx->crc, data, len, out);
ctx               245 arch/mips/crypto/crc32-mips.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               247 arch/mips/crypto/crc32-mips.c 	return __chksumc_finup(ctx->crc, data, len, out);
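
The crc32-mips entries (crc32-mips.c:163-247) trace a complete shash lifecycle for a 4-byte CRC: init seeds the per-request crc from the keyed tfm context, update folds data through the hardware CRC instructions, and final stores the result little-endian (the crc32c variant stores the complement, hence the ~ctx->crc at 218). Condensed, with my_crc32_hw standing in for the hardware helper:

	#include <crypto/internal/hash.h>
	#include <asm/unaligned.h>

	struct my_tfm_ctx  { u32 key; };	/* set by setkey() */
	struct my_desc_ctx { u32 crc; };	/* per-request running value */

	extern u32 my_crc32_hw(u32 crc, const u8 *data, unsigned int len);

	static int my_crc_init(struct shash_desc *desc)
	{
		struct my_tfm_ctx *mctx = crypto_shash_ctx(desc->tfm);
		struct my_desc_ctx *ctx = shash_desc_ctx(desc);

		ctx->crc = mctx->key;		/* seed from the key */
		return 0;
	}

	static int my_crc_update(struct shash_desc *desc, const u8 *data,
				 unsigned int len)
	{
		struct my_desc_ctx *ctx = shash_desc_ctx(desc);

		ctx->crc = my_crc32_hw(ctx->crc, data, len);
		return 0;
	}

	static int my_crc_final(struct shash_desc *desc, u8 *out)
	{
		struct my_desc_ctx *ctx = shash_desc_ctx(desc);

		put_unaligned_le32(ctx->crc, out);	/* crc32c: ~ctx->crc */
		return 0;
	}
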
ctx               169 arch/mips/include/asm/fpu_emulator.h 				    struct mips_fpu_struct *ctx, int has_fpu,
ctx               115 arch/mips/include/asm/mmu_context.h 				   struct mm_struct *mm, u64 ctx)
ctx               118 arch/mips/include/asm/mmu_context.h 		atomic64_set(&mm->context.mmid, ctx);
ctx               120 arch/mips/include/asm/mmu_context.h 		mm->context.asid[cpu] = ctx;
ctx               197 arch/mips/include/asm/mmu_context.h 	u64 ctx;
ctx               202 arch/mips/include/asm/mmu_context.h 	ctx = cpu_context(cpu, mm);
ctx               204 arch/mips/include/asm/mmu_context.h 	if (!ctx) {
ctx               216 arch/mips/include/asm/mmu_context.h 		write_c0_memorymapid(ctx & cpu_asid_mask(&cpu_data[cpu]));
ctx               801 arch/mips/math-emu/cp1emu.c 		(si) = (int)get_fpr32(&ctx->fpr[x], 0);			\
ctx               803 arch/mips/math-emu/cp1emu.c 		(si) = (int)get_fpr32(&ctx->fpr[(x) & ~1], (x) & 1);	\
ctx               810 arch/mips/math-emu/cp1emu.c 		set_fpr32(&ctx->fpr[x], 0, si);				\
ctx               811 arch/mips/math-emu/cp1emu.c 		for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val32); i++)	\
ctx               812 arch/mips/math-emu/cp1emu.c 			set_fpr32(&ctx->fpr[x], i, 0);			\
ctx               814 arch/mips/math-emu/cp1emu.c 		set_fpr32(&ctx->fpr[(x) & ~1], (x) & 1, si);		\
ctx               818 arch/mips/math-emu/cp1emu.c #define SIFROMHREG(si, x)	((si) = (int)get_fpr32(&ctx->fpr[x], 1))
ctx               823 arch/mips/math-emu/cp1emu.c 	set_fpr32(&ctx->fpr[x], 1, si);					\
ctx               824 arch/mips/math-emu/cp1emu.c 	for (i = 2; i < ARRAY_SIZE(ctx->fpr[x].val32); i++)		\
ctx               825 arch/mips/math-emu/cp1emu.c 		set_fpr32(&ctx->fpr[x], i, 0);				\
ctx               829 arch/mips/math-emu/cp1emu.c 	((di) = get_fpr64(&ctx->fpr[(x) & ~(cop1_64bit(xcp) ^ 1)], 0))
ctx               835 arch/mips/math-emu/cp1emu.c 	set_fpr64(&ctx->fpr[fpr], 0, di);				\
ctx               836 arch/mips/math-emu/cp1emu.c 	for (i = 1; i < ARRAY_SIZE(ctx->fpr[x].val64); i++)		\
ctx               837 arch/mips/math-emu/cp1emu.c 		set_fpr64(&ctx->fpr[fpr], i, 0);			\
ctx               848 arch/mips/math-emu/cp1emu.c static inline void cop1_cfc(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx               851 arch/mips/math-emu/cp1emu.c 	u32 fcr31 = ctx->fcr31;
ctx               905 arch/mips/math-emu/cp1emu.c static inline void cop1_ctc(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx               908 arch/mips/math-emu/cp1emu.c 	u32 fcr31 = ctx->fcr31;
ctx               963 arch/mips/math-emu/cp1emu.c 	ctx->fcr31 = fcr31;
ctx               971 arch/mips/math-emu/cp1emu.c static int cop1Emulate(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx              1173 arch/mips/math-emu/cp1emu.c 			cop1_cfc(xcp, ctx, ir);
ctx              1178 arch/mips/math-emu/cp1emu.c 			cop1_ctc(xcp, ctx, ir);
ctx              1179 arch/mips/math-emu/cp1emu.c 			if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
ctx              1213 arch/mips/math-emu/cp1emu.c 			cond = ctx->fcr31 & cbit;
ctx              1353 arch/mips/math-emu/cp1emu.c 			sig = fpu_emu(xcp, ctx, ir);
ctx              1363 arch/mips/math-emu/cp1emu.c 		sig = fpux_emu(xcp, ctx, ir, fault_addr);
ctx              1375 arch/mips/math-emu/cp1emu.c 		if (((ctx->fcr31 & cond) != 0) == ((MIPSInst_RT(ir) & 1) != 0))
ctx              1463 arch/mips/math-emu/cp1emu.c static int fpux_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx              1554 arch/mips/math-emu/cp1emu.c 			ctx->fcr31 = (ctx->fcr31 & ~FPU_CSR_ALL_X) | rcsr;
ctx              1555 arch/mips/math-emu/cp1emu.c 			if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
ctx              1658 arch/mips/math-emu/cp1emu.c static int fpu_emu(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx              1737 arch/mips/math-emu/cp1emu.c 			if (((ctx->fcr31 & cond) != 0) !=
ctx              2110 arch/mips/math-emu/cp1emu.c 			if (((ctx->fcr31 & cond) != 0) !=
ctx              2740 arch/mips/math-emu/cp1emu.c 	ctx->fcr31 = (ctx->fcr31 & ~FPU_CSR_ALL_X) | rcsr;
ctx              2741 arch/mips/math-emu/cp1emu.c 	if ((ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E) {
ctx              2757 arch/mips/math-emu/cp1emu.c 			ctx->fcr31 |= cbit;
ctx              2759 arch/mips/math-emu/cp1emu.c 			ctx->fcr31 &= ~cbit;
ctx              2813 arch/mips/math-emu/cp1emu.c int fpu_emulator_cop1Handler(struct pt_regs *xcp, struct mips_fpu_struct *ctx,
ctx              2898 arch/mips/math-emu/cp1emu.c 			sig = cop1Emulate(xcp, ctx, dec_insn, fault_addr);
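
The test "(ctx->fcr31 >> 5) & ctx->fcr31 & FPU_CSR_ALL_E", repeated throughout cp1emu.c above, works because the FCSR Cause bits (17..12) sit exactly five bits above the matching Enable bits (11..7), so a single shift aligns them. A standalone illustration using the real bit layout:

#include <stdint.h>
#include <stdio.h>

#define FPU_CSR_ALL_E	0x00000f80u	/* Enable V/Z/O/U/I: bits 11..7 */
#define FPU_CSR_ALL_X	0x0003f000u	/* Cause E/V/Z/O/U/I: bits 17..12 */

static int fpe_should_signal(uint32_t fcr31)
{
	/* Align each Cause bit with its Enable bit, then check overlap. */
	return ((fcr31 >> 5) & fcr31 & FPU_CSR_ALL_E) != 0;
}

int main(void)
{
	uint32_t inexact = (1u << 12) | (1u << 7);	/* cause + enable */

	printf("%d\n", fpe_should_signal(inexact));	/* 1: raise SIGFPE */
	printf("%d\n", fpe_should_signal(1u << 12));	/* 0: cause masked */
	return 0;
}
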
ctx               182 arch/mips/mm/context.c 	u64 ctx, old_active_mmid;
ctx               209 arch/mips/mm/context.c 	ctx = cpu_context(cpu, mm);
ctx               212 arch/mips/mm/context.c 	    !asid_versions_eq(cpu, ctx, atomic64_read(&mmid_version)) ||
ctx               213 arch/mips/mm/context.c 	    !cmpxchg_relaxed(&cpu_data[cpu].asid_cache, old_active_mmid, ctx)) {
ctx               216 arch/mips/mm/context.c 		ctx = cpu_context(cpu, mm);
ctx               217 arch/mips/mm/context.c 		if (!asid_versions_eq(cpu, ctx, atomic64_read(&mmid_version)))
ctx               218 arch/mips/mm/context.c 			ctx = get_new_mmid(mm);
ctx               220 arch/mips/mm/context.c 		WRITE_ONCE(cpu_data[cpu].asid_cache, ctx);
ctx               236 arch/mips/mm/context.c 	write_c0_memorymapid(ctx & cpu_asid_mask(&boot_cpu_data));
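
The mm/context.c lines pair each MMID with a generation: the fast path reuses the stored context only if its version matches the global mmid_version and the cmpxchg on asid_cache wins; otherwise the slow path allocates afresh. A simplified sketch of the version check (helper names mirror the listing; the field widths and all locking are omitted assumptions):

#include <stdint.h>
#include <stdbool.h>
#include <stdio.h>

#define ASID_MASK	0xffffu			/* assumed MMID width */
#define ASID_VERSION(x)	((x) & ~(uint64_t)ASID_MASK)

static uint64_t mmid_version = 1ull << 16;	/* bumped on rollover */
static uint64_t next_mmid = 1;

static bool asid_versions_eq(uint64_t ctx, uint64_t version)
{
	return ASID_VERSION(ctx) == ASID_VERSION(version);
}

static uint64_t get_new_mmid(void)
{
	/* Real code handles exhaustion: bump mmid_version, flush TLBs. */
	return mmid_version | next_mmid++;
}

static uint64_t check_mmu_context(uint64_t ctx)
{
	if (!asid_versions_eq(ctx, mmid_version))
		ctx = get_new_mmid();
	return ctx;	/* caller writes ctx & ASID_MASK to the MMID reg */
}

int main(void)
{
	uint64_t stale = 0x0042;	/* version 0: predates current gen */

	printf("%#llx\n", (unsigned long long)check_mmu_context(stale));
	return 0;
}
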
ctx               996 arch/mips/mm/tlbex.c static void build_adjust_context(u32 **p, unsigned int ctx)
ctx              1018 arch/mips/mm/tlbex.c 		UASM_i_SRL(p, ctx, ctx, shift);
ctx              1019 arch/mips/mm/tlbex.c 	uasm_i_andi(p, ctx, ctx, mask);
ctx               119 arch/mips/net/ebpf_jit.c static enum reg_val_type get_reg_val_type(const struct jit_ctx *ctx,
ctx               122 arch/mips/net/ebpf_jit.c 	return (ctx->reg_val_types[index] >> (reg * 3)) & 7;
ctx               126 arch/mips/net/ebpf_jit.c #define emit_instr_long(ctx, func64, func32, ...)		\
ctx               128 arch/mips/net/ebpf_jit.c 	if ((ctx)->target != NULL) {				\
ctx               129 arch/mips/net/ebpf_jit.c 		u32 *p = &(ctx)->target[ctx->idx];		\
ctx               135 arch/mips/net/ebpf_jit.c 	(ctx)->idx++;						\
ctx               138 arch/mips/net/ebpf_jit.c #define emit_instr(ctx, func, ...)				\
ctx               139 arch/mips/net/ebpf_jit.c 	emit_instr_long(ctx, func, func, ##__VA_ARGS__)
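
emit_instr_long() above is the core of a two-pass JIT: while ctx->target is NULL only ctx->idx advances, so the first pass just measures the image; the second pass runs the identical emitters against a real buffer. A self-contained sketch of the pattern with toy opcodes (not the real uasm emitters):

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct jit_ctx {
	uint32_t *target;	/* NULL during the sizing pass */
	unsigned int idx;	/* next instruction slot */
};

static void emit(struct jit_ctx *ctx, uint32_t insn)
{
	if (ctx->target != NULL)
		ctx->target[ctx->idx] = insn;
	ctx->idx++;
}

static void build_body(struct jit_ctx *ctx)
{
	emit(ctx, 0x24020001);	/* addiu v0, zero, 1 */
	emit(ctx, 0x03e00008);	/* jr ra */
	emit(ctx, 0x00000000);	/* nop (delay slot) */
}

int main(void)
{
	struct jit_ctx ctx = { NULL, 0 };

	build_body(&ctx);			/* pass 1: count only */
	ctx.target = calloc(ctx.idx, sizeof(uint32_t));
	if (!ctx.target)
		return 1;
	ctx.idx = 0;
	build_body(&ctx);			/* pass 2: emit for real */
	printf("emitted %u words\n", ctx.idx);
	free(ctx.target);
	return 0;
}
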
ctx               141 arch/mips/net/ebpf_jit.c static unsigned int j_target(struct jit_ctx *ctx, int target_idx)
ctx               146 arch/mips/net/ebpf_jit.c 	if (!ctx->target)
ctx               149 arch/mips/net/ebpf_jit.c 	base_va = (unsigned long)ctx->target;
ctx               150 arch/mips/net/ebpf_jit.c 	target_va = base_va + (ctx->offsets[target_idx] & ~OFFSETS_B_CONV);
ctx               159 arch/mips/net/ebpf_jit.c static u32 b_imm(unsigned int tgt, struct jit_ctx *ctx)
ctx               161 arch/mips/net/ebpf_jit.c 	if (!ctx->gen_b_offsets)
ctx               176 arch/mips/net/ebpf_jit.c 	return (ctx->offsets[tgt] & ~OFFSETS_B_CONV) -
ctx               177 arch/mips/net/ebpf_jit.c 		(ctx->idx * 4) - 4;
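
b_imm() turns an eBPF instruction index into a MIPS branch displacement: ctx->offsets[] records each instruction's byte offset in the image (with OFFSETS_B_CONV as a flag bit to mask off), and the trailing "- 4" reflects MIPS branches being relative to the instruction after the branch. The arithmetic, sketched with an assumed flag value:

#include <stdint.h>
#include <stdio.h>

#define OFFSETS_B_CONV	0x80000000u	/* assumed flag bit */

static int b_imm(const uint32_t *offsets, unsigned int tgt, unsigned int idx)
{
	/* byte distance from the slot after the branch to the target */
	return (int)(offsets[tgt] & ~OFFSETS_B_CONV) - (int)(idx * 4) - 4;
}

int main(void)
{
	uint32_t offsets[] = { 0, 8, 24 };	/* image offsets per eBPF insn */

	/* branch emitted at word 2 (byte 8), targeting eBPF insn 2 */
	printf("%d\n", b_imm(offsets, 2, 2));	/* 24 - 8 - 4 = 12 */
	return 0;
}
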
ctx               193 arch/mips/net/ebpf_jit.c static int ebpf_to_mips_reg(struct jit_ctx *ctx,
ctx               214 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SAVE_S0;
ctx               217 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SAVE_S1;
ctx               220 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SAVE_S2;
ctx               223 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SAVE_S3;
ctx               228 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SEEN_FP;
ctx               270 arch/mips/net/ebpf_jit.c static int gen_int_prologue(struct jit_ctx *ctx)
ctx               276 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_RA)
ctx               282 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S0)
ctx               284 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S1)
ctx               286 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S2)
ctx               288 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S3)
ctx               290 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S4)
ctx               294 arch/mips/net/ebpf_jit.c 	locals_size = (ctx->flags & EBPF_SEEN_FP) ? MAX_BPF_STACK : 0;
ctx               298 arch/mips/net/ebpf_jit.c 	ctx->stack_size = stack_adjust;
ctx               305 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, addiu, MIPS_R_V1, MIPS_R_ZERO, MAX_TAIL_CALL_CNT);
ctx               307 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, daddiu, addiu,
ctx               314 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_RA) {
ctx               315 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               319 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S0) {
ctx               320 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               324 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S1) {
ctx               325 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               329 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S2) {
ctx               330 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               334 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S3) {
ctx               335 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               339 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S4) {
ctx               340 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, sd, sw,
ctx               345 arch/mips/net/ebpf_jit.c 	if ((ctx->flags & EBPF_SEEN_TC) && !(ctx->flags & EBPF_TCC_IN_V1))
ctx               346 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, daddu, addu,
ctx               352 arch/mips/net/ebpf_jit.c static int build_int_epilogue(struct jit_ctx *ctx, int dest_reg)
ctx               354 arch/mips/net/ebpf_jit.c 	const struct bpf_prog *prog = ctx->skf;
ctx               355 arch/mips/net/ebpf_jit.c 	int stack_adjust = ctx->stack_size;
ctx               362 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, prog->len, BPF_REG_0);
ctx               364 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, r0, r0, 0);
ctx               367 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_RA) {
ctx               368 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               372 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S0) {
ctx               373 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               377 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S1) {
ctx               378 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               382 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S2) {
ctx               383 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               387 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S3) {
ctx               388 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               392 arch/mips/net/ebpf_jit.c 	if (ctx->flags & EBPF_SAVE_S4) {
ctx               393 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, ld, lw,
ctx               397 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, jr, dest_reg);
ctx               400 arch/mips/net/ebpf_jit.c 		emit_instr_long(ctx, daddiu, addiu,
ctx               403 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, nop);
ctx               409 arch/mips/net/ebpf_jit.c 			   struct jit_ctx *ctx)
ctx               412 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, addiu, reg, MIPS_R_ZERO, insn->imm);
ctx               417 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, lui, reg, upper >> 16);
ctx               418 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, addiu, reg, reg, lower);
ctx               422 arch/mips/net/ebpf_jit.c static int gen_imm_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
ctx               426 arch/mips/net/ebpf_jit.c 	int dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               464 arch/mips/net/ebpf_jit.c 	    get_reg_val_type(ctx, idx, insn->dst_reg) == REG_32BIT)
ctx               465 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx               470 arch/mips/net/ebpf_jit.c 	    get_reg_val_type(ctx, idx, insn->dst_reg) != REG_32BIT)
ctx               471 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, sll, dst, dst, 0);
ctx               477 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, insn->imm);
ctx               481 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, andi, dst, dst, insn->imm);
ctx               485 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, ori, dst, dst, insn->imm);
ctx               489 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, xori, dst, dst, insn->imm);
ctx               492 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, daddiu, dst, dst, insn->imm);
ctx               495 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, daddiu, dst, dst, -insn->imm);
ctx               498 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsrl_safe, dst, dst, insn->imm & 0x3f);
ctx               501 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, srl, dst, dst, insn->imm & 0x1f);
ctx               504 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsll_safe, dst, dst, insn->imm & 0x3f);
ctx               507 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, insn->imm & 0x1f);
ctx               510 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsra_safe, dst, dst, insn->imm & 0x3f);
ctx               513 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sra, dst, dst, insn->imm & 0x1f);
ctx               516 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, addiu, dst, MIPS_R_ZERO, insn->imm);
ctx               519 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, addiu, dst, dst, insn->imm);
ctx               522 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, addiu, dst, dst, -insn->imm);
ctx               530 arch/mips/net/ebpf_jit.c 			gen_imm_to_reg(insn, dst, ctx);
ctx               532 arch/mips/net/ebpf_jit.c 			gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx               536 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, and, dst, dst, MIPS_R_AT);
ctx               540 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, or, dst, dst, MIPS_R_AT);
ctx               544 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, xor, dst, dst, MIPS_R_AT);
ctx               547 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddu, dst, dst, MIPS_R_AT);
ctx               550 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dsubu, dst, dst, MIPS_R_AT);
ctx               553 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, addu, dst, dst, MIPS_R_AT);
ctx               556 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, subu, dst, dst, MIPS_R_AT);
ctx               567 arch/mips/net/ebpf_jit.c static void emit_const_to_reg(struct jit_ctx *ctx, int dst, u64 value)
ctx               570 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, daddiu, dst, MIPS_R_ZERO, (int)value);
ctx               573 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, lui, dst, (s32)(s16)(value >> 16));
ctx               574 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, ori, dst, dst, (unsigned int)(value & 0xffff));
ctx               584 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dsll_safe, dst, dst, needed_shift);
ctx               589 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, lui, dst, (s32)(s16)part);
ctx               592 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, ori, dst,
ctx               604 arch/mips/net/ebpf_jit.c static int emit_bpf_tail_call(struct jit_ctx *ctx, int this_idx)
ctx               609 arch/mips/net/ebpf_jit.c 	ctx->flags |= EBPF_SEEN_TC;
ctx               615 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, lwu, MIPS_R_T5, off, MIPS_R_A1);
ctx               616 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, sltu, MIPS_R_AT, MIPS_R_T5, MIPS_R_A2);
ctx               617 arch/mips/net/ebpf_jit.c 	b_off = b_imm(this_idx + 1, ctx);
ctx               618 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, bne, MIPS_R_AT, MIPS_R_ZERO, b_off);
ctx               624 arch/mips/net/ebpf_jit.c 	tcc_reg = (ctx->flags & EBPF_TCC_IN_V1) ? MIPS_R_V1 : MIPS_R_S4;
ctx               625 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, daddiu, MIPS_R_T5, tcc_reg, -1);
ctx               626 arch/mips/net/ebpf_jit.c 	b_off = b_imm(this_idx + 1, ctx);
ctx               627 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, bltz, tcc_reg, b_off);
ctx               634 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, dsll, MIPS_R_T8, MIPS_R_A2, 3);
ctx               635 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, MIPS_R_A1);
ctx               637 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, ld, MIPS_R_AT, off, MIPS_R_T8);
ctx               638 arch/mips/net/ebpf_jit.c 	b_off = b_imm(this_idx + 1, ctx);
ctx               639 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, beq, MIPS_R_AT, MIPS_R_ZERO, b_off);
ctx               641 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, nop);
ctx               645 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, ld, MIPS_R_T9, off, MIPS_R_AT);
ctx               647 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, daddu, MIPS_R_V1, MIPS_R_T5, MIPS_R_ZERO);
ctx               649 arch/mips/net/ebpf_jit.c 	emit_instr(ctx, daddiu, MIPS_R_T9, MIPS_R_T9, 4);
ctx               650 arch/mips/net/ebpf_jit.c 	return build_int_epilogue(ctx, MIPS_R_T9);
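
emit_bpf_tail_call() places three guards before transferring control: an index bounds check, a tail-call-count check, and a NULL-slot check; on success it jumps to the target's bpf_func just past its TCC-setup instruction. The emitted logic corresponds to this C shape (toy types; the real code reads struct bpf_array):

#include <stdio.h>

#define MAX_TAIL_CALL_CNT 32

struct bpf_prog { int (*func)(void); };
struct prog_array { unsigned int max_entries; struct bpf_prog *ptrs[4]; };

static int demo_prog(void) { return 42; }

/* Returns the function to tail-call, or NULL to fall through; these are
 * the same three guards the emitted MIPS sequence performs. */
static int (*tail_call(struct prog_array *arr, unsigned int index,
		       int *tcc))(void)
{
	if (index >= arr->max_entries)	/* lwu + sltu bounds check */
		return NULL;
	if ((*tcc)-- < 0)		/* daddiu/bltz counter check */
		return NULL;
	if (!arr->ptrs[index])		/* ld + beq NULL check */
		return NULL;
	return arr->ptrs[index]->func;	/* ld bpf_func; jr t9 (the +4
					 * skips the target's TCC init) */
}

int main(void)
{
	struct bpf_prog p = { demo_prog };
	struct prog_array arr = { 1, { &p } };
	int tcc = MAX_TAIL_CALL_CNT;
	int (*f)(void) = tail_call(&arr, 0, &tcc);

	printf("%d\n", f ? f() : -1);	/* 42 */
	return 0;
}
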
ctx               659 arch/mips/net/ebpf_jit.c static int build_one_insn(const struct bpf_insn *insn, struct jit_ctx *ctx,
ctx               692 arch/mips/net/ebpf_jit.c 		r = gen_imm_insn(insn, ctx, this_idx);
ctx               697 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               700 arch/mips/net/ebpf_jit.c 		if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
ctx               701 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx               704 arch/mips/net/ebpf_jit.c 		gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx               706 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dmulu, dst, dst, MIPS_R_AT);
ctx               708 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dmultu, MIPS_R_AT, dst);
ctx               709 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mflo, dst);
ctx               713 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               716 arch/mips/net/ebpf_jit.c 		if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
ctx               717 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx               718 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, dsubu, dst, MIPS_R_ZERO, dst);
ctx               721 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               724 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx               727 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, 0);
ctx               731 arch/mips/net/ebpf_jit.c 		gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx               733 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mulu, dst, dst, MIPS_R_AT);
ctx               735 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, multu, dst, MIPS_R_AT);
ctx               736 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mflo, dst);
ctx               740 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               743 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx               746 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, 0);
ctx               748 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, subu, dst, MIPS_R_ZERO, dst);
ctx               754 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               757 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx               760 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, 0);
ctx               764 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
ctx               767 arch/mips/net/ebpf_jit.c 		gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx               770 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, divu_r6, dst, dst, MIPS_R_AT);
ctx               772 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, modu, dst, dst, MIPS_R_AT);
ctx               775 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, divu, dst, MIPS_R_AT);
ctx               777 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mflo, dst);
ctx               779 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mfhi, dst);
ctx               785 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               788 arch/mips/net/ebpf_jit.c 		if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
ctx               789 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx               793 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, addu, dst, MIPS_R_ZERO, MIPS_R_ZERO);
ctx               796 arch/mips/net/ebpf_jit.c 		gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx               799 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, ddivu_r6, dst, dst, MIPS_R_AT);
ctx               801 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, modu, dst, dst, MIPS_R_AT);
ctx               804 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, ddivu, dst, MIPS_R_AT);
ctx               806 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mflo, dst);
ctx               808 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mfhi, dst);
ctx               822 arch/mips/net/ebpf_jit.c 		src = ebpf_to_mips_reg(ctx, insn, src_reg);
ctx               823 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               826 arch/mips/net/ebpf_jit.c 		if (get_reg_val_type(ctx, this_idx, insn->dst_reg) == REG_32BIT)
ctx               827 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx               831 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddiu, dst, MIPS_R_SP, MAX_BPF_STACK);
ctx               834 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddiu, MIPS_R_AT, MIPS_R_SP, MAX_BPF_STACK);
ctx               837 arch/mips/net/ebpf_jit.c 		} else if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
ctx               844 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, daddu, tmp_reg, src, MIPS_R_ZERO);
ctx               845 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, tmp_reg, MIPS_R_ZERO, 32, 32);
ctx               851 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddu, dst, src, MIPS_R_ZERO);
ctx               854 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, daddu, dst, dst, src);
ctx               857 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsubu, dst, dst, src);
ctx               860 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, xor, dst, dst, src);
ctx               863 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, or, dst, dst, src);
ctx               866 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, and, dst, dst, src);
ctx               870 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dmulu, dst, dst, src);
ctx               872 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dmultu, dst, src);
ctx               873 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, mflo, dst);
ctx               880 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, ddivu_r6,
ctx               883 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, modu, dst, dst, src);
ctx               886 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, ddivu, dst, src);
ctx               888 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, mflo, dst);
ctx               890 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, mfhi, dst);
ctx               893 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsllv, dst, dst, src);
ctx               896 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsrlv, dst, dst, src);
ctx               899 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsrav, dst, dst, src);
ctx               918 arch/mips/net/ebpf_jit.c 		src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
ctx               919 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx               922 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx               925 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, 0);
ctx               928 arch/mips/net/ebpf_jit.c 		ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
ctx               937 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, tmp_reg, src, 0);
ctx               943 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, addu, dst, src, MIPS_R_ZERO);
ctx               946 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, addu, dst, dst, src);
ctx               949 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, subu, dst, dst, src);
ctx               952 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, xor, dst, dst, src);
ctx               955 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, or, dst, dst, src);
ctx               958 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, and, dst, dst, src);
ctx               961 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, mul, dst, dst, src);
ctx               967 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, divu_r6, dst, dst, src);
ctx               969 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, modu, dst, dst, src);
ctx               972 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, divu, dst, src);
ctx               974 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, mflo, dst);
ctx               976 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, mfhi, dst);
ctx               979 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sllv, dst, dst, src);
ctx               982 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, srlv, dst, dst, src);
ctx               985 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, srav, dst, dst, src);
ctx               994 arch/mips/net/ebpf_jit.c 			b_off = b_imm(exit_idx, ctx);
ctx               997 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, beq, MIPS_R_ZERO, MIPS_R_ZERO, b_off);
ctx               998 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
ctx              1004 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
ctx              1010 arch/mips/net/ebpf_jit.c 			gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx              1025 arch/mips/net/ebpf_jit.c 		src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
ctx              1026 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1029 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx              1030 arch/mips/net/ebpf_jit.c 		ts = get_reg_val_type(ctx, this_idx, insn->src_reg);
ctx              1032 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, MIPS_R_AT, src, 0);
ctx              1035 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, MIPS_R_AT, dst, 0);
ctx              1039 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, and, MIPS_R_AT, dst, src);
ctx              1044 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsubu, MIPS_R_AT, dst, src);
ctx              1046 arch/mips/net/ebpf_jit.c 				b_off = b_imm(exit_idx, ctx);
ctx              1050 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, blez, MIPS_R_AT, b_off);
ctx              1052 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
ctx              1053 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
ctx              1056 arch/mips/net/ebpf_jit.c 			b_off = b_imm(this_idx + insn->off + 1, ctx);
ctx              1060 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bgtz, MIPS_R_AT, b_off);
ctx              1062 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, blez, MIPS_R_AT, b_off);
ctx              1063 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
ctx              1066 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, slt, MIPS_R_AT, dst, src);
ctx              1072 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dsubu, MIPS_R_T8, dst, src);
ctx              1073 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
ctx              1076 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, seleqz, MIPS_R_T9,
ctx              1079 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, movz, MIPS_R_T9,
ctx              1081 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, movn, MIPS_R_T9,
ctx              1084 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, or, MIPS_R_AT, MIPS_R_T9, MIPS_R_AT);
ctx              1089 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sltu, MIPS_R_AT, dst, src);
ctx              1104 arch/mips/net/ebpf_jit.c 			b_off = b_imm(exit_idx, ctx);
ctx              1106 arch/mips/net/ebpf_jit.c 				target = j_target(ctx, exit_idx);
ctx              1111 arch/mips/net/ebpf_jit.c 				if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
ctx              1112 arch/mips/net/ebpf_jit.c 					ctx->offsets[this_idx] |= OFFSETS_B_CONV;
ctx              1113 arch/mips/net/ebpf_jit.c 					ctx->long_b_conversion = 1;
ctx              1118 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bne, dst, src, b_off);
ctx              1120 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, beq, dst, src, b_off);
ctx              1121 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
ctx              1122 arch/mips/net/ebpf_jit.c 			if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
ctx              1123 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, j, target);
ctx              1124 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
ctx              1128 arch/mips/net/ebpf_jit.c 		b_off = b_imm(this_idx + insn->off + 1, ctx);
ctx              1130 arch/mips/net/ebpf_jit.c 			target = j_target(ctx, this_idx + insn->off + 1);
ctx              1135 arch/mips/net/ebpf_jit.c 			if (!(ctx->offsets[this_idx] & OFFSETS_B_CONV)) {
ctx              1136 arch/mips/net/ebpf_jit.c 				ctx->offsets[this_idx] |= OFFSETS_B_CONV;
ctx              1137 arch/mips/net/ebpf_jit.c 				ctx->long_b_conversion = 1;
ctx              1142 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, beq, dst, src, b_off);
ctx              1144 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, bne, dst, src, b_off);
ctx              1145 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, nop);
ctx              1146 arch/mips/net/ebpf_jit.c 		if (ctx->offsets[this_idx] & OFFSETS_B_CONV) {
ctx              1147 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, j, target);
ctx              1148 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
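
The JEQ/JNE emission above also shows branch relaxation: when b_imm() cannot reach the target, the JIT records OFFSETS_B_CONV on the slot (forcing another sizing pass via long_b_conversion) and emits the inverted condition hopping over an absolute j. A toy rendering of that control shape:

#include <stdbool.h>
#include <stdio.h>

static void emit(const char *mnemonic, long operand)
{
	printf("\t%s\t%ld\n", mnemonic, operand);
}

static bool fits_short_branch(long byte_off)
{
	/* 16-bit signed word displacement: roughly +/-128 KiB */
	return byte_off >= -(1L << 17) && byte_off < (1L << 17);
}

/* beq with relaxation: an out-of-range branch becomes the inverted
 * condition hopping over an absolute jump, as in the JIT above. */
static void emit_beq_relaxed(long b_off, long abs_target)
{
	if (fits_short_branch(b_off)) {
		emit("beq", b_off);
		emit("nop", 0);		/* delay slot */
	} else {
		emit("bne", 3 * 4);	/* skip the j and its delay slot */
		emit("nop", 0);
		emit("j", abs_target);
		emit("nop", 0);
	}
}

int main(void)
{
	emit_beq_relaxed(64, 0);		/* short form */
	emit_beq_relaxed(1L << 20, 0x400000);	/* relaxed form */
	return 0;
}
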
ctx              1156 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
ctx              1162 arch/mips/net/ebpf_jit.c 				b_off = b_imm(exit_idx, ctx);
ctx              1167 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, blez, dst, b_off);
ctx              1170 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, bltz, dst, b_off);
ctx              1173 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, bgez, dst, b_off);
ctx              1176 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, bgtz, dst, b_off);
ctx              1179 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
ctx              1182 arch/mips/net/ebpf_jit.c 			b_off = b_imm(this_idx + insn->off + 1, ctx);
ctx              1187 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bgtz, dst, b_off);
ctx              1190 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bgez, dst, b_off);
ctx              1193 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bltz, dst, b_off);
ctx              1196 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, blez, dst, b_off);
ctx              1199 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
ctx              1215 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, slti, MIPS_R_AT, dst, (int)t64s);
ctx              1220 arch/mips/net/ebpf_jit.c 		emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
ctx              1221 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, slt, MIPS_R_AT, dst, MIPS_R_AT);
ctx              1231 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
ctx              1247 arch/mips/net/ebpf_jit.c 		emit_const_to_reg(ctx, MIPS_R_AT, (u64)t64s);
ctx              1248 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, sltu, MIPS_R_AT, dst, MIPS_R_AT);
ctx              1254 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg_fp_ok);
ctx              1258 arch/mips/net/ebpf_jit.c 		if (ctx->use_bbit_insns && hweight32((u32)insn->imm) == 1) {
ctx              1260 arch/mips/net/ebpf_jit.c 				b_off = b_imm(exit_idx, ctx);
ctx              1263 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, bbit0, dst, ffs((u32)insn->imm) - 1, b_off);
ctx              1264 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
ctx              1267 arch/mips/net/ebpf_jit.c 			b_off = b_imm(this_idx + insn->off + 1, ctx);
ctx              1270 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, bbit1, dst, ffs((u32)insn->imm) - 1, b_off);
ctx              1271 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, nop);
ctx              1275 arch/mips/net/ebpf_jit.c 		emit_const_to_reg(ctx, MIPS_R_AT, t64);
ctx              1276 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, and, MIPS_R_AT, dst, MIPS_R_AT);
ctx              1287 arch/mips/net/ebpf_jit.c 		b_off = b_imm(this_idx + insn->off + 1, ctx);
ctx              1289 arch/mips/net/ebpf_jit.c 			target = j_target(ctx, this_idx + insn->off + 1);
ctx              1292 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, j, target);
ctx              1294 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, b, b_off);
ctx              1296 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, nop);
ctx              1301 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1305 arch/mips/net/ebpf_jit.c 		emit_const_to_reg(ctx, dst, t64);
ctx              1309 arch/mips/net/ebpf_jit.c 		ctx->flags |= EBPF_SAVE_RA;
ctx              1311 arch/mips/net/ebpf_jit.c 		emit_const_to_reg(ctx, MIPS_R_T9, (u64)t64s);
ctx              1312 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, jalr, MIPS_R_RA, MIPS_R_T9);
ctx              1314 arch/mips/net/ebpf_jit.c 		emit_instr(ctx, nop);
ctx              1318 arch/mips/net/ebpf_jit.c 		if (emit_bpf_tail_call(ctx, this_idx))
ctx              1324 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1327 arch/mips/net/ebpf_jit.c 		td = get_reg_val_type(ctx, this_idx, insn->dst_reg);
ctx              1329 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, dinsu, dst, MIPS_R_ZERO, 32, 32);
ctx              1333 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sll, dst, dst, 0);
ctx              1343 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, wsbh, dst, dst);
ctx              1344 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, andi, dst, dst, 0xffff);
ctx              1347 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, wsbh, dst, dst);
ctx              1348 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, rotr, dst, dst, 16);
ctx              1352 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dsbh, dst, dst);
ctx              1353 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, dshd, dst, dst);
ctx              1363 arch/mips/net/ebpf_jit.c 			ctx->flags |= EBPF_SEEN_FP;
ctx              1367 arch/mips/net/ebpf_jit.c 			dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1372 arch/mips/net/ebpf_jit.c 		gen_imm_to_reg(insn, MIPS_R_AT, ctx);
ctx              1375 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sb, MIPS_R_AT, mem_off, dst);
ctx              1378 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sh, MIPS_R_AT, mem_off, dst);
ctx              1381 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sw, MIPS_R_AT, mem_off, dst);
ctx              1384 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, sd, MIPS_R_AT, mem_off, dst);
ctx              1394 arch/mips/net/ebpf_jit.c 			ctx->flags |= EBPF_SEEN_FP;
ctx              1398 arch/mips/net/ebpf_jit.c 			src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
ctx              1403 arch/mips/net/ebpf_jit.c 		dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1408 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, lbu, dst, mem_off, src);
ctx              1411 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, lhu, dst, mem_off, src);
ctx              1414 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, lw, dst, mem_off, src);
ctx              1417 arch/mips/net/ebpf_jit.c 			emit_instr(ctx, ld, dst, mem_off, src);
ctx              1429 arch/mips/net/ebpf_jit.c 			ctx->flags |= EBPF_SEEN_FP;
ctx              1433 arch/mips/net/ebpf_jit.c 			dst = ebpf_to_mips_reg(ctx, insn, dst_reg);
ctx              1438 arch/mips/net/ebpf_jit.c 		src = ebpf_to_mips_reg(ctx, insn, src_reg_no_fp);
ctx              1448 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddiu, MIPS_R_T6,
ctx              1455 arch/mips/net/ebpf_jit.c 				if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
ctx              1456 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, sll, MIPS_R_AT, src, 0);
ctx              1459 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, ll, MIPS_R_T8, mem_off, dst);
ctx              1460 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, addu, MIPS_R_T8, MIPS_R_T8, src);
ctx              1461 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sc, MIPS_R_T8, mem_off, dst);
ctx              1466 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
ctx              1467 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
ctx              1470 arch/mips/net/ebpf_jit.c 				if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
ctx              1471 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
ctx              1472 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
ctx              1475 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, lld, MIPS_R_T8, mem_off, dst);
ctx              1476 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, daddu, MIPS_R_T8, MIPS_R_T8, src);
ctx              1477 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, scd, MIPS_R_T8, mem_off, dst);
ctx              1478 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, beq, MIPS_R_T8, MIPS_R_ZERO, -4 * 4);
ctx              1479 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, nop);
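
The BPF_XADD case above emits the canonical MIPS ll/sc retry loop: load-linked, add, store-conditional, and branch back -4*4 bytes if the store failed. In C, the compiler's __atomic builtin produces an equivalent loop on MIPS:

#include <stdint.h>
#include <stdio.h>

static void xadd32(uint32_t *mem, uint32_t val)
{
	/* On MIPS this compiles to roughly:
	 * 1: ll t8,0(a0); addu t8,t8,a1; sc t8,0(a0); beqz t8,1b; nop */
	__atomic_fetch_add(mem, val, __ATOMIC_RELAXED);
}

int main(void)
{
	uint32_t word = 1;

	xadd32(&word, 41);
	printf("%u\n", word);	/* 42 */
	return 0;
}
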
ctx              1485 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sb, src, mem_off, dst);
ctx              1488 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sh, src, mem_off, dst);
ctx              1491 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sw, src, mem_off, dst);
ctx              1494 arch/mips/net/ebpf_jit.c 				if (get_reg_val_type(ctx, this_idx, insn->src_reg) == REG_32BIT) {
ctx              1495 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, daddu, MIPS_R_AT, src, MIPS_R_ZERO);
ctx              1496 arch/mips/net/ebpf_jit.c 					emit_instr(ctx, dinsu, MIPS_R_AT, MIPS_R_ZERO, 32, 32);
ctx              1499 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sd, src, mem_off, dst);
ctx              1518 arch/mips/net/ebpf_jit.c static int build_int_body(struct jit_ctx *ctx)
ctx              1520 arch/mips/net/ebpf_jit.c 	const struct bpf_prog *prog = ctx->skf;
ctx              1526 arch/mips/net/ebpf_jit.c 		if ((ctx->reg_val_types[i] & RVT_VISITED_MASK) == 0) {
ctx              1532 arch/mips/net/ebpf_jit.c 		if (ctx->target == NULL)
ctx              1533 arch/mips/net/ebpf_jit.c 			ctx->offsets[i] = (ctx->offsets[i] & OFFSETS_B_CONV) | (ctx->idx * 4);
ctx              1535 arch/mips/net/ebpf_jit.c 		r = build_one_insn(insn, ctx, i, prog->len);
ctx              1541 arch/mips/net/ebpf_jit.c 	if (ctx->target == NULL)
ctx              1542 arch/mips/net/ebpf_jit.c 		ctx->offsets[i] = ctx->idx * 4;
ctx              1549 arch/mips/net/ebpf_jit.c 	if (ctx->target == NULL)
ctx              1553 arch/mips/net/ebpf_jit.c 				ctx->offsets[i] = ctx->idx * 4;
ctx              1559 arch/mips/net/ebpf_jit.c static int reg_val_propagate_range(struct jit_ctx *ctx, u64 initial_rvt,
ctx              1562 arch/mips/net/ebpf_jit.c 	const struct bpf_prog *prog = ctx->skf;
ctx              1565 arch/mips/net/ebpf_jit.c 	u64 *rvt = ctx->reg_val_types;
ctx              1736 arch/mips/net/ebpf_jit.c static int reg_val_propagate(struct jit_ctx *ctx)
ctx              1738 arch/mips/net/ebpf_jit.c 	const struct bpf_prog *prog = ctx->skf;
ctx              1757 arch/mips/net/ebpf_jit.c 	reg_val_propagate_range(ctx, exit_rvt, 0, false);
ctx              1766 arch/mips/net/ebpf_jit.c 		u64 rvt = ctx->reg_val_types[i];
ctx              1772 arch/mips/net/ebpf_jit.c 			reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, true);
ctx              1775 arch/mips/net/ebpf_jit.c 			reg_val_propagate_range(ctx, rvt & ~RVT_VISITED_MASK, i, false);
ctx              1803 arch/mips/net/ebpf_jit.c 	struct jit_ctx ctx;
ctx              1821 arch/mips/net/ebpf_jit.c 	memset(&ctx, 0, sizeof(ctx));
ctx              1829 arch/mips/net/ebpf_jit.c 		ctx.use_bbit_insns = 1;
ctx              1832 arch/mips/net/ebpf_jit.c 		ctx.use_bbit_insns = 0;
ctx              1836 arch/mips/net/ebpf_jit.c 	ctx.offsets = kcalloc(prog->len + 1, sizeof(*ctx.offsets), GFP_KERNEL);
ctx              1837 arch/mips/net/ebpf_jit.c 	if (ctx.offsets == NULL)
ctx              1840 arch/mips/net/ebpf_jit.c 	ctx.reg_val_types = kcalloc(prog->len + 1, sizeof(*ctx.reg_val_types), GFP_KERNEL);
ctx              1841 arch/mips/net/ebpf_jit.c 	if (ctx.reg_val_types == NULL)
ctx              1844 arch/mips/net/ebpf_jit.c 	ctx.skf = prog;
ctx              1846 arch/mips/net/ebpf_jit.c 	if (reg_val_propagate(&ctx))
ctx              1853 arch/mips/net/ebpf_jit.c 	if (build_int_body(&ctx))
ctx              1860 arch/mips/net/ebpf_jit.c 	if (ctx.flags & EBPF_SEEN_TC) {
ctx              1861 arch/mips/net/ebpf_jit.c 		if (ctx.flags & EBPF_SAVE_RA)
ctx              1862 arch/mips/net/ebpf_jit.c 			ctx.flags |= EBPF_SAVE_S4;
ctx              1864 arch/mips/net/ebpf_jit.c 			ctx.flags |= EBPF_TCC_IN_V1;
ctx              1875 arch/mips/net/ebpf_jit.c 		ctx.idx = 0;
ctx              1876 arch/mips/net/ebpf_jit.c 		ctx.gen_b_offsets = 1;
ctx              1877 arch/mips/net/ebpf_jit.c 		ctx.long_b_conversion = 0;
ctx              1878 arch/mips/net/ebpf_jit.c 		if (gen_int_prologue(&ctx))
ctx              1880 arch/mips/net/ebpf_jit.c 		if (build_int_body(&ctx))
ctx              1882 arch/mips/net/ebpf_jit.c 		if (build_int_epilogue(&ctx, MIPS_R_RA))
ctx              1884 arch/mips/net/ebpf_jit.c 	} while (ctx.long_b_conversion);
ctx              1886 arch/mips/net/ebpf_jit.c 	image_size = 4 * ctx.idx;
ctx              1893 arch/mips/net/ebpf_jit.c 	ctx.target = (u32 *)image_ptr;
ctx              1896 arch/mips/net/ebpf_jit.c 	ctx.idx = 0;
ctx              1897 arch/mips/net/ebpf_jit.c 	if (gen_int_prologue(&ctx))
ctx              1899 arch/mips/net/ebpf_jit.c 	if (build_int_body(&ctx))
ctx              1901 arch/mips/net/ebpf_jit.c 	if (build_int_epilogue(&ctx, MIPS_R_RA))
ctx              1905 arch/mips/net/ebpf_jit.c 	flush_icache_range((unsigned long)ctx.target,
ctx              1906 arch/mips/net/ebpf_jit.c 			   (unsigned long)&ctx.target[ctx.idx]);
ctx              1910 arch/mips/net/ebpf_jit.c 		bpf_jit_dump(prog->len, image_size, 2, ctx.target);
ctx              1913 arch/mips/net/ebpf_jit.c 	prog->bpf_func = (void *)ctx.target;
ctx              1920 arch/mips/net/ebpf_jit.c 	kfree(ctx.offsets);
ctx              1921 arch/mips/net/ebpf_jit.c 	kfree(ctx.reg_val_types);
ctx                92 arch/mips/pci/pci-alchemy.c static void alchemy_pci_wired_entry(struct alchemy_pci_context *ctx)
ctx                94 arch/mips/pci/pci-alchemy.c 	ctx->wired_entry = read_c0_wired();
ctx                95 arch/mips/pci/pci-alchemy.c 	add_wired_entry(0, 0, (unsigned long)ctx->pci_cfg_vm->addr, PM_4K);
ctx                96 arch/mips/pci/pci-alchemy.c 	ctx->last_elo0 = ctx->last_elo1 = ~0;
ctx               102 arch/mips/pci/pci-alchemy.c 	struct alchemy_pci_context *ctx = bus->sysdata;
ctx               114 arch/mips/pci/pci-alchemy.c 	r = __raw_readl(ctx->regs + PCI_REG_STATCMD) & 0x0000ffff;
ctx               116 arch/mips/pci/pci-alchemy.c 	__raw_writel(r, ctx->regs + PCI_REG_STATCMD);
ctx               122 arch/mips/pci/pci-alchemy.c 	if (ctx->board_pci_idsel(device, 1) == 0) {
ctx               147 arch/mips/pci/pci-alchemy.c 	if ((entryLo0 != ctx->last_elo0) || (entryLo1 != ctx->last_elo1)) {
ctx               148 arch/mips/pci/pci-alchemy.c 		mod_wired_entry(ctx->wired_entry, entryLo0, entryLo1,
ctx               149 arch/mips/pci/pci-alchemy.c 				(unsigned long)ctx->pci_cfg_vm->addr, PM_4K);
ctx               150 arch/mips/pci/pci-alchemy.c 		ctx->last_elo0 = entryLo0;
ctx               151 arch/mips/pci/pci-alchemy.c 		ctx->last_elo1 = entryLo1;
ctx               155 arch/mips/pci/pci-alchemy.c 		__raw_writel(*data, ctx->pci_cfg_vm->addr + offset);
ctx               157 arch/mips/pci/pci-alchemy.c 		*data = __raw_readl(ctx->pci_cfg_vm->addr + offset);
ctx               164 arch/mips/pci/pci-alchemy.c 	status = __raw_readl(ctx->regs + PCI_REG_STATCMD);
ctx               175 arch/mips/pci/pci-alchemy.c 		__raw_writel(status & 0xf000ffff, ctx->regs + PCI_REG_STATCMD);
ctx               182 arch/mips/pci/pci-alchemy.c 	(void)ctx->board_pci_idsel(device, 0);
ctx               309 arch/mips/pci/pci-alchemy.c 	struct alchemy_pci_context *ctx = __alchemy_pci_ctx;
ctx               310 arch/mips/pci/pci-alchemy.c 	if (!ctx)
ctx               313 arch/mips/pci/pci-alchemy.c 	ctx->pm[0]  = __raw_readl(ctx->regs + PCI_REG_CMEM);
ctx               314 arch/mips/pci/pci-alchemy.c 	ctx->pm[1]  = __raw_readl(ctx->regs + PCI_REG_CONFIG) & 0x0009ffff;
ctx               315 arch/mips/pci/pci-alchemy.c 	ctx->pm[2]  = __raw_readl(ctx->regs + PCI_REG_B2BMASK_CCH);
ctx               316 arch/mips/pci/pci-alchemy.c 	ctx->pm[3]  = __raw_readl(ctx->regs + PCI_REG_B2BBASE0_VID);
ctx               317 arch/mips/pci/pci-alchemy.c 	ctx->pm[4]  = __raw_readl(ctx->regs + PCI_REG_B2BBASE1_SID);
ctx               318 arch/mips/pci/pci-alchemy.c 	ctx->pm[5]  = __raw_readl(ctx->regs + PCI_REG_MWMASK_DEV);
ctx               319 arch/mips/pci/pci-alchemy.c 	ctx->pm[6]  = __raw_readl(ctx->regs + PCI_REG_MWBASE_REV_CCL);
ctx               320 arch/mips/pci/pci-alchemy.c 	ctx->pm[7]  = __raw_readl(ctx->regs + PCI_REG_ID);
ctx               321 arch/mips/pci/pci-alchemy.c 	ctx->pm[8]  = __raw_readl(ctx->regs + PCI_REG_CLASSREV);
ctx               322 arch/mips/pci/pci-alchemy.c 	ctx->pm[9]  = __raw_readl(ctx->regs + PCI_REG_PARAM);
ctx               323 arch/mips/pci/pci-alchemy.c 	ctx->pm[10] = __raw_readl(ctx->regs + PCI_REG_MBAR);
ctx               324 arch/mips/pci/pci-alchemy.c 	ctx->pm[11] = __raw_readl(ctx->regs + PCI_REG_TIMEOUT);
ctx               331 arch/mips/pci/pci-alchemy.c 	struct alchemy_pci_context *ctx = __alchemy_pci_ctx;
ctx               332 arch/mips/pci/pci-alchemy.c 	if (!ctx)
ctx               335 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[0],  ctx->regs + PCI_REG_CMEM);
ctx               336 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[2],  ctx->regs + PCI_REG_B2BMASK_CCH);
ctx               337 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[3],  ctx->regs + PCI_REG_B2BBASE0_VID);
ctx               338 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[4],  ctx->regs + PCI_REG_B2BBASE1_SID);
ctx               339 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[5],  ctx->regs + PCI_REG_MWMASK_DEV);
ctx               340 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[6],  ctx->regs + PCI_REG_MWBASE_REV_CCL);
ctx               341 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[7],  ctx->regs + PCI_REG_ID);
ctx               342 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[8],  ctx->regs + PCI_REG_CLASSREV);
ctx               343 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[9],  ctx->regs + PCI_REG_PARAM);
ctx               344 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[10], ctx->regs + PCI_REG_MBAR);
ctx               345 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[11], ctx->regs + PCI_REG_TIMEOUT);
ctx               347 arch/mips/pci/pci-alchemy.c 	__raw_writel(ctx->pm[1],  ctx->regs + PCI_REG_CONFIG);
ctx               353 arch/mips/pci/pci-alchemy.c 	ctx->wired_entry = 8191;	/* impossibly high value */
ctx               354 arch/mips/pci/pci-alchemy.c 	alchemy_pci_wired_entry(ctx);	/* install it */
ctx               365 arch/mips/pci/pci-alchemy.c 	struct alchemy_pci_context *ctx;
ctx               379 arch/mips/pci/pci-alchemy.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               380 arch/mips/pci/pci-alchemy.c 	if (!ctx) {
ctx               412 arch/mips/pci/pci-alchemy.c 	ctx->regs = ioremap_nocache(r->start, resource_size(r));
ctx               413 arch/mips/pci/pci-alchemy.c 	if (!ctx->regs) {
ctx               429 arch/mips/pci/pci-alchemy.c 	ctx->alchemy_pci_ctrl.io_map_base = (unsigned long)virt_io;
ctx               435 arch/mips/pci/pci-alchemy.c 		val = __raw_readl(ctx->regs + PCI_REG_CONFIG);
ctx               437 arch/mips/pci/pci-alchemy.c 		__raw_writel(val, ctx->regs + PCI_REG_CONFIG);
ctx               443 arch/mips/pci/pci-alchemy.c 		ctx->board_map_irq = pd->board_map_irq;
ctx               446 arch/mips/pci/pci-alchemy.c 		ctx->board_pci_idsel = pd->board_pci_idsel;
ctx               448 arch/mips/pci/pci-alchemy.c 		ctx->board_pci_idsel = alchemy_pci_def_idsel;
ctx               451 arch/mips/pci/pci-alchemy.c 	ctx->alchemy_pci_ctrl.pci_ops = &alchemy_pci_ops;
ctx               452 arch/mips/pci/pci-alchemy.c 	ctx->alchemy_pci_ctrl.mem_resource = &alchemy_pci_def_memres;
ctx               453 arch/mips/pci/pci-alchemy.c 	ctx->alchemy_pci_ctrl.io_resource = &alchemy_pci_def_iores;
ctx               461 arch/mips/pci/pci-alchemy.c 	ctx->pci_cfg_vm = get_vm_area(0x2000, VM_IOREMAP);
ctx               462 arch/mips/pci/pci-alchemy.c 	if (!ctx->pci_cfg_vm) {
ctx               467 arch/mips/pci/pci-alchemy.c 	ctx->wired_entry = 8191;	/* impossibly high value */
ctx               468 arch/mips/pci/pci-alchemy.c 	alchemy_pci_wired_entry(ctx);	/* install it */
ctx               470 arch/mips/pci/pci-alchemy.c 	set_io_port_base((unsigned long)ctx->alchemy_pci_ctrl.io_map_base);
ctx               473 arch/mips/pci/pci-alchemy.c 	val = __raw_readl(ctx->regs + PCI_REG_CONFIG);
ctx               477 arch/mips/pci/pci-alchemy.c 	__raw_writel(val, ctx->regs + PCI_REG_CONFIG);
ctx               480 arch/mips/pci/pci-alchemy.c 	__alchemy_pci_ctx = ctx;
ctx               481 arch/mips/pci/pci-alchemy.c 	platform_set_drvdata(pdev, ctx);
ctx               483 arch/mips/pci/pci-alchemy.c 	register_pci_controller(&ctx->alchemy_pci_ctrl);
ctx               493 arch/mips/pci/pci-alchemy.c 	iounmap(ctx->regs);
ctx               501 arch/mips/pci/pci-alchemy.c 	kfree(ctx);
ctx               528 arch/mips/pci/pci-alchemy.c 	struct alchemy_pci_context *ctx = dev->sysdata;
ctx               529 arch/mips/pci/pci-alchemy.c 	if (ctx && ctx->board_map_irq)
ctx               530 arch/mips/pci/pci-alchemy.c 		return ctx->board_map_irq(dev, slot, pin);
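
The Alchemy config accessor reaches PCI config space through a single wired TLB entry that is remapped on demand; last_elo0/last_elo1 cache the installed EntryLo pair so the entry is rewritten only when the target page changes, and the resume path resets them to ~0 to force a rewrite. The caching idiom, sketched with a stubbed mod_wired_entry():

#include <stdint.h>
#include <stdio.h>

struct pci_ctx {
	uint32_t last_elo0, last_elo1;
	unsigned int wired_entry;
};

static void mod_wired_entry(unsigned int idx, uint32_t lo0, uint32_t lo1)
{
	printf("rewrite TLB[%u] = %08x/%08x\n", idx, lo0, lo1);
}

static void map_cfg_window(struct pci_ctx *ctx, uint32_t elo0, uint32_t elo1)
{
	/* Only touch the wired entry when the mapping actually changes. */
	if (elo0 != ctx->last_elo0 || elo1 != ctx->last_elo1) {
		mod_wired_entry(ctx->wired_entry, elo0, elo1);
		ctx->last_elo0 = elo0;
		ctx->last_elo1 = elo1;
	}
}

int main(void)
{
	struct pci_ctx ctx = { .last_elo0 = ~0u, .last_elo1 = ~0u };

	map_cfg_window(&ctx, 0x1000, 0x1001);	/* rewrites */
	map_cfg_window(&ctx, 0x1000, 0x1001);	/* cached, no rewrite */
	return 0;
}
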
ctx                21 arch/nios2/include/asm/mmu_context.h extern unsigned long get_pid_from_context(mm_context_t *ctx);
ctx                92 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               103 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 4;
ctx               104 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_128(ctx->key_enc, in_key);
ctx               107 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 5;
ctx               108 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_192(ctx->key_enc, in_key);
ctx               111 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 6;
ctx               112 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_256(ctx->key_enc, in_key);
ctx               116 arch/powerpc/crypto/aes-spe-glue.c 	ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len);
ctx               124 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_xts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               142 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 4;
ctx               143 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_128(ctx->key_enc, in_key);
ctx               144 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_128(ctx->key_twk, in_key + AES_KEYSIZE_128);
ctx               147 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 5;
ctx               148 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_192(ctx->key_enc, in_key);
ctx               149 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_192(ctx->key_twk, in_key + AES_KEYSIZE_192);
ctx               152 arch/powerpc/crypto/aes-spe-glue.c 		ctx->rounds = 6;
ctx               153 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_256(ctx->key_enc, in_key);
ctx               154 arch/powerpc/crypto/aes-spe-glue.c 		ppc_expand_key_256(ctx->key_twk, in_key + AES_KEYSIZE_256);
ctx               158 arch/powerpc/crypto/aes-spe-glue.c 	ppc_generate_decrypt_key(ctx->key_dec, ctx->key_enc, key_len);
ctx               165 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               168 arch/powerpc/crypto/aes-spe-glue.c 	ppc_encrypt_aes(out, in, ctx->key_enc, ctx->rounds);
ctx               174 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               177 arch/powerpc/crypto/aes-spe-glue.c 	ppc_decrypt_aes(out, in, ctx->key_dec, ctx->rounds);
ctx               184 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               200 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_enc, ctx->rounds, nbytes);
ctx               212 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               228 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_dec, ctx->rounds, nbytes);
ctx               240 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               256 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_enc, ctx->rounds, nbytes, walk.iv);
ctx               268 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               284 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_dec, ctx->rounds, nbytes, walk.iv);
ctx               296 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               313 arch/powerpc/crypto/aes-spe-glue.c 			      ctx->key_enc, ctx->rounds, pbytes, walk.iv);
ctx               326 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               335 arch/powerpc/crypto/aes-spe-glue.c 	twk = ctx->key_twk;
ctx               344 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_enc, ctx->rounds, nbytes, walk.iv, twk);
ctx               357 arch/powerpc/crypto/aes-spe-glue.c 	struct ppc_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               366 arch/powerpc/crypto/aes-spe-glue.c 	twk = ctx->key_twk;
ctx               375 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_dec, ctx->rounds, nbytes, walk.iv, twk);
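
The XTS setkey above halves key_len and expands the halves separately: the first keys the data cipher (key_enc), the second the tweak cipher (key_twk). A stub sketch of the split for the 128-bit case; expand_key_128() here is a placeholder, not the real ppc_expand_key_128() schedule:

#include <stdint.h>
#include <string.h>

#define AES_KEYSIZE_128 16

struct ppc_xts_ctx {
	uint8_t key_enc[AES_KEYSIZE_128];
	uint8_t key_twk[AES_KEYSIZE_128];
};

static void expand_key_128(uint8_t *out, const uint8_t *in)
{
	memcpy(out, in, AES_KEYSIZE_128);	/* stand-in for the schedule */
}

static int xts_setkey(struct ppc_xts_ctx *ctx,
		      const uint8_t *in_key, unsigned int key_len)
{
	if (key_len != 2 * AES_KEYSIZE_128)
		return -1;
	key_len >>= 1;				/* per-cipher key size */
	expand_key_128(ctx->key_enc, in_key);
	expand_key_128(ctx->key_twk, in_key + key_len);
	return 0;
}
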
ctx                92 arch/powerpc/include/asm/asm-prototypes.h long sys_debug_setcontext(struct ucontext __user *ctx,
ctx               814 arch/powerpc/include/asm/book3s/64/mmu-hash.h 	unsigned long ctx;
ctx               823 arch/powerpc/include/asm/book3s/64/mmu-hash.h 		ctx =  1 + ((ea & EA_MASK) >> MAX_EA_BITS_PER_CONTEXT);
ctx               825 arch/powerpc/include/asm/book3s/64/mmu-hash.h 		ctx = region_id + MAX_KERNEL_CTX_CNT - 1;
ctx               826 arch/powerpc/include/asm/book3s/64/mmu-hash.h 	return ctx;
ctx               142 arch/powerpc/include/asm/book3s/64/mmu.h static inline u16 mm_ctx_user_psize(mm_context_t *ctx)
ctx               144 arch/powerpc/include/asm/book3s/64/mmu.h 	return ctx->hash_context->user_psize;
ctx               147 arch/powerpc/include/asm/book3s/64/mmu.h static inline void mm_ctx_set_user_psize(mm_context_t *ctx, u16 user_psize)
ctx               149 arch/powerpc/include/asm/book3s/64/mmu.h 	ctx->hash_context->user_psize = user_psize;
ctx               152 arch/powerpc/include/asm/book3s/64/mmu.h static inline unsigned char *mm_ctx_low_slices(mm_context_t *ctx)
ctx               154 arch/powerpc/include/asm/book3s/64/mmu.h 	return ctx->hash_context->low_slices_psize;
ctx               157 arch/powerpc/include/asm/book3s/64/mmu.h static inline unsigned char *mm_ctx_high_slices(mm_context_t *ctx)
ctx               159 arch/powerpc/include/asm/book3s/64/mmu.h 	return ctx->hash_context->high_slices_psize;
ctx               162 arch/powerpc/include/asm/book3s/64/mmu.h static inline unsigned long mm_ctx_slb_addr_limit(mm_context_t *ctx)
ctx               164 arch/powerpc/include/asm/book3s/64/mmu.h 	return ctx->hash_context->slb_addr_limit;
ctx               167 arch/powerpc/include/asm/book3s/64/mmu.h static inline void mm_ctx_set_slb_addr_limit(mm_context_t *ctx, unsigned long limit)
ctx               169 arch/powerpc/include/asm/book3s/64/mmu.h 	ctx->hash_context->slb_addr_limit = limit;
ctx               172 arch/powerpc/include/asm/book3s/64/mmu.h static inline struct slice_mask *slice_mask_for_size(mm_context_t *ctx, int psize)
ctx               176 arch/powerpc/include/asm/book3s/64/mmu.h 		return &ctx->hash_context->mask_64k;
ctx               180 arch/powerpc/include/asm/book3s/64/mmu.h 		return &ctx->hash_context->mask_16m;
ctx               182 arch/powerpc/include/asm/book3s/64/mmu.h 		return &ctx->hash_context->mask_16g;
ctx               186 arch/powerpc/include/asm/book3s/64/mmu.h 	return &ctx->hash_context->mask_4k;
ctx               190 arch/powerpc/include/asm/book3s/64/mmu.h static inline struct subpage_prot_table *mm_ctx_subpage_prot(mm_context_t *ctx)
ctx               192 arch/powerpc/include/asm/book3s/64/mmu.h 	return ctx->hash_context->spt;
ctx               246 arch/powerpc/include/asm/book3s/64/mmu.h static inline int get_user_context(mm_context_t *ctx, unsigned long ea)
ctx               250 arch/powerpc/include/asm/book3s/64/mmu.h 	if (likely(index < ARRAY_SIZE(ctx->extended_id)))
ctx               251 arch/powerpc/include/asm/book3s/64/mmu.h 		return ctx->extended_id[index];
ctx               258 arch/powerpc/include/asm/book3s/64/mmu.h static inline unsigned long get_user_vsid(mm_context_t *ctx,
ctx               261 arch/powerpc/include/asm/book3s/64/mmu.h 	unsigned long context = get_user_context(ctx, ea);
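
The mm_ctx_*() accessors above hide whether the fields live behind ctx->hash_context (book3s/64, as here) or directly in the context (the 8xx variant further down the listing), so slice-handling code can be shared between both MMUs. A compilable sketch of the indirection, with invented field sizes and a toy get_user_context():

#include <stdio.h>

/* Minimal model: the context only holds a pointer, and each accessor
 * indirects through hash_context.  Field widths are invented. */
struct hash_mm_context {
        unsigned short user_psize;
        unsigned long slb_addr_limit;
};

typedef struct {
        struct hash_mm_context *hash_context;
        int extended_id[8];             /* [0] is the primary context id */
} mm_context_t;

static unsigned short mm_ctx_user_psize(mm_context_t *ctx)
{
        return ctx->hash_context->user_psize;
}

static void mm_ctx_set_user_psize(mm_context_t *ctx, unsigned short p)
{
        ctx->hash_context->user_psize = p;
}

static int get_user_context(mm_context_t *ctx, int index)
{
        return ctx->extended_id[index];         /* cf. lines 246-251 above */
}

int main(void)
{
        struct hash_mm_context hctx = { 0, 0 };
        mm_context_t ctx = { &hctx, { 42 } };

        mm_ctx_set_user_psize(&ctx, 4);
        printf("user_psize %u, primary id %d\n",
               mm_ctx_user_psize(&ctx), get_user_context(&ctx, 0));
        return 0;
}
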
ctx               226 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline u16 mm_ctx_user_psize(mm_context_t *ctx)
ctx               228 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	return ctx->user_psize;
ctx               231 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline void mm_ctx_set_user_psize(mm_context_t *ctx, u16 user_psize)
ctx               233 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	ctx->user_psize = user_psize;
ctx               236 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline unsigned char *mm_ctx_low_slices(mm_context_t *ctx)
ctx               238 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	return ctx->low_slices_psize;
ctx               241 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline unsigned char *mm_ctx_high_slices(mm_context_t *ctx)
ctx               243 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	return ctx->high_slices_psize;
ctx               246 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline unsigned long mm_ctx_slb_addr_limit(mm_context_t *ctx)
ctx               248 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	return ctx->slb_addr_limit;
ctx               251 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline void mm_ctx_set_slb_addr_limit(mm_context_t *ctx, unsigned long limit)
ctx               253 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	ctx->slb_addr_limit = limit;
ctx               256 arch/powerpc/include/asm/nohash/32/mmu-8xx.h static inline struct slice_mask *slice_mask_for_size(mm_context_t *ctx, int psize)
ctx               259 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 		return &ctx->mask_512k;
ctx               261 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 		return &ctx->mask_8m;
ctx               265 arch/powerpc/include/asm/nohash/32/mmu-8xx.h 	return &ctx->mask_base_psize;
ctx               108 arch/powerpc/include/asm/pgtable.h static inline void *pte_frag_get(mm_context_t *ctx)
ctx               110 arch/powerpc/include/asm/pgtable.h 	return ctx->pte_frag;
ctx               113 arch/powerpc/include/asm/pgtable.h static inline void pte_frag_set(mm_context_t *ctx, void *p)
ctx               115 arch/powerpc/include/asm/pgtable.h 	ctx->pte_frag = p;
ctx               122 arch/powerpc/include/asm/pgtable.h static inline void *pte_frag_get(mm_context_t *ctx)
ctx               127 arch/powerpc/include/asm/pgtable.h static inline void pte_frag_set(mm_context_t *ctx, void *p)
ctx               129 arch/powerpc/include/asm/spu.h 	struct spu_context *ctx;
ctx               212 arch/powerpc/include/asm/spu.h void spu_set_profile_private_kref(struct spu_context *ctx,
ctx               216 arch/powerpc/include/asm/spu.h void *spu_get_profile_private_kref(struct spu_context *ctx);
ctx               170 arch/powerpc/include/asm/spu_priv1.h 	void (*enable_spu)(struct spu_context *ctx);
ctx               171 arch/powerpc/include/asm/spu_priv1.h 	void (*disable_spu)(struct spu_context *ctx);
ctx               202 arch/powerpc/include/asm/spu_priv1.h spu_enable_spu (struct spu_context *ctx)
ctx               204 arch/powerpc/include/asm/spu_priv1.h 	spu_management_ops->enable_spu(ctx);
ctx               208 arch/powerpc/include/asm/spu_priv1.h spu_disable_spu (struct spu_context *ctx)
ctx               210 arch/powerpc/include/asm/spu_priv1.h 	spu_management_ops->disable_spu(ctx);
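
spu_priv1.h routes enable_spu/disable_spu through the spu_management_ops table so the platform backend can be selected at runtime while callers use thin wrappers. A minimal user-space model of the same ops-table indirection; the "hv" backend and its behaviour are invented for illustration.

#include <stdio.h>

struct spu_context;

struct spu_management_ops {
        void (*enable_spu)(struct spu_context *ctx);
        void (*disable_spu)(struct spu_context *ctx);
};

/* invented backend, standing in for whatever the platform installs */
static void hv_enable_spu(struct spu_context *ctx)  { (void)ctx; puts("enable"); }
static void hv_disable_spu(struct spu_context *ctx) { (void)ctx; puts("disable"); }

static const struct spu_management_ops hv_ops = {
        .enable_spu  = hv_enable_spu,
        .disable_spu = hv_disable_spu,
};

static const struct spu_management_ops *spu_management_ops = &hv_ops;

/* wrapper shape matching lines 202-210 above */
static void spu_enable_spu(struct spu_context *ctx)
{
        spu_management_ops->enable_spu(ctx);
}

int main(void)
{
        spu_enable_spu(NULL);
        spu_management_ops->disable_spu(NULL);
        return 0;
}
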
ctx               105 arch/powerpc/kernel/hw_breakpoint.c 	if (bp->ctx && bp->ctx->task && bp->ctx->task != ((void *)-1L))
ctx               106 arch/powerpc/kernel/hw_breakpoint.c 		bp->ctx->task->thread.last_hit_ubp = NULL;
ctx              1252 arch/powerpc/kernel/signal_32.c SYSCALL_DEFINE3(debug_setcontext, struct ucontext __user *, ctx,
ctx              1313 arch/powerpc/kernel/signal_32.c 	if (!access_ok(ctx, sizeof(*ctx)) ||
ctx              1314 arch/powerpc/kernel/signal_32.c 	    fault_in_pages_readable((u8 __user *)ctx, sizeof(*ctx)))
ctx              1328 arch/powerpc/kernel/signal_32.c 	if (do_setcontext(ctx, regs, 1)) {
ctx              1334 arch/powerpc/kernel/signal_32.c 					   ctx, regs->nip, regs->link);
ctx              1347 arch/powerpc/kernel/signal_32.c 	restore_altstack(&ctx->uc_stack);
ctx               196 arch/powerpc/kernel/uprobes.c bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
ctx               199 arch/powerpc/kernel/uprobes.c 	if (ctx == RP_CHECK_CHAIN_CALL)
ctx              1742 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm_htab_ctx *ctx = file->private_data;
ctx              1743 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm *kvm = ctx->kvm;
ctx              1759 arch/powerpc/kvm/book3s_64_mmu_hv.c 	first_pass = ctx->first_pass;
ctx              1760 arch/powerpc/kvm/book3s_64_mmu_hv.c 	flags = ctx->flags;
ctx              1762 arch/powerpc/kvm/book3s_64_mmu_hv.c 	i = ctx->index;
ctx              1828 arch/powerpc/kvm/book3s_64_mmu_hv.c 			ctx->first_pass = 0;
ctx              1833 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx->index = i;
ctx              1841 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm_htab_ctx *ctx = file->private_data;
ctx              1842 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm *kvm = ctx->kvm;
ctx              1964 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm_htab_ctx *ctx = filp->private_data;
ctx              1967 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (!(ctx->flags & KVM_GET_HTAB_WRITE))
ctx              1968 arch/powerpc/kvm/book3s_64_mmu_hv.c 		atomic_dec(&ctx->kvm->arch.hpte_mod_interest);
ctx              1969 arch/powerpc/kvm/book3s_64_mmu_hv.c 	kvm_put_kvm(ctx->kvm);
ctx              1970 arch/powerpc/kvm/book3s_64_mmu_hv.c 	kfree(ctx);
ctx              1984 arch/powerpc/kvm/book3s_64_mmu_hv.c 	struct kvm_htab_ctx *ctx;
ctx              1990 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1991 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (!ctx)
ctx              1994 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx->kvm = kvm;
ctx              1995 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx->index = ghf->start_index;
ctx              1996 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx->flags = ghf->flags;
ctx              1997 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ctx->first_pass = 1;
ctx              2000 arch/powerpc/kvm/book3s_64_mmu_hv.c 	ret = anon_inode_getfd("kvm-htab", &kvm_htab_fops, ctx, rwflag | O_CLOEXEC);
ctx              2002 arch/powerpc/kvm/book3s_64_mmu_hv.c 		kfree(ctx);
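
The kvm_htab_ctx lines show the usual anon-inode lifecycle: kzalloc the ctx, initialise it from the caller's arguments, hand it to the fd as private_data via anon_inode_getfd(), kfree it on the error path, and kfree it again in the release hook once the fd is closed. A toy model of that ownership hand-off; the file struct and fd plumbing below are stand-ins, not kernel types.

#include <stdlib.h>
#include <stdio.h>

struct file { void *private_data; };

struct htab_ctx {
        int index;
        int flags;
        int first_pass;
};

static int fake_getfd(struct file *f, void *priv)
{
        f->private_data = priv;         /* fd now owns the ctx */
        return 3;                       /* pretend fd number */
}

static void htab_release(struct file *f)
{
        free(f->private_data);          /* mirrors kfree(ctx) in the release hook */
        f->private_data = NULL;
}

int main(void)
{
        struct file f = { 0 };
        struct htab_ctx *ctx = calloc(1, sizeof(*ctx));

        if (!ctx)
                return 1;
        ctx->index = 0;
        ctx->first_pass = 1;

        if (fake_getfd(&f, ctx) < 0) {
                free(ctx);              /* error path owns the ctx again */
                return 1;
        }
        htab_release(&f);               /* normally driven by close(fd) */
        return 0;
}
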
ctx                60 arch/powerpc/mm/book3s32/mmu_context.c 	unsigned long ctx = next_mmu_context;
ctx                62 arch/powerpc/mm/book3s32/mmu_context.c 	while (test_and_set_bit(ctx, context_map)) {
ctx                63 arch/powerpc/mm/book3s32/mmu_context.c 		ctx = find_next_zero_bit(context_map, LAST_CONTEXT+1, ctx);
ctx                64 arch/powerpc/mm/book3s32/mmu_context.c 		if (ctx > LAST_CONTEXT)
ctx                65 arch/powerpc/mm/book3s32/mmu_context.c 			ctx = 0;
ctx                67 arch/powerpc/mm/book3s32/mmu_context.c 	next_mmu_context = (ctx + 1) & LAST_CONTEXT;
ctx                69 arch/powerpc/mm/book3s32/mmu_context.c 	return ctx;
ctx                86 arch/powerpc/mm/book3s32/mmu_context.c void __destroy_context(unsigned long ctx)
ctx                88 arch/powerpc/mm/book3s32/mmu_context.c 	clear_bit(ctx, context_map);
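
The book3s32 allocator above is a rotating-hint bitmap: start at next_mmu_context, atomically claim the first clear bit, wrap at LAST_CONTEXT, and advance the hint past the winner. A user-space model follows; the test-and-set here is deliberately non-atomic and the zero-bit search is a simplified linear scan, where the kernel relies on the atomic bitop and find_next_zero_bit().

#include <stdio.h>

#define LAST_CONTEXT 32767

static unsigned long context_map[(LAST_CONTEXT + 1) / (8 * sizeof(unsigned long))];
static unsigned long next_mmu_context;

static int test_and_set_bit(unsigned long nr, unsigned long *map)
{
        unsigned long mask = 1UL << (nr % (8 * sizeof(unsigned long)));
        unsigned long *p = map + nr / (8 * sizeof(unsigned long));
        int old = (*p & mask) != 0;

        *p |= mask;
        return old;
}

static unsigned long alloc_context(void)
{
        unsigned long ctx = next_mmu_context;

        while (test_and_set_bit(ctx, context_map)) {
                if (++ctx > LAST_CONTEXT)       /* wrap, like the listing */
                        ctx = 0;
        }
        next_mmu_context = (ctx + 1) & LAST_CONTEXT;
        return ctx;
}

int main(void)
{
        unsigned long a = alloc_context();
        unsigned long b = alloc_context();

        printf("ctx %lu then %lu\n", a, b);
        return 0;
}
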
ctx                80 arch/powerpc/mm/book3s32/tlb.c 	unsigned int ctx = mm->context.id;
ctx                97 arch/powerpc/mm/book3s32/tlb.c 			flush_hash_pages(ctx, start, pmd_val(*pmd), count);
ctx                53 arch/powerpc/mm/book3s64/mmu_context.c static int realloc_context_ids(mm_context_t *ctx)
ctx                69 arch/powerpc/mm/book3s64/mmu_context.c 	for (i = 0; i < ARRAY_SIZE(ctx->extended_id); i++) {
ctx                70 arch/powerpc/mm/book3s64/mmu_context.c 		if (i == 0 || ctx->extended_id[i]) {
ctx                75 arch/powerpc/mm/book3s64/mmu_context.c 			ctx->extended_id[i] = id;
ctx                80 arch/powerpc/mm/book3s64/mmu_context.c 	return ctx->id;
ctx                84 arch/powerpc/mm/book3s64/mmu_context.c 		if (ctx->extended_id[i])
ctx                85 arch/powerpc/mm/book3s64/mmu_context.c 			ida_free(&mmu_context_ida, ctx->extended_id[i]);
ctx               213 arch/powerpc/mm/book3s64/mmu_context.c static void destroy_contexts(mm_context_t *ctx)
ctx               217 arch/powerpc/mm/book3s64/mmu_context.c 	for (index = 0; index < ARRAY_SIZE(ctx->extended_id); index++) {
ctx               218 arch/powerpc/mm/book3s64/mmu_context.c 		context_id = ctx->extended_id[index];
ctx               222 arch/powerpc/mm/book3s64/mmu_context.c 	kfree(ctx->hash_context);
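
realloc_context_ids() above re-allocates an id for slot 0 and for every extended_id slot that was in use, and on failure walks back down and frees the ids already claimed. A model of that allocate-with-rollback loop; toy_ida_alloc() stands in for ida_alloc()/ida_free() and is rigged to fail on the fourth id so the rollback actually runs.

#include <stdio.h>

#define NSLOTS 4

static int next_id = 1;
static int toy_ida_alloc(void) { return next_id < 4 ? next_id++ : -1; }
static void toy_ida_free(int id) { printf("freed id %d\n", id); }

static int realloc_ids(int *extended_id)
{
        int i, id;

        for (i = 0; i < NSLOTS; i++) {
                if (i == 0 || extended_id[i]) {
                        id = toy_ida_alloc();
                        if (id < 0)
                                goto error;
                        extended_id[i] = id;
                }
        }
        return extended_id[0];

error:
        for (i--; i >= 0; i--)          /* roll back what we claimed */
                if (extended_id[i])
                        toy_ida_free(extended_id[i]);
        return -1;
}

int main(void)
{
        int ids[NSLOTS] = { 1, 1, 1, 1 };       /* all slots in use pre-fork */

        printf("primary id: %d\n", realloc_ids(ids));
        return 0;
}
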
ctx                33 arch/powerpc/net/bpf_jit.h #define EMIT(instr)		PLANT_INSTR(image, ctx->idx, instr)
ctx               184 arch/powerpc/net/bpf_jit.h 				     (((dest) - (ctx->idx * 4)) & 0x03fffffc))
ctx               188 arch/powerpc/net/bpf_jit.h 					     (((dest) - (ctx->idx * 4)) &     \
ctx               241 arch/powerpc/net/bpf_jit.h 		if (is_nearbranch((dest) - (ctx->idx * 4))) {		      \
ctx               246 arch/powerpc/net/bpf_jit.h 			PPC_BCC_SHORT(cond ^ COND_CMP_TRUE, (ctx->idx+2)*4);  \
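
The branch macros above compute displacements as dest - ctx->idx * 4, i.e. relative to the byte address of the instruction being emitted, masked into the 26-bit word-aligned LI field of a PowerPC I-form branch (0x48000000 is the "b" major opcode). A worked example of that encoding arithmetic:

#include <stdio.h>

/* Encode an unconditional PPC branch from instruction index idx to
 * byte offset dest, exactly the arithmetic of the PPC_JMP macro. */
static unsigned int ppc_b(unsigned int idx, unsigned int dest)
{
        return 0x48000000u | ((dest - idx * 4) & 0x03fffffc);
}

int main(void)
{
        /* branch from instruction 10 (byte 40) to byte 64: disp = +24 */
        printf("0x%08x\n", ppc_b(10, 64));     /* 0x48000018, "b .+24" */
        /* backward branch to byte 8: disp = -32, folded by the mask */
        printf("0x%08x\n", ppc_b(10, 8));      /* 0x4bffffe0, "b .-32" */
        return 0;
}
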
ctx                25 arch/powerpc/net/bpf_jit_comp.c 				   struct codegen_context *ctx)
ctx                30 arch/powerpc/net/bpf_jit_comp.c 	if (ctx->seen & (SEEN_MEM | SEEN_DATAREF)) {
ctx                32 arch/powerpc/net/bpf_jit_comp.c 		if (ctx->seen & SEEN_DATAREF) {
ctx                41 arch/powerpc/net/bpf_jit_comp.c 		if (ctx->seen & SEEN_MEM) {
ctx                47 arch/powerpc/net/bpf_jit_comp.c 				if (ctx->seen & (1 << (i-r_M)))
ctx                54 arch/powerpc/net/bpf_jit_comp.c 	if (ctx->seen & SEEN_DATAREF) {
ctx                68 arch/powerpc/net/bpf_jit_comp.c 	if (ctx->seen & SEEN_XREG) {
ctx                81 arch/powerpc/net/bpf_jit_comp.c static void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx)
ctx                85 arch/powerpc/net/bpf_jit_comp.c 	if (ctx->seen & (SEEN_MEM | SEEN_DATAREF)) {
ctx                87 arch/powerpc/net/bpf_jit_comp.c 		if (ctx->seen & SEEN_DATAREF) {
ctx                93 arch/powerpc/net/bpf_jit_comp.c 		if (ctx->seen & SEEN_MEM) {
ctx                96 arch/powerpc/net/bpf_jit_comp.c 				if (ctx->seen & (1 << (i-r_M)))
ctx               111 arch/powerpc/net/bpf_jit_comp.c 			      struct codegen_context *ctx,
ctx               131 arch/powerpc/net/bpf_jit_comp.c 		addrs[i] = ctx->idx * 4;
ctx               136 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               147 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               158 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               171 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               173 arch/powerpc/net/bpf_jit_comp.c 			if (ctx->pc_ret0 != -1) {
ctx               174 arch/powerpc/net/bpf_jit_comp.c 				PPC_BCC(COND_EQ, addrs[ctx->pc_ret0]);
ctx               176 arch/powerpc/net/bpf_jit_comp.c 				PPC_BCC_SHORT(COND_NE, (ctx->idx*4)+12);
ctx               201 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               213 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               224 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               234 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               244 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               259 arch/powerpc/net/bpf_jit_comp.c 				if (ctx->pc_ret0 == -1)
ctx               260 arch/powerpc/net/bpf_jit_comp.c 					ctx->pc_ret0 = i;
ctx               277 arch/powerpc/net/bpf_jit_comp.c 				if (ctx->seen)
ctx               286 arch/powerpc/net/bpf_jit_comp.c 				if (ctx->seen)
ctx               296 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG;
ctx               309 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_MEM | (1<<(K & 0xf));
ctx               313 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_MEM | (1<<(K & 0xf));
ctx               317 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_MEM | (1<<(K & 0xf));
ctx               321 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_XREG | SEEN_MEM | (1<<(K & 0xf));
ctx               350 arch/powerpc/net/bpf_jit_comp.c 			if (ctx->pc_ret0 != -1) {
ctx               351 arch/powerpc/net/bpf_jit_comp.c 				PPC_BCC(COND_EQ, addrs[ctx->pc_ret0]);
ctx               354 arch/powerpc/net/bpf_jit_comp.c 				PPC_BCC_SHORT(COND_NE, ctx->idx * 4 + 12);
ctx               415 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_DATAREF;
ctx               441 arch/powerpc/net/bpf_jit_comp.c 			ctx->seen |= SEEN_DATAREF | SEEN_XREG;
ctx               491 arch/powerpc/net/bpf_jit_comp.c 				ctx->seen |= SEEN_XREG;
ctx               495 arch/powerpc/net/bpf_jit_comp.c 				ctx->seen |= SEEN_XREG;
ctx               546 arch/powerpc/net/bpf_jit_comp.c 	addrs[i] = ctx->idx * 4;
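
bpf_jit_comp.c runs the body twice: with image == NULL the EMIT()/PLANT_INSTR macro only advances ctx->idx, so the first pass measures the program and records per-instruction byte offsets in addrs[], and the second pass emits into a buffer of exactly that size. A structural sketch of the two-pass scheme with a three-"instruction" dummy body:

#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

struct codegen_context { unsigned int idx; };

/* stores only when an image exists, always advances idx */
#define EMIT(instr) do {                        \
        if (image)                              \
                image[ctx->idx] = (instr);      \
        ctx->idx++;                             \
} while (0)

static void build_body(uint32_t *image, struct codegen_context *ctx,
                       unsigned int *addrs)
{
        int i;

        for (i = 0; i < 3; i++) {
                addrs[i] = ctx->idx * 4;        /* byte offset of insn i */
                EMIT(0x60000000);               /* ppc nop, as a stand-in */
                if (i == 1)
                        EMIT(0x4e800020);       /* some insns expand to >1 op */
        }
}

int main(void)
{
        struct codegen_context ctx = { 0 };
        unsigned int addrs[3];
        uint32_t *image;

        build_body(NULL, &ctx, addrs);          /* pass 1: size only */
        image = malloc(ctx.idx * sizeof(*image));
        if (!image)
                return 1;
        ctx.idx = 0;
        build_body(image, &ctx, addrs);         /* pass 2: really emit */
        printf("%u words, insn 2 at byte %u\n", ctx.idx, addrs[2]);
        free(image);
        return 0;
}
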
ctx                32 arch/powerpc/net/bpf_jit_comp64.c static inline bool bpf_is_seen_register(struct codegen_context *ctx, int i)
ctx                34 arch/powerpc/net/bpf_jit_comp64.c 	return (ctx->seen & (1 << (31 - b2p[i])));
ctx                37 arch/powerpc/net/bpf_jit_comp64.c static inline void bpf_set_seen_register(struct codegen_context *ctx, int i)
ctx                39 arch/powerpc/net/bpf_jit_comp64.c 	ctx->seen |= (1 << (31 - b2p[i]));
ctx                42 arch/powerpc/net/bpf_jit_comp64.c static inline bool bpf_has_stack_frame(struct codegen_context *ctx)
ctx                50 arch/powerpc/net/bpf_jit_comp64.c 	return ctx->seen & SEEN_FUNC || bpf_is_seen_register(ctx, BPF_REG_FP);
ctx                64 arch/powerpc/net/bpf_jit_comp64.c static int bpf_jit_stack_local(struct codegen_context *ctx)
ctx                66 arch/powerpc/net/bpf_jit_comp64.c 	if (bpf_has_stack_frame(ctx))
ctx                67 arch/powerpc/net/bpf_jit_comp64.c 		return STACK_FRAME_MIN_SIZE + ctx->stack_size;
ctx                72 arch/powerpc/net/bpf_jit_comp64.c static int bpf_jit_stack_tailcallcnt(struct codegen_context *ctx)
ctx                74 arch/powerpc/net/bpf_jit_comp64.c 	return bpf_jit_stack_local(ctx) + 8;
ctx                77 arch/powerpc/net/bpf_jit_comp64.c static int bpf_jit_stack_offsetof(struct codegen_context *ctx, int reg)
ctx                80 arch/powerpc/net/bpf_jit_comp64.c 		return (bpf_has_stack_frame(ctx) ?
ctx                81 arch/powerpc/net/bpf_jit_comp64.c 			(BPF_PPC_STACKFRAME + ctx->stack_size) : 0)
ctx                88 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_build_prologue(u32 *image, struct codegen_context *ctx)
ctx                97 arch/powerpc/net/bpf_jit_comp64.c 	if (ctx->seen & SEEN_TAILCALL) {
ctx               108 arch/powerpc/net/bpf_jit_comp64.c 	if (bpf_has_stack_frame(ctx)) {
ctx               113 arch/powerpc/net/bpf_jit_comp64.c 		if (ctx->seen & SEEN_FUNC) {
ctx               118 arch/powerpc/net/bpf_jit_comp64.c 		PPC_BPF_STLU(1, 1, -(BPF_PPC_STACKFRAME + ctx->stack_size));
ctx               127 arch/powerpc/net/bpf_jit_comp64.c 		if (bpf_is_seen_register(ctx, i))
ctx               128 arch/powerpc/net/bpf_jit_comp64.c 			PPC_BPF_STL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));
ctx               131 arch/powerpc/net/bpf_jit_comp64.c 	if (bpf_is_seen_register(ctx, BPF_REG_FP))
ctx               133 arch/powerpc/net/bpf_jit_comp64.c 				STACK_FRAME_MIN_SIZE + ctx->stack_size);
ctx               136 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_emit_common_epilogue(u32 *image, struct codegen_context *ctx)
ctx               142 arch/powerpc/net/bpf_jit_comp64.c 		if (bpf_is_seen_register(ctx, i))
ctx               143 arch/powerpc/net/bpf_jit_comp64.c 			PPC_BPF_LL(b2p[i], 1, bpf_jit_stack_offsetof(ctx, b2p[i]));
ctx               146 arch/powerpc/net/bpf_jit_comp64.c 	if (bpf_has_stack_frame(ctx)) {
ctx               147 arch/powerpc/net/bpf_jit_comp64.c 		PPC_ADDI(1, 1, BPF_PPC_STACKFRAME + ctx->stack_size);
ctx               148 arch/powerpc/net/bpf_jit_comp64.c 		if (ctx->seen & SEEN_FUNC) {
ctx               155 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_build_epilogue(u32 *image, struct codegen_context *ctx)
ctx               157 arch/powerpc/net/bpf_jit_comp64.c 	bpf_jit_emit_common_epilogue(image, ctx);
ctx               165 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_emit_func_call_hlp(u32 *image, struct codegen_context *ctx,
ctx               190 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_emit_func_call_rel(u32 *image, struct codegen_context *ctx,
ctx               193 arch/powerpc/net/bpf_jit_comp64.c 	unsigned int i, ctx_idx = ctx->idx;
ctx               208 arch/powerpc/net/bpf_jit_comp64.c 	for (i = ctx->idx - ctx_idx; i < 5; i++)
ctx               227 arch/powerpc/net/bpf_jit_comp64.c static void bpf_jit_emit_tail_call(u32 *image, struct codegen_context *ctx, u32 out)
ctx               251 arch/powerpc/net/bpf_jit_comp64.c 	PPC_BPF_LL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));
ctx               259 arch/powerpc/net/bpf_jit_comp64.c 	PPC_BPF_STL(b2p[TMP_REG_1], 1, bpf_jit_stack_tailcallcnt(ctx));
ctx               285 arch/powerpc/net/bpf_jit_comp64.c 	bpf_jit_emit_common_epilogue(image, ctx);
ctx               293 arch/powerpc/net/bpf_jit_comp64.c 			      struct codegen_context *ctx,
ctx               319 arch/powerpc/net/bpf_jit_comp64.c 		addrs[i] = ctx->idx * 4;
ctx               333 arch/powerpc/net/bpf_jit_comp64.c 			bpf_set_seen_register(ctx, insn[i].dst_reg);
ctx               335 arch/powerpc/net/bpf_jit_comp64.c 			bpf_set_seen_register(ctx, insn[i].src_reg);
ctx               505 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               514 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               523 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               531 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               569 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               621 arch/powerpc/net/bpf_jit_comp64.c 				PPC_BPF_STL(dst_reg, 1, bpf_jit_stack_local(ctx));
ctx               622 arch/powerpc/net/bpf_jit_comp64.c 				PPC_ADDI(b2p[TMP_REG_1], 1, bpf_jit_stack_local(ctx));
ctx               634 arch/powerpc/net/bpf_jit_comp64.c 					addrs[++i] = ctx->idx * 4;
ctx               690 arch/powerpc/net/bpf_jit_comp64.c 			tmp_idx = ctx->idx * 4;
ctx               703 arch/powerpc/net/bpf_jit_comp64.c 			tmp_idx = ctx->idx * 4;
ctx               717 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               723 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               729 arch/powerpc/net/bpf_jit_comp64.c 				addrs[++i] = ctx->idx * 4;
ctx               744 arch/powerpc/net/bpf_jit_comp64.c 			addrs[++i] = ctx->idx * 4;
ctx               766 arch/powerpc/net/bpf_jit_comp64.c 			ctx->seen |= SEEN_FUNC;
ctx               774 arch/powerpc/net/bpf_jit_comp64.c 				bpf_jit_emit_func_call_hlp(image, ctx, func_addr);
ctx               776 arch/powerpc/net/bpf_jit_comp64.c 				bpf_jit_emit_func_call_rel(image, ctx, func_addr);
ctx               991 arch/powerpc/net/bpf_jit_comp64.c 			ctx->seen |= SEEN_TAILCALL;
ctx               992 arch/powerpc/net/bpf_jit_comp64.c 			bpf_jit_emit_tail_call(image, ctx, addrs[i + 1]);
ctx              1008 arch/powerpc/net/bpf_jit_comp64.c 	addrs[i] = ctx->idx * 4;
ctx              1015 arch/powerpc/net/bpf_jit_comp64.c 				       struct codegen_context *ctx, u32 *addrs)
ctx              1047 arch/powerpc/net/bpf_jit_comp64.c 			tmp_idx = ctx->idx;
ctx              1048 arch/powerpc/net/bpf_jit_comp64.c 			ctx->idx = addrs[i] / 4;
ctx              1049 arch/powerpc/net/bpf_jit_comp64.c 			bpf_jit_emit_func_call_rel(image, ctx, func_addr);
ctx              1055 arch/powerpc/net/bpf_jit_comp64.c 			ctx->idx = tmp_idx;
ctx              1067 arch/powerpc/net/bpf_jit_comp64.c 	struct codegen_context ctx;
ctx              1117 arch/powerpc/net/bpf_jit_comp64.c 		cgctx = jit_data->ctx;
ctx              1234 arch/powerpc/net/bpf_jit_comp64.c 		jit_data->ctx = cgctx;
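
The 64-bit JIT tracks which mapped PPC registers the program body touched as one bit per register, 1 << (31 - b2p[i]), in ctx->seen, and the prologue/epilogue only spill and reload the non-volatile registers whose bit is set. A small model of that bookkeeping; the b2p[] values below are an illustrative subset of the BPF-to-PPC register map, not the authoritative table.

#include <stdio.h>

struct codegen_context { unsigned int seen; };

/* BPF reg -> PPC reg, illustrative subset */
static const int b2p[] = { 8, 3, 4, 5, 6, 7, 27, 28, 29, 30, 31 };

static int bpf_is_seen_register(struct codegen_context *ctx, int i)
{
        return ctx->seen & (1u << (31 - b2p[i]));
}

static void bpf_set_seen_register(struct codegen_context *ctx, int i)
{
        ctx->seen |= 1u << (31 - b2p[i]);
}

int main(void)
{
        struct codegen_context ctx = { 0 };

        bpf_set_seen_register(&ctx, 6); /* body used BPF_REG_6 -> r27 */
        printf("r27 %s be saved in the prologue\n",
               bpf_is_seen_register(&ctx, 6) ? "will" : "won't");
        printf("r28 %s\n",
               bpf_is_seen_register(&ctx, 7) ? "seen" : "not seen");
        return 0;
}
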
ctx               169 arch/powerpc/oprofile/cell/spu_task_sync.c 		ref = spu_get_profile_private_kref(the_spu->ctx);
ctx               240 arch/powerpc/oprofile/cell/spu_task_sync.c 	spu_set_profile_private_kref(spu->ctx, &info->cache_ref,
ctx               129 arch/powerpc/perf/core-book3s.c static void power_pmu_sched_task(struct perf_event_context *ctx, bool sched_in) {}
ctx               375 arch/powerpc/perf/core-book3s.c 	if (event->ctx->task && cpuhw->bhrb_context != event->ctx) {
ctx               377 arch/powerpc/perf/core-book3s.c 		cpuhw->bhrb_context = event->ctx;
ctx               380 arch/powerpc/perf/core-book3s.c 	perf_sched_cb_inc(event->ctx->pmu);
ctx               392 arch/powerpc/perf/core-book3s.c 	perf_sched_cb_dec(event->ctx->pmu);
ctx               407 arch/powerpc/perf/core-book3s.c static void power_pmu_sched_task(struct perf_event_context *ctx, bool sched_in)
ctx               349 arch/powerpc/platforms/cell/spu_manage.c static void enable_spu_by_master_run(struct spu_context *ctx)
ctx               351 arch/powerpc/platforms/cell/spu_manage.c 	ctx->ops->master_start(ctx);
ctx               354 arch/powerpc/platforms/cell/spu_manage.c static void disable_spu_by_master_run(struct spu_context *ctx)
ctx               356 arch/powerpc/platforms/cell/spu_manage.c 	ctx->ops->master_stop(ctx);
ctx                18 arch/powerpc/platforms/cell/spu_notify.c void spu_switch_notify(struct spu *spu, struct spu_context *ctx)
ctx                21 arch/powerpc/platforms/cell/spu_notify.c 				     ctx ? ctx->object_id : 0, spu);
ctx                41 arch/powerpc/platforms/cell/spu_notify.c void spu_set_profile_private_kref(struct spu_context *ctx,
ctx                45 arch/powerpc/platforms/cell/spu_notify.c 	ctx->prof_priv_kref = prof_info_kref;
ctx                46 arch/powerpc/platforms/cell/spu_notify.c 	ctx->prof_priv_release = prof_info_release;
ctx                50 arch/powerpc/platforms/cell/spu_notify.c void *spu_get_profile_private_kref(struct spu_context *ctx)
ctx                52 arch/powerpc/platforms/cell/spu_notify.c 	return ctx->prof_priv_kref;
ctx                34 arch/powerpc/platforms/cell/spufs/backing_ops.c static void gen_spu_event(struct spu_context *ctx, u32 event)
ctx                40 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];
ctx                41 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ch0_data = ctx->csa.spu_chnldata_RW[0];
ctx                42 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ch1_data = ctx->csa.spu_chnldata_RW[1];
ctx                43 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.spu_chnldata_RW[0] |= event;
ctx                45 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnlcnt_RW[0] = 1;
ctx                49 arch/powerpc/platforms/cell/spufs/backing_ops.c static int spu_backing_mbox_read(struct spu_context *ctx, u32 * data)
ctx                54 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx                55 arch/powerpc/platforms/cell/spufs/backing_ops.c 	mbox_stat = ctx->csa.prob.mb_stat_R;
ctx                61 arch/powerpc/platforms/cell/spufs/backing_ops.c 		*data = ctx->csa.prob.pu_mb_R;
ctx                62 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.mb_stat_R &= ~(0x0000ff);
ctx                63 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnlcnt_RW[28] = 1;
ctx                64 arch/powerpc/platforms/cell/spufs/backing_ops.c 		gen_spu_event(ctx, MFC_PU_MAILBOX_AVAILABLE_EVENT);
ctx                67 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx                71 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_mbox_stat_read(struct spu_context *ctx)
ctx                73 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.mb_stat_R;
ctx                76 arch/powerpc/platforms/cell/spufs/backing_ops.c static __poll_t spu_backing_mbox_stat_poll(struct spu_context *ctx,
ctx                83 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock_irq(&ctx->csa.register_lock);
ctx                84 arch/powerpc/platforms/cell/spufs/backing_ops.c 	stat = ctx->csa.prob.mb_stat_R;
ctx                95 arch/powerpc/platforms/cell/spufs/backing_ops.c 			ctx->csa.priv1.int_stat_class2_RW &=
ctx                97 arch/powerpc/platforms/cell/spufs/backing_ops.c 			ctx->csa.priv1.int_mask_class2_RW |=
ctx               105 arch/powerpc/platforms/cell/spufs/backing_ops.c 			ctx->csa.priv1.int_stat_class2_RW &=
ctx               107 arch/powerpc/platforms/cell/spufs/backing_ops.c 			ctx->csa.priv1.int_mask_class2_RW |=
ctx               111 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock_irq(&ctx->csa.register_lock);
ctx               115 arch/powerpc/platforms/cell/spufs/backing_ops.c static int spu_backing_ibox_read(struct spu_context *ctx, u32 * data)
ctx               119 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               120 arch/powerpc/platforms/cell/spufs/backing_ops.c 	if (ctx->csa.prob.mb_stat_R & 0xff0000) {
ctx               125 arch/powerpc/platforms/cell/spufs/backing_ops.c 		*data = ctx->csa.priv2.puint_mb_R;
ctx               126 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.mb_stat_R &= ~(0xff0000);
ctx               127 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnlcnt_RW[30] = 1;
ctx               128 arch/powerpc/platforms/cell/spufs/backing_ops.c 		gen_spu_event(ctx, MFC_PU_INT_MAILBOX_AVAILABLE_EVENT);
ctx               132 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.priv1.int_mask_class2_RW |= CLASS2_ENABLE_MAILBOX_INTR;
ctx               135 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               139 arch/powerpc/platforms/cell/spufs/backing_ops.c static int spu_backing_wbox_write(struct spu_context *ctx, u32 data)
ctx               143 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               144 arch/powerpc/platforms/cell/spufs/backing_ops.c 	if ((ctx->csa.prob.mb_stat_R) & 0x00ff00) {
ctx               145 arch/powerpc/platforms/cell/spufs/backing_ops.c 		int slot = ctx->csa.spu_chnlcnt_RW[29];
ctx               146 arch/powerpc/platforms/cell/spufs/backing_ops.c 		int avail = (ctx->csa.prob.mb_stat_R & 0x00ff00) >> 8;
ctx               153 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_mailbox_data[slot] = data;
ctx               154 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnlcnt_RW[29] = ++slot;
ctx               155 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.mb_stat_R &= ~(0x00ff00);
ctx               156 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.mb_stat_R |= (((4 - slot) & 0xff) << 8);
ctx               157 arch/powerpc/platforms/cell/spufs/backing_ops.c 		gen_spu_event(ctx, MFC_SPU_MAILBOX_WRITTEN_EVENT);
ctx               162 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.priv1.int_mask_class2_RW |=
ctx               166 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               170 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_signal1_read(struct spu_context *ctx)
ctx               172 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.spu_chnldata_RW[3];
ctx               175 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_signal1_write(struct spu_context *ctx, u32 data)
ctx               177 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               178 arch/powerpc/platforms/cell/spufs/backing_ops.c 	if (ctx->csa.priv2.spu_cfg_RW & 0x1)
ctx               179 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnldata_RW[3] |= data;
ctx               181 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnldata_RW[3] = data;
ctx               182 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.spu_chnlcnt_RW[3] = 1;
ctx               183 arch/powerpc/platforms/cell/spufs/backing_ops.c 	gen_spu_event(ctx, MFC_SIGNAL_1_EVENT);
ctx               184 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               187 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_signal2_read(struct spu_context *ctx)
ctx               189 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.spu_chnldata_RW[4];
ctx               192 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_signal2_write(struct spu_context *ctx, u32 data)
ctx               194 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               195 arch/powerpc/platforms/cell/spufs/backing_ops.c 	if (ctx->csa.priv2.spu_cfg_RW & 0x2)
ctx               196 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnldata_RW[4] |= data;
ctx               198 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.spu_chnldata_RW[4] = data;
ctx               199 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.spu_chnlcnt_RW[4] = 1;
ctx               200 arch/powerpc/platforms/cell/spufs/backing_ops.c 	gen_spu_event(ctx, MFC_SIGNAL_2_EVENT);
ctx               201 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               204 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_signal1_type_set(struct spu_context *ctx, u64 val)
ctx               208 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               209 arch/powerpc/platforms/cell/spufs/backing_ops.c 	tmp = ctx->csa.priv2.spu_cfg_RW;
ctx               214 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.priv2.spu_cfg_RW = tmp;
ctx               215 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               218 arch/powerpc/platforms/cell/spufs/backing_ops.c static u64 spu_backing_signal1_type_get(struct spu_context *ctx)
ctx               220 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ((ctx->csa.priv2.spu_cfg_RW & 1) != 0);
ctx               223 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_signal2_type_set(struct spu_context *ctx, u64 val)
ctx               227 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               228 arch/powerpc/platforms/cell/spufs/backing_ops.c 	tmp = ctx->csa.priv2.spu_cfg_RW;
ctx               233 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.priv2.spu_cfg_RW = tmp;
ctx               234 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               237 arch/powerpc/platforms/cell/spufs/backing_ops.c static u64 spu_backing_signal2_type_get(struct spu_context *ctx)
ctx               239 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ((ctx->csa.priv2.spu_cfg_RW & 2) != 0);
ctx               242 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_npc_read(struct spu_context *ctx)
ctx               244 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.spu_npc_RW;
ctx               247 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_npc_write(struct spu_context *ctx, u32 val)
ctx               249 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.prob.spu_npc_RW = val;
ctx               252 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_status_read(struct spu_context *ctx)
ctx               254 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.spu_status_R;
ctx               257 arch/powerpc/platforms/cell/spufs/backing_ops.c static char *spu_backing_get_ls(struct spu_context *ctx)
ctx               259 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.lscsa->ls;
ctx               262 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_privcntl_write(struct spu_context *ctx, u64 val)
ctx               264 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.priv2.spu_privcntl_RW = val;
ctx               267 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_runcntl_read(struct spu_context *ctx)
ctx               269 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.spu_runcntl_RW;
ctx               272 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_runcntl_write(struct spu_context *ctx, u32 val)
ctx               274 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               275 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.prob.spu_runcntl_RW = val;
ctx               277 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.spu_status_R &=
ctx               283 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.spu_status_R |= SPU_STATUS_RUNNING;
ctx               285 arch/powerpc/platforms/cell/spufs/backing_ops.c 		ctx->csa.prob.spu_status_R &= ~SPU_STATUS_RUNNING;
ctx               287 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               290 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_runcntl_stop(struct spu_context *ctx)
ctx               292 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spu_backing_runcntl_write(ctx, SPU_RUNCNTL_STOP);
ctx               295 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_master_start(struct spu_context *ctx)
ctx               297 arch/powerpc/platforms/cell/spufs/backing_ops.c 	struct spu_state *csa = &ctx->csa;
ctx               306 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_master_stop(struct spu_context *ctx)
ctx               308 arch/powerpc/platforms/cell/spufs/backing_ops.c 	struct spu_state *csa = &ctx->csa;
ctx               317 arch/powerpc/platforms/cell/spufs/backing_ops.c static int spu_backing_set_mfc_query(struct spu_context * ctx, u32 mask,
ctx               320 arch/powerpc/platforms/cell/spufs/backing_ops.c 	struct spu_problem_collapsed *prob = &ctx->csa.prob;
ctx               323 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               336 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.prob.dma_tagstatus_R &= mask;
ctx               338 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               343 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_read_mfc_tagstatus(struct spu_context * ctx)
ctx               345 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.dma_tagstatus_R;
ctx               348 arch/powerpc/platforms/cell/spufs/backing_ops.c static u32 spu_backing_get_mfc_free_elements(struct spu_context *ctx)
ctx               350 arch/powerpc/platforms/cell/spufs/backing_ops.c 	return ctx->csa.prob.dma_qstatus_R;
ctx               353 arch/powerpc/platforms/cell/spufs/backing_ops.c static int spu_backing_send_mfc_command(struct spu_context *ctx,
ctx               358 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_lock(&ctx->csa.register_lock);
ctx               361 arch/powerpc/platforms/cell/spufs/backing_ops.c 	spin_unlock(&ctx->csa.register_lock);
ctx               366 arch/powerpc/platforms/cell/spufs/backing_ops.c static void spu_backing_restart_dma(struct spu_context *ctx)
ctx               368 arch/powerpc/platforms/cell/spufs/backing_ops.c 	ctx->csa.priv2.mfc_control_RW |= MFC_CNTL_RESTART_DMA_COMMAND;
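
backing_ops.c services a saved (descheduled) context entirely from its software save area: each op takes csa.register_lock, tests the emulated status register, and updates it the way the hardware would. A model of the mailbox-read case, keeping the mb_stat_R low-byte-is-count layout from the listing; the locking uses pthreads and the 4-byte return value is only assumed to match the spufs convention.

#include <stdio.h>
#include <pthread.h>

struct csa {
        pthread_mutex_t register_lock;
        unsigned int mb_stat_R;         /* bits 0-7: PU mailbox count */
        unsigned int pu_mb_R;           /* the mailbox data itself */
};

static int backing_mbox_read(struct csa *csa, unsigned int *data)
{
        int ret = 0;

        pthread_mutex_lock(&csa->register_lock);
        if (csa->mb_stat_R & 0x0000ff) {
                *data = csa->pu_mb_R;
                csa->mb_stat_R &= ~0x0000ff;    /* mailbox now empty */
                ret = 4;                        /* bytes read */
        }
        pthread_mutex_unlock(&csa->register_lock);
        return ret;
}

int main(void)
{
        struct csa csa = { PTHREAD_MUTEX_INITIALIZER, 0x1, 0xdeadbeef };
        unsigned int v;

        if (backing_mbox_read(&csa, &v))
                printf("mbox: %#x\n", v);
        printf("second read: %d bytes\n", backing_mbox_read(&csa, &v));
        return 0;
}
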
ctx                27 arch/powerpc/platforms/cell/spufs/context.c 	struct spu_context *ctx;
ctx                29 arch/powerpc/platforms/cell/spufs/context.c 	ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
ctx                30 arch/powerpc/platforms/cell/spufs/context.c 	if (!ctx)
ctx                35 arch/powerpc/platforms/cell/spufs/context.c 	if (spu_init_csa(&ctx->csa))
ctx                37 arch/powerpc/platforms/cell/spufs/context.c 	spin_lock_init(&ctx->mmio_lock);
ctx                38 arch/powerpc/platforms/cell/spufs/context.c 	mutex_init(&ctx->mapping_lock);
ctx                39 arch/powerpc/platforms/cell/spufs/context.c 	kref_init(&ctx->kref);
ctx                40 arch/powerpc/platforms/cell/spufs/context.c 	mutex_init(&ctx->state_mutex);
ctx                41 arch/powerpc/platforms/cell/spufs/context.c 	mutex_init(&ctx->run_mutex);
ctx                42 arch/powerpc/platforms/cell/spufs/context.c 	init_waitqueue_head(&ctx->ibox_wq);
ctx                43 arch/powerpc/platforms/cell/spufs/context.c 	init_waitqueue_head(&ctx->wbox_wq);
ctx                44 arch/powerpc/platforms/cell/spufs/context.c 	init_waitqueue_head(&ctx->stop_wq);
ctx                45 arch/powerpc/platforms/cell/spufs/context.c 	init_waitqueue_head(&ctx->mfc_wq);
ctx                46 arch/powerpc/platforms/cell/spufs/context.c 	init_waitqueue_head(&ctx->run_wq);
ctx                47 arch/powerpc/platforms/cell/spufs/context.c 	ctx->state = SPU_STATE_SAVED;
ctx                48 arch/powerpc/platforms/cell/spufs/context.c 	ctx->ops = &spu_backing_ops;
ctx                49 arch/powerpc/platforms/cell/spufs/context.c 	ctx->owner = get_task_mm(current);
ctx                50 arch/powerpc/platforms/cell/spufs/context.c 	INIT_LIST_HEAD(&ctx->rq);
ctx                51 arch/powerpc/platforms/cell/spufs/context.c 	INIT_LIST_HEAD(&ctx->aff_list);
ctx                53 arch/powerpc/platforms/cell/spufs/context.c 		spu_gang_add_ctx(gang, ctx);
ctx                55 arch/powerpc/platforms/cell/spufs/context.c 	__spu_update_sched_info(ctx);
ctx                56 arch/powerpc/platforms/cell/spufs/context.c 	spu_set_timeslice(ctx);
ctx                57 arch/powerpc/platforms/cell/spufs/context.c 	ctx->stats.util_state = SPU_UTIL_IDLE_LOADED;
ctx                58 arch/powerpc/platforms/cell/spufs/context.c 	ctx->stats.tstamp = ktime_get_ns();
ctx                63 arch/powerpc/platforms/cell/spufs/context.c 	kfree(ctx);
ctx                64 arch/powerpc/platforms/cell/spufs/context.c 	ctx = NULL;
ctx                66 arch/powerpc/platforms/cell/spufs/context.c 	return ctx;
ctx                71 arch/powerpc/platforms/cell/spufs/context.c 	struct spu_context *ctx;
ctx                72 arch/powerpc/platforms/cell/spufs/context.c 	ctx = container_of(kref, struct spu_context, kref);
ctx                73 arch/powerpc/platforms/cell/spufs/context.c 	spu_context_nospu_trace(destroy_spu_context__enter, ctx);
ctx                74 arch/powerpc/platforms/cell/spufs/context.c 	mutex_lock(&ctx->state_mutex);
ctx                75 arch/powerpc/platforms/cell/spufs/context.c 	spu_deactivate(ctx);
ctx                76 arch/powerpc/platforms/cell/spufs/context.c 	mutex_unlock(&ctx->state_mutex);
ctx                77 arch/powerpc/platforms/cell/spufs/context.c 	spu_fini_csa(&ctx->csa);
ctx                78 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->gang)
ctx                79 arch/powerpc/platforms/cell/spufs/context.c 		spu_gang_remove_ctx(ctx->gang, ctx);
ctx                80 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->prof_priv_kref)
ctx                81 arch/powerpc/platforms/cell/spufs/context.c 		kref_put(ctx->prof_priv_kref, ctx->prof_priv_release);
ctx                82 arch/powerpc/platforms/cell/spufs/context.c 	BUG_ON(!list_empty(&ctx->rq));
ctx                84 arch/powerpc/platforms/cell/spufs/context.c 	kfree(ctx->switch_log);
ctx                85 arch/powerpc/platforms/cell/spufs/context.c 	kfree(ctx);
ctx                88 arch/powerpc/platforms/cell/spufs/context.c struct spu_context * get_spu_context(struct spu_context *ctx)
ctx                90 arch/powerpc/platforms/cell/spufs/context.c 	kref_get(&ctx->kref);
ctx                91 arch/powerpc/platforms/cell/spufs/context.c 	return ctx;
ctx                94 arch/powerpc/platforms/cell/spufs/context.c int put_spu_context(struct spu_context *ctx)
ctx                96 arch/powerpc/platforms/cell/spufs/context.c 	return kref_put(&ctx->kref, &destroy_spu_context);
ctx               100 arch/powerpc/platforms/cell/spufs/context.c void spu_forget(struct spu_context *ctx)
ctx               109 arch/powerpc/platforms/cell/spufs/context.c 	mutex_lock(&ctx->state_mutex);
ctx               110 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->state != SPU_STATE_SAVED)
ctx               111 arch/powerpc/platforms/cell/spufs/context.c 		spu_deactivate(ctx);
ctx               113 arch/powerpc/platforms/cell/spufs/context.c 	mm = ctx->owner;
ctx               114 arch/powerpc/platforms/cell/spufs/context.c 	ctx->owner = NULL;
ctx               116 arch/powerpc/platforms/cell/spufs/context.c 	spu_release(ctx);
ctx               119 arch/powerpc/platforms/cell/spufs/context.c void spu_unmap_mappings(struct spu_context *ctx)
ctx               121 arch/powerpc/platforms/cell/spufs/context.c 	mutex_lock(&ctx->mapping_lock);
ctx               122 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->local_store)
ctx               123 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->local_store, 0, LS_SIZE, 1);
ctx               124 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->mfc)
ctx               125 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->mfc, 0, SPUFS_MFC_MAP_SIZE, 1);
ctx               126 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->cntl)
ctx               127 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->cntl, 0, SPUFS_CNTL_MAP_SIZE, 1);
ctx               128 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->signal1)
ctx               129 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->signal1, 0, SPUFS_SIGNAL_MAP_SIZE, 1);
ctx               130 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->signal2)
ctx               131 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->signal2, 0, SPUFS_SIGNAL_MAP_SIZE, 1);
ctx               132 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->mss)
ctx               133 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->mss, 0, SPUFS_MSS_MAP_SIZE, 1);
ctx               134 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->psmap)
ctx               135 arch/powerpc/platforms/cell/spufs/context.c 		unmap_mapping_range(ctx->psmap, 0, SPUFS_PS_MAP_SIZE, 1);
ctx               136 arch/powerpc/platforms/cell/spufs/context.c 	mutex_unlock(&ctx->mapping_lock);
ctx               143 arch/powerpc/platforms/cell/spufs/context.c int spu_acquire_saved(struct spu_context *ctx)
ctx               147 arch/powerpc/platforms/cell/spufs/context.c 	spu_context_nospu_trace(spu_acquire_saved__enter, ctx);
ctx               149 arch/powerpc/platforms/cell/spufs/context.c 	ret = spu_acquire(ctx);
ctx               153 arch/powerpc/platforms/cell/spufs/context.c 	if (ctx->state != SPU_STATE_SAVED) {
ctx               154 arch/powerpc/platforms/cell/spufs/context.c 		set_bit(SPU_SCHED_WAS_ACTIVE, &ctx->sched_flags);
ctx               155 arch/powerpc/platforms/cell/spufs/context.c 		spu_deactivate(ctx);
ctx               165 arch/powerpc/platforms/cell/spufs/context.c void spu_release_saved(struct spu_context *ctx)
ctx               167 arch/powerpc/platforms/cell/spufs/context.c 	BUG_ON(ctx->state != SPU_STATE_SAVED);
ctx               169 arch/powerpc/platforms/cell/spufs/context.c 	if (test_and_clear_bit(SPU_SCHED_WAS_ACTIVE, &ctx->sched_flags) &&
ctx               170 arch/powerpc/platforms/cell/spufs/context.c 			test_bit(SPU_SCHED_SPU_RUN, &ctx->sched_flags))
ctx               171 arch/powerpc/platforms/cell/spufs/context.c 		spu_activate(ctx, 0);
ctx               173 arch/powerpc/platforms/cell/spufs/context.c 	spu_release(ctx);
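
context.c ties the spu_context lifetime to a kref: kref_init() at creation, get_spu_context()/put_spu_context() for references, and destroy_spu_context() recovering the outer object via container_of once the count drops to zero. A user-space model with a plain C11 atomic standing in for the kernel's kref:

#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>
#include <stdatomic.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct kref { atomic_int refcount; };

struct spu_context {
        struct kref kref;
        int state;
};

static void destroy_spu_context(struct kref *kref)
{
        struct spu_context *ctx = container_of(kref, struct spu_context, kref);

        printf("destroying ctx in state %d\n", ctx->state);
        free(ctx);
}

static struct spu_context *get_spu_context(struct spu_context *ctx)
{
        atomic_fetch_add(&ctx->kref.refcount, 1);
        return ctx;
}

static int put_spu_context(struct spu_context *ctx)
{
        if (atomic_fetch_sub(&ctx->kref.refcount, 1) == 1) {
                destroy_spu_context(&ctx->kref);        /* last reference */
                return 1;
        }
        return 0;
}

int main(void)
{
        struct spu_context *ctx = calloc(1, sizeof(*ctx));

        if (!ctx)
                return 1;
        atomic_init(&ctx->kref.refcount, 1);    /* kref_init */
        get_spu_context(ctx);                   /* second user */
        put_spu_context(ctx);                   /* still alive */
        put_spu_context(ctx);                   /* last ref: destroyed */
        return 0;
}
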
ctx                24 arch/powerpc/platforms/cell/spufs/coredump.c static ssize_t do_coredump_read(int num, struct spu_context *ctx, void *buffer,
ctx                31 arch/powerpc/platforms/cell/spufs/coredump.c 		return spufs_coredump_read[num].read(ctx, buffer, size, off);
ctx                33 arch/powerpc/platforms/cell/spufs/coredump.c 	data = spufs_coredump_read[num].get(ctx);
ctx                40 arch/powerpc/platforms/cell/spufs/coredump.c static int spufs_ctx_note_size(struct spu_context *ctx, int dfd)
ctx                62 arch/powerpc/platforms/cell/spufs/coredump.c 	struct spu_context *ctx;
ctx                65 arch/powerpc/platforms/cell/spufs/coredump.c 	ctx = SPUFS_I(file_inode(file))->i_ctx;
ctx                66 arch/powerpc/platforms/cell/spufs/coredump.c 	if (ctx->flags & SPU_CREATE_NOSCHED)
ctx                96 arch/powerpc/platforms/cell/spufs/coredump.c 	struct spu_context *ctx;
ctx               100 arch/powerpc/platforms/cell/spufs/coredump.c 	while ((ctx = coredump_next_context(&fd)) != NULL) {
ctx               101 arch/powerpc/platforms/cell/spufs/coredump.c 		rc = spu_acquire_saved(ctx);
ctx               104 arch/powerpc/platforms/cell/spufs/coredump.c 		rc = spufs_ctx_note_size(ctx, fd);
ctx               105 arch/powerpc/platforms/cell/spufs/coredump.c 		spu_release_saved(ctx);
ctx               118 arch/powerpc/platforms/cell/spufs/coredump.c static int spufs_arch_write_note(struct spu_context *ctx, int i,
ctx               151 arch/powerpc/platforms/cell/spufs/coredump.c 		rc = do_coredump_read(i, ctx, buf, bufsz, &pos);
ctx               177 arch/powerpc/platforms/cell/spufs/coredump.c 	struct spu_context *ctx;
ctx               181 arch/powerpc/platforms/cell/spufs/coredump.c 	while ((ctx = coredump_next_context(&fd)) != NULL) {
ctx               182 arch/powerpc/platforms/cell/spufs/coredump.c 		rc = spu_acquire_saved(ctx);
ctx               187 arch/powerpc/platforms/cell/spufs/coredump.c 			rc = spufs_arch_write_note(ctx, j, cprm, fd);
ctx               189 arch/powerpc/platforms/cell/spufs/coredump.c 				spu_release_saved(ctx);
ctx               194 arch/powerpc/platforms/cell/spufs/coredump.c 		spu_release_saved(ctx);
ctx                23 arch/powerpc/platforms/cell/spufs/fault.c static void spufs_handle_event(struct spu_context *ctx,
ctx                26 arch/powerpc/platforms/cell/spufs/fault.c 	if (ctx->flags & SPU_CREATE_EVENTS_ENABLED) {
ctx                27 arch/powerpc/platforms/cell/spufs/fault.c 		ctx->event_return |= type;
ctx                28 arch/powerpc/platforms/cell/spufs/fault.c 		wake_up_all(&ctx->stop_wq);
ctx                37 arch/powerpc/platforms/cell/spufs/fault.c 		ctx->ops->restart_dma(ctx);
ctx                48 arch/powerpc/platforms/cell/spufs/fault.c 			ctx->ops->npc_read(ctx) - 4);
ctx                53 arch/powerpc/platforms/cell/spufs/fault.c int spufs_handle_class0(struct spu_context *ctx)
ctx                55 arch/powerpc/platforms/cell/spufs/fault.c 	unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK;
ctx                61 arch/powerpc/platforms/cell/spufs/fault.c 		spufs_handle_event(ctx, ctx->csa.class_0_dar,
ctx                65 arch/powerpc/platforms/cell/spufs/fault.c 		spufs_handle_event(ctx, ctx->csa.class_0_dar,
ctx                69 arch/powerpc/platforms/cell/spufs/fault.c 		spufs_handle_event(ctx, ctx->csa.class_0_dar,
ctx                72 arch/powerpc/platforms/cell/spufs/fault.c 	ctx->csa.class_0_pending = 0;
ctx                86 arch/powerpc/platforms/cell/spufs/fault.c int spufs_handle_class1(struct spu_context *ctx)
ctx               102 arch/powerpc/platforms/cell/spufs/fault.c 	ea = ctx->csa.class_1_dar;
ctx               103 arch/powerpc/platforms/cell/spufs/fault.c 	dsisr = ctx->csa.class_1_dsisr;
ctx               108 arch/powerpc/platforms/cell/spufs/fault.c 	spuctx_switch_state(ctx, SPU_UTIL_IOWAIT);
ctx               110 arch/powerpc/platforms/cell/spufs/fault.c 	pr_debug("ctx %p: ea %016llx, dsisr %016llx state %d\n", ctx, ea,
ctx               111 arch/powerpc/platforms/cell/spufs/fault.c 		dsisr, ctx->state);
ctx               113 arch/powerpc/platforms/cell/spufs/fault.c 	ctx->stats.hash_flt++;
ctx               114 arch/powerpc/platforms/cell/spufs/fault.c 	if (ctx->state == SPU_STATE_RUNNABLE)
ctx               115 arch/powerpc/platforms/cell/spufs/fault.c 		ctx->spu->stats.hash_flt++;
ctx               118 arch/powerpc/platforms/cell/spufs/fault.c 	spu_release(ctx);
ctx               134 arch/powerpc/platforms/cell/spufs/fault.c 	mutex_lock(&ctx->state_mutex);
ctx               141 arch/powerpc/platforms/cell/spufs/fault.c 	ctx->csa.class_1_dar = ctx->csa.class_1_dsisr = 0;
ctx               150 arch/powerpc/platforms/cell/spufs/fault.c 			ctx->stats.maj_flt++;
ctx               152 arch/powerpc/platforms/cell/spufs/fault.c 			ctx->stats.min_flt++;
ctx               153 arch/powerpc/platforms/cell/spufs/fault.c 		if (ctx->state == SPU_STATE_RUNNABLE) {
ctx               155 arch/powerpc/platforms/cell/spufs/fault.c 				ctx->spu->stats.maj_flt++;
ctx               157 arch/powerpc/platforms/cell/spufs/fault.c 				ctx->spu->stats.min_flt++;
ctx               160 arch/powerpc/platforms/cell/spufs/fault.c 		if (ctx->spu)
ctx               161 arch/powerpc/platforms/cell/spufs/fault.c 			ctx->ops->restart_dma(ctx);
ctx               163 arch/powerpc/platforms/cell/spufs/fault.c 		spufs_handle_event(ctx, ea, SPE_EVENT_SPE_DATA_STORAGE);
ctx               165 arch/powerpc/platforms/cell/spufs/fault.c 	spuctx_switch_state(ctx, SPU_UTIL_SYSTEM);
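
spufs_handle_class1() above shows the ordering that matters here: snapshot the fault parameters from the csa under the context lock, drop it with spu_release() before the possibly-sleeping fault resolution, then retake state_mutex before clearing the saved state, since the context may have been scheduled away in between. A pthreads sketch of that drop-resolve-retake shape; the fault resolution is a stub and the lock discipline is simplified from the real flow.

#include <stdio.h>
#include <pthread.h>

struct spu_context {
        pthread_mutex_t state_mutex;
        unsigned long class_1_dar;      /* fault address, saved by switch code */
};

static int resolve_fault(unsigned long ea)
{
        printf("resolving fault at %#lx (may sleep)\n", ea);
        return 0;
}

static int handle_class1(struct spu_context *ctx)
{
        unsigned long ea;
        int ret;

        pthread_mutex_lock(&ctx->state_mutex);
        ea = ctx->class_1_dar;                  /* snapshot while held */
        pthread_mutex_unlock(&ctx->state_mutex);        /* spu_release() */

        ret = resolve_fault(ea);        /* context may migrate meanwhile */

        pthread_mutex_lock(&ctx->state_mutex);          /* retake */
        ctx->class_1_dar = 0;           /* clear saved fault state */
        pthread_mutex_unlock(&ctx->state_mutex);
        return ret;
}

int main(void)
{
        struct spu_context ctx = { PTHREAD_MUTEX_INITIALIZER, 0xdead000 };

        return handle_class1(&ctx);
}
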
ctx               151 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               153 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               154 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx               156 arch/powerpc/platforms/cell/spufs/file.c 		ctx->local_store = inode->i_mapping;
ctx               157 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               165 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               167 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               169 arch/powerpc/platforms/cell/spufs/file.c 		ctx->local_store = NULL;
ctx               170 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               175 arch/powerpc/platforms/cell/spufs/file.c __spufs_mem_read(struct spu_context *ctx, char __user *buffer,
ctx               178 arch/powerpc/platforms/cell/spufs/file.c 	char *local_store = ctx->ops->get_ls(ctx);
ctx               187 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               190 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               193 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_mem_read(ctx, buffer, size, pos);
ctx               194 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
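
file.c consistently splits each operation into a locked __helper that assumes the context is held plus a thin file-op wrapper doing spu_acquire()/spu_release() around it, so that callers already holding the context (e.g. the coredump reader at coredump.c lines 24-31 above, under spu_acquire_saved()) can invoke the __helper directly. A pthreads model of that split; the toy local store and sizes are invented.

#include <stdio.h>
#include <pthread.h>

struct spu_context {
        pthread_mutex_t state_mutex;
        char ls[16];                    /* toy local store */
};

static long __spufs_mem_read(struct spu_context *ctx, char *buf, long size)
{
        /* caller must hold state_mutex */
        long n = size < (long)sizeof(ctx->ls) ? size : (long)sizeof(ctx->ls);
        long i;

        for (i = 0; i < n; i++)
                buf[i] = ctx->ls[i];
        return n;
}

static long spufs_mem_read(struct spu_context *ctx, char *buf, long size)
{
        long ret;

        pthread_mutex_lock(&ctx->state_mutex);          /* spu_acquire() */
        ret = __spufs_mem_read(ctx, buf, size);
        pthread_mutex_unlock(&ctx->state_mutex);        /* spu_release() */
        return ret;
}

int main(void)
{
        struct spu_context ctx = { PTHREAD_MUTEX_INITIALIZER, "local store!!!" };
        char buf[16];
        long n = spufs_mem_read(&ctx, buf, sizeof(buf));

        printf("read %ld bytes: %.14s\n", n, buf);
        return 0;
}
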
ctx               203 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               211 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               215 arch/powerpc/platforms/cell/spufs/file.c 	local_store = ctx->ops->get_ls(ctx);
ctx               217 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               226 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx	= vma->vm_file->private_data;
ctx               237 arch/powerpc/platforms/cell/spufs/file.c 	if (spu_acquire(ctx))
ctx               240 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->state == SPU_STATE_SAVED) {
ctx               242 arch/powerpc/platforms/cell/spufs/file.c 		pfn = vmalloc_to_pfn(ctx->csa.lscsa->ls + offset);
ctx               245 arch/powerpc/platforms/cell/spufs/file.c 		pfn = (ctx->spu->local_store_phys + offset) >> PAGE_SHIFT;
ctx               249 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               258 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = vma->vm_file->private_data;
ctx               264 arch/powerpc/platforms/cell/spufs/file.c 	if (spu_acquire(ctx))
ctx               268 arch/powerpc/platforms/cell/spufs/file.c 	local_store = ctx->ops->get_ls(ctx);
ctx               273 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               307 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = vmf->vma->vm_file->private_data;
ctx               312 arch/powerpc/platforms/cell/spufs/file.c 	spu_context_nospu_trace(spufs_ps_fault__enter, ctx);
ctx               325 arch/powerpc/platforms/cell/spufs/file.c 	get_spu_context(ctx);
ctx               335 arch/powerpc/platforms/cell/spufs/file.c 	if (spu_acquire(ctx))
ctx               338 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->state == SPU_STATE_SAVED) {
ctx               340 arch/powerpc/platforms/cell/spufs/file.c 		spu_context_nospu_trace(spufs_ps_fault__sleep, ctx);
ctx               341 arch/powerpc/platforms/cell/spufs/file.c 		err = spufs_wait(ctx->run_wq, ctx->state == SPU_STATE_RUNNABLE);
ctx               342 arch/powerpc/platforms/cell/spufs/file.c 		spu_context_trace(spufs_ps_fault__wake, ctx, ctx->spu);
ctx               345 arch/powerpc/platforms/cell/spufs/file.c 		area = ctx->spu->problem_phys + ps_offs;
ctx               348 arch/powerpc/platforms/cell/spufs/file.c 		spu_context_trace(spufs_ps_fault__insert, ctx, ctx->spu);
ctx               352 arch/powerpc/platforms/cell/spufs/file.c 		spu_release(ctx);
ctx               355 arch/powerpc/platforms/cell/spufs/file.c 	put_spu_context(ctx);
ctx               389 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx               392 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               395 arch/powerpc/platforms/cell/spufs/file.c 	*val = ctx->ops->status_read(ctx);
ctx               396 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               403 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx               406 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               409 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->runcntl_write(ctx, val);
ctx               410 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               418 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               420 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               421 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx               423 arch/powerpc/platforms/cell/spufs/file.c 		ctx->cntl = inode->i_mapping;
ctx               424 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               433 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               437 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               439 arch/powerpc/platforms/cell/spufs/file.c 		ctx->cntl = NULL;
ctx               440 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               462 arch/powerpc/platforms/cell/spufs/file.c __spufs_regs_read(struct spu_context *ctx, char __user *buffer,
ctx               465 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx               475 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               479 arch/powerpc/platforms/cell/spufs/file.c 	if (*pos >= sizeof(ctx->csa.lscsa->gprs))
ctx               482 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx               485 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_regs_read(ctx, buffer, size, pos);
ctx               486 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx               494 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               495 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx               501 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx               508 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx               520 arch/powerpc/platforms/cell/spufs/file.c __spufs_fpcr_read(struct spu_context *ctx, char __user * buffer,
ctx               523 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx               533 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               535 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx               538 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_fpcr_read(ctx, buffer, size, pos);
ctx               539 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx               547 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               548 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx               554 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx               561 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx               592 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               604 arch/powerpc/platforms/cell/spufs/file.c 	count = spu_acquire(ctx);
ctx               610 arch/powerpc/platforms/cell/spufs/file.c 		ret = ctx->ops->mbox_read(ctx, &mbox_data);
ctx               626 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               643 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               650 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               654 arch/powerpc/platforms/cell/spufs/file.c 	mbox_stat = ctx->ops->mbox_stat_read(ctx) & 0xff;
ctx               656 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               671 arch/powerpc/platforms/cell/spufs/file.c size_t spu_ibox_read(struct spu_context *ctx, u32 *data)
ctx               673 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->ops->ibox_read(ctx, data);
ctx               679 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = spu->ctx;
ctx               681 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx)
ctx               682 arch/powerpc/platforms/cell/spufs/file.c 		wake_up_all(&ctx->ibox_wq);
ctx               700 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               712 arch/powerpc/platforms/cell/spufs/file.c 	count = spu_acquire(ctx);
ctx               719 arch/powerpc/platforms/cell/spufs/file.c 		if (!spu_ibox_read(ctx, &ibox_data)) {
ctx               724 arch/powerpc/platforms/cell/spufs/file.c 		count = spufs_wait(ctx->ibox_wq, spu_ibox_read(ctx, &ibox_data));
ctx               736 arch/powerpc/platforms/cell/spufs/file.c 		ret = ctx->ops->ibox_read(ctx, &ibox_data);
ctx               750 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               757 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               760 arch/powerpc/platforms/cell/spufs/file.c 	poll_wait(file, &ctx->ibox_wq, wait);
ctx               766 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->state_mutex);
ctx               767 arch/powerpc/platforms/cell/spufs/file.c 	mask = ctx->ops->mbox_stat_poll(ctx, EPOLLIN | EPOLLRDNORM);
ctx               768 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               783 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               790 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               793 arch/powerpc/platforms/cell/spufs/file.c 	ibox_stat = (ctx->ops->mbox_stat_read(ctx) >> 16) & 0xff;
ctx               794 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               809 arch/powerpc/platforms/cell/spufs/file.c size_t spu_wbox_write(struct spu_context *ctx, u32 data)
ctx               811 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->ops->wbox_write(ctx, data);
ctx               817 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = spu->ctx;
ctx               819 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx)
ctx               820 arch/powerpc/platforms/cell/spufs/file.c 		wake_up_all(&ctx->wbox_wq);
ctx               838 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               852 arch/powerpc/platforms/cell/spufs/file.c 	count = spu_acquire(ctx);
ctx               862 arch/powerpc/platforms/cell/spufs/file.c 		if (!spu_wbox_write(ctx, wbox_data)) {
ctx               867 arch/powerpc/platforms/cell/spufs/file.c 		count = spufs_wait(ctx->wbox_wq, spu_wbox_write(ctx, wbox_data));
ctx               880 arch/powerpc/platforms/cell/spufs/file.c 		ret = spu_wbox_write(ctx, wbox_data);
ctx               886 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               893 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               896 arch/powerpc/platforms/cell/spufs/file.c 	poll_wait(file, &ctx->wbox_wq, wait);
ctx               902 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->state_mutex);
ctx               903 arch/powerpc/platforms/cell/spufs/file.c 	mask = ctx->ops->mbox_stat_poll(ctx, EPOLLOUT | EPOLLWRNORM);
ctx               904 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               919 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx               926 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx               929 arch/powerpc/platforms/cell/spufs/file.c 	wbox_stat = (ctx->ops->mbox_stat_read(ctx) >> 8) & 0xff;
ctx               930 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx               947 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               949 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               950 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx               952 arch/powerpc/platforms/cell/spufs/file.c 		ctx->signal1 = inode->i_mapping;
ctx               953 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               961 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx               963 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               965 arch/powerpc/platforms/cell/spufs/file.c 		ctx->signal1 = NULL;
ctx               966 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               970 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_signal1_read(struct spu_context *ctx, char __user *buf,
ctx               979 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->csa.spu_chnlcnt_RW[3]) {
ctx               980 arch/powerpc/platforms/cell/spufs/file.c 		data = ctx->csa.spu_chnldata_RW[3];
ctx               998 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1000 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1003 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_signal1_read(ctx, buf, len, pos);
ctx              1004 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1012 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx;
ctx              1016 arch/powerpc/platforms/cell/spufs/file.c 	ctx = file->private_data;
ctx              1024 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1027 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->signal1_write(ctx, data);
ctx              1028 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1084 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1086 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1087 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx              1089 arch/powerpc/platforms/cell/spufs/file.c 		ctx->signal2 = inode->i_mapping;
ctx              1090 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1098 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1100 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1102 arch/powerpc/platforms/cell/spufs/file.c 		ctx->signal2 = NULL;
ctx              1103 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1107 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_signal2_read(struct spu_context *ctx, char __user *buf,
ctx              1116 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->csa.spu_chnlcnt_RW[4]) {
ctx              1117 arch/powerpc/platforms/cell/spufs/file.c 		data =  ctx->csa.spu_chnldata_RW[4];
ctx              1134 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1137 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1140 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_signal2_read(ctx, buf, len, pos);
ctx              1141 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1149 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx;
ctx              1153 arch/powerpc/platforms/cell/spufs/file.c 	ctx = file->private_data;
ctx              1161 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1164 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->signal2_write(ctx, data);
ctx              1165 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1234 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;					\
ctx              1238 arch/powerpc/platforms/cell/spufs/file.c 		ret = spu_acquire(ctx);					\
ctx              1241 arch/powerpc/platforms/cell/spufs/file.c 		*val = __get(ctx);					\
ctx              1242 arch/powerpc/platforms/cell/spufs/file.c 		spu_release(ctx);					\
ctx              1244 arch/powerpc/platforms/cell/spufs/file.c 		ret = spu_acquire_saved(ctx);				\
ctx              1247 arch/powerpc/platforms/cell/spufs/file.c 		*val = __get(ctx);					\
ctx              1248 arch/powerpc/platforms/cell/spufs/file.c 		spu_release_saved(ctx);					\
ctx              1250 arch/powerpc/platforms/cell/spufs/file.c 		*val = __get(ctx);					\
ctx              1258 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1261 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1264 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->signal1_type_set(ctx, val);
ctx              1265 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1270 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_signal1_type_get(struct spu_context *ctx)
ctx              1272 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->ops->signal1_type_get(ctx);
ctx              1280 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1283 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1286 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->signal2_type_set(ctx, val);
ctx              1287 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1292 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_signal2_type_get(struct spu_context *ctx)
ctx              1294 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->ops->signal2_type_get(ctx);
ctx              1331 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1335 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1337 arch/powerpc/platforms/cell/spufs/file.c 		ctx->mss = inode->i_mapping;
ctx              1338 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1346 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1348 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1350 arch/powerpc/platforms/cell/spufs/file.c 		ctx->mss = NULL;
ctx              1351 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1390 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1392 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1395 arch/powerpc/platforms/cell/spufs/file.c 		ctx->psmap = inode->i_mapping;
ctx              1396 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1404 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1406 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1408 arch/powerpc/platforms/cell/spufs/file.c 		ctx->psmap = NULL;
ctx              1409 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1453 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1456 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->owner != current->mm)
ctx              1462 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1463 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx              1465 arch/powerpc/platforms/cell/spufs/file.c 		ctx->mfc = inode->i_mapping;
ctx              1466 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1474 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1476 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->mapping_lock);
ctx              1478 arch/powerpc/platforms/cell/spufs/file.c 		ctx->mfc = NULL;
ctx              1479 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx              1486 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = spu->ctx;
ctx              1488 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx)
ctx              1489 arch/powerpc/platforms/cell/spufs/file.c 		wake_up_all(&ctx->mfc_wq);
ctx              1492 arch/powerpc/platforms/cell/spufs/file.c static int spufs_read_mfc_tagstatus(struct spu_context *ctx, u32 *status)
ctx              1496 arch/powerpc/platforms/cell/spufs/file.c 	*status = ctx->ops->read_mfc_tagstatus(ctx) & ctx->tagwait;
ctx              1497 arch/powerpc/platforms/cell/spufs/file.c 	ctx->tagwait &= ~*status;
ctx              1503 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->set_mfc_query(ctx, ctx->tagwait, 1);
ctx              1510 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1517 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1523 arch/powerpc/platforms/cell/spufs/file.c 		status = ctx->ops->read_mfc_tagstatus(ctx);
ctx              1524 arch/powerpc/platforms/cell/spufs/file.c 		if (!(status & ctx->tagwait))
ctx              1528 arch/powerpc/platforms/cell/spufs/file.c 			ctx->tagwait &= ~status;
ctx              1530 arch/powerpc/platforms/cell/spufs/file.c 		ret = spufs_wait(ctx->mfc_wq,
ctx              1531 arch/powerpc/platforms/cell/spufs/file.c 			   spufs_read_mfc_tagstatus(ctx, &status));
ctx              1535 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1615 arch/powerpc/platforms/cell/spufs/file.c static int spu_send_mfc_command(struct spu_context *ctx,
ctx              1619 arch/powerpc/platforms/cell/spufs/file.c 	*error = ctx->ops->send_mfc_command(ctx, &cmd);
ctx              1623 arch/powerpc/platforms/cell/spufs/file.c 		ctx->ops->set_mfc_query(ctx, ctx->tagwait, 1);
ctx              1626 arch/powerpc/platforms/cell/spufs/file.c 		*error = ctx->ops->send_mfc_command(ctx, &cmd);
ctx              1636 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1651 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1655 arch/powerpc/platforms/cell/spufs/file.c 	ret = spufs_wait(ctx->run_wq, ctx->state == SPU_STATE_RUNNABLE);
ctx              1660 arch/powerpc/platforms/cell/spufs/file.c 		ret = ctx->ops->send_mfc_command(ctx, &cmd);
ctx              1663 arch/powerpc/platforms/cell/spufs/file.c 		ret = spufs_wait(ctx->mfc_wq,
ctx              1664 arch/powerpc/platforms/cell/spufs/file.c 				 spu_send_mfc_command(ctx, cmd, &status));
ctx              1674 arch/powerpc/platforms/cell/spufs/file.c 	ctx->tagwait |= 1 << cmd.tag;
ctx              1678 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1685 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1689 arch/powerpc/platforms/cell/spufs/file.c 	poll_wait(file, &ctx->mfc_wq, wait);
ctx              1695 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->state_mutex);
ctx              1696 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->set_mfc_query(ctx, ctx->tagwait, 2);
ctx              1697 arch/powerpc/platforms/cell/spufs/file.c 	free_elements = ctx->ops->get_mfc_free_elements(ctx);
ctx              1698 arch/powerpc/platforms/cell/spufs/file.c 	tagstatus = ctx->ops->read_mfc_tagstatus(ctx);
ctx              1699 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1704 arch/powerpc/platforms/cell/spufs/file.c 	if (tagstatus & ctx->tagwait)
ctx              1708 arch/powerpc/platforms/cell/spufs/file.c 		free_elements, tagstatus, ctx->tagwait);
ctx              1715 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1718 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1723 arch/powerpc/platforms/cell/spufs/file.c 	ret = spufs_wait(ctx->mfc_wq,
ctx              1724 arch/powerpc/platforms/cell/spufs/file.c 			 ctx->ops->set_mfc_query(ctx, ctx->tagwait, 2));
ctx              1727 arch/powerpc/platforms/cell/spufs/file.c 	ret = spufs_wait(ctx->mfc_wq,
ctx              1728 arch/powerpc/platforms/cell/spufs/file.c 			 ctx->ops->read_mfc_tagstatus(ctx) == ctx->tagwait);
ctx              1734 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
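Everything in the MFC section above revolves around ctx->tagwait: queueing a DMA command sets bit (1 << cmd.tag), and completion is detected by intersecting the hardware tag status with that mask. A minimal model of the predicate the wait loops test, mirroring the spufs_read_mfc_tagstatus lines in the listing (the if-condition reconstructs unlisted control flow, so treat it as an assumption):

	static int example_read_mfc_tagstatus(struct spu_context *ctx, u32 *status)
	{
		/* which of the awaited tag groups have completed? */
		*status = ctx->ops->read_mfc_tagstatus(ctx) & ctx->tagwait;
		ctx->tagwait &= ~*status;	/* stop tracking finished groups */

		if (!*status)			/* nothing yet: re-arm the query */
			ctx->ops->set_mfc_query(ctx, ctx->tagwait, 1);

		return *status != 0;	/* nonzero ends the spufs_wait() loop */
	}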
ctx              1765 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1768 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              1771 arch/powerpc/platforms/cell/spufs/file.c 	ctx->ops->npc_write(ctx, val);
ctx              1772 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              1777 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_npc_get(struct spu_context *ctx)
ctx              1779 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->ops->npc_read(ctx);
ctx              1786 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1787 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1790 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1794 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1799 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_decr_get(struct spu_context *ctx)
ctx              1801 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1809 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1812 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1816 arch/powerpc/platforms/cell/spufs/file.c 		ctx->csa.priv2.mfc_control_RW |= MFC_CNTL_DECREMENTER_RUNNING;
ctx              1818 arch/powerpc/platforms/cell/spufs/file.c 		ctx->csa.priv2.mfc_control_RW &= ~MFC_CNTL_DECREMENTER_RUNNING;
ctx              1819 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1824 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_decr_status_get(struct spu_context *ctx)
ctx              1826 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->csa.priv2.mfc_control_RW & MFC_CNTL_DECREMENTER_RUNNING)
ctx              1837 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1838 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1841 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1845 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1850 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_event_mask_get(struct spu_context *ctx)
ctx              1852 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1860 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_event_status_get(struct spu_context *ctx)
ctx              1862 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_state *state = &ctx->csa;
ctx              1874 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1875 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1878 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1882 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              1887 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_srr0_get(struct spu_context *ctx)
ctx              1889 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_lscsa *lscsa = ctx->csa.lscsa;
ctx              1895 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_id_get(struct spu_context *ctx)
ctx              1899 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->state == SPU_STATE_RUNNABLE)
ctx              1900 arch/powerpc/platforms/cell/spufs/file.c 		num = ctx->spu->number;
ctx              1909 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_object_id_get(struct spu_context *ctx)
ctx              1912 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->object_id;
ctx              1917 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = data;
ctx              1918 arch/powerpc/platforms/cell/spufs/file.c 	ctx->object_id = id;
ctx              1926 arch/powerpc/platforms/cell/spufs/file.c static u64 spufs_lslr_get(struct spu_context *ctx)
ctx              1928 arch/powerpc/platforms/cell/spufs/file.c 	return ctx->csa.priv2.spu_lslr_RW;
ctx              1936 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = i->i_ctx;
ctx              1937 arch/powerpc/platforms/cell/spufs/file.c 	file->private_data = ctx;
ctx              1943 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = s->private;
ctx              1945 arch/powerpc/platforms/cell/spufs/file.c 	if (!(ctx->flags & SPU_CREATE_NOSCHED))
ctx              1947 arch/powerpc/platforms/cell/spufs/file.c 	if (!(ctx->flags & SPU_CREATE_ISOLATE))
ctx              1964 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_mbox_info_read(struct spu_context *ctx,
ctx              1970 arch/powerpc/platforms/cell/spufs/file.c 	if (!(ctx->csa.prob.mb_stat_R & 0x0000ff))
ctx              1973 arch/powerpc/platforms/cell/spufs/file.c 	data = ctx->csa.prob.pu_mb_R;
ctx              1982 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              1987 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              1990 arch/powerpc/platforms/cell/spufs/file.c 	spin_lock(&ctx->csa.register_lock);
ctx              1991 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_mbox_info_read(ctx, buf, len, pos);
ctx              1992 arch/powerpc/platforms/cell/spufs/file.c 	spin_unlock(&ctx->csa.register_lock);
ctx              1993 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              2004 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_ibox_info_read(struct spu_context *ctx,
ctx              2010 arch/powerpc/platforms/cell/spufs/file.c 	if (!(ctx->csa.prob.mb_stat_R & 0xff0000))
ctx              2013 arch/powerpc/platforms/cell/spufs/file.c 	data = ctx->csa.priv2.puint_mb_R;
ctx              2021 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              2027 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              2030 arch/powerpc/platforms/cell/spufs/file.c 	spin_lock(&ctx->csa.register_lock);
ctx              2031 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_ibox_info_read(ctx, buf, len, pos);
ctx              2032 arch/powerpc/platforms/cell/spufs/file.c 	spin_unlock(&ctx->csa.register_lock);
ctx              2033 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              2044 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_wbox_info_read(struct spu_context *ctx,
ctx              2051 arch/powerpc/platforms/cell/spufs/file.c 	wbox_stat = ctx->csa.prob.mb_stat_R;
ctx              2054 arch/powerpc/platforms/cell/spufs/file.c 		data[i] = ctx->csa.spu_mailbox_data[i];
ctx              2064 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              2070 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              2073 arch/powerpc/platforms/cell/spufs/file.c 	spin_lock(&ctx->csa.register_lock);
ctx              2074 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_wbox_info_read(ctx, buf, len, pos);
ctx              2075 arch/powerpc/platforms/cell/spufs/file.c 	spin_unlock(&ctx->csa.register_lock);
ctx              2076 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              2087 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_dma_info_read(struct spu_context *ctx,
ctx              2094 arch/powerpc/platforms/cell/spufs/file.c 	info.dma_info_type = ctx->csa.priv2.spu_tag_status_query_RW;
ctx              2095 arch/powerpc/platforms/cell/spufs/file.c 	info.dma_info_mask = ctx->csa.lscsa->tag_mask.slot[0];
ctx              2096 arch/powerpc/platforms/cell/spufs/file.c 	info.dma_info_status = ctx->csa.spu_chnldata_RW[24];
ctx              2097 arch/powerpc/platforms/cell/spufs/file.c 	info.dma_info_stall_and_notify = ctx->csa.spu_chnldata_RW[25];
ctx              2098 arch/powerpc/platforms/cell/spufs/file.c 	info.dma_info_atomic_command_status = ctx->csa.spu_chnldata_RW[27];
ctx              2101 arch/powerpc/platforms/cell/spufs/file.c 		spuqp = &ctx->csa.priv2.spuq[i];
ctx              2116 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              2122 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              2125 arch/powerpc/platforms/cell/spufs/file.c 	spin_lock(&ctx->csa.register_lock);
ctx              2126 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_dma_info_read(ctx, buf, len, pos);
ctx              2127 arch/powerpc/platforms/cell/spufs/file.c 	spin_unlock(&ctx->csa.register_lock);
ctx              2128 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              2139 arch/powerpc/platforms/cell/spufs/file.c static ssize_t __spufs_proxydma_info_read(struct spu_context *ctx,
ctx              2153 arch/powerpc/platforms/cell/spufs/file.c 	info.proxydma_info_type = ctx->csa.prob.dma_querytype_RW;
ctx              2154 arch/powerpc/platforms/cell/spufs/file.c 	info.proxydma_info_mask = ctx->csa.prob.dma_querymask_RW;
ctx              2155 arch/powerpc/platforms/cell/spufs/file.c 	info.proxydma_info_status = ctx->csa.prob.dma_tagstatus_R;
ctx              2158 arch/powerpc/platforms/cell/spufs/file.c 		puqp = &ctx->csa.priv2.puq[i];
ctx              2173 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = file->private_data;
ctx              2176 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire_saved(ctx);
ctx              2179 arch/powerpc/platforms/cell/spufs/file.c 	spin_lock(&ctx->csa.register_lock);
ctx              2180 arch/powerpc/platforms/cell/spufs/file.c 	ret = __spufs_proxydma_info_read(ctx, buf, len, pos);
ctx              2181 arch/powerpc/platforms/cell/spufs/file.c 	spin_unlock(&ctx->csa.register_lock);
ctx              2182 arch/powerpc/platforms/cell/spufs/file.c 	spu_release_saved(ctx);
ctx              2195 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = s->private;
ctx              2197 arch/powerpc/platforms/cell/spufs/file.c 	seq_printf(s, "%d\n", ctx->tid);
ctx              2217 arch/powerpc/platforms/cell/spufs/file.c static unsigned long long spufs_acct_time(struct spu_context *ctx,
ctx              2220 arch/powerpc/platforms/cell/spufs/file.c 	unsigned long long time = ctx->stats.times[state];
ctx              2231 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->spu && ctx->stats.util_state == state) {
ctx              2232 arch/powerpc/platforms/cell/spufs/file.c 		time += ktime_get_ns() - ctx->stats.tstamp;
ctx              2238 arch/powerpc/platforms/cell/spufs/file.c static unsigned long long spufs_slb_flts(struct spu_context *ctx)
ctx              2240 arch/powerpc/platforms/cell/spufs/file.c 	unsigned long long slb_flts = ctx->stats.slb_flt;
ctx              2242 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->state == SPU_STATE_RUNNABLE) {
ctx              2243 arch/powerpc/platforms/cell/spufs/file.c 		slb_flts += (ctx->spu->stats.slb_flt -
ctx              2244 arch/powerpc/platforms/cell/spufs/file.c 			     ctx->stats.slb_flt_base);
ctx              2250 arch/powerpc/platforms/cell/spufs/file.c static unsigned long long spufs_class2_intrs(struct spu_context *ctx)
ctx              2252 arch/powerpc/platforms/cell/spufs/file.c 	unsigned long long class2_intrs = ctx->stats.class2_intr;
ctx              2254 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->state == SPU_STATE_RUNNABLE) {
ctx              2255 arch/powerpc/platforms/cell/spufs/file.c 		class2_intrs += (ctx->spu->stats.class2_intr -
ctx              2256 arch/powerpc/platforms/cell/spufs/file.c 				 ctx->stats.class2_intr_base);
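spufs_slb_flts() and spufs_class2_intrs() above share one base/delta accounting scheme: totals accumulated across save/restore live in ctx->stats, and while the context is RUNNABLE the live per-SPU hardware counter minus a snapshot taken at load time is added on top. Schematically (illustrative name only):

	static unsigned long long example_slb_flts(struct spu_context *ctx)
	{
		unsigned long long n = ctx->stats.slb_flt;	/* saved total */

		if (ctx->state == SPU_STATE_RUNNABLE)		/* plus live delta */
			n += ctx->spu->stats.slb_flt - ctx->stats.slb_flt_base;
		return n;
	}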
ctx              2265 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = s->private;
ctx              2268 arch/powerpc/platforms/cell/spufs/file.c 	ret = spu_acquire(ctx);
ctx              2274 arch/powerpc/platforms/cell/spufs/file.c 		ctx_state_names[ctx->stats.util_state],
ctx              2275 arch/powerpc/platforms/cell/spufs/file.c 		spufs_acct_time(ctx, SPU_UTIL_USER),
ctx              2276 arch/powerpc/platforms/cell/spufs/file.c 		spufs_acct_time(ctx, SPU_UTIL_SYSTEM),
ctx              2277 arch/powerpc/platforms/cell/spufs/file.c 		spufs_acct_time(ctx, SPU_UTIL_IOWAIT),
ctx              2278 arch/powerpc/platforms/cell/spufs/file.c 		spufs_acct_time(ctx, SPU_UTIL_IDLE_LOADED),
ctx              2279 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.vol_ctx_switch,
ctx              2280 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.invol_ctx_switch,
ctx              2281 arch/powerpc/platforms/cell/spufs/file.c 		spufs_slb_flts(ctx),
ctx              2282 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.hash_flt,
ctx              2283 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.min_flt,
ctx              2284 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.maj_flt,
ctx              2285 arch/powerpc/platforms/cell/spufs/file.c 		spufs_class2_intrs(ctx),
ctx              2286 arch/powerpc/platforms/cell/spufs/file.c 		ctx->stats.libassist);
ctx              2287 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              2303 arch/powerpc/platforms/cell/spufs/file.c static inline int spufs_switch_log_used(struct spu_context *ctx)
ctx              2305 arch/powerpc/platforms/cell/spufs/file.c 	return (ctx->switch_log->head - ctx->switch_log->tail) %
ctx              2309 arch/powerpc/platforms/cell/spufs/file.c static inline int spufs_switch_log_avail(struct spu_context *ctx)
ctx              2311 arch/powerpc/platforms/cell/spufs/file.c 	return SWITCH_LOG_BUFSIZE - spufs_switch_log_used(ctx);
ctx              2316 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = SPUFS_I(inode)->i_ctx;
ctx              2319 arch/powerpc/platforms/cell/spufs/file.c 	rc = spu_acquire(ctx);
ctx              2323 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->switch_log) {
ctx              2328 arch/powerpc/platforms/cell/spufs/file.c 	ctx->switch_log = kmalloc(struct_size(ctx->switch_log, log,
ctx              2331 arch/powerpc/platforms/cell/spufs/file.c 	if (!ctx->switch_log) {
ctx              2336 arch/powerpc/platforms/cell/spufs/file.c 	ctx->switch_log->head = ctx->switch_log->tail = 0;
ctx              2337 arch/powerpc/platforms/cell/spufs/file.c 	init_waitqueue_head(&ctx->switch_log->wait);
ctx              2341 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              2347 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = SPUFS_I(inode)->i_ctx;
ctx              2350 arch/powerpc/platforms/cell/spufs/file.c 	rc = spu_acquire(ctx);
ctx              2354 arch/powerpc/platforms/cell/spufs/file.c 	kfree(ctx->switch_log);
ctx              2355 arch/powerpc/platforms/cell/spufs/file.c 	ctx->switch_log = NULL;
ctx              2356 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              2361 arch/powerpc/platforms/cell/spufs/file.c static int switch_log_sprint(struct spu_context *ctx, char *tbuf, int n)
ctx              2365 arch/powerpc/platforms/cell/spufs/file.c 	p = ctx->switch_log->log + ctx->switch_log->tail % SWITCH_LOG_BUFSIZE;
ctx              2380 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = SPUFS_I(inode)->i_ctx;
ctx              2386 arch/powerpc/platforms/cell/spufs/file.c 	error = spu_acquire(ctx);
ctx              2394 arch/powerpc/platforms/cell/spufs/file.c 		if (spufs_switch_log_used(ctx) == 0) {
ctx              2410 arch/powerpc/platforms/cell/spufs/file.c 				error = spufs_wait(ctx->switch_log->wait,
ctx              2411 arch/powerpc/platforms/cell/spufs/file.c 						spufs_switch_log_used(ctx) > 0);
ctx              2421 arch/powerpc/platforms/cell/spufs/file.c 				if (spufs_switch_log_used(ctx) == 0)
ctx              2426 arch/powerpc/platforms/cell/spufs/file.c 		width = switch_log_sprint(ctx, tbuf, sizeof(tbuf));
ctx              2428 arch/powerpc/platforms/cell/spufs/file.c 			ctx->switch_log->tail =
ctx              2429 arch/powerpc/platforms/cell/spufs/file.c 				(ctx->switch_log->tail + 1) %
ctx              2442 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              2450 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = SPUFS_I(inode)->i_ctx;
ctx              2454 arch/powerpc/platforms/cell/spufs/file.c 	poll_wait(file, &ctx->switch_log->wait, wait);
ctx              2456 arch/powerpc/platforms/cell/spufs/file.c 	rc = spu_acquire(ctx);
ctx              2460 arch/powerpc/platforms/cell/spufs/file.c 	if (spufs_switch_log_used(ctx) > 0)
ctx              2463 arch/powerpc/platforms/cell/spufs/file.c 	spu_release(ctx);
ctx              2481 arch/powerpc/platforms/cell/spufs/file.c void spu_switch_log_notify(struct spu *spu, struct spu_context *ctx,
ctx              2484 arch/powerpc/platforms/cell/spufs/file.c 	if (!ctx->switch_log)
ctx              2487 arch/powerpc/platforms/cell/spufs/file.c 	if (spufs_switch_log_avail(ctx) > 1) {
ctx              2490 arch/powerpc/platforms/cell/spufs/file.c 		p = ctx->switch_log->log + ctx->switch_log->head;
ctx              2497 arch/powerpc/platforms/cell/spufs/file.c 		ctx->switch_log->head =
ctx              2498 arch/powerpc/platforms/cell/spufs/file.c 			(ctx->switch_log->head + 1) % SWITCH_LOG_BUFSIZE;
ctx              2501 arch/powerpc/platforms/cell/spufs/file.c 	wake_up(&ctx->switch_log->wait);
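The switch log above is a fixed-size ring: spu_switch_log_notify() advances head, readers advance tail, both modulo SWITCH_LOG_BUFSIZE, and the producer only writes while spufs_switch_log_avail() > 1, keeping one slot free so that head == tail always means "empty", never "full". A self-contained model of the occupancy math (the buffer size is an assumed value; the real constant is defined earlier in file.c):

	#define SWITCH_LOG_BUFSIZE 4096		/* assumed value */

	static inline int switch_log_used(unsigned long head, unsigned long tail)
	{
		/* entries currently queued, wraparound included */
		return (head - tail) % SWITCH_LOG_BUFSIZE;
	}

	static inline int switch_log_avail(unsigned long head, unsigned long tail)
	{
		/* the producer stops at avail == 1, reserving one empty slot */
		return SWITCH_LOG_BUFSIZE - switch_log_used(head, tail);
	}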
ctx              2506 arch/powerpc/platforms/cell/spufs/file.c 	struct spu_context *ctx = s->private;
ctx              2509 arch/powerpc/platforms/cell/spufs/file.c 	mutex_lock(&ctx->state_mutex);
ctx              2510 arch/powerpc/platforms/cell/spufs/file.c 	if (ctx->spu) {
ctx              2511 arch/powerpc/platforms/cell/spufs/file.c 		struct spu *spu = ctx->spu;
ctx              2518 arch/powerpc/platforms/cell/spufs/file.c 		struct spu_state *csa = &ctx->csa;
ctx              2525 arch/powerpc/platforms/cell/spufs/file.c 		ctx->state == SPU_STATE_SAVED ? 'S' : 'R',
ctx              2526 arch/powerpc/platforms/cell/spufs/file.c 		ctx->flags,
ctx              2527 arch/powerpc/platforms/cell/spufs/file.c 		ctx->sched_flags,
ctx              2528 arch/powerpc/platforms/cell/spufs/file.c 		ctx->prio,
ctx              2529 arch/powerpc/platforms/cell/spufs/file.c 		ctx->time_slice,
ctx              2530 arch/powerpc/platforms/cell/spufs/file.c 		ctx->spu ? ctx->spu->number : -1,
ctx              2531 arch/powerpc/platforms/cell/spufs/file.c 		!list_empty(&ctx->rq) ? 'q' : ' ',
ctx              2532 arch/powerpc/platforms/cell/spufs/file.c 		ctx->csa.class_0_pending,
ctx              2533 arch/powerpc/platforms/cell/spufs/file.c 		ctx->csa.class_0_dar,
ctx              2534 arch/powerpc/platforms/cell/spufs/file.c 		ctx->csa.class_1_dsisr,
ctx              2536 arch/powerpc/platforms/cell/spufs/file.c 		ctx->ops->runcntl_read(ctx),
ctx              2537 arch/powerpc/platforms/cell/spufs/file.c 		ctx->ops->status_read(ctx));
ctx              2539 arch/powerpc/platforms/cell/spufs/file.c 	mutex_unlock(&ctx->state_mutex);
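Taken together, the file.c entries show one handler template repeated for every spufs file: take the context lock with spu_acquire() (or spu_acquire_saved() when the saved CSA image must be read), go through the ctx->ops indirection, then release. Blocking paths retry through spufs_wait() on a per-context wait queue, which drops the lock while sleeping and, on a signal, returns without retaking it. A minimal sketch of the blocking-read shape under those assumptions (the function name is hypothetical):

	static ssize_t spufs_example_read(struct file *file, char __user *buf,
					  size_t len, loff_t *pos)
	{
		struct spu_context *ctx = file->private_data;
		u32 data;
		int ret;

		ret = spu_acquire(ctx);		/* interruptible lock acquisition */
		if (ret)
			return ret;

		/* sleep until the backend has data; the condition is
		 * re-evaluated with the lock held after every wakeup */
		ret = spufs_wait(ctx->ibox_wq, spu_ibox_read(ctx, &data));
		if (ret)
			return ret;		/* signal: lock already dropped */

		spu_release(ctx);

		if (copy_to_user(buf, &data, sizeof(data)))
			return -EFAULT;
		return sizeof(data);
	}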
ctx                52 arch/powerpc/platforms/cell/spufs/gang.c void spu_gang_add_ctx(struct spu_gang *gang, struct spu_context *ctx)
ctx                55 arch/powerpc/platforms/cell/spufs/gang.c 	ctx->gang = get_spu_gang(gang);
ctx                56 arch/powerpc/platforms/cell/spufs/gang.c 	list_add(&ctx->gang_list, &gang->list);
ctx                61 arch/powerpc/platforms/cell/spufs/gang.c void spu_gang_remove_ctx(struct spu_gang *gang, struct spu_context *ctx)
ctx                64 arch/powerpc/platforms/cell/spufs/gang.c 	WARN_ON(ctx->gang != gang);
ctx                65 arch/powerpc/platforms/cell/spufs/gang.c 	if (!list_empty(&ctx->aff_list)) {
ctx                66 arch/powerpc/platforms/cell/spufs/gang.c 		list_del_init(&ctx->aff_list);
ctx                69 arch/powerpc/platforms/cell/spufs/gang.c 	list_del_init(&ctx->gang_list);
ctx                24 arch/powerpc/platforms/cell/spufs/hw_ops.c static int spu_hw_mbox_read(struct spu_context *ctx, u32 * data)
ctx                26 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx                41 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_mbox_stat_read(struct spu_context *ctx)
ctx                43 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->mb_stat_R);
ctx                46 arch/powerpc/platforms/cell/spufs/hw_ops.c static __poll_t spu_hw_mbox_stat_poll(struct spu_context *ctx, __poll_t events)
ctx                48 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx                82 arch/powerpc/platforms/cell/spufs/hw_ops.c static int spu_hw_ibox_read(struct spu_context *ctx, u32 * data)
ctx                84 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               103 arch/powerpc/platforms/cell/spufs/hw_ops.c static int spu_hw_wbox_write(struct spu_context *ctx, u32 data)
ctx               105 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               124 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_signal1_write(struct spu_context *ctx, u32 data)
ctx               126 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be32(&ctx->spu->problem->signal_notify1, data);
ctx               129 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_signal2_write(struct spu_context *ctx, u32 data)
ctx               131 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be32(&ctx->spu->problem->signal_notify2, data);
ctx               134 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_signal1_type_set(struct spu_context *ctx, u64 val)
ctx               136 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               150 arch/powerpc/platforms/cell/spufs/hw_ops.c static u64 spu_hw_signal1_type_get(struct spu_context *ctx)
ctx               152 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return ((in_be64(&ctx->spu->priv2->spu_cfg_RW) & 1) != 0);
ctx               155 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_signal2_type_set(struct spu_context *ctx, u64 val)
ctx               157 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               171 arch/powerpc/platforms/cell/spufs/hw_ops.c static u64 spu_hw_signal2_type_get(struct spu_context *ctx)
ctx               173 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return ((in_be64(&ctx->spu->priv2->spu_cfg_RW) & 2) != 0);
ctx               176 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_npc_read(struct spu_context *ctx)
ctx               178 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->spu_npc_RW);
ctx               181 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_npc_write(struct spu_context *ctx, u32 val)
ctx               183 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be32(&ctx->spu->problem->spu_npc_RW, val);
ctx               186 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_status_read(struct spu_context *ctx)
ctx               188 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->spu_status_R);
ctx               191 arch/powerpc/platforms/cell/spufs/hw_ops.c static char *spu_hw_get_ls(struct spu_context *ctx)
ctx               193 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return ctx->spu->local_store;
ctx               196 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_privcntl_write(struct spu_context *ctx, u64 val)
ctx               198 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be64(&ctx->spu->priv2->spu_privcntl_RW, val);
ctx               201 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_runcntl_read(struct spu_context *ctx)
ctx               203 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->spu_runcntl_RW);
ctx               206 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_runcntl_write(struct spu_context *ctx, u32 val)
ctx               208 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_lock_irq(&ctx->spu->register_lock);
ctx               210 arch/powerpc/platforms/cell/spufs/hw_ops.c 		spu_hw_privcntl_write(ctx,
ctx               212 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be32(&ctx->spu->problem->spu_runcntl_RW, val);
ctx               213 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_unlock_irq(&ctx->spu->register_lock);
ctx               216 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_runcntl_stop(struct spu_context *ctx)
ctx               218 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_lock_irq(&ctx->spu->register_lock);
ctx               219 arch/powerpc/platforms/cell/spufs/hw_ops.c 	out_be32(&ctx->spu->problem->spu_runcntl_RW, SPU_RUNCNTL_STOP);
ctx               220 arch/powerpc/platforms/cell/spufs/hw_ops.c 	while (in_be32(&ctx->spu->problem->spu_status_R) & SPU_STATUS_RUNNING)
ctx               222 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_unlock_irq(&ctx->spu->register_lock);
ctx               225 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_master_start(struct spu_context *ctx)
ctx               227 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               236 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_master_stop(struct spu_context *ctx)
ctx               238 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu *spu = ctx->spu;
ctx               247 arch/powerpc/platforms/cell/spufs/hw_ops.c static int spu_hw_set_mfc_query(struct spu_context * ctx, u32 mask, u32 mode)
ctx               249 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu_problem __iomem *prob = ctx->spu->problem;
ctx               252 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_lock_irq(&ctx->spu->register_lock);
ctx               260 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_unlock_irq(&ctx->spu->register_lock);
ctx               264 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_read_mfc_tagstatus(struct spu_context * ctx)
ctx               266 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->dma_tagstatus_R);
ctx               269 arch/powerpc/platforms/cell/spufs/hw_ops.c static u32 spu_hw_get_mfc_free_elements(struct spu_context *ctx)
ctx               271 arch/powerpc/platforms/cell/spufs/hw_ops.c 	return in_be32(&ctx->spu->problem->dma_qstatus_R);
ctx               274 arch/powerpc/platforms/cell/spufs/hw_ops.c static int spu_hw_send_mfc_command(struct spu_context *ctx,
ctx               278 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu_problem __iomem *prob = ctx->spu->problem;
ctx               280 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_lock_irq(&ctx->spu->register_lock);
ctx               288 arch/powerpc/platforms/cell/spufs/hw_ops.c 	spin_unlock_irq(&ctx->spu->register_lock);
ctx               300 arch/powerpc/platforms/cell/spufs/hw_ops.c static void spu_hw_restart_dma(struct spu_context *ctx)
ctx               302 arch/powerpc/platforms/cell/spufs/hw_ops.c 	struct spu_priv2 __iomem *priv2 = ctx->spu->priv2;
ctx               304 arch/powerpc/platforms/cell/spufs/hw_ops.c 	if (!test_bit(SPU_CONTEXT_SWITCH_PENDING, &ctx->spu->flags))
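The hw_ops.c entries are the ctx->ops backend for a context loaded on real hardware: each op resolves ctx->spu and performs big-endian MMIO against the problem-state or priv2 areas, taking the SPU's register_lock around read-modify-write sequences. The functions are collected into an ops table at the end of hw_ops.c; an abbreviated sketch of that wiring:

	struct spu_context_ops spu_hw_ops = {
		.mbox_read	= spu_hw_mbox_read,
		.mbox_stat_read	= spu_hw_mbox_stat_read,
		.ibox_read	= spu_hw_ibox_read,
		.wbox_write	= spu_hw_wbox_write,
		.npc_read	= spu_hw_npc_read,
		.npc_write	= spu_hw_npc_write,
		.status_read	= spu_hw_status_read,
		.runcntl_write	= spu_hw_runcntl_write,
		/* ...remaining ops elided... */
	};

This indirection is what keeps the file.c handlers backend-agnostic: when a context is saved, ctx->ops is switched to the saved-state implementation, so the same ctx->ops->...(ctx) call sites serve both the loaded and the saved case.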
ctx               110 arch/powerpc/platforms/cell/spufs/inode.c 		size_t size, struct spu_context *ctx)
ctx               127 arch/powerpc/platforms/cell/spufs/inode.c 	inode->i_private = SPUFS_I(inode)->i_ctx = get_spu_context(ctx);
ctx               183 arch/powerpc/platforms/cell/spufs/inode.c 		struct spu_context *ctx)
ctx               191 arch/powerpc/platforms/cell/spufs/inode.c 					files->mode & mode, files->size, ctx);
ctx               201 arch/powerpc/platforms/cell/spufs/inode.c 	struct spu_context *ctx;
ctx               208 arch/powerpc/platforms/cell/spufs/inode.c 	ctx = SPUFS_I(d_inode(dir))->i_ctx;
ctx               234 arch/powerpc/platforms/cell/spufs/inode.c 	struct spu_context *ctx;
ctx               244 arch/powerpc/platforms/cell/spufs/inode.c 	ctx = alloc_spu_context(SPUFS_I(dir)->i_gang); /* XXX gang */
ctx               245 arch/powerpc/platforms/cell/spufs/inode.c 	SPUFS_I(inode)->i_ctx = ctx;
ctx               246 arch/powerpc/platforms/cell/spufs/inode.c 	if (!ctx) {
ctx               251 arch/powerpc/platforms/cell/spufs/inode.c 	ctx->flags = flags;
ctx               265 arch/powerpc/platforms/cell/spufs/inode.c 					 mode, ctx);
ctx               267 arch/powerpc/platforms/cell/spufs/inode.c 		ret = spufs_fill_dir(dentry, spufs_dir_contents, mode, ctx);
ctx               271 arch/powerpc/platforms/cell/spufs/inode.c 				mode, ctx);
ctx               373 arch/powerpc/platforms/cell/spufs/inode.c spufs_set_affinity(unsigned int flags, struct spu_context *ctx,
ctx               377 arch/powerpc/platforms/cell/spufs/inode.c 		ctx->gang->aff_ref_ctx = ctx;
ctx               382 arch/powerpc/platforms/cell/spufs/inode.c 				&ctx->gang->aff_list_head);
ctx               386 arch/powerpc/platforms/cell/spufs/inode.c 		if (list_is_last(&neighbor->aff_list, &ctx->gang->aff_list_head)
ctx               389 arch/powerpc/platforms/cell/spufs/inode.c 			list_add(&ctx->aff_list, &neighbor->aff_list);
ctx               391 arch/powerpc/platforms/cell/spufs/inode.c 			list_add_tail(&ctx->aff_list, &neighbor->aff_list);
ctx               394 arch/powerpc/platforms/cell/spufs/inode.c 				ctx->aff_head = 1;
ctx               398 arch/powerpc/platforms/cell/spufs/inode.c 		if (!ctx->gang->aff_ref_ctx)
ctx               399 arch/powerpc/platforms/cell/spufs/inode.c 			ctx->gang->aff_ref_ctx = ctx;
ctx               621 arch/powerpc/platforms/cell/spufs/inode.c 	struct spufs_fs_context *ctx = fc->fs_private;
ctx               637 arch/powerpc/platforms/cell/spufs/inode.c 		ctx->uid = uid;
ctx               643 arch/powerpc/platforms/cell/spufs/inode.c 		ctx->gid = gid;
ctx               646 arch/powerpc/platforms/cell/spufs/inode.c 		ctx->mode = result.uint_32 & S_IALLUGO;
ctx               689 arch/powerpc/platforms/cell/spufs/inode.c 	struct spufs_fs_context *ctx = fc->fs_private;
ctx               695 arch/powerpc/platforms/cell/spufs/inode.c 	inode = spufs_new_inode(sb, S_IFDIR | ctx->mode);
ctx               699 arch/powerpc/platforms/cell/spufs/inode.c 	inode->i_uid = ctx->uid;
ctx               700 arch/powerpc/platforms/cell/spufs/inode.c 	inode->i_gid = ctx->gid;
ctx               749 arch/powerpc/platforms/cell/spufs/inode.c 	struct spufs_fs_context *ctx;
ctx               752 arch/powerpc/platforms/cell/spufs/inode.c 	ctx = kzalloc(sizeof(struct spufs_fs_context), GFP_KERNEL);
ctx               753 arch/powerpc/platforms/cell/spufs/inode.c 	if (!ctx)
ctx               760 arch/powerpc/platforms/cell/spufs/inode.c 	ctx->uid = current_uid();
ctx               761 arch/powerpc/platforms/cell/spufs/inode.c 	ctx->gid = current_gid();
ctx               762 arch/powerpc/platforms/cell/spufs/inode.c 	ctx->mode = 0755;
ctx               764 arch/powerpc/platforms/cell/spufs/inode.c 	fc->fs_private = ctx;
ctx               770 arch/powerpc/platforms/cell/spufs/inode.c 	kfree(ctx);
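The inode.c tail above is the mount path under the fs_context API: spufs_init_fs_context() allocates a spufs_fs_context holding uid/gid/mode defaults, the parameter parser overwrites them from uid=/gid=/mode= mount options, and the superblock setup copies them into the root inode. A condensed sketch of the init step, matching the lines listed (illustrative name):

	static int example_init_fs_context(struct fs_context *fc)
	{
		struct spufs_fs_context *ctx;

		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;

		/* defaults; uid=/gid=/mode= mount options override these */
		ctx->uid = current_uid();
		ctx->gid = current_gid();
		ctx->mode = 0755;

		fc->fs_private = ctx;
		return 0;
	}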
ctx                17 arch/powerpc/platforms/cell/spufs/run.c 	struct spu_context *ctx = spu->ctx;
ctx                26 arch/powerpc/platforms/cell/spufs/run.c 	if (ctx) {
ctx                30 arch/powerpc/platforms/cell/spufs/run.c 			ctx->csa.class_0_pending = spu->class_0_pending;
ctx                31 arch/powerpc/platforms/cell/spufs/run.c 			ctx->csa.class_0_dar = spu->class_0_dar;
ctx                34 arch/powerpc/platforms/cell/spufs/run.c 			ctx->csa.class_1_dsisr = spu->class_1_dsisr;
ctx                35 arch/powerpc/platforms/cell/spufs/run.c 			ctx->csa.class_1_dar = spu->class_1_dar;
ctx                45 arch/powerpc/platforms/cell/spufs/run.c 		wake_up_all(&ctx->stop_wq);
ctx                49 arch/powerpc/platforms/cell/spufs/run.c int spu_stopped(struct spu_context *ctx, u32 *stat)
ctx                58 arch/powerpc/platforms/cell/spufs/run.c 	*stat = ctx->ops->status_read(ctx);
ctx                69 arch/powerpc/platforms/cell/spufs/run.c 	if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags))
ctx                72 arch/powerpc/platforms/cell/spufs/run.c 	dsisr = ctx->csa.class_1_dsisr;
ctx                76 arch/powerpc/platforms/cell/spufs/run.c 	if (ctx->csa.class_0_pending)
ctx                82 arch/powerpc/platforms/cell/spufs/run.c static int spu_setup_isolated(struct spu_context *ctx)
ctx               102 arch/powerpc/platforms/cell/spufs/run.c 	spu_unmap_mappings(ctx);
ctx               104 arch/powerpc/platforms/cell/spufs/run.c 	mfc_cntl = &ctx->spu->priv2->mfc_control_RW;
ctx               125 arch/powerpc/platforms/cell/spufs/run.c 	sr1 = spu_mfc_sr1_get(ctx->spu);
ctx               127 arch/powerpc/platforms/cell/spufs/run.c 	spu_mfc_sr1_set(ctx->spu, sr1);
ctx               130 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->signal1_write(ctx, (unsigned long)isolated_loader >> 32);
ctx               131 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->signal2_write(ctx,
ctx               134 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->runcntl_write(ctx,
ctx               139 arch/powerpc/platforms/cell/spufs/run.c 	while (((status = ctx->ops->status_read(ctx)) & status_loading) ==
ctx               154 arch/powerpc/platforms/cell/spufs/run.c 		ctx->ops->runcntl_write(ctx, SPU_RUNCNTL_RUNNABLE);
ctx               162 arch/powerpc/platforms/cell/spufs/run.c 		ctx->ops->runcntl_write(ctx, SPU_RUNCNTL_STOP);
ctx               170 arch/powerpc/platforms/cell/spufs/run.c 	spu_mfc_sr1_set(ctx->spu, sr1);
ctx               176 arch/powerpc/platforms/cell/spufs/run.c static int spu_run_init(struct spu_context *ctx, u32 *npc)
ctx               181 arch/powerpc/platforms/cell/spufs/run.c 	spuctx_switch_state(ctx, SPU_UTIL_SYSTEM);
ctx               187 arch/powerpc/platforms/cell/spufs/run.c 	if (ctx->flags & SPU_CREATE_NOSCHED) {
ctx               188 arch/powerpc/platforms/cell/spufs/run.c 		if (ctx->state == SPU_STATE_SAVED) {
ctx               189 arch/powerpc/platforms/cell/spufs/run.c 			ret = spu_activate(ctx, 0);
ctx               198 arch/powerpc/platforms/cell/spufs/run.c 	if (ctx->flags & SPU_CREATE_ISOLATE) {
ctx               199 arch/powerpc/platforms/cell/spufs/run.c 		if (!(ctx->ops->status_read(ctx) & SPU_STATUS_ISOLATED_STATE)) {
ctx               200 arch/powerpc/platforms/cell/spufs/run.c 			ret = spu_setup_isolated(ctx);
ctx               209 arch/powerpc/platforms/cell/spufs/run.c 		runcntl = ctx->ops->runcntl_read(ctx) &
ctx               221 arch/powerpc/platforms/cell/spufs/run.c 		ctx->ops->privcntl_write(ctx, privcntl);
ctx               222 arch/powerpc/platforms/cell/spufs/run.c 		ctx->ops->npc_write(ctx, *npc);
ctx               225 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->runcntl_write(ctx, runcntl);
ctx               227 arch/powerpc/platforms/cell/spufs/run.c 	if (ctx->flags & SPU_CREATE_NOSCHED) {
ctx               228 arch/powerpc/platforms/cell/spufs/run.c 		spuctx_switch_state(ctx, SPU_UTIL_USER);
ctx               231 arch/powerpc/platforms/cell/spufs/run.c 		if (ctx->state == SPU_STATE_SAVED) {
ctx               232 arch/powerpc/platforms/cell/spufs/run.c 			ret = spu_activate(ctx, 0);
ctx               236 arch/powerpc/platforms/cell/spufs/run.c 			spuctx_switch_state(ctx, SPU_UTIL_USER);
ctx               240 arch/powerpc/platforms/cell/spufs/run.c 	set_bit(SPU_SCHED_SPU_RUN, &ctx->sched_flags);
ctx               244 arch/powerpc/platforms/cell/spufs/run.c static int spu_run_fini(struct spu_context *ctx, u32 *npc,
ctx               249 arch/powerpc/platforms/cell/spufs/run.c 	spu_del_from_rq(ctx);
ctx               251 arch/powerpc/platforms/cell/spufs/run.c 	*status = ctx->ops->status_read(ctx);
ctx               252 arch/powerpc/platforms/cell/spufs/run.c 	*npc = ctx->ops->npc_read(ctx);
ctx               254 arch/powerpc/platforms/cell/spufs/run.c 	spuctx_switch_state(ctx, SPU_UTIL_IDLE_LOADED);
ctx               255 arch/powerpc/platforms/cell/spufs/run.c 	clear_bit(SPU_SCHED_SPU_RUN, &ctx->sched_flags);
ctx               256 arch/powerpc/platforms/cell/spufs/run.c 	spu_switch_log_notify(NULL, ctx, SWITCH_LOG_EXIT, *status);
ctx               257 arch/powerpc/platforms/cell/spufs/run.c 	spu_release(ctx);
ctx               273 arch/powerpc/platforms/cell/spufs/run.c static int spu_handle_restartsys(struct spu_context *ctx, long *spu_ret,
ctx               309 arch/powerpc/platforms/cell/spufs/run.c static int spu_process_callback(struct spu_context *ctx)
ctx               318 arch/powerpc/platforms/cell/spufs/run.c 	npc = ctx->ops->npc_read(ctx) & ~3;
ctx               319 arch/powerpc/platforms/cell/spufs/run.c 	ls = (void __iomem *)ctx->ops->get_ls(ctx);
ctx               331 arch/powerpc/platforms/cell/spufs/run.c 		spu_release(ctx);
ctx               335 arch/powerpc/platforms/cell/spufs/run.c 			ret = spu_handle_restartsys(ctx, &spu_ret, &npc);
ctx               337 arch/powerpc/platforms/cell/spufs/run.c 		mutex_lock(&ctx->state_mutex);
ctx               344 arch/powerpc/platforms/cell/spufs/run.c 	ls = (void __iomem *)ctx->ops->get_ls(ctx);
ctx               348 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->npc_write(ctx, npc);
ctx               349 arch/powerpc/platforms/cell/spufs/run.c 	ctx->ops->runcntl_write(ctx, SPU_RUNCNTL_RUNNABLE);
ctx               353 arch/powerpc/platforms/cell/spufs/run.c long spufs_run_spu(struct spu_context *ctx, u32 *npc, u32 *event)
ctx               359 arch/powerpc/platforms/cell/spufs/run.c 	if (mutex_lock_interruptible(&ctx->run_mutex))
ctx               362 arch/powerpc/platforms/cell/spufs/run.c 	ctx->event_return = 0;
ctx               364 arch/powerpc/platforms/cell/spufs/run.c 	ret = spu_acquire(ctx);
ctx               368 arch/powerpc/platforms/cell/spufs/run.c 	spu_enable_spu(ctx);
ctx               370 arch/powerpc/platforms/cell/spufs/run.c 	spu_update_sched_info(ctx);
ctx               372 arch/powerpc/platforms/cell/spufs/run.c 	ret = spu_run_init(ctx, npc);
ctx               374 arch/powerpc/platforms/cell/spufs/run.c 		spu_release(ctx);
ctx               379 arch/powerpc/platforms/cell/spufs/run.c 		ret = spufs_wait(ctx->stop_wq, spu_stopped(ctx, &status));
ctx               386 arch/powerpc/platforms/cell/spufs/run.c 			mutex_lock(&ctx->state_mutex);
ctx               389 arch/powerpc/platforms/cell/spufs/run.c 		spu = ctx->spu;
ctx               391 arch/powerpc/platforms/cell/spufs/run.c 						&ctx->sched_flags))) {
ctx               393 arch/powerpc/platforms/cell/spufs/run.c 				spu_switch_notify(spu, ctx);
ctx               398 arch/powerpc/platforms/cell/spufs/run.c 		spuctx_switch_state(ctx, SPU_UTIL_SYSTEM);
ctx               402 arch/powerpc/platforms/cell/spufs/run.c 			ret = spu_process_callback(ctx);
ctx               407 arch/powerpc/platforms/cell/spufs/run.c 		ret = spufs_handle_class1(ctx);
ctx               411 arch/powerpc/platforms/cell/spufs/run.c 		ret = spufs_handle_class0(ctx);
ctx               421 arch/powerpc/platforms/cell/spufs/run.c 	spu_disable_spu(ctx);
ctx               422 arch/powerpc/platforms/cell/spufs/run.c 	ret = spu_run_fini(ctx, npc, &status);
ctx               423 arch/powerpc/platforms/cell/spufs/run.c 	spu_yield(ctx);
ctx               427 arch/powerpc/platforms/cell/spufs/run.c 		ctx->stats.libassist++;
ctx               451 arch/powerpc/platforms/cell/spufs/run.c 	*event = ctx->event_return;
ctx               453 arch/powerpc/platforms/cell/spufs/run.c 	mutex_unlock(&ctx->run_mutex);
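Taken together, the run.c lines indexed above trace the user-visible SPU run loop. The following is a condensed sketch of that control flow, using only the helper names that appear in the listing; error paths, event plumbing, and the exact return-value handling are elided, so it should not be read as the verbatim kernel function.

/* Condensed sketch of the spufs_run_spu() flow indexed above.
 * run_mutex serializes runs; spu_acquire() takes ctx->state_mutex.
 * Illustrative only -- fault handling and callbacks elided. */
long spufs_run_spu_sketch(struct spu_context *ctx, u32 *npc, u32 *event)
{
	u32 status;
	int ret;

	if (mutex_lock_interruptible(&ctx->run_mutex))
		return -ERESTARTSYS;		/* assumed error code */

	ctx->event_return = 0;
	ret = spu_acquire(ctx);
	if (ret)
		goto out_unlock;

	spu_update_sched_info(ctx);
	ret = spu_run_init(ctx, npc);		/* writes npc and runcntl */
	if (ret) {
		spu_release(ctx);
		goto out_unlock;
	}

	do {
		/* sleep until the SPU stops, then service the stop */
		ret = spufs_wait(ctx->stop_wq, spu_stopped(ctx, &status));
		if (ret)
			break;
		ret = spufs_handle_class1(ctx);	/* e.g. MFC faults */
	} while (!ret && !(status & SPU_STATUS_STOPPED_BY_STOP));

	ret = spu_run_fini(ctx, npc, &status);	/* reads back npc/status */
	spu_yield(ctx);
	*event = ctx->event_return;
out_unlock:
	mutex_unlock(&ctx->run_mutex);
	return ret ? ret : status;
}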
ctx                85 arch/powerpc/platforms/cell/spufs/sched.c void spu_set_timeslice(struct spu_context *ctx)
ctx                87 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->prio < NORMAL_PRIO)
ctx                88 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE * 4, ctx->prio);
ctx                90 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->time_slice = SCALE_PRIO(DEF_SPU_TIMESLICE, ctx->prio);
ctx                96 arch/powerpc/platforms/cell/spufs/sched.c void __spu_update_sched_info(struct spu_context *ctx)
ctx               102 arch/powerpc/platforms/cell/spufs/sched.c 	BUG_ON(!list_empty(&ctx->rq));
ctx               109 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->tid = current->pid;
ctx               118 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->prio = current->prio;
ctx               120 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->prio = current->static_prio;
ctx               121 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->policy = current->policy;
ctx               131 arch/powerpc/platforms/cell/spufs/sched.c 	cpumask_copy(&ctx->cpus_allowed, current->cpus_ptr);
ctx               134 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->last_ran = raw_smp_processor_id();
ctx               137 arch/powerpc/platforms/cell/spufs/sched.c void spu_update_sched_info(struct spu_context *ctx)
ctx               141 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->state == SPU_STATE_RUNNABLE) {
ctx               142 arch/powerpc/platforms/cell/spufs/sched.c 		node = ctx->spu->node;
ctx               148 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_update_sched_info(ctx);
ctx               151 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_update_sched_info(ctx);
ctx               155 arch/powerpc/platforms/cell/spufs/sched.c static int __node_allowed(struct spu_context *ctx, int node)
ctx               160 arch/powerpc/platforms/cell/spufs/sched.c 		if (cpumask_intersects(mask, &ctx->cpus_allowed))
ctx               167 arch/powerpc/platforms/cell/spufs/sched.c static int node_allowed(struct spu_context *ctx, int node)
ctx               172 arch/powerpc/platforms/cell/spufs/sched.c 	rval = __node_allowed(ctx, node);
ctx               194 arch/powerpc/platforms/cell/spufs/sched.c 				struct spu_context *ctx = spu->ctx;
ctx               196 arch/powerpc/platforms/cell/spufs/sched.c 					&ctx->sched_flags);
ctx               198 arch/powerpc/platforms/cell/spufs/sched.c 				wake_up_all(&ctx->stop_wq);
ctx               210 arch/powerpc/platforms/cell/spufs/sched.c static void spu_bind_context(struct spu *spu, struct spu_context *ctx)
ctx               212 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_trace(spu_bind_context__enter, ctx, spu);
ctx               214 arch/powerpc/platforms/cell/spufs/sched.c 	spuctx_switch_state(ctx, SPU_UTIL_SYSTEM);
ctx               216 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->flags & SPU_CREATE_NOSCHED)
ctx               219 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.slb_flt_base = spu->stats.slb_flt;
ctx               220 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.class2_intr_base = spu->stats.class2_intr;
ctx               222 arch/powerpc/platforms/cell/spufs/sched.c 	spu_associate_mm(spu, ctx->owner);
ctx               225 arch/powerpc/platforms/cell/spufs/sched.c 	spu->ctx = ctx;
ctx               227 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->spu = spu;
ctx               228 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->ops = &spu_hw_ops;
ctx               237 arch/powerpc/platforms/cell/spufs/sched.c 	spu_unmap_mappings(ctx);
ctx               239 arch/powerpc/platforms/cell/spufs/sched.c 	spu_switch_log_notify(spu, ctx, SWITCH_LOG_START, 0);
ctx               240 arch/powerpc/platforms/cell/spufs/sched.c 	spu_restore(&ctx->csa, spu);
ctx               242 arch/powerpc/platforms/cell/spufs/sched.c 	spu_switch_notify(spu, ctx);
ctx               243 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->state = SPU_STATE_RUNNABLE;
ctx               245 arch/powerpc/platforms/cell/spufs/sched.c 	spuctx_switch_state(ctx, SPU_UTIL_USER);
ctx               255 arch/powerpc/platforms/cell/spufs/sched.c 	return (!spu->ctx || !(spu->ctx->flags & SPU_CREATE_NOSCHED));
ctx               260 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu_context *ctx;
ctx               262 arch/powerpc/platforms/cell/spufs/sched.c 	list_for_each_entry(ctx, &gang->aff_list_head, aff_list) {
ctx               263 arch/powerpc/platforms/cell/spufs/sched.c 		if (list_empty(&ctx->aff_list))
ctx               264 arch/powerpc/platforms/cell/spufs/sched.c 			list_add(&ctx->aff_list, &gang->aff_list_head);
ctx               271 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu_context *ctx;
ctx               275 arch/powerpc/platforms/cell/spufs/sched.c 	list_for_each_entry_reverse(ctx, &gang->aff_ref_ctx->aff_list,
ctx               277 arch/powerpc/platforms/cell/spufs/sched.c 		if (&ctx->aff_list == &gang->aff_list_head)
ctx               279 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->aff_offset = offset--;
ctx               283 arch/powerpc/platforms/cell/spufs/sched.c 	list_for_each_entry(ctx, gang->aff_ref_ctx->aff_list.prev, aff_list) {
ctx               284 arch/powerpc/platforms/cell/spufs/sched.c 		if (&ctx->aff_list == &gang->aff_list_head)
ctx               286 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->aff_offset = offset++;
ctx               292 arch/powerpc/platforms/cell/spufs/sched.c static struct spu *aff_ref_location(struct spu_context *ctx, int mem_aff,
ctx               316 arch/powerpc/platforms/cell/spufs/sched.c 		if (!node_allowed(ctx, node))
ctx               322 arch/powerpc/platforms/cell/spufs/sched.c 			if (spu->ctx && spu->ctx->gang && !spu->ctx->aff_offset
ctx               323 arch/powerpc/platforms/cell/spufs/sched.c 					&& spu->ctx->gang->aff_ref_spu)
ctx               324 arch/powerpc/platforms/cell/spufs/sched.c 				available_spus -= spu->ctx->gang->contexts;
ctx               327 arch/powerpc/platforms/cell/spufs/sched.c 		if (available_spus < ctx->gang->contexts) {
ctx               347 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu_context *ctx;
ctx               357 arch/powerpc/platforms/cell/spufs/sched.c 	list_for_each_entry_reverse(ctx, &gang->aff_ref_ctx->aff_list,
ctx               359 arch/powerpc/platforms/cell/spufs/sched.c 		if (&ctx->aff_list == &gang->aff_list_head)
ctx               361 arch/powerpc/platforms/cell/spufs/sched.c 		lowest_offset = ctx->aff_offset;
ctx               398 arch/powerpc/platforms/cell/spufs/sched.c static int has_affinity(struct spu_context *ctx)
ctx               400 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu_gang *gang = ctx->gang;
ctx               402 arch/powerpc/platforms/cell/spufs/sched.c 	if (list_empty(&ctx->aff_list))
ctx               405 arch/powerpc/platforms/cell/spufs/sched.c 	if (atomic_read(&ctx->gang->aff_sched_count) == 0)
ctx               406 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->gang->aff_ref_spu = NULL;
ctx               424 arch/powerpc/platforms/cell/spufs/sched.c static void spu_unbind_context(struct spu *spu, struct spu_context *ctx)
ctx               428 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_trace(spu_unbind_context__enter, ctx, spu);
ctx               430 arch/powerpc/platforms/cell/spufs/sched.c 	spuctx_switch_state(ctx, SPU_UTIL_SYSTEM);
ctx               432 arch/powerpc/platforms/cell/spufs/sched.c  	if (spu->ctx->flags & SPU_CREATE_NOSCHED)
ctx               435 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->gang)
ctx               441 arch/powerpc/platforms/cell/spufs/sched.c 		atomic_dec_if_positive(&ctx->gang->aff_sched_count);
ctx               444 arch/powerpc/platforms/cell/spufs/sched.c 	spu_unmap_mappings(ctx);
ctx               445 arch/powerpc/platforms/cell/spufs/sched.c 	spu_save(&ctx->csa, spu);
ctx               446 arch/powerpc/platforms/cell/spufs/sched.c 	spu_switch_log_notify(spu, ctx, SWITCH_LOG_STOP, 0);
ctx               450 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->state = SPU_STATE_SAVED;
ctx               457 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->ops = &spu_backing_ops;
ctx               459 arch/powerpc/platforms/cell/spufs/sched.c 	spu->ctx = NULL;
ctx               464 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.slb_flt +=
ctx               465 arch/powerpc/platforms/cell/spufs/sched.c 		(spu->stats.slb_flt - ctx->stats.slb_flt_base);
ctx               466 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.class2_intr +=
ctx               467 arch/powerpc/platforms/cell/spufs/sched.c 		(spu->stats.class2_intr - ctx->stats.class2_intr_base);
ctx               470 arch/powerpc/platforms/cell/spufs/sched.c 	spuctx_switch_state(ctx, SPU_UTIL_IDLE_LOADED);
ctx               471 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->spu = NULL;
ctx               473 arch/powerpc/platforms/cell/spufs/sched.c 	if (spu_stopped(ctx, &status))
ctx               474 arch/powerpc/platforms/cell/spufs/sched.c 		wake_up_all(&ctx->stop_wq);
ctx               481 arch/powerpc/platforms/cell/spufs/sched.c static void __spu_add_to_rq(struct spu_context *ctx)
ctx               496 arch/powerpc/platforms/cell/spufs/sched.c 	if (list_empty(&ctx->rq)) {
ctx               497 arch/powerpc/platforms/cell/spufs/sched.c 		list_add_tail(&ctx->rq, &spu_prio->runq[ctx->prio]);
ctx               498 arch/powerpc/platforms/cell/spufs/sched.c 		set_bit(ctx->prio, spu_prio->bitmap);
ctx               504 arch/powerpc/platforms/cell/spufs/sched.c static void spu_add_to_rq(struct spu_context *ctx)
ctx               507 arch/powerpc/platforms/cell/spufs/sched.c 	__spu_add_to_rq(ctx);
ctx               511 arch/powerpc/platforms/cell/spufs/sched.c static void __spu_del_from_rq(struct spu_context *ctx)
ctx               513 arch/powerpc/platforms/cell/spufs/sched.c 	int prio = ctx->prio;
ctx               515 arch/powerpc/platforms/cell/spufs/sched.c 	if (!list_empty(&ctx->rq)) {
ctx               518 arch/powerpc/platforms/cell/spufs/sched.c 		list_del_init(&ctx->rq);
ctx               525 arch/powerpc/platforms/cell/spufs/sched.c void spu_del_from_rq(struct spu_context *ctx)
ctx               528 arch/powerpc/platforms/cell/spufs/sched.c 	__spu_del_from_rq(ctx);
ctx               532 arch/powerpc/platforms/cell/spufs/sched.c static void spu_prio_wait(struct spu_context *ctx)
ctx               541 arch/powerpc/platforms/cell/spufs/sched.c 	BUG_ON(!(ctx->flags & SPU_CREATE_NOSCHED));
ctx               544 arch/powerpc/platforms/cell/spufs/sched.c 	prepare_to_wait_exclusive(&ctx->stop_wq, &wait, TASK_INTERRUPTIBLE);
ctx               546 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_add_to_rq(ctx);
ctx               548 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_unlock(&ctx->state_mutex);
ctx               550 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_lock(&ctx->state_mutex);
ctx               552 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_del_from_rq(ctx);
ctx               556 arch/powerpc/platforms/cell/spufs/sched.c 	remove_wait_queue(&ctx->stop_wq, &wait);
ctx               559 arch/powerpc/platforms/cell/spufs/sched.c static struct spu *spu_get_idle(struct spu_context *ctx)
ctx               564 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_nospu_trace(spu_get_idle__enter, ctx);
ctx               566 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->gang) {
ctx               567 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_lock(&ctx->gang->aff_mutex);
ctx               568 arch/powerpc/platforms/cell/spufs/sched.c 		if (has_affinity(ctx)) {
ctx               569 arch/powerpc/platforms/cell/spufs/sched.c 			aff_ref_spu = ctx->gang->aff_ref_spu;
ctx               570 arch/powerpc/platforms/cell/spufs/sched.c 			atomic_inc(&ctx->gang->aff_sched_count);
ctx               571 arch/powerpc/platforms/cell/spufs/sched.c 			mutex_unlock(&ctx->gang->aff_mutex);
ctx               575 arch/powerpc/platforms/cell/spufs/sched.c 			spu = ctx_location(aff_ref_spu, ctx->aff_offset, node);
ctx               580 arch/powerpc/platforms/cell/spufs/sched.c 			atomic_dec(&ctx->gang->aff_sched_count);
ctx               583 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_unlock(&ctx->gang->aff_mutex);
ctx               588 arch/powerpc/platforms/cell/spufs/sched.c 		if (!node_allowed(ctx, node))
ctx               600 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_nospu_trace(spu_get_idle__not_found, ctx);
ctx               606 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_trace(spu_get_idle__found, ctx, spu);
ctx               617 arch/powerpc/platforms/cell/spufs/sched.c static struct spu *find_victim(struct spu_context *ctx)
ctx               623 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_nospu_trace(spu_find_victim__enter, ctx);
ctx               636 arch/powerpc/platforms/cell/spufs/sched.c 		if (!node_allowed(ctx, node))
ctx               641 arch/powerpc/platforms/cell/spufs/sched.c 			struct spu_context *tmp = spu->ctx;
ctx               643 arch/powerpc/platforms/cell/spufs/sched.c 			if (tmp && tmp->prio > ctx->prio &&
ctx               646 arch/powerpc/platforms/cell/spufs/sched.c 				victim = spu->ctx;
ctx               671 arch/powerpc/platforms/cell/spufs/sched.c 			if (!spu || victim->prio <= ctx->prio) {
ctx               683 arch/powerpc/platforms/cell/spufs/sched.c 			spu_context_trace(__spu_deactivate__unload, ctx, spu);
ctx               705 arch/powerpc/platforms/cell/spufs/sched.c static void __spu_schedule(struct spu *spu, struct spu_context *ctx)
ctx               710 arch/powerpc/platforms/cell/spufs/sched.c 	spu_set_timeslice(ctx);
ctx               713 arch/powerpc/platforms/cell/spufs/sched.c 	if (spu->ctx == NULL) {
ctx               714 arch/powerpc/platforms/cell/spufs/sched.c 		spu_bind_context(spu, ctx);
ctx               722 arch/powerpc/platforms/cell/spufs/sched.c 		wake_up_all(&ctx->run_wq);
ctx               724 arch/powerpc/platforms/cell/spufs/sched.c 		spu_add_to_rq(ctx);
ctx               727 arch/powerpc/platforms/cell/spufs/sched.c static void spu_schedule(struct spu *spu, struct spu_context *ctx)
ctx               731 arch/powerpc/platforms/cell/spufs/sched.c 	mutex_lock(&ctx->state_mutex);
ctx               732 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->state == SPU_STATE_SAVED)
ctx               733 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_schedule(spu, ctx);
ctx               734 arch/powerpc/platforms/cell/spufs/sched.c 	spu_release(ctx);
ctx               750 arch/powerpc/platforms/cell/spufs/sched.c static void spu_unschedule(struct spu *spu, struct spu_context *ctx,
ctx               759 arch/powerpc/platforms/cell/spufs/sched.c 	spu_unbind_context(spu, ctx);
ctx               760 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.invol_ctx_switch++;
ctx               774 arch/powerpc/platforms/cell/spufs/sched.c int spu_activate(struct spu_context *ctx, unsigned long flags)
ctx               784 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->spu)
ctx               791 arch/powerpc/platforms/cell/spufs/sched.c 	spu = spu_get_idle(ctx);
ctx               796 arch/powerpc/platforms/cell/spufs/sched.c 	if (!spu && rt_prio(ctx->prio))
ctx               797 arch/powerpc/platforms/cell/spufs/sched.c 		spu = find_victim(ctx);
ctx               801 arch/powerpc/platforms/cell/spufs/sched.c 		runcntl = ctx->ops->runcntl_read(ctx);
ctx               802 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_schedule(spu, ctx);
ctx               804 arch/powerpc/platforms/cell/spufs/sched.c 			spuctx_switch_state(ctx, SPU_UTIL_USER);
ctx               809 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->flags & SPU_CREATE_NOSCHED) {
ctx               810 arch/powerpc/platforms/cell/spufs/sched.c 		spu_prio_wait(ctx);
ctx               814 arch/powerpc/platforms/cell/spufs/sched.c 	spu_add_to_rq(ctx);
ctx               827 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu_context *ctx;
ctx               835 arch/powerpc/platforms/cell/spufs/sched.c 		list_for_each_entry(ctx, rq, rq) {
ctx               837 arch/powerpc/platforms/cell/spufs/sched.c 			if (__node_allowed(ctx, node)) {
ctx               838 arch/powerpc/platforms/cell/spufs/sched.c 				__spu_del_from_rq(ctx);
ctx               844 arch/powerpc/platforms/cell/spufs/sched.c 	ctx = NULL;
ctx               847 arch/powerpc/platforms/cell/spufs/sched.c 	return ctx;
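The grab_runnable_context() lines above pair the per-priority run lists with a priority bitmap (set in __spu_add_to_rq(), cleared when a list drains in __spu_del_from_rq()), so finding the best runnable context costs a find-first-bit plus a list-head lookup. A minimal sketch of that lookup, with the spu_prio layout inferred from the listing and all locking omitted:

/* Sketch of the bitmap-runqueue lookup indexed above.  Assumes the
 * spu_prio->runq[]/bitmap layout shown in sched.c; no locking here. */
static struct spu_context *pick_runnable_sketch(int max_prio)
{
	int best = find_first_bit(spu_prio->bitmap, max_prio);

	if (best < max_prio)
		return list_first_entry(&spu_prio->runq[best],
					struct spu_context, rq);
	return NULL;	/* nothing runnable below max_prio */
}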
ctx               850 arch/powerpc/platforms/cell/spufs/sched.c static int __spu_deactivate(struct spu_context *ctx, int force, int max_prio)
ctx               852 arch/powerpc/platforms/cell/spufs/sched.c 	struct spu *spu = ctx->spu;
ctx               858 arch/powerpc/platforms/cell/spufs/sched.c 			spu_unschedule(spu, ctx, new == NULL);
ctx               863 arch/powerpc/platforms/cell/spufs/sched.c 					spu_release(ctx);
ctx               867 arch/powerpc/platforms/cell/spufs/sched.c 					mutex_lock(&ctx->state_mutex);
ctx               883 arch/powerpc/platforms/cell/spufs/sched.c void spu_deactivate(struct spu_context *ctx)
ctx               885 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_nospu_trace(spu_deactivate__enter, ctx);
ctx               886 arch/powerpc/platforms/cell/spufs/sched.c 	__spu_deactivate(ctx, 1, MAX_PRIO);
ctx               897 arch/powerpc/platforms/cell/spufs/sched.c void spu_yield(struct spu_context *ctx)
ctx               899 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_nospu_trace(spu_yield__enter, ctx);
ctx               900 arch/powerpc/platforms/cell/spufs/sched.c 	if (!(ctx->flags & SPU_CREATE_NOSCHED)) {
ctx               901 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_lock(&ctx->state_mutex);
ctx               902 arch/powerpc/platforms/cell/spufs/sched.c 		__spu_deactivate(ctx, 0, MAX_PRIO);
ctx               903 arch/powerpc/platforms/cell/spufs/sched.c 		mutex_unlock(&ctx->state_mutex);
ctx               907 arch/powerpc/platforms/cell/spufs/sched.c static noinline void spusched_tick(struct spu_context *ctx)
ctx               912 arch/powerpc/platforms/cell/spufs/sched.c 	if (spu_acquire(ctx))
ctx               915 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->state != SPU_STATE_RUNNABLE)
ctx               917 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->flags & SPU_CREATE_NOSCHED)
ctx               919 arch/powerpc/platforms/cell/spufs/sched.c 	if (ctx->policy == SCHED_FIFO)
ctx               922 arch/powerpc/platforms/cell/spufs/sched.c 	if (--ctx->time_slice && test_bit(SPU_SCHED_SPU_RUN, &ctx->sched_flags))
ctx               925 arch/powerpc/platforms/cell/spufs/sched.c 	spu = ctx->spu;
ctx               927 arch/powerpc/platforms/cell/spufs/sched.c 	spu_context_trace(spusched_tick__preempt, ctx, spu);
ctx               929 arch/powerpc/platforms/cell/spufs/sched.c 	new = grab_runnable_context(ctx->prio + 1, spu->node);
ctx               931 arch/powerpc/platforms/cell/spufs/sched.c 		spu_unschedule(spu, ctx, 0);
ctx               932 arch/powerpc/platforms/cell/spufs/sched.c 		if (test_bit(SPU_SCHED_SPU_RUN, &ctx->sched_flags))
ctx               933 arch/powerpc/platforms/cell/spufs/sched.c 			spu_add_to_rq(ctx);
ctx               935 arch/powerpc/platforms/cell/spufs/sched.c 		spu_context_nospu_trace(spusched_tick__newslice, ctx);
ctx               936 arch/powerpc/platforms/cell/spufs/sched.c 		if (!ctx->time_slice)
ctx               937 arch/powerpc/platforms/cell/spufs/sched.c 			ctx->time_slice++;
ctx               940 arch/powerpc/platforms/cell/spufs/sched.c 	spu_release(ctx);
ctx              1008 arch/powerpc/platforms/cell/spufs/sched.c 				struct spu_context *ctx = spu->ctx;
ctx              1010 arch/powerpc/platforms/cell/spufs/sched.c 				if (ctx) {
ctx              1011 arch/powerpc/platforms/cell/spufs/sched.c 					get_spu_context(ctx);
ctx              1013 arch/powerpc/platforms/cell/spufs/sched.c 					spusched_tick(ctx);
ctx              1015 arch/powerpc/platforms/cell/spufs/sched.c 					put_spu_context(ctx);
ctx              1025 arch/powerpc/platforms/cell/spufs/sched.c void spuctx_switch_state(struct spu_context *ctx,
ctx              1035 arch/powerpc/platforms/cell/spufs/sched.c 	delta = curtime - ctx->stats.tstamp;
ctx              1037 arch/powerpc/platforms/cell/spufs/sched.c 	WARN_ON(!mutex_is_locked(&ctx->state_mutex));
ctx              1040 arch/powerpc/platforms/cell/spufs/sched.c 	spu = ctx->spu;
ctx              1041 arch/powerpc/platforms/cell/spufs/sched.c 	old_state = ctx->stats.util_state;
ctx              1042 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.util_state = new_state;
ctx              1043 arch/powerpc/platforms/cell/spufs/sched.c 	ctx->stats.tstamp = curtime;
ctx              1049 arch/powerpc/platforms/cell/spufs/sched.c 		ctx->stats.times[old_state] += delta;
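spuctx_switch_state(), indexed just above, implements a simple charge-on-transition scheme: the time elapsed since the last transition is added to the bucket for the outgoing utilization state before the new state and timestamp are recorded. A minimal model of that bookkeeping, with the field names taken from the listing and everything else assumed:

/* Model of the state-time accounting above: charge the elapsed delta
 * to the old utilization state, then stamp the new one.  Sketch only. */
void switch_state_sketch(struct spu_context *ctx,
			 enum spu_utilization_state new_state)
{
	unsigned long long curtime = ktime_get_ns();
	signed long long delta = curtime - ctx->stats.tstamp;

	WARN_ON(!mutex_is_locked(&ctx->state_mutex));

	if (delta >= 0) {	/* guard against a backwards clock */
		ctx->stats.times[ctx->stats.util_state] += delta;
		ctx->stats.util_state = new_state;
		ctx->stats.tstamp = curtime;
	}
}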
ctx               173 arch/powerpc/platforms/cell/spufs/spufs.h 	int (*mbox_read) (struct spu_context * ctx, u32 * data);
ctx               174 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32(*mbox_stat_read) (struct spu_context * ctx);
ctx               175 arch/powerpc/platforms/cell/spufs/spufs.h 	__poll_t (*mbox_stat_poll)(struct spu_context *ctx, __poll_t events);
ctx               176 arch/powerpc/platforms/cell/spufs/spufs.h 	int (*ibox_read) (struct spu_context * ctx, u32 * data);
ctx               177 arch/powerpc/platforms/cell/spufs/spufs.h 	int (*wbox_write) (struct spu_context * ctx, u32 data);
ctx               178 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32(*signal1_read) (struct spu_context * ctx);
ctx               179 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*signal1_write) (struct spu_context * ctx, u32 data);
ctx               180 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32(*signal2_read) (struct spu_context * ctx);
ctx               181 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*signal2_write) (struct spu_context * ctx, u32 data);
ctx               182 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*signal1_type_set) (struct spu_context * ctx, u64 val);
ctx               183 arch/powerpc/platforms/cell/spufs/spufs.h 	 u64(*signal1_type_get) (struct spu_context * ctx);
ctx               184 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*signal2_type_set) (struct spu_context * ctx, u64 val);
ctx               185 arch/powerpc/platforms/cell/spufs/spufs.h 	 u64(*signal2_type_get) (struct spu_context * ctx);
ctx               186 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32(*npc_read) (struct spu_context * ctx);
ctx               187 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*npc_write) (struct spu_context * ctx, u32 data);
ctx               188 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32(*status_read) (struct spu_context * ctx);
ctx               189 arch/powerpc/platforms/cell/spufs/spufs.h 	char*(*get_ls) (struct spu_context * ctx);
ctx               190 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*privcntl_write) (struct spu_context *ctx, u64 data);
ctx               191 arch/powerpc/platforms/cell/spufs/spufs.h 	 u32 (*runcntl_read) (struct spu_context * ctx);
ctx               192 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*runcntl_write) (struct spu_context * ctx, u32 data);
ctx               193 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*runcntl_stop) (struct spu_context * ctx);
ctx               194 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*master_start) (struct spu_context * ctx);
ctx               195 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*master_stop) (struct spu_context * ctx);
ctx               196 arch/powerpc/platforms/cell/spufs/spufs.h 	int (*set_mfc_query)(struct spu_context * ctx, u32 mask, u32 mode);
ctx               197 arch/powerpc/platforms/cell/spufs/spufs.h 	u32 (*read_mfc_tagstatus)(struct spu_context * ctx);
ctx               198 arch/powerpc/platforms/cell/spufs/spufs.h 	u32 (*get_mfc_free_elements)(struct spu_context *ctx);
ctx               199 arch/powerpc/platforms/cell/spufs/spufs.h 	int (*send_mfc_command)(struct spu_context * ctx,
ctx               201 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*dma_info_read) (struct spu_context * ctx,
ctx               203 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*proxydma_info_read) (struct spu_context * ctx,
ctx               205 arch/powerpc/platforms/cell/spufs/spufs.h 	void (*restart_dma)(struct spu_context *ctx);
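The ops table declared above is what keeps callers indifferent to where a context lives: the sched.c lines earlier show bind installing &spu_hw_ops and unbind falling back to &spu_backing_ops, so every call site goes through ctx->ops and works whether the context is on silicon or in a saved state area. A one-function illustration:

/* Illustration of the vtable dispatch above: the same call site works
 * whether ctx is bound (&spu_hw_ops) or saved (&spu_backing_ops). */
static u32 read_npc_sketch(struct spu_context *ctx)
{
	return ctx->ops->npc_read(ctx);	/* caller holds state_mutex */
}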
ctx               234 arch/powerpc/platforms/cell/spufs/spufs.h long spufs_run_spu(struct spu_context *ctx, u32 *npc, u32 *status);
ctx               247 arch/powerpc/platforms/cell/spufs/spufs.h void spu_gang_remove_ctx(struct spu_gang *gang, struct spu_context *ctx);
ctx               248 arch/powerpc/platforms/cell/spufs/spufs.h void spu_gang_add_ctx(struct spu_gang *gang, struct spu_context *ctx);
ctx               251 arch/powerpc/platforms/cell/spufs/spufs.h int spufs_handle_class1(struct spu_context *ctx);
ctx               252 arch/powerpc/platforms/cell/spufs/spufs.h int spufs_handle_class0(struct spu_context *ctx);
ctx               255 arch/powerpc/platforms/cell/spufs/spufs.h struct spu *affinity_check(struct spu_context *ctx);
ctx               259 arch/powerpc/platforms/cell/spufs/spufs.h static inline int __must_check spu_acquire(struct spu_context *ctx)
ctx               261 arch/powerpc/platforms/cell/spufs/spufs.h 	return mutex_lock_interruptible(&ctx->state_mutex);
ctx               264 arch/powerpc/platforms/cell/spufs/spufs.h static inline void spu_release(struct spu_context *ctx)
ctx               266 arch/powerpc/platforms/cell/spufs/spufs.h 	mutex_unlock(&ctx->state_mutex);
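These two inlines spell out the locking discipline used throughout the files indexed above: every ctx->ops call and every ctx->state check sits between an interruptible spu_acquire() and a spu_release(). A minimal usage sketch mirroring the spu_schedule() pattern from the sched.c lines earlier:

/* Sketch of the acquire/check/release pattern; mirrors spu_schedule()
 * in the sched.c lines indexed above.  Illustrative only. */
static int touch_saved_ctx_sketch(struct spu_context *ctx)
{
	int ret = spu_acquire(ctx);	/* interruptible; may fail */

	if (ret)
		return ret;
	if (ctx->state == SPU_STATE_SAVED) {
		/* safe to use ctx->ops / ctx->csa here */
	}
	spu_release(ctx);
	return 0;
}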
ctx               271 arch/powerpc/platforms/cell/spufs/spufs.h struct spu_context * get_spu_context(struct spu_context *ctx);
ctx               272 arch/powerpc/platforms/cell/spufs/spufs.h int put_spu_context(struct spu_context *ctx);
ctx               273 arch/powerpc/platforms/cell/spufs/spufs.h void spu_unmap_mappings(struct spu_context *ctx);
ctx               275 arch/powerpc/platforms/cell/spufs/spufs.h void spu_forget(struct spu_context *ctx);
ctx               276 arch/powerpc/platforms/cell/spufs/spufs.h int __must_check spu_acquire_saved(struct spu_context *ctx);
ctx               277 arch/powerpc/platforms/cell/spufs/spufs.h void spu_release_saved(struct spu_context *ctx);
ctx               279 arch/powerpc/platforms/cell/spufs/spufs.h int spu_stopped(struct spu_context *ctx, u32 * stat);
ctx               280 arch/powerpc/platforms/cell/spufs/spufs.h void spu_del_from_rq(struct spu_context *ctx);
ctx               281 arch/powerpc/platforms/cell/spufs/spufs.h int spu_activate(struct spu_context *ctx, unsigned long flags);
ctx               282 arch/powerpc/platforms/cell/spufs/spufs.h void spu_deactivate(struct spu_context *ctx);
ctx               283 arch/powerpc/platforms/cell/spufs/spufs.h void spu_yield(struct spu_context *ctx);
ctx               284 arch/powerpc/platforms/cell/spufs/spufs.h void spu_switch_notify(struct spu *spu, struct spu_context *ctx);
ctx               285 arch/powerpc/platforms/cell/spufs/spufs.h void spu_switch_log_notify(struct spu *spu, struct spu_context *ctx,
ctx               287 arch/powerpc/platforms/cell/spufs/spufs.h void spu_set_timeslice(struct spu_context *ctx);
ctx               288 arch/powerpc/platforms/cell/spufs/spufs.h void spu_update_sched_info(struct spu_context *ctx);
ctx               289 arch/powerpc/platforms/cell/spufs/spufs.h void __spu_update_sched_info(struct spu_context *ctx);
ctx               313 arch/powerpc/platforms/cell/spufs/spufs.h 		spu_release(ctx);					\
ctx               319 arch/powerpc/platforms/cell/spufs/spufs.h 		__ret = spu_acquire(ctx);				\
ctx               327 arch/powerpc/platforms/cell/spufs/spufs.h size_t spu_wbox_write(struct spu_context *ctx, u32 data);
ctx               328 arch/powerpc/platforms/cell/spufs/spufs.h size_t spu_ibox_read(struct spu_context *ctx, u32 *data);
ctx               340 arch/powerpc/platforms/cell/spufs/spufs.h 	ssize_t (*read)(struct spu_context *ctx,
ctx               342 arch/powerpc/platforms/cell/spufs/spufs.h 	u64 (*get)(struct spu_context *ctx);
ctx               357 arch/powerpc/platforms/cell/spufs/spufs.h extern void spuctx_switch_state(struct spu_context *ctx,
ctx                12 arch/powerpc/platforms/cell/spufs/sputrace.h 	TP_PROTO(struct spu_context *ctx, struct spu *spu, const char *name),
ctx                13 arch/powerpc/platforms/cell/spufs/sputrace.h 	TP_ARGS(ctx, spu, name),
ctx                23 arch/powerpc/platforms/cell/spufs/sputrace.h 		__entry->owner_tid = ctx->tid;
ctx                31 arch/powerpc/platforms/cell/spufs/sputrace.h #define spu_context_trace(name, ctx, spu) \
ctx                32 arch/powerpc/platforms/cell/spufs/sputrace.h 	trace_spufs_context(ctx, spu, __stringify(name))
ctx                33 arch/powerpc/platforms/cell/spufs/sputrace.h #define spu_context_nospu_trace(name, ctx) \
ctx                34 arch/powerpc/platforms/cell/spufs/sputrace.h 	trace_spufs_context(ctx, NULL, __stringify(name))
ctx              1700 arch/powerpc/platforms/cell/spufs/switch.c 	struct spu_context *ctx = spu->ctx;
ctx              1702 arch/powerpc/platforms/cell/spufs/switch.c 	spu_cpu_affinity_set(spu, ctx->last_ran);
ctx               442 arch/powerpc/platforms/ps3/spu.c static void ps3_enable_spu(struct spu_context *ctx)
ctx               446 arch/powerpc/platforms/ps3/spu.c static void ps3_disable_spu(struct spu_context *ctx)
ctx               448 arch/powerpc/platforms/ps3/spu.c 	ctx->ops->runcntl_stop(ctx);
ctx              4084 arch/powerpc/xmon/xmon.c 	DUMP_FIELD(spu, "0x%p", ctx);
ctx                89 arch/riscv/net/bpf_jit_comp.c 	struct rv_jit_context ctx;
ctx                92 arch/riscv/net/bpf_jit_comp.c static u8 bpf_to_rv_reg(int bpf_reg, struct rv_jit_context *ctx)
ctx               103 arch/riscv/net/bpf_jit_comp.c 		__set_bit(reg, &ctx->flags);
ctx               108 arch/riscv/net/bpf_jit_comp.c static bool seen_reg(int reg, struct rv_jit_context *ctx)
ctx               118 arch/riscv/net/bpf_jit_comp.c 		return test_bit(reg, &ctx->flags);
ctx               123 arch/riscv/net/bpf_jit_comp.c static void mark_fp(struct rv_jit_context *ctx)
ctx               125 arch/riscv/net/bpf_jit_comp.c 	__set_bit(RV_CTX_F_SEEN_S5, &ctx->flags);
ctx               128 arch/riscv/net/bpf_jit_comp.c static void mark_call(struct rv_jit_context *ctx)
ctx               130 arch/riscv/net/bpf_jit_comp.c 	__set_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
ctx               133 arch/riscv/net/bpf_jit_comp.c static bool seen_call(struct rv_jit_context *ctx)
ctx               135 arch/riscv/net/bpf_jit_comp.c 	return test_bit(RV_CTX_F_SEEN_CALL, &ctx->flags);
ctx               138 arch/riscv/net/bpf_jit_comp.c static void mark_tail_call(struct rv_jit_context *ctx)
ctx               140 arch/riscv/net/bpf_jit_comp.c 	__set_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
ctx               143 arch/riscv/net/bpf_jit_comp.c static bool seen_tail_call(struct rv_jit_context *ctx)
ctx               145 arch/riscv/net/bpf_jit_comp.c 	return test_bit(RV_CTX_F_SEEN_TAIL_CALL, &ctx->flags);
ctx               148 arch/riscv/net/bpf_jit_comp.c static u8 rv_tail_call_reg(struct rv_jit_context *ctx)
ctx               150 arch/riscv/net/bpf_jit_comp.c 	mark_tail_call(ctx);
ctx               152 arch/riscv/net/bpf_jit_comp.c 	if (seen_call(ctx)) {
ctx               153 arch/riscv/net/bpf_jit_comp.c 		__set_bit(RV_CTX_F_SEEN_S6, &ctx->flags);
ctx               159 arch/riscv/net/bpf_jit_comp.c static void emit(const u32 insn, struct rv_jit_context *ctx)
ctx               161 arch/riscv/net/bpf_jit_comp.c 	if (ctx->insns)
ctx               162 arch/riscv/net/bpf_jit_comp.c 		ctx->insns[ctx->ninsns] = insn;
ctx               164 arch/riscv/net/bpf_jit_comp.c 	ctx->ninsns++;
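emit() above is the hinge of a two-pass JIT: with ctx->insns == NULL it only bumps ctx->ninsns, so one pass over the BPF program measures the image (and fills ctx->offset[]), and a second pass with a real buffer emits identical code into it. A minimal sketch of that driver sequence; bpf_jit_alloc_exec_sketch() is a stand-in name for whatever allocator is used, not a real API:

/* Two-pass use of emit(): pass 1 counts, pass 2 writes.  The allocator
 * name is hypothetical; emit() and the ctx fields are from the listing. */
static int jit_two_pass_sketch(struct rv_jit_context *ctx)
{
	ctx->insns = NULL;	/* pass 1: emit() only counts */
	ctx->ninsns = 0;
	emit(rv_addi(RV_REG_A0, RV_REG_A0, 0), ctx);	/* ...all insns... */

	ctx->insns = bpf_jit_alloc_exec_sketch(ctx->ninsns * sizeof(u32));
	if (!ctx->insns)
		return -ENOMEM;

	ctx->ninsns = 0;	/* pass 2: same sequence, now stored */
	emit(rv_addi(RV_REG_A0, RV_REG_A0, 0), ctx);
	return 0;
}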
ctx               514 arch/riscv/net/bpf_jit_comp.c static void emit_imm(u8 rd, s64 val, struct rv_jit_context *ctx)
ctx               531 arch/riscv/net/bpf_jit_comp.c 			emit(rv_lui(rd, upper), ctx);
ctx               534 arch/riscv/net/bpf_jit_comp.c 			emit(rv_addi(rd, RV_REG_ZERO, lower), ctx);
ctx               538 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addiw(rd, rd, lower), ctx);
ctx               546 arch/riscv/net/bpf_jit_comp.c 	emit_imm(rd, upper, ctx);
ctx               548 arch/riscv/net/bpf_jit_comp.c 	emit(rv_slli(rd, rd, shift), ctx);
ctx               550 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(rd, rd, lower), ctx);
ctx               553 arch/riscv/net/bpf_jit_comp.c static int rv_offset(int bpf_to, int bpf_from, struct rv_jit_context *ctx)
ctx               555 arch/riscv/net/bpf_jit_comp.c 	int from = ctx->offset[bpf_from] - 1, to = ctx->offset[bpf_to];
ctx               560 arch/riscv/net/bpf_jit_comp.c static int epilogue_offset(struct rv_jit_context *ctx)
ctx               562 arch/riscv/net/bpf_jit_comp.c 	int to = ctx->epilogue_offset, from = ctx->ninsns;
ctx               567 arch/riscv/net/bpf_jit_comp.c static void __build_epilogue(u8 reg, struct rv_jit_context *ctx)
ctx               569 arch/riscv/net/bpf_jit_comp.c 	int stack_adjust = ctx->stack_size, store_offset = stack_adjust - 8;
ctx               571 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_RA, ctx)) {
ctx               572 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_RA, store_offset, RV_REG_SP), ctx);
ctx               575 arch/riscv/net/bpf_jit_comp.c 	emit(rv_ld(RV_REG_FP, store_offset, RV_REG_SP), ctx);
ctx               577 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S1, ctx)) {
ctx               578 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S1, store_offset, RV_REG_SP), ctx);
ctx               581 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S2, ctx)) {
ctx               582 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S2, store_offset, RV_REG_SP), ctx);
ctx               585 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S3, ctx)) {
ctx               586 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S3, store_offset, RV_REG_SP), ctx);
ctx               589 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S4, ctx)) {
ctx               590 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S4, store_offset, RV_REG_SP), ctx);
ctx               593 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S5, ctx)) {
ctx               594 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S5, store_offset, RV_REG_SP), ctx);
ctx               597 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S6, ctx)) {
ctx               598 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(RV_REG_S6, store_offset, RV_REG_SP), ctx);
ctx               602 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_SP, RV_REG_SP, stack_adjust), ctx);
ctx               605 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(RV_REG_A0, RV_REG_A5, 0), ctx);
ctx               606 arch/riscv/net/bpf_jit_comp.c 	emit(rv_jalr(RV_REG_ZERO, reg, 0), ctx);
ctx               609 arch/riscv/net/bpf_jit_comp.c static void emit_zext_32(u8 reg, struct rv_jit_context *ctx)
ctx               611 arch/riscv/net/bpf_jit_comp.c 	emit(rv_slli(reg, reg, 32), ctx);
ctx               612 arch/riscv/net/bpf_jit_comp.c 	emit(rv_srli(reg, reg, 32), ctx);
ctx               615 arch/riscv/net/bpf_jit_comp.c static int emit_bpf_tail_call(int insn, struct rv_jit_context *ctx)
ctx               617 arch/riscv/net/bpf_jit_comp.c 	int tc_ninsn, off, start_insn = ctx->ninsns;
ctx               618 arch/riscv/net/bpf_jit_comp.c 	u8 tcc = rv_tail_call_reg(ctx);
ctx               627 arch/riscv/net/bpf_jit_comp.c 	tc_ninsn = insn ? ctx->offset[insn] - ctx->offset[insn - 1] :
ctx               628 arch/riscv/net/bpf_jit_comp.c 		   ctx->offset[0];
ctx               629 arch/riscv/net/bpf_jit_comp.c 	emit_zext_32(RV_REG_A2, ctx);
ctx               634 arch/riscv/net/bpf_jit_comp.c 	emit(rv_lwu(RV_REG_T1, off, RV_REG_A1), ctx);
ctx               635 arch/riscv/net/bpf_jit_comp.c 	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
ctx               638 arch/riscv/net/bpf_jit_comp.c 	emit(rv_bgeu(RV_REG_A2, RV_REG_T1, off >> 1), ctx);
ctx               643 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_T1, tcc, -1), ctx);
ctx               644 arch/riscv/net/bpf_jit_comp.c 	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
ctx               647 arch/riscv/net/bpf_jit_comp.c 	emit(rv_blt(tcc, RV_REG_ZERO, off >> 1), ctx);
ctx               653 arch/riscv/net/bpf_jit_comp.c 	emit(rv_slli(RV_REG_T2, RV_REG_A2, 3), ctx);
ctx               654 arch/riscv/net/bpf_jit_comp.c 	emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_A1), ctx);
ctx               658 arch/riscv/net/bpf_jit_comp.c 	emit(rv_ld(RV_REG_T2, off, RV_REG_T2), ctx);
ctx               659 arch/riscv/net/bpf_jit_comp.c 	off = (tc_ninsn - (ctx->ninsns - start_insn)) << 2;
ctx               662 arch/riscv/net/bpf_jit_comp.c 	emit(rv_beq(RV_REG_T2, RV_REG_ZERO, off >> 1), ctx);
ctx               668 arch/riscv/net/bpf_jit_comp.c 	emit(rv_ld(RV_REG_T3, off, RV_REG_T2), ctx);
ctx               669 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_T3, RV_REG_T3, 4), ctx);
ctx               670 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_TCC, RV_REG_T1, 0), ctx);
ctx               671 arch/riscv/net/bpf_jit_comp.c 	__build_epilogue(RV_REG_T3, ctx);
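A recurring idiom inside emit_bpf_tail_call() above: forward branch targets are computed as the number of instructions still to be emitted in this slot, shifted left by 2 to get bytes, then passed to the branch emitters as off >> 1, because RISC-V branch encodings take their immediate in 2-byte units. A small standalone check of that arithmetic (the concrete numbers are invented for illustration):

#include <assert.h>

/* Worked example of the branch-offset arithmetic used above. */
int main(void)
{
	int tc_ninsn = 10;		/* insn budget for this BPF slot  */
	int emitted = 3;		/* ctx->ninsns - start_insn so far */
	int off = (tc_ninsn - emitted) << 2;	/* 7 insns -> 28 bytes */

	assert(off == 28);
	assert((off >> 1) == 14);	/* branch immediate, 2-byte units */
	return 0;
}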
ctx               676 arch/riscv/net/bpf_jit_comp.c 		      struct rv_jit_context *ctx)
ctx               687 arch/riscv/net/bpf_jit_comp.c 		*rd = bpf_to_rv_reg(insn->dst_reg, ctx);
ctx               693 arch/riscv/net/bpf_jit_comp.c 		*rs = bpf_to_rv_reg(insn->src_reg, ctx);
ctx               697 arch/riscv/net/bpf_jit_comp.c 			   struct rv_jit_context *ctx)
ctx               699 arch/riscv/net/bpf_jit_comp.c 	*rvoff = rv_offset(insn + off, insn, ctx);
ctx               703 arch/riscv/net/bpf_jit_comp.c static void emit_zext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
ctx               705 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
ctx               706 arch/riscv/net/bpf_jit_comp.c 	emit_zext_32(RV_REG_T2, ctx);
ctx               707 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_T1, *rs, 0), ctx);
ctx               708 arch/riscv/net/bpf_jit_comp.c 	emit_zext_32(RV_REG_T1, ctx);
ctx               713 arch/riscv/net/bpf_jit_comp.c static void emit_sext_32_rd_rs(u8 *rd, u8 *rs, struct rv_jit_context *ctx)
ctx               715 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
ctx               716 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addiw(RV_REG_T1, *rs, 0), ctx);
ctx               721 arch/riscv/net/bpf_jit_comp.c static void emit_zext_32_rd_t1(u8 *rd, struct rv_jit_context *ctx)
ctx               723 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_T2, *rd, 0), ctx);
ctx               724 arch/riscv/net/bpf_jit_comp.c 	emit_zext_32(RV_REG_T2, ctx);
ctx               725 arch/riscv/net/bpf_jit_comp.c 	emit_zext_32(RV_REG_T1, ctx);
ctx               729 arch/riscv/net/bpf_jit_comp.c static void emit_sext_32_rd(u8 *rd, struct rv_jit_context *ctx)
ctx               731 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addiw(RV_REG_T2, *rd, 0), ctx);
ctx               735 arch/riscv/net/bpf_jit_comp.c static int emit_insn(const struct bpf_insn *insn, struct rv_jit_context *ctx,
ctx               740 arch/riscv/net/bpf_jit_comp.c 	struct bpf_prog_aux *aux = ctx->prog->aux;
ctx               741 arch/riscv/net/bpf_jit_comp.c 	int rvoff, i = insn - ctx->prog->insnsi;
ctx               746 arch/riscv/net/bpf_jit_comp.c 	init_regs(&rd, &rs, insn, ctx);
ctx               754 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               757 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_addi(rd, rs, 0) : rv_addiw(rd, rs, 0), ctx);
ctx               759 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               765 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_add(rd, rd, rs) : rv_addw(rd, rd, rs), ctx);
ctx               767 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               771 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_sub(rd, rd, rs) : rv_subw(rd, rd, rs), ctx);
ctx               773 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               777 arch/riscv/net/bpf_jit_comp.c 		emit(rv_and(rd, rd, rs), ctx);
ctx               779 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               783 arch/riscv/net/bpf_jit_comp.c 		emit(rv_or(rd, rd, rs), ctx);
ctx               785 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               789 arch/riscv/net/bpf_jit_comp.c 		emit(rv_xor(rd, rd, rs), ctx);
ctx               791 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               795 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_mul(rd, rd, rs) : rv_mulw(rd, rd, rs), ctx);
ctx               797 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               801 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_divu(rd, rd, rs) : rv_divuw(rd, rd, rs), ctx);
ctx               803 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               807 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_remu(rd, rd, rs) : rv_remuw(rd, rd, rs), ctx);
ctx               809 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               813 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_sll(rd, rd, rs) : rv_sllw(rd, rd, rs), ctx);
ctx               815 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               819 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_srl(rd, rd, rs) : rv_srlw(rd, rd, rs), ctx);
ctx               821 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               825 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_sra(rd, rd, rs) : rv_sraw(rd, rd, rs), ctx);
ctx               827 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               834 arch/riscv/net/bpf_jit_comp.c 		     rv_subw(rd, RV_REG_ZERO, rd), ctx);
ctx               836 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               844 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(rd, rd, shift), ctx);
ctx               845 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, shift), ctx);
ctx               849 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(RV_REG_T2, RV_REG_ZERO, 0), ctx);
ctx               851 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               852 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               853 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               854 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               858 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               859 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               860 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               861 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               863 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               864 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               865 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               866 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               870 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               871 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               872 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               873 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               875 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               876 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               877 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               878 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               880 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               881 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               882 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               883 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               885 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               886 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               887 arch/riscv/net/bpf_jit_comp.c 		emit(rv_slli(RV_REG_T2, RV_REG_T2, 8), ctx);
ctx               888 arch/riscv/net/bpf_jit_comp.c 		emit(rv_srli(rd, rd, 8), ctx);
ctx               890 arch/riscv/net/bpf_jit_comp.c 		emit(rv_andi(RV_REG_T1, rd, 0xff), ctx);
ctx               891 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, RV_REG_T1), ctx);
ctx               893 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(rd, RV_REG_T2, 0), ctx);
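The long andi/add/slli/srli run above is an open-coded 64-bit byte swap for BPF_END: each round peels the low byte of rd into T1, folds it into the T2 accumulator, shifts T2 up a byte, and shifts rd down a byte; the eighth byte is added without a final shift before T2 is copied back into rd. A standalone C model of exactly that sequence (64-bit case only; the 16- and 32-bit cases stop earlier):

#include <assert.h>
#include <stdint.h>

/* C model of the emitted byte swap: acc plays RV_REG_T2, x plays rd. */
static uint64_t bswap64_model(uint64_t x)
{
	uint64_t acc = 0;
	int i;

	for (i = 0; i < 7; i++) {
		acc = (acc + (x & 0xff)) << 8;	/* andi/add, then slli 8 */
		x >>= 8;			/* srli rd, rd, 8 */
	}
	return acc + (x & 0xff);		/* last byte, no shift */
}

int main(void)
{
	assert(bswap64_model(0x0102030405060708ULL) == 0x0807060504030201ULL);
	return 0;
}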
ctx               899 arch/riscv/net/bpf_jit_comp.c 		emit_imm(rd, imm, ctx);
ctx               901 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               909 arch/riscv/net/bpf_jit_comp.c 			     rv_addiw(rd, rd, imm), ctx);
ctx               911 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, imm, ctx);
ctx               913 arch/riscv/net/bpf_jit_comp.c 			     rv_addw(rd, rd, RV_REG_T1), ctx);
ctx               916 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               922 arch/riscv/net/bpf_jit_comp.c 			     rv_addiw(rd, rd, -imm), ctx);
ctx               924 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, imm, ctx);
ctx               926 arch/riscv/net/bpf_jit_comp.c 			     rv_subw(rd, rd, RV_REG_T1), ctx);
ctx               929 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               934 arch/riscv/net/bpf_jit_comp.c 			emit(rv_andi(rd, rd, imm), ctx);
ctx               936 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, imm, ctx);
ctx               937 arch/riscv/net/bpf_jit_comp.c 			emit(rv_and(rd, rd, RV_REG_T1), ctx);
ctx               940 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               945 arch/riscv/net/bpf_jit_comp.c 			emit(rv_ori(rd, rd, imm), ctx);
ctx               947 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, imm, ctx);
ctx               948 arch/riscv/net/bpf_jit_comp.c 			emit(rv_or(rd, rd, RV_REG_T1), ctx);
ctx               951 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               956 arch/riscv/net/bpf_jit_comp.c 			emit(rv_xori(rd, rd, imm), ctx);
ctx               958 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, imm, ctx);
ctx               959 arch/riscv/net/bpf_jit_comp.c 			emit(rv_xor(rd, rd, RV_REG_T1), ctx);
ctx               962 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               966 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx               968 arch/riscv/net/bpf_jit_comp.c 		     rv_mulw(rd, rd, RV_REG_T1), ctx);
ctx               970 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               974 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx               976 arch/riscv/net/bpf_jit_comp.c 		     rv_divuw(rd, rd, RV_REG_T1), ctx);
ctx               978 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               982 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx               984 arch/riscv/net/bpf_jit_comp.c 		     rv_remuw(rd, rd, RV_REG_T1), ctx);
ctx               986 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               990 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_slli(rd, rd, imm) : rv_slliw(rd, rd, imm), ctx);
ctx               992 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx               996 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_srli(rd, rd, imm) : rv_srliw(rd, rd, imm), ctx);
ctx               998 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx              1002 arch/riscv/net/bpf_jit_comp.c 		emit(is64 ? rv_srai(rd, rd, imm) : rv_sraiw(rd, rd, imm), ctx);
ctx              1004 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32(rd, ctx);
ctx              1009 arch/riscv/net/bpf_jit_comp.c 		rvoff = rv_offset(i + off, i, ctx);
ctx              1016 arch/riscv/net/bpf_jit_comp.c 		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
ctx              1022 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1025 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1026 arch/riscv/net/bpf_jit_comp.c 		emit(rv_beq(rd, rs, rvoff >> 1), ctx);
ctx              1030 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1033 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1034 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bltu(rs, rd, rvoff >> 1), ctx);
ctx              1038 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1041 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1042 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bltu(rd, rs, rvoff >> 1), ctx);
ctx              1046 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1049 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1050 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bgeu(rd, rs, rvoff >> 1), ctx);
ctx              1054 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1057 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1058 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bgeu(rs, rd, rvoff >> 1), ctx);
ctx              1062 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1065 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1066 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bne(rd, rs, rvoff >> 1), ctx);
ctx              1070 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1073 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd_rs(&rd, &rs, ctx);
ctx              1074 arch/riscv/net/bpf_jit_comp.c 		emit(rv_blt(rs, rd, rvoff >> 1), ctx);
ctx              1078 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1081 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd_rs(&rd, &rs, ctx);
ctx              1082 arch/riscv/net/bpf_jit_comp.c 		emit(rv_blt(rd, rs, rvoff >> 1), ctx);
ctx              1086 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1089 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd_rs(&rd, &rs, ctx);
ctx              1090 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bge(rd, rs, rvoff >> 1), ctx);
ctx              1094 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1097 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd_rs(&rd, &rs, ctx);
ctx              1098 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bge(rs, rd, rvoff >> 1), ctx);
ctx              1102 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1105 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_rs(&rd, &rs, ctx);
ctx              1106 arch/riscv/net/bpf_jit_comp.c 		emit(rv_and(RV_REG_T1, rd, rs), ctx);
ctx              1107 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx);
ctx              1113 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1115 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1117 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1118 arch/riscv/net/bpf_jit_comp.c 		emit(rv_beq(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1122 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1124 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1126 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1127 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bltu(RV_REG_T1, rd, rvoff >> 1), ctx);
ctx              1131 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1133 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1135 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1136 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bltu(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1140 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1142 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1144 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1145 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bgeu(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1149 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1151 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1153 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1154 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bgeu(RV_REG_T1, rd, rvoff >> 1), ctx);
ctx              1158 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1160 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1162 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1163 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bne(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1167 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1169 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1171 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd(&rd, ctx);
ctx              1172 arch/riscv/net/bpf_jit_comp.c 		emit(rv_blt(RV_REG_T1, rd, rvoff >> 1), ctx);
ctx              1176 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1178 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1180 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd(&rd, ctx);
ctx              1181 arch/riscv/net/bpf_jit_comp.c 		emit(rv_blt(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1185 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1187 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1189 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd(&rd, ctx);
ctx              1190 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bge(rd, RV_REG_T1, rvoff >> 1), ctx);
ctx              1194 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1196 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1198 arch/riscv/net/bpf_jit_comp.c 			emit_sext_32_rd(&rd, ctx);
ctx              1199 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bge(RV_REG_T1, rd, rvoff >> 1), ctx);
ctx              1203 arch/riscv/net/bpf_jit_comp.c 		if (rv_offset_check(&rvoff, off, i, ctx))
ctx              1205 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1207 arch/riscv/net/bpf_jit_comp.c 			emit_zext_32_rd_t1(&rd, ctx);
ctx              1208 arch/riscv/net/bpf_jit_comp.c 		emit(rv_and(RV_REG_T1, rd, RV_REG_T1), ctx);
ctx              1209 arch/riscv/net/bpf_jit_comp.c 		emit(rv_bne(RV_REG_T1, RV_REG_ZERO, rvoff >> 1), ctx);
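
The rv_blt/rv_bge/rv_bltu/rv_bgeu pairs above cover the signed and unsigned <, >, <=, >= compares: the "greater" forms are the "less" instructions with operands swapped. Each case first validates the displacement (rv_offset_check) and, for 32-bit compares, sign- or zero-extends the operands; JSET has no native branch, so it lowers to and + bne against zero. The `rvoff >> 1` scales the byte offset because RISC-V branch immediates are encoded in 2-byte units. A minimal stand-alone sketch of the mapping (not the kernel's emitter; names are illustrative):

	#include <stdbool.h>

	enum rv_branch { BLT, BGE, BLTU, BGEU };

	struct lowered {
		enum rv_branch op;
		bool swap_operands;	/* emit op(rs, rd) instead of op(rd, rs) */
	};

	static struct lowered lower(char rel, bool is_signed)
	{
		switch (rel) {
		case '<': return (struct lowered){ is_signed ? BLT : BLTU, false };
		case '>': return (struct lowered){ is_signed ? BLT : BLTU, true  }; /* rd > rs == rs < rd */
		case 'G': return (struct lowered){ is_signed ? BGE : BGEU, false }; /* >= */
		case 'L': return (struct lowered){ is_signed ? BGE : BGEU, true  }; /* rd <= rs == rs >= rd */
		}
		return (struct lowered){ BGE, false };
	}
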
ctx              1219 arch/riscv/net/bpf_jit_comp.c 		mark_call(ctx);
ctx              1220 arch/riscv/net/bpf_jit_comp.c 		ret = bpf_jit_get_func_addr(ctx->prog, insn, extra_pass, &addr,
ctx              1225 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, addr, ctx);
ctx              1227 arch/riscv/net/bpf_jit_comp.c 			i = ctx->ninsns;
ctx              1228 arch/riscv/net/bpf_jit_comp.c 			emit_imm(RV_REG_T1, addr, ctx);
ctx              1229 arch/riscv/net/bpf_jit_comp.c 			for (i = ctx->ninsns - i; i < 8; i++) {
ctx              1232 arch/riscv/net/bpf_jit_comp.c 				     ctx);
ctx              1235 arch/riscv/net/bpf_jit_comp.c 		emit(rv_jalr(RV_REG_RA, RV_REG_T1, 0), ctx);
ctx              1236 arch/riscv/net/bpf_jit_comp.c 		rd = bpf_to_rv_reg(BPF_REG_0, ctx);
ctx              1237 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(rd, RV_REG_A0, 0), ctx);
ctx              1242 arch/riscv/net/bpf_jit_comp.c 		if (emit_bpf_tail_call(i, ctx))
ctx              1248 arch/riscv/net/bpf_jit_comp.c 		if (i == ctx->prog->len - 1)
ctx              1251 arch/riscv/net/bpf_jit_comp.c 		rvoff = epilogue_offset(ctx);
ctx              1254 arch/riscv/net/bpf_jit_comp.c 		emit(rv_jal(RV_REG_ZERO, rvoff >> 1), ctx);
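
The BPF_CALL sequence above shows why the extra pass pads emit_imm with no-ops up to eight instructions: the target address is only known late, and the offsets computed in the first pass must not shift. A counting-emitter sketch of that idiom, with emit_imm left as a stand-in for the variable-length immediate loader:

	struct ectx { unsigned int ninsns; unsigned int *image; };

	#define NOP 0x00000013u		/* addi x0, x0, 0 */

	static void emit(unsigned int insn, struct ectx *c)
	{
		if (c->image)
			c->image[c->ninsns] = insn;
		c->ninsns++;
	}

	/* stand-in: a real emit_imm produces a varying 1..8 instructions */
	extern void emit_imm(unsigned long long imm, struct ectx *c);

	static void emit_call_imm(unsigned long long addr, struct ectx *c)
	{
		unsigned int start = c->ninsns;

		emit_imm(addr, c);
		while (c->ninsns - start < 8)	/* pad to the worst case */
			emit(NOP, c);
	}
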
ctx              1264 arch/riscv/net/bpf_jit_comp.c 		emit_imm(rd, imm64, ctx);
ctx              1271 arch/riscv/net/bpf_jit_comp.c 			emit(rv_lbu(rd, off, rs), ctx);
ctx              1275 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1276 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
ctx              1277 arch/riscv/net/bpf_jit_comp.c 		emit(rv_lbu(rd, 0, RV_REG_T1), ctx);
ctx              1283 arch/riscv/net/bpf_jit_comp.c 			emit(rv_lhu(rd, off, rs), ctx);
ctx              1287 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1288 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
ctx              1289 arch/riscv/net/bpf_jit_comp.c 		emit(rv_lhu(rd, 0, RV_REG_T1), ctx);
ctx              1295 arch/riscv/net/bpf_jit_comp.c 			emit(rv_lwu(rd, off, rs), ctx);
ctx              1299 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1300 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
ctx              1301 arch/riscv/net/bpf_jit_comp.c 		emit(rv_lwu(rd, 0, RV_REG_T1), ctx);
ctx              1307 arch/riscv/net/bpf_jit_comp.c 			emit(rv_ld(rd, off, rs), ctx);
ctx              1311 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1312 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rs), ctx);
ctx              1313 arch/riscv/net/bpf_jit_comp.c 		emit(rv_ld(rd, 0, RV_REG_T1), ctx);
ctx              1318 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1320 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sb(rd, off, RV_REG_T1), ctx);
ctx              1324 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T2, off, ctx);
ctx              1325 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
ctx              1326 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sb(RV_REG_T2, 0, RV_REG_T1), ctx);
ctx              1330 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1332 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sh(rd, off, RV_REG_T1), ctx);
ctx              1336 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T2, off, ctx);
ctx              1337 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
ctx              1338 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sh(RV_REG_T2, 0, RV_REG_T1), ctx);
ctx              1341 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1343 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sw(rd, off, RV_REG_T1), ctx);
ctx              1347 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T2, off, ctx);
ctx              1348 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
ctx              1349 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sw(RV_REG_T2, 0, RV_REG_T1), ctx);
ctx              1352 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, imm, ctx);
ctx              1354 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sd(rd, off, RV_REG_T1), ctx);
ctx              1358 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T2, off, ctx);
ctx              1359 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T2, RV_REG_T2, rd), ctx);
ctx              1360 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_T2, 0, RV_REG_T1), ctx);
ctx              1366 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sb(rd, off, rs), ctx);
ctx              1370 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1371 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
ctx              1372 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sb(RV_REG_T1, 0, rs), ctx);
ctx              1376 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sh(rd, off, rs), ctx);
ctx              1380 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1381 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
ctx              1382 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sh(RV_REG_T1, 0, rs), ctx);
ctx              1386 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sw(rd, off, rs), ctx);
ctx              1390 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1391 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
ctx              1392 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sw(RV_REG_T1, 0, rs), ctx);
ctx              1396 arch/riscv/net/bpf_jit_comp.c 			emit(rv_sd(rd, off, rs), ctx);
ctx              1400 arch/riscv/net/bpf_jit_comp.c 		emit_imm(RV_REG_T1, off, ctx);
ctx              1401 arch/riscv/net/bpf_jit_comp.c 		emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
ctx              1402 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_T1, 0, rs), ctx);
ctx              1410 arch/riscv/net/bpf_jit_comp.c 				emit(rv_addi(RV_REG_T1, rd, off), ctx);
ctx              1412 arch/riscv/net/bpf_jit_comp.c 				emit_imm(RV_REG_T1, off, ctx);
ctx              1413 arch/riscv/net/bpf_jit_comp.c 				emit(rv_add(RV_REG_T1, RV_REG_T1, rd), ctx);
ctx              1421 arch/riscv/net/bpf_jit_comp.c 		     rv_amoadd_d(RV_REG_ZERO, rs, rd, 0, 0), ctx);
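
All the LDX/ST/STX cases above share one shape: a single-instruction fast path when the offset fits the signed 12-bit immediate of a RISC-V load/store, otherwise the offset is materialized into T1 (or T2) and added to the base register. A small sketch of the predicate and the two paths, with the emitted forms as pseudo-assembly comments:

	#include <stdbool.h>

	/* RISC-V I- and S-type immediates are signed 12-bit: [-2048, 2047] */
	static bool is_simm12(long off)
	{
		return off >= -2048 && off <= 2047;
	}

	/* Two-path lowering, as in the listing:
	 *   if (is_simm12(off))
	 *           ld   rd, off(rs)        -- one instruction
	 *   else
	 *           li   t1, off            -- emit_imm(RV_REG_T1, off, ctx)
	 *           add  t1, t1, rs
	 *           ld   rd, 0(t1)
	 */
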
ctx              1431 arch/riscv/net/bpf_jit_comp.c static void build_prologue(struct rv_jit_context *ctx)
ctx              1435 arch/riscv/net/bpf_jit_comp.c 	bpf_stack_adjust = round_up(ctx->prog->aux->stack_depth, 16);
ctx              1437 arch/riscv/net/bpf_jit_comp.c 		mark_fp(ctx);
ctx              1439 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_RA, ctx))
ctx              1442 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S1, ctx))
ctx              1444 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S2, ctx))
ctx              1446 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S3, ctx))
ctx              1448 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S4, ctx))
ctx              1450 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S5, ctx))
ctx              1452 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S6, ctx))
ctx              1463 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_TCC, RV_REG_ZERO, MAX_TAIL_CALL_CNT), ctx);
ctx              1465 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_SP, RV_REG_SP, -stack_adjust), ctx);
ctx              1467 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_RA, ctx)) {
ctx              1468 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_RA), ctx);
ctx              1471 arch/riscv/net/bpf_jit_comp.c 	emit(rv_sd(RV_REG_SP, store_offset, RV_REG_FP), ctx);
ctx              1473 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S1, ctx)) {
ctx              1474 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S1), ctx);
ctx              1477 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S2, ctx)) {
ctx              1478 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S2), ctx);
ctx              1481 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S3, ctx)) {
ctx              1482 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S3), ctx);
ctx              1485 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S4, ctx)) {
ctx              1486 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S4), ctx);
ctx              1489 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S5, ctx)) {
ctx              1490 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S5), ctx);
ctx              1493 arch/riscv/net/bpf_jit_comp.c 	if (seen_reg(RV_REG_S6, ctx)) {
ctx              1494 arch/riscv/net/bpf_jit_comp.c 		emit(rv_sd(RV_REG_SP, store_offset, RV_REG_S6), ctx);
ctx              1498 arch/riscv/net/bpf_jit_comp.c 	emit(rv_addi(RV_REG_FP, RV_REG_SP, stack_adjust), ctx);
ctx              1501 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(RV_REG_S5, RV_REG_SP, bpf_stack_adjust), ctx);
ctx              1506 arch/riscv/net/bpf_jit_comp.c 	if (seen_tail_call(ctx) && seen_call(ctx))
ctx              1507 arch/riscv/net/bpf_jit_comp.c 		emit(rv_addi(RV_REG_TCC_SAVED, RV_REG_TCC, 0), ctx);
ctx              1509 arch/riscv/net/bpf_jit_comp.c 	ctx->stack_size = stack_adjust;
ctx              1512 arch/riscv/net/bpf_jit_comp.c static void build_epilogue(struct rv_jit_context *ctx)
ctx              1514 arch/riscv/net/bpf_jit_comp.c 	__build_epilogue(RV_REG_RA, ctx);
ctx              1517 arch/riscv/net/bpf_jit_comp.c static int build_body(struct rv_jit_context *ctx, bool extra_pass)
ctx              1519 arch/riscv/net/bpf_jit_comp.c 	const struct bpf_prog *prog = ctx->prog;
ctx              1526 arch/riscv/net/bpf_jit_comp.c 		ret = emit_insn(insn, ctx, extra_pass);
ctx              1529 arch/riscv/net/bpf_jit_comp.c 			if (ctx->insns == NULL)
ctx              1530 arch/riscv/net/bpf_jit_comp.c 				ctx->offset[i] = ctx->ninsns;
ctx              1533 arch/riscv/net/bpf_jit_comp.c 		if (ctx->insns == NULL)
ctx              1534 arch/riscv/net/bpf_jit_comp.c 			ctx->offset[i] = ctx->ninsns;
ctx              1561 arch/riscv/net/bpf_jit_comp.c 	struct rv_jit_context *ctx;
ctx              1585 arch/riscv/net/bpf_jit_comp.c 	ctx = &jit_data->ctx;
ctx              1587 arch/riscv/net/bpf_jit_comp.c 	if (ctx->offset) {
ctx              1589 arch/riscv/net/bpf_jit_comp.c 		image_size = sizeof(u32) * ctx->ninsns;
ctx              1593 arch/riscv/net/bpf_jit_comp.c 	ctx->prog = prog;
ctx              1594 arch/riscv/net/bpf_jit_comp.c 	ctx->offset = kcalloc(prog->len, sizeof(int), GFP_KERNEL);
ctx              1595 arch/riscv/net/bpf_jit_comp.c 	if (!ctx->offset) {
ctx              1601 arch/riscv/net/bpf_jit_comp.c 	if (build_body(ctx, extra_pass)) {
ctx              1605 arch/riscv/net/bpf_jit_comp.c 	build_prologue(ctx);
ctx              1606 arch/riscv/net/bpf_jit_comp.c 	ctx->epilogue_offset = ctx->ninsns;
ctx              1607 arch/riscv/net/bpf_jit_comp.c 	build_epilogue(ctx);
ctx              1610 arch/riscv/net/bpf_jit_comp.c 	image_size = sizeof(u32) * ctx->ninsns;
ctx              1620 arch/riscv/net/bpf_jit_comp.c 	ctx->insns = (u32 *)jit_data->image;
ctx              1622 arch/riscv/net/bpf_jit_comp.c 	ctx->ninsns = 0;
ctx              1624 arch/riscv/net/bpf_jit_comp.c 	build_prologue(ctx);
ctx              1625 arch/riscv/net/bpf_jit_comp.c 	if (build_body(ctx, extra_pass)) {
ctx              1630 arch/riscv/net/bpf_jit_comp.c 	build_epilogue(ctx);
ctx              1633 arch/riscv/net/bpf_jit_comp.c 		bpf_jit_dump(prog->len, image_size, 2, ctx->insns);
ctx              1635 arch/riscv/net/bpf_jit_comp.c 	prog->bpf_func = (void *)ctx->insns;
ctx              1639 arch/riscv/net/bpf_jit_comp.c 	bpf_flush_icache(jit_data->header, ctx->insns + ctx->ninsns);
ctx              1643 arch/riscv/net/bpf_jit_comp.c 		kfree(ctx->offset);
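
bpf_int_jit_compile above is a classic two-pass JIT: while ctx->insns is NULL, emit() only counts and build_body fills ctx->offset[] with the native index of every BPF instruction, the image is then allocated at sizeof(u32) * ctx->ninsns, ninsns is reset, and everything is re-emitted into the image before the icache flush. A minimal user-space sketch of that flow (build() stands in for prologue + body + epilogue):

	#include <stdlib.h>

	struct jctx { unsigned int ninsns; unsigned int *insns; };

	extern void build(struct jctx *c);	/* counts when insns == NULL */

	static unsigned int *jit(void)
	{
		struct jctx c = { 0, NULL };

		build(&c);				/* pass 1: size + offsets */
		unsigned int *image = calloc(c.ninsns, sizeof(*image));
		if (!image)
			return NULL;
		c.insns = image;
		c.ninsns = 0;
		build(&c);				/* pass 2: emit for real */
		/* the kernel then flushes the icache over image..image+ninsns */
		return image;
	}
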
ctx               796 arch/s390/crypto/aes_s390.c 	struct s390_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               800 arch/s390/crypto/aes_s390.c 		ctx->fc = CPACF_KMA_GCM_AES_128;
ctx               803 arch/s390/crypto/aes_s390.c 		ctx->fc = CPACF_KMA_GCM_AES_192;
ctx               806 arch/s390/crypto/aes_s390.c 		ctx->fc = CPACF_KMA_GCM_AES_256;
ctx               812 arch/s390/crypto/aes_s390.c 	memcpy(ctx->key, key, keylen);
ctx               813 arch/s390/crypto/aes_s390.c 	ctx->key_len = keylen;
ctx               991 arch/s390/crypto/aes_s390.c 	struct s390_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1033 arch/s390/crypto/aes_s390.c 	memcpy(param.k, ctx->key, ctx->key_len);
ctx              1064 arch/s390/crypto/aes_s390.c 		cpacf_kma(ctx->fc | flags, &param,
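
The GCM setkey above folds the key length into a CPACF function code once, so the data path just passes ctx->fc (plus modifier flags) to cpacf_kma. A hedged, self-contained sketch of that selection; the enum values here are illustrative stand-ins for the real constants in asm/cpacf.h:

	#include <errno.h>

	/* illustrative stand-ins; real codes live in asm/cpacf.h */
	enum cpacf_fc { KMA_GCM_AES_128, KMA_GCM_AES_192, KMA_GCM_AES_256 };

	static int gcm_fc_for_keylen(unsigned int keylen, enum cpacf_fc *fc)
	{
		switch (keylen) {
		case 16: *fc = KMA_GCM_AES_128; return 0;
		case 24: *fc = KMA_GCM_AES_192; return 0;
		case 32: *fc = KMA_GCM_AES_256; return 0;
		default: return -EINVAL;	/* crypto core rejects the key */
		}
	}
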
ctx               103 arch/s390/crypto/crc32-vx.c 	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               105 arch/s390/crypto/crc32-vx.c 	ctx->crc = mctx->key;
ctx               137 arch/s390/crypto/crc32-vx.c 	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               139 arch/s390/crypto/crc32-vx.c 	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
ctx               145 arch/s390/crypto/crc32-vx.c 	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               147 arch/s390/crypto/crc32-vx.c 	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
ctx               153 arch/s390/crypto/crc32-vx.c 	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               159 arch/s390/crypto/crc32-vx.c 	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
ctx               217 arch/s390/crypto/crc32-vx.c 		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
ctx               218 arch/s390/crypto/crc32-vx.c 		ctx->crc = func(ctx->crc, data, datalen);		      \
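
The three CRC finalizers above differ only in byte order and complement: plain little-endian, big-endian, and inverted little-endian for CRC-32C. A sketch using endianness-independent byte stores to make the distinction explicit:

	#include <stdint.h>

	static void put_le32(uint8_t *out, uint32_t v)
	{
		out[0] = v; out[1] = v >> 8; out[2] = v >> 16; out[3] = v >> 24;
	}

	static void put_be32(uint8_t *out, uint32_t v)
	{
		out[0] = v >> 24; out[1] = v >> 16; out[2] = v >> 8; out[3] = v;
	}

	static void crc32c_final(uint8_t *out, uint32_t crc)
	{
		put_le32(out, ~crc);	/* CRC-32C emits the bitwise complement */
	}
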
ctx                37 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                44 arch/s390/crypto/des_s390.c 	memcpy(ctx->key, key, key_len);
ctx                50 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                52 arch/s390/crypto/des_s390.c 	cpacf_km(CPACF_KM_DEA, ctx->key, out, in, DES_BLOCK_SIZE);
ctx                57 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                60 arch/s390/crypto/des_s390.c 		 ctx->key, out, in, DES_BLOCK_SIZE);
ctx                85 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx                93 arch/s390/crypto/des_s390.c 		cpacf_km(fc, ctx->key, walk->dst.virt.addr,
ctx               103 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               113 arch/s390/crypto/des_s390.c 	memcpy(param.key, ctx->key, DES3_KEY_SIZE);
ctx               224 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               231 arch/s390/crypto/des_s390.c 	memcpy(ctx->key, key, key_len);
ctx               237 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               239 arch/s390/crypto/des_s390.c 	cpacf_km(CPACF_KM_TDEA_192, ctx->key, dst, src, DES_BLOCK_SIZE);
ctx               244 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               247 arch/s390/crypto/des_s390.c 		 ctx->key, dst, src, DES_BLOCK_SIZE);
ctx               370 arch/s390/crypto/des_s390.c 	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               383 arch/s390/crypto/des_s390.c 		cpacf_kmctr(fc, ctx->key, walk->dst.virt.addr,
ctx               395 arch/s390/crypto/des_s390.c 		cpacf_kmctr(fc, ctx->key, buf, walk->src.virt.addr,
ctx                33 arch/s390/crypto/ghash_s390.c 	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                36 arch/s390/crypto/ghash_s390.c 	memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);
ctx                44 arch/s390/crypto/ghash_s390.c 	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
ctx                51 arch/s390/crypto/ghash_s390.c 	memcpy(ctx->key, key, GHASH_BLOCK_SIZE);
ctx               108 arch/s390/crypto/paes_s390.c static int __paes_set_key(struct s390_paes_ctx *ctx)
ctx               112 arch/s390/crypto/paes_s390.c 	if (__paes_convert_key(&ctx->kb, &ctx->pk))
ctx               116 arch/s390/crypto/paes_s390.c 	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
ctx               117 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
ctx               118 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;
ctx               121 arch/s390/crypto/paes_s390.c 	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
ctx               123 arch/s390/crypto/paes_s390.c 	return ctx->fc ? 0 : -EINVAL;
ctx               128 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               130 arch/s390/crypto/paes_s390.c 	ctx->kb.key = NULL;
ctx               137 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               139 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               146 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               148 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               149 arch/s390/crypto/paes_s390.c 	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
ctx               153 arch/s390/crypto/paes_s390.c 	if (__paes_set_key(ctx)) {
ctx               164 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               172 arch/s390/crypto/paes_s390.c 		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
ctx               177 arch/s390/crypto/paes_s390.c 			if (__paes_set_key(ctx) != 0)
ctx               229 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               231 arch/s390/crypto/paes_s390.c 	ctx->kb.key = NULL;
ctx               238 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               240 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               243 arch/s390/crypto/paes_s390.c static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
ctx               247 arch/s390/crypto/paes_s390.c 	if (__paes_convert_key(&ctx->kb, &ctx->pk))
ctx               251 arch/s390/crypto/paes_s390.c 	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
ctx               252 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
ctx               253 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;
ctx               256 arch/s390/crypto/paes_s390.c 	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
ctx               258 arch/s390/crypto/paes_s390.c 	return ctx->fc ? 0 : -EINVAL;
ctx               265 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               267 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               268 arch/s390/crypto/paes_s390.c 	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
ctx               272 arch/s390/crypto/paes_s390.c 	if (__cbc_paes_set_key(ctx)) {
ctx               282 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               292 arch/s390/crypto/paes_s390.c 	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
ctx               296 arch/s390/crypto/paes_s390.c 		k = cpacf_kmc(ctx->fc | modifier, &param,
ctx               301 arch/s390/crypto/paes_s390.c 			if (__cbc_paes_set_key(ctx) != 0)
ctx               303 arch/s390/crypto/paes_s390.c 			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
ctx               356 arch/s390/crypto/paes_s390.c 	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               358 arch/s390/crypto/paes_s390.c 	ctx->kb[0].key = NULL;
ctx               359 arch/s390/crypto/paes_s390.c 	ctx->kb[1].key = NULL;
ctx               366 arch/s390/crypto/paes_s390.c 	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               368 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb[0]);
ctx               369 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb[1]);
ctx               372 arch/s390/crypto/paes_s390.c static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
ctx               376 arch/s390/crypto/paes_s390.c 	if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
ctx               377 arch/s390/crypto/paes_s390.c 	    __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
ctx               380 arch/s390/crypto/paes_s390.c 	if (ctx->pk[0].type != ctx->pk[1].type)
ctx               384 arch/s390/crypto/paes_s390.c 	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
ctx               385 arch/s390/crypto/paes_s390.c 		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
ctx               389 arch/s390/crypto/paes_s390.c 	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
ctx               391 arch/s390/crypto/paes_s390.c 	return ctx->fc ? 0 : -EINVAL;
ctx               398 arch/s390/crypto/paes_s390.c 	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               407 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb[0]);
ctx               408 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb[1]);
ctx               409 arch/s390/crypto/paes_s390.c 	rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
ctx               412 arch/s390/crypto/paes_s390.c 	rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
ctx               416 arch/s390/crypto/paes_s390.c 	if (__xts_paes_set_key(ctx)) {
ctx               426 arch/s390/crypto/paes_s390.c 	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
ctx               428 arch/s390/crypto/paes_s390.c 	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
ctx               429 arch/s390/crypto/paes_s390.c 	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
ctx               436 arch/s390/crypto/paes_s390.c 	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               452 arch/s390/crypto/paes_s390.c 	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
ctx               453 arch/s390/crypto/paes_s390.c 	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
ctx               457 arch/s390/crypto/paes_s390.c 	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
ctx               458 arch/s390/crypto/paes_s390.c 	cpacf_pcc(ctx->fc, pcc_param.key + offset);
ctx               460 arch/s390/crypto/paes_s390.c 	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
ctx               466 arch/s390/crypto/paes_s390.c 		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
ctx               471 arch/s390/crypto/paes_s390.c 			if (__xts_paes_set_key(ctx) != 0)
ctx               525 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               527 arch/s390/crypto/paes_s390.c 	ctx->kb.key = NULL;
ctx               534 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               536 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               539 arch/s390/crypto/paes_s390.c static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
ctx               543 arch/s390/crypto/paes_s390.c 	if (__paes_convert_key(&ctx->kb, &ctx->pk))
ctx               547 arch/s390/crypto/paes_s390.c 	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
ctx               548 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
ctx               549 arch/s390/crypto/paes_s390.c 		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
ctx               553 arch/s390/crypto/paes_s390.c 	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
ctx               555 arch/s390/crypto/paes_s390.c 	return ctx->fc ? 0 : -EINVAL;
ctx               562 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               564 arch/s390/crypto/paes_s390.c 	_free_kb_keybuf(&ctx->kb);
ctx               565 arch/s390/crypto/paes_s390.c 	rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
ctx               569 arch/s390/crypto/paes_s390.c 	if (__ctr_paes_set_key(ctx)) {
ctx               594 arch/s390/crypto/paes_s390.c 	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               607 arch/s390/crypto/paes_s390.c 		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
ctx               618 arch/s390/crypto/paes_s390.c 			if (__ctr_paes_set_key(ctx) != 0) {
ctx               632 arch/s390/crypto/paes_s390.c 			if (cpacf_kmctr(ctx->fc | modifier,
ctx               633 arch/s390/crypto/paes_s390.c 					ctx->pk.protkey, buf,
ctx               637 arch/s390/crypto/paes_s390.c 			if (__ctr_paes_set_key(ctx) != 0)
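
Every paes walk above follows the same retry idiom: the CPACF call reports how many bytes it processed, and a short (zero) return means the protected key was invalidated, so the key blob is converted again and the loop retries; only a failed re-conversion is fatal. A sketch with do_km and set_key as stand-ins for the CPACF call and __*_paes_set_key:

	#include <errno.h>

	extern unsigned long do_km(unsigned char *buf, unsigned long nbytes);
	extern int set_key(void);

	static int walk_with_key_retry(unsigned char *buf, unsigned long nbytes)
	{
		while (nbytes) {
			unsigned long k = do_km(buf, nbytes);	/* 0..nbytes */

			if (k) {
				buf += k;
				nbytes -= k;
				continue;
			}
			if (set_key() != 0)
				return -EIO;	/* key cannot be refreshed */
		}
		return 0;
	}
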
ctx                23 arch/s390/crypto/sha512_s390.c 	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
ctx                25 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[0] = 0x6a09e667f3bcc908ULL;
ctx                26 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[2] = 0xbb67ae8584caa73bULL;
ctx                27 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[4] = 0x3c6ef372fe94f82bULL;
ctx                28 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[6] = 0xa54ff53a5f1d36f1ULL;
ctx                29 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[8] = 0x510e527fade682d1ULL;
ctx                30 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[10] = 0x9b05688c2b3e6c1fULL;
ctx                31 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[12] = 0x1f83d9abfb41bd6bULL;
ctx                32 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[14] = 0x5be0cd19137e2179ULL;
ctx                33 arch/s390/crypto/sha512_s390.c 	ctx->count = 0;
ctx                34 arch/s390/crypto/sha512_s390.c 	ctx->func = CPACF_KIMD_SHA_512;
ctx                88 arch/s390/crypto/sha512_s390.c 	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
ctx                90 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[0] = 0xcbbb9d5dc1059ed8ULL;
ctx                91 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[2] = 0x629a292a367cd507ULL;
ctx                92 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[4] = 0x9159015a3070dd17ULL;
ctx                93 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[6] = 0x152fecd8f70e5939ULL;
ctx                94 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[8] = 0x67332667ffc00b31ULL;
ctx                95 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[10] = 0x8eb44a8768581511ULL;
ctx                96 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[12] = 0xdb0c2e0d64f98fa7ULL;
ctx                97 arch/s390/crypto/sha512_s390.c 	*(__u64 *)&ctx->state[14] = 0x47b5481dbefa4fa4ULL;
ctx                98 arch/s390/crypto/sha512_s390.c 	ctx->count = 0;
ctx                99 arch/s390/crypto/sha512_s390.c 	ctx->func = CPACF_KIMD_SHA_512;
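
The two inits above differ only in their initial hash values (the FIPS 180-4 IVs, copied verbatim from the listing): both select the SHA-512 CPACF engine, and SHA-384 simply truncates the digest later. The same initialization as table-driven C:

	#include <stdint.h>
	#include <string.h>

	static const uint64_t sha512_iv[8] = {
		0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL,
		0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL,
		0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL,
		0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL,
	};

	static const uint64_t sha384_iv[8] = {
		0xcbbb9d5dc1059ed8ULL, 0x629a292a367cd507ULL,
		0x9159015a3070dd17ULL, 0x152fecd8f70e5939ULL,
		0x67332667ffc00b31ULL, 0x8eb44a8768581511ULL,
		0xdb0c2e0d64f98fa7ULL, 0x47b5481dbefa4fa4ULL,
	};

	static void sha_init(uint64_t state[8], int is384)
	{
		memcpy(state, is384 ? sha384_iv : sha512_iv, sizeof(sha512_iv));
	}
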
ctx                18 arch/s390/crypto/sha_common.c 	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
ctx                23 arch/s390/crypto/sha_common.c 	index = ctx->count % bsize;
ctx                24 arch/s390/crypto/sha_common.c 	ctx->count += len;
ctx                31 arch/s390/crypto/sha_common.c 		memcpy(ctx->buf + index, data, bsize - index);
ctx                32 arch/s390/crypto/sha_common.c 		cpacf_kimd(ctx->func, ctx->state, ctx->buf, bsize);
ctx                41 arch/s390/crypto/sha_common.c 		cpacf_kimd(ctx->func, ctx->state, data, n);
ctx                47 arch/s390/crypto/sha_common.c 		memcpy(ctx->buf + index, data, len);
ctx                74 arch/s390/crypto/sha_common.c 	struct s390_sha_ctx *ctx = shash_desc_ctx(desc);
ctx                80 arch/s390/crypto/sha_common.c 	n = ctx->count % bsize;
ctx                81 arch/s390/crypto/sha_common.c 	bits = ctx->count * 8;
ctx                82 arch/s390/crypto/sha_common.c 	mbl_offset = s390_crypto_shash_parmsize(ctx->func);
ctx                89 arch/s390/crypto/sha_common.c 	switch (ctx->func) {
ctx                92 arch/s390/crypto/sha_common.c 		memcpy(ctx->state + mbl_offset, &bits, sizeof(bits));
ctx                99 arch/s390/crypto/sha_common.c 		memset(ctx->state + mbl_offset, 0x00, sizeof(bits));
ctx               101 arch/s390/crypto/sha_common.c 		memcpy(ctx->state + mbl_offset, &bits, sizeof(bits));
ctx               112 arch/s390/crypto/sha_common.c 	cpacf_klmd(ctx->func, ctx->state, ctx->buf, n);
ctx               115 arch/s390/crypto/sha_common.c 	memcpy(out, ctx->state, crypto_shash_digestsize(desc->tfm));
ctx               117 arch/s390/crypto/sha_common.c 	memset(ctx, 0, sizeof *ctx);
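
s390_sha_update above is the standard buffered-update pattern: top up a partial block from ctx->buf, hand all complete blocks to the engine in one call, and stash the tail for next time (the final step then pads and appends the bit count via klmd). A self-contained sketch, with process_blocks standing in for cpacf_kimd and the SHA-512 block size assumed:

	#include <stddef.h>
	#include <string.h>

	#define BSIZE 128	/* SHA-512 block size; the kernel asks the tfm */

	struct sctx {
		unsigned long long count;
		unsigned char buf[BSIZE];
		unsigned long long state[8];
	};

	extern void process_blocks(unsigned long long *state,
				   const unsigned char *data, size_t len);

	static void sha_update(struct sctx *c, const unsigned char *data, size_t len)
	{
		size_t index = c->count % BSIZE;

		c->count += len;
		if (index) {			/* complete a buffered block */
			size_t fill = BSIZE - index;

			if (len < fill) {
				memcpy(c->buf + index, data, len);
				return;
			}
			memcpy(c->buf + index, data, fill);
			process_blocks(c->state, c->buf, BSIZE);
			data += fill;
			len -= fill;
		}
		if (len >= BSIZE) {		/* bulk: all whole blocks at once */
			size_t n = len & ~(size_t)(BSIZE - 1);

			process_blocks(c->state, data, n);
			data += n;
			len -= n;
		}
		if (len)			/* stash the tail */
			memcpy(c->buf, data, len);
	}
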
ctx               153 arch/s390/kernel/uprobes.c bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
ctx               156 arch/s390/kernel/uprobes.c 	if (ctx == RP_CHECK_CHAIN_CALL)
ctx              1323 arch/s390/net/bpf_jit_comp.c 	struct bpf_jit ctx;
ctx              1364 arch/s390/net/bpf_jit_comp.c 	if (jit_data->ctx.addrs) {
ctx              1365 arch/s390/net/bpf_jit_comp.c 		jit = jit_data->ctx;
ctx              1416 arch/s390/net/bpf_jit_comp.c 		jit_data->ctx = jit;
ctx               170 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               175 arch/sparc/crypto/aes_glue.c 		ctx->expanded_key_length = 0xb0;
ctx               176 arch/sparc/crypto/aes_glue.c 		ctx->ops = &aes128_ops;
ctx               180 arch/sparc/crypto/aes_glue.c 		ctx->expanded_key_length = 0xd0;
ctx               181 arch/sparc/crypto/aes_glue.c 		ctx->ops = &aes192_ops;
ctx               185 arch/sparc/crypto/aes_glue.c 		ctx->expanded_key_length = 0xf0;
ctx               186 arch/sparc/crypto/aes_glue.c 		ctx->ops = &aes256_ops;
ctx               194 arch/sparc/crypto/aes_glue.c 	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
ctx               195 arch/sparc/crypto/aes_glue.c 	ctx->key_length = key_len;
ctx               202 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               204 arch/sparc/crypto/aes_glue.c 	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
ctx               209 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               211 arch/sparc/crypto/aes_glue.c 	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
ctx               220 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               228 arch/sparc/crypto/aes_glue.c 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
ctx               233 arch/sparc/crypto/aes_glue.c 			ctx->ops->ecb_encrypt(&ctx->key[0],
ctx               249 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               258 arch/sparc/crypto/aes_glue.c 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
ctx               259 arch/sparc/crypto/aes_glue.c 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
ctx               264 arch/sparc/crypto/aes_glue.c 			ctx->ops->ecb_decrypt(key_end,
ctx               280 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               288 arch/sparc/crypto/aes_glue.c 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
ctx               293 arch/sparc/crypto/aes_glue.c 			ctx->ops->cbc_encrypt(&ctx->key[0],
ctx               309 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               318 arch/sparc/crypto/aes_glue.c 	ctx->ops->load_decrypt_keys(&ctx->key[0]);
ctx               319 arch/sparc/crypto/aes_glue.c 	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
ctx               324 arch/sparc/crypto/aes_glue.c 			ctx->ops->cbc_decrypt(key_end,
ctx               337 arch/sparc/crypto/aes_glue.c static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
ctx               346 arch/sparc/crypto/aes_glue.c 	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
ctx               356 arch/sparc/crypto/aes_glue.c 	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               364 arch/sparc/crypto/aes_glue.c 	ctx->ops->load_encrypt_keys(&ctx->key[0]);
ctx               369 arch/sparc/crypto/aes_glue.c 			ctx->ops->ctr_crypt(&ctx->key[0],
ctx               378 arch/sparc/crypto/aes_glue.c 		ctr_crypt_final(ctx, &walk);
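
The sparc64 AES glue above binds a per-key-size operations table at setkey time (aes128_ops/aes192_ops/aes256_ops plus the expanded key length), so the bulk paths dispatch through ctx->ops without ever re-testing the key length. A sketch of that shape with illustrative signatures:

	struct aes_ops {
		void (*encrypt)(const unsigned long *key, const unsigned int *in,
				unsigned int *out);
		void (*load_encrypt_keys)(const unsigned long *key);
	};

	extern const struct aes_ops aes128_ops, aes192_ops, aes256_ops;

	static const struct aes_ops *ops_for_keylen(unsigned int key_len)
	{
		switch (key_len) {
		case 16: return &aes128_ops;
		case 24: return &aes192_ops;
		case 32: return &aes256_ops;
		default: return 0;	/* setkey fails with -EINVAL */
		}
	}
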
ctx                39 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                48 arch/sparc/crypto/camellia_glue.c 	ctx->key_len = key_len;
ctx                50 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
ctx                51 arch/sparc/crypto/camellia_glue.c 				    key_len, &ctx->decrypt_key[0]);
ctx                60 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                62 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_crypt(&ctx->encrypt_key[0],
ctx                64 arch/sparc/crypto/camellia_glue.c 			       (u32 *) dst, ctx->key_len);
ctx                69 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                71 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_crypt(&ctx->decrypt_key[0],
ctx                73 arch/sparc/crypto/camellia_glue.c 			       (u32 *) dst, ctx->key_len);
ctx                90 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx                97 arch/sparc/crypto/camellia_glue.c 	if (ctx->key_len != 16)
ctx               105 arch/sparc/crypto/camellia_glue.c 		key = &ctx->encrypt_key[0];
ctx               107 arch/sparc/crypto/camellia_glue.c 		key = &ctx->decrypt_key[0];
ctx               108 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_load_keys(key, ctx->key_len);
ctx               153 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               160 arch/sparc/crypto/camellia_glue.c 	if (ctx->key_len != 16)
ctx               167 arch/sparc/crypto/camellia_glue.c 	key = &ctx->encrypt_key[0];
ctx               168 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_load_keys(key, ctx->key_len);
ctx               192 arch/sparc/crypto/camellia_glue.c 	struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               199 arch/sparc/crypto/camellia_glue.c 	if (ctx->key_len != 16)
ctx               206 arch/sparc/crypto/camellia_glue.c 	key = &ctx->decrypt_key[0];
ctx               207 arch/sparc/crypto/camellia_glue.c 	camellia_sparc64_load_keys(key, ctx->key_len);
ctx                69 arch/sparc/crypto/des_glue.c 	struct des_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                70 arch/sparc/crypto/des_glue.c 	const u64 *K = ctx->encrypt_expkey;
ctx                77 arch/sparc/crypto/des_glue.c 	struct des_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                78 arch/sparc/crypto/des_glue.c 	const u64 *K = ctx->decrypt_expkey;
ctx                94 arch/sparc/crypto/des_glue.c 	struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               103 arch/sparc/crypto/des_glue.c 		des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
ctx               105 arch/sparc/crypto/des_glue.c 		des_sparc64_load_keys(&ctx->decrypt_expkey[0]);
ctx               142 arch/sparc/crypto/des_glue.c 	struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               150 arch/sparc/crypto/des_glue.c 	des_sparc64_load_keys(&ctx->encrypt_expkey[0]);
ctx               173 arch/sparc/crypto/des_glue.c 	struct des_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               181 arch/sparc/crypto/des_glue.c 	des_sparc64_load_keys(&ctx->decrypt_expkey[0]);
ctx               235 arch/sparc/crypto/des_glue.c 	struct des3_ede_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               236 arch/sparc/crypto/des_glue.c 	const u64 *K = ctx->encrypt_expkey;
ctx               243 arch/sparc/crypto/des_glue.c 	struct des3_ede_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               244 arch/sparc/crypto/des_glue.c 	const u64 *K = ctx->decrypt_expkey;
ctx               258 arch/sparc/crypto/des_glue.c 	struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               268 arch/sparc/crypto/des_glue.c 		K = &ctx->encrypt_expkey[0];
ctx               270 arch/sparc/crypto/des_glue.c 		K = &ctx->decrypt_expkey[0];
ctx               310 arch/sparc/crypto/des_glue.c 	struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               319 arch/sparc/crypto/des_glue.c 	K = &ctx->encrypt_expkey[0];
ctx               346 arch/sparc/crypto/des_glue.c 	struct des3_ede_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
ctx               355 arch/sparc/crypto/des_glue.c 	K = &ctx->decrypt_expkey[0];
ctx                39 arch/sparc/include/asm/mmu_context_64.h 					  unsigned long ctx)
ctx                51 arch/sparc/include/asm/mmu_context_64.h 			     ctx);
ctx               188 arch/sparc/kernel/iommu.c static inline void iommu_free_ctx(struct iommu *iommu, int ctx)
ctx               190 arch/sparc/kernel/iommu.c 	if (likely(ctx)) {
ctx               191 arch/sparc/kernel/iommu.c 		__clear_bit(ctx, iommu->ctx_bitmap);
ctx               192 arch/sparc/kernel/iommu.c 		if (ctx < iommu->ctx_lowest_free)
ctx               193 arch/sparc/kernel/iommu.c 			iommu->ctx_lowest_free = ctx;
ctx               272 arch/sparc/kernel/iommu.c 	unsigned long i, base_paddr, ctx;
ctx               288 arch/sparc/kernel/iommu.c 	ctx = 0;
ctx               290 arch/sparc/kernel/iommu.c 		ctx = iommu_alloc_ctx(iommu);
ctx               301 arch/sparc/kernel/iommu.c 		iopte_protection = IOPTE_STREAMING(ctx);
ctx               303 arch/sparc/kernel/iommu.c 		iopte_protection = IOPTE_CONSISTENT(ctx);
ctx               313 arch/sparc/kernel/iommu.c 	iommu_free_ctx(iommu, ctx);
ctx               321 arch/sparc/kernel/iommu.c 			 u32 vaddr, unsigned long ctx, unsigned long npages,
ctx               332 arch/sparc/kernel/iommu.c 		matchreg = STC_CTXMATCH_ADDR(strbuf, ctx);
ctx               334 arch/sparc/kernel/iommu.c 		iommu_write(flushreg, ctx);
ctx               342 arch/sparc/kernel/iommu.c 				iommu_write(flushreg, ctx);
ctx               349 arch/sparc/kernel/iommu.c 			       val, ctx);
ctx               383 arch/sparc/kernel/iommu.c 		       vaddr, ctx, npages);
ctx               393 arch/sparc/kernel/iommu.c 	unsigned long flags, npages, ctx, i;
ctx               413 arch/sparc/kernel/iommu.c 	ctx = 0;
ctx               415 arch/sparc/kernel/iommu.c 		ctx = (iopte_val(*base) & IOPTE_CONTEXT) >> 47UL;
ctx               419 arch/sparc/kernel/iommu.c 		strbuf_flush(strbuf, iommu, bus_addr, ctx,
ctx               426 arch/sparc/kernel/iommu.c 	iommu_free_ctx(iommu, ctx);
ctx               437 arch/sparc/kernel/iommu.c 	unsigned long flags, handle, prot, ctx;
ctx               455 arch/sparc/kernel/iommu.c 	ctx = 0;
ctx               457 arch/sparc/kernel/iommu.c 		ctx = iommu_alloc_ctx(iommu);
ctx               460 arch/sparc/kernel/iommu.c 		prot = IOPTE_STREAMING(ctx);
ctx               462 arch/sparc/kernel/iommu.c 		prot = IOPTE_CONSISTENT(ctx);
ctx               592 arch/sparc/kernel/iommu.c 	unsigned long ctx = 0;
ctx               603 arch/sparc/kernel/iommu.c 		ctx = (iopte_val(*base) & IOPTE_CONTEXT) >> 47UL;
ctx               605 arch/sparc/kernel/iommu.c 	return ctx;
ctx               612 arch/sparc/kernel/iommu.c 	unsigned long flags, ctx;
ctx               622 arch/sparc/kernel/iommu.c 	ctx = fetch_sg_ctx(iommu, sglist);
ctx               644 arch/sparc/kernel/iommu.c 			strbuf_flush(strbuf, iommu, dma_handle, ctx,
ctx               655 arch/sparc/kernel/iommu.c 	iommu_free_ctx(iommu, ctx);
ctx               666 arch/sparc/kernel/iommu.c 	unsigned long flags, ctx, npages;
ctx               681 arch/sparc/kernel/iommu.c 	ctx = 0;
ctx               689 arch/sparc/kernel/iommu.c 		ctx = (iopte_val(*iopte) & IOPTE_CONTEXT) >> 47UL;
ctx               693 arch/sparc/kernel/iommu.c 	strbuf_flush(strbuf, iommu, bus_addr, ctx, npages, direction);
ctx               704 arch/sparc/kernel/iommu.c 	unsigned long flags, ctx, npages, i;
ctx               717 arch/sparc/kernel/iommu.c 	ctx = 0;
ctx               725 arch/sparc/kernel/iommu.c 		ctx = (iopte_val(*iopte) & IOPTE_CONTEXT) >> 47UL;
ctx               739 arch/sparc/kernel/iommu.c 	strbuf_flush(strbuf, iommu, bus_addr, ctx, npages, direction);
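
iommu_alloc_ctx/iommu_free_ctx above manage streaming-buffer contexts as a bitmap with a lowest-free search hint; context 0 is reserved to mean "no context", which is why iommu_free_ctx only acts `if (likely(ctx))`. A single-threaded sketch of the idiom (the kernel does this with atomic bitops under the IOMMU lock):

	#include <stdbool.h>

	#define NCTX	4096
	#define BITS	((int)(8 * sizeof(unsigned long)))

	static unsigned long ctx_bitmap[NCTX / (8 * sizeof(unsigned long))];
	static int ctx_lowest_free = 1;		/* context 0 means "none" */

	static int ctx_alloc(void)
	{
		for (int c = ctx_lowest_free; c < NCTX; c++) {
			unsigned long mask = 1UL << (c % BITS);

			if (!(ctx_bitmap[c / BITS] & mask)) {
				ctx_bitmap[c / BITS] |= mask;
				ctx_lowest_free = c + 1;	/* next hint */
				return c;
			}
		}
		return 0;		/* exhausted: share context 0 */
	}

	static void ctx_free(int c)
	{
		if (!c)
			return;		/* context 0 is never owned */
		ctx_bitmap[c / BITS] &= ~(1UL << (c % BITS));
		if (c < ctx_lowest_free)
			ctx_lowest_free = c;
	}
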
ctx               838 arch/sparc/kernel/smp_64.c static void smp_cross_call_masked(unsigned long *func, u32 ctx, u64 data1, u64 data2, const cpumask_t *mask)
ctx               840 arch/sparc/kernel/smp_64.c 	u64 data0 = (((u64)ctx)<<32 | (((u64)func) & 0xffffffff));
ctx               846 arch/sparc/kernel/smp_64.c static void smp_cross_call(unsigned long *func, u32 ctx, u64 data1, u64 data2)
ctx               848 arch/sparc/kernel/smp_64.c 	smp_cross_call_masked(func, ctx, data1, data2, cpu_online_mask);
ctx              1082 arch/sparc/kernel/smp_64.c 	u32 ctx = CTX_HWBITS(mm->context);
ctx              1091 arch/sparc/kernel/smp_64.c 			      ctx, 0, 0,
ctx              1095 arch/sparc/kernel/smp_64.c 	__flush_tlb_mm(ctx, SECONDARY_CONTEXT);
ctx              1101 arch/sparc/kernel/smp_64.c 	unsigned long ctx;
ctx              1110 arch/sparc/kernel/smp_64.c 	__flush_tlb_pending(t->ctx, t->nr, t->vaddrs);
ctx              1115 arch/sparc/kernel/smp_64.c 	u32 ctx = CTX_HWBITS(mm->context);
ctx              1119 arch/sparc/kernel/smp_64.c 	info.ctx = ctx;
ctx              1129 arch/sparc/kernel/smp_64.c 	__flush_tlb_pending(ctx, nr, vaddrs);
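
smp_cross_call_masked above packs the MMU context into the high half of the first cross-call data word and the low 32 bits of the handler address into the low half, exactly as in the `(((u64)ctx)<<32 | (((u64)func) & 0xffffffff))` expression. In plain C:

	#include <stdint.h>

	static uint64_t pack_data0(uint32_t ctx, uint64_t func)
	{
		return ((uint64_t)ctx << 32) | (func & 0xffffffff);
	}

	static uint32_t unpack_ctx(uint64_t data0)  { return data0 >> 32; }
	static uint32_t unpack_func(uint64_t data0) { return (uint32_t)data0; }
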
ctx               223 arch/sparc/kernel/traps_64.c 	unsigned short ctx  = (type_ctx & 0xffff);
ctx               232 arch/sparc/kernel/traps_64.c 		       addr, ctx, type);
ctx               343 arch/sparc/kernel/traps_64.c 	unsigned short ctx  = (type_ctx & 0xffff);
ctx               367 arch/sparc/kernel/traps_64.c 		       addr, ctx, type);
ctx               838 arch/sparc/mm/init_64.c 	unsigned long ctx, new_ctx;
ctx               847 arch/sparc/mm/init_64.c 	ctx = (tlb_context_cache + 1) & CTX_NR_MASK;
ctx               848 arch/sparc/mm/init_64.c 	new_ctx = find_next_zero_bit(mmu_context_bmap, 1 << CTX_NR_BITS, ctx);
ctx               850 arch/sparc/mm/init_64.c 		new_ctx = find_next_zero_bit(mmu_context_bmap, ctx, 1);
ctx               851 arch/sparc/mm/init_64.c 		if (new_ctx >= ctx) {
ctx              3010 arch/sparc/mm/init_64.c 		unsigned long ctx;
ctx              3013 arch/sparc/mm/init_64.c 		ctx = mm->context.sparc64_ctx_val;
ctx              3014 arch/sparc/mm/init_64.c 		ctx &= ~CTX_PGSZ_MASK;
ctx              3015 arch/sparc/mm/init_64.c 		ctx |= CTX_PGSZ_BASE << CTX_PGSZ0_SHIFT;
ctx              3016 arch/sparc/mm/init_64.c 		ctx |= CTX_PGSZ_HUGE << CTX_PGSZ1_SHIFT;
ctx              3018 arch/sparc/mm/init_64.c 		if (ctx != mm->context.sparc64_ctx_val) {
ctx              3030 arch/sparc/mm/init_64.c 			mm->context.sparc64_ctx_val = ctx;
ctx                43 arch/sparc/mm/leon_mm.c 	unsigned int ctx;
ctx                65 arch/sparc/mm/leon_mm.c 	ctx = srmmu_get_context();
ctx                67 arch/sparc/mm/leon_mm.c 		printk(KERN_INFO "swprobe:  --- ctx (%x) ---\n", ctx);
ctx                69 arch/sparc/mm/leon_mm.c 	pgd = LEON_BYPASS_LOAD_PA(ctxtbl + (ctx * 4));
ctx               472 arch/sparc/mm/srmmu.c 	int ctx;
ctx               480 arch/sparc/mm/srmmu.c 	for (ctx = 0; ctx < numctx; ctx++) {
ctx               483 arch/sparc/mm/srmmu.c 		clist = (ctx_list_pool + ctx);
ctx               484 arch/sparc/mm/srmmu.c 		clist->ctx_number = ctx;
ctx               489 arch/sparc/mm/srmmu.c 	for (ctx = 0; ctx < numctx; ctx++)
ctx               490 arch/sparc/mm/srmmu.c 		add_to_free_ctxlist(ctx_list_pool + ctx);
ctx               236 arch/sparc/net/bpf_jit_comp_64.c static void emit(const u32 insn, struct jit_ctx *ctx)
ctx               238 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->image != NULL)
ctx               239 arch/sparc/net/bpf_jit_comp_64.c 		ctx->image[ctx->idx] = insn;
ctx               241 arch/sparc/net/bpf_jit_comp_64.c 	ctx->idx++;
ctx               244 arch/sparc/net/bpf_jit_comp_64.c static void emit_call(u32 *func, struct jit_ctx *ctx)
ctx               246 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->image != NULL) {
ctx               247 arch/sparc/net/bpf_jit_comp_64.c 		void *here = &ctx->image[ctx->idx];
ctx               251 arch/sparc/net/bpf_jit_comp_64.c 		ctx->image[ctx->idx] = CALL | ((off >> 2) & 0x3fffffff);
ctx               253 arch/sparc/net/bpf_jit_comp_64.c 	ctx->idx++;
ctx               256 arch/sparc/net/bpf_jit_comp_64.c static void emit_nop(struct jit_ctx *ctx)
ctx               258 arch/sparc/net/bpf_jit_comp_64.c 	emit(SETHI(0, G0), ctx);
ctx               261 arch/sparc/net/bpf_jit_comp_64.c static void emit_reg_move(u32 from, u32 to, struct jit_ctx *ctx)
ctx               263 arch/sparc/net/bpf_jit_comp_64.c 	emit(OR | RS1(G0) | RS2(from) | RD(to), ctx);
ctx               267 arch/sparc/net/bpf_jit_comp_64.c static void emit_set_const(s32 K, u32 reg, struct jit_ctx *ctx)
ctx               269 arch/sparc/net/bpf_jit_comp_64.c 	emit(SETHI(K, reg), ctx);
ctx               270 arch/sparc/net/bpf_jit_comp_64.c 	emit(OR_LO(K, reg), ctx);
ctx               274 arch/sparc/net/bpf_jit_comp_64.c static void emit_set_const_sext(s32 K, u32 reg, struct jit_ctx *ctx)
ctx               277 arch/sparc/net/bpf_jit_comp_64.c 		emit(SETHI(K, reg), ctx);
ctx               278 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR_LO(K, reg), ctx);
ctx               283 arch/sparc/net/bpf_jit_comp_64.c 		emit(SETHI(hbits, reg), ctx);
ctx               284 arch/sparc/net/bpf_jit_comp_64.c 		emit(XOR | IMMED | RS1(reg) | S13(lbits) | RD(reg), ctx);
ctx               288 arch/sparc/net/bpf_jit_comp_64.c static void emit_alu(u32 opcode, u32 src, u32 dst, struct jit_ctx *ctx)
ctx               290 arch/sparc/net/bpf_jit_comp_64.c 	emit(opcode | RS1(dst) | RS2(src) | RD(dst), ctx);
ctx               293 arch/sparc/net/bpf_jit_comp_64.c static void emit_alu3(u32 opcode, u32 a, u32 b, u32 c, struct jit_ctx *ctx)
ctx               295 arch/sparc/net/bpf_jit_comp_64.c 	emit(opcode | RS1(a) | RS2(b) | RD(c), ctx);
ctx               299 arch/sparc/net/bpf_jit_comp_64.c 		       struct jit_ctx *ctx)
ctx               306 arch/sparc/net/bpf_jit_comp_64.c 		emit(insn | IMMED | S13(imm), ctx);
ctx               310 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx               312 arch/sparc/net/bpf_jit_comp_64.c 		emit_set_const_sext(imm, tmp, ctx);
ctx               313 arch/sparc/net/bpf_jit_comp_64.c 		emit(insn | RS2(tmp), ctx);
ctx               318 arch/sparc/net/bpf_jit_comp_64.c 			unsigned int dst, struct jit_ctx *ctx)
ctx               325 arch/sparc/net/bpf_jit_comp_64.c 		emit(insn | IMMED | S13(imm), ctx);
ctx               329 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx               331 arch/sparc/net/bpf_jit_comp_64.c 		emit_set_const_sext(imm, tmp, ctx);
ctx               332 arch/sparc/net/bpf_jit_comp_64.c 		emit(insn | RS2(tmp), ctx);
ctx               336 arch/sparc/net/bpf_jit_comp_64.c static void emit_loadimm32(s32 K, unsigned int dest, struct jit_ctx *ctx)
ctx               340 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR | IMMED | RS1(G0) | S13(K) | RD(dest), ctx);
ctx               342 arch/sparc/net/bpf_jit_comp_64.c 		emit_set_const(K, dest, ctx);
ctx               346 arch/sparc/net/bpf_jit_comp_64.c static void emit_loadimm(s32 K, unsigned int dest, struct jit_ctx *ctx)
ctx               350 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR | IMMED | RS1(G0) | S13(K) | RD(dest), ctx);
ctx               352 arch/sparc/net/bpf_jit_comp_64.c 		emit_set_const(K, dest, ctx);
ctx               356 arch/sparc/net/bpf_jit_comp_64.c static void emit_loadimm_sext(s32 K, unsigned int dest, struct jit_ctx *ctx)
ctx               360 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR | IMMED | RS1(G0) | S13(K) | RD(dest), ctx);
ctx               362 arch/sparc/net/bpf_jit_comp_64.c 		emit_set_const_sext(K, dest, ctx);
ctx               451 arch/sparc/net/bpf_jit_comp_64.c 					  int shift_count, struct jit_ctx *ctx)
ctx               453 arch/sparc/net/bpf_jit_comp_64.c 	emit_loadimm32(high_bits, dest, ctx);
ctx               456 arch/sparc/net/bpf_jit_comp_64.c 	emit_alu_K(SLLX, dest, shift_count, ctx);
ctx               462 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR | IMMED | RS1(dest) | S13(low_imm) | RD(dest), ctx);
ctx               465 arch/sparc/net/bpf_jit_comp_64.c static void emit_loadimm64(u64 K, unsigned int dest, struct jit_ctx *ctx)
ctx               476 arch/sparc/net/bpf_jit_comp_64.c 		return emit_loadimm_sext(K, dest, ctx);
ctx               478 arch/sparc/net/bpf_jit_comp_64.c 		return emit_loadimm32(K, dest, ctx);
ctx               504 arch/sparc/net/bpf_jit_comp_64.c 		emit(OR | IMMED | RS1(G0) | S13(the_const) | RD(dest), ctx);
ctx               506 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SLLX, dest, shift, ctx);
ctx               508 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SRLX, dest, -shift, ctx);
ctx               524 arch/sparc/net/bpf_jit_comp_64.c 		emit(SETHI(focus_bits, dest), ctx);
ctx               530 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SRLX, dest, 10 - lowest_bit_set, ctx);
ctx               532 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SLLX, dest, lowest_bit_set - 10, ctx);
ctx               538 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm32(high_bits, dest, ctx);
ctx               539 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SLLX, dest, 32, ctx);
ctx               561 arch/sparc/net/bpf_jit_comp_64.c 				emit(SETHI(fast_int, dest), ctx);
ctx               563 arch/sparc/net/bpf_jit_comp_64.c 				emit(OR | IMMED | RS1(G0) | S13(fast_int) | RD(dest), ctx);
ctx               565 arch/sparc/net/bpf_jit_comp_64.c 				emit_loadimm64(fast_int, dest, ctx);
ctx               570 arch/sparc/net/bpf_jit_comp_64.c 			emit_loadimm64(n, dest, ctx);
ctx               575 arch/sparc/net/bpf_jit_comp_64.c 		emit(XOR | IMMED | RS1(dest) | S13(low_bits) | RD(dest), ctx);
ctx               592 arch/sparc/net/bpf_jit_comp_64.c 					      lowest_bit_set, ctx);
ctx               603 arch/sparc/net/bpf_jit_comp_64.c 					      dest, 32, ctx);
ctx               608 arch/sparc/net/bpf_jit_comp_64.c 	ctx->tmp_1_used = true;
ctx               610 arch/sparc/net/bpf_jit_comp_64.c 	emit_loadimm32(high_bits, tmp, ctx);
ctx               611 arch/sparc/net/bpf_jit_comp_64.c 	emit_loadimm32(low_bits, dest, ctx);
ctx               612 arch/sparc/net/bpf_jit_comp_64.c 	emit_alu_K(SLLX, tmp, 32, ctx);
ctx               613 arch/sparc/net/bpf_jit_comp_64.c 	emit(OR | RS1(dest) | RS2(tmp) | RD(dest), ctx);
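
emit_loadimm64 above tries several cheap encodings first (a signed 13-bit immediate, sethi/or for 32-bit values, sethi plus a shift for a narrow run of bits) and only then falls back to building both 32-bit halves independently and merging them, which is what the tmp/SLLX/OR tail shows. A plain-C stand-in demonstrating that the fallback decomposition is lossless:

	#include <stdint.h>

	static uint64_t loadimm64_fallback(uint64_t k)
	{
		uint32_t high_bits = k >> 32;
		uint32_t low_bits  = (uint32_t)k;
		uint64_t tmp  = high_bits;	/* emit_loadimm32(high_bits, tmp)  */
		uint64_t dest = low_bits;	/* emit_loadimm32(low_bits, dest)  */

		tmp <<= 32;			/* emit_alu_K(SLLX, tmp, 32)       */
		return dest | tmp;		/* OR dest, tmp -> dest == k       */
	}
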
ctx               617 arch/sparc/net/bpf_jit_comp_64.c 			struct jit_ctx *ctx)
ctx               622 arch/sparc/net/bpf_jit_comp_64.c 		emit(br_opc | WDISP19(off << 2), ctx);
ctx               624 arch/sparc/net/bpf_jit_comp_64.c 		emit(br_opc | WDISP22(off << 2), ctx);
ctx               628 arch/sparc/net/bpf_jit_comp_64.c 			const u8 dst, const u8 src, struct jit_ctx *ctx)
ctx               632 arch/sparc/net/bpf_jit_comp_64.c 	emit(cb_opc | WDISP10(off << 2) | RS1(dst) | RS2(src), ctx);
ctx               636 arch/sparc/net/bpf_jit_comp_64.c 			 const u8 dst, s32 imm, struct jit_ctx *ctx)
ctx               640 arch/sparc/net/bpf_jit_comp_64.c 	emit(cb_opc | IMMED | WDISP10(off << 2) | RS1(dst) | S5(imm), ctx);
ctx               660 arch/sparc/net/bpf_jit_comp_64.c 				   struct jit_ctx *ctx)
ctx               665 arch/sparc/net/bpf_jit_comp_64.c 	branch_dst = ctx->offset[branch_dst];
ctx               667 arch/sparc/net/bpf_jit_comp_64.c 	if (!is_simm10(branch_dst - ctx->idx) ||
ctx               681 arch/sparc/net/bpf_jit_comp_64.c 			ctx->tmp_1_used = true;
ctx               682 arch/sparc/net/bpf_jit_comp_64.c 			emit_loadimm_sext(imm, tmp, ctx);
ctx               693 arch/sparc/net/bpf_jit_comp_64.c 				emit_btsti(dst, imm, ctx);
ctx               695 arch/sparc/net/bpf_jit_comp_64.c 				emit_btst(dst, src, ctx);
ctx               698 arch/sparc/net/bpf_jit_comp_64.c 				emit_cmpi(dst, imm, ctx);
ctx               700 arch/sparc/net/bpf_jit_comp_64.c 				emit_cmp(dst, src, ctx);
ctx               740 arch/sparc/net/bpf_jit_comp_64.c 		emit_branch(br_opcode, ctx->idx, branch_dst, ctx);
ctx               741 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx               784 arch/sparc/net/bpf_jit_comp_64.c 			emit_cbcondi(cbcond_opcode, ctx->idx, branch_dst,
ctx               785 arch/sparc/net/bpf_jit_comp_64.c 				     dst, imm, ctx);
ctx               787 arch/sparc/net/bpf_jit_comp_64.c 			emit_cbcond(cbcond_opcode, ctx->idx, branch_dst,
ctx               788 arch/sparc/net/bpf_jit_comp_64.c 				    dst, src, ctx);
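
The branch emitters above pick an encoding by displacement reach: cbcond fuses compare and branch but only spans a signed 10-bit word displacement, so emit_cond_jmp falls back to cmp + Bcc + delay-slot nop when is_simm10 fails (plain branches get 19- or 22-bit displacements via WDISP19/WDISP22). One generic signed-range predicate covers all the widths involved:

	#include <stdbool.h>

	static bool is_simm(long v, unsigned int bits)
	{
		long lim = 1L << (bits - 1);

		return v >= -lim && v < lim;
	}

	/* e.g. is_simm(branch_dst - idx, 10) gates the fused cbcond form;
	 * otherwise: cmp dst, src; Bcc target; nop  (delay slot)          */
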
ctx               797 arch/sparc/net/bpf_jit_comp_64.c static void build_prologue(struct jit_ctx *ctx)
ctx               801 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->saw_frame_pointer || ctx->saw_tail_call) {
ctx               802 arch/sparc/net/bpf_jit_comp_64.c 		struct bpf_prog *prog = ctx->prog;
ctx               809 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->saw_tail_call)
ctx               813 arch/sparc/net/bpf_jit_comp_64.c 	emit(SAVE | IMMED | RS1(SP) | S13(-stack_needed) | RD(SP), ctx);
ctx               816 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->saw_tail_call) {
ctx               819 arch/sparc/net/bpf_jit_comp_64.c 		emit(ST32 | IMMED | RS1(SP) | S13(off) | RD(G0), ctx);
ctx               821 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx               823 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx->saw_frame_pointer) {
ctx               826 arch/sparc/net/bpf_jit_comp_64.c 		emit(ADD | IMMED | RS1(FP) | S13(STACK_BIAS) | RD(vfp), ctx);
ctx               828 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx               831 arch/sparc/net/bpf_jit_comp_64.c 	emit_reg_move(I0, O0, ctx);
ctx               832 arch/sparc/net/bpf_jit_comp_64.c 	emit_reg_move(I1, O1, ctx);
ctx               833 arch/sparc/net/bpf_jit_comp_64.c 	emit_reg_move(I2, O2, ctx);
ctx               834 arch/sparc/net/bpf_jit_comp_64.c 	emit_reg_move(I3, O3, ctx);
ctx               835 arch/sparc/net/bpf_jit_comp_64.c 	emit_reg_move(I4, O4, ctx);
ctx               839 arch/sparc/net/bpf_jit_comp_64.c static void build_epilogue(struct jit_ctx *ctx)
ctx               841 arch/sparc/net/bpf_jit_comp_64.c 	ctx->epilogue_offset = ctx->idx;
ctx               844 arch/sparc/net/bpf_jit_comp_64.c 	emit(JMPL | IMMED | RS1(I7) | S13(8) | RD(G0), ctx);
ctx               847 arch/sparc/net/bpf_jit_comp_64.c 	emit(RESTORE | RS1(bpf2sparc[BPF_REG_0]) | RS2(G0) | RD(O0), ctx);
ctx               850 arch/sparc/net/bpf_jit_comp_64.c static void emit_tail_call(struct jit_ctx *ctx)
ctx               857 arch/sparc/net/bpf_jit_comp_64.c 	ctx->saw_tail_call = true;
ctx               860 arch/sparc/net/bpf_jit_comp_64.c 	emit(LD32 | IMMED | RS1(bpf_array) | S13(off) | RD(tmp), ctx);
ctx               861 arch/sparc/net/bpf_jit_comp_64.c 	emit_cmp(bpf_index, tmp, ctx);
ctx               863 arch/sparc/net/bpf_jit_comp_64.c 	emit_branch(BGEU, ctx->idx, ctx->idx + OFFSET1, ctx);
ctx               864 arch/sparc/net/bpf_jit_comp_64.c 	emit_nop(ctx);
ctx               867 arch/sparc/net/bpf_jit_comp_64.c 	emit(LD32 | IMMED | RS1(SP) | S13(off) | RD(tmp), ctx);
ctx               868 arch/sparc/net/bpf_jit_comp_64.c 	emit_cmpi(tmp, MAX_TAIL_CALL_CNT, ctx);
ctx               870 arch/sparc/net/bpf_jit_comp_64.c 	emit_branch(BGU, ctx->idx, ctx->idx + OFFSET2, ctx);
ctx               871 arch/sparc/net/bpf_jit_comp_64.c 	emit_nop(ctx);
ctx               873 arch/sparc/net/bpf_jit_comp_64.c 	emit_alu_K(ADD, tmp, 1, ctx);
ctx               875 arch/sparc/net/bpf_jit_comp_64.c 	emit(ST32 | IMMED | RS1(SP) | S13(off) | RD(tmp), ctx);
ctx               877 arch/sparc/net/bpf_jit_comp_64.c 	emit_alu3_K(SLL, bpf_index, 3, tmp, ctx);
ctx               878 arch/sparc/net/bpf_jit_comp_64.c 	emit_alu(ADD, bpf_array, tmp, ctx);
ctx               880 arch/sparc/net/bpf_jit_comp_64.c 	emit(LD64 | IMMED | RS1(tmp) | S13(off) | RD(tmp), ctx);
ctx               882 arch/sparc/net/bpf_jit_comp_64.c 	emit_cmpi(tmp, 0, ctx);
ctx               884 arch/sparc/net/bpf_jit_comp_64.c 	emit_branch(BE, ctx->idx, ctx->idx + OFFSET3, ctx);
ctx               885 arch/sparc/net/bpf_jit_comp_64.c 	emit_nop(ctx);
ctx               888 arch/sparc/net/bpf_jit_comp_64.c 	emit(LD64 | IMMED | RS1(tmp) | S13(off) | RD(tmp), ctx);
ctx               891 arch/sparc/net/bpf_jit_comp_64.c 	emit(JMPL | IMMED | RS1(tmp) | S13(off) | RD(G0), ctx);
ctx               892 arch/sparc/net/bpf_jit_comp_64.c 	emit_nop(ctx);
ctx               895 arch/sparc/net/bpf_jit_comp_64.c static int build_insn(const struct bpf_insn *insn, struct jit_ctx *ctx)
ctx               900 arch/sparc/net/bpf_jit_comp_64.c 	const int i = insn - ctx->prog->insnsi;
ctx               905 arch/sparc/net/bpf_jit_comp_64.c 		ctx->saw_frame_pointer = true;
ctx               910 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3_K(SRL, src, 0, dst, ctx);
ctx               915 arch/sparc/net/bpf_jit_comp_64.c 		emit_reg_move(src, dst, ctx);
ctx               920 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(ADD, src, dst, ctx);
ctx               924 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SUB, src, dst, ctx);
ctx               928 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(AND, src, dst, ctx);
ctx               932 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(OR, src, dst, ctx);
ctx               936 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(XOR, src, dst, ctx);
ctx               939 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(MUL, src, dst, ctx);
ctx               942 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(MULX, src, dst, ctx);
ctx               945 arch/sparc/net/bpf_jit_comp_64.c 		emit_write_y(G0, ctx);
ctx               946 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(DIV, src, dst, ctx);
ctx               951 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(UDIVX, src, dst, ctx);
ctx               956 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx               958 arch/sparc/net/bpf_jit_comp_64.c 		emit_write_y(G0, ctx);
ctx               959 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(DIV, dst, src, tmp, ctx);
ctx               960 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(MULX, tmp, src, tmp, ctx);
ctx               961 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(SUB, dst, tmp, dst, ctx);
ctx               967 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx               969 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(UDIVX, dst, src, tmp, ctx);
ctx               970 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(MULX, tmp, src, tmp, ctx);
ctx               971 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(SUB, dst, tmp, dst, ctx);
ctx               975 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SLL, src, dst, ctx);
ctx               978 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SLLX, src, dst, ctx);
ctx               981 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SRL, src, dst, ctx);
ctx               986 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SRLX, src, dst, ctx);
ctx               989 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SRA, src, dst, ctx);
ctx               992 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu(SRAX, src, dst, ctx);
ctx               998 arch/sparc/net/bpf_jit_comp_64.c 		emit(SUB | RS1(0) | RS2(dst) | RD(dst), ctx);
ctx              1004 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SLL, dst, 16, ctx);
ctx              1005 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SRL, dst, 16, ctx);
ctx              1010 arch/sparc/net/bpf_jit_comp_64.c 			if (!ctx->prog->aux->verifier_zext)
ctx              1011 arch/sparc/net/bpf_jit_comp_64.c 				emit_alu_K(SRL, dst, 0, ctx);
ctx              1025 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx              1028 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(AND, dst, 0xff, tmp, ctx);
ctx              1029 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SRL, dst, 8, dst, ctx);
ctx              1030 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(AND, dst, 0xff, dst, ctx);
ctx              1031 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SLL, tmp, 8, tmp, ctx);
ctx              1032 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu(OR, tmp, dst, ctx);
ctx              1038 arch/sparc/net/bpf_jit_comp_64.c 			ctx->tmp_2_used = true;
ctx              1039 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SRL, dst, 24, tmp, ctx);	/* tmp  = dst >> 24 */
ctx              1040 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SRL, dst, 16, tmp2, ctx);	/* tmp2 = dst >> 16 */
ctx              1041 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(AND, tmp2, 0xff, tmp2, ctx);/* tmp2 = tmp2 & 0xff */
ctx              1042 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SLL, tmp2, 8, tmp2, ctx);	/* tmp2 = tmp2 << 8 */
ctx              1043 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu(OR, tmp2, tmp, ctx);		/* tmp  = tmp | tmp2 */
ctx              1044 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SRL, dst, 8, tmp2, ctx);	/* tmp2 = dst >> 8 */
ctx              1045 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(AND, tmp2, 0xff, tmp2, ctx);/* tmp2 = tmp2 & 0xff */
ctx              1046 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SLL, tmp2, 16, tmp2, ctx);	/* tmp2 = tmp2 << 16 */
ctx              1047 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu(OR, tmp2, tmp, ctx);		/* tmp  = tmp | tmp2 */
ctx              1048 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(AND, dst, 0xff, dst, ctx);	/* dst	= dst & 0xff */
ctx              1049 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(SLL, dst, 24, dst, ctx);	/* dst  = dst << 24 */
ctx              1050 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu(OR, tmp, dst, ctx);		/* dst  = dst | tmp */
ctx              1056 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu3_K(ADD, SP, STACK_BIAS + 128, tmp, ctx);
ctx              1057 arch/sparc/net/bpf_jit_comp_64.c 			emit(ST64 | RS1(tmp) | RS2(G0) | RD(dst), ctx);
ctx              1058 arch/sparc/net/bpf_jit_comp_64.c 			emit(LD64A | ASI(ASI_PL) | RS1(tmp) | RS2(G0) | RD(dst), ctx);
ctx              1065 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm32(imm, dst, ctx);
ctx              1070 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm_sext(imm, dst, ctx);
ctx              1075 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(ADD, dst, imm, ctx);
ctx              1079 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SUB, dst, imm, ctx);
ctx              1083 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(AND, dst, imm, ctx);
ctx              1087 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(OR, dst, imm, ctx);
ctx              1091 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(XOR, dst, imm, ctx);
ctx              1094 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(MUL, dst, imm, ctx);
ctx              1097 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(MULX, dst, imm, ctx);
ctx              1103 arch/sparc/net/bpf_jit_comp_64.c 		emit_write_y(G0, ctx);
ctx              1104 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(DIV, dst, imm, ctx);
ctx              1110 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(UDIVX, dst, imm, ctx);
ctx              1122 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_2_used = true;
ctx              1125 arch/sparc/net/bpf_jit_comp_64.c 			emit_write_y(G0, ctx);
ctx              1127 arch/sparc/net/bpf_jit_comp_64.c 			emit(div | IMMED | RS1(dst) | S13(imm) | RD(tmp), ctx);
ctx              1128 arch/sparc/net/bpf_jit_comp_64.c 			emit(MULX | IMMED | RS1(tmp) | S13(imm) | RD(tmp), ctx);
ctx              1129 arch/sparc/net/bpf_jit_comp_64.c 			emit(SUB | RS1(dst) | RS2(tmp) | RD(dst), ctx);
ctx              1133 arch/sparc/net/bpf_jit_comp_64.c 			ctx->tmp_1_used = true;
ctx              1135 arch/sparc/net/bpf_jit_comp_64.c 			emit_set_const_sext(imm, tmp1, ctx);
ctx              1136 arch/sparc/net/bpf_jit_comp_64.c 			emit(div | RS1(dst) | RS2(tmp1) | RD(tmp), ctx);
ctx              1137 arch/sparc/net/bpf_jit_comp_64.c 			emit(MULX | RS1(tmp) | RS2(tmp1) | RD(tmp), ctx);
ctx              1138 arch/sparc/net/bpf_jit_comp_64.c 			emit(SUB | RS1(dst) | RS2(tmp) | RD(dst), ctx);
ctx              1143 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SLL, dst, imm, ctx);
ctx              1146 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SLLX, dst, imm, ctx);
ctx              1149 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SRL, dst, imm, ctx);
ctx              1154 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SRLX, dst, imm, ctx);
ctx              1157 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SRA, dst, imm, ctx);
ctx              1160 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu_K(SRAX, dst, imm, ctx);
ctx              1165 arch/sparc/net/bpf_jit_comp_64.c 		    !ctx->prog->aux->verifier_zext)
ctx              1166 arch/sparc/net/bpf_jit_comp_64.c 			emit_alu_K(SRL, dst, 0, ctx);
ctx              1171 arch/sparc/net/bpf_jit_comp_64.c 		emit_branch(BA, ctx->idx, ctx->offset[i + off], ctx);
ctx              1172 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx              1188 arch/sparc/net/bpf_jit_comp_64.c 		err = emit_compare_and_branch(code, dst, src, 0, false, i + off, ctx);
ctx              1207 arch/sparc/net/bpf_jit_comp_64.c 		err = emit_compare_and_branch(code, dst, 0, imm, true, i + off, ctx);
ctx              1218 arch/sparc/net/bpf_jit_comp_64.c 		ctx->saw_call = true;
ctx              1220 arch/sparc/net/bpf_jit_comp_64.c 		emit_call((u32 *)func, ctx);
ctx              1221 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx              1223 arch/sparc/net/bpf_jit_comp_64.c 		emit_reg_move(O0, bpf2sparc[BPF_REG_0], ctx);
ctx              1229 arch/sparc/net/bpf_jit_comp_64.c 		emit_tail_call(ctx);
ctx              1236 arch/sparc/net/bpf_jit_comp_64.c 		if (i == ctx->prog->len - 1)
ctx              1238 arch/sparc/net/bpf_jit_comp_64.c 		emit_branch(BA, ctx->idx, ctx->epilogue_offset, ctx);
ctx              1239 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx              1249 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm64(imm64, dst, ctx);
ctx              1262 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx              1282 arch/sparc/net/bpf_jit_comp_64.c 			emit_loadimm(off, tmp, ctx);
ctx              1285 arch/sparc/net/bpf_jit_comp_64.c 		emit(opcode | RS1(src) | rs2 | RD(dst), ctx);
ctx              1300 arch/sparc/net/bpf_jit_comp_64.c 			ctx->saw_frame_pointer = true;
ctx              1302 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_2_used = true;
ctx              1303 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm(imm, tmp2, ctx);
ctx              1324 arch/sparc/net/bpf_jit_comp_64.c 			ctx->tmp_1_used = true;
ctx              1325 arch/sparc/net/bpf_jit_comp_64.c 			emit_loadimm(off, tmp, ctx);
ctx              1328 arch/sparc/net/bpf_jit_comp_64.c 		emit(opcode | RS1(dst) | rs2 | RD(tmp2), ctx);
ctx              1341 arch/sparc/net/bpf_jit_comp_64.c 			ctx->saw_frame_pointer = true;
ctx              1361 arch/sparc/net/bpf_jit_comp_64.c 			ctx->tmp_1_used = true;
ctx              1362 arch/sparc/net/bpf_jit_comp_64.c 			emit_loadimm(off, tmp, ctx);
ctx              1365 arch/sparc/net/bpf_jit_comp_64.c 		emit(opcode | RS1(dst) | rs2 | RD(src), ctx);
ctx              1376 arch/sparc/net/bpf_jit_comp_64.c 			ctx->saw_frame_pointer = true;
ctx              1378 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx              1379 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_2_used = true;
ctx              1380 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_3_used = true;
ctx              1381 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm(off, tmp, ctx);
ctx              1382 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(ADD, dst, tmp, tmp, ctx);
ctx              1384 arch/sparc/net/bpf_jit_comp_64.c 		emit(LD32 | RS1(tmp) | RS2(G0) | RD(tmp2), ctx);
ctx              1385 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(ADD, tmp2, src, tmp3, ctx);
ctx              1386 arch/sparc/net/bpf_jit_comp_64.c 		emit(CAS | ASI(ASI_P) | RS1(tmp) | RS2(tmp2) | RD(tmp3), ctx);
ctx              1387 arch/sparc/net/bpf_jit_comp_64.c 		emit_cmp(tmp2, tmp3, ctx);
ctx              1388 arch/sparc/net/bpf_jit_comp_64.c 		emit_branch(BNE, 4, 0, ctx);
ctx              1389 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx              1399 arch/sparc/net/bpf_jit_comp_64.c 			ctx->saw_frame_pointer = true;
ctx              1401 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_1_used = true;
ctx              1402 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_2_used = true;
ctx              1403 arch/sparc/net/bpf_jit_comp_64.c 		ctx->tmp_3_used = true;
ctx              1404 arch/sparc/net/bpf_jit_comp_64.c 		emit_loadimm(off, tmp, ctx);
ctx              1405 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(ADD, dst, tmp, tmp, ctx);
ctx              1407 arch/sparc/net/bpf_jit_comp_64.c 		emit(LD64 | RS1(tmp) | RS2(G0) | RD(tmp2), ctx);
ctx              1408 arch/sparc/net/bpf_jit_comp_64.c 		emit_alu3(ADD, tmp2, src, tmp3, ctx);
ctx              1409 arch/sparc/net/bpf_jit_comp_64.c 		emit(CASX | ASI(ASI_P) | RS1(tmp) | RS2(tmp2) | RD(tmp3), ctx);
ctx              1410 arch/sparc/net/bpf_jit_comp_64.c 		emit_cmp(tmp2, tmp3, ctx);
ctx              1411 arch/sparc/net/bpf_jit_comp_64.c 		emit_branch(BNE, 4, 0, ctx);
ctx              1412 arch/sparc/net/bpf_jit_comp_64.c 		emit_nop(ctx);
ctx              1424 arch/sparc/net/bpf_jit_comp_64.c static int build_body(struct jit_ctx *ctx)
ctx              1426 arch/sparc/net/bpf_jit_comp_64.c 	const struct bpf_prog *prog = ctx->prog;
ctx              1433 arch/sparc/net/bpf_jit_comp_64.c 		ret = build_insn(insn, ctx);
ctx              1437 arch/sparc/net/bpf_jit_comp_64.c 			ctx->offset[i] = ctx->idx;
ctx              1440 arch/sparc/net/bpf_jit_comp_64.c 		ctx->offset[i] = ctx->idx;
ctx              1463 arch/sparc/net/bpf_jit_comp_64.c 	struct jit_ctx ctx;
ctx              1474 arch/sparc/net/bpf_jit_comp_64.c 	struct jit_ctx ctx;
ctx              1501 arch/sparc/net/bpf_jit_comp_64.c 	if (jit_data->ctx.offset) {
ctx              1502 arch/sparc/net/bpf_jit_comp_64.c 		ctx = jit_data->ctx;
ctx              1506 arch/sparc/net/bpf_jit_comp_64.c 		image_size = sizeof(u32) * ctx.idx;
ctx              1512 arch/sparc/net/bpf_jit_comp_64.c 	memset(&ctx, 0, sizeof(ctx));
ctx              1513 arch/sparc/net/bpf_jit_comp_64.c 	ctx.prog = prog;
ctx              1515 arch/sparc/net/bpf_jit_comp_64.c 	ctx.offset = kmalloc_array(prog->len, sizeof(unsigned int), GFP_KERNEL);
ctx              1516 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx.offset == NULL) {
ctx              1525 arch/sparc/net/bpf_jit_comp_64.c 		ctx.offset[i] = i * (12 * 4);
ctx              1529 arch/sparc/net/bpf_jit_comp_64.c 		ctx.idx = 0;
ctx              1531 arch/sparc/net/bpf_jit_comp_64.c 		build_prologue(&ctx);
ctx              1532 arch/sparc/net/bpf_jit_comp_64.c 		if (build_body(&ctx)) {
ctx              1536 arch/sparc/net/bpf_jit_comp_64.c 		build_epilogue(&ctx);
ctx              1540 arch/sparc/net/bpf_jit_comp_64.c 				ctx.idx * 4,
ctx              1541 arch/sparc/net/bpf_jit_comp_64.c 				ctx.tmp_1_used ? '1' : ' ',
ctx              1542 arch/sparc/net/bpf_jit_comp_64.c 				ctx.tmp_2_used ? '2' : ' ',
ctx              1543 arch/sparc/net/bpf_jit_comp_64.c 				ctx.tmp_3_used ? '3' : ' ',
ctx              1544 arch/sparc/net/bpf_jit_comp_64.c 				ctx.saw_frame_pointer ? 'F' : ' ',
ctx              1545 arch/sparc/net/bpf_jit_comp_64.c 				ctx.saw_call ? 'C' : ' ',
ctx              1546 arch/sparc/net/bpf_jit_comp_64.c 				ctx.saw_tail_call ? 'T' : ' ');
ctx              1548 arch/sparc/net/bpf_jit_comp_64.c 		if (ctx.idx * 4 == prev_image_size)
ctx              1550 arch/sparc/net/bpf_jit_comp_64.c 		prev_image_size = ctx.idx * 4;
ctx              1555 arch/sparc/net/bpf_jit_comp_64.c 	image_size = sizeof(u32) * ctx.idx;
ctx              1563 arch/sparc/net/bpf_jit_comp_64.c 	ctx.image = (u32 *)image_ptr;
ctx              1565 arch/sparc/net/bpf_jit_comp_64.c 	ctx.idx = 0;
ctx              1567 arch/sparc/net/bpf_jit_comp_64.c 	build_prologue(&ctx);
ctx              1569 arch/sparc/net/bpf_jit_comp_64.c 	if (build_body(&ctx)) {
ctx              1575 arch/sparc/net/bpf_jit_comp_64.c 	build_epilogue(&ctx);
ctx              1577 arch/sparc/net/bpf_jit_comp_64.c 	if (ctx.idx * 4 != prev_image_size) {
ctx              1579 arch/sparc/net/bpf_jit_comp_64.c 		       prev_image_size, ctx.idx * 4);
ctx              1586 arch/sparc/net/bpf_jit_comp_64.c 		bpf_jit_dump(prog->len, image_size, pass, ctx.image);
ctx              1593 arch/sparc/net/bpf_jit_comp_64.c 		jit_data->ctx = ctx;
ctx              1598 arch/sparc/net/bpf_jit_comp_64.c 	prog->bpf_func = (void *)ctx.image;
ctx              1603 arch/sparc/net/bpf_jit_comp_64.c 		bpf_prog_fill_jited_linfo(prog, ctx.offset);
ctx              1605 arch/sparc/net/bpf_jit_comp_64.c 		kfree(ctx.offset);
ctx                24 arch/sparc/prom/mp.c prom_startcpu(int cpunode, struct linux_prom_registers *ctable_reg, int ctx, char *pc)
ctx                37 arch/sparc/prom/mp.c 		ret = (*(romvec->v3_cpustart))(cpunode, (int) ctable_reg, ctx, pc);
ctx               798 arch/um/drivers/virtio_uml.c 				     const char *name, bool ctx)
ctx               816 arch/um/drivers/virtio_uml.c 				    ctx, vu_notify, callback, info->name);
ctx               866 arch/um/drivers/virtio_uml.c 		       const char * const names[], const bool *ctx,
ctx               884 arch/um/drivers/virtio_uml.c 				     ctx ? ctx[i] : false);
ctx               137 arch/x86/crypto/aegis128-aesni-glue.c 	u8 *ctx = crypto_aead_ctx(aead);
ctx               138 arch/x86/crypto/aegis128-aesni-glue.c 	ctx = PTR_ALIGN(ctx, __alignof__(struct aegis_ctx));
ctx               139 arch/x86/crypto/aegis128-aesni-glue.c 	return (void *)ctx;
ctx               145 arch/x86/crypto/aegis128-aesni-glue.c 	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(aead);
ctx               152 arch/x86/crypto/aegis128-aesni-glue.c 	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
ctx               173 arch/x86/crypto/aegis128-aesni-glue.c 	struct aegis_ctx *ctx = crypto_aegis128_aesni_ctx(tfm);
ctx               181 arch/x86/crypto/aegis128-aesni-glue.c 	crypto_aegis128_aesni_init(&state, ctx->key.bytes, req->iv);
ctx                84 arch/x86/crypto/aesni-intel_glue.c asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx                86 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
ctx                88 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
ctx                90 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
ctx                92 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
ctx                94 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
ctx                96 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
ctx               104 arch/x86/crypto/aesni-intel_glue.c static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
ctx               106 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
ctx               109 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
ctx               127 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc(void *ctx,
ctx               149 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec(void *ctx,
ctx               156 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_init(void *ctx,
ctx               161 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc_update(void *ctx,
ctx               164 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec_update(void *ctx,
ctx               168 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_finalize(void *ctx,
ctx               173 arch/x86/crypto/aesni-intel_glue.c 	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
ctx               175 arch/x86/crypto/aesni-intel_glue.c 	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
ctx               177 arch/x86/crypto/aesni-intel_glue.c 	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
ctx               179 arch/x86/crypto/aesni-intel_glue.c 	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
ctx               209 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
ctx               212 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
ctx               216 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
ctx               220 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
ctx               226 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
ctx               254 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
ctx               257 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
ctx               261 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
ctx               265 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
ctx               271 arch/x86/crypto/aesni-intel_glue.c asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
ctx               320 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
ctx               331 arch/x86/crypto/aesni-intel_glue.c 		err = aes_expandkey(ctx, in_key, key_len);
ctx               334 arch/x86/crypto/aesni-intel_glue.c 		err = aesni_set_key(ctx, in_key, key_len);
ctx               349 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
ctx               352 arch/x86/crypto/aesni-intel_glue.c 		aes_encrypt(ctx, dst, src);
ctx               355 arch/x86/crypto/aesni-intel_glue.c 		aesni_enc(ctx, dst, src);
ctx               362 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
ctx               365 arch/x86/crypto/aesni-intel_glue.c 		aes_decrypt(ctx, dst, src);
ctx               368 arch/x86/crypto/aesni-intel_glue.c 		aesni_dec(ctx, dst, src);
ctx               383 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
ctx               392 arch/x86/crypto/aesni-intel_glue.c 		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx               405 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
ctx               414 arch/x86/crypto/aesni-intel_glue.c 		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx               427 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
ctx               436 arch/x86/crypto/aesni-intel_glue.c 		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx               449 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
ctx               458 arch/x86/crypto/aesni-intel_glue.c 		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx               469 arch/x86/crypto/aesni-intel_glue.c static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
ctx               478 arch/x86/crypto/aesni-intel_glue.c 	aesni_enc(ctx, keystream, ctrblk);
ctx               485 arch/x86/crypto/aesni-intel_glue.c static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
ctx               494 arch/x86/crypto/aesni-intel_glue.c 	if (ctx->key_length == AES_KEYSIZE_128)
ctx               495 arch/x86/crypto/aesni-intel_glue.c 		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
ctx               496 arch/x86/crypto/aesni-intel_glue.c 	else if (ctx->key_length == AES_KEYSIZE_192)
ctx               497 arch/x86/crypto/aesni-intel_glue.c 		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
ctx               499 arch/x86/crypto/aesni-intel_glue.c 		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
ctx               506 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
ctx               515 arch/x86/crypto/aesni-intel_glue.c 		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx               521 arch/x86/crypto/aesni-intel_glue.c 		ctr_crypt_final(ctx, &walk);
ctx               532 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               542 arch/x86/crypto/aesni-intel_glue.c 	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
ctx               548 arch/x86/crypto/aesni-intel_glue.c 	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
ctx               553 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
ctx               555 arch/x86/crypto/aesni-intel_glue.c 	aesni_enc(ctx, out, in);
ctx               558 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx               560 arch/x86/crypto/aesni-intel_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
ctx               563 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx               565 arch/x86/crypto/aesni-intel_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
ctx               568 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx               570 arch/x86/crypto/aesni-intel_glue.c 	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
ctx               573 arch/x86/crypto/aesni-intel_glue.c static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx               575 arch/x86/crypto/aesni-intel_glue.c 	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
ctx               607 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               611 arch/x86/crypto/aesni-intel_glue.c 				   aes_ctx(ctx->raw_tweak_ctx),
ctx               612 arch/x86/crypto/aesni-intel_glue.c 				   aes_ctx(ctx->raw_crypt_ctx),
ctx               619 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               623 arch/x86/crypto/aesni-intel_glue.c 				   aes_ctx(ctx->raw_tweak_ctx),
ctx               624 arch/x86/crypto/aesni-intel_glue.c 				   aes_ctx(ctx->raw_crypt_ctx),
ctx               631 arch/x86/crypto/aesni-intel_glue.c 	struct crypto_aes_ctx ctx;
ctx               634 arch/x86/crypto/aesni-intel_glue.c 	ret = aes_expandkey(&ctx, key, key_len);
ctx               642 arch/x86/crypto/aesni-intel_glue.c 	aes_encrypt(&ctx, hash_subkey, hash_subkey);
ctx               644 arch/x86/crypto/aesni-intel_glue.c 	memzero_explicit(&ctx, sizeof(ctx));
ctx               651 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
ctx               660 arch/x86/crypto/aesni-intel_glue.c 	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
ctx               663 arch/x86/crypto/aesni-intel_glue.c 				  &ctx->aes_key_expanded, key, key_len) ?:
ctx               664 arch/x86/crypto/aesni-intel_glue.c 	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
ctx               854 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
ctx               855 arch/x86/crypto/aesni-intel_glue.c 	void *aes_ctx = &(ctx->aes_key_expanded);
ctx               868 arch/x86/crypto/aesni-intel_glue.c 		*(iv+i) = ctx->nonce[i];
ctx               873 arch/x86/crypto/aesni-intel_glue.c 	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
ctx               881 arch/x86/crypto/aesni-intel_glue.c 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
ctx               882 arch/x86/crypto/aesni-intel_glue.c 	void *aes_ctx = &(ctx->aes_key_expanded);
ctx               895 arch/x86/crypto/aesni-intel_glue.c 		*(iv+i) = ctx->nonce[i];
ctx               900 arch/x86/crypto/aesni-intel_glue.c 	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
ctx              1001 arch/x86/crypto/aesni-intel_glue.c 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);
ctx              1004 arch/x86/crypto/aesni-intel_glue.c 				  &ctx->aes_key_expanded, key, key_len) ?:
ctx              1005 arch/x86/crypto/aesni-intel_glue.c 	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
ctx              1011 arch/x86/crypto/aesni-intel_glue.c 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
ctx              1012 arch/x86/crypto/aesni-intel_glue.c 	void *aes_ctx = &(ctx->aes_key_expanded);
ctx              1019 arch/x86/crypto/aesni-intel_glue.c 	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
ctx              1027 arch/x86/crypto/aesni-intel_glue.c 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
ctx              1028 arch/x86/crypto/aesni-intel_glue.c 	void *aes_ctx = &(ctx->aes_key_expanded);
ctx              1034 arch/x86/crypto/aesni-intel_glue.c 	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
ctx                22 arch/x86/crypto/blowfish_glue.c asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
ctx                24 arch/x86/crypto/blowfish_glue.c asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);
ctx                27 arch/x86/crypto/blowfish_glue.c asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
ctx                29 arch/x86/crypto/blowfish_glue.c asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
ctx                32 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
ctx                34 arch/x86/crypto/blowfish_glue.c 	__blowfish_enc_blk(ctx, dst, src, false);
ctx                37 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst,
ctx                40 arch/x86/crypto/blowfish_glue.c 	__blowfish_enc_blk(ctx, dst, src, true);
ctx                43 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
ctx                46 arch/x86/crypto/blowfish_glue.c 	__blowfish_enc_blk_4way(ctx, dst, src, false);
ctx                49 arch/x86/crypto/blowfish_glue.c static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst,
ctx                52 arch/x86/crypto/blowfish_glue.c 	__blowfish_enc_blk_4way(ctx, dst, src, true);
ctx                77 arch/x86/crypto/blowfish_glue.c 	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                91 arch/x86/crypto/blowfish_glue.c 				fn_4way(ctx, wdst, wsrc);
ctx               104 arch/x86/crypto/blowfish_glue.c 			fn(ctx, wdst, wsrc);
ctx               128 arch/x86/crypto/blowfish_glue.c static unsigned int __cbc_encrypt(struct bf_ctx *ctx,
ctx               139 arch/x86/crypto/blowfish_glue.c 		blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
ctx               154 arch/x86/crypto/blowfish_glue.c 	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               162 arch/x86/crypto/blowfish_glue.c 		nbytes = __cbc_encrypt(ctx, &walk);
ctx               169 arch/x86/crypto/blowfish_glue.c static unsigned int __cbc_decrypt(struct bf_ctx *ctx,
ctx               196 arch/x86/crypto/blowfish_glue.c 			blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);
ctx               214 arch/x86/crypto/blowfish_glue.c 		blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);
ctx               235 arch/x86/crypto/blowfish_glue.c 	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               243 arch/x86/crypto/blowfish_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
ctx               250 arch/x86/crypto/blowfish_glue.c static void ctr_crypt_final(struct bf_ctx *ctx, struct skcipher_walk *walk)
ctx               258 arch/x86/crypto/blowfish_glue.c 	blowfish_enc_blk(ctx, keystream, ctrblk);
ctx               264 arch/x86/crypto/blowfish_glue.c static unsigned int __ctr_crypt(struct bf_ctx *ctx, struct skcipher_walk *walk)
ctx               289 arch/x86/crypto/blowfish_glue.c 			blowfish_enc_blk_xor_4way(ctx, (u8 *)dst,
ctx               307 arch/x86/crypto/blowfish_glue.c 		blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks);
ctx               321 arch/x86/crypto/blowfish_glue.c 	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               329 arch/x86/crypto/blowfish_glue.c 		nbytes = __ctr_crypt(ctx, &walk);
ctx               334 arch/x86/crypto/blowfish_glue.c 		ctr_crypt_final(ctx, &walk);
ctx                22 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
ctx                24 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
ctx                27 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
ctx                29 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
ctx                32 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
ctx                34 arch/x86/crypto/camellia_aesni_avx2_glue.c asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
ctx               181 arch/x86/crypto/camellia_aesni_avx2_glue.c 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               185 arch/x86/crypto/camellia_aesni_avx2_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               191 arch/x86/crypto/camellia_aesni_avx2_glue.c 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               195 arch/x86/crypto/camellia_aesni_avx2_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
ctx                21 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                25 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                29 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                33 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                37 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                41 arch/x86/crypto/camellia_aesni_avx_glue.c asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                45 arch/x86/crypto/camellia_aesni_avx_glue.c void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                47 arch/x86/crypto/camellia_aesni_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                52 arch/x86/crypto/camellia_aesni_avx_glue.c void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                54 arch/x86/crypto/camellia_aesni_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx               185 arch/x86/crypto/camellia_aesni_avx_glue.c 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               194 arch/x86/crypto/camellia_aesni_avx_glue.c 	err = __camellia_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
ctx               199 arch/x86/crypto/camellia_aesni_avx_glue.c 	return __camellia_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
ctx               207 arch/x86/crypto/camellia_aesni_avx_glue.c 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               211 arch/x86/crypto/camellia_aesni_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               217 arch/x86/crypto/camellia_aesni_avx_glue.c 	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               221 arch/x86/crypto/camellia_aesni_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
ctx                21 arch/x86/crypto/camellia_glue.c asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst,
ctx                24 arch/x86/crypto/camellia_glue.c asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst,
ctx                29 arch/x86/crypto/camellia_glue.c asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst,
ctx                32 arch/x86/crypto/camellia_glue.c asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst,
ctx              1270 arch/x86/crypto/camellia_glue.c void camellia_decrypt_cbc_2way(void *ctx, u128 *dst, const u128 *src)
ctx              1274 arch/x86/crypto/camellia_glue.c 	camellia_dec_blk_2way(ctx, (u8 *)dst, (u8 *)src);
ctx              1280 arch/x86/crypto/camellia_glue.c void camellia_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx              1290 arch/x86/crypto/camellia_glue.c 	camellia_enc_blk_xor(ctx, (u8 *)dst, (u8 *)&ctrblk);
ctx              1294 arch/x86/crypto/camellia_glue.c void camellia_crypt_ctr_2way(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx              1308 arch/x86/crypto/camellia_glue.c 	camellia_enc_blk_xor_2way(ctx, (u8 *)dst, (u8 *)ctrblks);
ctx                20 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ecb_enc_16way(struct cast5_ctx *ctx, u8 *dst,
ctx                22 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ecb_dec_16way(struct cast5_ctx *ctx, u8 *dst,
ctx                24 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_cbc_dec_16way(struct cast5_ctx *ctx, u8 *dst,
ctx                26 arch/x86/crypto/cast5_avx_glue.c asmlinkage void cast5_ctr_16way(struct cast5_ctx *ctx, u8 *dst, const u8 *src,
ctx                51 arch/x86/crypto/cast5_avx_glue.c 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                55 arch/x86/crypto/cast5_avx_glue.c 	void (*fn)(struct cast5_ctx *ctx, u8 *dst, const u8 *src);
ctx                70 arch/x86/crypto/cast5_avx_glue.c 				fn(ctx, wdst, wsrc);
ctx                85 arch/x86/crypto/cast5_avx_glue.c 			fn(ctx, wdst, wsrc);
ctx               114 arch/x86/crypto/cast5_avx_glue.c 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               128 arch/x86/crypto/cast5_avx_glue.c 			__cast5_encrypt(ctx, (u8 *)dst, (u8 *)dst);
ctx               142 arch/x86/crypto/cast5_avx_glue.c static unsigned int __cbc_decrypt(struct cast5_ctx *ctx,
ctx               164 arch/x86/crypto/cast5_avx_glue.c 			cast5_cbc_dec_16way(ctx, (u8 *)dst, (u8 *)src);
ctx               178 arch/x86/crypto/cast5_avx_glue.c 		__cast5_decrypt(ctx, (u8 *)dst, (u8 *)src);
ctx               199 arch/x86/crypto/cast5_avx_glue.c 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               209 arch/x86/crypto/cast5_avx_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
ctx               217 arch/x86/crypto/cast5_avx_glue.c static void ctr_crypt_final(struct skcipher_walk *walk, struct cast5_ctx *ctx)
ctx               225 arch/x86/crypto/cast5_avx_glue.c 	__cast5_encrypt(ctx, keystream, ctrblk);
ctx               232 arch/x86/crypto/cast5_avx_glue.c 				struct cast5_ctx *ctx)
ctx               242 arch/x86/crypto/cast5_avx_glue.c 			cast5_ctr_16way(ctx, (u8 *)dst, (u8 *)src,
ctx               264 arch/x86/crypto/cast5_avx_glue.c 		__cast5_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
ctx               279 arch/x86/crypto/cast5_avx_glue.c 	struct cast5_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               289 arch/x86/crypto/cast5_avx_glue.c 		nbytes = __ctr_crypt(&walk, ctx);
ctx               296 arch/x86/crypto/cast5_avx_glue.c 		ctr_crypt_final(&walk, ctx);
ctx                23 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
ctx                25 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
ctx                28 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
ctx                30 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
ctx                33 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
ctx                35 arch/x86/crypto/cast6_avx_glue.c asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
ctx                44 arch/x86/crypto/cast6_avx_glue.c static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                46 arch/x86/crypto/cast6_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                50 arch/x86/crypto/cast6_avx_glue.c static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                52 arch/x86/crypto/cast6_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                56 arch/x86/crypto/cast6_avx_glue.c static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                63 arch/x86/crypto/cast6_avx_glue.c 	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
ctx               179 arch/x86/crypto/cast6_avx_glue.c 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               188 arch/x86/crypto/cast6_avx_glue.c 	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
ctx               193 arch/x86/crypto/cast6_avx_glue.c 	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
ctx               200 arch/x86/crypto/cast6_avx_glue.c 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               204 arch/x86/crypto/cast6_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               210 arch/x86/crypto/cast6_avx_glue.c 	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               214 arch/x86/crypto/cast6_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
ctx               127 arch/x86/crypto/chacha_glue.c 				  const struct chacha_ctx *ctx, const u8 *iv)
ctx               136 arch/x86/crypto/chacha_glue.c 	crypto_chacha_init(state, ctx, iv);
ctx               147 arch/x86/crypto/chacha_glue.c 			      nbytes, ctx->nrounds);
ctx               165 arch/x86/crypto/chacha_glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               177 arch/x86/crypto/chacha_glue.c 	err = chacha_simd_stream_xor(&walk, ctx, req->iv);
ctx               185 arch/x86/crypto/chacha_glue.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               201 arch/x86/crypto/chacha_glue.c 	crypto_chacha_init(state, ctx, req->iv);
ctx               205 arch/x86/crypto/chacha_glue.c 	hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
ctx               206 arch/x86/crypto/chacha_glue.c 	subctx.nrounds = ctx->nrounds;
ctx                45 arch/x86/crypto/crct10dif-pclmul_glue.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                47 arch/x86/crypto/crct10dif-pclmul_glue.c 	ctx->crc = 0;
ctx                55 arch/x86/crypto/crct10dif-pclmul_glue.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                59 arch/x86/crypto/crct10dif-pclmul_glue.c 		ctx->crc = crc_t10dif_pcl(ctx->crc, data, length);
ctx                62 arch/x86/crypto/crct10dif-pclmul_glue.c 		ctx->crc = crc_t10dif_generic(ctx->crc, data, length);
ctx                68 arch/x86/crypto/crct10dif-pclmul_glue.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                70 arch/x86/crypto/crct10dif-pclmul_glue.c 	*(__u16 *)out = ctx->crc;
ctx                88 arch/x86/crypto/crct10dif-pclmul_glue.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                90 arch/x86/crypto/crct10dif-pclmul_glue.c 	return __chksum_finup(ctx->crc, data, len, out);
ctx                34 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_enc_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
ctx                37 arch/x86/crypto/des3_ede_glue.c 	u32 *enc_ctx = ctx->enc.expkey;
ctx                42 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_dec_blk(struct des3_ede_x86_ctx *ctx, u8 *dst,
ctx                45 arch/x86/crypto/des3_ede_glue.c 	u32 *dec_ctx = ctx->dec.expkey;
ctx                50 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_enc_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
ctx                53 arch/x86/crypto/des3_ede_glue.c 	u32 *enc_ctx = ctx->enc.expkey;
ctx                58 arch/x86/crypto/des3_ede_glue.c static inline void des3_ede_dec_blk_3way(struct des3_ede_x86_ctx *ctx, u8 *dst,
ctx                61 arch/x86/crypto/des3_ede_glue.c 	u32 *dec_ctx = ctx->dec.expkey;
ctx               123 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               125 arch/x86/crypto/des3_ede_glue.c 	return ecb_crypt(req, ctx->enc.expkey);
ctx               131 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               133 arch/x86/crypto/des3_ede_glue.c 	return ecb_crypt(req, ctx->dec.expkey);
ctx               136 arch/x86/crypto/des3_ede_glue.c static unsigned int __cbc_encrypt(struct des3_ede_x86_ctx *ctx,
ctx               147 arch/x86/crypto/des3_ede_glue.c 		des3_ede_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
ctx               162 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               170 arch/x86/crypto/des3_ede_glue.c 		nbytes = __cbc_encrypt(ctx, &walk);
ctx               177 arch/x86/crypto/des3_ede_glue.c static unsigned int __cbc_decrypt(struct des3_ede_x86_ctx *ctx,
ctx               203 arch/x86/crypto/des3_ede_glue.c 			des3_ede_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);
ctx               220 arch/x86/crypto/des3_ede_glue.c 		des3_ede_dec_blk(ctx, (u8 *)dst, (u8 *)src);
ctx               241 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               249 arch/x86/crypto/des3_ede_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
ctx               256 arch/x86/crypto/des3_ede_glue.c static void ctr_crypt_final(struct des3_ede_x86_ctx *ctx,
ctx               265 arch/x86/crypto/des3_ede_glue.c 	des3_ede_enc_blk(ctx, keystream, ctrblk);
ctx               271 arch/x86/crypto/des3_ede_glue.c static unsigned int __ctr_crypt(struct des3_ede_x86_ctx *ctx,
ctx               289 arch/x86/crypto/des3_ede_glue.c 			des3_ede_enc_blk_3way(ctx, (u8 *)ctrblocks,
ctx               308 arch/x86/crypto/des3_ede_glue.c 		des3_ede_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
ctx               324 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               332 arch/x86/crypto/des3_ede_glue.c 		nbytes = __ctr_crypt(ctx, &walk);
ctx               337 arch/x86/crypto/des3_ede_glue.c 		ctr_crypt_final(ctx, &walk);
ctx               347 arch/x86/crypto/des3_ede_glue.c 	struct des3_ede_x86_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               351 arch/x86/crypto/des3_ede_glue.c 	err = des3_ede_expand_key(&ctx->enc, key, keylen);
ctx               360 arch/x86/crypto/des3_ede_glue.c 		memset(ctx, 0, sizeof(*ctx));
ctx               368 arch/x86/crypto/des3_ede_glue.c 		tmp = ror32(ctx->enc.expkey[i + 1], 4);
ctx               369 arch/x86/crypto/des3_ede_glue.c 		ctx->enc.expkey[i + 1] = tmp;
ctx               371 arch/x86/crypto/des3_ede_glue.c 		ctx->dec.expkey[j + 0] = ctx->enc.expkey[i + 0];
ctx               372 arch/x86/crypto/des3_ede_glue.c 		ctx->dec.expkey[j + 1] = tmp;
ctx                56 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
ctx                69 arch/x86/crypto/ghash-clmulni-intel_glue.c 	ctx->shash.a = (b << 1) | (a >> 63);
ctx                70 arch/x86/crypto/ghash-clmulni-intel_glue.c 	ctx->shash.b = (a << 1) | (b >> 63);
ctx                73 arch/x86/crypto/ghash-clmulni-intel_glue.c 		ctx->shash.b ^= ((u64)0xc2) << 56;
ctx                82 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                97 arch/x86/crypto/ghash-clmulni-intel_glue.c 			clmul_ghash_mul(dst, &ctx->shash);
ctx               100 arch/x86/crypto/ghash-clmulni-intel_glue.c 	clmul_ghash_update(dst, src, srclen, &ctx->shash);
ctx               114 arch/x86/crypto/ghash-clmulni-intel_glue.c static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
ctx               125 arch/x86/crypto/ghash-clmulni-intel_glue.c 		clmul_ghash_mul(dst, &ctx->shash);
ctx               135 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx               138 arch/x86/crypto/ghash-clmulni-intel_glue.c 	ghash_flush(ctx, dctx);
ctx               165 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               167 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               179 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               180 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               197 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               198 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               237 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               239 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
ctx               258 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               259 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
ctx               275 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               282 arch/x86/crypto/ghash-clmulni-intel_glue.c 	ctx->cryptd_tfm = cryptd_tfm;
ctx               292 arch/x86/crypto/ghash-clmulni-intel_glue.c 	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               294 arch/x86/crypto/ghash-clmulni-intel_glue.c 	cryptd_free_ahash(ctx->cryptd_tfm);
ctx                24 arch/x86/crypto/glue_helper.c 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx                49 arch/x86/crypto/glue_helper.c 				gctx->funcs[i].fn_u.ecb(ctx, dst, src);
ctx                69 arch/x86/crypto/glue_helper.c 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx                84 arch/x86/crypto/glue_helper.c 			fn(ctx, (u8 *)dst, (u8 *)dst);
ctx               101 arch/x86/crypto/glue_helper.c 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx               137 arch/x86/crypto/glue_helper.c 				gctx->funcs[i].fn_u.cbc(ctx, dst, src);
ctx               161 arch/x86/crypto/glue_helper.c 	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx               191 arch/x86/crypto/glue_helper.c 				gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
ctx               213 arch/x86/crypto/glue_helper.c 		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
ctx               226 arch/x86/crypto/glue_helper.c 					  void *ctx,
ctx               243 arch/x86/crypto/glue_helper.c 				gctx->funcs[i].fn_u.xts(ctx, dst, src,
ctx               357 arch/x86/crypto/glue_helper.c void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
ctx               369 arch/x86/crypto/glue_helper.c 	fn(ctx, (u8 *)dst, (u8 *)dst);
ctx                22 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
ctx                24 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
ctx                26 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
ctx                28 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
ctx                30 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
ctx                32 arch/x86/crypto/serpent_avx2_glue.c asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
ctx               166 arch/x86/crypto/serpent_avx2_glue.c 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               170 arch/x86/crypto/serpent_avx2_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               176 arch/x86/crypto/serpent_avx2_glue.c 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               180 arch/x86/crypto/serpent_avx2_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
ctx                23 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                27 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                31 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                35 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                39 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                43 arch/x86/crypto/serpent_avx_glue.c asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                47 arch/x86/crypto/serpent_avx_glue.c void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                54 arch/x86/crypto/serpent_avx_glue.c 	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
ctx                59 arch/x86/crypto/serpent_avx_glue.c void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                61 arch/x86/crypto/serpent_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                66 arch/x86/crypto/serpent_avx_glue.c void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                68 arch/x86/crypto/serpent_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                82 arch/x86/crypto/serpent_avx_glue.c 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                90 arch/x86/crypto/serpent_avx_glue.c 	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
ctx                95 arch/x86/crypto/serpent_avx_glue.c 	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
ctx               206 arch/x86/crypto/serpent_avx_glue.c 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               210 arch/x86/crypto/serpent_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               216 arch/x86/crypto/serpent_avx_glue.c 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               220 arch/x86/crypto/serpent_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
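
The serpent (and, below, twofish) XTS setkey paths split the supplied key in half: the first half is expanded into crypt_ctx for the data cipher, the second into tweak_ctx for the tweak cipher. A minimal sketch of that split, assuming a toy expand function in place of __serpent_setkey:

#include <stdio.h>
#include <string.h>

struct toy_cipher_ctx { unsigned char k[16]; };

struct toy_xts_ctx {
	struct toy_cipher_ctx crypt_ctx;	/* encrypts data blocks */
	struct toy_cipher_ctx tweak_ctx;	/* encrypts the XTS tweak */
};

static int toy_expandkey(struct toy_cipher_ctx *ctx,
			 const unsigned char *key, size_t keylen)
{
	if (keylen > sizeof(ctx->k))
		return -1;
	memset(ctx->k, 0, sizeof(ctx->k));
	memcpy(ctx->k, key, keylen);
	return 0;
}

/* XTS convention: first half keys the data cipher, second the tweak. */
static int toy_xts_setkey(struct toy_xts_ctx *ctx,
			  const unsigned char *key, size_t keylen)
{
	if (keylen % 2)
		return -1;
	if (toy_expandkey(&ctx->crypt_ctx, key, keylen / 2))
		return -1;
	return toy_expandkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}

int main(void)
{
	struct toy_xts_ctx ctx;
	unsigned char key[32] = { 1, 2, 3 };	/* two 16-byte halves */

	printf("setkey: %d\n", toy_xts_setkey(&ctx, key, sizeof(key)));
	return 0;
}
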
ctx                34 arch/x86/crypto/serpent_sse2_glue.c static void serpent_decrypt_cbc_xway(void *ctx, u128 *dst, const u128 *src)
ctx                42 arch/x86/crypto/serpent_sse2_glue.c 	serpent_dec_blk_xway(ctx, (u8 *)dst, (u8 *)src);
ctx                48 arch/x86/crypto/serpent_sse2_glue.c static void serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                55 arch/x86/crypto/serpent_sse2_glue.c 	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
ctx                59 arch/x86/crypto/serpent_sse2_glue.c static void serpent_crypt_ctr_xway(void *ctx, u128 *dst, const u128 *src,
ctx                73 arch/x86/crypto/serpent_sse2_glue.c 	serpent_enc_blk_xway_xor(ctx, (u8 *)dst, (u8 *)ctrblks);
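
The serpent_crypt_ctr helpers above follow the standard CTR shape: copy the counter block, encrypt it to get keystream, XOR the keystream into the data, and increment the counter. A self-contained sketch with a toy block cipher (the kernel versions additionally convert between le128 and big-endian counter layouts, which this omits):

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define BLK 16

/* Toy "block cipher": XOR with a key byte; a real cipher goes here. */
static void toy_encrypt(void *ctx, uint8_t dst[BLK], const uint8_t src[BLK])
{
	uint8_t k = *(uint8_t *)ctx;

	for (int i = 0; i < BLK; i++)
		dst[i] = src[i] ^ k;
}

/* Big-endian increment of the counter block, as CTR requires. */
static void ctr_inc(uint8_t ctr[BLK])
{
	for (int i = BLK - 1; i >= 0; i--)
		if (++ctr[i])
			break;
}

/* One CTR block: keystream = E(counter); dst = src ^ keystream. */
static void crypt_ctr_one(void *ctx, uint8_t *dst, const uint8_t *src,
			  uint8_t iv[BLK])
{
	uint8_t ks[BLK];

	toy_encrypt(ctx, ks, iv);
	ctr_inc(iv);
	for (int i = 0; i < BLK; i++)
		dst[i] = src[i] ^ ks[i];
}

int main(void)
{
	uint8_t key = 0x42, iv[BLK] = { 0 }, pt[BLK] = "counter mode", ct[BLK];

	crypt_ctr_one(&key, ct, pt, iv);
	printf("iv now ends in %u\n", iv[BLK - 1]);	/* incremented to 1 */
	return 0;
}
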
ctx                25 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                27 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                30 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                32 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                35 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                37 arch/x86/crypto/twofish_avx_glue.c asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
ctx                46 arch/x86/crypto/twofish_avx_glue.c static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
ctx                49 arch/x86/crypto/twofish_avx_glue.c 	__twofish_enc_blk_3way(ctx, dst, src, false);
ctx                52 arch/x86/crypto/twofish_avx_glue.c static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                54 arch/x86/crypto/twofish_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                58 arch/x86/crypto/twofish_avx_glue.c static void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                60 arch/x86/crypto/twofish_avx_glue.c 	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
ctx                72 arch/x86/crypto/twofish_avx_glue.c 	struct twofish_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                81 arch/x86/crypto/twofish_avx_glue.c 	err = __twofish_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
ctx                86 arch/x86/crypto/twofish_avx_glue.c 	return __twofish_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
ctx               209 arch/x86/crypto/twofish_avx_glue.c 	struct twofish_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               213 arch/x86/crypto/twofish_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
ctx               219 arch/x86/crypto/twofish_avx_glue.c 	struct twofish_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               223 arch/x86/crypto/twofish_avx_glue.c 				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
ctx                47 arch/x86/crypto/twofish_glue.c asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
ctx                50 arch/x86/crypto/twofish_glue.c asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
ctx                28 arch/x86/crypto/twofish_glue_3way.c static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
ctx                31 arch/x86/crypto/twofish_glue_3way.c 	__twofish_enc_blk_3way(ctx, dst, src, false);
ctx                34 arch/x86/crypto/twofish_glue_3way.c static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
ctx                37 arch/x86/crypto/twofish_glue_3way.c 	__twofish_enc_blk_3way(ctx, dst, src, true);
ctx                40 arch/x86/crypto/twofish_glue_3way.c void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src)
ctx                47 arch/x86/crypto/twofish_glue_3way.c 	twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);
ctx                54 arch/x86/crypto/twofish_glue_3way.c void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
ctx                64 arch/x86/crypto/twofish_glue_3way.c 	twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
ctx                69 arch/x86/crypto/twofish_glue_3way.c void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src,
ctx                87 arch/x86/crypto/twofish_glue_3way.c 	twofish_enc_blk_xor_3way(ctx, (u8 *)dst, (u8 *)ctrblks);
ctx              2251 arch/x86/events/core.c static void x86_pmu_sched_task(struct perf_event_context *ctx, bool sched_in)
ctx              2254 arch/x86/events/core.c 		x86_pmu.sched_task(ctx, sched_in);
ctx              3815 arch/x86/events/intel/core.c static void intel_pmu_sched_task(struct perf_event_context *ctx,
ctx              3818 arch/x86/events/intel/core.c 	intel_pmu_pebs_sched_task(ctx, sched_in);
ctx              3819 arch/x86/events/intel/core.c 	intel_pmu_lbr_sched_task(ctx, sched_in);
ctx               911 arch/x86/events/intel/ds.c void intel_pmu_pebs_sched_task(struct perf_event_context *ctx, bool sched_in)
ctx              1015 arch/x86/events/intel/ds.c 	struct pmu *pmu = event->ctx->pmu;
ctx               420 arch/x86/events/intel/lbr.c void intel_pmu_lbr_sched_task(struct perf_event_context *ctx, bool sched_in)
ctx               433 arch/x86/events/intel/lbr.c 	task_ctx = ctx ? ctx->task_ctx_data : NULL;
ctx               467 arch/x86/events/intel/lbr.c 	if (branch_user_callstack(cpuc->br_sel) && event->ctx->task_ctx_data) {
ctx               468 arch/x86/events/intel/lbr.c 		task_ctx = event->ctx->task_ctx_data;
ctx               493 arch/x86/events/intel/lbr.c 	perf_sched_cb_inc(event->ctx->pmu);
ctx               507 arch/x86/events/intel/lbr.c 	    event->ctx->task_ctx_data) {
ctx               508 arch/x86/events/intel/lbr.c 		task_ctx = event->ctx->task_ctx_data;
ctx               517 arch/x86/events/intel/lbr.c 	perf_sched_cb_dec(event->ctx->pmu);
ctx               642 arch/x86/events/perf_event.h 	void		(*sched_task)(struct perf_event_context *ctx,
ctx              1013 arch/x86/events/perf_event.h void intel_pmu_pebs_sched_task(struct perf_event_context *ctx, bool sched_in);
ctx              1021 arch/x86/events/perf_event.h void intel_pmu_lbr_sched_task(struct perf_event_context *ctx, bool sched_in);
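
x86_pmu_sched_task above only calls the hook if the PMU implementation filled it in, a common kernel idiom: an ops struct of function pointers where NULL means "not implemented". A hypothetical mirror of that pattern (toy_pmu_ops and friends are made-up names):

#include <stdbool.h>
#include <stdio.h>

struct toy_event_ctx { int nr_events; };

struct toy_pmu_ops {
	void (*sched_task)(struct toy_event_ctx *ctx, bool sched_in);
};

static void intel_like_sched_task(struct toy_event_ctx *ctx, bool sched_in)
{
	printf("%s: %d events\n", sched_in ? "sched in" : "sched out",
	       ctx->nr_events);
}

/* Caller checks for NULL so implementations may omit the hook. */
static void pmu_sched_task(const struct toy_pmu_ops *ops,
			   struct toy_event_ctx *ctx, bool sched_in)
{
	if (ops->sched_task)
		ops->sched_task(ctx, sched_in);
}

int main(void)
{
	struct toy_event_ctx ctx = { .nr_events = 3 };
	struct toy_pmu_ops with = { .sched_task = intel_like_sched_task };
	struct toy_pmu_ops without = { 0 };

	pmu_sched_task(&with, &ctx, true);
	pmu_sched_task(&without, &ctx, false);	/* safely does nothing */
	return 0;
}
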
ctx                35 arch/x86/include/asm/crypto/camellia.h asmlinkage void __camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst,
ctx                37 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_dec_blk(struct camellia_ctx *ctx, u8 *dst,
ctx                41 arch/x86/include/asm/crypto/camellia.h asmlinkage void __camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst,
ctx                43 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_dec_blk_2way(struct camellia_ctx *ctx, u8 *dst,
ctx                47 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                49 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                52 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                54 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                57 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                59 arch/x86/include/asm/crypto/camellia.h asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
ctx                62 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk(struct camellia_ctx *ctx, u8 *dst,
ctx                65 arch/x86/include/asm/crypto/camellia.h 	__camellia_enc_blk(ctx, dst, src, false);
ctx                68 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_xor(struct camellia_ctx *ctx, u8 *dst,
ctx                71 arch/x86/include/asm/crypto/camellia.h 	__camellia_enc_blk(ctx, dst, src, true);
ctx                74 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_2way(struct camellia_ctx *ctx, u8 *dst,
ctx                77 arch/x86/include/asm/crypto/camellia.h 	__camellia_enc_blk_2way(ctx, dst, src, false);
ctx                80 arch/x86/include/asm/crypto/camellia.h static inline void camellia_enc_blk_xor_2way(struct camellia_ctx *ctx, u8 *dst,
ctx                83 arch/x86/include/asm/crypto/camellia.h 	__camellia_enc_blk_2way(ctx, dst, src, true);
ctx                87 arch/x86/include/asm/crypto/camellia.h extern void camellia_decrypt_cbc_2way(void *ctx, u128 *dst, const u128 *src);
ctx                88 arch/x86/include/asm/crypto/camellia.h extern void camellia_crypt_ctr(void *ctx, u128 *dst, const u128 *src,
ctx                90 arch/x86/include/asm/crypto/camellia.h extern void camellia_crypt_ctr_2way(void *ctx, u128 *dst, const u128 *src,
ctx                93 arch/x86/include/asm/crypto/camellia.h extern void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv);
ctx                94 arch/x86/include/asm/crypto/camellia.h extern void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv);
ctx                14 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
ctx                15 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
ctx                16 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
ctx                18 arch/x86/include/asm/crypto/glue_helper.h typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
ctx               119 arch/x86/include/asm/crypto/glue_helper.h extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
ctx                18 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                20 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                23 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                25 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                28 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                30 arch/x86/include/asm/crypto/serpent-avx.h asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
ctx                33 arch/x86/include/asm/crypto/serpent-avx.h extern void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src,
ctx                36 arch/x86/include/asm/crypto/serpent-avx.h extern void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv);
ctx                37 arch/x86/include/asm/crypto/serpent-avx.h extern void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv);
ctx                12 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void __serpent_enc_blk_4way(struct serpent_ctx *ctx, u8 *dst,
ctx                14 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void serpent_dec_blk_4way(struct serpent_ctx *ctx, u8 *dst,
ctx                17 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
ctx                20 arch/x86/include/asm/crypto/serpent-sse2.h 	__serpent_enc_blk_4way(ctx, dst, src, false);
ctx                23 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
ctx                26 arch/x86/include/asm/crypto/serpent-sse2.h 	__serpent_enc_blk_4way(ctx, dst, src, true);
ctx                29 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
ctx                32 arch/x86/include/asm/crypto/serpent-sse2.h 	serpent_dec_blk_4way(ctx, dst, src);
ctx                39 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void __serpent_enc_blk_8way(struct serpent_ctx *ctx, u8 *dst,
ctx                41 arch/x86/include/asm/crypto/serpent-sse2.h asmlinkage void serpent_dec_blk_8way(struct serpent_ctx *ctx, u8 *dst,
ctx                44 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway(struct serpent_ctx *ctx, u8 *dst,
ctx                47 arch/x86/include/asm/crypto/serpent-sse2.h 	__serpent_enc_blk_8way(ctx, dst, src, false);
ctx                50 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_enc_blk_xway_xor(struct serpent_ctx *ctx, u8 *dst,
ctx                53 arch/x86/include/asm/crypto/serpent-sse2.h 	__serpent_enc_blk_8way(ctx, dst, src, true);
ctx                56 arch/x86/include/asm/crypto/serpent-sse2.h static inline void serpent_dec_blk_xway(struct serpent_ctx *ctx, u8 *dst,
ctx                59 arch/x86/include/asm/crypto/serpent-sse2.h 	serpent_dec_blk_8way(ctx, dst, src);
ctx                10 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
ctx                12 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
ctx                16 arch/x86/include/asm/crypto/twofish.h asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
ctx                18 arch/x86/include/asm/crypto/twofish.h asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
ctx                22 arch/x86/include/asm/crypto/twofish.h extern void twofish_dec_blk_cbc_3way(void *ctx, u128 *dst, const u128 *src);
ctx                23 arch/x86/include/asm/crypto/twofish.h extern void twofish_enc_blk_ctr(void *ctx, u128 *dst, const u128 *src,
ctx                25 arch/x86/include/asm/crypto/twofish.h extern void twofish_enc_blk_ctr_3way(void *ctx, u128 *dst, const u128 *src,
ctx               254 arch/x86/kernel/cpu/mce/severity.c 	enum context ctx = error_context(m);
ctx               262 arch/x86/kernel/cpu/mce/severity.c 		if (ctx == IN_KERNEL)
ctx               273 arch/x86/kernel/cpu/mce/severity.c 				return mce_severity_amd_smca(m, ctx);
ctx               307 arch/x86/kernel/cpu/mce/severity.c 	enum context ctx = error_context(m);
ctx               319 arch/x86/kernel/cpu/mce/severity.c 		if (s->context && ctx != s->context)
ctx               326 arch/x86/kernel/cpu/mce/severity.c 		if (s->sev >= MCE_UC_SEVERITY && ctx == IN_KERNEL) {
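
The MCE severity code above scans a rule table and skips any entry whose context requirement does not match the error context computed from the machine-check state. A sketch of that table-scan-with-context-filter; the rules and severity values here are hypothetical:

#include <stdio.h>

enum context { ANY_CONTEXT = 0, IN_KERNEL, IN_USER };

struct severity_rule {
	int sev;		/* severity to return on match */
	unsigned long mask;	/* which status bits must match */
	unsigned long value;
	enum context context;	/* 0 (ANY) or a required context */
};

/* First matching rule whose context constraint holds decides severity. */
static int classify(const struct severity_rule *tbl, int n,
		    unsigned long status, enum context ctx)
{
	for (int i = 0; i < n; i++) {
		if ((status & tbl[i].mask) != tbl[i].value)
			continue;
		if (tbl[i].context && ctx != tbl[i].context)
			continue;
		return tbl[i].sev;
	}
	return 0;
}

int main(void)
{
	/* Hypothetical: bit 0 fatal only in kernel, bit 1 corrected. */
	const struct severity_rule tbl[] = {
		{ 3, 0x1, 0x1, IN_KERNEL },
		{ 1, 0x2, 0x2, ANY_CONTEXT },
	};

	printf("kernel, bit0: %d\n", classify(tbl, 2, 0x1, IN_KERNEL)); /* 3 */
	printf("user, bit0:   %d\n", classify(tbl, 2, 0x1, IN_USER));   /* 0 */
	return 0;
}
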
ctx               348 arch/x86/kernel/cpu/microcode/core.c 	struct cpu_info_ctx *ctx = arg;
ctx               350 arch/x86/kernel/cpu/microcode/core.c 	ctx->err = microcode_ops->collect_cpu_info(smp_processor_id(),
ctx               351 arch/x86/kernel/cpu/microcode/core.c 						   ctx->cpu_sig);
ctx               356 arch/x86/kernel/cpu/microcode/core.c 	struct cpu_info_ctx ctx = { .cpu_sig = cpu_sig, .err = 0 };
ctx               359 arch/x86/kernel/cpu/microcode/core.c 	ret = smp_call_function_single(cpu, collect_cpu_info_local, &ctx, 1);
ctx               361 arch/x86/kernel/cpu/microcode/core.c 		ret = ctx.err;
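
The microcode lines above pack both the argument (cpu_sig) and the result slot (err) into one stack context and hand its address through smp_call_function_single()'s void *arg, waiting for the remote CPU to fill it in. The cross-CPU call itself is kernel-only, so this sketch substitutes a pthread for the remote CPU while keeping the same pack-args-and-result-into-ctx shape:

#include <pthread.h>
#include <stdio.h>

/* Pack inputs and the error/result slots into one context struct. */
struct cpu_info_ctx {
	int cpu_sig;	/* output */
	int err;	/* output status */
};

/* Runs "remotely" (here: on another thread), fills the ctx in place. */
static void *collect_info_local(void *arg)
{
	struct cpu_info_ctx *ctx = arg;

	ctx->cpu_sig = 0xabcd;
	ctx->err = 0;
	return NULL;
}

int main(void)
{
	struct cpu_info_ctx ctx = { .err = -1 };
	pthread_t t;

	if (pthread_create(&t, NULL, collect_info_local, &ctx))
		return 1;
	pthread_join(t, NULL);	/* like the wait=1 in the kernel call */
	printf("err=%d sig=%#x\n", ctx.err, ctx.cpu_sig);
	return 0;
}
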
ctx              1941 arch/x86/kernel/cpu/resctrl/rdtgroup.c static int rdt_enable_ctx(struct rdt_fs_context *ctx)
ctx              1945 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (ctx->enable_cdpl2)
ctx              1948 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (!ret && ctx->enable_cdpl3)
ctx              1951 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (!ret && ctx->enable_mba_mbps)
ctx              1959 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_fs_context *ctx = rdt_fc2context(fc);
ctx              1974 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ret = rdt_enable_ctx(ctx);
ctx              2035 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (ctx->enable_mba_mbps)
ctx              2067 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_fs_context *ctx = rdt_fc2context(fc);
ctx              2077 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		ctx->enable_cdpl3 = true;
ctx              2080 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		ctx->enable_cdpl2 = true;
ctx              2085 arch/x86/kernel/cpu/resctrl/rdtgroup.c 		ctx->enable_mba_mbps = true;
ctx              2094 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_fs_context *ctx = rdt_fc2context(fc);
ctx              2097 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	kfree(ctx);
ctx              2108 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	struct rdt_fs_context *ctx;
ctx              2110 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ctx = kzalloc(sizeof(struct rdt_fs_context), GFP_KERNEL);
ctx              2111 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	if (!ctx)
ctx              2114 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ctx->kfc.root = rdt_root;
ctx              2115 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	ctx->kfc.magic = RDTGROUP_SUPER_MAGIC;
ctx              2116 arch/x86/kernel/cpu/resctrl/rdtgroup.c 	fc->fs_private = &ctx->kfc;
ctx              1086 arch/x86/kernel/uprobes.c bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
ctx              1089 arch/x86/kernel/uprobes.c 	if (ctx == RP_CHECK_CALL) /* sp was just decremented by "call" insn */
ctx              1021 arch/x86/kvm/vmx/vmx.c static inline void pt_load_msr(struct pt_ctx *ctx, u32 addr_range)
ctx              1025 arch/x86/kvm/vmx/vmx.c 	wrmsrl(MSR_IA32_RTIT_STATUS, ctx->status);
ctx              1026 arch/x86/kvm/vmx/vmx.c 	wrmsrl(MSR_IA32_RTIT_OUTPUT_BASE, ctx->output_base);
ctx              1027 arch/x86/kvm/vmx/vmx.c 	wrmsrl(MSR_IA32_RTIT_OUTPUT_MASK, ctx->output_mask);
ctx              1028 arch/x86/kvm/vmx/vmx.c 	wrmsrl(MSR_IA32_RTIT_CR3_MATCH, ctx->cr3_match);
ctx              1030 arch/x86/kvm/vmx/vmx.c 		wrmsrl(MSR_IA32_RTIT_ADDR0_A + i * 2, ctx->addr_a[i]);
ctx              1031 arch/x86/kvm/vmx/vmx.c 		wrmsrl(MSR_IA32_RTIT_ADDR0_B + i * 2, ctx->addr_b[i]);
ctx              1035 arch/x86/kvm/vmx/vmx.c static inline void pt_save_msr(struct pt_ctx *ctx, u32 addr_range)
ctx              1039 arch/x86/kvm/vmx/vmx.c 	rdmsrl(MSR_IA32_RTIT_STATUS, ctx->status);
ctx              1040 arch/x86/kvm/vmx/vmx.c 	rdmsrl(MSR_IA32_RTIT_OUTPUT_BASE, ctx->output_base);
ctx              1041 arch/x86/kvm/vmx/vmx.c 	rdmsrl(MSR_IA32_RTIT_OUTPUT_MASK, ctx->output_mask);
ctx              1042 arch/x86/kvm/vmx/vmx.c 	rdmsrl(MSR_IA32_RTIT_CR3_MATCH, ctx->cr3_match);
ctx              1044 arch/x86/kvm/vmx/vmx.c 		rdmsrl(MSR_IA32_RTIT_ADDR0_A + i * 2, ctx->addr_a[i]);
ctx              1045 arch/x86/kvm/vmx/vmx.c 		rdmsrl(MSR_IA32_RTIT_ADDR0_B + i * 2, ctx->addr_b[i]);
ctx               232 arch/x86/mm/kmmio.c 	struct kmmio_context *ctx;
ctx               263 arch/x86/mm/kmmio.c 	ctx = &get_cpu_var(kmmio_ctx);
ctx               264 arch/x86/mm/kmmio.c 	if (ctx->active) {
ctx               265 arch/x86/mm/kmmio.c 		if (page_base == ctx->addr) {
ctx               285 arch/x86/mm/kmmio.c 			pr_emerg("previous hit was at 0x%08lx.\n", ctx->addr);
ctx               290 arch/x86/mm/kmmio.c 	ctx->active++;
ctx               292 arch/x86/mm/kmmio.c 	ctx->fpage = faultpage;
ctx               293 arch/x86/mm/kmmio.c 	ctx->probe = get_kmmio_probe(page_base);
ctx               294 arch/x86/mm/kmmio.c 	ctx->saved_flags = (regs->flags & (X86_EFLAGS_TF | X86_EFLAGS_IF));
ctx               295 arch/x86/mm/kmmio.c 	ctx->addr = page_base;
ctx               297 arch/x86/mm/kmmio.c 	if (ctx->probe && ctx->probe->pre_handler)
ctx               298 arch/x86/mm/kmmio.c 		ctx->probe->pre_handler(ctx->probe, regs, addr);
ctx               308 arch/x86/mm/kmmio.c 	disarm_kmmio_fault_page(ctx->fpage);
ctx               336 arch/x86/mm/kmmio.c 	struct kmmio_context *ctx = &get_cpu_var(kmmio_ctx);
ctx               338 arch/x86/mm/kmmio.c 	if (!ctx->active) {
ctx               349 arch/x86/mm/kmmio.c 	if (ctx->probe && ctx->probe->post_handler)
ctx               350 arch/x86/mm/kmmio.c 		ctx->probe->post_handler(ctx->probe, condition, regs);
ctx               354 arch/x86/mm/kmmio.c 	if (ctx->fpage->count)
ctx               355 arch/x86/mm/kmmio.c 		arm_kmmio_fault_page(ctx->fpage);
ctx               359 arch/x86/mm/kmmio.c 	regs->flags |= ctx->saved_flags;
ctx               362 arch/x86/mm/kmmio.c 	ctx->active--;
ctx               363 arch/x86/mm/kmmio.c 	BUG_ON(ctx->active);
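
The kmmio fault path above takes a per-CPU context, refuses recursive entry while ctx->active is set, bumps the counter on entry, and asserts it drops back to zero on exit (the BUG_ON above). A thread-local sketch of that reentrancy guard, with the handler bodies stubbed out:

#include <assert.h>
#include <stdio.h>

/* Thread-local stand-in for the kernel's per-CPU kmmio context. */
struct toy_ctx { int active; unsigned long addr; };
static _Thread_local struct toy_ctx kmmio_ctx;

static int handle_fault(unsigned long addr)
{
	struct toy_ctx *ctx = &kmmio_ctx;

	if (ctx->active) {	/* recursive fault: refuse */
		fprintf(stderr, "recursive hit, prev at %#lx\n", ctx->addr);
		return -1;
	}
	ctx->active++;
	ctx->addr = addr;

	/* ... pre_handler and single-step setup would happen here ... */

	return 0;
}

static void finish_fault(void)
{
	struct toy_ctx *ctx = &kmmio_ctx;

	/* ... post_handler and page re-arm would happen here ... */
	ctx->active--;
	assert(ctx->active == 0);	/* mirrors BUG_ON(ctx->active) */
}

int main(void)
{
	if (handle_fault(0x1000) == 0)
		finish_fault();
	puts("ok");
	return 0;
}
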
ctx               394 arch/x86/net/bpf_jit_comp.c 		  int oldproglen, struct jit_context *ctx)
ctx              1019 arch/x86/net/bpf_jit_comp.c 				jmp_offset = ctx->cleanup_addr - addrs[i];
ctx              1024 arch/x86/net/bpf_jit_comp.c 			ctx->cleanup_addr = proglen;
ctx              1071 arch/x86/net/bpf_jit_comp.c 	struct jit_context ctx;
ctx              1080 arch/x86/net/bpf_jit_comp.c 	struct jit_context ctx = {};
ctx              1114 arch/x86/net/bpf_jit_comp.c 		ctx = jit_data->ctx;
ctx              1135 arch/x86/net/bpf_jit_comp.c 	ctx.cleanup_addr = proglen;
ctx              1145 arch/x86/net/bpf_jit_comp.c 		proglen = do_jit(prog, addrs, image, oldproglen, &ctx);
ctx              1182 arch/x86/net/bpf_jit_comp.c 			jit_data->ctx = ctx;
ctx              1463 arch/x86/net/bpf_jit_comp32.c 		  int oldproglen, struct jit_context *ctx)
ctx              2253 arch/x86/net/bpf_jit_comp32.c 				jmp_offset = ctx->cleanup_addr - addrs[i];
ctx              2258 arch/x86/net/bpf_jit_comp32.c 			ctx->cleanup_addr = proglen;
ctx              2304 arch/x86/net/bpf_jit_comp32.c 	struct jit_context ctx = {};
ctx              2340 arch/x86/net/bpf_jit_comp32.c 	ctx.cleanup_addr = proglen;
ctx              2349 arch/x86/net/bpf_jit_comp32.c 		proglen = do_jit(prog, addrs, image, oldproglen, &ctx);
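
Both BPF JITs above call do_jit() repeatedly, feeding each pass the previous pass's length and carrying the epilogue offset in ctx->cleanup_addr, until the generated size stops changing (the real code also bounds the pass count and only writes into the final image once sized). A sketch of that fixed-point loop with a fake code generator standing in for do_jit():

#include <stdio.h>

struct jit_context { int cleanup_addr; };

/*
 * Fake "do_jit": pretends jump encodings shrink once target offsets
 * derived from the previous pass are known and small.
 */
static int do_jit_pass(int oldproglen, struct jit_context *ctx)
{
	int proglen = (oldproglen > 64) ? oldproglen - 8 : 64;

	ctx->cleanup_addr = proglen - 4;	/* epilogue offset this pass */
	return proglen;
}

int main(void)
{
	struct jit_context ctx = { 0 };
	int proglen = 128, oldproglen;

	/* Iterate until the generated length converges. */
	do {
		oldproglen = proglen;
		proglen = do_jit_pass(oldproglen, &ctx);
		printf("pass: %d -> %d (cleanup at %d)\n",
		       oldproglen, proglen, ctx.cleanup_addr);
	} while (proglen != oldproglen);

	return 0;
}
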
ctx              1033 block/bfq-cgroup.c 	struct blkg_conf_ctx ctx;
ctx              1038 block/bfq-cgroup.c 	ret = blkg_conf_prep(blkcg, &blkcg_policy_bfq, buf, &ctx);
ctx              1042 block/bfq-cgroup.c 	if (sscanf(ctx.body, "%llu", &v) == 1) {
ctx              1047 block/bfq-cgroup.c 	} else if (!strcmp(strim(ctx.body), "default")) {
ctx              1054 block/bfq-cgroup.c 	bfqg = blkg_to_bfqg(ctx.blkg);
ctx              1062 block/bfq-cgroup.c 	blkg_conf_finish(&ctx);
ctx               807 block/blk-cgroup.c 		   char *input, struct blkg_conf_ctx *ctx)
ctx               881 block/blk-cgroup.c 	ctx->disk = disk;
ctx               882 block/blk-cgroup.c 	ctx->blkg = blkg;
ctx               883 block/blk-cgroup.c 	ctx->body = input;
ctx               912 block/blk-cgroup.c void blkg_conf_finish(struct blkg_conf_ctx *ctx)
ctx               913 block/blk-cgroup.c 	__releases(&ctx->disk->queue->queue_lock) __releases(rcu)
ctx               915 block/blk-cgroup.c 	spin_unlock_irq(&ctx->disk->queue->queue_lock);
ctx               917 block/blk-cgroup.c 	put_disk_and_module(ctx->disk);
ctx               331 block/blk-flush.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx               333 block/blk-flush.c 	struct blk_flush_queue *fq = blk_get_flush_queue(q, ctx);
ctx              1120 block/blk-iocost.c 	struct iocg_wake_ctx *ctx = (struct iocg_wake_ctx *)key;
ctx              1121 block/blk-iocost.c 	u64 cost = abs_cost_to_cost(wait->abs_cost, ctx->hw_inuse);
ctx              1123 block/blk-iocost.c 	ctx->vbudget -= cost;
ctx              1125 block/blk-iocost.c 	if (ctx->vbudget < 0)
ctx              1128 block/blk-iocost.c 	iocg_commit_bio(ctx->iocg, wait->bio, cost);
ctx              1145 block/blk-iocost.c 	struct iocg_wake_ctx ctx = { .iocg = iocg };
ctx              1173 block/blk-iocost.c 	ctx.hw_inuse = hw_inuse;
ctx              1174 block/blk-iocost.c 	ctx.vbudget = vbudget - vdebt;
ctx              1175 block/blk-iocost.c 	__wake_up_locked_key(&iocg->waitq, TASK_NORMAL, &ctx);
ctx              1178 block/blk-iocost.c 	if (WARN_ON_ONCE(ctx.vbudget >= 0))
ctx              1182 block/blk-iocost.c 	vshortage = -ctx.vbudget;
ctx              2100 block/blk-iocost.c 	struct blkg_conf_ctx ctx;
ctx              2130 block/blk-iocost.c 	ret = blkg_conf_prep(blkcg, &blkcg_policy_iocost, buf, &ctx);
ctx              2134 block/blk-iocost.c 	iocg = blkg_to_iocg(ctx.blkg);
ctx              2136 block/blk-iocost.c 	if (!strncmp(ctx.body, "default", 7)) {
ctx              2139 block/blk-iocost.c 		if (!sscanf(ctx.body, "%u", &v))
ctx              2150 block/blk-iocost.c 	blkg_conf_finish(&ctx);
ctx              2154 block/blk-iocost.c 	blkg_conf_finish(&ctx);
ctx               792 block/blk-iolatency.c 	struct blkg_conf_ctx ctx;
ctx               800 block/blk-iolatency.c 	ret = blkg_conf_prep(blkcg, &blkcg_policy_iolatency, buf, &ctx);
ctx               804 block/blk-iolatency.c 	iolat = blkg_to_lat(ctx.blkg);
ctx               805 block/blk-iolatency.c 	p = ctx.body;
ctx               830 block/blk-iolatency.c 	blkg = ctx.blkg;
ctx               845 block/blk-iolatency.c 	blkg_conf_finish(&ctx);
ctx               627 block/blk-mq-debugfs.c 	__acquires(&ctx->lock)						\
ctx               629 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = m->private;				\
ctx               631 block/blk-mq-debugfs.c 	spin_lock(&ctx->lock);						\
ctx               632 block/blk-mq-debugfs.c 	return seq_list_start(&ctx->rq_lists[type], *pos);		\
ctx               638 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = m->private;				\
ctx               640 block/blk-mq-debugfs.c 	return seq_list_next(v, &ctx->rq_lists[type], pos);		\
ctx               644 block/blk-mq-debugfs.c 	__releases(&ctx->lock)						\
ctx               646 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = m->private;				\
ctx               648 block/blk-mq-debugfs.c 	spin_unlock(&ctx->lock);					\
ctx               664 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               666 block/blk-mq-debugfs.c 	seq_printf(m, "%lu %lu\n", ctx->rq_dispatched[1], ctx->rq_dispatched[0]);
ctx               673 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               675 block/blk-mq-debugfs.c 	ctx->rq_dispatched[0] = ctx->rq_dispatched[1] = 0;
ctx               681 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               683 block/blk-mq-debugfs.c 	seq_printf(m, "%lu\n", ctx->rq_merged);
ctx               690 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               692 block/blk-mq-debugfs.c 	ctx->rq_merged = 0;
ctx               698 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               700 block/blk-mq-debugfs.c 	seq_printf(m, "%lu %lu\n", ctx->rq_completed[1], ctx->rq_completed[0]);
ctx               707 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx = data;
ctx               709 block/blk-mq-debugfs.c 	ctx->rq_completed[0] = ctx->rq_completed[1] = 0;
ctx               865 block/blk-mq-debugfs.c 					struct blk_mq_ctx *ctx)
ctx               870 block/blk-mq-debugfs.c 	snprintf(name, sizeof(name), "cpu%u", ctx->cpu);
ctx               873 block/blk-mq-debugfs.c 	debugfs_create_files(ctx_dir, ctx, blk_mq_debugfs_ctx_attrs);
ctx               879 block/blk-mq-debugfs.c 	struct blk_mq_ctx *ctx;
ctx               888 block/blk-mq-debugfs.c 	hctx_for_each_ctx(hctx, ctx, i)
ctx               889 block/blk-mq-debugfs.c 		blk_mq_debugfs_register_ctx(hctx, ctx);
ctx               119 block/blk-mq-sched.c 					  struct blk_mq_ctx *ctx)
ctx               121 block/blk-mq-sched.c 	unsigned short idx = ctx->index_hw[hctx->type];
ctx               138 block/blk-mq-sched.c 	struct blk_mq_ctx *ctx = READ_ONCE(hctx->dispatch_from);
ctx               149 block/blk-mq-sched.c 		rq = blk_mq_dequeue_from_ctx(hctx, ctx);
ctx               163 block/blk-mq-sched.c 		ctx = blk_mq_next_ctx(hctx, rq->mq_ctx);
ctx               167 block/blk-mq-sched.c 	WRITE_ONCE(hctx->dispatch_from, ctx);
ctx               309 block/blk-mq-sched.c 				 struct blk_mq_ctx *ctx, struct bio *bio,
ctx               314 block/blk-mq-sched.c 	lockdep_assert_held(&ctx->lock);
ctx               316 block/blk-mq-sched.c 	if (blk_mq_bio_list_merge(q, &ctx->rq_lists[type], bio, nr_segs)) {
ctx               317 block/blk-mq-sched.c 		ctx->rq_merged++;
ctx               328 block/blk-mq-sched.c 	struct blk_mq_ctx *ctx = blk_mq_get_ctx(q);
ctx               329 block/blk-mq-sched.c 	struct blk_mq_hw_ctx *hctx = blk_mq_map_queue(q, bio->bi_opf, ctx);
ctx               338 block/blk-mq-sched.c 			!list_empty_careful(&ctx->rq_lists[type])) {
ctx               340 block/blk-mq-sched.c 		spin_lock(&ctx->lock);
ctx               341 block/blk-mq-sched.c 		ret = blk_mq_attempt_merge(q, hctx, ctx, bio, nr_segs);
ctx               342 block/blk-mq-sched.c 		spin_unlock(&ctx->lock);
ctx               389 block/blk-mq-sched.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx               433 block/blk-mq-sched.c 		spin_lock(&ctx->lock);
ctx               435 block/blk-mq-sched.c 		spin_unlock(&ctx->lock);
ctx               444 block/blk-mq-sched.c 				  struct blk_mq_ctx *ctx,
ctx               471 block/blk-mq-sched.c 		blk_mq_insert_requests(hctx, ctx, list);
ctx                25 block/blk-mq-sched.h 				  struct blk_mq_ctx *ctx,
ctx                28 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx = container_of(kobj, struct blk_mq_ctx, kobj);
ctx                31 block/blk-mq-sysfs.c 	kobject_put(&ctx->ctxs->kobj);
ctx                66 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx                71 block/blk-mq-sysfs.c 	ctx = container_of(kobj, struct blk_mq_ctx, kobj);
ctx                72 block/blk-mq-sysfs.c 	q = ctx->queue;
ctx                80 block/blk-mq-sysfs.c 		res = entry->show(ctx, page);
ctx                89 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx                94 block/blk-mq-sysfs.c 	ctx = container_of(kobj, struct blk_mq_ctx, kobj);
ctx                95 block/blk-mq-sysfs.c 	q = ctx->queue;
ctx               103 block/blk-mq-sysfs.c 		res = entry->store(ctx, page, length);
ctx               239 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx               245 block/blk-mq-sysfs.c 	hctx_for_each_ctx(hctx, ctx, i)
ctx               246 block/blk-mq-sysfs.c 		kobject_del(&ctx->kobj);
ctx               254 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx               264 block/blk-mq-sysfs.c 	hctx_for_each_ctx(hctx, ctx, i) {
ctx               265 block/blk-mq-sysfs.c 		ret = kobject_add(&ctx->kobj, &hctx->kobj, "cpu%u", ctx->cpu);
ctx               297 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx               301 block/blk-mq-sysfs.c 		ctx = per_cpu_ptr(q->queue_ctx, cpu);
ctx               302 block/blk-mq-sysfs.c 		kobject_put(&ctx->kobj);
ctx               309 block/blk-mq-sysfs.c 	struct blk_mq_ctx *ctx;
ctx               315 block/blk-mq-sysfs.c 		ctx = per_cpu_ptr(q->queue_ctx, cpu);
ctx               318 block/blk-mq-sysfs.c 		kobject_init(&ctx->kobj, &blk_mq_ctx_ktype);
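
The sysfs show/store handlers above recover the blk_mq_ctx from its embedded kobject with container_of(), the standard trick for going from a member's address back to its enclosing struct. A self-contained sketch of container_of plus the dispatch (struct names here are toys):

#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct kobject { const char *name; };

struct toy_ctx {
	unsigned int cpu;
	struct kobject kobj;	/* embedded, not a pointer */
};

/* The generic layer hands us only the kobject; recover the owner. */
static void show(struct kobject *kobj)
{
	struct toy_ctx *ctx = container_of(kobj, struct toy_ctx, kobj);

	printf("ctx for cpu%u (%s)\n", ctx->cpu, kobj->name);
}

int main(void)
{
	struct toy_ctx ctx = { .cpu = 2, .kobj = { .name = "cpu2" } };

	show(&ctx.kobj);
	return 0;
}
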
ctx               168 block/blk-mq-tag.c 		data->ctx = blk_mq_get_ctx(data->q);
ctx               170 block/blk-mq-tag.c 						data->ctx);
ctx               195 block/blk-mq-tag.c 		    struct blk_mq_ctx *ctx, unsigned int tag)
ctx               201 block/blk-mq-tag.c 		sbitmap_queue_clear(&tags->bitmap_tags, real_tag, ctx->cpu);
ctx               204 block/blk-mq-tag.c 		sbitmap_queue_clear(&tags->breserved_tags, tag, ctx->cpu);
ctx                30 block/blk-mq-tag.h 			   struct blk_mq_ctx *ctx, unsigned int tag);
ctx                78 block/blk-mq.c 				     struct blk_mq_ctx *ctx)
ctx                80 block/blk-mq.c 	const int bit = ctx->index_hw[hctx->type];
ctx                87 block/blk-mq.c 				      struct blk_mq_ctx *ctx)
ctx                89 block/blk-mq.c 	const int bit = ctx->index_hw[hctx->type];
ctx               316 block/blk-mq.c 	rq->mq_ctx = data->ctx;
ctx               351 block/blk-mq.c 	data->ctx->rq_dispatched[op_is_sync(op)]++;
ctx               373 block/blk-mq.c 	if (likely(!data->ctx)) {
ctx               374 block/blk-mq.c 		data->ctx = blk_mq_get_ctx(q);
ctx               379 block/blk-mq.c 						data->ctx);
ctx               402 block/blk-mq.c 			data->ctx = NULL;
ctx               480 block/blk-mq.c 	alloc_data.ctx = __blk_mq_get_ctx(q, cpu);
ctx               495 block/blk-mq.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx               502 block/blk-mq.c 		blk_mq_put_tag(hctx, hctx->tags, ctx, rq->tag);
ctx               504 block/blk-mq.c 		blk_mq_put_tag(hctx, hctx->sched_tags, ctx, sched_tag);
ctx               513 block/blk-mq.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx               525 block/blk-mq.c 	ctx->rq_completed[rq_is_sync(rq)]++;
ctx               584 block/blk-mq.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx               616 block/blk-mq.c 		shared = cpus_share_cache(cpu, ctx->cpu);
ctx               618 block/blk-mq.c 	if (cpu != ctx->cpu && !shared && cpu_online(ctx->cpu)) {
ctx               622 block/blk-mq.c 		smp_call_function_single_async(ctx->cpu, &rq->csd);
ctx               983 block/blk-mq.c 	struct blk_mq_ctx *ctx = hctx->ctxs[bitnr];
ctx               986 block/blk-mq.c 	spin_lock(&ctx->lock);
ctx               987 block/blk-mq.c 	list_splice_tail_init(&ctx->rq_lists[type], flush_data->list);
ctx               989 block/blk-mq.c 	spin_unlock(&ctx->lock);
ctx              1018 block/blk-mq.c 	struct blk_mq_ctx *ctx = hctx->ctxs[bitnr];
ctx              1021 block/blk-mq.c 	spin_lock(&ctx->lock);
ctx              1022 block/blk-mq.c 	if (!list_empty(&ctx->rq_lists[type])) {
ctx              1023 block/blk-mq.c 		dispatch_data->rq = list_entry_rq(ctx->rq_lists[type].next);
ctx              1025 block/blk-mq.c 		if (list_empty(&ctx->rq_lists[type]))
ctx              1028 block/blk-mq.c 	spin_unlock(&ctx->lock);
ctx              1645 block/blk-mq.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx              1648 block/blk-mq.c 	lockdep_assert_held(&ctx->lock);
ctx              1653 block/blk-mq.c 		list_add(&rq->queuelist, &ctx->rq_lists[type]);
ctx              1655 block/blk-mq.c 		list_add_tail(&rq->queuelist, &ctx->rq_lists[type]);
ctx              1661 block/blk-mq.c 	struct blk_mq_ctx *ctx = rq->mq_ctx;
ctx              1663 block/blk-mq.c 	lockdep_assert_held(&ctx->lock);
ctx              1666 block/blk-mq.c 	blk_mq_hctx_mark_pending(hctx, ctx);
ctx              1689 block/blk-mq.c void blk_mq_insert_requests(struct blk_mq_hw_ctx *hctx, struct blk_mq_ctx *ctx,
ctx              1701 block/blk-mq.c 		BUG_ON(rq->mq_ctx != ctx);
ctx              1705 block/blk-mq.c 	spin_lock(&ctx->lock);
ctx              1706 block/blk-mq.c 	list_splice_tail_init(list, &ctx->rq_lists[type]);
ctx              1707 block/blk-mq.c 	blk_mq_hctx_mark_pending(hctx, ctx);
ctx              1708 block/blk-mq.c 	spin_unlock(&ctx->lock);
ctx              2242 block/blk-mq.c 	struct blk_mq_ctx *ctx;
ctx              2247 block/blk-mq.c 	ctx = __blk_mq_get_ctx(hctx->queue, cpu);
ctx              2250 block/blk-mq.c 	spin_lock(&ctx->lock);
ctx              2251 block/blk-mq.c 	if (!list_empty(&ctx->rq_lists[type])) {
ctx              2252 block/blk-mq.c 		list_splice_init(&ctx->rq_lists[type], &tmp);
ctx              2253 block/blk-mq.c 		blk_mq_hctx_clear_pending(hctx, ctx);
ctx              2255 block/blk-mq.c 	spin_unlock(&ctx->lock);
ctx              2481 block/blk-mq.c 	struct blk_mq_ctx *ctx;
ctx              2509 block/blk-mq.c 		ctx = per_cpu_ptr(q->queue_ctx, i);
ctx              2512 block/blk-mq.c 				ctx->hctxs[j] = blk_mq_map_queue_type(q,
ctx              2518 block/blk-mq.c 			ctx->hctxs[j] = hctx;
ctx              2529 block/blk-mq.c 			ctx->index_hw[hctx->type] = hctx->nr_ctx;
ctx              2530 block/blk-mq.c 			hctx->ctxs[hctx->nr_ctx++] = ctx;
ctx              2540 block/blk-mq.c 			ctx->hctxs[j] = blk_mq_map_queue_type(q,
ctx              2662 block/blk-mq.c 		struct blk_mq_ctx *ctx = per_cpu_ptr(ctxs->queue_ctx, cpu);
ctx              2663 block/blk-mq.c 		ctx->ctxs = ctxs;
ctx                71 block/blk-mq.h void blk_mq_insert_requests(struct blk_mq_hw_ctx *hctx, struct blk_mq_ctx *ctx,
ctx               105 block/blk-mq.h 						     struct blk_mq_ctx *ctx)
ctx               117 block/blk-mq.h 	return ctx->hctxs[type];
ctx               166 block/blk-mq.h 	struct blk_mq_ctx *ctx;
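
blk_mq_map_queue_type above is, on the hot path, a plain array lookup: each per-CPU software context caches, for every queue type, the hardware context it maps to. A sketch of that two-level cpu -> type -> hardware-queue table, with hypothetical toy structs in place of blk_mq_ctx/blk_mq_hw_ctx:

#include <stdio.h>

enum { HCTX_TYPE_DEFAULT, HCTX_TYPE_READ, HCTX_TYPE_POLL, HCTX_MAX_TYPES };

struct toy_hctx { int queue_num; };

struct toy_ctx {
	unsigned int cpu;
	struct toy_hctx *hctxs[HCTX_MAX_TYPES];	/* cached per-type mapping */
};

/* Hot-path lookup is a plain array index, as in blk_mq_map_queue(). */
static struct toy_hctx *map_queue(struct toy_ctx *ctx, int type)
{
	return ctx->hctxs[type];
}

int main(void)
{
	struct toy_hctx hw0 = { 0 }, hw1 = { 1 };
	struct toy_ctx ctx = {
		.cpu = 3,
		.hctxs = {
			[HCTX_TYPE_DEFAULT] = &hw0,
			[HCTX_TYPE_READ]    = &hw0,
			[HCTX_TYPE_POLL]    = &hw1,
		},
	};

	printf("poll IO from cpu%u -> hw queue %d\n",
	       ctx.cpu, map_queue(&ctx, HCTX_TYPE_POLL)->queue_num);
	return 0;
}
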
ctx              1426 block/blk-throttle.c 	struct blkg_conf_ctx ctx;
ctx              1431 block/blk-throttle.c 	ret = blkg_conf_prep(blkcg, &blkcg_policy_throtl, buf, &ctx);
ctx              1436 block/blk-throttle.c 	if (sscanf(ctx.body, "%llu", &v) != 1)
ctx              1441 block/blk-throttle.c 	tg = blkg_to_tg(ctx.blkg);
ctx              1451 block/blk-throttle.c 	blkg_conf_finish(&ctx);
ctx              1589 block/blk-throttle.c 	struct blkg_conf_ctx ctx;
ctx              1597 block/blk-throttle.c 	ret = blkg_conf_prep(blkcg, &blkcg_policy_throtl, buf, &ctx);
ctx              1601 block/blk-throttle.c 	tg = blkg_to_tg(ctx.blkg);
ctx              1616 block/blk-throttle.c 		if (sscanf(ctx.body, "%26s%n", tok, &len) != 1)
ctx              1620 block/blk-throttle.c 		ctx.body += len;
ctx              1697 block/blk-throttle.c 	blkg_conf_finish(&ctx);
ctx                42 block/blk.h    blk_get_flush_queue(struct request_queue *q, struct blk_mq_ctx *ctx)
ctx                44 block/blk.h    	return blk_mq_map_queue(q, REQ_OP_FLUSH, ctx)->fq;
ctx               569 block/kyber-iosched.c 	struct blk_mq_ctx *ctx = blk_mq_get_ctx(hctx->queue);
ctx               570 block/kyber-iosched.c 	struct kyber_ctx_queue *kcq = &khd->kcqs[ctx->index_hw[hctx->type]];
ctx               207 certs/system_keyring.c 			     int (*view_content)(void *ctx,
ctx               210 certs/system_keyring.c 			     void *ctx)
ctx               262 certs/system_keyring.c 		ret = view_content(ctx, data, len, asn1hdrlen);
ctx               286 certs/system_keyring.c 			   int (*view_content)(void *ctx,
ctx               289 certs/system_keyring.c 			   void *ctx)
ctx               299 certs/system_keyring.c 				       view_content, ctx);
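
The system_keyring lines above thread an opaque ctx pointer through to the caller-supplied view_content callback, C's usual substitute for a closure. A minimal sketch of the shape, with the signature checking stubbed out and all names hypothetical:

#include <stdio.h>
#include <stddef.h>

/* Caller-supplied callback plus opaque state: a closure in C. */
typedef int (*view_content_fn)(void *ctx, const void *data, size_t len);

static int verify_and_view(const void *data, size_t len,
			   view_content_fn view, void *ctx)
{
	/* ... signature verification would happen here ... */
	return view(ctx, data, len);	/* hand the payload back with ctx */
}

struct count_ctx { size_t seen; };

static int count_bytes(void *ctx, const void *data, size_t len)
{
	(void)data;
	((struct count_ctx *)ctx)->seen += len;
	return 0;
}

int main(void)
{
	struct count_ctx c = { 0 };

	verify_and_view("payload", 7, count_bytes, &c);
	printf("viewed %zu bytes\n", c.seen);
	return 0;
}
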
ctx                33 crypto/842.c   	void *ctx;
ctx                35 crypto/842.c   	ctx = kmalloc(SW842_MEM_COMPRESS, GFP_KERNEL);
ctx                36 crypto/842.c   	if (!ctx)
ctx                39 crypto/842.c   	return ctx;
ctx                44 crypto/842.c   	struct crypto842_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                46 crypto/842.c   	ctx->wmem = crypto842_alloc_ctx(NULL);
ctx                47 crypto/842.c   	if (IS_ERR(ctx->wmem))
ctx                53 crypto/842.c   static void crypto842_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx                55 crypto/842.c   	kfree(ctx);
ctx                60 crypto/842.c   	struct crypto842_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                62 crypto/842.c   	crypto842_free_ctx(NULL, ctx->wmem);
ctx                69 crypto/842.c   	struct crypto842_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                71 crypto/842.c   	return sw842_compress(src, slen, dst, dlen, ctx->wmem);
ctx                76 crypto/842.c   			       u8 *dst, unsigned int *dlen, void *ctx)
ctx                78 crypto/842.c   	return sw842_compress(src, slen, dst, dlen, ctx);
ctx                90 crypto/842.c   				 u8 *dst, unsigned int *dlen, void *ctx)
ctx               218 crypto/aead.c  	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
ctx               220 crypto/aead.c  	return crypto_aead_setkey(ctx->child, key, keylen);
ctx               226 crypto/aead.c  	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
ctx               228 crypto/aead.c  	return crypto_aead_setauthsize(ctx->child, authsize);
ctx               320 crypto/aead.c  	struct aead_geniv_ctx *ctx = crypto_aead_ctx(aead);
ctx               325 crypto/aead.c  	spin_lock_init(&ctx->lock);
ctx               331 crypto/aead.c  	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
ctx               337 crypto/aead.c  	ctx->sknull = crypto_get_default_null_skcipher();
ctx               338 crypto/aead.c  	err = PTR_ERR(ctx->sknull);
ctx               339 crypto/aead.c  	if (IS_ERR(ctx->sknull))
ctx               347 crypto/aead.c  	ctx->child = child;
ctx               364 crypto/aead.c  	struct aead_geniv_ctx *ctx = crypto_aead_ctx(tfm);
ctx               366 crypto/aead.c  	crypto_free_aead(ctx->child);
ctx               372 crypto/aegis128-core.c 	struct aegis_ctx *ctx = crypto_aead_ctx(aead);
ctx               379 crypto/aegis128-core.c 	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
ctx               399 crypto/aegis128-core.c 	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
ctx               402 crypto/aegis128-core.c 	crypto_aegis128_init(&state, &ctx->key, req->iv);
ctx              1138 crypto/aes_generic.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1142 crypto/aes_generic.c 	ret = aes_expandkey(ctx, in_key, key_len);
ctx              1184 crypto/aes_generic.c 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1186 crypto/aes_generic.c 	const u32 *kp = ctx->key_enc + 4;
ctx              1187 crypto/aes_generic.c 	const int key_len = ctx->key_length;
ctx              1189 crypto/aes_generic.c 	b0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
ctx              1190 crypto/aes_generic.c 	b0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
ctx              1191 crypto/aes_generic.c 	b0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
ctx              1192 crypto/aes_generic.c 	b0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
ctx              1254 crypto/aes_generic.c 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1256 crypto/aes_generic.c 	const int key_len = ctx->key_length;
ctx              1257 crypto/aes_generic.c 	const u32 *kp = ctx->key_dec + 4;
ctx              1259 crypto/aes_generic.c 	b0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
ctx              1260 crypto/aes_generic.c 	b0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
ctx              1261 crypto/aes_generic.c 	b0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
ctx              1262 crypto/aes_generic.c 	b0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
ctx                15 crypto/aes_ti.c 	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                17 crypto/aes_ti.c 	return aes_expandkey(ctx, in_key, key_len);
ctx                22 crypto/aes_ti.c 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                31 crypto/aes_ti.c 	aes_encrypt(ctx, out, in);
ctx                38 crypto/aes_ti.c 	const struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                47 crypto/aes_ti.c 	aes_decrypt(ctx, out, in);
ctx               489 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               493 crypto/af_alg.c 	sgl = list_entry(ctx->tsgl_list.prev, struct af_alg_tsgl, list);
ctx               494 crypto/af_alg.c 	if (!list_empty(&ctx->tsgl_list))
ctx               510 crypto/af_alg.c 		list_add_tail(&sgl->list, &ctx->tsgl_list);
ctx               530 crypto/af_alg.c 	const struct af_alg_ctx *ctx = ask->private;
ctx               538 crypto/af_alg.c 	list_for_each_entry(sgl, &ctx->tsgl_list, list) {
ctx               587 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               592 crypto/af_alg.c 	while (!list_empty(&ctx->tsgl_list)) {
ctx               593 crypto/af_alg.c 		sgl = list_first_entry(&ctx->tsgl_list, struct af_alg_tsgl,
ctx               627 crypto/af_alg.c 			ctx->used -= plen;
ctx               640 crypto/af_alg.c 	if (!ctx->used)
ctx               641 crypto/af_alg.c 		ctx->merge = 0;
ctx               654 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               661 crypto/af_alg.c 		atomic_sub(rsgl->sg_num_bytes, &ctx->rcvused);
ctx               747 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               761 crypto/af_alg.c 		if (sk_wait_event(sk, &timeout, (ctx->used || !ctx->more),
ctx               783 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               786 crypto/af_alg.c 	if (!ctx->used)
ctx               820 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               850 crypto/af_alg.c 	if (!ctx->more && ctx->used) {
ctx               856 crypto/af_alg.c 		ctx->enc = enc;
ctx               858 crypto/af_alg.c 			memcpy(ctx->iv, con.iv->iv, ivsize);
ctx               860 crypto/af_alg.c 		ctx->aead_assoclen = con.aead_assoclen;
ctx               869 crypto/af_alg.c 		if (ctx->merge) {
ctx               870 crypto/af_alg.c 			sgl = list_entry(ctx->tsgl_list.prev,
ctx               883 crypto/af_alg.c 			ctx->merge = (sg->offset + sg->length) &
ctx               886 crypto/af_alg.c 			ctx->used += len;
ctx               905 crypto/af_alg.c 		sgl = list_entry(ctx->tsgl_list.prev, struct af_alg_tsgl,
ctx               932 crypto/af_alg.c 			ctx->used += plen;
ctx               941 crypto/af_alg.c 		ctx->merge = plen & (PAGE_SIZE - 1);
ctx               946 crypto/af_alg.c 	ctx->more = msg->msg_flags & MSG_MORE;
ctx               966 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx               974 crypto/af_alg.c 	if (!ctx->more && ctx->used)
ctx               990 crypto/af_alg.c 	ctx->merge = 0;
ctx               991 crypto/af_alg.c 	sgl = list_entry(ctx->tsgl_list.prev, struct af_alg_tsgl, list);
ctx              1001 crypto/af_alg.c 	ctx->used += size;
ctx              1004 crypto/af_alg.c 	ctx->more = flags & MSG_MORE;
ctx              1060 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx              1066 crypto/af_alg.c 	if (!ctx->more || ctx->used)
ctx              1119 crypto/af_alg.c 	struct af_alg_ctx *ctx = ask->private;
ctx              1158 crypto/af_alg.c 		atomic_add(err, &ctx->rcvused);
ctx                49 crypto/algif_aead.c 	struct af_alg_ctx *ctx = ask->private;
ctx                58 crypto/algif_aead.c 	return ctx->used >= ctx->aead_assoclen + (ctx->enc ? 0 : as);
ctx                95 crypto/algif_aead.c 	struct af_alg_ctx *ctx = ask->private;
ctx               109 crypto/algif_aead.c 	if (!ctx->used) {
ctx               119 crypto/algif_aead.c 	used = ctx->used;
ctx               141 crypto/algif_aead.c 	if (ctx->enc)
ctx               150 crypto/algif_aead.c 	used -= ctx->aead_assoclen;
ctx               181 crypto/algif_aead.c 	processed = used + ctx->aead_assoclen;
ctx               182 crypto/algif_aead.c 	list_for_each_entry_safe(tsgl, tmp, &ctx->tsgl_list, list) {
ctx               215 crypto/algif_aead.c 	if (ctx->enc) {
ctx               281 crypto/algif_aead.c 			       areq->first_rsgl.sgl.sg, used, ctx->iv);
ctx               282 crypto/algif_aead.c 	aead_request_set_ad(&areq->cra_u.aead_req, ctx->aead_assoclen);
ctx               296 crypto/algif_aead.c 		err = ctx->enc ? crypto_aead_encrypt(&areq->cra_u.aead_req) :
ctx               308 crypto/algif_aead.c 					  crypto_req_done, &ctx->wait);
ctx               309 crypto/algif_aead.c 		err = crypto_wait_req(ctx->enc ?
ctx               312 crypto/algif_aead.c 				&ctx->wait);
ctx               528 crypto/algif_aead.c 	struct af_alg_ctx *ctx = ask->private;
ctx               535 crypto/algif_aead.c 	af_alg_pull_tsgl(sk, ctx->used, NULL, 0);
ctx               536 crypto/algif_aead.c 	sock_kzfree_s(sk, ctx->iv, ivlen);
ctx               537 crypto/algif_aead.c 	sock_kfree_s(sk, ctx, ctx->len);
ctx               543 crypto/algif_aead.c 	struct af_alg_ctx *ctx;
ctx               547 crypto/algif_aead.c 	unsigned int len = sizeof(*ctx);
ctx               550 crypto/algif_aead.c 	ctx = sock_kmalloc(sk, len, GFP_KERNEL);
ctx               551 crypto/algif_aead.c 	if (!ctx)
ctx               553 crypto/algif_aead.c 	memset(ctx, 0, len);
ctx               555 crypto/algif_aead.c 	ctx->iv = sock_kmalloc(sk, ivlen, GFP_KERNEL);
ctx               556 crypto/algif_aead.c 	if (!ctx->iv) {
ctx               557 crypto/algif_aead.c 		sock_kfree_s(sk, ctx, len);
ctx               560 crypto/algif_aead.c 	memset(ctx->iv, 0, ivlen);
ctx               562 crypto/algif_aead.c 	INIT_LIST_HEAD(&ctx->tsgl_list);
ctx               563 crypto/algif_aead.c 	ctx->len = len;
ctx               564 crypto/algif_aead.c 	ctx->used = 0;
ctx               565 crypto/algif_aead.c 	atomic_set(&ctx->rcvused, 0);
ctx               566 crypto/algif_aead.c 	ctx->more = 0;
ctx               567 crypto/algif_aead.c 	ctx->merge = 0;
ctx               568 crypto/algif_aead.c 	ctx->enc = 0;
ctx               569 crypto/algif_aead.c 	ctx->aead_assoclen = 0;
ctx               570 crypto/algif_aead.c 	crypto_init_wait(&ctx->wait);
ctx               572 crypto/algif_aead.c 	ask->private = ctx;
ctx                32 crypto/algif_hash.c static int hash_alloc_result(struct sock *sk, struct hash_ctx *ctx)
ctx                36 crypto/algif_hash.c 	if (ctx->result)
ctx                39 crypto/algif_hash.c 	ds = crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req));
ctx                41 crypto/algif_hash.c 	ctx->result = sock_kmalloc(sk, ds, GFP_KERNEL);
ctx                42 crypto/algif_hash.c 	if (!ctx->result)
ctx                45 crypto/algif_hash.c 	memset(ctx->result, 0, ds);
ctx                50 crypto/algif_hash.c static void hash_free_result(struct sock *sk, struct hash_ctx *ctx)
ctx                54 crypto/algif_hash.c 	if (!ctx->result)
ctx                57 crypto/algif_hash.c 	ds = crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req));
ctx                59 crypto/algif_hash.c 	sock_kzfree_s(sk, ctx->result, ds);
ctx                60 crypto/algif_hash.c 	ctx->result = NULL;
ctx                69 crypto/algif_hash.c 	struct hash_ctx *ctx = ask->private;
ctx                77 crypto/algif_hash.c 	if (!ctx->more) {
ctx                79 crypto/algif_hash.c 			hash_free_result(sk, ctx);
ctx                81 crypto/algif_hash.c 		err = crypto_wait_req(crypto_ahash_init(&ctx->req), &ctx->wait);
ctx                86 crypto/algif_hash.c 	ctx->more = 0;
ctx                94 crypto/algif_hash.c 		len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len);
ctx               100 crypto/algif_hash.c 		ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len);
ctx               102 crypto/algif_hash.c 		err = crypto_wait_req(crypto_ahash_update(&ctx->req),
ctx               103 crypto/algif_hash.c 				      &ctx->wait);
ctx               104 crypto/algif_hash.c 		af_alg_free_sg(&ctx->sgl);
ctx               114 crypto/algif_hash.c 	ctx->more = msg->msg_flags & MSG_MORE;
ctx               115 crypto/algif_hash.c 	if (!ctx->more) {
ctx               116 crypto/algif_hash.c 		err = hash_alloc_result(sk, ctx);
ctx               120 crypto/algif_hash.c 		ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0);
ctx               121 crypto/algif_hash.c 		err = crypto_wait_req(crypto_ahash_final(&ctx->req),
ctx               122 crypto/algif_hash.c 				      &ctx->wait);
ctx               136 crypto/algif_hash.c 	struct hash_ctx *ctx = ask->private;
ctx               143 crypto/algif_hash.c 	sg_init_table(ctx->sgl.sg, 1);
ctx               144 crypto/algif_hash.c 	sg_set_page(ctx->sgl.sg, page, size, offset);
ctx               147 crypto/algif_hash.c 		err = hash_alloc_result(sk, ctx);
ctx               150 crypto/algif_hash.c 	} else if (!ctx->more)
ctx               151 crypto/algif_hash.c 		hash_free_result(sk, ctx);
ctx               153 crypto/algif_hash.c 	ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size);
ctx               156 crypto/algif_hash.c 		if (ctx->more)
ctx               157 crypto/algif_hash.c 			err = crypto_ahash_finup(&ctx->req);
ctx               159 crypto/algif_hash.c 			err = crypto_ahash_digest(&ctx->req);
ctx               161 crypto/algif_hash.c 		if (!ctx->more) {
ctx               162 crypto/algif_hash.c 			err = crypto_ahash_init(&ctx->req);
ctx               163 crypto/algif_hash.c 			err = crypto_wait_req(err, &ctx->wait);
ctx               168 crypto/algif_hash.c 		err = crypto_ahash_update(&ctx->req);
ctx               171 crypto/algif_hash.c 	err = crypto_wait_req(err, &ctx->wait);
ctx               175 crypto/algif_hash.c 	ctx->more = flags & MSG_MORE;
ctx               188 crypto/algif_hash.c 	struct hash_ctx *ctx = ask->private;
ctx               189 crypto/algif_hash.c 	unsigned ds = crypto_ahash_digestsize(crypto_ahash_reqtfm(&ctx->req));
ctx               199 crypto/algif_hash.c 	result = ctx->result;
ctx               200 crypto/algif_hash.c 	err = hash_alloc_result(sk, ctx);
ctx               204 crypto/algif_hash.c 	ahash_request_set_crypt(&ctx->req, NULL, ctx->result, 0);
ctx               206 crypto/algif_hash.c 	if (!result && !ctx->more) {
ctx               207 crypto/algif_hash.c 		err = crypto_wait_req(crypto_ahash_init(&ctx->req),
ctx               208 crypto/algif_hash.c 				      &ctx->wait);
ctx               213 crypto/algif_hash.c 	if (!result || ctx->more) {
ctx               214 crypto/algif_hash.c 		ctx->more = 0;
ctx               215 crypto/algif_hash.c 		err = crypto_wait_req(crypto_ahash_final(&ctx->req),
ctx               216 crypto/algif_hash.c 				      &ctx->wait);
ctx               221 crypto/algif_hash.c 	err = memcpy_to_msg(msg, ctx->result, len);
ctx               224 crypto/algif_hash.c 	hash_free_result(sk, ctx);
ctx               235 crypto/algif_hash.c 	struct hash_ctx *ctx = ask->private;
ctx               236 crypto/algif_hash.c 	struct ahash_request *req = &ctx->req;
ctx               245 crypto/algif_hash.c 	more = ctx->more;
ctx               419 crypto/algif_hash.c 	struct hash_ctx *ctx = ask->private;
ctx               421 crypto/algif_hash.c 	hash_free_result(sk, ctx);
ctx               422 crypto/algif_hash.c 	sock_kfree_s(sk, ctx, ctx->len);
ctx               430 crypto/algif_hash.c 	struct hash_ctx *ctx;
ctx               431 crypto/algif_hash.c 	unsigned int len = sizeof(*ctx) + crypto_ahash_reqsize(tfm);
ctx               433 crypto/algif_hash.c 	ctx = sock_kmalloc(sk, len, GFP_KERNEL);
ctx               434 crypto/algif_hash.c 	if (!ctx)
ctx               437 crypto/algif_hash.c 	ctx->result = NULL;
ctx               438 crypto/algif_hash.c 	ctx->len = len;
ctx               439 crypto/algif_hash.c 	ctx->more = 0;
ctx               440 crypto/algif_hash.c 	crypto_init_wait(&ctx->wait);
ctx               442 crypto/algif_hash.c 	ask->private = ctx;
ctx               444 crypto/algif_hash.c 	ahash_request_set_tfm(&ctx->req, tfm);
ctx               445 crypto/algif_hash.c 	ahash_request_set_callback(&ctx->req, CRYPTO_TFM_REQ_MAY_BACKLOG,
ctx               446 crypto/algif_hash.c 				   crypto_req_done, &ctx->wait);
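
hash_sendmsg above maps socket semantics onto the hash API: a write that does not follow a MSG_MORE write triggers init, every chunk is an update, and the final write (MSG_MORE clear) runs final. A sketch of that mapping using a toy additive checksum in place of crypto_ahash_{init,update,final}:

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

struct toy_hash { unsigned int sum; };

static void toy_init(struct toy_hash *h)
{
	h->sum = 0;
}

static void toy_update(struct toy_hash *h, const void *p, size_t n)
{
	const unsigned char *b = p;

	while (n--)
		h->sum += *b++;
}

static unsigned int toy_final(struct toy_hash *h)
{
	return h->sum;
}

struct hash_ctx { struct toy_hash req; bool more; };

/* Mirrors hash_sendmsg: init on a fresh stream, final unless MSG_MORE. */
static void sendmsg_like(struct hash_ctx *ctx, const char *buf, bool more)
{
	if (!ctx->more)
		toy_init(&ctx->req);
	toy_update(&ctx->req, buf, strlen(buf));
	ctx->more = more;
	if (!more)
		printf("digest: %u\n", toy_final(&ctx->req));
}

int main(void)
{
	struct hash_ctx ctx = { .more = false };

	sendmsg_like(&ctx, "hello ", true);	/* MSG_MORE set */
	sendmsg_like(&ctx, "world", false);	/* stream ends, final runs */
	return 0;
}
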
ctx                63 crypto/algif_rng.c 	struct rng_ctx *ctx = ask->private;
ctx                85 crypto/algif_rng.c 	genlen = crypto_rng_get_bytes(ctx->drng, result, len);
ctx               129 crypto/algif_rng.c 	struct rng_ctx *ctx = ask->private;
ctx               131 crypto/algif_rng.c 	sock_kfree_s(sk, ctx, ctx->len);
ctx               137 crypto/algif_rng.c 	struct rng_ctx *ctx;
ctx               139 crypto/algif_rng.c 	unsigned int len = sizeof(*ctx);
ctx               141 crypto/algif_rng.c 	ctx = sock_kmalloc(sk, len, GFP_KERNEL);
ctx               142 crypto/algif_rng.c 	if (!ctx)
ctx               145 crypto/algif_rng.c 	ctx->len = len;
ctx               153 crypto/algif_rng.c 	ctx->drng = private;
ctx               154 crypto/algif_rng.c 	ask->private = ctx;
ctx                57 crypto/algif_skcipher.c 	struct af_alg_ctx *ctx = ask->private;
ctx                64 crypto/algif_skcipher.c 	if (!ctx->used) {
ctx                82 crypto/algif_skcipher.c 	if (len > ctx->used)
ctx                83 crypto/algif_skcipher.c 		len = ctx->used;
ctx                89 crypto/algif_skcipher.c 	if (ctx->more || len < ctx->used)
ctx               112 crypto/algif_skcipher.c 				   areq->first_rsgl.sgl.sg, len, ctx->iv);
ctx               125 crypto/algif_skcipher.c 		err = ctx->enc ?
ctx               139 crypto/algif_skcipher.c 					      crypto_req_done, &ctx->wait);
ctx               140 crypto/algif_skcipher.c 		err = crypto_wait_req(ctx->enc ?
ctx               143 crypto/algif_skcipher.c 						 &ctx->wait);
ctx               322 crypto/algif_skcipher.c 	struct af_alg_ctx *ctx = ask->private;
ctx               327 crypto/algif_skcipher.c 	af_alg_pull_tsgl(sk, ctx->used, NULL, 0);
ctx               328 crypto/algif_skcipher.c 	sock_kzfree_s(sk, ctx->iv, crypto_skcipher_ivsize(tfm));
ctx               329 crypto/algif_skcipher.c 	sock_kfree_s(sk, ctx, ctx->len);
ctx               335 crypto/algif_skcipher.c 	struct af_alg_ctx *ctx;
ctx               338 crypto/algif_skcipher.c 	unsigned int len = sizeof(*ctx);
ctx               340 crypto/algif_skcipher.c 	ctx = sock_kmalloc(sk, len, GFP_KERNEL);
ctx               341 crypto/algif_skcipher.c 	if (!ctx)
ctx               344 crypto/algif_skcipher.c 	ctx->iv = sock_kmalloc(sk, crypto_skcipher_ivsize(tfm),
ctx               346 crypto/algif_skcipher.c 	if (!ctx->iv) {
ctx               347 crypto/algif_skcipher.c 		sock_kfree_s(sk, ctx, len);
ctx               351 crypto/algif_skcipher.c 	memset(ctx->iv, 0, crypto_skcipher_ivsize(tfm));
ctx               353 crypto/algif_skcipher.c 	INIT_LIST_HEAD(&ctx->tsgl_list);
ctx               354 crypto/algif_skcipher.c 	ctx->len = len;
ctx               355 crypto/algif_skcipher.c 	ctx->used = 0;
ctx               356 crypto/algif_skcipher.c 	atomic_set(&ctx->rcvused, 0);
ctx               357 crypto/algif_skcipher.c 	ctx->more = 0;
ctx               358 crypto/algif_skcipher.c 	ctx->merge = 0;
ctx               359 crypto/algif_skcipher.c 	ctx->enc = 0;
ctx               360 crypto/algif_skcipher.c 	crypto_init_wait(&ctx->wait);
ctx               362 crypto/algif_skcipher.c 	ask->private = ctx;
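
The crypto_init_wait()/crypto_req_done pairing above is the stock way to run an asynchronous skcipher synchronously. A kernel-side sketch of the same pattern with a stack-local wait, assuming AES-128-CBC; the helper name is illustrative:

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* buf must not live on the stack: it is mapped through a scatterlist */
static int cbc_aes_encrypt_once(const u8 key[16], u8 *iv,
				u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, 16);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* same shape as the recvmsg path above, minus the socket context */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
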
ctx                80 crypto/ansi_cprng.c static int _get_more_prng_bytes(struct prng_context *ctx, int cont_test)
ctx                88 crypto/ansi_cprng.c 		ctx);
ctx                90 crypto/ansi_cprng.c 	hexdump("Input DT: ", ctx->DT, DEFAULT_BLK_SZ);
ctx                91 crypto/ansi_cprng.c 	hexdump("Input I: ", ctx->I, DEFAULT_BLK_SZ);
ctx                92 crypto/ansi_cprng.c 	hexdump("Input V: ", ctx->V, DEFAULT_BLK_SZ);
ctx               105 crypto/ansi_cprng.c 			memcpy(tmp, ctx->DT, DEFAULT_BLK_SZ);
ctx               106 crypto/ansi_cprng.c 			output = ctx->I;
ctx               116 crypto/ansi_cprng.c 			xor_vectors(ctx->I, ctx->V, tmp, DEFAULT_BLK_SZ);
ctx               118 crypto/ansi_cprng.c 			output = ctx->rand_data;
ctx               125 crypto/ansi_cprng.c 			if (!memcmp(ctx->rand_data, ctx->last_rand_data,
ctx               129 crypto/ansi_cprng.c 						ctx);
ctx               134 crypto/ansi_cprng.c 					ctx);
ctx               136 crypto/ansi_cprng.c 				ctx->flags |= PRNG_NEED_RESET;
ctx               139 crypto/ansi_cprng.c 			memcpy(ctx->last_rand_data, ctx->rand_data,
ctx               146 crypto/ansi_cprng.c 			xor_vectors(ctx->rand_data, ctx->I, tmp,
ctx               148 crypto/ansi_cprng.c 			output = ctx->V;
ctx               155 crypto/ansi_cprng.c 		crypto_cipher_encrypt_one(ctx->tfm, output, tmp);
ctx               163 crypto/ansi_cprng.c 		ctx->DT[i] += 1;
ctx               164 crypto/ansi_cprng.c 		if (ctx->DT[i] != 0)
ctx               168 crypto/ansi_cprng.c 	dbgprint("Returning new block for context %p\n", ctx);
ctx               169 crypto/ansi_cprng.c 	ctx->rand_data_valid = 0;
ctx               171 crypto/ansi_cprng.c 	hexdump("Output DT: ", ctx->DT, DEFAULT_BLK_SZ);
ctx               172 crypto/ansi_cprng.c 	hexdump("Output I: ", ctx->I, DEFAULT_BLK_SZ);
ctx               173 crypto/ansi_cprng.c 	hexdump("Output V: ", ctx->V, DEFAULT_BLK_SZ);
ctx               174 crypto/ansi_cprng.c 	hexdump("New Random Data: ", ctx->rand_data, DEFAULT_BLK_SZ);
ctx               180 crypto/ansi_cprng.c static int get_prng_bytes(char *buf, size_t nbytes, struct prng_context *ctx,
ctx               188 crypto/ansi_cprng.c 	spin_lock_bh(&ctx->prng_lock);
ctx               191 crypto/ansi_cprng.c 	if (ctx->flags & PRNG_NEED_RESET)
ctx               199 crypto/ansi_cprng.c 	if (ctx->flags & PRNG_FIXED_SIZE) {
ctx               212 crypto/ansi_cprng.c 		byte_count, ctx);
ctx               216 crypto/ansi_cprng.c 	if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
ctx               217 crypto/ansi_cprng.c 		if (_get_more_prng_bytes(ctx, do_cont_test) < 0) {
ctx               229 crypto/ansi_cprng.c 		while (ctx->rand_data_valid < DEFAULT_BLK_SZ) {
ctx               230 crypto/ansi_cprng.c 			*ptr = ctx->rand_data[ctx->rand_data_valid];
ctx               233 crypto/ansi_cprng.c 			ctx->rand_data_valid++;
ctx               243 crypto/ansi_cprng.c 		if (ctx->rand_data_valid == DEFAULT_BLK_SZ) {
ctx               244 crypto/ansi_cprng.c 			if (_get_more_prng_bytes(ctx, do_cont_test) < 0) {
ctx               250 crypto/ansi_cprng.c 		if (ctx->rand_data_valid > 0)
ctx               252 crypto/ansi_cprng.c 		memcpy(ptr, ctx->rand_data, DEFAULT_BLK_SZ);
ctx               253 crypto/ansi_cprng.c 		ctx->rand_data_valid += DEFAULT_BLK_SZ;
ctx               264 crypto/ansi_cprng.c 	spin_unlock_bh(&ctx->prng_lock);
ctx               266 crypto/ansi_cprng.c 		err, ctx);
ctx               270 crypto/ansi_cprng.c static void free_prng_context(struct prng_context *ctx)
ctx               272 crypto/ansi_cprng.c 	crypto_free_cipher(ctx->tfm);
ctx               275 crypto/ansi_cprng.c static int reset_prng_context(struct prng_context *ctx,
ctx               282 crypto/ansi_cprng.c 	spin_lock_bh(&ctx->prng_lock);
ctx               283 crypto/ansi_cprng.c 	ctx->flags |= PRNG_NEED_RESET;
ctx               291 crypto/ansi_cprng.c 		memcpy(ctx->V, V, DEFAULT_BLK_SZ);
ctx               293 crypto/ansi_cprng.c 		memcpy(ctx->V, DEFAULT_V_SEED, DEFAULT_BLK_SZ);
ctx               296 crypto/ansi_cprng.c 		memcpy(ctx->DT, DT, DEFAULT_BLK_SZ);
ctx               298 crypto/ansi_cprng.c 		memset(ctx->DT, 0, DEFAULT_BLK_SZ);
ctx               300 crypto/ansi_cprng.c 	memset(ctx->rand_data, 0, DEFAULT_BLK_SZ);
ctx               301 crypto/ansi_cprng.c 	memset(ctx->last_rand_data, 0, DEFAULT_BLK_SZ);
ctx               303 crypto/ansi_cprng.c 	ctx->rand_data_valid = DEFAULT_BLK_SZ;
ctx               305 crypto/ansi_cprng.c 	ret = crypto_cipher_setkey(ctx->tfm, prng_key, klen);
ctx               308 crypto/ansi_cprng.c 			crypto_cipher_get_flags(ctx->tfm));
ctx               313 crypto/ansi_cprng.c 	ctx->flags &= ~PRNG_NEED_RESET;
ctx               315 crypto/ansi_cprng.c 	spin_unlock_bh(&ctx->prng_lock);
ctx               321 crypto/ansi_cprng.c 	struct prng_context *ctx = crypto_tfm_ctx(tfm);
ctx               323 crypto/ansi_cprng.c 	spin_lock_init(&ctx->prng_lock);
ctx               324 crypto/ansi_cprng.c 	ctx->tfm = crypto_alloc_cipher("aes", 0, 0);
ctx               325 crypto/ansi_cprng.c 	if (IS_ERR(ctx->tfm)) {
ctx               327 crypto/ansi_cprng.c 				ctx);
ctx               328 crypto/ansi_cprng.c 		return PTR_ERR(ctx->tfm);
ctx               331 crypto/ansi_cprng.c 	if (reset_prng_context(ctx, NULL, DEFAULT_PRNG_KSZ, NULL, NULL) < 0)
ctx               339 crypto/ansi_cprng.c 	ctx->flags |= PRNG_NEED_RESET;
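
Callers reach reset_prng_context()/get_prng_bytes() through the generic crypto_rng API. A sketch with a placeholder seed; for this driver the seed concatenates V, the key, and optionally DT (the helper name is illustrative):

#include <crypto/rng.h>

static int get_cprng_bytes(u8 *out, unsigned int outlen,
			   const u8 *seed, unsigned int slen)
{
	struct crypto_rng *rng;
	int err;

	rng = crypto_alloc_rng("ansi_cprng", 0, 0);
	if (IS_ERR(rng))
		return PTR_ERR(rng);

	err = crypto_rng_reset(rng, seed, slen);	/* -> reset_prng_context() */
	if (!err)
		err = crypto_rng_get_bytes(rng, out, outlen); /* -> get_prng_bytes() */

	crypto_free_rng(rng);
	return err;
}
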
ctx               465 crypto/anubis.c 	struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               481 crypto/anubis.c 	ctx->key_len = key_len * 8;
ctx               482 crypto/anubis.c 	N = ctx->key_len >> 5;
ctx               483 crypto/anubis.c 	ctx->R = R = 8 + N;
ctx               524 crypto/anubis.c 		ctx->E[r][0] = K0;
ctx               525 crypto/anubis.c 		ctx->E[r][1] = K1;
ctx               526 crypto/anubis.c 		ctx->E[r][2] = K2;
ctx               527 crypto/anubis.c 		ctx->E[r][3] = K3;
ctx               557 crypto/anubis.c 		ctx->D[0][i] = ctx->E[R][i];
ctx               558 crypto/anubis.c 		ctx->D[R][i] = ctx->E[0][i];
ctx               562 crypto/anubis.c 			u32 v = ctx->E[R - r][i];
ctx               563 crypto/anubis.c 			ctx->D[r][i] =
ctx               664 crypto/anubis.c 	struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               665 crypto/anubis.c 	anubis_crypt(ctx->E, dst, src, ctx->R);
ctx               670 crypto/anubis.c 	struct anubis_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               671 crypto/anubis.c 	anubis_crypt(ctx->D, dst, src, ctx->R);
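
The setkey/encrypt pair above implements the single-block cipher interface, which is consumed as follows; a minimal kernel-side sketch with an illustrative wrapper name:

#include <linux/crypto.h>

static int anubis_encrypt_one_block(const u8 *key, unsigned int keylen,
				    u8 dst[16], const u8 src[16])
{
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("anubis", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, keylen);	  /* fills ctx->E/ctx->D */
	if (!err)
		crypto_cipher_encrypt_one(tfm, dst, src); /* anubis_crypt(ctx->E, ...) */

	crypto_free_cipher(tfm);
	return err;
}
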
ctx                19 crypto/arc4.c  	struct arc4_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                21 crypto/arc4.c  	return arc4_setkey(ctx, in_key, key_len);
ctx                27 crypto/arc4.c  	struct arc4_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                34 crypto/arc4.c  		arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr,
ctx                23 crypto/asymmetric_keys/mscode_parser.c 	struct pefile_context *ctx = _ctx;
ctx                30 crypto/asymmetric_keys/mscode_parser.c 	return asn1_ber_decoder(&mscode_decoder, ctx, content_data, data_len);
ctx                72 crypto/asymmetric_keys/mscode_parser.c 	struct pefile_context *ctx = context;
ctx                79 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "md4";
ctx                82 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "md5";
ctx                85 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "sha1";
ctx                88 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "sha256";
ctx                91 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "sha384";
ctx                94 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "sha512";
ctx                97 crypto/asymmetric_keys/mscode_parser.c 		ctx->digest_algo = "sha224";
ctx               120 crypto/asymmetric_keys/mscode_parser.c 	struct pefile_context *ctx = context;
ctx               122 crypto/asymmetric_keys/mscode_parser.c 	ctx->digest = kmemdup(value, vlen, GFP_KERNEL);
ctx               123 crypto/asymmetric_keys/mscode_parser.c 	if (!ctx->digest)
ctx               126 crypto/asymmetric_keys/mscode_parser.c 	ctx->digest_len = vlen;
ctx                28 crypto/asymmetric_keys/pkcs7_key_type.c static int pkcs7_view_content(void *ctx, const void *data, size_t len,
ctx                31 crypto/asymmetric_keys/pkcs7_key_type.c 	struct key_preparsed_payload *prep = ctx;
ctx               117 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx;
ctx               121 crypto/asymmetric_keys/pkcs7_parser.c 	ctx = kzalloc(sizeof(struct pkcs7_parse_context), GFP_KERNEL);
ctx               122 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx)
ctx               124 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg = kzalloc(sizeof(struct pkcs7_message), GFP_KERNEL);
ctx               125 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->msg)
ctx               127 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo = kzalloc(sizeof(struct pkcs7_signed_info), GFP_KERNEL);
ctx               128 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->sinfo)
ctx               130 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo->sig = kzalloc(sizeof(struct public_key_signature),
ctx               132 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->sinfo->sig)
ctx               135 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->data = (unsigned long)data;
ctx               136 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->ppcerts = &ctx->certs;
ctx               137 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->ppsinfo = &ctx->msg->signed_infos;
ctx               140 crypto/asymmetric_keys/pkcs7_parser.c 	ret = asn1_ber_decoder(&pkcs7_decoder, ctx, data, datalen);
ctx               146 crypto/asymmetric_keys/pkcs7_parser.c 	ret = pkcs7_check_authattrs(ctx->msg);
ctx               152 crypto/asymmetric_keys/pkcs7_parser.c 	msg = ctx->msg;
ctx               153 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg = NULL;
ctx               156 crypto/asymmetric_keys/pkcs7_parser.c 	while (ctx->certs) {
ctx               157 crypto/asymmetric_keys/pkcs7_parser.c 		struct x509_certificate *cert = ctx->certs;
ctx               158 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->certs = cert->next;
ctx               162 crypto/asymmetric_keys/pkcs7_parser.c 	pkcs7_free_signed_info(ctx->sinfo);
ctx               164 crypto/asymmetric_keys/pkcs7_parser.c 	pkcs7_free_message(ctx->msg);
ctx               166 crypto/asymmetric_keys/pkcs7_parser.c 	kfree(ctx);
ctx               208 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               210 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->last_oid = look_up_OID(value, vlen);
ctx               211 crypto/asymmetric_keys/pkcs7_parser.c 	if (ctx->last_oid == OID__NR) {
ctx               215 crypto/asymmetric_keys/pkcs7_parser.c 		       (unsigned long)value - ctx->data, buffer);
ctx               227 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               229 crypto/asymmetric_keys/pkcs7_parser.c 	switch (ctx->last_oid) {
ctx               231 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "md4";
ctx               234 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "md5";
ctx               237 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "sha1";
ctx               240 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "sha256";
ctx               243 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "sha384";
ctx               246 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "sha512";
ctx               249 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->hash_algo = "sha224";
ctx               252 crypto/asymmetric_keys/pkcs7_parser.c 		printk("Unsupported digest algo: %u\n", ctx->last_oid);
ctx               265 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               267 crypto/asymmetric_keys/pkcs7_parser.c 	switch (ctx->last_oid) {
ctx               269 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->pkey_algo = "rsa";
ctx               270 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->sinfo->sig->encoding = "pkcs1";
ctx               273 crypto/asymmetric_keys/pkcs7_parser.c 		printk("Unsupported pkey algo: %u\n", ctx->last_oid);
ctx               286 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               288 crypto/asymmetric_keys/pkcs7_parser.c 	if (ctx->last_oid != OID_signed_data) {
ctx               303 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               309 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->version = version = *(const u8 *)value;
ctx               337 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               349 crypto/asymmetric_keys/pkcs7_parser.c 		if (ctx->msg->version != 1)
ctx               351 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->expect_skid = false;
ctx               355 crypto/asymmetric_keys/pkcs7_parser.c 		if (ctx->msg->version == 1)
ctx               357 crypto/asymmetric_keys/pkcs7_parser.c 		ctx->expect_skid = true;
ctx               380 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               385 crypto/asymmetric_keys/pkcs7_parser.c 			 tag, (unsigned long)ctx - ctx->data);
ctx               404 crypto/asymmetric_keys/pkcs7_parser.c 	x509->index = ++ctx->x509_index;
ctx               408 crypto/asymmetric_keys/pkcs7_parser.c 	*ctx->ppcerts = x509;
ctx               409 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->ppcerts = &x509->next;
ctx               420 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               424 crypto/asymmetric_keys/pkcs7_parser.c 	*ctx->ppcerts = ctx->msg->certs;
ctx               425 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->certs = ctx->certs;
ctx               426 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->certs = NULL;
ctx               427 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->ppcerts = &ctx->certs;
ctx               438 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               440 crypto/asymmetric_keys/pkcs7_parser.c 	if (ctx->last_oid != OID_data &&
ctx               441 crypto/asymmetric_keys/pkcs7_parser.c 	    ctx->last_oid != OID_msIndirectData) {
ctx               442 crypto/asymmetric_keys/pkcs7_parser.c 		pr_warn("Unsupported data type %d\n", ctx->last_oid);
ctx               446 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->data_type = ctx->last_oid;
ctx               458 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               462 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->data = value;
ctx               463 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->data_len = vlen;
ctx               464 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->msg->data_hdrlen = hdrlen;
ctx               475 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               476 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_signed_info *sinfo = ctx->sinfo;
ctx               481 crypto/asymmetric_keys/pkcs7_parser.c 	switch (ctx->last_oid) {
ctx               486 crypto/asymmetric_keys/pkcs7_parser.c 		if (content_type != ctx->msg->data_type) {
ctx               488 crypto/asymmetric_keys/pkcs7_parser.c 				ctx->msg->data_type, sinfo->index,
ctx               515 crypto/asymmetric_keys/pkcs7_parser.c 		if (ctx->msg->data_type != OID_msIndirectData) {
ctx               535 crypto/asymmetric_keys/pkcs7_parser.c 		if (ctx->msg->data_type != OID_msIndirectData) {
ctx               558 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               559 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_signed_info *sinfo = ctx->sinfo;
ctx               567 crypto/asymmetric_keys/pkcs7_parser.c 	if (ctx->msg->data_type != OID_msIndirectData &&
ctx               586 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               587 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_serial = value;
ctx               588 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_serial_size = vlen;
ctx               599 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               600 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_issuer = value;
ctx               601 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_issuer_size = vlen;
ctx               612 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               616 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_skid = value;
ctx               617 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->raw_skid_size = vlen;
ctx               628 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               630 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo->sig->s = kmemdup(value, vlen, GFP_KERNEL);
ctx               631 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->sinfo->sig->s)
ctx               634 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo->sig->s_size = vlen;
ctx               645 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_parse_context *ctx = context;
ctx               646 crypto/asymmetric_keys/pkcs7_parser.c 	struct pkcs7_signed_info *sinfo = ctx->sinfo;
ctx               649 crypto/asymmetric_keys/pkcs7_parser.c 	if (ctx->msg->data_type == OID_msIndirectData && !sinfo->authattrs) {
ctx               655 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->expect_skid) {
ctx               656 crypto/asymmetric_keys/pkcs7_parser.c 		kid = asymmetric_key_generate_id(ctx->raw_serial,
ctx               657 crypto/asymmetric_keys/pkcs7_parser.c 						 ctx->raw_serial_size,
ctx               658 crypto/asymmetric_keys/pkcs7_parser.c 						 ctx->raw_issuer,
ctx               659 crypto/asymmetric_keys/pkcs7_parser.c 						 ctx->raw_issuer_size);
ctx               661 crypto/asymmetric_keys/pkcs7_parser.c 		kid = asymmetric_key_generate_id(ctx->raw_skid,
ctx               662 crypto/asymmetric_keys/pkcs7_parser.c 						 ctx->raw_skid_size,
ctx               671 crypto/asymmetric_keys/pkcs7_parser.c 	sinfo->index = ++ctx->sinfo_index;
ctx               672 crypto/asymmetric_keys/pkcs7_parser.c 	*ctx->ppsinfo = sinfo;
ctx               673 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->ppsinfo = &sinfo->next;
ctx               674 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo = kzalloc(sizeof(struct pkcs7_signed_info), GFP_KERNEL);
ctx               675 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->sinfo)
ctx               677 crypto/asymmetric_keys/pkcs7_parser.c 	ctx->sinfo->sig = kzalloc(sizeof(struct public_key_signature),
ctx               679 crypto/asymmetric_keys/pkcs7_parser.c 	if (!ctx->sinfo->sig)
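
Every parser in this block follows the same contract: asn1_ber_decoder() hands the opaque context pointer to per-element action routines together with the header length, tag, and value span. A sketch of one such routine, using illustrative names (my_ctx, my_note_oid) rather than kernel symbols:

#include <linux/kernel.h>
#include <linux/oid_registry.h>

struct my_ctx {
	unsigned long data;	/* start of the blob, kept for offset reporting */
	enum OID last_oid;
};

static int my_note_oid(void *context, size_t hdrlen, unsigned char tag,
		       const void *value, size_t vlen)
{
	struct my_ctx *ctx = context;

	ctx->last_oid = look_up_OID(value, vlen);
	if (ctx->last_oid == OID__NR)
		pr_debug("Unknown OID at offset %lu\n",
			 (unsigned long)value - ctx->data);
	return 0;
}
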
ctx                37 crypto/asymmetric_keys/pkcs8_parser.c 	struct pkcs8_parse_context *ctx = context;
ctx                39 crypto/asymmetric_keys/pkcs8_parser.c 	ctx->last_oid = look_up_OID(value, vlen);
ctx                40 crypto/asymmetric_keys/pkcs8_parser.c 	if (ctx->last_oid == OID__NR) {
ctx                45 crypto/asymmetric_keys/pkcs8_parser.c 			(unsigned long)value - ctx->data, buffer);
ctx                71 crypto/asymmetric_keys/pkcs8_parser.c 	struct pkcs8_parse_context *ctx = context;
ctx                73 crypto/asymmetric_keys/pkcs8_parser.c 	if (ctx->last_oid != OID_rsaEncryption)
ctx                76 crypto/asymmetric_keys/pkcs8_parser.c 	ctx->pub->pkey_algo = "rsa";
ctx                87 crypto/asymmetric_keys/pkcs8_parser.c 	struct pkcs8_parse_context *ctx = context;
ctx                89 crypto/asymmetric_keys/pkcs8_parser.c 	ctx->key = value;
ctx                90 crypto/asymmetric_keys/pkcs8_parser.c 	ctx->key_size = vlen;
ctx                99 crypto/asymmetric_keys/pkcs8_parser.c 	struct pkcs8_parse_context ctx;
ctx               103 crypto/asymmetric_keys/pkcs8_parser.c 	memset(&ctx, 0, sizeof(ctx));
ctx               106 crypto/asymmetric_keys/pkcs8_parser.c 	ctx.pub = kzalloc(sizeof(struct public_key), GFP_KERNEL);
ctx               107 crypto/asymmetric_keys/pkcs8_parser.c 	if (!ctx.pub)
ctx               110 crypto/asymmetric_keys/pkcs8_parser.c 	ctx.data = (unsigned long)data;
ctx               113 crypto/asymmetric_keys/pkcs8_parser.c 	ret = asn1_ber_decoder(&pkcs8_decoder, &ctx, data, datalen);
ctx               118 crypto/asymmetric_keys/pkcs8_parser.c 	pub = ctx.pub;
ctx               119 crypto/asymmetric_keys/pkcs8_parser.c 	pub->key = kmemdup(ctx.key, ctx.key_size, GFP_KERNEL);
ctx               123 crypto/asymmetric_keys/pkcs8_parser.c 	pub->keylen = ctx.key_size;
ctx               128 crypto/asymmetric_keys/pkcs8_parser.c 	kfree(ctx.pub);
ctx                25 crypto/asymmetric_keys/tpm_parser.c 	struct tpm_parse_context *ctx = context;
ctx                27 crypto/asymmetric_keys/tpm_parser.c 	ctx->blob = value;
ctx                28 crypto/asymmetric_keys/tpm_parser.c 	ctx->blob_len = vlen;
ctx                38 crypto/asymmetric_keys/tpm_parser.c 	struct tpm_parse_context ctx;
ctx                41 crypto/asymmetric_keys/tpm_parser.c 	memset(&ctx, 0, sizeof(ctx));
ctx                44 crypto/asymmetric_keys/tpm_parser.c 	ret = asn1_ber_decoder(&tpm_decoder, &ctx, data, datalen);
ctx                48 crypto/asymmetric_keys/tpm_parser.c 	return tpm_key_create(ctx.blob, ctx.blob_len);
ctx                23 crypto/asymmetric_keys/verify_pefile.c 			       struct pefile_context *ctx)
ctx                60 crypto/asymmetric_keys/verify_pefile.c 		ctx->image_checksum_offset =
ctx                62 crypto/asymmetric_keys/verify_pefile.c 		ctx->header_size = pe32->header_size;
ctx                64 crypto/asymmetric_keys/verify_pefile.c 		ctx->n_data_dirents = pe32->data_dirs;
ctx                69 crypto/asymmetric_keys/verify_pefile.c 		ctx->image_checksum_offset =
ctx                71 crypto/asymmetric_keys/verify_pefile.c 		ctx->header_size = pe64->header_size;
ctx                73 crypto/asymmetric_keys/verify_pefile.c 		ctx->n_data_dirents = pe64->data_dirs;
ctx                81 crypto/asymmetric_keys/verify_pefile.c 	pr_debug("checksum @ %x\n", ctx->image_checksum_offset);
ctx                82 crypto/asymmetric_keys/verify_pefile.c 	pr_debug("header size = %x\n", ctx->header_size);
ctx                84 crypto/asymmetric_keys/verify_pefile.c 	if (cursor >= ctx->header_size || ctx->header_size >= datalen)
ctx                87 crypto/asymmetric_keys/verify_pefile.c 	if (ctx->n_data_dirents > (ctx->header_size - cursor) / sizeof(*dde))
ctx                91 crypto/asymmetric_keys/verify_pefile.c 	cursor += sizeof(*dde) * ctx->n_data_dirents;
ctx                93 crypto/asymmetric_keys/verify_pefile.c 	ctx->cert_dirent_offset =
ctx                95 crypto/asymmetric_keys/verify_pefile.c 	ctx->certs_size = ddir->certs.size;
ctx               102 crypto/asymmetric_keys/verify_pefile.c 	chkaddr(ctx->header_size, ddir->certs.virtual_address,
ctx               104 crypto/asymmetric_keys/verify_pefile.c 	ctx->sig_offset = ddir->certs.virtual_address;
ctx               105 crypto/asymmetric_keys/verify_pefile.c 	ctx->sig_len = ddir->certs.size;
ctx               107 crypto/asymmetric_keys/verify_pefile.c 		 ctx->sig_len, ctx->sig_offset,
ctx               108 crypto/asymmetric_keys/verify_pefile.c 		 ctx->sig_len, pebuf + ctx->sig_offset);
ctx               110 crypto/asymmetric_keys/verify_pefile.c 	ctx->n_sections = pe->sections;
ctx               111 crypto/asymmetric_keys/verify_pefile.c 	if (ctx->n_sections > (ctx->header_size - cursor) / sizeof(*sec))
ctx               113 crypto/asymmetric_keys/verify_pefile.c 	ctx->secs = secs = pebuf + cursor;
ctx               123 crypto/asymmetric_keys/verify_pefile.c 				    struct pefile_context *ctx)
ctx               129 crypto/asymmetric_keys/verify_pefile.c 	if (ctx->sig_len < sizeof(wrapper)) {
ctx               134 crypto/asymmetric_keys/verify_pefile.c 	memcpy(&wrapper, pebuf + ctx->sig_offset, sizeof(wrapper));
ctx               141 crypto/asymmetric_keys/verify_pefile.c 	if (round_up(wrapper.length, 8) != ctx->sig_len) {
ctx               159 crypto/asymmetric_keys/verify_pefile.c 	ctx->sig_len = wrapper.length;
ctx               160 crypto/asymmetric_keys/verify_pefile.c 	ctx->sig_offset += sizeof(wrapper);
ctx               161 crypto/asymmetric_keys/verify_pefile.c 	ctx->sig_len -= sizeof(wrapper);
ctx               162 crypto/asymmetric_keys/verify_pefile.c 	if (ctx->sig_len < 4) {
ctx               168 crypto/asymmetric_keys/verify_pefile.c 	pkcs7 = pebuf + ctx->sig_offset;
ctx               191 crypto/asymmetric_keys/verify_pefile.c 	if (len <= ctx->sig_len) {
ctx               193 crypto/asymmetric_keys/verify_pefile.c 		ctx->sig_len = len;
ctx               242 crypto/asymmetric_keys/verify_pefile.c 				     struct pefile_context *ctx,
ctx               251 crypto/asymmetric_keys/verify_pefile.c 	ret = crypto_shash_update(desc, pebuf, ctx->image_checksum_offset);
ctx               255 crypto/asymmetric_keys/verify_pefile.c 	tmp = ctx->image_checksum_offset + sizeof(uint32_t);
ctx               257 crypto/asymmetric_keys/verify_pefile.c 				  ctx->cert_dirent_offset - tmp);
ctx               261 crypto/asymmetric_keys/verify_pefile.c 	tmp = ctx->cert_dirent_offset + sizeof(struct data_dirent);
ctx               262 crypto/asymmetric_keys/verify_pefile.c 	ret = crypto_shash_update(desc, pebuf + tmp, ctx->header_size - tmp);
ctx               266 crypto/asymmetric_keys/verify_pefile.c 	canon = kcalloc(ctx->n_sections, sizeof(unsigned), GFP_KERNEL);
ctx               274 crypto/asymmetric_keys/verify_pefile.c 	for (loop = 1; loop < ctx->n_sections; loop++) {
ctx               276 crypto/asymmetric_keys/verify_pefile.c 			if (pefile_compare_shdrs(&ctx->secs[canon[i]],
ctx               277 crypto/asymmetric_keys/verify_pefile.c 						 &ctx->secs[loop]) > 0) {
ctx               286 crypto/asymmetric_keys/verify_pefile.c 	hashed_bytes = ctx->header_size;
ctx               287 crypto/asymmetric_keys/verify_pefile.c 	for (loop = 0; loop < ctx->n_sections; loop++) {
ctx               289 crypto/asymmetric_keys/verify_pefile.c 		if (ctx->secs[i].raw_data_size == 0)
ctx               292 crypto/asymmetric_keys/verify_pefile.c 					  pebuf + ctx->secs[i].data_addr,
ctx               293 crypto/asymmetric_keys/verify_pefile.c 					  ctx->secs[i].raw_data_size);
ctx               298 crypto/asymmetric_keys/verify_pefile.c 		hashed_bytes += ctx->secs[i].raw_data_size;
ctx               303 crypto/asymmetric_keys/verify_pefile.c 		tmp = hashed_bytes + ctx->certs_size;
ctx               319 crypto/asymmetric_keys/verify_pefile.c 			    struct pefile_context *ctx)
ctx               327 crypto/asymmetric_keys/verify_pefile.c 	kenter(",%s", ctx->digest_algo);
ctx               332 crypto/asymmetric_keys/verify_pefile.c 	tfm = crypto_alloc_shash(ctx->digest_algo, 0, 0);
ctx               339 crypto/asymmetric_keys/verify_pefile.c 	if (digest_size != ctx->digest_len) {
ctx               341 crypto/asymmetric_keys/verify_pefile.c 			 digest_size, ctx->digest_len);
ctx               357 crypto/asymmetric_keys/verify_pefile.c 	ret = pefile_digest_pe_contents(pebuf, pelen, ctx, desc);
ctx               366 crypto/asymmetric_keys/verify_pefile.c 	pr_debug("Digest calc = [%*ph]\n", ctx->digest_len, digest);
ctx               371 crypto/asymmetric_keys/verify_pefile.c 	if (memcmp(digest, ctx->digest, ctx->digest_len) != 0) {
ctx               420 crypto/asymmetric_keys/verify_pefile.c 	struct pefile_context ctx;
ctx               425 crypto/asymmetric_keys/verify_pefile.c 	memset(&ctx, 0, sizeof(ctx));
ctx               426 crypto/asymmetric_keys/verify_pefile.c 	ret = pefile_parse_binary(pebuf, pelen, &ctx);
ctx               430 crypto/asymmetric_keys/verify_pefile.c 	ret = pefile_strip_sig_wrapper(pebuf, &ctx);
ctx               435 crypto/asymmetric_keys/verify_pefile.c 				     pebuf + ctx.sig_offset, ctx.sig_len,
ctx               437 crypto/asymmetric_keys/verify_pefile.c 				     mscode_parse, &ctx);
ctx               442 crypto/asymmetric_keys/verify_pefile.c 		 ctx.digest_len, ctx.digest_len, ctx.digest);
ctx               447 crypto/asymmetric_keys/verify_pefile.c 	ret = pefile_digest_pe(pebuf, pelen, &ctx);
ctx               450 crypto/asymmetric_keys/verify_pefile.c 	kzfree(ctx.digest);
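
pefile_digest_pe() above uses the synchronous shash API: allocate by name (ctx->digest_algo), size a descriptor, then init/update/final. A self-contained sketch of that pattern, with a single update standing in for the section-by-section hashing (the helper name is illustrative):

#include <crypto/hash.h>
#include <linux/slab.h>

static int digest_buf(const char *algo, const void *buf, unsigned int len,
		      u8 *out, unsigned int outlen)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err = -ENOMEM;

	tfm = crypto_alloc_shash(algo, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	if (crypto_shash_digestsize(tfm) > outlen) {
		err = -EINVAL;
		goto out_free_tfm;
	}

	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc)
		goto out_free_tfm;

	desc->tfm = tfm;
	err = crypto_shash_init(desc);
	if (!err)
		err = crypto_shash_update(desc, buf, len);
	if (!err)
		err = crypto_shash_final(desc, out);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}
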
ctx                66 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx;
ctx                80 crypto/asymmetric_keys/x509_cert_parser.c 	ctx = kzalloc(sizeof(struct x509_parse_context), GFP_KERNEL);
ctx                81 crypto/asymmetric_keys/x509_cert_parser.c 	if (!ctx)
ctx                84 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert = cert;
ctx                85 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->data = (unsigned long)data;
ctx                88 crypto/asymmetric_keys/x509_cert_parser.c 	ret = asn1_ber_decoder(&x509_decoder, ctx, data, datalen);
ctx                93 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->raw_akid) {
ctx                95 crypto/asymmetric_keys/x509_cert_parser.c 			 ctx->raw_akid_size, ctx->raw_akid_size, ctx->raw_akid);
ctx                96 crypto/asymmetric_keys/x509_cert_parser.c 		ret = asn1_ber_decoder(&x509_akid_decoder, ctx,
ctx                97 crypto/asymmetric_keys/x509_cert_parser.c 				       ctx->raw_akid, ctx->raw_akid_size);
ctx               105 crypto/asymmetric_keys/x509_cert_parser.c 	cert->pub->key = kmemdup(ctx->key, ctx->key_size, GFP_KERNEL);
ctx               109 crypto/asymmetric_keys/x509_cert_parser.c 	cert->pub->keylen = ctx->key_size;
ctx               111 crypto/asymmetric_keys/x509_cert_parser.c 	cert->pub->params = kmemdup(ctx->params, ctx->params_size, GFP_KERNEL);
ctx               115 crypto/asymmetric_keys/x509_cert_parser.c 	cert->pub->paramlen = ctx->params_size;
ctx               116 crypto/asymmetric_keys/x509_cert_parser.c 	cert->pub->algo = ctx->key_algo;
ctx               139 crypto/asymmetric_keys/x509_cert_parser.c 	kfree(ctx);
ctx               143 crypto/asymmetric_keys/x509_cert_parser.c 	kfree(ctx);
ctx               159 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               161 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->last_oid = look_up_OID(value, vlen);
ctx               162 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->last_oid == OID__NR) {
ctx               166 crypto/asymmetric_keys/x509_cert_parser.c 			 (unsigned long)value - ctx->data, buffer);
ctx               179 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               182 crypto/asymmetric_keys/x509_cert_parser.c 		 hdrlen, tag, (unsigned long)value - ctx->data, vlen);
ctx               184 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->tbs = value - hdrlen;
ctx               185 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->tbs_size = vlen + hdrlen;
ctx               196 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               198 crypto/asymmetric_keys/x509_cert_parser.c 	pr_debug("PubKey Algo: %u\n", ctx->last_oid);
ctx               200 crypto/asymmetric_keys/x509_cert_parser.c 	switch (ctx->last_oid) {
ctx               207 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "md4";
ctx               211 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "sha1";
ctx               215 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "sha256";
ctx               219 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "sha384";
ctx               223 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "sha512";
ctx               227 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "sha224";
ctx               231 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "streebog256";
ctx               235 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->sig->hash_algo = "streebog512";
ctx               240 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->pkey_algo = "rsa";
ctx               241 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->encoding = "pkcs1";
ctx               242 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->algo_oid = ctx->last_oid;
ctx               245 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->pkey_algo = "ecrdsa";
ctx               246 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->encoding = "raw";
ctx               247 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->algo_oid = ctx->last_oid;
ctx               258 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               260 crypto/asymmetric_keys/x509_cert_parser.c 	pr_debug("Signature type: %u size %zu\n", ctx->last_oid, vlen);
ctx               262 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->last_oid != ctx->algo_oid) {
ctx               264 crypto/asymmetric_keys/x509_cert_parser.c 			ctx->algo_oid, ctx->last_oid);
ctx               268 crypto/asymmetric_keys/x509_cert_parser.c 	if (strcmp(ctx->cert->sig->pkey_algo, "rsa") == 0 ||
ctx               269 crypto/asymmetric_keys/x509_cert_parser.c 	    strcmp(ctx->cert->sig->pkey_algo, "ecrdsa") == 0) {
ctx               278 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_sig = value;
ctx               279 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_sig_size = vlen;
ctx               290 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               291 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_serial = value;
ctx               292 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_serial_size = vlen;
ctx               303 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               305 crypto/asymmetric_keys/x509_cert_parser.c 	switch (ctx->last_oid) {
ctx               307 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cn_size = vlen;
ctx               308 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cn_offset = (unsigned long)value - ctx->data;
ctx               311 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->o_size = vlen;
ctx               312 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->o_offset = (unsigned long)value - ctx->data;
ctx               315 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->email_size = vlen;
ctx               316 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->email_offset = (unsigned long)value - ctx->data;
ctx               328 crypto/asymmetric_keys/x509_cert_parser.c static int x509_fabricate_name(struct x509_parse_context *ctx, size_t hdrlen,
ctx               332 crypto/asymmetric_keys/x509_cert_parser.c 	const void *name, *data = (const void *)ctx->data;
ctx               340 crypto/asymmetric_keys/x509_cert_parser.c 	if (!ctx->cn_size && !ctx->o_size && !ctx->email_size) {
ctx               348 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->cn_size && ctx->o_size) {
ctx               352 crypto/asymmetric_keys/x509_cert_parser.c 		namesize = ctx->cn_size;
ctx               353 crypto/asymmetric_keys/x509_cert_parser.c 		name = data + ctx->cn_offset;
ctx               354 crypto/asymmetric_keys/x509_cert_parser.c 		if (ctx->cn_size >= ctx->o_size &&
ctx               355 crypto/asymmetric_keys/x509_cert_parser.c 		    memcmp(data + ctx->cn_offset, data + ctx->o_offset,
ctx               356 crypto/asymmetric_keys/x509_cert_parser.c 			   ctx->o_size) == 0)
ctx               358 crypto/asymmetric_keys/x509_cert_parser.c 		if (ctx->cn_size >= 7 &&
ctx               359 crypto/asymmetric_keys/x509_cert_parser.c 		    ctx->o_size >= 7 &&
ctx               360 crypto/asymmetric_keys/x509_cert_parser.c 		    memcmp(data + ctx->cn_offset, data + ctx->o_offset, 7) == 0)
ctx               363 crypto/asymmetric_keys/x509_cert_parser.c 		buffer = kmalloc(ctx->o_size + 2 + ctx->cn_size + 1,
ctx               369 crypto/asymmetric_keys/x509_cert_parser.c 		       data + ctx->o_offset, ctx->o_size);
ctx               370 crypto/asymmetric_keys/x509_cert_parser.c 		buffer[ctx->o_size + 0] = ':';
ctx               371 crypto/asymmetric_keys/x509_cert_parser.c 		buffer[ctx->o_size + 1] = ' ';
ctx               372 crypto/asymmetric_keys/x509_cert_parser.c 		memcpy(buffer + ctx->o_size + 2,
ctx               373 crypto/asymmetric_keys/x509_cert_parser.c 		       data + ctx->cn_offset, ctx->cn_size);
ctx               374 crypto/asymmetric_keys/x509_cert_parser.c 		buffer[ctx->o_size + 2 + ctx->cn_size] = 0;
ctx               377 crypto/asymmetric_keys/x509_cert_parser.c 	} else if (ctx->cn_size) {
ctx               378 crypto/asymmetric_keys/x509_cert_parser.c 		namesize = ctx->cn_size;
ctx               379 crypto/asymmetric_keys/x509_cert_parser.c 		name = data + ctx->cn_offset;
ctx               380 crypto/asymmetric_keys/x509_cert_parser.c 	} else if (ctx->o_size) {
ctx               381 crypto/asymmetric_keys/x509_cert_parser.c 		namesize = ctx->o_size;
ctx               382 crypto/asymmetric_keys/x509_cert_parser.c 		name = data + ctx->o_offset;
ctx               384 crypto/asymmetric_keys/x509_cert_parser.c 		namesize = ctx->email_size;
ctx               385 crypto/asymmetric_keys/x509_cert_parser.c 		name = data + ctx->email_offset;
ctx               397 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cn_size = 0;
ctx               398 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->o_size = 0;
ctx               399 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->email_size = 0;
ctx               407 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               408 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_issuer = value;
ctx               409 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_issuer_size = vlen;
ctx               410 crypto/asymmetric_keys/x509_cert_parser.c 	return x509_fabricate_name(ctx, hdrlen, tag, &ctx->cert->issuer, vlen);
ctx               417 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               418 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_subject = value;
ctx               419 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->raw_subject_size = vlen;
ctx               420 crypto/asymmetric_keys/x509_cert_parser.c 	return x509_fabricate_name(ctx, hdrlen, tag, &ctx->cert->subject, vlen);
ctx               430 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               437 crypto/asymmetric_keys/x509_cert_parser.c 	if (!ctx->cert->raw_subject || ctx->key)
ctx               439 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->params = value - hdrlen;
ctx               440 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->params_size = vlen + hdrlen;
ctx               451 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               453 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->key_algo = ctx->last_oid;
ctx               454 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->last_oid == OID_rsaEncryption)
ctx               455 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->pub->pkey_algo = "rsa";
ctx               456 crypto/asymmetric_keys/x509_cert_parser.c 	else if (ctx->last_oid == OID_gost2012PKey256 ||
ctx               457 crypto/asymmetric_keys/x509_cert_parser.c 		 ctx->last_oid == OID_gost2012PKey512)
ctx               458 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->pub->pkey_algo = "ecrdsa";
ctx               465 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->key = value + 1;
ctx               466 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->key_size = vlen - 1;
ctx               480 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               484 crypto/asymmetric_keys/x509_cert_parser.c 	pr_debug("Extension: %u\n", ctx->last_oid);
ctx               486 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->last_oid == OID_subjectKeyIdentifier) {
ctx               488 crypto/asymmetric_keys/x509_cert_parser.c 		if (ctx->cert->skid || vlen < 3)
ctx               495 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->raw_skid_size = vlen;
ctx               496 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->raw_skid = v;
ctx               500 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->cert->skid = kid;
ctx               505 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->last_oid == OID_authorityKeyIdentifier) {
ctx               507 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->raw_akid = v;
ctx               508 crypto/asymmetric_keys/x509_cert_parser.c 		ctx->raw_akid_size = vlen;
ctx               614 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               615 crypto/asymmetric_keys/x509_cert_parser.c 	return x509_decode_time(&ctx->cert->valid_from, hdrlen, tag, value, vlen);
ctx               622 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               623 crypto/asymmetric_keys/x509_cert_parser.c 	return x509_decode_time(&ctx->cert->valid_to, hdrlen, tag, value, vlen);
ctx               633 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               638 crypto/asymmetric_keys/x509_cert_parser.c 	if (ctx->cert->sig->auth_ids[1])
ctx               645 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->auth_ids[1] = kid;
ctx               656 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               660 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->akid_raw_issuer = value;
ctx               661 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->akid_raw_issuer_size = vlen;
ctx               672 crypto/asymmetric_keys/x509_cert_parser.c 	struct x509_parse_context *ctx = context;
ctx               677 crypto/asymmetric_keys/x509_cert_parser.c 	if (!ctx->akid_raw_issuer || ctx->cert->sig->auth_ids[0])
ctx               682 crypto/asymmetric_keys/x509_cert_parser.c 					 ctx->akid_raw_issuer,
ctx               683 crypto/asymmetric_keys/x509_cert_parser.c 					 ctx->akid_raw_issuer_size);
ctx               688 crypto/asymmetric_keys/x509_cert_parser.c 	ctx->cert->sig->auth_ids[0] = kid;
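
The buffer arithmetic inside x509_fabricate_name() above (ctx->o_offset/ctx->cn_offset) builds an "O: CN" display name when a certificate carries both fields. A sketch of just that concatenation step, with an illustrative helper name:

#include <linux/slab.h>
#include <linux/string.h>

static char *fabricate_o_cn(const u8 *o, size_t o_size,
			    const u8 *cn, size_t cn_size)
{
	char *buf = kmalloc(o_size + 2 + cn_size + 1, GFP_KERNEL);

	if (!buf)
		return NULL;
	memcpy(buf, o, o_size);
	buf[o_size] = ':';
	buf[o_size + 1] = ' ';
	memcpy(buf + o_size + 2, cn, cn_size);
	buf[o_size + 2 + cn_size] = '\0';
	return buf;		/* e.g. "Acme: Signing Key" */
}
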
ctx                87 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx                88 crypto/authenc.c 	struct crypto_ahash *auth = ctx->auth;
ctx                89 crypto/authenc.c 	struct crypto_skcipher *enc = ctx->enc;
ctx               146 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx               148 crypto/authenc.c 	struct crypto_ahash *auth = ctx->auth;
ctx               190 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx               191 crypto/authenc.c 	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
ctx               193 crypto/authenc.c 	skcipher_request_set_sync_tfm(skreq, ctx->null);
ctx               206 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx               209 crypto/authenc.c 	struct crypto_skcipher *enc = ctx->enc;
ctx               244 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx               265 crypto/authenc.c 	skcipher_request_set_tfm(skreq, ctx->enc);
ctx               293 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(authenc);
ctx               295 crypto/authenc.c 	struct crypto_ahash *auth = ctx->auth;
ctx               321 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx               341 crypto/authenc.c 	ctx->auth = auth;
ctx               342 crypto/authenc.c 	ctx->enc = enc;
ctx               343 crypto/authenc.c 	ctx->null = null;
ctx               366 crypto/authenc.c 	struct crypto_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx               368 crypto/authenc.c 	crypto_free_ahash(ctx->auth);
ctx               369 crypto/authenc.c 	crypto_free_skcipher(ctx->enc);
ctx               375 crypto/authenc.c 	struct authenc_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               377 crypto/authenc.c 	crypto_drop_skcipher(&ctx->enc);
ctx               378 crypto/authenc.c 	crypto_drop_ahash(&ctx->auth);
ctx               390 crypto/authenc.c 	struct authenc_instance_ctx *ctx;
ctx               414 crypto/authenc.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               419 crypto/authenc.c 	ctx = aead_instance_ctx(inst);
ctx               421 crypto/authenc.c 	err = crypto_init_ahash_spawn(&ctx->auth, auth,
ctx               426 crypto/authenc.c 	crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
ctx               427 crypto/authenc.c 	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
ctx               433 crypto/authenc.c 	enc = crypto_spawn_skcipher_alg(&ctx->enc);
ctx               435 crypto/authenc.c 	ctx->reqoff = ALIGN(2 * auth->digestsize + auth_base->cra_alignmask,
ctx               481 crypto/authenc.c 	crypto_drop_skcipher(&ctx->enc);
ctx               483 crypto/authenc.c 	crypto_drop_ahash(&ctx->auth);
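
The setkey path of this template expects the composite key as an rtattr-wrapped blob (the layout crypto_authenc_extractkeys() parses): a parameter block carrying the big-endian cipher-key length, then the auth key, then the cipher key. A sketch of assembling such a blob, with an illustrative helper name:

#include <crypto/authenc.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

static void *build_authenc_key(const u8 *authkey, unsigned int alen,
			       const u8 *enckey, unsigned int elen,
			       unsigned int *blen)
{
	struct crypto_authenc_key_param *param;
	struct rtattr *rta;
	u8 *buf;

	*blen = RTA_SPACE(sizeof(*param)) + alen + elen;
	buf = kmalloc(*blen, GFP_KERNEL);
	if (!buf)
		return NULL;

	rta = (struct rtattr *)buf;
	rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
	rta->rta_len = RTA_LENGTH(sizeof(*param));
	param = RTA_DATA(rta);
	param->enckeylen = cpu_to_be32(elen);

	memcpy(buf + RTA_SPACE(sizeof(*param)), authkey, alen);
	memcpy(buf + RTA_SPACE(sizeof(*param)) + alen, enckey, elen);
	return buf;	/* pass to crypto_aead_setkey() on an "authenc(...)" tfm */
}
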
ctx                61 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx                62 crypto/authencesn.c 	struct crypto_ahash *auth = ctx->auth;
ctx                63 crypto/authencesn.c 	struct crypto_skcipher *enc = ctx->enc;
ctx               100 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               102 crypto/authencesn.c 	struct crypto_ahash *auth = ctx->auth;
ctx               134 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               135 crypto/authencesn.c 	struct crypto_ahash *auth = ctx->auth;
ctx               138 crypto/authencesn.c 	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
ctx               180 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               181 crypto/authencesn.c 	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
ctx               183 crypto/authencesn.c 	skcipher_request_set_sync_tfm(skreq, ctx->null);
ctx               195 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               197 crypto/authencesn.c 						  ctx->reqoff);
ctx               198 crypto/authencesn.c 	struct crypto_skcipher *enc = ctx->enc;
ctx               235 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               237 crypto/authencesn.c 						  ctx->reqoff);
ctx               238 crypto/authencesn.c 	struct crypto_ahash *auth = ctx->auth;
ctx               263 crypto/authencesn.c 	skcipher_request_set_tfm(skreq, ctx->enc);
ctx               284 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
ctx               285 crypto/authencesn.c 	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
ctx               287 crypto/authencesn.c 	struct crypto_ahash *auth = ctx->auth;
ctx               336 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
ctx               356 crypto/authencesn.c 	ctx->auth = auth;
ctx               357 crypto/authencesn.c 	ctx->enc = enc;
ctx               358 crypto/authencesn.c 	ctx->null = null;
ctx               360 crypto/authencesn.c 	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
ctx               366 crypto/authencesn.c 		ctx->reqoff +
ctx               384 crypto/authencesn.c 	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
ctx               386 crypto/authencesn.c 	crypto_free_ahash(ctx->auth);
ctx               387 crypto/authencesn.c 	crypto_free_skcipher(ctx->enc);
ctx               393 crypto/authencesn.c 	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               395 crypto/authencesn.c 	crypto_drop_skcipher(&ctx->enc);
ctx               396 crypto/authencesn.c 	crypto_drop_ahash(&ctx->auth);
ctx               408 crypto/authencesn.c 	struct authenc_esn_instance_ctx *ctx;
ctx               432 crypto/authencesn.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               437 crypto/authencesn.c 	ctx = aead_instance_ctx(inst);
ctx               439 crypto/authencesn.c 	err = crypto_init_ahash_spawn(&ctx->auth, auth,
ctx               444 crypto/authencesn.c 	crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
ctx               445 crypto/authencesn.c 	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
ctx               451 crypto/authencesn.c 	enc = crypto_spawn_skcipher_alg(&ctx->enc);
ctx               496 crypto/authencesn.c 	crypto_drop_skcipher(&ctx->enc);
ctx               498 crypto/authencesn.c 	crypto_drop_ahash(&ctx->auth);
ctx               346 crypto/blowfish_common.c 	struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               347 crypto/blowfish_common.c 	u32 *P = ctx->p;
ctx               348 crypto/blowfish_common.c 	u32 *S = ctx->s;
ctx               376 crypto/blowfish_common.c 		encrypt_block((struct bf_ctx *)ctx, data, data);
ctx               384 crypto/blowfish_common.c 			encrypt_block((struct bf_ctx *)ctx, data, data);
ctx                38 crypto/blowfish_generic.c 	struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                41 crypto/blowfish_generic.c 	const u32 *P = ctx->p;
ctx                42 crypto/blowfish_generic.c 	const u32 *S = ctx->s;
ctx                72 crypto/blowfish_generic.c 	struct bf_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                75 crypto/blowfish_generic.c 	const u32 *P = ctx->p;
ctx                76 crypto/blowfish_generic.c 	const u32 *S = ctx->s;
ctx                91 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
ctx                92 crypto/ccm.c   	struct crypto_skcipher *ctr = ctx->ctr;
ctx                93 crypto/ccm.c   	struct crypto_ahash *mac = ctx->mac;
ctx               181 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
ctx               207 crypto/ccm.c   	ahash_request_set_tfm(ahreq, ctx->mac);
ctx               296 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
ctx               317 crypto/ccm.c   	skcipher_request_set_tfm(skreq, ctx->ctr);
ctx               356 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
ctx               382 crypto/ccm.c   	skcipher_request_set_tfm(skreq, ctx->ctr);
ctx               405 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
ctx               420 crypto/ccm.c   	ctx->mac = mac;
ctx               421 crypto/ccm.c   	ctx->ctr = ctr;
ctx               439 crypto/ccm.c   	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
ctx               441 crypto/ccm.c   	crypto_free_ahash(ctx->mac);
ctx               442 crypto/ccm.c   	crypto_free_skcipher(ctx->ctr);
ctx               447 crypto/ccm.c   	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               449 crypto/ccm.c   	crypto_drop_ahash(&ctx->mac);
ctx               450 crypto/ccm.c   	crypto_drop_skcipher(&ctx->ctr);
ctx               605 crypto/ccm.c   	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
ctx               606 crypto/ccm.c   	struct crypto_aead *child = ctx->child;
ctx               613 crypto/ccm.c   	memcpy(ctx->nonce, key + keylen, 3);
ctx               628 crypto/ccm.c   	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
ctx               639 crypto/ccm.c   	return crypto_aead_setauthsize(ctx->child, authsize);
ctx               647 crypto/ccm.c   	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
ctx               648 crypto/ccm.c   	struct crypto_aead *child = ctx->child;
ctx               656 crypto/ccm.c   	memcpy(iv + 1, ctx->nonce, 3);
ctx               710 crypto/ccm.c   	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
ctx               718 crypto/ccm.c   	ctx->child = aead;
ctx               733 crypto/ccm.c   	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
ctx               735 crypto/ccm.c   	crypto_free_aead(ctx->child);
ctx               835 crypto/ccm.c   	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
ctx               837 crypto/ccm.c   	return crypto_cipher_setkey(ctx->child, inkey, keylen);
ctx               842 crypto/ccm.c   	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               844 crypto/ccm.c   	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;
ctx               846 crypto/ccm.c   	ctx->len = 0;
ctx               857 crypto/ccm.c   	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               860 crypto/ccm.c   	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;
ctx               863 crypto/ccm.c   		unsigned int l = min(len, bs - ctx->len);
ctx               865 crypto/ccm.c   		crypto_xor(dg + ctx->len, p, l);
ctx               866 crypto/ccm.c   		ctx->len += l;
ctx               870 crypto/ccm.c   		if (ctx->len == bs) {
ctx               872 crypto/ccm.c   			ctx->len = 0;
ctx               883 crypto/ccm.c   	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               886 crypto/ccm.c   	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;
ctx               888 crypto/ccm.c   	if (ctx->len)
ctx               900 crypto/ccm.c   	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               906 crypto/ccm.c   	ctx->child = cipher;
ctx               913 crypto/ccm.c   	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               914 crypto/ccm.c   	crypto_free_cipher(ctx->child);
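
The rfc4309 wrapper above (note the memcpy(iv + 1, ctx->nonce, 3) fragment) front-loads a 16-byte CCM IV before handing off to the inner "ccm(aes)". A sketch of that layout, with an illustrative helper name:

#include <linux/string.h>
#include <linux/types.h>

static void rfc4309_build_iv(u8 iv[16], const u8 nonce[3], const u8 req_iv[8])
{
	iv[0] = 3;			/* L' = 3: a 4-byte length field */
	memcpy(iv + 1, nonce, 3);	/* ctx->nonce captured at setkey time */
	memcpy(iv + 4, req_iv, 8);	/* caller-supplied per-request IV */
	/* bytes 12..15 form the block counter; ccm(aes) initialises them */
}
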
ctx                88 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx                92 crypto/chacha20poly1305.c 	memcpy(iv + sizeof(leicb), ctx->salt, ctx->saltlen);
ctx                93 crypto/chacha20poly1305.c 	memcpy(iv + sizeof(leicb) + ctx->saltlen, req->iv,
ctx                94 crypto/chacha20poly1305.c 	       CHACHA_IV_SIZE - sizeof(leicb) - ctx->saltlen);
ctx               127 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               145 crypto/chacha20poly1305.c 	skcipher_request_set_tfm(&creq->req, ctx->chacha);
ctx               174 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(tfm);
ctx               185 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               203 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               215 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               232 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               245 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               262 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               274 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               291 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               298 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               315 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               324 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               341 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               348 crypto/chacha20poly1305.c 	ahash_request_set_tfm(&preq->req, ctx->poly);
ctx               365 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(tfm);
ctx               385 crypto/chacha20poly1305.c 	skcipher_request_set_tfm(&creq->req, ctx->chacha);
ctx               403 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               421 crypto/chacha20poly1305.c 	skcipher_request_set_tfm(&creq->req, ctx->chacha);
ctx               479 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(aead);
ctx               482 crypto/chacha20poly1305.c 	if (keylen != ctx->saltlen + CHACHA_KEY_SIZE)
ctx               485 crypto/chacha20poly1305.c 	keylen -= ctx->saltlen;
ctx               486 crypto/chacha20poly1305.c 	memcpy(ctx->salt, key + keylen, ctx->saltlen);
ctx               488 crypto/chacha20poly1305.c 	crypto_skcipher_clear_flags(ctx->chacha, CRYPTO_TFM_REQ_MASK);
ctx               489 crypto/chacha20poly1305.c 	crypto_skcipher_set_flags(ctx->chacha, crypto_aead_get_flags(aead) &
ctx               492 crypto/chacha20poly1305.c 	err = crypto_skcipher_setkey(ctx->chacha, key, keylen);
ctx               493 crypto/chacha20poly1305.c 	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctx->chacha) &
ctx               511 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(tfm);
ctx               526 crypto/chacha20poly1305.c 	ctx->chacha = chacha;
ctx               527 crypto/chacha20poly1305.c 	ctx->poly = poly;
ctx               528 crypto/chacha20poly1305.c 	ctx->saltlen = ictx->saltlen;
ctx               547 crypto/chacha20poly1305.c 	struct chachapoly_ctx *ctx = crypto_aead_ctx(tfm);
ctx               549 crypto/chacha20poly1305.c 	crypto_free_ahash(ctx->poly);
ctx               550 crypto/chacha20poly1305.c 	crypto_free_skcipher(ctx->chacha);
ctx               555 crypto/chacha20poly1305.c 	struct chachapoly_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               557 crypto/chacha20poly1305.c 	crypto_drop_skcipher(&ctx->chacha);
ctx               558 crypto/chacha20poly1305.c 	crypto_drop_ahash(&ctx->poly);
ctx               570 crypto/chacha20poly1305.c 	struct chachapoly_instance_ctx *ctx;
ctx               605 crypto/chacha20poly1305.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               609 crypto/chacha20poly1305.c 	ctx = aead_instance_ctx(inst);
ctx               610 crypto/chacha20poly1305.c 	ctx->saltlen = CHACHAPOLY_IV_SIZE - ivsize;
ctx               611 crypto/chacha20poly1305.c 	err = crypto_init_ahash_spawn(&ctx->poly, poly_hash,
ctx               616 crypto/chacha20poly1305.c 	crypto_set_skcipher_spawn(&ctx->chacha, aead_crypto_instance(inst));
ctx               617 crypto/chacha20poly1305.c 	err = crypto_grab_skcipher(&ctx->chacha, chacha_name, 0,
ctx               623 crypto/chacha20poly1305.c 	chacha = crypto_spawn_skcipher_alg(&ctx->chacha);
ctx               651 crypto/chacha20poly1305.c 				     ctx->saltlen;
ctx               673 crypto/chacha20poly1305.c 	crypto_drop_skcipher(&ctx->chacha);
ctx               675 crypto/chacha20poly1305.c 	crypto_drop_ahash(&ctx->poly);
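
The three memcpy() calls from crypto/chacha20poly1305.c above (lines 92-94) imply a fixed IV layout: a 32-bit little-endian initial block counter, then the instance salt, then the per-request IV, filling CHACHA_IV_SIZE (16) bytes. A sketch of that construction, with the byte-order caveat noted in a comment:

/* Sketch of the ChaCha IV layout used by the chachapoly glue above. */
#include <stdint.h>
#include <string.h>

#define CHACHA_IV_SIZE 16

static void chachapoly_build_iv(uint8_t iv[CHACHA_IV_SIZE],
				uint32_t icb, const uint8_t *salt,
				size_t saltlen, const uint8_t *req_iv)
{
	uint32_t leicb = icb; /* needs cpu_to_le32() on big-endian hosts */

	memcpy(iv, &leicb, sizeof(leicb));
	memcpy(iv + sizeof(leicb), salt, saltlen);
	memcpy(iv + sizeof(leicb) + saltlen, req_iv,
	       CHACHA_IV_SIZE - sizeof(leicb) - saltlen);
}
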
ctx                35 crypto/chacha_generic.c 			     const struct chacha_ctx *ctx, const u8 *iv)
ctx                43 crypto/chacha_generic.c 	crypto_chacha_init(state, ctx, iv);
ctx                52 crypto/chacha_generic.c 			       nbytes, ctx->nrounds);
ctx                59 crypto/chacha_generic.c void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
ctx                65 crypto/chacha_generic.c 	state[4]  = ctx->key[0];
ctx                66 crypto/chacha_generic.c 	state[5]  = ctx->key[1];
ctx                67 crypto/chacha_generic.c 	state[6]  = ctx->key[2];
ctx                68 crypto/chacha_generic.c 	state[7]  = ctx->key[3];
ctx                69 crypto/chacha_generic.c 	state[8]  = ctx->key[4];
ctx                70 crypto/chacha_generic.c 	state[9]  = ctx->key[5];
ctx                71 crypto/chacha_generic.c 	state[10] = ctx->key[6];
ctx                72 crypto/chacha_generic.c 	state[11] = ctx->key[7];
ctx                83 crypto/chacha_generic.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                89 crypto/chacha_generic.c 	for (i = 0; i < ARRAY_SIZE(ctx->key); i++)
ctx                90 crypto/chacha_generic.c 		ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
ctx                92 crypto/chacha_generic.c 	ctx->nrounds = nrounds;
ctx               113 crypto/chacha_generic.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               115 crypto/chacha_generic.c 	return chacha_stream_xor(req, ctx, req->iv);
ctx               122 crypto/chacha_generic.c 	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               128 crypto/chacha_generic.c 	crypto_chacha_init(state, ctx, req->iv);
ctx               129 crypto/chacha_generic.c 	hchacha_block(state, subctx.key, ctx->nrounds);
ctx               130 crypto/chacha_generic.c 	subctx.nrounds = ctx->nrounds;
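
The crypto_chacha_init() fragments above (crypto/chacha_generic.c 65-72) fill words 4-11 of the 16-word ChaCha state with the key. The full layout is the standard one: words 0-3 hold the "expand 32-byte k" constants, word 12 the block counter, and words 13-15 the nonce, all as little-endian 32-bit words. A self-contained sketch:

/* Standard 16-word ChaCha state initialization. */
#include <stdint.h>

static void chacha_init_state(uint32_t state[16], const uint32_t key[8],
			      const uint8_t iv[16])
{
	state[0] = 0x61707865;  /* "expa" */
	state[1] = 0x3320646e;  /* "nd 3" */
	state[2] = 0x79622d32;  /* "2-by" */
	state[3] = 0x6b206574;  /* "te k" */
	for (int i = 0; i < 8; i++)
		state[4 + i] = key[i];
	/* words 12-15 come from the 16-byte IV: counter, then nonce */
	for (int i = 0; i < 4; i++)
		state[12 + i] = (uint32_t)iv[4 * i] |
				(uint32_t)iv[4 * i + 1] << 8 |
				(uint32_t)iv[4 * i + 2] << 16 |
				(uint32_t)iv[4 * i + 3] << 24;
}
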
ctx                30 crypto/cmac.c  	u8 ctx[];
ctx                46 crypto/cmac.c  	u8 ctx[];
ctx                53 crypto/cmac.c  	struct cmac_tfm_ctx *ctx = crypto_shash_ctx(parent);
ctx                55 crypto/cmac.c  	__be64 *consts = PTR_ALIGN((void *)ctx->ctx,
ctx                61 crypto/cmac.c  	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
ctx                67 crypto/cmac.c  	crypto_cipher_encrypt_one(ctx->child, (u8 *)consts, (u8 *)consts);
ctx               107 crypto/cmac.c  	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               109 crypto/cmac.c  	u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs;
ctx               111 crypto/cmac.c  	ctx->len = 0;
ctx               123 crypto/cmac.c  	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               126 crypto/cmac.c  	u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
ctx               130 crypto/cmac.c  	if ((ctx->len + len) <= bs) {
ctx               131 crypto/cmac.c  		memcpy(odds + ctx->len, p, len);
ctx               132 crypto/cmac.c  		ctx->len += len;
ctx               137 crypto/cmac.c  	memcpy(odds + ctx->len, p, bs - ctx->len);
ctx               138 crypto/cmac.c  	len -= bs - ctx->len;
ctx               139 crypto/cmac.c  	p += bs - ctx->len;
ctx               145 crypto/cmac.c  	ctx->len = 0;
ctx               158 crypto/cmac.c  		ctx->len = len;
ctx               169 crypto/cmac.c  	struct cmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               172 crypto/cmac.c  	u8 *consts = PTR_ALIGN((void *)tctx->ctx,
ctx               174 crypto/cmac.c  	u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1);
ctx               178 crypto/cmac.c  	if (ctx->len != bs) {
ctx               180 crypto/cmac.c  		u8 *p = odds + ctx->len;
ctx               185 crypto/cmac.c  		rlen = bs - ctx->len - 1;
ctx               205 crypto/cmac.c  	struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               211 crypto/cmac.c  	ctx->child = cipher;
ctx               218 crypto/cmac.c  	struct cmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               219 crypto/cmac.c  	crypto_free_cipher(ctx->child);
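
The cmac final path above (crypto/cmac.c 178-185) pads an incomplete last block with a single 0x80 byte followed by zeros, and that same condition selects the second derived constant (K2 instead of K1) from the buffer set up at setkey time. A minimal sketch of just that decision, assuming the two constants sit back to back as in the tfm context above:

/* CMAC last-block padding and subkey selection, as a sketch. */
#include <stddef.h>
#include <string.h>

#define BS 16

static const unsigned char *cmac_pad_last_block(unsigned char blk[BS],
						size_t used,
						const unsigned char consts[2 * BS])
{
	if (used != BS) {              /* incomplete final block */
		blk[used] = 0x80;
		if (BS - used - 1)
			memset(blk + used + 1, 0, BS - used - 1);
		return consts + BS;    /* K2 */
	}
	return consts;                 /* K1 */
}

The caller then XORs the padded block and the returned constant into the running CBC state before the final cipher invocation, which is what the crypto_xor()/encrypt sequence in cmac_final does.
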
ctx                60 crypto/crc32c_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                62 crypto/crc32c_generic.c 	ctx->crc = mctx->key;
ctx                88 crypto/crc32c_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                90 crypto/crc32c_generic.c 	ctx->crc = __crc32c_le(ctx->crc, data, length);
ctx                96 crypto/crc32c_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                98 crypto/crc32c_generic.c 	put_unaligned_le32(~ctx->crc, out);
ctx               111 crypto/crc32c_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               113 crypto/crc32c_generic.c 	return __chksum_finup(&ctx->crc, data, len, out);
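
The crc32c_generic.c lines above show the whole shash flow: the "key" seeds the running CRC, update folds data in, and final stores the bitwise complement little-endian. A userspace sketch, where crc32c() stands in for the kernel's __crc32c_le():

/* crc32c init/update/final sketch mirroring the shash above. */
#include <stdint.h>
#include <stddef.h>

uint32_t crc32c(uint32_t crc, const void *data, size_t len); /* assumed */

struct chksum_desc { uint32_t crc; };

static void chksum_init(struct chksum_desc *d, uint32_t seed)
{
	d->crc = seed;                       /* ctx->crc = mctx->key */
}

static void chksum_update(struct chksum_desc *d, const void *p, size_t n)
{
	d->crc = crc32c(d->crc, p, n);
}

static void chksum_final(const struct chksum_desc *d, uint8_t out[4])
{
	uint32_t v = ~d->crc;                /* put_unaligned_le32(~ctx->crc, out) */

	out[0] = v; out[1] = v >> 8; out[2] = v >> 16; out[3] = v >> 24;
}
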
ctx                44 crypto/crct10dif_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                46 crypto/crct10dif_generic.c 	ctx->crc = 0;
ctx                54 crypto/crct10dif_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                56 crypto/crct10dif_generic.c 	ctx->crc = crc_t10dif_generic(ctx->crc, data, length);
ctx                62 crypto/crct10dif_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                64 crypto/crct10dif_generic.c 	*(__u16 *)out = ctx->crc;
ctx                77 crypto/crct10dif_generic.c 	struct chksum_desc_ctx *ctx = shash_desc_ctx(desc);
ctx                79 crypto/crct10dif_generic.c 	return __chksum_finup(ctx->crc, data, len, out);
ctx               253 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
ctx               254 crypto/cryptd.c 	struct crypto_sync_skcipher *child = ctx->child;
ctx               271 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               273 crypto/cryptd.c 	int refcnt = refcount_read(&ctx->refcnt);
ctx               279 crypto/cryptd.c 	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
ctx               289 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               290 crypto/cryptd.c 	struct crypto_sync_skcipher *child = ctx->child;
ctx               317 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               318 crypto/cryptd.c 	struct crypto_sync_skcipher *child = ctx->child;
ctx               368 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               375 crypto/cryptd.c 	ctx->child = (struct crypto_sync_skcipher *)cipher;
ctx               383 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               385 crypto/cryptd.c 	crypto_free_sync_skcipher(ctx->child);
ctx               390 crypto/cryptd.c 	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);
ctx               392 crypto/cryptd.c 	crypto_drop_skcipher(&ctx->spawn);
ctx               400 crypto/cryptd.c 	struct skcipherd_instance_ctx *ctx;
ctx               417 crypto/cryptd.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               421 crypto/cryptd.c 	ctx = skcipher_instance_ctx(inst);
ctx               422 crypto/cryptd.c 	ctx->queue = queue;
ctx               424 crypto/cryptd.c 	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
ctx               425 crypto/cryptd.c 	err = crypto_grab_skcipher(&ctx->spawn, name, type, mask);
ctx               429 crypto/cryptd.c 	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
ctx               456 crypto/cryptd.c 		crypto_drop_skcipher(&ctx->spawn);
ctx               468 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               475 crypto/cryptd.c 	ctx->child = hash;
ctx               484 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               486 crypto/cryptd.c 	crypto_free_shash(ctx->child);
ctx               492 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
ctx               493 crypto/cryptd.c 	struct crypto_shash *child = ctx->child;
ctx               522 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               524 crypto/cryptd.c 	int refcnt = refcount_read(&ctx->refcnt);
ctx               530 crypto/cryptd.c 	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
ctx               536 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
ctx               537 crypto/cryptd.c 	struct crypto_shash *child = ctx->child;
ctx               627 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
ctx               628 crypto/cryptd.c 	struct crypto_shash *child = ctx->child;
ctx               661 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               664 crypto/cryptd.c 	desc->tfm = ctx->child;
ctx               672 crypto/cryptd.c 	struct hashd_instance_ctx *ctx;
ctx               688 crypto/cryptd.c 				     sizeof(*ctx));
ctx               693 crypto/cryptd.c 	ctx = ahash_instance_ctx(inst);
ctx               694 crypto/cryptd.c 	ctx->queue = queue;
ctx               696 crypto/cryptd.c 	err = crypto_init_shash_spawn(&ctx->spawn, salg,
ctx               724 crypto/cryptd.c 		crypto_drop_shash(&ctx->spawn);
ctx               737 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
ctx               738 crypto/cryptd.c 	struct crypto_aead *child = ctx->child;
ctx               746 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
ctx               747 crypto/cryptd.c 	struct crypto_aead *child = ctx->child;
ctx               758 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx;
ctx               774 crypto/cryptd.c 	ctx = crypto_aead_ctx(tfm);
ctx               775 crypto/cryptd.c 	refcnt = refcount_read(&ctx->refcnt);
ctx               781 crypto/cryptd.c 	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
ctx               787 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
ctx               788 crypto/cryptd.c 	struct crypto_aead *child = ctx->child;
ctx               797 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
ctx               798 crypto/cryptd.c 	struct crypto_aead *child = ctx->child;
ctx               832 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               839 crypto/cryptd.c 	ctx->child = cipher;
ctx               848 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               849 crypto/cryptd.c 	crypto_free_aead(ctx->child);
ctx               856 crypto/cryptd.c 	struct aead_instance_ctx *ctx;
ctx               870 crypto/cryptd.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               874 crypto/cryptd.c 	ctx = aead_instance_ctx(inst);
ctx               875 crypto/cryptd.c 	ctx->queue = queue;
ctx               877 crypto/cryptd.c 	crypto_set_aead_spawn(&ctx->aead_spawn, aead_crypto_instance(inst));
ctx               878 crypto/cryptd.c 	err = crypto_grab_aead(&ctx->aead_spawn, name, type, mask);
ctx               882 crypto/cryptd.c 	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
ctx               904 crypto/cryptd.c 		crypto_drop_aead(&ctx->aead_spawn);
ctx               935 crypto/cryptd.c 	struct cryptd_instance_ctx *ctx = crypto_instance_ctx(inst);
ctx               949 crypto/cryptd.c 		crypto_drop_spawn(&ctx->spawn);
ctx               965 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx;
ctx               981 crypto/cryptd.c 	ctx = crypto_skcipher_ctx(tfm);
ctx               982 crypto/cryptd.c 	refcount_set(&ctx->refcnt, 1);
ctx               990 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
ctx               992 crypto/cryptd.c 	return &ctx->child->base;
ctx               998 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
ctx              1000 crypto/cryptd.c 	return refcount_read(&ctx->refcnt) - 1;
ctx              1006 crypto/cryptd.c 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
ctx              1008 crypto/cryptd.c 	if (refcount_dec_and_test(&ctx->refcnt))
ctx              1017 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx;
ctx              1031 crypto/cryptd.c 	ctx = crypto_ahash_ctx(tfm);
ctx              1032 crypto/cryptd.c 	refcount_set(&ctx->refcnt, 1);
ctx              1040 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
ctx              1042 crypto/cryptd.c 	return ctx->child;
ctx              1055 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
ctx              1057 crypto/cryptd.c 	return refcount_read(&ctx->refcnt) - 1;
ctx              1063 crypto/cryptd.c 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
ctx              1065 crypto/cryptd.c 	if (refcount_dec_and_test(&ctx->refcnt))
ctx              1074 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx;
ctx              1088 crypto/cryptd.c 	ctx = crypto_aead_ctx(tfm);
ctx              1089 crypto/cryptd.c 	refcount_set(&ctx->refcnt, 1);
ctx              1097 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx;
ctx              1098 crypto/cryptd.c 	ctx = crypto_aead_ctx(&tfm->base);
ctx              1099 crypto/cryptd.c 	return ctx->child;
ctx              1105 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
ctx              1107 crypto/cryptd.c 	return refcount_read(&ctx->refcnt) - 1;
ctx              1113 crypto/cryptd.c 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
ctx              1115 crypto/cryptd.c 	if (refcount_dec_and_test(&ctx->refcnt))
ctx               172 crypto/ctr.c   	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
ctx               173 crypto/ctr.c   	struct crypto_skcipher *child = ctx->child;
ctx               180 crypto/ctr.c   	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
ctx               198 crypto/ctr.c   	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               199 crypto/ctr.c   	struct crypto_skcipher *child = ctx->child;
ctx               207 crypto/ctr.c   	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
ctx               227 crypto/ctr.c   	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               236 crypto/ctr.c   	ctx->child = cipher;
ctx               249 crypto/ctr.c   	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               251 crypto/ctr.c   	crypto_free_skcipher(ctx->child);
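
The rfc3686 lines above (crypto/ctr.c 180, 207) peel a 4-byte nonce off the end of the key and copy it into the front of the counter block. The resulting RFC 3686 block is nonce || per-request IV || 32-bit big-endian counter starting at 1; a sketch:

/* RFC 3686 counter-block construction. */
#include <stdint.h>
#include <string.h>

#define CTR_RFC3686_NONCE_SIZE 4
#define CTR_RFC3686_IV_SIZE    8

static void rfc3686_build_ctrblk(uint8_t blk[16], const uint8_t *nonce,
				 const uint8_t *iv)
{
	memcpy(blk, nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(blk + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
	/* initial counter value of 1, big-endian */
	blk[12] = 0; blk[13] = 0; blk[14] = 0; blk[15] = 1;
}
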
ctx                69 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                70 crypto/cts.c   	struct crypto_skcipher *child = ctx->child;
ctx                79 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
ctx                80 crypto/cts.c   	struct crypto_skcipher *child = ctx->child;
ctx               151 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               157 crypto/cts.c   	skcipher_request_set_tfm(subreq, ctx->child);
ctx               245 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               252 crypto/cts.c   	skcipher_request_set_tfm(subreq, ctx->child);
ctx               291 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               301 crypto/cts.c   	ctx->child = cipher;
ctx               317 crypto/cts.c   	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               319 crypto/cts.c   	crypto_free_skcipher(ctx->child);
ctx                42 crypto/deflate.c static int deflate_comp_init(struct deflate_ctx *ctx, int format)
ctx                45 crypto/deflate.c 	struct z_stream_s *stream = &ctx->comp_stream;
ctx                71 crypto/deflate.c static int deflate_decomp_init(struct deflate_ctx *ctx, int format)
ctx                74 crypto/deflate.c 	struct z_stream_s *stream = &ctx->decomp_stream;
ctx                96 crypto/deflate.c static void deflate_comp_exit(struct deflate_ctx *ctx)
ctx                98 crypto/deflate.c 	zlib_deflateEnd(&ctx->comp_stream);
ctx                99 crypto/deflate.c 	vfree(ctx->comp_stream.workspace);
ctx               102 crypto/deflate.c static void deflate_decomp_exit(struct deflate_ctx *ctx)
ctx               104 crypto/deflate.c 	zlib_inflateEnd(&ctx->decomp_stream);
ctx               105 crypto/deflate.c 	vfree(ctx->decomp_stream.workspace);
ctx               108 crypto/deflate.c static int __deflate_init(void *ctx, int format)
ctx               112 crypto/deflate.c 	ret = deflate_comp_init(ctx, format);
ctx               115 crypto/deflate.c 	ret = deflate_decomp_init(ctx, format);
ctx               117 crypto/deflate.c 		deflate_comp_exit(ctx);
ctx               124 crypto/deflate.c 	struct deflate_ctx *ctx;
ctx               127 crypto/deflate.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               128 crypto/deflate.c 	if (!ctx)
ctx               131 crypto/deflate.c 	ret = __deflate_init(ctx, format);
ctx               133 crypto/deflate.c 		kfree(ctx);
ctx               137 crypto/deflate.c 	return ctx;
ctx               152 crypto/deflate.c 	struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               154 crypto/deflate.c 	return __deflate_init(ctx, 0);
ctx               157 crypto/deflate.c static void __deflate_exit(void *ctx)
ctx               159 crypto/deflate.c 	deflate_comp_exit(ctx);
ctx               160 crypto/deflate.c 	deflate_decomp_exit(ctx);
ctx               163 crypto/deflate.c static void deflate_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx               165 crypto/deflate.c 	__deflate_exit(ctx);
ctx               166 crypto/deflate.c 	kzfree(ctx);
ctx               171 crypto/deflate.c 	struct deflate_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               173 crypto/deflate.c 	__deflate_exit(ctx);
ctx               177 crypto/deflate.c 			      u8 *dst, unsigned int *dlen, void *ctx)
ctx               180 crypto/deflate.c 	struct deflate_ctx *dctx = ctx;
ctx               215 crypto/deflate.c 			     void *ctx)
ctx               217 crypto/deflate.c 	return __deflate_compress(src, slen, dst, dlen, ctx);
ctx               221 crypto/deflate.c 				u8 *dst, unsigned int *dlen, void *ctx)
ctx               225 crypto/deflate.c 	struct deflate_ctx *dctx = ctx;
ctx               271 crypto/deflate.c 			       void *ctx)
ctx               273 crypto/deflate.c 	return __deflate_decompress(src, slen, dst, dlen, ctx);
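
__deflate_init() above pairs the two stream initializers and tears the compressor back down if the decompressor fails, so no workspace leaks. A userspace analogue with stock zlib, as a sketch: the kernel uses its own zlib copy with preallocated workspaces and DEFLATE_DEF_WINBITS (11, raw deflate via the negative windowBits convention), so the parameters here are assumptions.

/* Paired comp/decomp init with cleanup on partial failure. */
#include <zlib.h>
#include <string.h>

struct deflate_ctx { z_stream comp, decomp; };

static int deflate_ctx_init(struct deflate_ctx *c)
{
	memset(c, 0, sizeof(*c));
	if (deflateInit2(&c->comp, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
			 -11, 8, Z_DEFAULT_STRATEGY) != Z_OK)
		return -1;
	if (inflateInit2(&c->decomp, -11) != Z_OK) {
		deflateEnd(&c->comp);   /* mirror deflate_comp_exit() */
		return -1;
	}
	return 0;
}
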
ctx                21 crypto/dh.c    static void dh_clear_ctx(struct dh_ctx *ctx)
ctx                23 crypto/dh.c    	mpi_free(ctx->p);
ctx                24 crypto/dh.c    	mpi_free(ctx->q);
ctx                25 crypto/dh.c    	mpi_free(ctx->g);
ctx                26 crypto/dh.c    	mpi_free(ctx->xa);
ctx                27 crypto/dh.c    	memset(ctx, 0, sizeof(*ctx));
ctx                36 crypto/dh.c    static int _compute_val(const struct dh_ctx *ctx, MPI base, MPI val)
ctx                39 crypto/dh.c    	return mpi_powm(val, base, ctx->xa, ctx->p);
ctx                52 crypto/dh.c    static int dh_set_params(struct dh_ctx *ctx, struct dh *params)
ctx                57 crypto/dh.c    	ctx->p = mpi_read_raw_data(params->p, params->p_size);
ctx                58 crypto/dh.c    	if (!ctx->p)
ctx                62 crypto/dh.c    		ctx->q = mpi_read_raw_data(params->q, params->q_size);
ctx                63 crypto/dh.c    		if (!ctx->q)
ctx                67 crypto/dh.c    	ctx->g = mpi_read_raw_data(params->g, params->g_size);
ctx                68 crypto/dh.c    	if (!ctx->g)
ctx                77 crypto/dh.c    	struct dh_ctx *ctx = dh_get_ctx(tfm);
ctx                81 crypto/dh.c    	dh_clear_ctx(ctx);
ctx                86 crypto/dh.c    	if (dh_set_params(ctx, &params) < 0)
ctx                89 crypto/dh.c    	ctx->xa = mpi_read_raw_data(params.key, params.key_size);
ctx                90 crypto/dh.c    	if (!ctx->xa)
ctx                96 crypto/dh.c    	dh_clear_ctx(ctx);
ctx               109 crypto/dh.c    static int dh_is_pubkey_valid(struct dh_ctx *ctx, MPI y)
ctx               111 crypto/dh.c    	if (unlikely(!ctx->p))
ctx               120 crypto/dh.c    	if (mpi_cmp_ui(y, 1) < 1 || mpi_cmp(y, ctx->p) >= 0)
ctx               124 crypto/dh.c    	if (ctx->q) {
ctx               131 crypto/dh.c    		ret = mpi_powm(val, y, ctx->q, ctx->p);
ctx               152 crypto/dh.c    	struct dh_ctx *ctx = dh_get_ctx(tfm);
ctx               160 crypto/dh.c    	if (unlikely(!ctx->xa)) {
ctx               171 crypto/dh.c    		ret = dh_is_pubkey_valid(ctx, base);
ctx               175 crypto/dh.c    		base = ctx->g;
ctx               178 crypto/dh.c    	ret = _compute_val(ctx, base, val);
ctx               198 crypto/dh.c    	struct dh_ctx *ctx = dh_get_ctx(tfm);
ctx               200 crypto/dh.c    	return mpi_get_size(ctx->p);
ctx               205 crypto/dh.c    	struct dh_ctx *ctx = dh_get_ctx(tfm);
ctx               207 crypto/dh.c    	dh_clear_ctx(ctx);
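
The dh.c lines above compute the shared value as base^xa mod p (_compute_val) and accept a peer public key y only if 1 < y < p and, when the subgroup order q is known, y^q mod p == 1 (dh_is_pubkey_valid). A toy illustration with 64-bit integers instead of MPIs, using GCC/Clang's unsigned __int128 for the intermediate products:

/* Toy modexp and DH public-key check; not constant-time, demo only. */
#include <stdint.h>

static uint64_t modpow(uint64_t b, uint64_t e, uint64_t m)
{
	unsigned __int128 r = 1, x = b % m;

	for (; e; e >>= 1, x = (x * x) % m)
		if (e & 1)
			r = (r * x) % m;
	return (uint64_t)r;
}

static int dh_pubkey_ok(uint64_t y, uint64_t p, uint64_t q)
{
	if (y <= 1 || y >= p)            /* reject trivial keys */
		return 0;
	if (q && modpow(y, q, p) != 1)   /* subgroup membership */
		return 0;
	return 1;
}
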
ctx              1657 crypto/drbg.c  	char ctx[];
ctx                38 crypto/ecdh.c  	struct ecdh_ctx *ctx = ecdh_get_ctx(tfm);
ctx                49 crypto/ecdh.c  	ctx->curve_id = params.curve_id;
ctx                50 crypto/ecdh.c  	ctx->ndigits = ndigits;
ctx                53 crypto/ecdh.c  		return ecc_gen_privkey(ctx->curve_id, ctx->ndigits,
ctx                54 crypto/ecdh.c  				       ctx->private_key);
ctx                56 crypto/ecdh.c  	if (ecc_is_key_valid(ctx->curve_id, ctx->ndigits,
ctx                60 crypto/ecdh.c  	memcpy(ctx->private_key, params.key, params.key_size);
ctx                68 crypto/ecdh.c  	struct ecdh_ctx *ctx = ecdh_get_ctx(tfm);
ctx                75 crypto/ecdh.c  	nbytes = ctx->ndigits << ECC_DIGITS_TO_BYTES_SHIFT;
ctx               102 crypto/ecdh.c  		ret = crypto_ecdh_shared_secret(ctx->curve_id, ctx->ndigits,
ctx               103 crypto/ecdh.c  						ctx->private_key, public_key,
ctx               108 crypto/ecdh.c  		ret = ecc_make_pub_key(ctx->curve_id, ctx->ndigits,
ctx               109 crypto/ecdh.c  				       ctx->private_key, public_key);
ctx               135 crypto/ecdh.c  	struct ecdh_ctx *ctx = ecdh_get_ctx(tfm);
ctx               138 crypto/ecdh.c  	return ctx->ndigits << (ECC_DIGITS_TO_BYTES_SHIFT + 1);
ctx                29 crypto/echainiv.c 	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
ctx                40 crypto/echainiv.c 	aead_request_set_tfm(subreq, ctx->child);
ctx                45 crypto/echainiv.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
ctx                47 crypto/echainiv.c 		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
ctx                74 crypto/echainiv.c 		memcpy(&a, ctx->salt + ivsize - 8, 8);
ctx                88 crypto/echainiv.c 	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
ctx                97 crypto/echainiv.c 	aead_request_set_tfm(subreq, ctx->child);
ctx                73 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                91 crypto/ecrdsa.c 	if (!ctx->curve ||
ctx                92 crypto/ecrdsa.c 	    !ctx->digest ||
ctx                94 crypto/ecrdsa.c 	    !ctx->pub_key.x ||
ctx                95 crypto/ecrdsa.c 	    req->dst_len != ctx->digest_len ||
ctx                96 crypto/ecrdsa.c 	    req->dst_len != ctx->curve->g.ndigits * sizeof(u64) ||
ctx                97 crypto/ecrdsa.c 	    ctx->pub_key.ndigits != ctx->curve->g.ndigits ||
ctx               115 crypto/ecrdsa.c 	    vli_cmp(r, ctx->curve->n, ndigits) == 1 ||
ctx               117 crypto/ecrdsa.c 	    vli_cmp(s, ctx->curve->n, ndigits) == 1)
ctx               123 crypto/ecrdsa.c 	if (vli_cmp(e, ctx->curve->n, ndigits) == 1)
ctx               124 crypto/ecrdsa.c 		vli_sub(e, e, ctx->curve->n, ndigits);
ctx               129 crypto/ecrdsa.c 	vli_mod_inv(v, e, ctx->curve->n, ndigits);
ctx               132 crypto/ecrdsa.c 	vli_mod_mult_slow(z1, s, v, ctx->curve->n, ndigits);
ctx               133 crypto/ecrdsa.c 	vli_sub(_r, ctx->curve->n, r, ndigits);
ctx               134 crypto/ecrdsa.c 	vli_mod_mult_slow(z2, _r, v, ctx->curve->n, ndigits);
ctx               137 crypto/ecrdsa.c 	ecc_point_mult_shamir(&cc, z1, &ctx->curve->g, z2, &ctx->pub_key,
ctx               138 crypto/ecrdsa.c 			      ctx->curve);
ctx               139 crypto/ecrdsa.c 	if (vli_cmp(cc.x, ctx->curve->n, ndigits) == 1)
ctx               140 crypto/ecrdsa.c 		vli_sub(cc.x, cc.x, ctx->curve->n, ndigits);
ctx               152 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = context;
ctx               154 crypto/ecrdsa.c 	ctx->curve_oid = look_up_OID(value, vlen);
ctx               155 crypto/ecrdsa.c 	if (!ctx->curve_oid)
ctx               157 crypto/ecrdsa.c 	ctx->curve = get_curve_by_oid(ctx->curve_oid);
ctx               165 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = context;
ctx               168 crypto/ecrdsa.c 	if (digest_oid != ctx->digest_oid)
ctx               176 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = context;
ctx               178 crypto/ecrdsa.c 	ctx->key = value;
ctx               179 crypto/ecrdsa.c 	ctx->key_len = vlen;
ctx               193 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               199 crypto/ecrdsa.c 	err = asn1_ber_decoder(&ecrdsa_pub_key_decoder, ctx, key, keylen);
ctx               208 crypto/ecrdsa.c 		ctx->digest	= "streebog256";
ctx               209 crypto/ecrdsa.c 		ctx->digest_oid	= OID_gost2012Digest256;
ctx               210 crypto/ecrdsa.c 		ctx->digest_len	= 256 / 8;
ctx               212 crypto/ecrdsa.c 		ctx->digest	= "streebog512";
ctx               213 crypto/ecrdsa.c 		ctx->digest_oid	= OID_gost2012Digest512;
ctx               214 crypto/ecrdsa.c 		ctx->digest_len	= 512 / 8;
ctx               217 crypto/ecrdsa.c 	ctx->algo_oid = algo;
ctx               220 crypto/ecrdsa.c 	err = asn1_ber_decoder(&ecrdsa_params_decoder, ctx, params, paramlen);
ctx               227 crypto/ecrdsa.c 	if (!ctx->curve ||
ctx               228 crypto/ecrdsa.c 	    ctx->curve->g.ndigits * sizeof(u64) != ctx->digest_len)
ctx               234 crypto/ecrdsa.c 	if ((ctx->key_len != (2 * 256 / 8) &&
ctx               235 crypto/ecrdsa.c 	     ctx->key_len != (2 * 512 / 8)) ||
ctx               236 crypto/ecrdsa.c 	    ctx->key_len != ctx->curve->g.ndigits * sizeof(u64) * 2)
ctx               239 crypto/ecrdsa.c 	ndigits = ctx->key_len / sizeof(u64) / 2;
ctx               240 crypto/ecrdsa.c 	ctx->pub_key = ECC_POINT_INIT(ctx->_pubp[0], ctx->_pubp[1], ndigits);
ctx               241 crypto/ecrdsa.c 	vli_from_le64(ctx->pub_key.x, ctx->key, ndigits);
ctx               242 crypto/ecrdsa.c 	vli_from_le64(ctx->pub_key.y, ctx->key + ndigits * sizeof(u64),
ctx               245 crypto/ecrdsa.c 	if (ecc_is_pubkey_valid_partial(ctx->curve, &ctx->pub_key))
ctx               253 crypto/ecrdsa.c 	struct ecrdsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               259 crypto/ecrdsa.c 	return ctx->pub_key.ndigits * sizeof(u64);
ctx               238 crypto/fcrypt.c 	const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               245 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x0]);
ctx               246 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x1]);
ctx               247 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x2]);
ctx               248 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x3]);
ctx               249 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x4]);
ctx               250 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x5]);
ctx               251 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x6]);
ctx               252 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x7]);
ctx               253 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x8]);
ctx               254 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x9]);
ctx               255 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xa]);
ctx               256 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xb]);
ctx               257 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xc]);
ctx               258 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xd]);
ctx               259 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xe]);
ctx               260 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xf]);
ctx               270 crypto/fcrypt.c 	const struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               277 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xf]);
ctx               278 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xe]);
ctx               279 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xd]);
ctx               280 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xc]);
ctx               281 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0xb]);
ctx               282 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0xa]);
ctx               283 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x9]);
ctx               284 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x8]);
ctx               285 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x7]);
ctx               286 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x6]);
ctx               287 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x5]);
ctx               288 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x4]);
ctx               289 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x3]);
ctx               290 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x2]);
ctx               291 crypto/fcrypt.c 	F_ENCRYPT(X.l, X.r, ctx->sched[0x1]);
ctx               292 crypto/fcrypt.c 	F_ENCRYPT(X.r, X.l, ctx->sched[0x0]);
ctx               305 crypto/fcrypt.c 	struct fcrypt_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               331 crypto/fcrypt.c 	ctx->sched[0x0] = cpu_to_be32(k); ror56_64(k, 11);
ctx               332 crypto/fcrypt.c 	ctx->sched[0x1] = cpu_to_be32(k); ror56_64(k, 11);
ctx               333 crypto/fcrypt.c 	ctx->sched[0x2] = cpu_to_be32(k); ror56_64(k, 11);
ctx               334 crypto/fcrypt.c 	ctx->sched[0x3] = cpu_to_be32(k); ror56_64(k, 11);
ctx               335 crypto/fcrypt.c 	ctx->sched[0x4] = cpu_to_be32(k); ror56_64(k, 11);
ctx               336 crypto/fcrypt.c 	ctx->sched[0x5] = cpu_to_be32(k); ror56_64(k, 11);
ctx               337 crypto/fcrypt.c 	ctx->sched[0x6] = cpu_to_be32(k); ror56_64(k, 11);
ctx               338 crypto/fcrypt.c 	ctx->sched[0x7] = cpu_to_be32(k); ror56_64(k, 11);
ctx               339 crypto/fcrypt.c 	ctx->sched[0x8] = cpu_to_be32(k); ror56_64(k, 11);
ctx               340 crypto/fcrypt.c 	ctx->sched[0x9] = cpu_to_be32(k); ror56_64(k, 11);
ctx               341 crypto/fcrypt.c 	ctx->sched[0xa] = cpu_to_be32(k); ror56_64(k, 11);
ctx               342 crypto/fcrypt.c 	ctx->sched[0xb] = cpu_to_be32(k); ror56_64(k, 11);
ctx               343 crypto/fcrypt.c 	ctx->sched[0xc] = cpu_to_be32(k); ror56_64(k, 11);
ctx               344 crypto/fcrypt.c 	ctx->sched[0xd] = cpu_to_be32(k); ror56_64(k, 11);
ctx               345 crypto/fcrypt.c 	ctx->sched[0xe] = cpu_to_be32(k); ror56_64(k, 11);
ctx               346 crypto/fcrypt.c 	ctx->sched[0xf] = cpu_to_be32(k);
ctx               372 crypto/fcrypt.c 	ctx->sched[0x0] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               373 crypto/fcrypt.c 	ctx->sched[0x1] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               374 crypto/fcrypt.c 	ctx->sched[0x2] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               375 crypto/fcrypt.c 	ctx->sched[0x3] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               376 crypto/fcrypt.c 	ctx->sched[0x4] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               377 crypto/fcrypt.c 	ctx->sched[0x5] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               378 crypto/fcrypt.c 	ctx->sched[0x6] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               379 crypto/fcrypt.c 	ctx->sched[0x7] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               380 crypto/fcrypt.c 	ctx->sched[0x8] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               381 crypto/fcrypt.c 	ctx->sched[0x9] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               382 crypto/fcrypt.c 	ctx->sched[0xa] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               383 crypto/fcrypt.c 	ctx->sched[0xb] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               384 crypto/fcrypt.c 	ctx->sched[0xc] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               385 crypto/fcrypt.c 	ctx->sched[0xd] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               386 crypto/fcrypt.c 	ctx->sched[0xe] = cpu_to_be32(lo); ror56(hi, lo, 11);
ctx               387 crypto/fcrypt.c 	ctx->sched[0xf] = cpu_to_be32(lo);
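
Both fcrypt key-schedule variants above emit sixteen subkeys by repeatedly rotating a 56-bit register right by 11 bits; the 32-bit path merely splits that register across two words. A compact loop form of the unrolled ror56_64() sequence, storing host-order words where the kernel stores cpu_to_be32(k):

/* fcrypt key schedule: 16 subkeys from a 56-bit rotating register. */
#include <stdint.h>

static void fcrypt_expand(uint64_t k /* low 56 bits used */, uint32_t sched[16])
{
	for (int i = 0; i < 16; i++) {
		sched[i] = (uint32_t)k;
		/* ror56_64(k, 11): rotate the 56-bit value right by 11 */
		k = ((k >> 11) | ((k & 0x7ff) << (56 - 11)))
			& 0xffffffffffffffULL;
	}
}
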
ctx                96 crypto/gcm.c   	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
ctx                97 crypto/gcm.c   	struct crypto_ahash *ghash = ctx->ghash;
ctx                98 crypto/gcm.c   	struct crypto_skcipher *ctr = ctx->ctr;
ctx               187 crypto/gcm.c   	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
ctx               194 crypto/gcm.c   	skcipher_request_set_tfm(skreq, ctx->ctr);
ctx               408 crypto/gcm.c   	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               410 crypto/gcm.c   	ahash_request_set_tfm(ahreq, ctx->ghash);
ctx               531 crypto/gcm.c   	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
ctx               546 crypto/gcm.c   	ctx->ctr = ctr;
ctx               547 crypto/gcm.c   	ctx->ghash = ghash;
ctx               567 crypto/gcm.c   	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
ctx               569 crypto/gcm.c   	crypto_free_ahash(ctx->ghash);
ctx               570 crypto/gcm.c   	crypto_free_skcipher(ctx->ctr);
ctx               575 crypto/gcm.c   	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               577 crypto/gcm.c   	crypto_drop_skcipher(&ctx->ctr);
ctx               578 crypto/gcm.c   	crypto_drop_ahash(&ctx->ghash);
ctx               592 crypto/gcm.c   	struct gcm_instance_ctx *ctx;
ctx               613 crypto/gcm.c   	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               617 crypto/gcm.c   	ctx = aead_instance_ctx(inst);
ctx               618 crypto/gcm.c   	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
ctx               628 crypto/gcm.c   	crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
ctx               629 crypto/gcm.c   	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
ctx               635 crypto/gcm.c   	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);
ctx               684 crypto/gcm.c   	crypto_drop_skcipher(&ctx->ctr);
ctx               686 crypto/gcm.c   	crypto_drop_ahash(&ctx->ghash);
ctx               728 crypto/gcm.c   	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
ctx               729 crypto/gcm.c   	struct crypto_aead *child = ctx->child;
ctx               736 crypto/gcm.c   	memcpy(ctx->nonce, key + keylen, 4);
ctx               751 crypto/gcm.c   	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
ctx               758 crypto/gcm.c   	return crypto_aead_setauthsize(ctx->child, authsize);
ctx               765 crypto/gcm.c   	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
ctx               767 crypto/gcm.c   	struct crypto_aead *child = ctx->child;
ctx               774 crypto/gcm.c   	memcpy(iv, ctx->nonce, 4);
ctx               832 crypto/gcm.c   	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
ctx               840 crypto/gcm.c   	ctx->child = aead;
ctx               855 crypto/gcm.c   	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
ctx               857 crypto/gcm.c   	crypto_free_aead(ctx->child);
ctx               957 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
ctx               958 crypto/gcm.c   	struct crypto_aead *child = ctx->child;
ctx               965 crypto/gcm.c   	memcpy(ctx->nonce, key + keylen, 4);
ctx               980 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
ctx               985 crypto/gcm.c   	return crypto_aead_setauthsize(ctx->child, authsize);
ctx               991 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
ctx               995 crypto/gcm.c   	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
ctx               996 crypto/gcm.c   			   crypto_aead_alignmask(ctx->child) + 1);
ctx              1005 crypto/gcm.c   	memcpy(iv, ctx->nonce, 4);
ctx              1008 crypto/gcm.c   	aead_request_set_tfm(subreq, ctx->child);
ctx              1022 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
ctx              1026 crypto/gcm.c   	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);
ctx              1028 crypto/gcm.c   	skcipher_request_set_sync_tfm(nreq, ctx->null);
ctx              1052 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1067 crypto/gcm.c   	ctx->child = aead;
ctx              1068 crypto/gcm.c   	ctx->null = null;
ctx              1087 crypto/gcm.c   	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1089 crypto/gcm.c   	crypto_free_aead(ctx->child);
ctx              1095 crypto/gcm.c   	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);
ctx              1097 crypto/gcm.c   	crypto_drop_aead(&ctx->aead);
ctx              1109 crypto/gcm.c   	struct crypto_rfc4543_instance_ctx *ctx;
ctx              1124 crypto/gcm.c   	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx              1128 crypto/gcm.c   	ctx = aead_instance_ctx(inst);
ctx              1129 crypto/gcm.c   	spawn = &ctx->aead;
ctx                58 crypto/ghash-generic.c 	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
ctx                66 crypto/ghash-generic.c 	if (ctx->gf128)
ctx                67 crypto/ghash-generic.c 		gf128mul_free_4k(ctx->gf128);
ctx                71 crypto/ghash-generic.c 	ctx->gf128 = gf128mul_init_4k_lle(&k);
ctx                74 crypto/ghash-generic.c 	if (!ctx->gf128)
ctx                84 crypto/ghash-generic.c 	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                98 crypto/ghash-generic.c 			gf128mul_4k_lle((be128 *)dst, ctx->gf128);
ctx               103 crypto/ghash-generic.c 		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
ctx               117 crypto/ghash-generic.c static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
ctx               127 crypto/ghash-generic.c 		gf128mul_4k_lle((be128 *)dst, ctx->gf128);
ctx               136 crypto/ghash-generic.c 	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx               139 crypto/ghash-generic.c 	ghash_flush(ctx, dctx);
ctx               147 crypto/ghash-generic.c 	struct ghash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               148 crypto/ghash-generic.c 	if (ctx->gf128)
ctx               149 crypto/ghash-generic.c 		gf128mul_free_4k(ctx->gf128);
ctx                48 crypto/hmac.c  	struct hmac_ctx *ctx = align_ptr(opad + ss,
ctx                50 crypto/hmac.c  	struct crypto_shash *hash = ctx->hash;
ctx                93 crypto/hmac.c  	struct hmac_ctx *ctx = hmac_ctx(pdesc->tfm);
ctx                95 crypto/hmac.c  	desc->tfm = ctx->hash;
ctx               147 crypto/hmac.c  	struct hmac_ctx *ctx = hmac_ctx(parent);
ctx               160 crypto/hmac.c  	ctx->hash = hash;
ctx               166 crypto/hmac.c  	struct hmac_ctx *ctx = hmac_ctx(__crypto_shash_cast(tfm));
ctx               167 crypto/hmac.c  	crypto_free_shash(ctx->hash);
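
crypto/hmac.c above precomputes the inner and outer pads at setkey time; the construction itself is the textbook HMAC: H((K ^ opad) || H((K ^ ipad) || msg)) with ipad/opad bytes 0x36/0x5c repeated to the hash block size. A sketch with an assumed two-segment hash() primitive (keys longer than one block are first hashed down, omitted here as the kernel's setkey handles it):

/* Textbook HMAC over an assumed hash primitive. */
#include <stddef.h>

#define BS 64   /* e.g. SHA-256 block size */
#define DS 32   /* digest size */

void hash(const unsigned char *p1, size_t n1,
	  const unsigned char *p2, size_t n2,
	  unsigned char out[DS]); /* assumed */

static void hmac(const unsigned char *key, size_t klen,
		 const unsigned char *msg, size_t mlen, unsigned char out[DS])
{
	unsigned char ipad[BS], opad[BS], inner[DS];
	size_t i;

	for (i = 0; i < BS; i++) {
		unsigned char k = i < klen ? key[i] : 0;

		ipad[i] = k ^ 0x36;
		opad[i] = k ^ 0x5c;
	}
	hash(ipad, BS, msg, mlen, inner);   /* inner = H(ipad || msg) */
	hash(opad, BS, inner, DS, out);     /* out = H(opad || inner) */
}
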
ctx               759 crypto/khazad.c 	struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               771 crypto/khazad.c 		ctx->E[r] = T0[(int)(K1 >> 56)       ] ^
ctx               781 crypto/khazad.c 		K1 = ctx->E[r];
ctx               784 crypto/khazad.c 	ctx->D[0] = ctx->E[KHAZAD_ROUNDS];
ctx               786 crypto/khazad.c 		K1 = ctx->E[KHAZAD_ROUNDS - r];
ctx               787 crypto/khazad.c 		ctx->D[r] = T0[(int)S[(int)(K1 >> 56)       ] & 0xff] ^
ctx               796 crypto/khazad.c 	ctx->D[KHAZAD_ROUNDS] = ctx->E[0];
ctx               839 crypto/khazad.c 	struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               840 crypto/khazad.c 	khazad_crypt(ctx->E, dst, src);
ctx               845 crypto/khazad.c 	struct khazad_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               846 crypto/khazad.c 	khazad_crypt(ctx->D, dst, src);
ctx                71 crypto/lrw.c   	struct priv *ctx = crypto_skcipher_ctx(parent);
ctx                72 crypto/lrw.c   	struct crypto_skcipher *child = ctx->child;
ctx                87 crypto/lrw.c   	if (ctx->table)
ctx                88 crypto/lrw.c   		gf128mul_free_64k(ctx->table);
ctx                91 crypto/lrw.c   	ctx->table = gf128mul_init_64k_bbe((be128 *)tweak);
ctx                92 crypto/lrw.c   	if (!ctx->table)
ctx                98 crypto/lrw.c   		ctx->mulinc[i] = tmp;
ctx                99 crypto/lrw.c   		gf128mul_64k_bbe(&ctx->mulinc[i], ctx->table);
ctx               146 crypto/lrw.c   	struct priv *ctx = crypto_skcipher_ctx(tfm);
ctx               183 crypto/lrw.c   			be128_xor(&t, &t, &ctx->mulinc[next_index(counter)]);
ctx               225 crypto/lrw.c   	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx               229 crypto/lrw.c   	skcipher_request_set_tfm(subreq, ctx->child);
ctx               239 crypto/lrw.c   	gf128mul_64k_bbe(&rctx->t, ctx->table);
ctx               268 crypto/lrw.c   	struct priv *ctx = crypto_skcipher_ctx(tfm);
ctx               275 crypto/lrw.c   	ctx->child = cipher;
ctx               285 crypto/lrw.c   	struct priv *ctx = crypto_skcipher_ctx(tfm);
ctx               287 crypto/lrw.c   	if (ctx->table)
ctx               288 crypto/lrw.c   		gf128mul_free_64k(ctx->table);
ctx               289 crypto/lrw.c   	crypto_free_skcipher(ctx->child);
ctx                21 crypto/lz4.c   	void *ctx;
ctx                23 crypto/lz4.c   	ctx = vmalloc(LZ4_MEM_COMPRESS);
ctx                24 crypto/lz4.c   	if (!ctx)
ctx                27 crypto/lz4.c   	return ctx;
ctx                32 crypto/lz4.c   	struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                34 crypto/lz4.c   	ctx->lz4_comp_mem = lz4_alloc_ctx(NULL);
ctx                35 crypto/lz4.c   	if (IS_ERR(ctx->lz4_comp_mem))
ctx                41 crypto/lz4.c   static void lz4_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx                43 crypto/lz4.c   	vfree(ctx);
ctx                48 crypto/lz4.c   	struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                50 crypto/lz4.c   	lz4_free_ctx(NULL, ctx->lz4_comp_mem);
ctx                54 crypto/lz4.c   				 u8 *dst, unsigned int *dlen, void *ctx)
ctx                57 crypto/lz4.c   		slen, *dlen, ctx);
ctx                68 crypto/lz4.c   			 void *ctx)
ctx                70 crypto/lz4.c   	return __lz4_compress_crypto(src, slen, dst, dlen, ctx);
ctx                76 crypto/lz4.c   	struct lz4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                78 crypto/lz4.c   	return __lz4_compress_crypto(src, slen, dst, dlen, ctx->lz4_comp_mem);
ctx                82 crypto/lz4.c   				   u8 *dst, unsigned int *dlen, void *ctx)
ctx                95 crypto/lz4.c   			   void *ctx)
ctx                20 crypto/lz4hc.c 	void *ctx;
ctx                22 crypto/lz4hc.c 	ctx = vmalloc(LZ4HC_MEM_COMPRESS);
ctx                23 crypto/lz4hc.c 	if (!ctx)
ctx                26 crypto/lz4hc.c 	return ctx;
ctx                31 crypto/lz4hc.c 	struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                33 crypto/lz4hc.c 	ctx->lz4hc_comp_mem = lz4hc_alloc_ctx(NULL);
ctx                34 crypto/lz4hc.c 	if (IS_ERR(ctx->lz4hc_comp_mem))
ctx                40 crypto/lz4hc.c static void lz4hc_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx                42 crypto/lz4hc.c 	vfree(ctx);
ctx                47 crypto/lz4hc.c 	struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                49 crypto/lz4hc.c 	lz4hc_free_ctx(NULL, ctx->lz4hc_comp_mem);
ctx                53 crypto/lz4hc.c 				   u8 *dst, unsigned int *dlen, void *ctx)
ctx                56 crypto/lz4hc.c 		*dlen, LZ4HC_DEFAULT_CLEVEL, ctx);
ctx                67 crypto/lz4hc.c 			   void *ctx)
ctx                69 crypto/lz4hc.c 	return __lz4hc_compress_crypto(src, slen, dst, dlen, ctx);
ctx                76 crypto/lz4hc.c 	struct lz4hc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                79 crypto/lz4hc.c 					ctx->lz4hc_comp_mem);
ctx                83 crypto/lz4hc.c 				     u8 *dst, unsigned int *dlen, void *ctx)
ctx                96 crypto/lz4hc.c 			     void *ctx)
ctx                20 crypto/lzo-rle.c 	void *ctx;
ctx                22 crypto/lzo-rle.c 	ctx = kvmalloc(LZO1X_MEM_COMPRESS, GFP_KERNEL);
ctx                23 crypto/lzo-rle.c 	if (!ctx)
ctx                26 crypto/lzo-rle.c 	return ctx;
ctx                31 crypto/lzo-rle.c 	struct lzorle_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                33 crypto/lzo-rle.c 	ctx->lzorle_comp_mem = lzorle_alloc_ctx(NULL);
ctx                34 crypto/lzo-rle.c 	if (IS_ERR(ctx->lzorle_comp_mem))
ctx                40 crypto/lzo-rle.c static void lzorle_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx                42 crypto/lzo-rle.c 	kvfree(ctx);
ctx                47 crypto/lzo-rle.c 	struct lzorle_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                49 crypto/lzo-rle.c 	lzorle_free_ctx(NULL, ctx->lzorle_comp_mem);
ctx                53 crypto/lzo-rle.c 			  u8 *dst, unsigned int *dlen, void *ctx)
ctx                58 crypto/lzo-rle.c 	err = lzorle1x_1_compress(src, slen, dst, &tmp_len, ctx);
ctx                70 crypto/lzo-rle.c 	struct lzorle_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                72 crypto/lzo-rle.c 	return __lzorle_compress(src, slen, dst, dlen, ctx->lzorle_comp_mem);
ctx                77 crypto/lzo-rle.c 			 void *ctx)
ctx                79 crypto/lzo-rle.c 	return __lzorle_compress(src, slen, dst, dlen, ctx);
ctx               105 crypto/lzo-rle.c 			   void *ctx)
ctx                20 crypto/lzo.c   	void *ctx;
ctx                22 crypto/lzo.c   	ctx = kvmalloc(LZO1X_MEM_COMPRESS, GFP_KERNEL);
ctx                23 crypto/lzo.c   	if (!ctx)
ctx                26 crypto/lzo.c   	return ctx;
ctx                31 crypto/lzo.c   	struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                33 crypto/lzo.c   	ctx->lzo_comp_mem = lzo_alloc_ctx(NULL);
ctx                34 crypto/lzo.c   	if (IS_ERR(ctx->lzo_comp_mem))
ctx                40 crypto/lzo.c   static void lzo_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx                42 crypto/lzo.c   	kvfree(ctx);
ctx                47 crypto/lzo.c   	struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                49 crypto/lzo.c   	lzo_free_ctx(NULL, ctx->lzo_comp_mem);
ctx                53 crypto/lzo.c   			  u8 *dst, unsigned int *dlen, void *ctx)
ctx                58 crypto/lzo.c   	err = lzo1x_1_compress(src, slen, dst, &tmp_len, ctx);
ctx                70 crypto/lzo.c   	struct lzo_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                72 crypto/lzo.c   	return __lzo_compress(src, slen, dst, dlen, ctx->lzo_comp_mem);
ctx                77 crypto/lzo.c   			 void *ctx)
ctx                79 crypto/lzo.c   	return __lzo_compress(src, slen, dst, dlen, ctx);
ctx               105 crypto/lzo.c   			   void *ctx)
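
The __lzo_compress() shape above (crypto/lzo.c 53-58, mirrored by lzo-rle) passes a temporary length to the LZO primitive and copies it back into the caller's unsigned int *dlen only on success. A userspace sketch against liblzo2's API; the in-kernel copy has a size_t-based signature but the same flow:

/* Wrapper shape: temporary length, copy back on success only. */
#include <lzo/lzo1x.h>   /* liblzo2 */

static int lzo_compress_buf(const unsigned char *src, unsigned int slen,
			    unsigned char *dst, unsigned int *dlen,
			    void *wrkmem /* LZO1X_1_MEM_COMPRESS bytes */)
{
	lzo_uint tmp_len = *dlen;
	int err = lzo1x_1_compress(src, slen, dst, &tmp_len, wrkmem);

	if (err != LZO_E_OK)
		return -1;
	*dlen = tmp_len;
	return 0;
}
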
ctx               133 crypto/md4.c   static inline void md4_transform_helper(struct md4_ctx *ctx)
ctx               135 crypto/md4.c   	le32_to_cpu_array(ctx->block, ARRAY_SIZE(ctx->block));
ctx               136 crypto/md4.c   	md4_transform(ctx->hash, ctx->block);
ctx               126 crypto/md5.c   static inline void md5_transform_helper(struct md5_state *ctx)
ctx               128 crypto/md5.c   	le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(u32));
ctx               129 crypto/md5.c   	md5_transform(ctx->hash, ctx->block);
ctx               207 crypto/md5.c   	struct md5_state *ctx = shash_desc_ctx(desc);
ctx               209 crypto/md5.c   	memcpy(out, ctx, sizeof(*ctx));
ctx               215 crypto/md5.c   	struct md5_state *ctx = shash_desc_ctx(desc);
ctx               217 crypto/md5.c   	memcpy(ctx, in, sizeof(*ctx));
ctx                50 crypto/michael_mic.c 	struct michael_mic_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                52 crypto/michael_mic.c 	mctx->l = ctx->l;
ctx                53 crypto/michael_mic.c 	mctx->r = ctx->r;
ctx               127 crypto/nhpoly1305.c 	struct nhpoly1305_key *ctx = crypto_shash_ctx(tfm);
ctx               133 crypto/nhpoly1305.c 	poly1305_core_setkey(&ctx->poly_key, key);
ctx               137 crypto/nhpoly1305.c 		ctx->nh_key[i] = get_unaligned_le32(key + i * sizeof(u32));
ctx                46 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
ctx                48 crypto/pcrypt.c 	return crypto_aead_setkey(ctx->child, key, keylen);
ctx                54 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);
ctx                56 crypto/pcrypt.c 	return crypto_aead_setauthsize(ctx->child, authsize);
ctx                98 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
ctx               109 crypto/pcrypt.c 	aead_request_set_tfm(creq, ctx->child);
ctx               116 crypto/pcrypt.c 	err = padata_do_parallel(ictx->psenc, padata, &ctx->cb_cpu);
ctx               143 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
ctx               154 crypto/pcrypt.c 	aead_request_set_tfm(creq, ctx->child);
ctx               161 crypto/pcrypt.c 	err = padata_do_parallel(ictx->psdec, padata, &ctx->cb_cpu);
ctx               173 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               179 crypto/pcrypt.c 	ctx->cb_cpu = cpumask_first(cpu_online_mask);
ctx               181 crypto/pcrypt.c 		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);
ctx               188 crypto/pcrypt.c 	ctx->child = cipher;
ctx               198 crypto/pcrypt.c 	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               200 crypto/pcrypt.c 	crypto_free_aead(ctx->child);
ctx               205 crypto/pcrypt.c 	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);
ctx               207 crypto/pcrypt.c 	crypto_drop_aead(&ctx->spawn);
ctx               208 crypto/pcrypt.c 	padata_free_shell(ctx->psdec);
ctx               209 crypto/pcrypt.c 	padata_free_shell(ctx->psenc);
ctx               232 crypto/pcrypt.c 	struct pcrypt_instance_ctx *ctx;
ctx               247 crypto/pcrypt.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               253 crypto/pcrypt.c 	ctx = aead_instance_ctx(inst);
ctx               254 crypto/pcrypt.c 	ctx->psenc = padata_alloc_shell(pencrypt);
ctx               255 crypto/pcrypt.c 	if (!ctx->psenc)
ctx               258 crypto/pcrypt.c 	ctx->psdec = padata_alloc_shell(pdecrypt);
ctx               259 crypto/pcrypt.c 	if (!ctx->psdec)
ctx               262 crypto/pcrypt.c 	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));
ctx               264 crypto/pcrypt.c 	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
ctx               268 crypto/pcrypt.c 	alg = crypto_spawn_aead_alg(&ctx->spawn);
ctx               298 crypto/pcrypt.c 	crypto_drop_aead(&ctx->spawn);
ctx               300 crypto/pcrypt.c 	padata_free_shell(ctx->psdec);
ctx               302 crypto/pcrypt.c 	padata_free_shell(ctx->psenc);
ctx               109 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               112 crypto/rsa-pkcs1pad.c 	ctx->key_size = 0;
ctx               114 crypto/rsa-pkcs1pad.c 	err = crypto_akcipher_set_pub_key(ctx->child, key, keylen);
ctx               119 crypto/rsa-pkcs1pad.c 	err = crypto_akcipher_maxsize(ctx->child);
ctx               123 crypto/rsa-pkcs1pad.c 	ctx->key_size = err;
ctx               130 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               133 crypto/rsa-pkcs1pad.c 	ctx->key_size = 0;
ctx               135 crypto/rsa-pkcs1pad.c 	err = crypto_akcipher_set_priv_key(ctx->child, key, keylen);
ctx               140 crypto/rsa-pkcs1pad.c 	err = crypto_akcipher_maxsize(ctx->child);
ctx               144 crypto/rsa-pkcs1pad.c 	ctx->key_size = err;
ctx               150 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               158 crypto/rsa-pkcs1pad.c 	return ctx->key_size;
ctx               176 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               186 crypto/rsa-pkcs1pad.c 	pad_len = ctx->key_size - len;
ctx               192 crypto/rsa-pkcs1pad.c 	out_buf = kzalloc(ctx->key_size, GFP_KERNEL);
ctx               200 crypto/rsa-pkcs1pad.c 			    sg_nents_for_len(req->dst, ctx->key_size),
ctx               201 crypto/rsa-pkcs1pad.c 			    out_buf, ctx->key_size);
ctx               205 crypto/rsa-pkcs1pad.c 	req->dst_len = ctx->key_size;
ctx               231 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               236 crypto/rsa-pkcs1pad.c 	if (!ctx->key_size)
ctx               239 crypto/rsa-pkcs1pad.c 	if (req->src_len > ctx->key_size - 11)
ctx               242 crypto/rsa-pkcs1pad.c 	if (req->dst_len < ctx->key_size) {
ctx               243 crypto/rsa-pkcs1pad.c 		req->dst_len = ctx->key_size;
ctx               247 crypto/rsa-pkcs1pad.c 	req_ctx->in_buf = kmalloc(ctx->key_size - 1 - req->src_len,
ctx               252 crypto/rsa-pkcs1pad.c 	ps_end = ctx->key_size - req->src_len - 2;
ctx               259 crypto/rsa-pkcs1pad.c 			ctx->key_size - 1 - req->src_len, req->src);
ctx               261 crypto/rsa-pkcs1pad.c 	akcipher_request_set_tfm(&req_ctx->child_req, ctx->child);
ctx               267 crypto/rsa-pkcs1pad.c 				   req->dst, ctx->key_size - 1, req->dst_len);
ctx               279 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               290 crypto/rsa-pkcs1pad.c 	if (dst_len < ctx->key_size - 1)
ctx               294 crypto/rsa-pkcs1pad.c 	if (dst_len == ctx->key_size) {
ctx               348 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               352 crypto/rsa-pkcs1pad.c 	if (!ctx->key_size || req->src_len != ctx->key_size)
ctx               355 crypto/rsa-pkcs1pad.c 	req_ctx->out_buf = kmalloc(ctx->key_size, GFP_KERNEL);
ctx               360 crypto/rsa-pkcs1pad.c 			    ctx->key_size, NULL);
ctx               362 crypto/rsa-pkcs1pad.c 	akcipher_request_set_tfm(&req_ctx->child_req, ctx->child);
ctx               369 crypto/rsa-pkcs1pad.c 				   ctx->key_size);
ctx               381 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               389 crypto/rsa-pkcs1pad.c 	if (!ctx->key_size)
ctx               395 crypto/rsa-pkcs1pad.c 	if (req->src_len + digest_size > ctx->key_size - 11)
ctx               398 crypto/rsa-pkcs1pad.c 	if (req->dst_len < ctx->key_size) {
ctx               399 crypto/rsa-pkcs1pad.c 		req->dst_len = ctx->key_size;
ctx               403 crypto/rsa-pkcs1pad.c 	req_ctx->in_buf = kmalloc(ctx->key_size - 1 - req->src_len,
ctx               408 crypto/rsa-pkcs1pad.c 	ps_end = ctx->key_size - digest_size - req->src_len - 2;
ctx               418 crypto/rsa-pkcs1pad.c 			ctx->key_size - 1 - req->src_len, req->src);
ctx               420 crypto/rsa-pkcs1pad.c 	akcipher_request_set_tfm(&req_ctx->child_req, ctx->child);
ctx               426 crypto/rsa-pkcs1pad.c 				   req->dst, ctx->key_size - 1, req->dst_len);
ctx               438 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               452 crypto/rsa-pkcs1pad.c 	if (dst_len < ctx->key_size - 1)
ctx               456 crypto/rsa-pkcs1pad.c 	if (dst_len == ctx->key_size) {
ctx               496 crypto/rsa-pkcs1pad.c 			   req_ctx->out_buf + ctx->key_size,
ctx               497 crypto/rsa-pkcs1pad.c 			   req->dst_len, ctx->key_size);
ctx               499 crypto/rsa-pkcs1pad.c 	if (memcmp(req_ctx->out_buf + ctx->key_size, out_buf + pos,
ctx               534 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               540 crypto/rsa-pkcs1pad.c 	    !ctx->key_size || req->src_len < ctx->key_size)
ctx               543 crypto/rsa-pkcs1pad.c 	req_ctx->out_buf = kmalloc(ctx->key_size + req->dst_len, GFP_KERNEL);
ctx               548 crypto/rsa-pkcs1pad.c 			    ctx->key_size, NULL);
ctx               550 crypto/rsa-pkcs1pad.c 	akcipher_request_set_tfm(&req_ctx->child_req, ctx->child);
ctx               557 crypto/rsa-pkcs1pad.c 				   ctx->key_size);
ctx               570 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               577 crypto/rsa-pkcs1pad.c 	ctx->child = child_tfm;
ctx               583 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               585 crypto/rsa-pkcs1pad.c 	crypto_free_akcipher(ctx->child);
ctx               590 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_inst_ctx *ctx = akcipher_instance_ctx(inst);
ctx               591 crypto/rsa-pkcs1pad.c 	struct crypto_akcipher_spawn *spawn = &ctx->spawn;
ctx               602 crypto/rsa-pkcs1pad.c 	struct pkcs1pad_inst_ctx *ctx;
ctx               631 crypto/rsa-pkcs1pad.c 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               635 crypto/rsa-pkcs1pad.c 	ctx = akcipher_instance_ctx(inst);
ctx               636 crypto/rsa-pkcs1pad.c 	spawn = &ctx->spawn;
ctx               637 crypto/rsa-pkcs1pad.c 	ctx->digest_info = digest_info;
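Both pkcs1pad setkey paths above follow the same delegate-and-cache shape: hand the raw key to the child RSA transform, then record its output size so the padding code can size every buffer from ctx->key_size. The recurring ctx->key_size - 11 checks are the PKCS#1 v1.5 minimum overhead: two marker bytes, at least eight padding-string bytes, and a zero separator. A sketch of the public-key variant (the error-code details here are illustrative):

	struct pkcs1pad_ctx *ctx = akcipher_tfm_ctx(tfm);
	int err;

	ctx->key_size = 0;

	err = crypto_akcipher_set_pub_key(ctx->child, key, keylen);
	if (err)
		return err;

	/* cache the modulus size; pad-buffer sizes derive from it */
	err = crypto_akcipher_maxsize(ctx->child);
	if (err <= 0)
		return err ? err : -EINVAL;

	ctx->key_size = err;
	return 0;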
ctx               103 crypto/salsa20_generic.c static void salsa20_init(u32 *state, const struct salsa20_ctx *ctx,
ctx               106 crypto/salsa20_generic.c 	memcpy(state, ctx->initial_state, sizeof(ctx->initial_state));
ctx               116 crypto/salsa20_generic.c 	struct salsa20_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               123 crypto/salsa20_generic.c 	ctx->initial_state[1] = get_unaligned_le32(key + 0);
ctx               124 crypto/salsa20_generic.c 	ctx->initial_state[2] = get_unaligned_le32(key + 4);
ctx               125 crypto/salsa20_generic.c 	ctx->initial_state[3] = get_unaligned_le32(key + 8);
ctx               126 crypto/salsa20_generic.c 	ctx->initial_state[4] = get_unaligned_le32(key + 12);
ctx               133 crypto/salsa20_generic.c 	ctx->initial_state[11] = get_unaligned_le32(key + 0);
ctx               134 crypto/salsa20_generic.c 	ctx->initial_state[12] = get_unaligned_le32(key + 4);
ctx               135 crypto/salsa20_generic.c 	ctx->initial_state[13] = get_unaligned_le32(key + 8);
ctx               136 crypto/salsa20_generic.c 	ctx->initial_state[14] = get_unaligned_le32(key + 12);
ctx               137 crypto/salsa20_generic.c 	ctx->initial_state[0]  = get_unaligned_le32(constants + 0);
ctx               138 crypto/salsa20_generic.c 	ctx->initial_state[5]  = get_unaligned_le32(constants + 4);
ctx               139 crypto/salsa20_generic.c 	ctx->initial_state[10] = get_unaligned_le32(constants + 8);
ctx               140 crypto/salsa20_generic.c 	ctx->initial_state[15] = get_unaligned_le32(constants + 12);
ctx               143 crypto/salsa20_generic.c 	ctx->initial_state[6] = 0;
ctx               144 crypto/salsa20_generic.c 	ctx->initial_state[7] = 0;
ctx               147 crypto/salsa20_generic.c 	ctx->initial_state[8] = 0;
ctx               148 crypto/salsa20_generic.c 	ctx->initial_state[9] = 0;
ctx               156 crypto/salsa20_generic.c 	const struct salsa20_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               163 crypto/salsa20_generic.c 	salsa20_init(state, ctx, req->iv);
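The scattered initial_state indices above follow the standard Salsa20 matrix layout; collected in one place as a reading aid (an annotation, not code from the file):

	/* ctx->initial_state word layout (per the Salsa20 spec):
	 *   [0] [5] [10] [15]  constants ("expand 32-byte k" / "expand 16-byte k")
	 *   [1]..[4]           key words 0-3
	 *   [11]..[14]         key words 4-7 (a 128-bit key is loaded twice)
	 *   [6] [7]            nonce: zeroed at setkey, filled from req->iv
	 *                      by salsa20_init()
	 *   [8] [9]            block counter, zeroed at setkey
	 */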
ctx               125 crypto/scompress.c 	void **ctx = acomp_request_ctx(req);
ctx               144 crypto/scompress.c 					    scratch->dst, &req->dlen, *ctx);
ctx               147 crypto/scompress.c 					      scratch->dst, &req->dlen, *ctx);
ctx               176 crypto/scompress.c 	struct crypto_scomp **ctx = crypto_tfm_ctx(tfm);
ctx               178 crypto/scompress.c 	crypto_free_scomp(*ctx);
ctx               190 crypto/scompress.c 	struct crypto_scomp **ctx = crypto_tfm_ctx(tfm);
ctx               202 crypto/scompress.c 	*ctx = scomp;
ctx               219 crypto/scompress.c 	void *ctx;
ctx               221 crypto/scompress.c 	ctx = crypto_scomp_alloc_ctx(scomp);
ctx               222 crypto/scompress.c 	if (IS_ERR(ctx)) {
ctx               227 crypto/scompress.c 	*req->__ctx = ctx;
ctx               238 crypto/scompress.c 	void *ctx = *req->__ctx;
ctx               240 crypto/scompress.c 	if (ctx)
ctx               241 crypto/scompress.c 		crypto_scomp_free_ctx(scomp, ctx);
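The scompress entries above show the acomp-over-scomp shim: the tfm context is just a struct crypto_scomp pointer, and each request lazily gets its own scomp context stored in req->__ctx. The allocation/teardown pair, condensed into a sketch (locking and the scratch-buffer handling are elided):

	void *ctx = crypto_scomp_alloc_ctx(scomp);

	if (IS_ERR(ctx))
		return PTR_ERR(ctx);
	*req->__ctx = ctx;	/* per-request scomp state */

	/* and on request teardown: */
	if (*req->__ctx)
		crypto_scomp_free_ctx(scomp, *req->__ctx);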
ctx               330 crypto/seed.c  	struct seed_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               331 crypto/seed.c  	u32 *keyout = ctx->keysched;
ctx               366 crypto/seed.c  	const struct seed_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               370 crypto/seed.c  	const u32 *ks = ctx->keysched;
ctx               404 crypto/seed.c  	const struct seed_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               408 crypto/seed.c  	const u32 *ks = ctx->keysched;
ctx                53 crypto/seqiv.c 	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
ctx                64 crypto/seqiv.c 	aead_request_set_tfm(subreq, ctx->child);
ctx                71 crypto/seqiv.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);
ctx                73 crypto/seqiv.c 		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
ctx               102 crypto/seqiv.c 	crypto_xor(info, ctx->salt, ivsize);
ctx               114 crypto/seqiv.c 	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
ctx               123 crypto/seqiv.c 	aead_request_set_tfm(subreq, ctx->child);
ctx               274 crypto/serpent_generic.c int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
ctx               277 crypto/serpent_generic.c 	u32 *k = ctx->expkey;
ctx               440 crypto/serpent_generic.c 	__serpent_setkey_sbox(r0, r1, r2, r3, r4, ctx->expkey);
ctx               452 crypto/serpent_generic.c void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
ctx               454 crypto/serpent_generic.c 	const u32 *k = ctx->expkey;
ctx               512 crypto/serpent_generic.c 	struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               514 crypto/serpent_generic.c 	__serpent_encrypt(ctx, dst, src);
ctx               517 crypto/serpent_generic.c void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
ctx               519 crypto/serpent_generic.c 	const u32 *k = ctx->expkey;
ctx               572 crypto/serpent_generic.c 	struct serpent_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               574 crypto/serpent_generic.c 	__serpent_decrypt(ctx, dst, src);
ctx               225 crypto/shash.c 	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
ctx               227 crypto/shash.c 	return crypto_shash_setkey(*ctx, key, keylen);
ctx               232 crypto/shash.c 	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               235 crypto/shash.c 	desc->tfm = *ctx;
ctx               286 crypto/shash.c 	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               289 crypto/shash.c 	desc->tfm = *ctx;
ctx               320 crypto/shash.c 	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               323 crypto/shash.c 	desc->tfm = *ctx;
ctx               335 crypto/shash.c 	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               338 crypto/shash.c 	desc->tfm = *ctx;
ctx               345 crypto/shash.c 	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
ctx               347 crypto/shash.c 	crypto_free_shash(*ctx);
ctx               355 crypto/shash.c 	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
ctx               367 crypto/shash.c 	*ctx = shash;
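Every shash.c handler above repeats one bridge pattern: the async-hash tfm context holds a single struct crypto_shash pointer, and each request rebuilds a synchronous descriptor from it before calling the shash API. A minimal sketch of one entry point:

	static int shash_async_init_sketch(struct ahash_request *req)
	{
		struct crypto_shash **ctx =
			crypto_ahash_ctx(crypto_ahash_reqtfm(req));
		struct shash_desc *desc = ahash_request_ctx(req);

		desc->tfm = *ctx;		/* point the sync descriptor */
		return crypto_shash_init(desc);	/* at the wrapped shash */
	}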
ctx                53 crypto/simd.c  	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                54 crypto/simd.c  	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;
ctx                69 crypto/simd.c  	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                77 crypto/simd.c  	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
ctx                78 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
ctx                80 crypto/simd.c  		child = cryptd_skcipher_child(ctx->cryptd_tfm);
ctx                90 crypto/simd.c  	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                98 crypto/simd.c  	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
ctx                99 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
ctx               101 crypto/simd.c  		child = cryptd_skcipher_child(ctx->cryptd_tfm);
ctx               110 crypto/simd.c  	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               112 crypto/simd.c  	cryptd_free_skcipher(ctx->cryptd_tfm);
ctx               117 crypto/simd.c  	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               132 crypto/simd.c  	ctx->cryptd_tfm = cryptd_tfm;
ctx               296 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               297 crypto/simd.c  	struct crypto_aead *child = &ctx->cryptd_tfm->base;
ctx               311 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               312 crypto/simd.c  	struct crypto_aead *child = &ctx->cryptd_tfm->base;
ctx               320 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               328 crypto/simd.c  	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
ctx               329 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
ctx               331 crypto/simd.c  		child = cryptd_aead_child(ctx->cryptd_tfm);
ctx               341 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               349 crypto/simd.c  	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
ctx               350 crypto/simd.c  		child = &ctx->cryptd_tfm->base;
ctx               352 crypto/simd.c  		child = cryptd_aead_child(ctx->cryptd_tfm);
ctx               361 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               363 crypto/simd.c  	cryptd_free_aead(ctx->cryptd_tfm);
ctx               368 crypto/simd.c  	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               382 crypto/simd.c  	ctx->cryptd_tfm = cryptd_tfm;
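The repeated conditional above is the heart of crypto/simd.c: if SIMD registers cannot be used in the current context, or cryptd already has queued work for this tfm (so request ordering must be preserved), the request is bounced through the cryptd async path; otherwise the inner SIMD implementation is called directly. The first clause of the test is not visible in the excerpt; on trees of this vintage it is crypto_simd_usable() (older ones used may_use_simd()), which is an assumption here:

	if (!crypto_simd_usable() ||	/* assumed first clause */
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;			/* via cryptd */
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);	/* direct */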
ctx               601 crypto/skcipher.c 	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
ctx               602 crypto/skcipher.c 	struct crypto_blkcipher *blkcipher = *ctx;
ctx               627 crypto/skcipher.c 	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
ctx               629 crypto/skcipher.c 		.tfm = *ctx,
ctx               658 crypto/skcipher.c 	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
ctx               660 crypto/skcipher.c 	crypto_free_blkcipher(*ctx);
ctx               667 crypto/skcipher.c 	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
ctx               682 crypto/skcipher.c 	*ctx = blkcipher;
ctx               700 crypto/skcipher.c 	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
ctx               701 crypto/skcipher.c 	struct crypto_ablkcipher *ablkcipher = *ctx;
ctx               725 crypto/skcipher.c 	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
ctx               728 crypto/skcipher.c 	ablkcipher_request_set_tfm(subreq, *ctx);
ctx               757 crypto/skcipher.c 	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
ctx               759 crypto/skcipher.c 	crypto_free_ablkcipher(*ctx);
ctx               766 crypto/skcipher.c 	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
ctx               780 crypto/skcipher.c 	*ctx = ablkcipher;
ctx              1127 crypto/skcipher.c 	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
ctx              1134 crypto/skcipher.c 	ctx->cipher = cipher;
ctx              1140 crypto/skcipher.c 	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
ctx              1142 crypto/skcipher.c 	crypto_free_cipher(ctx->cipher);
ctx               116 crypto/sm4_generic.c int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
ctx               131 crypto/sm4_generic.c 		ctx->rkey_enc[i] = t;
ctx               139 crypto/sm4_generic.c 		ctx->rkey_dec[i] = ctx->rkey_enc[31 - i];
ctx               159 crypto/sm4_generic.c 	struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               163 crypto/sm4_generic.c 	ret = crypto_sm4_expand_key(ctx, in_key, key_len);
ctx               195 crypto/sm4_generic.c 	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               197 crypto/sm4_generic.c 	sm4_do_crypt(ctx->rkey_enc, (u32 *)out, (u32 *)in);
ctx               205 crypto/sm4_generic.c 	const struct crypto_sm4_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               207 crypto/sm4_generic.c 	sm4_do_crypt(ctx->rkey_dec, (u32 *)out, (u32 *)in);
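crypto_sm4_expand_key() above makes the SM4 encrypt/decrypt symmetry explicit: decryption is the same round function with the key schedule applied in reverse, which is all the rkey_dec assignment in the excerpt does:

	/* decryption round keys are the encryption schedule, reversed */
	for (i = 0; i < 32; i++)
		ctx->rkey_dec[i] = ctx->rkey_enc[31 - i];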
ctx               910 crypto/streebog_generic.c 	struct streebog_state *ctx = shash_desc_ctx(desc);
ctx               914 crypto/streebog_generic.c 	memset(ctx, 0, sizeof(struct streebog_state));
ctx               917 crypto/streebog_generic.c 			ctx->h.qword[i] = cpu_to_le64(0x0101010101010101ULL);
ctx               922 crypto/streebog_generic.c static void streebog_pad(struct streebog_state *ctx)
ctx               924 crypto/streebog_generic.c 	if (ctx->fillsize >= STREEBOG_BLOCK_SIZE)
ctx               927 crypto/streebog_generic.c 	memset(ctx->buffer + ctx->fillsize, 0,
ctx               928 crypto/streebog_generic.c 	       sizeof(ctx->buffer) - ctx->fillsize);
ctx               930 crypto/streebog_generic.c 	ctx->buffer[ctx->fillsize] = 1;
ctx               975 crypto/streebog_generic.c static void streebog_stage2(struct streebog_state *ctx, const u8 *data)
ctx               981 crypto/streebog_generic.c 	streebog_g(&ctx->h, &ctx->N, &m);
ctx               983 crypto/streebog_generic.c 	streebog_add512(&ctx->N, &buffer512, &ctx->N);
ctx               984 crypto/streebog_generic.c 	streebog_add512(&ctx->Sigma, &m, &ctx->Sigma);
ctx               987 crypto/streebog_generic.c static void streebog_stage3(struct streebog_state *ctx)
ctx               991 crypto/streebog_generic.c 	buf.qword[0] = cpu_to_le64(ctx->fillsize << 3);
ctx               992 crypto/streebog_generic.c 	streebog_pad(ctx);
ctx               994 crypto/streebog_generic.c 	streebog_g(&ctx->h, &ctx->N, &ctx->m);
ctx               995 crypto/streebog_generic.c 	streebog_add512(&ctx->N, &buf, &ctx->N);
ctx               996 crypto/streebog_generic.c 	streebog_add512(&ctx->Sigma, &ctx->m, &ctx->Sigma);
ctx               997 crypto/streebog_generic.c 	streebog_g(&ctx->h, &buffer0, &ctx->N);
ctx               998 crypto/streebog_generic.c 	streebog_g(&ctx->h, &buffer0, &ctx->Sigma);
ctx               999 crypto/streebog_generic.c 	memcpy(&ctx->hash, &ctx->h, sizeof(struct streebog_uint512));
ctx              1005 crypto/streebog_generic.c 	struct streebog_state *ctx = shash_desc_ctx(desc);
ctx              1008 crypto/streebog_generic.c 	if (ctx->fillsize) {
ctx              1009 crypto/streebog_generic.c 		chunksize = STREEBOG_BLOCK_SIZE - ctx->fillsize;
ctx              1012 crypto/streebog_generic.c 		memcpy(&ctx->buffer[ctx->fillsize], data, chunksize);
ctx              1013 crypto/streebog_generic.c 		ctx->fillsize += chunksize;
ctx              1017 crypto/streebog_generic.c 		if (ctx->fillsize == STREEBOG_BLOCK_SIZE) {
ctx              1018 crypto/streebog_generic.c 			streebog_stage2(ctx, ctx->buffer);
ctx              1019 crypto/streebog_generic.c 			ctx->fillsize = 0;
ctx              1024 crypto/streebog_generic.c 		streebog_stage2(ctx, data);
ctx              1030 crypto/streebog_generic.c 		memcpy(&ctx->buffer, data, len);
ctx              1031 crypto/streebog_generic.c 		ctx->fillsize = len;
ctx              1038 crypto/streebog_generic.c 	struct streebog_state *ctx = shash_desc_ctx(desc);
ctx              1040 crypto/streebog_generic.c 	streebog_stage3(ctx);
ctx              1041 crypto/streebog_generic.c 	ctx->fillsize = 0;
ctx              1043 crypto/streebog_generic.c 		memcpy(digest, &ctx->hash.qword[4], STREEBOG256_DIGEST_SIZE);
ctx              1045 crypto/streebog_generic.c 		memcpy(digest, &ctx->hash.qword[0], STREEBOG512_DIGEST_SIZE);
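streebog_update() above is the canonical three-phase buffered-hash update: top up a partial block first, then consume whole blocks straight from the input, then stash the tail. Assembled into one piece it reads roughly as follows (a reconstruction from the excerpted lines, with the chunk clamp filled in):

	static int streebog_update(struct shash_desc *desc, const u8 *data,
				   unsigned int len)
	{
		struct streebog_state *ctx = shash_desc_ctx(desc);
		size_t chunksize;

		if (ctx->fillsize) {
			chunksize = STREEBOG_BLOCK_SIZE - ctx->fillsize;
			if (chunksize > len)
				chunksize = len;
			memcpy(&ctx->buffer[ctx->fillsize], data, chunksize);
			ctx->fillsize += chunksize;
			len -= chunksize;
			data += chunksize;
			if (ctx->fillsize == STREEBOG_BLOCK_SIZE) {
				streebog_stage2(ctx, ctx->buffer);
				ctx->fillsize = 0;
			}
		}
		while (len >= STREEBOG_BLOCK_SIZE) {	/* whole blocks, no copy */
			streebog_stage2(ctx, data);
			data += STREEBOG_BLOCK_SIZE;
			len -= STREEBOG_BLOCK_SIZE;
		}
		if (len) {				/* keep the tail */
			memcpy(&ctx->buffer, data, len);
			ctx->fillsize = len;
		}
		return 0;
	}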
ctx                45 crypto/tea.c   	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                48 crypto/tea.c   	ctx->KEY[0] = le32_to_cpu(key[0]);
ctx                49 crypto/tea.c   	ctx->KEY[1] = le32_to_cpu(key[1]);
ctx                50 crypto/tea.c   	ctx->KEY[2] = le32_to_cpu(key[2]);
ctx                51 crypto/tea.c   	ctx->KEY[3] = le32_to_cpu(key[3]);
ctx                61 crypto/tea.c   	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                68 crypto/tea.c   	k0 = ctx->KEY[0];
ctx                69 crypto/tea.c   	k1 = ctx->KEY[1];
ctx                70 crypto/tea.c   	k2 = ctx->KEY[2];
ctx                71 crypto/tea.c   	k3 = ctx->KEY[3];
ctx                89 crypto/tea.c   	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                96 crypto/tea.c   	k0 = ctx->KEY[0];
ctx                97 crypto/tea.c   	k1 = ctx->KEY[1];
ctx                98 crypto/tea.c   	k2 = ctx->KEY[2];
ctx                99 crypto/tea.c   	k3 = ctx->KEY[3];
ctx               118 crypto/tea.c   	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               121 crypto/tea.c   	ctx->KEY[0] = le32_to_cpu(key[0]);
ctx               122 crypto/tea.c   	ctx->KEY[1] = le32_to_cpu(key[1]);
ctx               123 crypto/tea.c   	ctx->KEY[2] = le32_to_cpu(key[2]);
ctx               124 crypto/tea.c   	ctx->KEY[3] = le32_to_cpu(key[3]);
ctx               134 crypto/tea.c   	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               142 crypto/tea.c   		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum&3]); 
ctx               144 crypto/tea.c   		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 &3]); 
ctx               154 crypto/tea.c   	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               164 crypto/tea.c   		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum>>11 & 3]);
ctx               166 crypto/tea.c   		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
ctx               178 crypto/tea.c   	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               186 crypto/tea.c   		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum&3];
ctx               188 crypto/tea.c   		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 &3];
ctx               198 crypto/tea.c   	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               208 crypto/tea.c   		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum>>11 & 3];
ctx               210 crypto/tea.c   		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
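The XTEA half-rounds above pair up into the usual Feistel loop; a sketch of the encrypt side, assuming the file's XTEA_ROUNDS/XTEA_DELTA constants (32 rounds, delta 0x9e3779b9). The second pair of round lines, with (z ^ sum) instead of (sum + ...), is the file's "xeta" variant: the historically mis-implemented XTEA kept for compatibility. Note also that the decrypt excerpts declare struct tea_ctx rather than struct xtea_ctx; both appear to start with the same u32 KEY[4], so the lookup is harmless.

	sum = 0;
	rounds = XTEA_ROUNDS;			/* 32, assumed from the file */
	while (rounds-- > 0) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;		/* 0x9e3779b9 */
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}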
ctx              3335 crypto/testmgr.c 		u32 *ctx = (u32 *)shash_desc_ctx(shash);
ctx              3339 crypto/testmgr.c 		*ctx = 420553207;
ctx               484 crypto/twofish_common.c    ctx->s[0][i] = mds[0][q0[(a) ^ sa] ^ se]; \
ctx               485 crypto/twofish_common.c    ctx->s[1][i] = mds[1][q0[(b) ^ sb] ^ sf]; \
ctx               486 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
ctx               487 crypto/twofish_common.c    ctx->s[3][i] = mds[3][q1[(b) ^ sd] ^ sh]
ctx               492 crypto/twofish_common.c    ctx->s[0][i] = mds[0][q0[q0[(b) ^ sa] ^ se] ^ si]; \
ctx               493 crypto/twofish_common.c    ctx->s[1][i] = mds[1][q0[q1[(b) ^ sb] ^ sf] ^ sj]; \
ctx               494 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
ctx               495 crypto/twofish_common.c    ctx->s[3][i] = mds[3][q1[q1[(a) ^ sd] ^ sh] ^ sl];
ctx               500 crypto/twofish_common.c    ctx->s[0][i] = mds[0][q0[q0[q1[(b) ^ sa] ^ se] ^ si] ^ sm]; \
ctx               501 crypto/twofish_common.c    ctx->s[1][i] = mds[1][q0[q1[q1[(a) ^ sb] ^ sf] ^ sj] ^ sn]; \
ctx               502 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
ctx               503 crypto/twofish_common.c    ctx->s[3][i] = mds[3][q1[q1[q0[(b) ^ sd] ^ sh] ^ sl] ^ sp];
ctx               539 crypto/twofish_common.c    x += y; y += x; ctx->a[j] = x; \
ctx               540 crypto/twofish_common.c    ctx->a[(j) + 1] = rol32(y, 9)
ctx               552 crypto/twofish_common.c    x += y; y += x; ctx->a[j] = x; \
ctx               553 crypto/twofish_common.c    ctx->a[(j) + 1] = rol32(y, 9)
ctx               565 crypto/twofish_common.c    x += y; y += x; ctx->a[j] = x; \
ctx               566 crypto/twofish_common.c    ctx->a[(j) + 1] = rol32(y, 9)
ctx               569 crypto/twofish_common.c int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key,
ctx                41 crypto/twofish_generic.c      (ctx->s[0][(a) & 0xFF]) ^ (ctx->s[1][((a) >> 8) & 0xFF]) \
ctx                42 crypto/twofish_generic.c    ^ (ctx->s[2][((a) >> 16) & 0xFF]) ^ (ctx->s[3][(a) >> 24])
ctx                45 crypto/twofish_generic.c      (ctx->s[1][(b) & 0xFF]) ^ (ctx->s[2][((b) >> 8) & 0xFF]) \
ctx                46 crypto/twofish_generic.c    ^ (ctx->s[3][((b) >> 16) & 0xFF]) ^ (ctx->s[0][(b) >> 24])
ctx                55 crypto/twofish_generic.c    x += y; y += x + ctx->k[2 * (n) + 1]; \
ctx                56 crypto/twofish_generic.c    (c) ^= x + ctx->k[2 * (n)]; \
ctx                63 crypto/twofish_generic.c    (d) ^= y + ctx->k[2 * (n) + 1]; \
ctx                66 crypto/twofish_generic.c    (c) ^= (x + ctx->k[2 * (n)])
ctx                86 crypto/twofish_generic.c    x = le32_to_cpu(src[n]) ^ ctx->w[m]
ctx                89 crypto/twofish_generic.c    x ^= ctx->w[m]; \
ctx                97 crypto/twofish_generic.c 	struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               134 crypto/twofish_generic.c 	struct twofish_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                29 crypto/xcbc.c  	u8 ctx[];
ctx                45 crypto/xcbc.c  	u8 ctx[];
ctx                54 crypto/xcbc.c  	struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
ctx                55 crypto/xcbc.c  	u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
ctx                60 crypto/xcbc.c  	if ((err = crypto_cipher_setkey(ctx->child, inkey, keylen)))
ctx                63 crypto/xcbc.c  	crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
ctx                64 crypto/xcbc.c  	crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
ctx                65 crypto/xcbc.c  	crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);
ctx                67 crypto/xcbc.c  	return crypto_cipher_setkey(ctx->child, key1, bs);
ctx                74 crypto/xcbc.c  	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx                76 crypto/xcbc.c  	u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;
ctx                78 crypto/xcbc.c  	ctx->len = 0;
ctx                90 crypto/xcbc.c  	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx                93 crypto/xcbc.c  	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
ctx                97 crypto/xcbc.c  	if ((ctx->len + len) <= bs) {
ctx                98 crypto/xcbc.c  		memcpy(odds + ctx->len, p, len);
ctx                99 crypto/xcbc.c  		ctx->len += len;
ctx               104 crypto/xcbc.c  	memcpy(odds + ctx->len, p, bs - ctx->len);
ctx               105 crypto/xcbc.c  	len -= bs - ctx->len;
ctx               106 crypto/xcbc.c  	p += bs - ctx->len;
ctx               112 crypto/xcbc.c  	ctx->len = 0;
ctx               125 crypto/xcbc.c  		ctx->len = len;
ctx               136 crypto/xcbc.c  	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
ctx               139 crypto/xcbc.c  	u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
ctx               140 crypto/xcbc.c  	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
ctx               144 crypto/xcbc.c  	if (ctx->len != bs) {
ctx               146 crypto/xcbc.c  		u8 *p = odds + ctx->len;
ctx               151 crypto/xcbc.c  		rlen = bs - ctx->len -1;
ctx               171 crypto/xcbc.c  	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               177 crypto/xcbc.c  	ctx->child = cipher;
ctx               184 crypto/xcbc.c  	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               185 crypto/xcbc.c  	crypto_free_cipher(ctx->child);
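Both xcbc contexts above end in a flexible u8 ctx[] array, and the real storage is carved out of it at an alignmask-aligned offset with PTR_ALIGN: the tfm context holds the derived constants, the per-request context holds the partial block and the running MAC state. The per-request carving, condensed:

	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);	/* partial block */
	u8 *prev = odds + bs;					/* running MAC state */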
ctx                43 crypto/xts.c   	struct priv *ctx = crypto_skcipher_ctx(parent);
ctx                59 crypto/xts.c   	tweak = ctx->tweak;
ctx                70 crypto/xts.c   	child = ctx->child;
ctx               166 crypto/xts.c   	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx               185 crypto/xts.c   	skcipher_request_set_tfm(subreq, ctx->child);
ctx               243 crypto/xts.c   	struct priv *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
ctx               250 crypto/xts.c   	skcipher_request_set_tfm(subreq, ctx->child);
ctx               256 crypto/xts.c   	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
ctx               299 crypto/xts.c   	struct priv *ctx = crypto_skcipher_ctx(tfm);
ctx               307 crypto/xts.c   	ctx->child = child;
ctx               311 crypto/xts.c   		crypto_free_skcipher(ctx->child);
ctx               315 crypto/xts.c   	ctx->tweak = tweak;
ctx               325 crypto/xts.c   	struct priv *ctx = crypto_skcipher_ctx(tfm);
ctx               327 crypto/xts.c   	crypto_free_skcipher(ctx->child);
ctx               328 crypto/xts.c   	crypto_free_cipher(ctx->tweak);
ctx               341 crypto/xts.c   	struct xts_instance_ctx *ctx;
ctx               358 crypto/xts.c   	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
ctx               362 crypto/xts.c   	ctx = skcipher_instance_ctx(inst);
ctx               364 crypto/xts.c   	crypto_set_skcipher_spawn(&ctx->spawn, skcipher_crypto_instance(inst));
ctx               370 crypto/xts.c   	err = crypto_grab_skcipher(&ctx->spawn, cipher_name, 0, mask);
ctx               373 crypto/xts.c   		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
ctx               377 crypto/xts.c   		err = crypto_grab_skcipher(&ctx->spawn, ctx->name, 0, mask);
ctx               383 crypto/xts.c   	alg = crypto_skcipher_spawn_alg(&ctx->spawn);
ctx               406 crypto/xts.c   		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
ctx               407 crypto/xts.c   		if (len < 2 || len >= sizeof(ctx->name))
ctx               410 crypto/xts.c   		if (ctx->name[len - 1] != ')')
ctx               413 crypto/xts.c   		ctx->name[len - 1] = 0;
ctx               416 crypto/xts.c   			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
ctx               452 crypto/xts.c   	crypto_drop_skcipher(&ctx->spawn);
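The xts.c context above carries two transforms because XTS needs two keys: the supplied key is twice the cipher key size, with one half keying the bulk skcipher (ctx->child) and the other keying the tweak cipher (ctx->tweak). A sketch of the split, assuming the usual xts_verify_key() validity check (ordering and error paths are illustrative):

	err = xts_verify_key(parent, key, keylen);	/* rejects bad lengths */
	if (err)
		return err;

	keylen /= 2;
	err = crypto_skcipher_setkey(ctx->child, key, keylen);
	if (err)
		return err;
	return crypto_cipher_setkey(ctx->tweak, key + keylen, keylen);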
ctx                32 crypto/zstd.c  static int zstd_comp_init(struct zstd_ctx *ctx)
ctx                38 crypto/zstd.c  	ctx->cwksp = vzalloc(wksp_size);
ctx                39 crypto/zstd.c  	if (!ctx->cwksp) {
ctx                44 crypto/zstd.c  	ctx->cctx = ZSTD_initCCtx(ctx->cwksp, wksp_size);
ctx                45 crypto/zstd.c  	if (!ctx->cctx) {
ctx                52 crypto/zstd.c  	vfree(ctx->cwksp);
ctx                56 crypto/zstd.c  static int zstd_decomp_init(struct zstd_ctx *ctx)
ctx                61 crypto/zstd.c  	ctx->dwksp = vzalloc(wksp_size);
ctx                62 crypto/zstd.c  	if (!ctx->dwksp) {
ctx                67 crypto/zstd.c  	ctx->dctx = ZSTD_initDCtx(ctx->dwksp, wksp_size);
ctx                68 crypto/zstd.c  	if (!ctx->dctx) {
ctx                75 crypto/zstd.c  	vfree(ctx->dwksp);
ctx                79 crypto/zstd.c  static void zstd_comp_exit(struct zstd_ctx *ctx)
ctx                81 crypto/zstd.c  	vfree(ctx->cwksp);
ctx                82 crypto/zstd.c  	ctx->cwksp = NULL;
ctx                83 crypto/zstd.c  	ctx->cctx = NULL;
ctx                86 crypto/zstd.c  static void zstd_decomp_exit(struct zstd_ctx *ctx)
ctx                88 crypto/zstd.c  	vfree(ctx->dwksp);
ctx                89 crypto/zstd.c  	ctx->dwksp = NULL;
ctx                90 crypto/zstd.c  	ctx->dctx = NULL;
ctx                93 crypto/zstd.c  static int __zstd_init(void *ctx)
ctx                97 crypto/zstd.c  	ret = zstd_comp_init(ctx);
ctx               100 crypto/zstd.c  	ret = zstd_decomp_init(ctx);
ctx               102 crypto/zstd.c  		zstd_comp_exit(ctx);
ctx               109 crypto/zstd.c  	struct zstd_ctx *ctx;
ctx               111 crypto/zstd.c  	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               112 crypto/zstd.c  	if (!ctx)
ctx               115 crypto/zstd.c  	ret = __zstd_init(ctx);
ctx               117 crypto/zstd.c  		kfree(ctx);
ctx               121 crypto/zstd.c  	return ctx;
ctx               126 crypto/zstd.c  	struct zstd_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               128 crypto/zstd.c  	return __zstd_init(ctx);
ctx               131 crypto/zstd.c  static void __zstd_exit(void *ctx)
ctx               133 crypto/zstd.c  	zstd_comp_exit(ctx);
ctx               134 crypto/zstd.c  	zstd_decomp_exit(ctx);
ctx               137 crypto/zstd.c  static void zstd_free_ctx(struct crypto_scomp *tfm, void *ctx)
ctx               139 crypto/zstd.c  	__zstd_exit(ctx);
ctx               140 crypto/zstd.c  	kzfree(ctx);
ctx               145 crypto/zstd.c  	struct zstd_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               147 crypto/zstd.c  	__zstd_exit(ctx);
ctx               151 crypto/zstd.c  			   u8 *dst, unsigned int *dlen, void *ctx)
ctx               154 crypto/zstd.c  	struct zstd_ctx *zctx = ctx;
ctx               167 crypto/zstd.c  	struct zstd_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               169 crypto/zstd.c  	return __zstd_compress(src, slen, dst, dlen, ctx);
ctx               174 crypto/zstd.c  			  void *ctx)
ctx               176 crypto/zstd.c  	return __zstd_compress(src, slen, dst, dlen, ctx);
ctx               180 crypto/zstd.c  			     u8 *dst, unsigned int *dlen, void *ctx)
ctx               183 crypto/zstd.c  	struct zstd_ctx *zctx = ctx;
ctx               195 crypto/zstd.c  	struct zstd_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               197 crypto/zstd.c  	return __zstd_decompress(src, slen, dst, dlen, ctx);
ctx               202 crypto/zstd.c  			    void *ctx)
ctx               204 crypto/zstd.c  	return __zstd_decompress(src, slen, dst, dlen, ctx);
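The zstd_comp_init()/zstd_decomp_init() pairs above show the kernel's caller-allocated-workspace pattern: the zstd library is handed a vzalloc()'d arena instead of allocating internally, so teardown is a plain vfree(). A sketch of the compress side, assuming the workspace-bound helper of the kernel's pre-1.4 zstd wrapper (ZSTD_CCtxWorkspaceBound and the params value are assumptions here):

	const size_t wksp_size = ZSTD_CCtxWorkspaceBound(params.cParams);

	ctx->cwksp = vzalloc(wksp_size);
	if (!ctx->cwksp)
		return -ENOMEM;

	ctx->cctx = ZSTD_initCCtx(ctx->cwksp, wksp_size);	/* uses the arena */
	if (!ctx->cctx) {
		vfree(ctx->cwksp);
		return -EINVAL;
	}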
ctx                44 drivers/acpi/apei/apei-base.c void apei_exec_ctx_init(struct apei_exec_context *ctx,
ctx                50 drivers/acpi/apei/apei-base.c 	ctx->ins_table = ins_table;
ctx                51 drivers/acpi/apei/apei-base.c 	ctx->instructions = instructions;
ctx                52 drivers/acpi/apei/apei-base.c 	ctx->action_table = action_table;
ctx                53 drivers/acpi/apei/apei-base.c 	ctx->entries = entries;
ctx                70 drivers/acpi/apei/apei-base.c int apei_exec_read_register(struct apei_exec_context *ctx,
ctx                79 drivers/acpi/apei/apei-base.c 	ctx->value = val;
ctx                85 drivers/acpi/apei/apei-base.c int apei_exec_read_register_value(struct apei_exec_context *ctx,
ctx                90 drivers/acpi/apei/apei-base.c 	rc = apei_exec_read_register(ctx, entry);
ctx                93 drivers/acpi/apei/apei-base.c 	ctx->value = (ctx->value == entry->value);
ctx               118 drivers/acpi/apei/apei-base.c int apei_exec_write_register(struct apei_exec_context *ctx,
ctx               121 drivers/acpi/apei/apei-base.c 	return __apei_exec_write_register(entry, ctx->value);
ctx               125 drivers/acpi/apei/apei-base.c int apei_exec_write_register_value(struct apei_exec_context *ctx,
ctx               130 drivers/acpi/apei/apei-base.c 	ctx->value = entry->value;
ctx               131 drivers/acpi/apei/apei-base.c 	rc = apei_exec_write_register(ctx, entry);
ctx               137 drivers/acpi/apei/apei-base.c int apei_exec_noop(struct apei_exec_context *ctx,
ctx               148 drivers/acpi/apei/apei-base.c int __apei_exec_run(struct apei_exec_context *ctx, u8 action,
ctx               156 drivers/acpi/apei/apei-base.c 	ctx->ip = 0;
ctx               166 drivers/acpi/apei/apei-base.c 	for (i = 0; i < ctx->entries; i++) {
ctx               167 drivers/acpi/apei/apei-base.c 		entry = &ctx->action_table[i];
ctx               170 drivers/acpi/apei/apei-base.c 		if (ip == ctx->ip) {
ctx               171 drivers/acpi/apei/apei-base.c 			if (entry->instruction >= ctx->instructions ||
ctx               172 drivers/acpi/apei/apei-base.c 			    !ctx->ins_table[entry->instruction].run) {
ctx               178 drivers/acpi/apei/apei-base.c 			run = ctx->ins_table[entry->instruction].run;
ctx               179 drivers/acpi/apei/apei-base.c 			rc = run(ctx, entry);
ctx               183 drivers/acpi/apei/apei-base.c 				ctx->ip++;
ctx               186 drivers/acpi/apei/apei-base.c 		if (ctx->ip < ip)
ctx               194 drivers/acpi/apei/apei-base.c typedef int (*apei_exec_entry_func_t)(struct apei_exec_context *ctx,
ctx               198 drivers/acpi/apei/apei-base.c static int apei_exec_for_each_entry(struct apei_exec_context *ctx,
ctx               206 drivers/acpi/apei/apei-base.c 	struct apei_exec_ins_type *ins_table = ctx->ins_table;
ctx               208 drivers/acpi/apei/apei-base.c 	for (i = 0; i < ctx->entries; i++) {
ctx               209 drivers/acpi/apei/apei-base.c 		entry = ctx->action_table + i;
ctx               213 drivers/acpi/apei/apei-base.c 		if (ins >= ctx->instructions || !ins_table[ins].run) {
ctx               219 drivers/acpi/apei/apei-base.c 		rc = func(ctx, entry, data);
ctx               227 drivers/acpi/apei/apei-base.c static int pre_map_gar_callback(struct apei_exec_context *ctx,
ctx               233 drivers/acpi/apei/apei-base.c 	if (ctx->ins_table[ins].flags & APEI_EXEC_INS_ACCESS_REGISTER)
ctx               243 drivers/acpi/apei/apei-base.c int apei_exec_pre_map_gars(struct apei_exec_context *ctx)
ctx               247 drivers/acpi/apei/apei-base.c 	rc = apei_exec_for_each_entry(ctx, pre_map_gar_callback,
ctx               251 drivers/acpi/apei/apei-base.c 		memcpy(&ctx_unmap, ctx, sizeof(*ctx));
ctx               260 drivers/acpi/apei/apei-base.c static int post_unmap_gar_callback(struct apei_exec_context *ctx,
ctx               266 drivers/acpi/apei/apei-base.c 	if (ctx->ins_table[ins].flags & APEI_EXEC_INS_ACCESS_REGISTER)
ctx               273 drivers/acpi/apei/apei-base.c int apei_exec_post_unmap_gars(struct apei_exec_context *ctx)
ctx               275 drivers/acpi/apei/apei-base.c 	return apei_exec_for_each_entry(ctx, post_unmap_gar_callback,
ctx               705 drivers/acpi/apei/apei-base.c static int collect_res_callback(struct apei_exec_context *ctx,
ctx               716 drivers/acpi/apei/apei-base.c 	if (!(ctx->ins_table[ins].flags & APEI_EXEC_INS_ACCESS_REGISTER))
ctx               739 drivers/acpi/apei/apei-base.c int apei_exec_collect_resources(struct apei_exec_context *ctx,
ctx               742 drivers/acpi/apei/apei-base.c 	return apei_exec_for_each_entry(ctx, collect_res_callback,
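__apei_exec_run() above is a tiny table-driven interpreter: ctx->ip is an instruction pointer into the action table, each matching entry's run() hook may advance it or jump (as erst_exec_goto() further down in this listing does), and any backwards move restarts the scan. A condensed sketch; APEI_EXEC_SET_IP is assumed to be the return code by which an instruction signals it set ctx->ip itself, and the validity checks on entry->instruction are elided:

	ctx->ip = 0;
rewind:
	ip = 0;
	for (i = 0; i < ctx->entries; i++) {
		entry = &ctx->action_table[i];
		if (entry->action != action)
			continue;
		if (ip == ctx->ip) {
			run = ctx->ins_table[entry->instruction].run;
			rc = run(ctx, entry);		/* may rewrite ctx->ip */
			if (rc < 0)
				return rc;
			else if (rc != APEI_EXEC_SET_IP)
				ctx->ip++;
		}
		ip++;
		if (ctx->ip < ip)
			goto rewind;	/* instruction jumped backwards */
	}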
ctx                15 drivers/acpi/apei/apei-internal.h typedef int (*apei_exec_ins_func_t)(struct apei_exec_context *ctx,
ctx                38 drivers/acpi/apei/apei-internal.h void apei_exec_ctx_init(struct apei_exec_context *ctx,
ctx                44 drivers/acpi/apei/apei-internal.h static inline void apei_exec_ctx_set_input(struct apei_exec_context *ctx,
ctx                47 drivers/acpi/apei/apei-internal.h 	ctx->value = input;
ctx                50 drivers/acpi/apei/apei-internal.h static inline u64 apei_exec_ctx_get_output(struct apei_exec_context *ctx)
ctx                52 drivers/acpi/apei/apei-internal.h 	return ctx->value;
ctx                55 drivers/acpi/apei/apei-internal.h int __apei_exec_run(struct apei_exec_context *ctx, u8 action, bool optional);
ctx                57 drivers/acpi/apei/apei-internal.h static inline int apei_exec_run(struct apei_exec_context *ctx, u8 action)
ctx                59 drivers/acpi/apei/apei-internal.h 	return __apei_exec_run(ctx, action, 0);
ctx                63 drivers/acpi/apei/apei-internal.h static inline int apei_exec_run_optional(struct apei_exec_context *ctx, u8 action)
ctx                65 drivers/acpi/apei/apei-internal.h 	return __apei_exec_run(ctx, action, 1);
ctx                85 drivers/acpi/apei/apei-internal.h int apei_exec_read_register(struct apei_exec_context *ctx,
ctx                87 drivers/acpi/apei/apei-internal.h int apei_exec_read_register_value(struct apei_exec_context *ctx,
ctx                89 drivers/acpi/apei/apei-internal.h int apei_exec_write_register(struct apei_exec_context *ctx,
ctx                91 drivers/acpi/apei/apei-internal.h int apei_exec_write_register_value(struct apei_exec_context *ctx,
ctx                93 drivers/acpi/apei/apei-internal.h int apei_exec_noop(struct apei_exec_context *ctx,
ctx                95 drivers/acpi/apei/apei-internal.h int apei_exec_pre_map_gars(struct apei_exec_context *ctx);
ctx                96 drivers/acpi/apei/apei-internal.h int apei_exec_post_unmap_gars(struct apei_exec_context *ctx);
ctx               118 drivers/acpi/apei/apei-internal.h int apei_exec_collect_resources(struct apei_exec_context *ctx,
ctx               140 drivers/acpi/apei/einj.c static void einj_exec_ctx_init(struct apei_exec_context *ctx)
ctx               142 drivers/acpi/apei/einj.c 	apei_exec_ctx_init(ctx, einj_ins_type, ARRAY_SIZE(einj_ins_type),
ctx               148 drivers/acpi/apei/einj.c 	struct apei_exec_context ctx;
ctx               151 drivers/acpi/apei/einj.c 	einj_exec_ctx_init(&ctx);
ctx               152 drivers/acpi/apei/einj.c 	rc = apei_exec_run(&ctx, ACPI_EINJ_GET_ERROR_TYPE);
ctx               155 drivers/acpi/apei/einj.c 	*type = apei_exec_ctx_get_output(&ctx);
ctx               405 drivers/acpi/apei/einj.c 	struct apei_exec_context ctx;
ctx               409 drivers/acpi/apei/einj.c 	einj_exec_ctx_init(&ctx);
ctx               411 drivers/acpi/apei/einj.c 	rc = apei_exec_run_optional(&ctx, ACPI_EINJ_BEGIN_OPERATION);
ctx               414 drivers/acpi/apei/einj.c 	apei_exec_ctx_set_input(&ctx, type);
ctx               463 drivers/acpi/apei/einj.c 		rc = apei_exec_run(&ctx, ACPI_EINJ_SET_ERROR_TYPE);
ctx               472 drivers/acpi/apei/einj.c 	rc = apei_exec_run(&ctx, ACPI_EINJ_EXECUTE_OPERATION);
ctx               476 drivers/acpi/apei/einj.c 		rc = apei_exec_run(&ctx, ACPI_EINJ_CHECK_BUSY_STATUS);
ctx               479 drivers/acpi/apei/einj.c 		val = apei_exec_ctx_get_output(&ctx);
ctx               485 drivers/acpi/apei/einj.c 	rc = apei_exec_run(&ctx, ACPI_EINJ_GET_COMMAND_STATUS);
ctx               488 drivers/acpi/apei/einj.c 	val = apei_exec_ctx_get_output(&ctx);
ctx               492 drivers/acpi/apei/einj.c 	rc = apei_exec_run(&ctx, ACPI_EINJ_GET_TRIGGER_TABLE);
ctx               495 drivers/acpi/apei/einj.c 	trigger_paddr = apei_exec_ctx_get_output(&ctx);
ctx               501 drivers/acpi/apei/einj.c 	rc = apei_exec_run_optional(&ctx, ACPI_EINJ_END_OPERATION);
ctx               673 drivers/acpi/apei/einj.c 	struct apei_exec_context ctx;
ctx               709 drivers/acpi/apei/einj.c 	einj_exec_ctx_init(&ctx);
ctx               710 drivers/acpi/apei/einj.c 	rc = apei_exec_collect_resources(&ctx, &einj_resources);
ctx               722 drivers/acpi/apei/einj.c 	rc = apei_exec_pre_map_gars(&ctx);
ctx               769 drivers/acpi/apei/einj.c 	struct apei_exec_context ctx;
ctx               778 drivers/acpi/apei/einj.c 	einj_exec_ctx_init(&ctx);
ctx               779 drivers/acpi/apei/einj.c 	apei_exec_post_unmap_gars(&ctx);
ctx               112 drivers/acpi/apei/erst.c static int erst_exec_load_var1(struct apei_exec_context *ctx,
ctx               115 drivers/acpi/apei/erst.c 	return __apei_exec_read_register(entry, &ctx->var1);
ctx               118 drivers/acpi/apei/erst.c static int erst_exec_load_var2(struct apei_exec_context *ctx,
ctx               121 drivers/acpi/apei/erst.c 	return __apei_exec_read_register(entry, &ctx->var2);
ctx               124 drivers/acpi/apei/erst.c static int erst_exec_store_var1(struct apei_exec_context *ctx,
ctx               127 drivers/acpi/apei/erst.c 	return __apei_exec_write_register(entry, ctx->var1);
ctx               130 drivers/acpi/apei/erst.c static int erst_exec_add(struct apei_exec_context *ctx,
ctx               133 drivers/acpi/apei/erst.c 	ctx->var1 += ctx->var2;
ctx               137 drivers/acpi/apei/erst.c static int erst_exec_subtract(struct apei_exec_context *ctx,
ctx               140 drivers/acpi/apei/erst.c 	ctx->var1 -= ctx->var2;
ctx               144 drivers/acpi/apei/erst.c static int erst_exec_add_value(struct apei_exec_context *ctx,
ctx               153 drivers/acpi/apei/erst.c 	val += ctx->value;
ctx               158 drivers/acpi/apei/erst.c static int erst_exec_subtract_value(struct apei_exec_context *ctx,
ctx               167 drivers/acpi/apei/erst.c 	val -= ctx->value;
ctx               172 drivers/acpi/apei/erst.c static int erst_exec_stall(struct apei_exec_context *ctx,
ctx               177 drivers/acpi/apei/erst.c 	if (ctx->value > FIRMWARE_MAX_STALL) {
ctx               181 drivers/acpi/apei/erst.c 				   ctx->value);
ctx               184 drivers/acpi/apei/erst.c 		stall_time = ctx->value;
ctx               189 drivers/acpi/apei/erst.c static int erst_exec_stall_while_true(struct apei_exec_context *ctx,
ctx               197 drivers/acpi/apei/erst.c 	if (ctx->var1 > FIRMWARE_MAX_STALL) {
ctx               201 drivers/acpi/apei/erst.c 				   ctx->var1);
ctx               204 drivers/acpi/apei/erst.c 		stall_time = ctx->var1;
ctx               210 drivers/acpi/apei/erst.c 		if (val != ctx->value)
ctx               219 drivers/acpi/apei/erst.c 	struct apei_exec_context *ctx,
ctx               228 drivers/acpi/apei/erst.c 	if (val == ctx->value) {
ctx               229 drivers/acpi/apei/erst.c 		ctx->ip += 2;
ctx               236 drivers/acpi/apei/erst.c static int erst_exec_goto(struct apei_exec_context *ctx,
ctx               239 drivers/acpi/apei/erst.c 	ctx->ip = ctx->value;
ctx               243 drivers/acpi/apei/erst.c static int erst_exec_set_src_address_base(struct apei_exec_context *ctx,
ctx               246 drivers/acpi/apei/erst.c 	return __apei_exec_read_register(entry, &ctx->src_base);
ctx               249 drivers/acpi/apei/erst.c static int erst_exec_set_dst_address_base(struct apei_exec_context *ctx,
ctx               252 drivers/acpi/apei/erst.c 	return __apei_exec_read_register(entry, &ctx->dst_base);
ctx               255 drivers/acpi/apei/erst.c static int erst_exec_move_data(struct apei_exec_context *ctx,
ctx               272 drivers/acpi/apei/erst.c 	src = ioremap(ctx->src_base + offset, ctx->var2);
ctx               275 drivers/acpi/apei/erst.c 	dst = ioremap(ctx->dst_base + offset, ctx->var2);
ctx               281 drivers/acpi/apei/erst.c 	memmove(dst, src, ctx->var2);
ctx               368 drivers/acpi/apei/erst.c static inline void erst_exec_ctx_init(struct apei_exec_context *ctx)
ctx               370 drivers/acpi/apei/erst.c 	apei_exec_ctx_init(ctx, erst_ins_type, ARRAY_SIZE(erst_ins_type),
ctx               376 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               379 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               380 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_ERROR_RANGE);
ctx               383 drivers/acpi/apei/erst.c 	range->base = apei_exec_ctx_get_output(&ctx);
ctx               384 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_ERROR_LENGTH);
ctx               387 drivers/acpi/apei/erst.c 	range->size = apei_exec_ctx_get_output(&ctx);
ctx               388 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_ERROR_ATTRIBUTES);
ctx               391 drivers/acpi/apei/erst.c 	range->attr = apei_exec_ctx_get_output(&ctx);
ctx               398 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               401 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               402 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_RECORD_COUNT);
ctx               405 drivers/acpi/apei/erst.c 	return apei_exec_ctx_get_output(&ctx);
ctx               442 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               445 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               446 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_RECORD_ID);
ctx               449 drivers/acpi/apei/erst.c 	*record_id = apei_exec_ctx_get_output(&ctx);
ctx               623 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               628 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               629 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_BEGIN_WRITE);
ctx               632 drivers/acpi/apei/erst.c 	apei_exec_ctx_set_input(&ctx, offset);
ctx               633 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_SET_RECORD_OFFSET);
ctx               636 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_EXECUTE_OPERATION);
ctx               640 drivers/acpi/apei/erst.c 		rc = apei_exec_run(&ctx, ACPI_ERST_CHECK_BUSY_STATUS);
ctx               643 drivers/acpi/apei/erst.c 		val = apei_exec_ctx_get_output(&ctx);
ctx               649 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_COMMAND_STATUS);
ctx               652 drivers/acpi/apei/erst.c 	val = apei_exec_ctx_get_output(&ctx);
ctx               653 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_END);
ctx               662 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               667 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               668 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_BEGIN_READ);
ctx               671 drivers/acpi/apei/erst.c 	apei_exec_ctx_set_input(&ctx, offset);
ctx               672 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_SET_RECORD_OFFSET);
ctx               675 drivers/acpi/apei/erst.c 	apei_exec_ctx_set_input(&ctx, record_id);
ctx               676 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_SET_RECORD_ID);
ctx               679 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_EXECUTE_OPERATION);
ctx               683 drivers/acpi/apei/erst.c 		rc = apei_exec_run(&ctx, ACPI_ERST_CHECK_BUSY_STATUS);
ctx               686 drivers/acpi/apei/erst.c 		val = apei_exec_ctx_get_output(&ctx);
ctx               692 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_COMMAND_STATUS);
ctx               695 drivers/acpi/apei/erst.c 	val = apei_exec_ctx_get_output(&ctx);
ctx               696 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_END);
ctx               705 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx               710 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx               711 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_BEGIN_CLEAR);
ctx               714 drivers/acpi/apei/erst.c 	apei_exec_ctx_set_input(&ctx, record_id);
ctx               715 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_SET_RECORD_ID);
ctx               718 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_EXECUTE_OPERATION);
ctx               722 drivers/acpi/apei/erst.c 		rc = apei_exec_run(&ctx, ACPI_ERST_CHECK_BUSY_STATUS);
ctx               725 drivers/acpi/apei/erst.c 		val = apei_exec_ctx_get_output(&ctx);
ctx               731 drivers/acpi/apei/erst.c 	rc = apei_exec_run(&ctx, ACPI_ERST_GET_COMMAND_STATUS);
ctx               734 drivers/acpi/apei/erst.c 	val = apei_exec_ctx_get_output(&ctx);
ctx               735 drivers/acpi/apei/erst.c 	rc = apei_exec_run_optional(&ctx, ACPI_ERST_END);
ctx              1097 drivers/acpi/apei/erst.c 	struct apei_exec_context ctx;
ctx              1129 drivers/acpi/apei/erst.c 	erst_exec_ctx_init(&ctx);
ctx              1130 drivers/acpi/apei/erst.c 	rc = apei_exec_collect_resources(&ctx, &erst_resources);
ctx              1136 drivers/acpi/apei/erst.c 	rc = apei_exec_pre_map_gars(&ctx);
ctx              1194 drivers/acpi/apei/erst.c 	apei_exec_post_unmap_gars(&ctx);
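The erst write/read/clear helpers above (and the einj injector before them) share one firmware handshake: kick EXECUTE_OPERATION, poll CHECK_BUSY_STATUS until the busy flag clears, then fetch GET_COMMAND_STATUS. Condensed from the excerpted lines (the poll interval and timeout handling are elided):

	rc = apei_exec_run(&ctx, ACPI_ERST_EXECUTE_OPERATION);
	if (rc)
		return rc;
	do {
		rc = apei_exec_run(&ctx, ACPI_ERST_CHECK_BUSY_STATUS);
		if (rc)
			return rc;
		val = apei_exec_ctx_get_output(&ctx);	/* busy flag */
	} while (val);
	rc = apei_exec_run(&ctx, ACPI_ERST_GET_COMMAND_STATUS);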
ctx               148 drivers/acpi/irq.c 					    struct acpi_irq_parse_one_ctx *ctx)
ctx               152 drivers/acpi/irq.c 	ctx->rc = 0;
ctx               153 drivers/acpi/irq.c 	*ctx->res_flags = acpi_dev_irq_flags(triggering, polarity, shareable);
ctx               154 drivers/acpi/irq.c 	ctx->fwspec->fwnode = fwnode;
ctx               155 drivers/acpi/irq.c 	ctx->fwspec->param[0] = hwirq;
ctx               156 drivers/acpi/irq.c 	ctx->fwspec->param[1] = acpi_dev_get_irq_type(triggering, polarity);
ctx               157 drivers/acpi/irq.c 	ctx->fwspec->param_count = 2;
ctx               181 drivers/acpi/irq.c 	struct acpi_irq_parse_one_ctx *ctx = context;
ctx               189 drivers/acpi/irq.c 		if (ctx->index >= irq->interrupt_count) {
ctx               190 drivers/acpi/irq.c 			ctx->index -= irq->interrupt_count;
ctx               194 drivers/acpi/irq.c 		acpi_irq_parse_one_match(fwnode, irq->interrupts[ctx->index],
ctx               196 drivers/acpi/irq.c 					 irq->shareable, ctx);
ctx               202 drivers/acpi/irq.c 		if (ctx->index >= eirq->interrupt_count) {
ctx               203 drivers/acpi/irq.c 			ctx->index -= eirq->interrupt_count;
ctx               207 drivers/acpi/irq.c 		acpi_irq_parse_one_match(fwnode, eirq->interrupts[ctx->index],
ctx               209 drivers/acpi/irq.c 					 eirq->shareable, ctx);
ctx               235 drivers/acpi/irq.c 	struct acpi_irq_parse_one_ctx ctx = { -EINVAL, index, flags, fwspec };
ctx               237 drivers/acpi/irq.c 	acpi_walk_resources(handle, METHOD_NAME__CRS, acpi_irq_parse_one_cb, &ctx);
ctx               238 drivers/acpi/irq.c 	return ctx.rc;
ctx                29 drivers/acpi/pmic/intel_pmic.c 	struct intel_pmic_regs_handler_ctx ctx;
ctx               220 drivers/acpi/pmic/intel_pmic.c 		opregion->ctx.addr |= (*value64 & 0xff) << 8;
ctx               223 drivers/acpi/pmic/intel_pmic.c 		opregion->ctx.addr |= *value64 & 0xff;
ctx               226 drivers/acpi/pmic/intel_pmic.c 		opregion->ctx.val = *value64 & 0xff;
ctx               230 drivers/acpi/pmic/intel_pmic.c 			result = regmap_write(opregion->regmap, opregion->ctx.addr,
ctx               231 drivers/acpi/pmic/intel_pmic.c 					      opregion->ctx.val);
ctx               233 drivers/acpi/pmic/intel_pmic.c 			result = regmap_read(opregion->regmap, opregion->ctx.addr,
ctx               234 drivers/acpi/pmic/intel_pmic.c 					     &opregion->ctx.val);
ctx               236 drivers/acpi/pmic/intel_pmic.c 				*value64 = opregion->ctx.val;
ctx               238 drivers/acpi/pmic/intel_pmic.c 		memset(&opregion->ctx, 0x00, sizeof(opregion->ctx));
ctx               123 drivers/amba/tegra-ahb.c 	u32		ctx[0];
ctx               161 drivers/amba/tegra-ahb.c 		ahb->ctx[i] = gizmo_readl(ahb, tegra_ahb_gizmo[i]);
ctx               171 drivers/amba/tegra-ahb.c 		gizmo_writel(ahb, ahb->ctx[i], tegra_ahb_gizmo[i]);
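The u32 ctx[0] member above is an old-style flexible array used as a suspend/resume scratchpad: the driver allocates one trailing slot per register in the gizmo table and loops over it in both directions (the loop bound is assumed from the table name visible in the excerpt):

	/* suspend: snapshot every gizmo register into the trailing array */
	for (i = 0; i < ARRAY_SIZE(tegra_ahb_gizmo); i++)
		ahb->ctx[i] = gizmo_readl(ahb, tegra_ahb_gizmo[i]);

	/* resume: write them back in the same order */
	for (i = 0; i < ARRAY_SIZE(tegra_ahb_gizmo); i++)
		gizmo_writel(ahb, ahb->ctx[i], tegra_ahb_gizmo[i]);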
ctx                91 drivers/ata/ahci_xgene.c static int xgene_ahci_init_memram(struct xgene_ahci_context *ctx)
ctx                93 drivers/ata/ahci_xgene.c 	dev_dbg(ctx->dev, "Release memory from shutdown\n");
ctx                94 drivers/ata/ahci_xgene.c 	writel(0x0, ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN);
ctx                95 drivers/ata/ahci_xgene.c 	readl(ctx->csr_diag + CFG_MEM_RAM_SHUTDOWN); /* Force a barrier */
ctx                97 drivers/ata/ahci_xgene.c 	if (readl(ctx->csr_diag + BLOCK_MEM_RDY) != 0xFFFFFFFF) {
ctx                98 drivers/ata/ahci_xgene.c 		dev_err(ctx->dev, "failed to release memory from shutdown\n");
ctx               193 drivers/ata/ahci_xgene.c 	struct xgene_ahci_context *ctx = hpriv->plat_data;
ctx               202 drivers/ata/ahci_xgene.c 	if (ctx->class[ap->port_no] == ATA_DEV_PMP) {
ctx               209 drivers/ata/ahci_xgene.c 	if (unlikely((ctx->last_cmd[ap->port_no] == ATA_CMD_ID_ATA) ||
ctx               210 drivers/ata/ahci_xgene.c 	    (ctx->last_cmd[ap->port_no] == ATA_CMD_PACKET) ||
ctx               211 drivers/ata/ahci_xgene.c 	    (ctx->last_cmd[ap->port_no] == ATA_CMD_SMART)))
ctx               217 drivers/ata/ahci_xgene.c 	ctx->last_cmd[ap->port_no] = qc->tf.command;
ctx               222 drivers/ata/ahci_xgene.c static bool xgene_ahci_is_memram_inited(struct xgene_ahci_context *ctx)
ctx               224 drivers/ata/ahci_xgene.c 	void __iomem *diagcsr = ctx->csr_diag;
ctx               267 drivers/ata/ahci_xgene.c static void xgene_ahci_set_phy_cfg(struct xgene_ahci_context *ctx, int channel)
ctx               269 drivers/ata/ahci_xgene.c 	void __iomem *mmio = ctx->hpriv->mmio;
ctx               272 drivers/ata/ahci_xgene.c 	dev_dbg(ctx->dev, "port configure mmio 0x%p channel %d\n",
ctx               356 drivers/ata/ahci_xgene.c 	struct xgene_ahci_context *ctx = hpriv->plat_data;
ctx               375 drivers/ata/ahci_xgene.c 				dev_warn(ctx->dev, "link has error\n");
ctx               501 drivers/ata/ahci_xgene.c 	struct xgene_ahci_context *ctx = hpriv->plat_data;
ctx               523 drivers/ata/ahci_xgene.c 	ctx->class[ap->port_no] = *class;
ctx               653 drivers/ata/ahci_xgene.c 	struct xgene_ahci_context *ctx = hpriv->plat_data;
ctx               659 drivers/ata/ahci_xgene.c 	rc = xgene_ahci_init_memram(ctx);
ctx               664 drivers/ata/ahci_xgene.c 		xgene_ahci_set_phy_cfg(ctx, i);
ctx               669 drivers/ata/ahci_xgene.c 	writel(0, ctx->csr_core + INTSTATUSMASK);
ctx               670 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_core + INTSTATUSMASK); /* Force a barrier */
ctx               671 drivers/ata/ahci_xgene.c 	dev_dbg(ctx->dev, "top level interrupt mask 0x%X value 0x%08X\n",
ctx               674 drivers/ata/ahci_xgene.c 	writel(0x0, ctx->csr_core + ERRINTSTATUSMASK);
ctx               675 drivers/ata/ahci_xgene.c 	readl(ctx->csr_core + ERRINTSTATUSMASK); /* Force a barrier */
ctx               676 drivers/ata/ahci_xgene.c 	writel(0x0, ctx->csr_axi + INT_SLV_TMOMASK);
ctx               677 drivers/ata/ahci_xgene.c 	readl(ctx->csr_axi + INT_SLV_TMOMASK);
ctx               680 drivers/ata/ahci_xgene.c 	writel(0xffffffff, ctx->csr_core + SLVRDERRATTRIBUTES);
ctx               681 drivers/ata/ahci_xgene.c 	writel(0xffffffff, ctx->csr_core + SLVWRERRATTRIBUTES);
ctx               682 drivers/ata/ahci_xgene.c 	writel(0xffffffff, ctx->csr_core + MSTRDERRATTRIBUTES);
ctx               683 drivers/ata/ahci_xgene.c 	writel(0xffffffff, ctx->csr_core + MSTWRERRATTRIBUTES);
ctx               686 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_core + BUSCTLREG);
ctx               689 drivers/ata/ahci_xgene.c 	writel(val, ctx->csr_core + BUSCTLREG);
ctx               691 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_core + IOFMSTRWAUX);
ctx               694 drivers/ata/ahci_xgene.c 	writel(val, ctx->csr_core + IOFMSTRWAUX);
ctx               695 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_core + IOFMSTRWAUX);
ctx               696 drivers/ata/ahci_xgene.c 	dev_dbg(ctx->dev, "coherency 0x%X value 0x%08X\n",
ctx               702 drivers/ata/ahci_xgene.c static int xgene_ahci_mux_select(struct xgene_ahci_context *ctx)
ctx               707 drivers/ata/ahci_xgene.c 	if (!ctx->csr_mux)
ctx               710 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_mux + SATA_ENET_CONFIG_REG);
ctx               712 drivers/ata/ahci_xgene.c 	writel(val, ctx->csr_mux + SATA_ENET_CONFIG_REG);
ctx               713 drivers/ata/ahci_xgene.c 	val = readl(ctx->csr_mux + SATA_ENET_CONFIG_REG);
ctx               741 drivers/ata/ahci_xgene.c 	struct xgene_ahci_context *ctx;
ctx               753 drivers/ata/ahci_xgene.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               754 drivers/ata/ahci_xgene.c 	if (!ctx)
ctx               757 drivers/ata/ahci_xgene.c 	hpriv->plat_data = ctx;
ctx               758 drivers/ata/ahci_xgene.c 	ctx->hpriv = hpriv;
ctx               759 drivers/ata/ahci_xgene.c 	ctx->dev = dev;
ctx               763 drivers/ata/ahci_xgene.c 	ctx->csr_core = devm_ioremap_resource(dev, res);
ctx               764 drivers/ata/ahci_xgene.c 	if (IS_ERR(ctx->csr_core))
ctx               765 drivers/ata/ahci_xgene.c 		return PTR_ERR(ctx->csr_core);
ctx               769 drivers/ata/ahci_xgene.c 	ctx->csr_diag = devm_ioremap_resource(dev, res);
ctx               770 drivers/ata/ahci_xgene.c 	if (IS_ERR(ctx->csr_diag))
ctx               771 drivers/ata/ahci_xgene.c 		return PTR_ERR(ctx->csr_diag);
ctx               775 drivers/ata/ahci_xgene.c 	ctx->csr_axi = devm_ioremap_resource(dev, res);
ctx               776 drivers/ata/ahci_xgene.c 	if (IS_ERR(ctx->csr_axi))
ctx               777 drivers/ata/ahci_xgene.c 		return PTR_ERR(ctx->csr_axi);
ctx               786 drivers/ata/ahci_xgene.c 		ctx->csr_mux = csr;
ctx               820 drivers/ata/ahci_xgene.c 	dev_dbg(dev, "VAddr 0x%p Mmio VAddr 0x%p\n", ctx->csr_core,
ctx               824 drivers/ata/ahci_xgene.c 	if ((rc = xgene_ahci_mux_select(ctx))) {
ctx               829 drivers/ata/ahci_xgene.c 	if (xgene_ahci_is_memram_inited(ctx)) {
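
The ahci_xgene probe lines follow the standard devm pattern: the context is allocated with devm_kzalloc, each CSR window is mapped with devm_ioremap_resource, and every IS_ERR check can return immediately because devm unwinds the earlier steps. A sketch of that shape, with hypothetical "foo" names standing in for the driver's:

	#include <linux/platform_device.h>
	#include <linux/io.h>

	struct foo_ahci_ctx {		/* illustrative, not the driver's type */
		struct device *dev;
		void __iomem *csr_core;
	};

	static int foo_ahci_probe(struct platform_device *pdev)
	{
		struct foo_ahci_ctx *ctx;
		struct resource *res;

		/* devm allocation: freed automatically on failure or unbind */
		ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;
		ctx->dev = &pdev->dev;

		/* each CSR window is mapped the same way; errors just return */
		res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
		ctx->csr_core = devm_ioremap_resource(&pdev->dev, res);
		if (IS_ERR(ctx->csr_core))
			return PTR_ERR(ctx->csr_core);

		platform_set_drvdata(pdev, ctx);
		return 0;
	}
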
ctx                30 drivers/auxdisplay/img-ascii-lcd.c 	void (*update)(struct img_ascii_lcd_ctx *ctx);
ctx                67 drivers/auxdisplay/img-ascii-lcd.c static void boston_update(struct img_ascii_lcd_ctx *ctx)
ctx                72 drivers/auxdisplay/img-ascii-lcd.c 	val = *((u64 *)&ctx->curr[0]);
ctx                73 drivers/auxdisplay/img-ascii-lcd.c 	__raw_writeq(val, ctx->base);
ctx                75 drivers/auxdisplay/img-ascii-lcd.c 	val = *((u32 *)&ctx->curr[0]);
ctx                76 drivers/auxdisplay/img-ascii-lcd.c 	__raw_writel(val, ctx->base);
ctx                77 drivers/auxdisplay/img-ascii-lcd.c 	val = *((u32 *)&ctx->curr[4]);
ctx                78 drivers/auxdisplay/img-ascii-lcd.c 	__raw_writel(val, ctx->base + 4);
ctx                93 drivers/auxdisplay/img-ascii-lcd.c static void malta_update(struct img_ascii_lcd_ctx *ctx)
ctx                98 drivers/auxdisplay/img-ascii-lcd.c 	for (i = 0; i < ctx->cfg->num_chars; i++) {
ctx                99 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_write(ctx->regmap,
ctx               100 drivers/auxdisplay/img-ascii-lcd.c 				   ctx->offset + (i * 8), ctx->curr[i]);
ctx               129 drivers/auxdisplay/img-ascii-lcd.c static int sead3_wait_sm_idle(struct img_ascii_lcd_ctx *ctx)
ctx               135 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_read(ctx->regmap,
ctx               136 drivers/auxdisplay/img-ascii-lcd.c 				  ctx->offset + SEAD3_REG_CPLD_STATUS,
ctx               146 drivers/auxdisplay/img-ascii-lcd.c static int sead3_wait_lcd_idle(struct img_ascii_lcd_ctx *ctx)
ctx               151 drivers/auxdisplay/img-ascii-lcd.c 	err = sead3_wait_sm_idle(ctx);
ctx               156 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_read(ctx->regmap,
ctx               157 drivers/auxdisplay/img-ascii-lcd.c 				  ctx->offset + SEAD3_REG_LCD_CTRL,
ctx               162 drivers/auxdisplay/img-ascii-lcd.c 		err = sead3_wait_sm_idle(ctx);
ctx               166 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_read(ctx->regmap,
ctx               167 drivers/auxdisplay/img-ascii-lcd.c 				  ctx->offset + SEAD3_REG_CPLD_DATA,
ctx               176 drivers/auxdisplay/img-ascii-lcd.c static void sead3_update(struct img_ascii_lcd_ctx *ctx)
ctx               181 drivers/auxdisplay/img-ascii-lcd.c 	for (i = 0; i < ctx->cfg->num_chars; i++) {
ctx               182 drivers/auxdisplay/img-ascii-lcd.c 		err = sead3_wait_lcd_idle(ctx);
ctx               186 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_write(ctx->regmap,
ctx               187 drivers/auxdisplay/img-ascii-lcd.c 				   ctx->offset + SEAD3_REG_LCD_CTRL,
ctx               192 drivers/auxdisplay/img-ascii-lcd.c 		err = sead3_wait_lcd_idle(ctx);
ctx               196 drivers/auxdisplay/img-ascii-lcd.c 		err = regmap_write(ctx->regmap,
ctx               197 drivers/auxdisplay/img-ascii-lcd.c 				   ctx->offset + SEAD3_REG_LCD_DATA,
ctx               198 drivers/auxdisplay/img-ascii-lcd.c 				   ctx->curr[i]);
ctx               230 drivers/auxdisplay/img-ascii-lcd.c 	struct img_ascii_lcd_ctx *ctx = from_timer(ctx, t, timer);
ctx               231 drivers/auxdisplay/img-ascii-lcd.c 	unsigned int i, ch = ctx->scroll_pos;
ctx               232 drivers/auxdisplay/img-ascii-lcd.c 	unsigned int num_chars = ctx->cfg->num_chars;
ctx               237 drivers/auxdisplay/img-ascii-lcd.c 		for (; i < num_chars && ch < ctx->message_len; i++, ch++)
ctx               238 drivers/auxdisplay/img-ascii-lcd.c 			ctx->curr[i] = ctx->message[ch];
ctx               245 drivers/auxdisplay/img-ascii-lcd.c 	ctx->cfg->update(ctx);
ctx               248 drivers/auxdisplay/img-ascii-lcd.c 	ctx->scroll_pos++;
ctx               249 drivers/auxdisplay/img-ascii-lcd.c 	ctx->scroll_pos %= ctx->message_len;
ctx               252 drivers/auxdisplay/img-ascii-lcd.c 	if (ctx->message_len > ctx->cfg->num_chars)
ctx               253 drivers/auxdisplay/img-ascii-lcd.c 		mod_timer(&ctx->timer, jiffies + ctx->scroll_rate);
ctx               268 drivers/auxdisplay/img-ascii-lcd.c static int img_ascii_lcd_display(struct img_ascii_lcd_ctx *ctx,
ctx               274 drivers/auxdisplay/img-ascii-lcd.c 	del_timer_sync(&ctx->timer);
ctx               283 drivers/auxdisplay/img-ascii-lcd.c 	new_msg = devm_kmalloc(&ctx->pdev->dev, count + 1, GFP_KERNEL);
ctx               290 drivers/auxdisplay/img-ascii-lcd.c 	if (ctx->message)
ctx               291 drivers/auxdisplay/img-ascii-lcd.c 		devm_kfree(&ctx->pdev->dev, ctx->message);
ctx               293 drivers/auxdisplay/img-ascii-lcd.c 	ctx->message = new_msg;
ctx               294 drivers/auxdisplay/img-ascii-lcd.c 	ctx->message_len = count;
ctx               295 drivers/auxdisplay/img-ascii-lcd.c 	ctx->scroll_pos = 0;
ctx               298 drivers/auxdisplay/img-ascii-lcd.c 	img_ascii_lcd_scroll(&ctx->timer);
ctx               317 drivers/auxdisplay/img-ascii-lcd.c 	struct img_ascii_lcd_ctx *ctx = dev_get_drvdata(dev);
ctx               319 drivers/auxdisplay/img-ascii-lcd.c 	return sprintf(buf, "%s\n", ctx->message);
ctx               336 drivers/auxdisplay/img-ascii-lcd.c 	struct img_ascii_lcd_ctx *ctx = dev_get_drvdata(dev);
ctx               339 drivers/auxdisplay/img-ascii-lcd.c 	err = img_ascii_lcd_display(ctx, buf, count);
ctx               358 drivers/auxdisplay/img-ascii-lcd.c 	struct img_ascii_lcd_ctx *ctx;
ctx               367 drivers/auxdisplay/img-ascii-lcd.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx) + cfg->num_chars,
ctx               369 drivers/auxdisplay/img-ascii-lcd.c 	if (!ctx)
ctx               373 drivers/auxdisplay/img-ascii-lcd.c 		ctx->regmap = syscon_node_to_regmap(pdev->dev.parent->of_node);
ctx               374 drivers/auxdisplay/img-ascii-lcd.c 		if (IS_ERR(ctx->regmap))
ctx               375 drivers/auxdisplay/img-ascii-lcd.c 			return PTR_ERR(ctx->regmap);
ctx               378 drivers/auxdisplay/img-ascii-lcd.c 					 &ctx->offset))
ctx               382 drivers/auxdisplay/img-ascii-lcd.c 		ctx->base = devm_ioremap_resource(&pdev->dev, res);
ctx               383 drivers/auxdisplay/img-ascii-lcd.c 		if (IS_ERR(ctx->base))
ctx               384 drivers/auxdisplay/img-ascii-lcd.c 			return PTR_ERR(ctx->base);
ctx               387 drivers/auxdisplay/img-ascii-lcd.c 	ctx->pdev = pdev;
ctx               388 drivers/auxdisplay/img-ascii-lcd.c 	ctx->cfg = cfg;
ctx               389 drivers/auxdisplay/img-ascii-lcd.c 	ctx->message = NULL;
ctx               390 drivers/auxdisplay/img-ascii-lcd.c 	ctx->scroll_pos = 0;
ctx               391 drivers/auxdisplay/img-ascii-lcd.c 	ctx->scroll_rate = HZ / 2;
ctx               394 drivers/auxdisplay/img-ascii-lcd.c 	timer_setup(&ctx->timer, img_ascii_lcd_scroll, 0);
ctx               396 drivers/auxdisplay/img-ascii-lcd.c 	platform_set_drvdata(pdev, ctx);
ctx               399 drivers/auxdisplay/img-ascii-lcd.c 	err = img_ascii_lcd_display(ctx, "Linux " UTS_RELEASE "       ", -1);
ctx               409 drivers/auxdisplay/img-ascii-lcd.c 	del_timer_sync(&ctx->timer);
ctx               424 drivers/auxdisplay/img-ascii-lcd.c 	struct img_ascii_lcd_ctx *ctx = platform_get_drvdata(pdev);
ctx               427 drivers/auxdisplay/img-ascii-lcd.c 	del_timer_sync(&ctx->timer);
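
The img-ascii-lcd lines use the modern timer API: timer_setup() binds the callback, from_timer() recovers the enclosing context from the timer_list embedded in it, and the callback re-arms itself with mod_timer() while scrolling is needed. A minimal sketch of that self-rearming pattern (lcd_ctx and the handler names are illustrative):

	#include <linux/timer.h>
	#include <linux/jiffies.h>

	struct lcd_ctx {		/* illustrative, not the driver's type */
		struct timer_list timer;
		unsigned long scroll_rate;
	};

	static void lcd_scroll(struct timer_list *t)
	{
		/* recover the context from the embedded timer_list */
		struct lcd_ctx *ctx = from_timer(ctx, t, timer);

		/* ... redraw the display here ... */

		/* re-arm: the timer keeps itself alive between updates */
		mod_timer(&ctx->timer, jiffies + ctx->scroll_rate);
	}

	static void lcd_start(struct lcd_ctx *ctx)
	{
		timer_setup(&ctx->timer, lcd_scroll, 0);
		ctx->scroll_rate = HZ / 2;
		mod_timer(&ctx->timer, jiffies + ctx->scroll_rate);
	}
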
ctx                23 drivers/base/regmap/regmap-mmio.c 	void (*reg_write)(struct regmap_mmio_context *ctx,
ctx                25 drivers/base/regmap/regmap-mmio.c 	unsigned int (*reg_read)(struct regmap_mmio_context *ctx,
ctx                71 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write8(struct regmap_mmio_context *ctx,
ctx                75 drivers/base/regmap/regmap-mmio.c 	writeb(val, ctx->regs + reg);
ctx                78 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write16le(struct regmap_mmio_context *ctx,
ctx                82 drivers/base/regmap/regmap-mmio.c 	writew(val, ctx->regs + reg);
ctx                85 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write16be(struct regmap_mmio_context *ctx,
ctx                89 drivers/base/regmap/regmap-mmio.c 	iowrite16be(val, ctx->regs + reg);
ctx                92 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write32le(struct regmap_mmio_context *ctx,
ctx                96 drivers/base/regmap/regmap-mmio.c 	writel(val, ctx->regs + reg);
ctx                99 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write32be(struct regmap_mmio_context *ctx,
ctx               103 drivers/base/regmap/regmap-mmio.c 	iowrite32be(val, ctx->regs + reg);
ctx               107 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_write64le(struct regmap_mmio_context *ctx,
ctx               111 drivers/base/regmap/regmap-mmio.c 	writeq(val, ctx->regs + reg);
ctx               117 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx = context;
ctx               120 drivers/base/regmap/regmap-mmio.c 	if (!IS_ERR(ctx->clk)) {
ctx               121 drivers/base/regmap/regmap-mmio.c 		ret = clk_enable(ctx->clk);
ctx               126 drivers/base/regmap/regmap-mmio.c 	ctx->reg_write(ctx, reg, val);
ctx               128 drivers/base/regmap/regmap-mmio.c 	if (!IS_ERR(ctx->clk))
ctx               129 drivers/base/regmap/regmap-mmio.c 		clk_disable(ctx->clk);
ctx               134 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read8(struct regmap_mmio_context *ctx,
ctx               137 drivers/base/regmap/regmap-mmio.c 	return readb(ctx->regs + reg);
ctx               140 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read16le(struct regmap_mmio_context *ctx,
ctx               143 drivers/base/regmap/regmap-mmio.c 	return readw(ctx->regs + reg);
ctx               146 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read16be(struct regmap_mmio_context *ctx,
ctx               149 drivers/base/regmap/regmap-mmio.c 	return ioread16be(ctx->regs + reg);
ctx               152 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read32le(struct regmap_mmio_context *ctx,
ctx               155 drivers/base/regmap/regmap-mmio.c 	return readl(ctx->regs + reg);
ctx               158 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read32be(struct regmap_mmio_context *ctx,
ctx               161 drivers/base/regmap/regmap-mmio.c 	return ioread32be(ctx->regs + reg);
ctx               165 drivers/base/regmap/regmap-mmio.c static unsigned int regmap_mmio_read64le(struct regmap_mmio_context *ctx,
ctx               168 drivers/base/regmap/regmap-mmio.c 	return readq(ctx->regs + reg);
ctx               174 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx = context;
ctx               177 drivers/base/regmap/regmap-mmio.c 	if (!IS_ERR(ctx->clk)) {
ctx               178 drivers/base/regmap/regmap-mmio.c 		ret = clk_enable(ctx->clk);
ctx               183 drivers/base/regmap/regmap-mmio.c 	*val = ctx->reg_read(ctx, reg);
ctx               185 drivers/base/regmap/regmap-mmio.c 	if (!IS_ERR(ctx->clk))
ctx               186 drivers/base/regmap/regmap-mmio.c 		clk_disable(ctx->clk);
ctx               193 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx = context;
ctx               195 drivers/base/regmap/regmap-mmio.c 	if (!IS_ERR(ctx->clk)) {
ctx               196 drivers/base/regmap/regmap-mmio.c 		clk_unprepare(ctx->clk);
ctx               197 drivers/base/regmap/regmap-mmio.c 		if (!ctx->attached_clk)
ctx               198 drivers/base/regmap/regmap-mmio.c 			clk_put(ctx->clk);
ctx               216 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx;
ctx               234 drivers/base/regmap/regmap-mmio.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               235 drivers/base/regmap/regmap-mmio.c 	if (!ctx)
ctx               238 drivers/base/regmap/regmap-mmio.c 	ctx->regs = regs;
ctx               239 drivers/base/regmap/regmap-mmio.c 	ctx->val_bytes = config->val_bits / 8;
ctx               240 drivers/base/regmap/regmap-mmio.c 	ctx->clk = ERR_PTR(-ENODEV);
ctx               250 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read8;
ctx               251 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write8;
ctx               254 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read16le;
ctx               255 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write16le;
ctx               258 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read32le;
ctx               259 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write32le;
ctx               263 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read64le;
ctx               264 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write64le;
ctx               278 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read8;
ctx               279 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write8;
ctx               282 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read16be;
ctx               283 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write16be;
ctx               286 drivers/base/regmap/regmap-mmio.c 			ctx->reg_read = regmap_mmio_read32be;
ctx               287 drivers/base/regmap/regmap-mmio.c 			ctx->reg_write = regmap_mmio_write32be;
ctx               300 drivers/base/regmap/regmap-mmio.c 		return ctx;
ctx               302 drivers/base/regmap/regmap-mmio.c 	ctx->clk = clk_get(dev, clk_id);
ctx               303 drivers/base/regmap/regmap-mmio.c 	if (IS_ERR(ctx->clk)) {
ctx               304 drivers/base/regmap/regmap-mmio.c 		ret = PTR_ERR(ctx->clk);
ctx               308 drivers/base/regmap/regmap-mmio.c 	ret = clk_prepare(ctx->clk);
ctx               310 drivers/base/regmap/regmap-mmio.c 		clk_put(ctx->clk);
ctx               314 drivers/base/regmap/regmap-mmio.c 	return ctx;
ctx               317 drivers/base/regmap/regmap-mmio.c 	kfree(ctx);
ctx               328 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx;
ctx               330 drivers/base/regmap/regmap-mmio.c 	ctx = regmap_mmio_gen_context(dev, clk_id, regs, config);
ctx               331 drivers/base/regmap/regmap-mmio.c 	if (IS_ERR(ctx))
ctx               332 drivers/base/regmap/regmap-mmio.c 		return ERR_CAST(ctx);
ctx               334 drivers/base/regmap/regmap-mmio.c 	return __regmap_init(dev, &regmap_mmio, ctx, config,
ctx               346 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx;
ctx               348 drivers/base/regmap/regmap-mmio.c 	ctx = regmap_mmio_gen_context(dev, clk_id, regs, config);
ctx               349 drivers/base/regmap/regmap-mmio.c 	if (IS_ERR(ctx))
ctx               350 drivers/base/regmap/regmap-mmio.c 		return ERR_CAST(ctx);
ctx               352 drivers/base/regmap/regmap-mmio.c 	return __devm_regmap_init(dev, &regmap_mmio, ctx, config,
ctx               359 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx = map->bus_context;
ctx               361 drivers/base/regmap/regmap-mmio.c 	ctx->clk = clk;
ctx               362 drivers/base/regmap/regmap-mmio.c 	ctx->attached_clk = true;
ctx               364 drivers/base/regmap/regmap-mmio.c 	return clk_prepare(ctx->clk);
ctx               370 drivers/base/regmap/regmap-mmio.c 	struct regmap_mmio_context *ctx = map->bus_context;
ctx               372 drivers/base/regmap/regmap-mmio.c 	clk_unprepare(ctx->clk);
ctx               374 drivers/base/regmap/regmap-mmio.c 	ctx->attached_clk = false;
ctx               375 drivers/base/regmap/regmap-mmio.c 	ctx->clk = NULL;
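
The regmap-mmio lines show a dispatch-by-context pattern: the bus context carries reg_read/reg_write function pointers, selected once at init time from val_bits and endianness, so the hot path never re-checks either. A minimal sketch of that dispatch, assuming only the 32-bit little-endian case (mmio_ctx and the helper names are illustrative):

	#include <linux/io.h>

	struct mmio_ctx {		/* illustrative, not the driver's type */
		void __iomem *regs;
		void (*reg_write)(struct mmio_ctx *ctx, unsigned int reg,
				  unsigned int val);
		unsigned int (*reg_read)(struct mmio_ctx *ctx,
					 unsigned int reg);
	};

	static void mmio_write32le(struct mmio_ctx *ctx, unsigned int reg,
				   unsigned int val)
	{
		writel(val, ctx->regs + reg);
	}

	static unsigned int mmio_read32le(struct mmio_ctx *ctx,
					  unsigned int reg)
	{
		return readl(ctx->regs + reg);
	}

	static void mmio_ctx_init(struct mmio_ctx *ctx, void __iomem *regs)
	{
		ctx->regs = regs;
		/* chosen once from val_bits/endianness; hot path just calls */
		ctx->reg_read = mmio_read32le;
		ctx->reg_write = mmio_write32le;
	}
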
ctx               928 drivers/block/drbd/drbd_bitmap.c 	struct drbd_bm_aio_ctx *ctx = container_of(kref, struct drbd_bm_aio_ctx, kref);
ctx               931 drivers/block/drbd/drbd_bitmap.c 	spin_lock_irqsave(&ctx->device->resource->req_lock, flags);
ctx               932 drivers/block/drbd/drbd_bitmap.c 	list_del(&ctx->list);
ctx               933 drivers/block/drbd/drbd_bitmap.c 	spin_unlock_irqrestore(&ctx->device->resource->req_lock, flags);
ctx               934 drivers/block/drbd/drbd_bitmap.c 	put_ldev(ctx->device);
ctx               935 drivers/block/drbd/drbd_bitmap.c 	kfree(ctx);
ctx               941 drivers/block/drbd/drbd_bitmap.c 	struct drbd_bm_aio_ctx *ctx = bio->bi_private;
ctx               942 drivers/block/drbd/drbd_bitmap.c 	struct drbd_device *device = ctx->device;
ctx               946 drivers/block/drbd/drbd_bitmap.c 	if ((ctx->flags & BM_AIO_COPY_PAGES) == 0 &&
ctx               953 drivers/block/drbd/drbd_bitmap.c 		ctx->error = blk_status_to_errno(bio->bi_status);
ctx               967 drivers/block/drbd/drbd_bitmap.c 	if (ctx->flags & BM_AIO_COPY_PAGES)
ctx               972 drivers/block/drbd/drbd_bitmap.c 	if (atomic_dec_and_test(&ctx->in_flight)) {
ctx               973 drivers/block/drbd/drbd_bitmap.c 		ctx->done = 1;
ctx               975 drivers/block/drbd/drbd_bitmap.c 		kref_put(&ctx->kref, &drbd_bm_aio_ctx_destroy);
ctx               979 drivers/block/drbd/drbd_bitmap.c static void bm_page_io_async(struct drbd_bm_aio_ctx *ctx, int page_nr) __must_hold(local)
ctx               982 drivers/block/drbd/drbd_bitmap.c 	struct drbd_device *device = ctx->device;
ctx               986 drivers/block/drbd/drbd_bitmap.c 	unsigned int op = (ctx->flags & BM_AIO_READ) ? REQ_OP_READ : REQ_OP_WRITE;
ctx              1004 drivers/block/drbd/drbd_bitmap.c 	if (ctx->flags & BM_AIO_COPY_PAGES) {
ctx              1016 drivers/block/drbd/drbd_bitmap.c 	bio->bi_private = ctx;
ctx              1035 drivers/block/drbd/drbd_bitmap.c 	struct drbd_bm_aio_ctx *ctx;
ctx              1051 drivers/block/drbd/drbd_bitmap.c 	ctx = kmalloc(sizeof(struct drbd_bm_aio_ctx), GFP_NOIO);
ctx              1052 drivers/block/drbd/drbd_bitmap.c 	if (!ctx)
ctx              1055 drivers/block/drbd/drbd_bitmap.c 	*ctx = (struct drbd_bm_aio_ctx) {
ctx              1067 drivers/block/drbd/drbd_bitmap.c 		kfree(ctx);
ctx              1073 drivers/block/drbd/drbd_bitmap.c 	if (0 == (ctx->flags & ~BM_AIO_READ))
ctx              1077 drivers/block/drbd/drbd_bitmap.c 	list_add_tail(&ctx->list, &device->pending_bitmap_io);
ctx              1088 drivers/block/drbd/drbd_bitmap.c 			atomic_inc(&ctx->in_flight);
ctx              1089 drivers/block/drbd/drbd_bitmap.c 			bm_page_io_async(ctx, i);
ctx              1107 drivers/block/drbd/drbd_bitmap.c 			atomic_inc(&ctx->in_flight);
ctx              1108 drivers/block/drbd/drbd_bitmap.c 			bm_page_io_async(ctx, i);
ctx              1128 drivers/block/drbd/drbd_bitmap.c 			atomic_inc(&ctx->in_flight);
ctx              1129 drivers/block/drbd/drbd_bitmap.c 			bm_page_io_async(ctx, i);
ctx              1143 drivers/block/drbd/drbd_bitmap.c 	if (!atomic_dec_and_test(&ctx->in_flight))
ctx              1144 drivers/block/drbd/drbd_bitmap.c 		wait_until_done_or_force_detached(device, device->ldev, &ctx->done);
ctx              1146 drivers/block/drbd/drbd_bitmap.c 		kref_put(&ctx->kref, &drbd_bm_aio_ctx_destroy);
ctx              1158 drivers/block/drbd/drbd_bitmap.c 	if (ctx->error) {
ctx              1164 drivers/block/drbd/drbd_bitmap.c 	if (atomic_read(&ctx->in_flight))
ctx              1179 drivers/block/drbd/drbd_bitmap.c 	kref_put(&ctx->kref, &drbd_bm_aio_ctx_destroy);
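
The drbd_bitmap lines combine two counters: a kref owning the context's lifetime and an in_flight atomic tracking outstanding bios, with the last completion dropping a kref reference. A minimal sketch of the completion side of that pattern (bm_aio_ctx here is illustrative and much smaller than the driver's struct):

	#include <linux/kref.h>
	#include <linux/atomic.h>
	#include <linux/slab.h>

	struct bm_aio_ctx {		/* illustrative, not the driver's type */
		struct kref kref;
		atomic_t in_flight;
		int error;
	};

	static void bm_aio_ctx_destroy(struct kref *kref)
	{
		struct bm_aio_ctx *ctx =
			container_of(kref, struct bm_aio_ctx, kref);

		kfree(ctx);
	}

	/* per-bio completion: record the first error, and when the last
	 * in-flight bio finishes, drop the completions' kref reference */
	static void bm_aio_done(struct bm_aio_ctx *ctx, int err)
	{
		if (err)
			ctx->error = err;
		if (atomic_dec_and_test(&ctx->in_flight))
			kref_put(&ctx->kref, bm_aio_ctx_destroy);
	}
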
ctx               193 drivers/block/drbd/drbd_debugfs.c 	struct drbd_bm_aio_ctx *ctx;
ctx               198 drivers/block/drbd/drbd_debugfs.c 	ctx = list_first_entry_or_null(&device->pending_bitmap_io, struct drbd_bm_aio_ctx, list);
ctx               199 drivers/block/drbd/drbd_debugfs.c 	if (ctx && ctx->done)
ctx               200 drivers/block/drbd/drbd_debugfs.c 		ctx = NULL;
ctx               201 drivers/block/drbd/drbd_debugfs.c 	if (ctx) {
ctx               202 drivers/block/drbd/drbd_debugfs.c 		start_jif = ctx->start_jif;
ctx               203 drivers/block/drbd/drbd_debugfs.c 		in_flight = atomic_read(&ctx->in_flight);
ctx               204 drivers/block/drbd/drbd_debugfs.c 		flags = ctx->flags;
ctx               207 drivers/block/drbd/drbd_debugfs.c 	if (ctx) {
ctx              1253 drivers/block/drbd/drbd_receiver.c 	struct issue_flush_context *ctx;
ctx              1260 drivers/block/drbd/drbd_receiver.c 	struct issue_flush_context *ctx = octx->ctx;
ctx              1263 drivers/block/drbd/drbd_receiver.c 		ctx->error = blk_status_to_errno(bio->bi_status);
ctx              1273 drivers/block/drbd/drbd_receiver.c 	if (atomic_dec_and_test(&ctx->pending))
ctx              1274 drivers/block/drbd/drbd_receiver.c 		complete(&ctx->done);
ctx              1277 drivers/block/drbd/drbd_receiver.c static void submit_one_flush(struct drbd_device *device, struct issue_flush_context *ctx)
ctx              1290 drivers/block/drbd/drbd_receiver.c 		ctx->error = -ENOMEM;
ctx              1297 drivers/block/drbd/drbd_receiver.c 	octx->ctx = ctx;
ctx              1305 drivers/block/drbd/drbd_receiver.c 	atomic_inc(&ctx->pending);
ctx              1313 drivers/block/drbd/drbd_receiver.c 		struct issue_flush_context ctx;
ctx              1316 drivers/block/drbd/drbd_receiver.c 		atomic_set(&ctx.pending, 1);
ctx              1317 drivers/block/drbd/drbd_receiver.c 		ctx.error = 0;
ctx              1318 drivers/block/drbd/drbd_receiver.c 		init_completion(&ctx.done);
ctx              1329 drivers/block/drbd/drbd_receiver.c 			submit_one_flush(device, &ctx);
ctx              1337 drivers/block/drbd/drbd_receiver.c 		if (!atomic_dec_and_test(&ctx.pending))
ctx              1338 drivers/block/drbd/drbd_receiver.c 			wait_for_completion(&ctx.done);
ctx              1340 drivers/block/drbd/drbd_receiver.c 		if (ctx.error) {
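
The drbd_receiver flush lines are a fan-out/fan-in: pending starts at 1 for the submitter, each submitted flush increments it, and whichever path decrements it to zero signals the completion. A self-contained sketch of that shape (flush_ctx and the submit hook are illustrative):

	#include <linux/atomic.h>
	#include <linux/completion.h>

	struct flush_ctx {		/* illustrative, not the driver's type */
		atomic_t pending;
		struct completion done;
		int error;
	};

	/* called from each flush completion */
	static void flush_one_done(struct flush_ctx *ctx, int err)
	{
		if (err)
			ctx->error = err;
		if (atomic_dec_and_test(&ctx->pending))
			complete(&ctx->done);
	}

	static int flush_all(struct flush_ctx *ctx, int ndevs)
	{
		int i;

		atomic_set(&ctx->pending, 1);	/* submitter's own reference */
		ctx->error = 0;
		init_completion(&ctx->done);

		for (i = 0; i < ndevs; i++) {
			atomic_inc(&ctx->pending);
			/* submit one async flush here; its completion
			 * calls flush_one_done(ctx, err) */
		}

		/* drop the submitter's reference; wait unless we were last */
		if (!atomic_dec_and_test(&ctx->pending))
			wait_for_completion(&ctx->done);
		return ctx->error;
	}
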
ctx               391 drivers/bluetooth/btintel.c 	struct regmap_ibt_context *ctx = context;
ctx               418 drivers/bluetooth/btintel.c 	bt_dev_dbg(ctx->hdev, "Register (0x%x) read", le32_to_cpu(cp.addr));
ctx               420 drivers/bluetooth/btintel.c 	skb = hci_cmd_sync(ctx->hdev, ctx->op_read, sizeof(cp), &cp,
ctx               424 drivers/bluetooth/btintel.c 		bt_dev_err(ctx->hdev, "regmap: Register (0x%x) read error (%d)",
ctx               430 drivers/bluetooth/btintel.c 		bt_dev_err(ctx->hdev, "regmap: Register (0x%x) read error, bad len",
ctx               439 drivers/bluetooth/btintel.c 		bt_dev_err(ctx->hdev, "regmap: Register (0x%x) read error, bad addr",
ctx               456 drivers/bluetooth/btintel.c 	struct regmap_ibt_context *ctx = context;
ctx               490 drivers/bluetooth/btintel.c 	bt_dev_dbg(ctx->hdev, "Register (0x%x) write", le32_to_cpu(cp->addr));
ctx               492 drivers/bluetooth/btintel.c 	skb = hci_cmd_sync(ctx->hdev, ctx->op_write, plen, cp, HCI_CMD_TIMEOUT);
ctx               495 drivers/bluetooth/btintel.c 		bt_dev_err(ctx->hdev, "regmap: Register (0x%x) write error (%d)",
ctx               541 drivers/bluetooth/btintel.c 	struct regmap_ibt_context *ctx;
ctx               546 drivers/bluetooth/btintel.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               547 drivers/bluetooth/btintel.c 	if (!ctx)
ctx               550 drivers/bluetooth/btintel.c 	ctx->op_read = opcode_read;
ctx               551 drivers/bluetooth/btintel.c 	ctx->op_write = opcode_write;
ctx               552 drivers/bluetooth/btintel.c 	ctx->hdev = hdev;
ctx               554 drivers/bluetooth/btintel.c 	return regmap_init(&hdev->dev, &regmap_ibt, ctx, &regmap_ibt_cfg);
ctx               402 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_ctx *ctx = context;
ctx               403 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_device *rdev = ctx->rdev;
ctx               408 drivers/bus/sunxi-rsb.c 	return sunxi_rsb_read(rdev->rsb, rdev->rtaddr, reg, val, ctx->size);
ctx               414 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_ctx *ctx = context;
ctx               415 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_device *rdev = ctx->rdev;
ctx               417 drivers/bus/sunxi-rsb.c 	return sunxi_rsb_write(rdev->rsb, rdev->rtaddr, reg, &val, ctx->size);
ctx               422 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_ctx *ctx = context;
ctx               424 drivers/bus/sunxi-rsb.c 	kfree(ctx);
ctx               438 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_ctx *ctx;
ctx               449 drivers/bus/sunxi-rsb.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               450 drivers/bus/sunxi-rsb.c 	if (!ctx)
ctx               453 drivers/bus/sunxi-rsb.c 	ctx->rdev = rdev;
ctx               454 drivers/bus/sunxi-rsb.c 	ctx->size = config->val_bits / 8;
ctx               456 drivers/bus/sunxi-rsb.c 	return ctx;
ctx               464 drivers/bus/sunxi-rsb.c 	struct sunxi_rsb_ctx *ctx = regmap_sunxi_rsb_init_ctx(rdev, config);
ctx               466 drivers/bus/sunxi-rsb.c 	if (IS_ERR(ctx))
ctx               467 drivers/bus/sunxi-rsb.c 		return ERR_CAST(ctx);
ctx               469 drivers/bus/sunxi-rsb.c 	return __devm_regmap_init(&rdev->dev, &regmap_sunxi_rsb, ctx, config,
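
The sunxi-rsb lines show the regmap bus-context convention: the bus allocates a small opaque context up front and pairs it with a free callback that regmap invokes on teardown. A brief sketch of the alloc/free pairing (rsb_bus_ctx and both helpers are illustrative names):

	#include <linux/slab.h>
	#include <linux/err.h>

	struct rsb_bus_ctx {		/* illustrative, not the driver's type */
		void *rdev;
		int size;
	};

	static void *rsb_bus_init_ctx(void *rdev, int val_bytes)
	{
		struct rsb_bus_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

		if (!ctx)
			return ERR_PTR(-ENOMEM);
		ctx->rdev = rdev;
		ctx->size = val_bytes;
		return ctx;
	}

	/* handed to regmap as the bus's free-context callback */
	static void rsb_bus_free_ctx(void *context)
	{
		kfree(context);
	}
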
ctx                63 drivers/char/hw_random/optee-rng.c 	struct tee_context *ctx;
ctx                96 drivers/char/hw_random/optee-rng.c 	ret = tee_client_invoke_func(pvt_data->ctx, &inv_arg, param);
ctx               148 drivers/char/hw_random/optee-rng.c 	entropy_shm_pool = tee_shm_alloc(pvt_data->ctx, MAX_ENTROPY_REQ_SZ,
ctx               193 drivers/char/hw_random/optee-rng.c 	ret = tee_client_invoke_func(pvt_data.ctx, &inv_arg, param);
ctx               223 drivers/char/hw_random/optee-rng.c 	pvt_data.ctx = tee_client_open_context(NULL, optee_ctx_match, NULL,
ctx               225 drivers/char/hw_random/optee-rng.c 	if (IS_ERR(pvt_data.ctx))
ctx               233 drivers/char/hw_random/optee-rng.c 	ret = tee_client_open_session(pvt_data.ctx, &sess_arg, NULL);
ctx               257 drivers/char/hw_random/optee-rng.c 	tee_client_close_session(pvt_data.ctx, pvt_data.session_id);
ctx               259 drivers/char/hw_random/optee-rng.c 	tee_client_close_context(pvt_data.ctx);
ctx               267 drivers/char/hw_random/optee-rng.c 	tee_client_close_session(pvt_data.ctx, pvt_data.session_id);
ctx               268 drivers/char/hw_random/optee-rng.c 	tee_client_close_context(pvt_data.ctx);
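
The optee-rng lines trace the TEE client lifecycle: open a context, open a session to the trusted application, invoke functions through it, then close session and context in reverse order. A sketch of the attach half, assuming a version-matching callback optee_ctx_match() as in the driver and eliding the session-argument setup:

	#include <linux/tee_drv.h>

	static int rng_ta_attach(struct tee_context **ctxp, u32 *session_id)
	{
		struct tee_ioctl_open_session_arg sess_arg = {};
		struct tee_context *ctx;
		int ret;

		ctx = tee_client_open_context(NULL, optee_ctx_match,
					      NULL, NULL);
		if (IS_ERR(ctx))
			return PTR_ERR(ctx);

		/* sess_arg.uuid / clnt_login / num_params set up here */
		ret = tee_client_open_session(ctx, &sess_arg, NULL);
		if (ret < 0 || sess_arg.ret != 0) {
			tee_client_close_context(ctx);
			return ret < 0 ? ret : -EINVAL;
		}

		*ctxp = ctx;
		*session_id = sess_arg.session;
		return 0;
	}

Teardown mirrors this: tee_client_close_session() first, then tee_client_close_context(), as the error and remove paths above do.
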
ctx                92 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = from_timer(ctx, t, failure_timer);
ctx                95 drivers/char/hw_random/xgene-rng.c 	disable_irq(ctx->irq);
ctx                96 drivers/char/hw_random/xgene-rng.c 	ctx->failure_cnt = 0;
ctx                97 drivers/char/hw_random/xgene-rng.c 	del_timer(&ctx->failure_timer);
ctx                98 drivers/char/hw_random/xgene-rng.c 	enable_irq(ctx->irq);
ctx               101 drivers/char/hw_random/xgene-rng.c static void xgene_rng_start_timer(struct xgene_rng_dev *ctx)
ctx               103 drivers/char/hw_random/xgene-rng.c 	ctx->failure_timer.expires = jiffies + 120 * HZ;
ctx               104 drivers/char/hw_random/xgene-rng.c 	add_timer(&ctx->failure_timer);
ctx               110 drivers/char/hw_random/xgene-rng.c static void xgene_rng_init_fro(struct xgene_rng_dev *ctx, u32 fro_val)
ctx               112 drivers/char/hw_random/xgene-rng.c 	writel(fro_val, ctx->csr_base + RNG_FRODETUNE);
ctx               113 drivers/char/hw_random/xgene-rng.c 	writel(0x00000000, ctx->csr_base + RNG_ALARMMASK);
ctx               114 drivers/char/hw_random/xgene-rng.c 	writel(0x00000000, ctx->csr_base + RNG_ALARMSTOP);
ctx               115 drivers/char/hw_random/xgene-rng.c 	writel(0xFFFFFFFF, ctx->csr_base + RNG_FROENABLE);
ctx               118 drivers/char/hw_random/xgene-rng.c static void xgene_rng_chk_overflow(struct xgene_rng_dev *ctx)
ctx               122 drivers/char/hw_random/xgene-rng.c 	val = readl(ctx->csr_base + RNG_INTR_STS_ACK);
ctx               129 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "test monobit failure error 0x%08X\n", val);
ctx               137 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "test poker failure error 0x%08X\n", val);
ctx               143 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "test long run failure error 0x%08X\n", val);
ctx               150 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "test run failure error 0x%08X\n", val);
ctx               153 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "noise failure error 0x%08X\n", val);
ctx               159 drivers/char/hw_random/xgene-rng.c 		dev_err(ctx->dev, "stuck out failure error 0x%08X\n", val);
ctx               165 drivers/char/hw_random/xgene-rng.c 		if (++ctx->failure_cnt == 1) {
ctx               167 drivers/char/hw_random/xgene-rng.c 			ctx->failure_ts = jiffies;
ctx               168 drivers/char/hw_random/xgene-rng.c 			frostopped = readl(ctx->csr_base + RNG_ALARMSTOP);
ctx               169 drivers/char/hw_random/xgene-rng.c 			xgene_rng_init_fro(ctx, frostopped);
ctx               175 drivers/char/hw_random/xgene-rng.c 			xgene_rng_start_timer(ctx);
ctx               178 drivers/char/hw_random/xgene-rng.c 			if (time_after(ctx->failure_ts + 60 * HZ, jiffies)) {
ctx               179 drivers/char/hw_random/xgene-rng.c 				dev_err(ctx->dev,
ctx               184 drivers/char/hw_random/xgene-rng.c 				ctx->failure_ts = jiffies;
ctx               185 drivers/char/hw_random/xgene-rng.c 				ctx->failure_cnt = 1;
ctx               191 drivers/char/hw_random/xgene-rng.c 				xgene_rng_start_timer(ctx);
ctx               193 drivers/char/hw_random/xgene-rng.c 			frostopped = readl(ctx->csr_base + RNG_ALARMSTOP);
ctx               194 drivers/char/hw_random/xgene-rng.c 			xgene_rng_init_fro(ctx, frostopped);
ctx               198 drivers/char/hw_random/xgene-rng.c 	writel(val, ctx->csr_base + RNG_INTR_STS_ACK);
ctx               203 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) id;
ctx               206 drivers/char/hw_random/xgene-rng.c 	xgene_rng_chk_overflow(ctx);
ctx               213 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) rng->priv;
ctx               217 drivers/char/hw_random/xgene-rng.c 		val = readl(ctx->csr_base + RNG_INTR_STS_ACK);
ctx               228 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) rng->priv;
ctx               231 drivers/char/hw_random/xgene-rng.c 	for (i = 0; i < ctx->datum_size; i++)
ctx               232 drivers/char/hw_random/xgene-rng.c 		data[i] = readl(ctx->csr_base + RNG_INOUT_0 + i * 4);
ctx               235 drivers/char/hw_random/xgene-rng.c 	writel(READY_MASK, ctx->csr_base + RNG_INTR_STS_ACK);
ctx               237 drivers/char/hw_random/xgene-rng.c 	return ctx->datum_size << 2;
ctx               240 drivers/char/hw_random/xgene-rng.c static void xgene_rng_init_internal(struct xgene_rng_dev *ctx)
ctx               244 drivers/char/hw_random/xgene-rng.c 	writel(0x00000000, ctx->csr_base + RNG_CONTROL);
ctx               248 drivers/char/hw_random/xgene-rng.c 	writel(val, ctx->csr_base + RNG_CONFIG);
ctx               251 drivers/char/hw_random/xgene-rng.c 	writel(val, ctx->csr_base + RNG_ALARMCNT);
ctx               253 drivers/char/hw_random/xgene-rng.c 	xgene_rng_init_fro(ctx, 0);
ctx               262 drivers/char/hw_random/xgene-rng.c 		READY_MASK, ctx->csr_base + RNG_INTR_STS_ACK);
ctx               272 drivers/char/hw_random/xgene-rng.c 	writel(val, ctx->csr_base + RNG_CONTROL);
ctx               277 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = (struct xgene_rng_dev *) rng->priv;
ctx               279 drivers/char/hw_random/xgene-rng.c 	ctx->failure_cnt = 0;
ctx               280 drivers/char/hw_random/xgene-rng.c 	timer_setup(&ctx->failure_timer, xgene_rng_expired_timer, 0);
ctx               282 drivers/char/hw_random/xgene-rng.c 	ctx->revision = readl(ctx->csr_base + RNG_EIP_REV);
ctx               284 drivers/char/hw_random/xgene-rng.c 	dev_dbg(ctx->dev, "Rev %d.%d.%d\n",
ctx               285 drivers/char/hw_random/xgene-rng.c 		MAJOR_HW_REV_RD(ctx->revision),
ctx               286 drivers/char/hw_random/xgene-rng.c 		MINOR_HW_REV_RD(ctx->revision),
ctx               287 drivers/char/hw_random/xgene-rng.c 		HW_PATCH_LEVEL_RD(ctx->revision));
ctx               289 drivers/char/hw_random/xgene-rng.c 	dev_dbg(ctx->dev, "Options 0x%08X",
ctx               290 drivers/char/hw_random/xgene-rng.c 		readl(ctx->csr_base + RNG_OPTIONS));
ctx               292 drivers/char/hw_random/xgene-rng.c 	xgene_rng_init_internal(ctx);
ctx               294 drivers/char/hw_random/xgene-rng.c 	ctx->datum_size = RNG_MAX_DATUM;
ctx               317 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx;
ctx               320 drivers/char/hw_random/xgene-rng.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               321 drivers/char/hw_random/xgene-rng.c 	if (!ctx)
ctx               324 drivers/char/hw_random/xgene-rng.c 	ctx->dev = &pdev->dev;
ctx               325 drivers/char/hw_random/xgene-rng.c 	platform_set_drvdata(pdev, ctx);
ctx               328 drivers/char/hw_random/xgene-rng.c 	ctx->csr_base = devm_ioremap_resource(&pdev->dev, res);
ctx               329 drivers/char/hw_random/xgene-rng.c 	if (IS_ERR(ctx->csr_base))
ctx               330 drivers/char/hw_random/xgene-rng.c 		return PTR_ERR(ctx->csr_base);
ctx               337 drivers/char/hw_random/xgene-rng.c 	ctx->irq = rc;
ctx               340 drivers/char/hw_random/xgene-rng.c 		ctx->csr_base, ctx->irq);
ctx               342 drivers/char/hw_random/xgene-rng.c 	rc = devm_request_irq(&pdev->dev, ctx->irq, xgene_rng_irq_handler, 0,
ctx               343 drivers/char/hw_random/xgene-rng.c 				dev_name(&pdev->dev), ctx);
ctx               350 drivers/char/hw_random/xgene-rng.c 	ctx->clk = devm_clk_get(&pdev->dev, NULL);
ctx               351 drivers/char/hw_random/xgene-rng.c 	if (IS_ERR(ctx->clk)) {
ctx               354 drivers/char/hw_random/xgene-rng.c 		rc = clk_prepare_enable(ctx->clk);
ctx               362 drivers/char/hw_random/xgene-rng.c 	xgene_rng_func.priv = (unsigned long) ctx;
ctx               367 drivers/char/hw_random/xgene-rng.c 		if (!IS_ERR(ctx->clk))
ctx               368 drivers/char/hw_random/xgene-rng.c 			clk_disable_unprepare(ctx->clk);
ctx               376 drivers/char/hw_random/xgene-rng.c 		if (!IS_ERR(ctx->clk))
ctx               377 drivers/char/hw_random/xgene-rng.c 			clk_disable_unprepare(ctx->clk);
ctx               386 drivers/char/hw_random/xgene-rng.c 	struct xgene_rng_dev *ctx = platform_get_drvdata(pdev);
ctx               392 drivers/char/hw_random/xgene-rng.c 	if (!IS_ERR(ctx->clk))
ctx               393 drivers/char/hw_random/xgene-rng.c 		clk_disable_unprepare(ctx->clk);
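
The xgene-rng lines implement failure-rate bookkeeping: the first self-test alarm records a timestamp and arms a long reset timer, a second alarm within 60 seconds is treated as a hard failure, and anything later just restarts the window. A condensed sketch of that decision (foo_rng_ctx and the handler name are illustrative):

	#include <linux/timer.h>
	#include <linux/jiffies.h>
	#include <linux/device.h>

	struct foo_rng_ctx {		/* illustrative, not the driver's type */
		struct device *dev;
		struct timer_list failure_timer;
		u32 failure_cnt;
		unsigned long failure_ts;
	};

	static void foo_rng_alarm(struct foo_rng_ctx *ctx)
	{
		if (++ctx->failure_cnt == 1) {
			/* first alarm: stamp it and arm a 120 s reset timer */
			ctx->failure_ts = jiffies;
			ctx->failure_timer.expires = jiffies + 120 * HZ;
			add_timer(&ctx->failure_timer);
		} else if (time_after(ctx->failure_ts + 60 * HZ, jiffies)) {
			/* second alarm within 60 s of the first */
			dev_err(ctx->dev,
				"RNG self-tests failing repeatedly\n");
		} else {
			/* stale failure: restart the window */
			ctx->failure_ts = jiffies;
			ctx->failure_cnt = 1;
		}
	}
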
ctx                72 drivers/char/tpm/tpm2-space.c 	struct tpm2_context *ctx;
ctx                80 drivers/char/tpm/tpm2-space.c 	ctx = (struct tpm2_context *)&buf[*offset];
ctx                81 drivers/char/tpm/tpm2-space.c 	body_size = sizeof(*ctx) + be16_to_cpu(ctx->blob_size);
ctx               130 drivers/char/tpm/tpm_ftpm_tee.c 	rc = tee_client_invoke_func(pvt_data->ctx, &transceive_args,
ctx               233 drivers/char/tpm/tpm_ftpm_tee.c 	pvt_data->ctx = tee_client_open_context(NULL, ftpm_tee_match, NULL,
ctx               235 drivers/char/tpm/tpm_ftpm_tee.c 	if (IS_ERR(pvt_data->ctx)) {
ctx               236 drivers/char/tpm/tpm_ftpm_tee.c 		if (PTR_ERR(pvt_data->ctx) == -ENOENT)
ctx               239 drivers/char/tpm/tpm_ftpm_tee.c 		return PTR_ERR(pvt_data->ctx);
ctx               248 drivers/char/tpm/tpm_ftpm_tee.c 	rc = tee_client_open_session(pvt_data->ctx, &sess_arg, NULL);
ctx               258 drivers/char/tpm/tpm_ftpm_tee.c 	pvt_data->shm = tee_shm_alloc(pvt_data->ctx,
ctx               293 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_session(pvt_data->ctx, pvt_data->session);
ctx               295 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_context(pvt_data->ctx);
ctx               321 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_session(pvt_data->ctx, pvt_data->session);
ctx               324 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_context(pvt_data->ctx);
ctx               340 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_session(pvt_data->ctx, pvt_data->session);
ctx               341 drivers/char/tpm/tpm_ftpm_tee.c 	tee_client_close_context(pvt_data->ctx);
ctx                36 drivers/char/tpm/tpm_ftpm_tee.h 	struct tee_context *ctx;
ctx                42 drivers/clk/rockchip/clk-pll.c 	struct rockchip_clk_provider *ctx;
ctx                87 drivers/clk/rockchip/clk-pll.c 	struct regmap *grf = pll->ctx->grf;
ctx               831 drivers/clk/rockchip/clk-pll.c struct clk *rockchip_clk_register_pll(struct rockchip_clk_provider *ctx,
ctx               862 drivers/clk/rockchip/clk-pll.c 	pll_mux->reg = ctx->reg_base + mode_offset;
ctx               869 drivers/clk/rockchip/clk-pll.c 	pll_mux->lock = &ctx->lock;
ctx               925 drivers/clk/rockchip/clk-pll.c 		if (!pll->rate_table || IS_ERR(ctx->grf))
ctx               931 drivers/clk/rockchip/clk-pll.c 		if (!pll->rate_table || IS_ERR(ctx->grf))
ctx               949 drivers/clk/rockchip/clk-pll.c 	pll->reg_base = ctx->reg_base + con_offset;
ctx               953 drivers/clk/rockchip/clk-pll.c 	pll->lock = &ctx->lock;
ctx               954 drivers/clk/rockchip/clk-pll.c 	pll->ctx = ctx;
ctx               961 drivers/clk/rockchip/clk-px30.c 	struct rockchip_clk_provider *ctx;
ctx               970 drivers/clk/rockchip/clk-px30.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               971 drivers/clk/rockchip/clk-px30.c 	if (IS_ERR(ctx)) {
ctx               977 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_register_plls(ctx, px30_pll_clks,
ctx               980 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_register_branches(ctx, px30_clk_branches,
ctx               983 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               991 drivers/clk/rockchip/clk-px30.c 	rockchip_register_restart_notifier(ctx, PX30_GLB_SRST_FST, NULL);
ctx               993 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               999 drivers/clk/rockchip/clk-px30.c 	struct rockchip_clk_provider *ctx;
ctx              1008 drivers/clk/rockchip/clk-px30.c 	ctx = rockchip_clk_init(np, reg_base, CLKPMU_NR_CLKS);
ctx              1009 drivers/clk/rockchip/clk-px30.c 	if (IS_ERR(ctx)) {
ctx              1014 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_register_plls(ctx, px30_pmu_pll_clks,
ctx              1017 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_register_branches(ctx, px30_clk_pmu_branches,
ctx              1023 drivers/clk/rockchip/clk-px30.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               436 drivers/clk/rockchip/clk-rk3036.c 	struct rockchip_clk_provider *ctx;
ctx               453 drivers/clk/rockchip/clk-rk3036.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               454 drivers/clk/rockchip/clk-rk3036.c 	if (IS_ERR(ctx)) {
ctx               465 drivers/clk/rockchip/clk-rk3036.c 	rockchip_clk_register_plls(ctx, rk3036_pll_clks,
ctx               468 drivers/clk/rockchip/clk-rk3036.c 	rockchip_clk_register_branches(ctx, rk3036_clk_branches,
ctx               473 drivers/clk/rockchip/clk-rk3036.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               481 drivers/clk/rockchip/clk-rk3036.c 	rockchip_register_restart_notifier(ctx, RK2928_GLB_SRST_FST, NULL);
ctx               483 drivers/clk/rockchip/clk-rk3036.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               578 drivers/clk/rockchip/clk-rk3128.c 	struct rockchip_clk_provider *ctx;
ctx               587 drivers/clk/rockchip/clk-rk3128.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               588 drivers/clk/rockchip/clk-rk3128.c 	if (IS_ERR(ctx)) {
ctx               594 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_register_plls(ctx, rk3128_pll_clks,
ctx               597 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_register_branches(ctx, common_clk_branches,
ctx               600 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               608 drivers/clk/rockchip/clk-rk3128.c 	rockchip_register_restart_notifier(ctx, RK2928_GLB_SRST_FST, NULL);
ctx               610 drivers/clk/rockchip/clk-rk3128.c 	return ctx;
ctx               615 drivers/clk/rockchip/clk-rk3128.c 	struct rockchip_clk_provider *ctx;
ctx               617 drivers/clk/rockchip/clk-rk3128.c 	ctx = rk3128_common_clk_init(np);
ctx               618 drivers/clk/rockchip/clk-rk3128.c 	if (IS_ERR(ctx))
ctx               621 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_register_branches(ctx, rk3126_clk_branches,
ctx               626 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               633 drivers/clk/rockchip/clk-rk3128.c 	struct rockchip_clk_provider *ctx;
ctx               635 drivers/clk/rockchip/clk-rk3128.c 	ctx = rk3128_common_clk_init(np);
ctx               636 drivers/clk/rockchip/clk-rk3128.c 	if (IS_ERR(ctx))
ctx               639 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_register_branches(ctx, rk3128_clk_branches,
ctx               644 drivers/clk/rockchip/clk-rk3128.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               758 drivers/clk/rockchip/clk-rk3188.c 	struct rockchip_clk_provider *ctx;
ctx               767 drivers/clk/rockchip/clk-rk3188.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               768 drivers/clk/rockchip/clk-rk3188.c 	if (IS_ERR(ctx)) {
ctx               774 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_branches(ctx, common_clk_branches,
ctx               780 drivers/clk/rockchip/clk-rk3188.c 	rockchip_register_restart_notifier(ctx, RK2928_GLB_SRST_FST, NULL);
ctx               782 drivers/clk/rockchip/clk-rk3188.c 	return ctx;
ctx               787 drivers/clk/rockchip/clk-rk3188.c 	struct rockchip_clk_provider *ctx;
ctx               789 drivers/clk/rockchip/clk-rk3188.c 	ctx = rk3188_common_clk_init(np);
ctx               790 drivers/clk/rockchip/clk-rk3188.c 	if (IS_ERR(ctx))
ctx               793 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_plls(ctx, rk3066_pll_clks,
ctx               796 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_branches(ctx, rk3066a_clk_branches,
ctx               798 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               804 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               810 drivers/clk/rockchip/clk-rk3188.c 	struct rockchip_clk_provider *ctx;
ctx               815 drivers/clk/rockchip/clk-rk3188.c 	ctx = rk3188_common_clk_init(np);
ctx               816 drivers/clk/rockchip/clk-rk3188.c 	if (IS_ERR(ctx))
ctx               819 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_plls(ctx, rk3188_pll_clks,
ctx               822 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_branches(ctx, rk3188_clk_branches,
ctx               824 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               848 drivers/clk/rockchip/clk-rk3188.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               684 drivers/clk/rockchip/clk-rk3228.c 	struct rockchip_clk_provider *ctx;
ctx               693 drivers/clk/rockchip/clk-rk3228.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               694 drivers/clk/rockchip/clk-rk3228.c 	if (IS_ERR(ctx)) {
ctx               700 drivers/clk/rockchip/clk-rk3228.c 	rockchip_clk_register_plls(ctx, rk3228_pll_clks,
ctx               703 drivers/clk/rockchip/clk-rk3228.c 	rockchip_clk_register_branches(ctx, rk3228_clk_branches,
ctx               708 drivers/clk/rockchip/clk-rk3228.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               716 drivers/clk/rockchip/clk-rk3228.c 	rockchip_register_restart_notifier(ctx, RK3228_GLB_SRST_FST, NULL);
ctx               718 drivers/clk/rockchip/clk-rk3228.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               919 drivers/clk/rockchip/clk-rk3288.c 	struct rockchip_clk_provider *ctx;
ctx               927 drivers/clk/rockchip/clk-rk3288.c 	ctx = rockchip_clk_init(np, rk3288_cru_base, CLK_NR_CLKS);
ctx               928 drivers/clk/rockchip/clk-rk3288.c 	if (IS_ERR(ctx)) {
ctx               934 drivers/clk/rockchip/clk-rk3288.c 	rockchip_clk_register_plls(ctx, rk3288_pll_clks,
ctx               937 drivers/clk/rockchip/clk-rk3288.c 	rockchip_clk_register_branches(ctx, rk3288_clk_branches,
ctx               942 drivers/clk/rockchip/clk-rk3288.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               951 drivers/clk/rockchip/clk-rk3288.c 	rockchip_register_restart_notifier(ctx, RK3288_GLB_SRST_FST,
ctx               955 drivers/clk/rockchip/clk-rk3288.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               918 drivers/clk/rockchip/clk-rk3308.c 	struct rockchip_clk_provider *ctx;
ctx               927 drivers/clk/rockchip/clk-rk3308.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               928 drivers/clk/rockchip/clk-rk3308.c 	if (IS_ERR(ctx)) {
ctx               934 drivers/clk/rockchip/clk-rk3308.c 	rockchip_clk_register_plls(ctx, rk3308_pll_clks,
ctx               937 drivers/clk/rockchip/clk-rk3308.c 	rockchip_clk_register_branches(ctx, rk3308_clk_branches,
ctx               942 drivers/clk/rockchip/clk-rk3308.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               950 drivers/clk/rockchip/clk-rk3308.c 	rockchip_register_restart_notifier(ctx, RK3308_GLB_SRST_FST, NULL);
ctx               952 drivers/clk/rockchip/clk-rk3308.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               882 drivers/clk/rockchip/clk-rk3328.c 	struct rockchip_clk_provider *ctx;
ctx               891 drivers/clk/rockchip/clk-rk3328.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               892 drivers/clk/rockchip/clk-rk3328.c 	if (IS_ERR(ctx)) {
ctx               898 drivers/clk/rockchip/clk-rk3328.c 	rockchip_clk_register_plls(ctx, rk3328_pll_clks,
ctx               901 drivers/clk/rockchip/clk-rk3328.c 	rockchip_clk_register_branches(ctx, rk3328_clk_branches,
ctx               906 drivers/clk/rockchip/clk-rk3328.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               914 drivers/clk/rockchip/clk-rk3328.c 	rockchip_register_restart_notifier(ctx, RK3328_GLB_SRST_FST, NULL);
ctx               916 drivers/clk/rockchip/clk-rk3328.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               866 drivers/clk/rockchip/clk-rk3368.c 	struct rockchip_clk_provider *ctx;
ctx               875 drivers/clk/rockchip/clk-rk3368.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               876 drivers/clk/rockchip/clk-rk3368.c 	if (IS_ERR(ctx)) {
ctx               882 drivers/clk/rockchip/clk-rk3368.c 	rockchip_clk_register_plls(ctx, rk3368_pll_clks,
ctx               885 drivers/clk/rockchip/clk-rk3368.c 	rockchip_clk_register_branches(ctx, rk3368_clk_branches,
ctx               890 drivers/clk/rockchip/clk-rk3368.c 	rockchip_clk_register_armclk(ctx, ARMCLKB, "armclkb",
ctx               895 drivers/clk/rockchip/clk-rk3368.c 	rockchip_clk_register_armclk(ctx, ARMCLKL, "armclkl",
ctx               903 drivers/clk/rockchip/clk-rk3368.c 	rockchip_register_restart_notifier(ctx, RK3368_GLB_SRST_FST, NULL);
ctx               905 drivers/clk/rockchip/clk-rk3368.c 	rockchip_clk_of_add_provider(np, ctx);
ctx              1526 drivers/clk/rockchip/clk-rk3399.c 	struct rockchip_clk_provider *ctx;
ctx              1535 drivers/clk/rockchip/clk-rk3399.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx              1536 drivers/clk/rockchip/clk-rk3399.c 	if (IS_ERR(ctx)) {
ctx              1542 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_plls(ctx, rk3399_pll_clks,
ctx              1545 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_branches(ctx, rk3399_clk_branches,
ctx              1551 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_armclk(ctx, ARMCLKL, "armclkl",
ctx              1556 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_armclk(ctx, ARMCLKB, "armclkb",
ctx              1564 drivers/clk/rockchip/clk-rk3399.c 	rockchip_register_restart_notifier(ctx, RK3399_GLB_SRST_FST, NULL);
ctx              1566 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_of_add_provider(np, ctx);
ctx              1572 drivers/clk/rockchip/clk-rk3399.c 	struct rockchip_clk_provider *ctx;
ctx              1581 drivers/clk/rockchip/clk-rk3399.c 	ctx = rockchip_clk_init(np, reg_base, CLKPMU_NR_CLKS);
ctx              1582 drivers/clk/rockchip/clk-rk3399.c 	if (IS_ERR(ctx)) {
ctx              1588 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_plls(ctx, rk3399_pmu_pll_clks,
ctx              1591 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_register_branches(ctx, rk3399_clk_pmu_branches,
ctx              1600 drivers/clk/rockchip/clk-rk3399.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               786 drivers/clk/rockchip/clk-rv1108.c 	struct rockchip_clk_provider *ctx;
ctx               795 drivers/clk/rockchip/clk-rv1108.c 	ctx = rockchip_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               796 drivers/clk/rockchip/clk-rv1108.c 	if (IS_ERR(ctx)) {
ctx               802 drivers/clk/rockchip/clk-rv1108.c 	rockchip_clk_register_plls(ctx, rv1108_pll_clks,
ctx               805 drivers/clk/rockchip/clk-rv1108.c 	rockchip_clk_register_branches(ctx, rv1108_clk_branches,
ctx               810 drivers/clk/rockchip/clk-rv1108.c 	rockchip_clk_register_armclk(ctx, ARMCLK, "armclk",
ctx               818 drivers/clk/rockchip/clk-rv1108.c 	rockchip_register_restart_notifier(ctx, RV1108_GLB_SRST_FST, NULL);
ctx               820 drivers/clk/rockchip/clk-rv1108.c 	rockchip_clk_of_add_provider(np, ctx);
ctx               210 drivers/clk/rockchip/clk.c 		struct rockchip_clk_provider *ctx, const char *name,
ctx               298 drivers/clk/rockchip/clk.c 		rockchip_clk_add_lookup(ctx, mux_clk, child->id);
ctx               367 drivers/clk/rockchip/clk.c 	struct rockchip_clk_provider *ctx;
ctx               371 drivers/clk/rockchip/clk.c 	ctx = kzalloc(sizeof(struct rockchip_clk_provider), GFP_KERNEL);
ctx               372 drivers/clk/rockchip/clk.c 	if (!ctx)
ctx               382 drivers/clk/rockchip/clk.c 	ctx->reg_base = base;
ctx               383 drivers/clk/rockchip/clk.c 	ctx->clk_data.clks = clk_table;
ctx               384 drivers/clk/rockchip/clk.c 	ctx->clk_data.clk_num = nr_clks;
ctx               385 drivers/clk/rockchip/clk.c 	ctx->cru_node = np;
ctx               386 drivers/clk/rockchip/clk.c 	spin_lock_init(&ctx->lock);
ctx               388 drivers/clk/rockchip/clk.c 	ctx->grf = syscon_regmap_lookup_by_phandle(ctx->cru_node,
ctx               391 drivers/clk/rockchip/clk.c 	return ctx;
ctx               394 drivers/clk/rockchip/clk.c 	kfree(ctx);
ctx               399 drivers/clk/rockchip/clk.c 				struct rockchip_clk_provider *ctx)
ctx               402 drivers/clk/rockchip/clk.c 				&ctx->clk_data))
ctx               406 drivers/clk/rockchip/clk.c void rockchip_clk_add_lookup(struct rockchip_clk_provider *ctx,
ctx               409 drivers/clk/rockchip/clk.c 	if (ctx->clk_data.clks && id)
ctx               410 drivers/clk/rockchip/clk.c 		ctx->clk_data.clks[id] = clk;
ctx               413 drivers/clk/rockchip/clk.c void __init rockchip_clk_register_plls(struct rockchip_clk_provider *ctx,
ctx               421 drivers/clk/rockchip/clk.c 		clk = rockchip_clk_register_pll(ctx, list->type, list->name,
ctx               433 drivers/clk/rockchip/clk.c 		rockchip_clk_add_lookup(ctx, clk, list->id);
ctx               438 drivers/clk/rockchip/clk.c 				      struct rockchip_clk_provider *ctx,
ctx               454 drivers/clk/rockchip/clk.c 				flags, ctx->reg_base + list->muxdiv_offset,
ctx               456 drivers/clk/rockchip/clk.c 				list->mux_flags, &ctx->lock);
ctx               461 drivers/clk/rockchip/clk.c 				flags, ctx->grf, list->muxdiv_offset,
ctx               470 drivers/clk/rockchip/clk.c 					ctx->reg_base + list->muxdiv_offset,
ctx               473 drivers/clk/rockchip/clk.c 					&ctx->lock);
ctx               477 drivers/clk/rockchip/clk.c 					ctx->reg_base + list->muxdiv_offset,
ctx               479 drivers/clk/rockchip/clk.c 					list->div_flags, &ctx->lock);
ctx               482 drivers/clk/rockchip/clk.c 			clk = rockchip_clk_register_frac_branch(ctx, list->name,
ctx               484 drivers/clk/rockchip/clk.c 				ctx->reg_base, list->muxdiv_offset,
ctx               488 drivers/clk/rockchip/clk.c 				&ctx->lock);
ctx               493 drivers/clk/rockchip/clk.c 				ctx->reg_base, list->muxdiv_offset,
ctx               498 drivers/clk/rockchip/clk.c 				list->gate_flags, flags, &ctx->lock);
ctx               505 drivers/clk/rockchip/clk.c 				ctx->reg_base + list->gate_offset,
ctx               506 drivers/clk/rockchip/clk.c 				list->gate_shift, list->gate_flags, &ctx->lock);
ctx               511 drivers/clk/rockchip/clk.c 				ctx->reg_base, list->muxdiv_offset,
ctx               517 drivers/clk/rockchip/clk.c 				list->gate_flags, flags, &ctx->lock);
ctx               523 drivers/clk/rockchip/clk.c 				ctx->reg_base + list->muxdiv_offset,
ctx               531 drivers/clk/rockchip/clk.c 				ctx->reg_base + list->muxdiv_offset,
ctx               532 drivers/clk/rockchip/clk.c 				list->div_shift, list->div_flags, &ctx->lock);
ctx               537 drivers/clk/rockchip/clk.c 				list->num_parents, ctx->reg_base,
ctx               540 drivers/clk/rockchip/clk.c 				list->gate_flags, flags, &ctx->lock);
ctx               549 drivers/clk/rockchip/clk.c 				ctx->reg_base, &ctx->lock);
ctx               566 drivers/clk/rockchip/clk.c 		rockchip_clk_add_lookup(ctx, clk, list->id);
ctx               570 drivers/clk/rockchip/clk.c void __init rockchip_clk_register_armclk(struct rockchip_clk_provider *ctx,
ctx               582 drivers/clk/rockchip/clk.c 					   ctx->reg_base, &ctx->lock);
ctx               589 drivers/clk/rockchip/clk.c 	rockchip_clk_add_lookup(ctx, clk, lookup_id);
ctx               625 drivers/clk/rockchip/clk.c rockchip_register_restart_notifier(struct rockchip_clk_provider *ctx,
ctx               631 drivers/clk/rockchip/clk.c 	rst_base = ctx->reg_base;
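
The rockchip clk.c lines define the provider-context pattern that every per-SoC file above repeats: allocate one ctx holding the register base, clk lookup table, and spinlock; pass it to every registration helper; and record each resulting clk by ID. A minimal sketch of the init and lookup halves (provider_ctx and both helpers are illustrative names):

	#include <linux/slab.h>
	#include <linux/spinlock.h>
	#include <linux/io.h>
	#include <linux/err.h>
	#include <linux/clk-provider.h>

	struct provider_ctx {		/* illustrative, not the driver's type */
		void __iomem *reg_base;
		struct clk **clks;
		unsigned int nr_clks;
		spinlock_t lock;
	};

	static struct provider_ctx *provider_init(void __iomem *base,
						  unsigned int nr_clks)
	{
		struct provider_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

		if (!ctx)
			return ERR_PTR(-ENOMEM);
		ctx->reg_base = base;
		ctx->nr_clks = nr_clks;
		spin_lock_init(&ctx->lock);
		return ctx;
	}

	/* every registration helper records its result by ID */
	static void provider_add_lookup(struct provider_ctx *ctx,
					struct clk *clk, unsigned int id)
	{
		if (ctx->clks && id < ctx->nr_clks)
			ctx->clks[id] = clk;
	}
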
ctx               312 drivers/clk/rockchip/clk.h struct clk *rockchip_clk_register_pll(struct rockchip_clk_provider *ctx,
ctx               834 drivers/clk/rockchip/clk.h 				struct rockchip_clk_provider *ctx);
ctx               835 drivers/clk/rockchip/clk.h void rockchip_clk_add_lookup(struct rockchip_clk_provider *ctx,
ctx               837 drivers/clk/rockchip/clk.h void rockchip_clk_register_branches(struct rockchip_clk_provider *ctx,
ctx               840 drivers/clk/rockchip/clk.h void rockchip_clk_register_plls(struct rockchip_clk_provider *ctx,
ctx               843 drivers/clk/rockchip/clk.h void rockchip_clk_register_armclk(struct rockchip_clk_provider *ctx,
ctx               850 drivers/clk/rockchip/clk.h void rockchip_register_restart_notifier(struct rockchip_clk_provider *ctx,
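The rockchip entries above trace one provider object through its whole life: the MMIO base, the GRF regmap resolved by phandle (clk.c:388), the clock lookup table, and the spinlock every branch shares all live in ctx. A minimal sketch of how a per-SoC init function would string the listed helpers together; example_plls, example_branches and EXAMPLE_NR_CLKS are hypothetical stand-ins for the tables the real per-SoC files under drivers/clk/rockchip/ provide:

static void __init example_rk_clk_init(struct device_node *np)
{
	struct rockchip_clk_provider *ctx;
	void __iomem *reg_base = of_iomap(np, 0);

	if (!reg_base)
		return;

	/* allocates the provider and resolves the GRF syscon phandle */
	ctx = rockchip_clk_init(np, reg_base, EXAMPLE_NR_CLKS);
	if (IS_ERR(ctx)) {
		iounmap(reg_base);
		return;
	}

	/* every helper derives registers from ctx->reg_base + offset and
	 * serialises shared-register writes through &ctx->lock */
	rockchip_clk_register_plls(ctx, example_plls,
				   ARRAY_SIZE(example_plls),
				   0 /* grf_lock_offset, hypothetical */);
	rockchip_clk_register_branches(ctx, example_branches,
				       ARRAY_SIZE(example_branches));

	rockchip_clk_of_add_provider(np, ctx);
}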
ctx               403 drivers/clk/samsung/clk-cpu.c int __init exynos_register_cpu_clock(struct samsung_clk_provider *ctx,
ctx               425 drivers/clk/samsung/clk-cpu.c 	cpuclk->ctrl_base = ctx->reg_base + offset;
ctx               426 drivers/clk/samsung/clk-cpu.c 	cpuclk->lock = &ctx->lock;
ctx               468 drivers/clk/samsung/clk-cpu.c 	samsung_clk_add_lookup(ctx, &cpuclk->hw, lookup_id);
ctx                65 drivers/clk/samsung/clk-cpu.h extern int __init exynos_register_cpu_clock(struct samsung_clk_provider *ctx,
ctx               810 drivers/clk/samsung/clk-exynos3250.c 	struct samsung_clk_provider *ctx;
ctx               812 drivers/clk/samsung/clk-exynos3250.c 	ctx = samsung_cmu_register_one(np, &cmu_info);
ctx               813 drivers/clk/samsung/clk-exynos3250.c 	if (!ctx)
ctx               816 drivers/clk/samsung/clk-exynos3250.c 	exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx               821 drivers/clk/samsung/clk-exynos3250.c 	exynos3_core_down_clock(ctx->reg_base);
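The exynos3250 entries show the common two-step shape on the Samsung side: samsung_cmu_register_one() builds and publishes a whole CMU from a single table, and only SoC-specific fixups touch the provider afterwards. A hedged sketch; example_cmu_info and example_tune_core_down() are hypothetical stand-ins for the per-SoC table and the core-down-clock tweak seen above:

static void __init example_cmu_init(struct device_node *np)
{
	struct samsung_clk_provider *ctx;

	/* one call registers everything described by the cmu_info table
	 * and installs the OF provider; NULL means registration failed */
	ctx = samsung_cmu_register_one(np, &example_cmu_info);
	if (!ctx)
		return;

	/* follow-up work needing the raw MMIO window (compare
	 * exynos3_core_down_clock above) goes through ctx->reg_base */
	example_tune_core_down(ctx->reg_base);
}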
ctx              1036 drivers/clk/samsung/clk-exynos4.c static void __init exynos4_clk_register_finpll(struct samsung_clk_provider *ctx)
ctx              1059 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_fixed_rate(ctx, &fclk, 1);
ctx              1235 drivers/clk/samsung/clk-exynos4.c 	struct samsung_clk_provider *ctx;
ctx              1242 drivers/clk/samsung/clk-exynos4.c 	ctx = samsung_clk_init(np, reg_base, CLK_NR_CLKS);
ctx              1244 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_of_register_fixed_ext(ctx, exynos4_fixed_rate_ext_clks,
ctx              1248 drivers/clk/samsung/clk-exynos4.c 	exynos4_clk_register_finpll(ctx);
ctx              1251 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_mux(ctx, exynos4210_mux_early,
ctx              1265 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_pll(ctx, exynos4210_plls,
ctx              1277 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_pll(ctx, exynos4x12_plls,
ctx              1281 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_fixed_rate(ctx, exynos4_fixed_rate_clks,
ctx              1283 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_mux(ctx, exynos4_mux_clks,
ctx              1285 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_div(ctx, exynos4_div_clks,
ctx              1287 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_gate(ctx, exynos4_gate_clks,
ctx              1289 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_register_fixed_factor(ctx, exynos4_fixed_factor_clks,
ctx              1293 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_fixed_rate(ctx, exynos4210_fixed_rate_clks,
ctx              1295 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_mux(ctx, exynos4210_mux_clks,
ctx              1297 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_div(ctx, exynos4210_div_clks,
ctx              1299 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_gate(ctx, exynos4210_gate_clks,
ctx              1301 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_fixed_factor(ctx,
ctx              1304 drivers/clk/samsung/clk-exynos4.c 		exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx              1309 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_mux(ctx, exynos4x12_mux_clks,
ctx              1311 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_div(ctx, exynos4x12_div_clks,
ctx              1313 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_gate(ctx, exynos4x12_gate_clks,
ctx              1315 drivers/clk/samsung/clk-exynos4.c 		samsung_clk_register_fixed_factor(ctx,
ctx              1319 drivers/clk/samsung/clk-exynos4.c 		exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx              1339 drivers/clk/samsung/clk-exynos4.c 	samsung_clk_of_add_provider(np, ctx);
ctx                92 drivers/clk/samsung/clk-exynos4412-isp.c 	struct samsung_clk_provider *ctx = dev_get_drvdata(dev);
ctx                94 drivers/clk/samsung/clk-exynos4412-isp.c 	samsung_clk_save(ctx->reg_base, exynos4x12_save_isp,
ctx               101 drivers/clk/samsung/clk-exynos4412-isp.c 	struct samsung_clk_provider *ctx = dev_get_drvdata(dev);
ctx               103 drivers/clk/samsung/clk-exynos4412-isp.c 	samsung_clk_restore(ctx->reg_base, exynos4x12_save_isp,
ctx               110 drivers/clk/samsung/clk-exynos4412-isp.c 	struct samsung_clk_provider *ctx;
ctx               128 drivers/clk/samsung/clk-exynos4412-isp.c 	ctx = samsung_clk_init(np, reg_base, CLK_NR_ISP_CLKS);
ctx               129 drivers/clk/samsung/clk-exynos4412-isp.c 	ctx->dev = dev;
ctx               131 drivers/clk/samsung/clk-exynos4412-isp.c 	platform_set_drvdata(pdev, ctx);
ctx               137 drivers/clk/samsung/clk-exynos4412-isp.c 	samsung_clk_register_div(ctx, exynos4x12_isp_div_clks,
ctx               139 drivers/clk/samsung/clk-exynos4412-isp.c 	samsung_clk_register_gate(ctx, exynos4x12_isp_gate_clks,
ctx               142 drivers/clk/samsung/clk-exynos4412-isp.c 	samsung_clk_of_add_provider(np, ctx);
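clk-exynos4412-isp.c is the one CMU in this run built as a platform driver, so the provider is parked in drvdata and fetched back in the PM callbacks. A minimal sketch of that save/restore round trip, assuming a hypothetical samsung_clk_reg_dump array example_save_regs:

static int example_clk_suspend(struct device *dev)
{
	struct samsung_clk_provider *ctx = dev_get_drvdata(dev);

	/* snapshot mux/divider/gate registers before the ISP power
	 * domain drops; cycling the domain would otherwise reset them */
	samsung_clk_save(ctx->reg_base, example_save_regs,
			 ARRAY_SIZE(example_save_regs));
	return 0;
}

static int example_clk_resume(struct device *dev)
{
	struct samsung_clk_provider *ctx = dev_get_drvdata(dev);

	samsung_clk_restore(ctx->reg_base, example_save_regs,
			    ARRAY_SIZE(example_save_regs));
	return 0;
}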
ctx                16 drivers/clk/samsung/clk-exynos5-subcmu.c static struct samsung_clk_provider *ctx;
ctx                40 drivers/clk/samsung/clk-exynos5-subcmu.c static void exynos5_subcmu_defer_gate(struct samsung_clk_provider *ctx,
ctx                44 drivers/clk/samsung/clk-exynos5-subcmu.c 		samsung_clk_add_lookup(ctx, ERR_PTR(-EPROBE_DEFER), list++->id);
ctx                61 drivers/clk/samsung/clk-exynos5-subcmu.c 	ctx = _ctx;
ctx                66 drivers/clk/samsung/clk-exynos5-subcmu.c 		exynos5_subcmu_defer_gate(ctx, (*_cmu)->gate_clks,
ctx                68 drivers/clk/samsung/clk-exynos5-subcmu.c 		exynos5_subcmu_clk_save(ctx->reg_base, (*_cmu)->suspend_regs,
ctx                78 drivers/clk/samsung/clk-exynos5-subcmu.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx                79 drivers/clk/samsung/clk-exynos5-subcmu.c 	exynos5_subcmu_clk_save(ctx->reg_base, info->suspend_regs,
ctx                81 drivers/clk/samsung/clk-exynos5-subcmu.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx                91 drivers/clk/samsung/clk-exynos5-subcmu.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx                92 drivers/clk/samsung/clk-exynos5-subcmu.c 	exynos5_subcmu_clk_restore(ctx->reg_base, info->suspend_regs,
ctx                94 drivers/clk/samsung/clk-exynos5-subcmu.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               108 drivers/clk/samsung/clk-exynos5-subcmu.c 	ctx->dev = dev;
ctx               109 drivers/clk/samsung/clk-exynos5-subcmu.c 	samsung_clk_register_div(ctx, info->div_clks, info->nr_div_clks);
ctx               110 drivers/clk/samsung/clk-exynos5-subcmu.c 	samsung_clk_register_gate(ctx, info->gate_clks, info->nr_gate_clks);
ctx               111 drivers/clk/samsung/clk-exynos5-subcmu.c 	ctx->dev = NULL;
ctx                23 drivers/clk/samsung/clk-exynos5-subcmu.h void exynos5_subcmus_init(struct samsung_clk_provider *ctx, int nr_cmus,
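The subcmu entries document a deferral trick: gate IDs belonging to a power-domain sub-CMU are first parked as ERR_PTR(-EPROBE_DEFER) placeholders, and the real gates only appear once the sub-CMU platform driver probes (note ctx->dev being set around the registration at clk-exynos5-subcmu.c:108-111 above). The placeholder half, as a sketch:

static void example_defer_gates(struct samsung_clk_provider *ctx,
				const struct samsung_gate_clock *list,
				int nr_clks)
{
	/* every consumer that looks one of these IDs up gets
	 * -EPROBE_DEFER and retries after the sub-CMU driver binds */
	while (nr_clks--)
		samsung_clk_add_lookup(ctx, ERR_PTR(-EPROBE_DEFER),
				       list++->id);
}

At probe time the driver briefly sets ctx->dev so the replacement gates are registered against the device (and hence its power domain), then clears it again.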
ctx               783 drivers/clk/samsung/clk-exynos5250.c 	struct samsung_clk_provider *ctx;
ctx               794 drivers/clk/samsung/clk-exynos5250.c 	ctx = samsung_clk_init(np, reg_base, CLK_NR_CLKS);
ctx               796 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_of_register_fixed_ext(ctx, exynos5250_fixed_rate_ext_clks,
ctx               799 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_mux(ctx, exynos5250_pll_pmux_clks,
ctx               810 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_pll(ctx, exynos5250_plls,
ctx               813 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_fixed_rate(ctx, exynos5250_fixed_rate_clks,
ctx               815 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_fixed_factor(ctx, exynos5250_fixed_factor_clks,
ctx               817 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_mux(ctx, exynos5250_mux_clks,
ctx               819 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_div(ctx, exynos5250_div_clks,
ctx               821 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_register_gate(ctx, exynos5250_gate_clks,
ctx               823 drivers/clk/samsung/clk-exynos5250.c 	exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx               850 drivers/clk/samsung/clk-exynos5250.c 	exynos5_subcmus_init(ctx, ARRAY_SIZE(exynos5250_subcmus),
ctx               853 drivers/clk/samsung/clk-exynos5250.c 	samsung_clk_of_add_provider(np, ctx);
ctx              1545 drivers/clk/samsung/clk-exynos5420.c 	struct samsung_clk_provider *ctx;
ctx              1557 drivers/clk/samsung/clk-exynos5420.c 	ctx = samsung_clk_init(np, reg_base, CLK_NR_CLKS);
ctx              1559 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_of_register_fixed_ext(ctx, exynos5x_fixed_rate_ext_clks,
ctx              1574 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_pll(ctx, exynos5x_plls, ARRAY_SIZE(exynos5x_plls),
ctx              1576 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_fixed_rate(ctx, exynos5x_fixed_rate_clks,
ctx              1578 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_fixed_factor(ctx, exynos5x_fixed_factor_clks,
ctx              1580 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_mux(ctx, exynos5x_mux_clks,
ctx              1582 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_div(ctx, exynos5x_div_clks,
ctx              1584 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_register_gate(ctx, exynos5x_gate_clks,
ctx              1588 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_mux(ctx, exynos5420_mux_clks,
ctx              1590 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_div(ctx, exynos5420_div_clks,
ctx              1592 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_gate(ctx, exynos5420_gate_clks,
ctx              1596 drivers/clk/samsung/clk-exynos5420.c 				ctx, exynos5800_fixed_factor_clks,
ctx              1598 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_mux(ctx, exynos5800_mux_clks,
ctx              1600 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_div(ctx, exynos5800_div_clks,
ctx              1602 drivers/clk/samsung/clk-exynos5420.c 		samsung_clk_register_gate(ctx, exynos5800_gate_clks,
ctx              1607 drivers/clk/samsung/clk-exynos5420.c 		exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx              1611 drivers/clk/samsung/clk-exynos5420.c 		exynos_register_cpu_clock(ctx, CLK_ARM_CLK, "armclk",
ctx              1615 drivers/clk/samsung/clk-exynos5420.c 	exynos_register_cpu_clock(ctx, CLK_KFC_CLK, "kfcclk",
ctx              1627 drivers/clk/samsung/clk-exynos5420.c 		exynos5_subcmus_init(ctx, ARRAY_SIZE(exynos5800_subcmus),
ctx              1630 drivers/clk/samsung/clk-exynos5420.c 		exynos5_subcmus_init(ctx, ARRAY_SIZE(exynos5x_subcmus),
ctx              1641 drivers/clk/samsung/clk-exynos5420.c 	samsung_clk_of_add_provider(np, ctx);
ctx              3680 drivers/clk/samsung/clk-exynos5433.c 	struct samsung_clk_provider *ctx;
ctx              3688 drivers/clk/samsung/clk-exynos5433.c 	ctx = samsung_clk_init(np, reg_base, APOLLO_NR_CLK);
ctx              3689 drivers/clk/samsung/clk-exynos5433.c 	if (!ctx) {
ctx              3694 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_pll(ctx, apollo_pll_clks,
ctx              3696 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_mux(ctx, apollo_mux_clks,
ctx              3698 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_div(ctx, apollo_div_clks,
ctx              3700 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_gate(ctx, apollo_gate_clks,
ctx              3703 drivers/clk/samsung/clk-exynos5433.c 	exynos_register_cpu_clock(ctx, CLK_SCLK_APOLLO, "apolloclk",
ctx              3711 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_of_add_provider(np, ctx);
ctx              3934 drivers/clk/samsung/clk-exynos5433.c 	struct samsung_clk_provider *ctx;
ctx              3942 drivers/clk/samsung/clk-exynos5433.c 	ctx = samsung_clk_init(np, reg_base, ATLAS_NR_CLK);
ctx              3943 drivers/clk/samsung/clk-exynos5433.c 	if (!ctx) {
ctx              3948 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_pll(ctx, atlas_pll_clks,
ctx              3950 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_mux(ctx, atlas_mux_clks,
ctx              3952 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_div(ctx, atlas_div_clks,
ctx              3954 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_register_gate(ctx, atlas_gate_clks,
ctx              3957 drivers/clk/samsung/clk-exynos5433.c 	exynos_register_cpu_clock(ctx, CLK_SCLK_ATLAS, "atlasclk",
ctx              3965 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_of_add_provider(np, ctx);
ctx              5509 drivers/clk/samsung/clk-exynos5433.c 	struct samsung_clk_provider ctx;
ctx              5517 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_save(data->ctx.reg_base, data->clk_save,
ctx              5524 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_restore(data->ctx.reg_base, data->clk_suspend,
ctx              5545 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_restore(data->ctx.reg_base, data->clk_save,
ctx              5558 drivers/clk/samsung/clk-exynos5433.c 	struct samsung_clk_provider *ctx;
ctx              5567 drivers/clk/samsung/clk-exynos5433.c 			    struct_size(data, ctx.clk_data.hws, info->nr_clk_ids),
ctx              5571 drivers/clk/samsung/clk-exynos5433.c 	ctx = &data->ctx;
ctx              5579 drivers/clk/samsung/clk-exynos5433.c 		ctx->clk_data.hws[i] = ERR_PTR(-ENOENT);
ctx              5581 drivers/clk/samsung/clk-exynos5433.c 	ctx->clk_data.num = info->nr_clk_ids;
ctx              5582 drivers/clk/samsung/clk-exynos5433.c 	ctx->reg_base = reg_base;
ctx              5583 drivers/clk/samsung/clk-exynos5433.c 	ctx->dev = dev;
ctx              5584 drivers/clk/samsung/clk-exynos5433.c 	spin_lock_init(&ctx->lock);
ctx              5632 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_pll(ctx, info->pll_clks, info->nr_pll_clks,
ctx              5635 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_mux(ctx, info->mux_clks,
ctx              5638 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_div(ctx, info->div_clks,
ctx              5641 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_gate(ctx, info->gate_clks,
ctx              5644 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_fixed_rate(ctx, info->fixed_clks,
ctx              5647 drivers/clk/samsung/clk-exynos5433.c 		samsung_clk_register_fixed_factor(ctx, info->fixed_factor_clks,
ctx              5650 drivers/clk/samsung/clk-exynos5433.c 	samsung_clk_of_add_provider(dev->of_node, ctx);
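Unlike the init-time CMUs, the exynos5433 ARM-cluster CMUs embed the provider inside a driver-private struct and size the whole thing with struct_size(), since samsung_clk_provider ends in the flexible clk_data.hws[] array. A sketch of that allocation; example_cmu_data and EXAMPLE_NR_CLK are hypothetical:

struct example_cmu_data {
	struct samsung_clk_reg_dump *clk_save;
	unsigned int nr_clk_save;
	struct samsung_clk_provider ctx;	/* must be the last member */
};

static int example_cmu_probe(struct platform_device *pdev)
{
	struct example_cmu_data *data;
	int i;

	/* one allocation covers the wrapper plus the hws[] tail */
	data = devm_kzalloc(&pdev->dev,
			    struct_size(data, ctx.clk_data.hws,
					EXAMPLE_NR_CLK),
			    GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	for (i = 0; i < EXAMPLE_NR_CLK; i++)
		data->ctx.clk_data.hws[i] = ERR_PTR(-ENOENT);
	data->ctx.clk_data.num = EXAMPLE_NR_CLK;
	spin_lock_init(&data->ctx.lock);

	return 0;
}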
ctx              1249 drivers/clk/samsung/clk-pll.c static void __init _samsung_clk_register_pll(struct samsung_clk_provider *ctx,
ctx              1389 drivers/clk/samsung/clk-pll.c 	ret = clk_hw_register(ctx->dev, &pll->hw);
ctx              1397 drivers/clk/samsung/clk-pll.c 	samsung_clk_add_lookup(ctx, &pll->hw, pll_clk->id);
ctx              1400 drivers/clk/samsung/clk-pll.c void __init samsung_clk_register_pll(struct samsung_clk_provider *ctx,
ctx              1407 drivers/clk/samsung/clk-pll.c 		_samsung_clk_register_pll(ctx, &pll_list[cnt], base);
ctx               308 drivers/clk/samsung/clk-s3c2410.c 		struct samsung_clk_provider *ctx,
ctx               314 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_fixed_rate(ctx, s3c2410_common_frate_clks,
ctx               317 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_alias(ctx, &xti_alias, 1);
ctx               324 drivers/clk/samsung/clk-s3c2410.c 	struct samsung_clk_provider *ctx;
ctx               333 drivers/clk/samsung/clk-s3c2410.c 	ctx = samsung_clk_init(np, reg_base, NR_CLKS);
ctx               337 drivers/clk/samsung/clk-s3c2410.c 		s3c2410_common_clk_register_fixed_ext(ctx, xti_f);
ctx               346 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_pll(ctx, s3c2410_plls,
ctx               362 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_pll(ctx, s3c244x_common_plls,
ctx               367 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_mux(ctx, s3c2410_common_muxes,
ctx               369 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_div(ctx, s3c2410_common_dividers,
ctx               371 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_gate(ctx, s3c2410_common_gates,
ctx               375 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_div(ctx, s3c244x_common_dividers,
ctx               377 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_gate(ctx, s3c244x_common_gates,
ctx               379 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_mux(ctx, s3c244x_common_muxes,
ctx               381 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_fixed_factor(ctx, s3c244x_common_ffactor,
ctx               388 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_div(ctx, s3c2410_dividers,
ctx               390 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_fixed_factor(ctx, s3c2410_ffactor,
ctx               392 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_alias(ctx, s3c2410_aliases,
ctx               396 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_mux(ctx, s3c2440_muxes,
ctx               398 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_gate(ctx, s3c2440_gates,
ctx               402 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_mux(ctx, s3c2442_muxes,
ctx               404 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_fixed_factor(ctx, s3c2442_ffactor,
ctx               413 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_register_alias(ctx, s3c2410_common_aliases,
ctx               417 drivers/clk/samsung/clk-s3c2410.c 		samsung_clk_register_alias(ctx, s3c244x_common_aliases,
ctx               424 drivers/clk/samsung/clk-s3c2410.c 	samsung_clk_of_add_provider(np, ctx);
ctx               191 drivers/clk/samsung/clk-s3c2412.c 		struct samsung_clk_provider *ctx,
ctx               199 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_fixed_rate(ctx, s3c2412_common_frate_clks,
ctx               202 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_alias(ctx, &xti_alias, 1);
ctx               208 drivers/clk/samsung/clk-s3c2412.c 	struct samsung_clk_provider *ctx;
ctx               218 drivers/clk/samsung/clk-s3c2412.c 	ctx = samsung_clk_init(np, reg_base, NR_CLKS);
ctx               222 drivers/clk/samsung/clk-s3c2412.c 		s3c2412_common_clk_register_fixed_ext(ctx, xti_f, ext_f);
ctx               225 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_pll(ctx, s3c2412_plls, ARRAY_SIZE(s3c2412_plls),
ctx               229 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_mux(ctx, s3c2412_muxes, ARRAY_SIZE(s3c2412_muxes));
ctx               230 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_div(ctx, s3c2412_dividers,
ctx               232 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_gate(ctx, s3c2412_gates,
ctx               234 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_fixed_factor(ctx, s3c2412_ffactor,
ctx               236 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_register_alias(ctx, s3c2412_aliases,
ctx               242 drivers/clk/samsung/clk-s3c2412.c 	samsung_clk_of_add_provider(np, ctx);
ctx               334 drivers/clk/samsung/clk-s3c2443.c 		struct samsung_clk_provider *ctx, unsigned long xti_f)
ctx               337 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_register_fixed_rate(ctx, s3c2443_common_frate_clks,
ctx               345 drivers/clk/samsung/clk-s3c2443.c 	struct samsung_clk_provider *ctx;
ctx               355 drivers/clk/samsung/clk-s3c2443.c 	ctx = samsung_clk_init(np, reg_base, NR_CLKS);
ctx               359 drivers/clk/samsung/clk-s3c2443.c 		s3c2443_common_clk_register_fixed_ext(ctx, xti_f);
ctx               363 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_pll(ctx, s3c2416_pll_clks,
ctx               366 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_pll(ctx, s3c2443_pll_clks,
ctx               370 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_register_mux(ctx, s3c2443_common_muxes,
ctx               372 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_register_div(ctx, s3c2443_common_dividers,
ctx               374 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_register_gate(ctx, s3c2443_common_gates,
ctx               376 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_register_alias(ctx, s3c2443_common_aliases,
ctx               382 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_div(ctx, s3c2450_dividers,
ctx               384 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_mux(ctx, s3c2450_muxes,
ctx               386 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_gate(ctx, s3c2450_gates,
ctx               388 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_alias(ctx, s3c2450_aliases,
ctx               392 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_div(ctx, s3c2416_dividers,
ctx               394 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_mux(ctx, s3c2416_muxes,
ctx               396 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_gate(ctx, s3c2416_gates,
ctx               398 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_alias(ctx, s3c2416_aliases,
ctx               402 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_div(ctx, s3c2443_dividers,
ctx               404 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_gate(ctx, s3c2443_gates,
ctx               406 drivers/clk/samsung/clk-s3c2443.c 		samsung_clk_register_alias(ctx, s3c2443_aliases,
ctx               414 drivers/clk/samsung/clk-s3c2443.c 	samsung_clk_of_add_provider(np, ctx);
ctx               380 drivers/clk/samsung/clk-s3c64xx.c 				struct samsung_clk_provider *ctx,
ctx               386 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_fixed_rate(ctx, s3c64xx_fixed_rate_ext_clks,
ctx               395 drivers/clk/samsung/clk-s3c64xx.c 	struct samsung_clk_provider *ctx;
ctx               406 drivers/clk/samsung/clk-s3c64xx.c 	ctx = samsung_clk_init(np, reg_base, NR_CLKS);
ctx               410 drivers/clk/samsung/clk-s3c64xx.c 		s3c64xx_clk_register_fixed_ext(ctx, xtal_f, xusbxti_f);
ctx               413 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_pll(ctx, s3c64xx_pll_clks,
ctx               417 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_fixed_rate(ctx, s3c64xx_fixed_rate_clks,
ctx               419 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_mux(ctx, s3c64xx_mux_clks,
ctx               421 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_div(ctx, s3c64xx_div_clks,
ctx               423 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_gate(ctx, s3c64xx_gate_clks,
ctx               428 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_mux(ctx, s3c6400_mux_clks,
ctx               430 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_div(ctx, s3c6400_div_clks,
ctx               432 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_gate(ctx, s3c6400_gate_clks,
ctx               434 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_alias(ctx, s3c6400_clock_aliases,
ctx               437 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_mux(ctx, s3c6410_mux_clks,
ctx               439 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_div(ctx, s3c6410_div_clks,
ctx               441 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_gate(ctx, s3c6410_gate_clks,
ctx               443 drivers/clk/samsung/clk-s3c64xx.c 		samsung_clk_register_alias(ctx, s3c6410_clock_aliases,
ctx               447 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_register_alias(ctx, s3c64xx_clock_aliases,
ctx               456 drivers/clk/samsung/clk-s3c64xx.c 	samsung_clk_of_add_provider(np, ctx);
ctx               743 drivers/clk/samsung/clk-s5pv210.c 	struct samsung_clk_provider *ctx;
ctx               745 drivers/clk/samsung/clk-s5pv210.c 	ctx = samsung_clk_init(np, reg_base, NR_CLKS);
ctx               747 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_mux(ctx, early_mux_clks,
ctx               751 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_fixed_rate(ctx, s5p6442_frate_clks,
ctx               753 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_pll(ctx, s5p6442_pll_clks,
ctx               755 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_mux(ctx, s5p6442_mux_clks,
ctx               757 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_div(ctx, s5p6442_div_clks,
ctx               759 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_gate(ctx, s5p6442_gate_clks,
ctx               762 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_fixed_rate(ctx, s5pv210_frate_clks,
ctx               764 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_pll(ctx, s5pv210_pll_clks,
ctx               766 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_mux(ctx, s5pv210_mux_clks,
ctx               768 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_div(ctx, s5pv210_div_clks,
ctx               770 drivers/clk/samsung/clk-s5pv210.c 		samsung_clk_register_gate(ctx, s5pv210_gate_clks,
ctx               774 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_mux(ctx, mux_clks, ARRAY_SIZE(mux_clks));
ctx               775 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_div(ctx, div_clks, ARRAY_SIZE(div_clks));
ctx               776 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_gate(ctx, gate_clks, ARRAY_SIZE(gate_clks));
ctx               778 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_fixed_factor(ctx, ffactor_clks,
ctx               781 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_register_alias(ctx, s5pv210_aliases,
ctx               787 drivers/clk/samsung/clk-s5pv210.c 	samsung_clk_of_add_provider(np, ctx);
ctx                60 drivers/clk/samsung/clk.c 	struct samsung_clk_provider *ctx;
ctx                63 drivers/clk/samsung/clk.c 	ctx = kzalloc(sizeof(struct samsung_clk_provider) +
ctx                64 drivers/clk/samsung/clk.c 		      sizeof(*ctx->clk_data.hws) * nr_clks, GFP_KERNEL);
ctx                65 drivers/clk/samsung/clk.c 	if (!ctx)
ctx                69 drivers/clk/samsung/clk.c 		ctx->clk_data.hws[i] = ERR_PTR(-ENOENT);
ctx                71 drivers/clk/samsung/clk.c 	ctx->reg_base = base;
ctx                72 drivers/clk/samsung/clk.c 	ctx->clk_data.num = nr_clks;
ctx                73 drivers/clk/samsung/clk.c 	spin_lock_init(&ctx->lock);
ctx                75 drivers/clk/samsung/clk.c 	return ctx;
ctx                79 drivers/clk/samsung/clk.c 				struct samsung_clk_provider *ctx)
ctx                83 drivers/clk/samsung/clk.c 					&ctx->clk_data))
ctx                89 drivers/clk/samsung/clk.c void samsung_clk_add_lookup(struct samsung_clk_provider *ctx,
ctx                93 drivers/clk/samsung/clk.c 		ctx->clk_data.hws[id] = clk_hw;
ctx                97 drivers/clk/samsung/clk.c void __init samsung_clk_register_alias(struct samsung_clk_provider *ctx,
ctx               111 drivers/clk/samsung/clk.c 		clk_hw = ctx->clk_data.hws[list->id];
ctx               127 drivers/clk/samsung/clk.c void __init samsung_clk_register_fixed_rate(struct samsung_clk_provider *ctx,
ctx               135 drivers/clk/samsung/clk.c 		clk_hw = clk_hw_register_fixed_rate(ctx->dev, list->name,
ctx               143 drivers/clk/samsung/clk.c 		samsung_clk_add_lookup(ctx, clk_hw, list->id);
ctx               157 drivers/clk/samsung/clk.c void __init samsung_clk_register_fixed_factor(struct samsung_clk_provider *ctx,
ctx               164 drivers/clk/samsung/clk.c 		clk_hw = clk_hw_register_fixed_factor(ctx->dev, list->name,
ctx               172 drivers/clk/samsung/clk.c 		samsung_clk_add_lookup(ctx, clk_hw, list->id);
ctx               177 drivers/clk/samsung/clk.c void __init samsung_clk_register_mux(struct samsung_clk_provider *ctx,
ctx               185 drivers/clk/samsung/clk.c 		clk_hw = clk_hw_register_mux(ctx->dev, list->name,
ctx               187 drivers/clk/samsung/clk.c 			ctx->reg_base + list->offset,
ctx               188 drivers/clk/samsung/clk.c 			list->shift, list->width, list->mux_flags, &ctx->lock);
ctx               195 drivers/clk/samsung/clk.c 		samsung_clk_add_lookup(ctx, clk_hw, list->id);
ctx               200 drivers/clk/samsung/clk.c void __init samsung_clk_register_div(struct samsung_clk_provider *ctx,
ctx               209 drivers/clk/samsung/clk.c 			clk_hw = clk_hw_register_divider_table(ctx->dev,
ctx               211 drivers/clk/samsung/clk.c 				ctx->reg_base + list->offset,
ctx               213 drivers/clk/samsung/clk.c 				list->table, &ctx->lock);
ctx               215 drivers/clk/samsung/clk.c 			clk_hw = clk_hw_register_divider(ctx->dev, list->name,
ctx               217 drivers/clk/samsung/clk.c 				ctx->reg_base + list->offset, list->shift,
ctx               218 drivers/clk/samsung/clk.c 				list->width, list->div_flags, &ctx->lock);
ctx               225 drivers/clk/samsung/clk.c 		samsung_clk_add_lookup(ctx, clk_hw, list->id);
ctx               230 drivers/clk/samsung/clk.c void __init samsung_clk_register_gate(struct samsung_clk_provider *ctx,
ctx               238 drivers/clk/samsung/clk.c 		clk_hw = clk_hw_register_gate(ctx->dev, list->name, list->parent_name,
ctx               239 drivers/clk/samsung/clk.c 				list->flags, ctx->reg_base + list->offset,
ctx               240 drivers/clk/samsung/clk.c 				list->bit_idx, list->gate_flags, &ctx->lock);
ctx               247 drivers/clk/samsung/clk.c 		samsung_clk_add_lookup(ctx, clk_hw, list->id);
ctx               255 drivers/clk/samsung/clk.c void __init samsung_clk_of_register_fixed_ext(struct samsung_clk_provider *ctx,
ctx               269 drivers/clk/samsung/clk.c 	samsung_clk_register_fixed_rate(ctx, fixed_rate_clk, nr_fixed_rate_clk);
ctx               351 drivers/clk/samsung/clk.c 	struct samsung_clk_provider *ctx;
ctx               359 drivers/clk/samsung/clk.c 	ctx = samsung_clk_init(np, reg_base, cmu->nr_clk_ids);
ctx               360 drivers/clk/samsung/clk.c 	if (!ctx) {
ctx               362 drivers/clk/samsung/clk.c 		return ctx;
ctx               366 drivers/clk/samsung/clk.c 		samsung_clk_register_pll(ctx, cmu->pll_clks, cmu->nr_pll_clks,
ctx               369 drivers/clk/samsung/clk.c 		samsung_clk_register_mux(ctx, cmu->mux_clks,
ctx               372 drivers/clk/samsung/clk.c 		samsung_clk_register_div(ctx, cmu->div_clks, cmu->nr_div_clks);
ctx               374 drivers/clk/samsung/clk.c 		samsung_clk_register_gate(ctx, cmu->gate_clks,
ctx               377 drivers/clk/samsung/clk.c 		samsung_clk_register_fixed_rate(ctx, cmu->fixed_clks,
ctx               380 drivers/clk/samsung/clk.c 		samsung_clk_register_fixed_factor(ctx, cmu->fixed_factor_clks,
ctx               387 drivers/clk/samsung/clk.c 	samsung_clk_of_add_provider(np, ctx);
ctx               389 drivers/clk/samsung/clk.c 	return ctx;
ctx               320 drivers/clk/samsung/clk.h 			struct samsung_clk_provider *ctx);
ctx               322 drivers/clk/samsung/clk.h 			struct samsung_clk_provider *ctx,
ctx               327 drivers/clk/samsung/clk.h extern void samsung_clk_add_lookup(struct samsung_clk_provider *ctx,
ctx               330 drivers/clk/samsung/clk.h extern void __init samsung_clk_register_alias(struct samsung_clk_provider *ctx,
ctx               334 drivers/clk/samsung/clk.h 			struct samsung_clk_provider *ctx,
ctx               338 drivers/clk/samsung/clk.h 			struct samsung_clk_provider *ctx,
ctx               341 drivers/clk/samsung/clk.h extern void __init samsung_clk_register_mux(struct samsung_clk_provider *ctx,
ctx               344 drivers/clk/samsung/clk.h extern void __init samsung_clk_register_div(struct samsung_clk_provider *ctx,
ctx               347 drivers/clk/samsung/clk.h extern void __init samsung_clk_register_gate(struct samsung_clk_provider *ctx,
ctx               350 drivers/clk/samsung/clk.h extern void __init samsung_clk_register_pll(struct samsung_clk_provider *ctx,
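The clk.c fragments above all follow one shape: map the register off ctx->reg_base, hand &ctx->lock to the generic clk_hw helper, then publish the result under its DT binding ID via samsung_clk_add_lookup(). Consolidated as a sketch for the gate case (this rephrases the fragments at clk.c:230-247 rather than quoting them whole):

static void example_register_gates(struct samsung_clk_provider *ctx,
				   const struct samsung_gate_clock *list,
				   unsigned int nr)
{
	struct clk_hw *hw;
	unsigned int i;

	for (i = 0; i < nr; i++, list++) {
		hw = clk_hw_register_gate(ctx->dev, list->name,
					  list->parent_name, list->flags,
					  ctx->reg_base + list->offset,
					  list->bit_idx, list->gate_flags,
					  &ctx->lock);
		if (IS_ERR(hw))
			continue;	/* the real code logs and skips */
		samsung_clk_add_lookup(ctx, hw, list->id);
	}
}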
ctx                74 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx                83 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
ctx                84 drivers/crypto/amcc/crypto4xx_alg.c 		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
ctx                85 drivers/crypto/amcc/crypto4xx_alg.c 		ctx->sa_len, 0, NULL);
ctx               127 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               139 drivers/crypto/amcc/crypto4xx_alg.c 	if (ctx->sa_in || ctx->sa_out)
ctx               140 drivers/crypto/amcc/crypto4xx_alg.c 		crypto4xx_free_sa(ctx);
ctx               142 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
ctx               147 drivers/crypto/amcc/crypto4xx_alg.c 	sa = ctx->sa_in;
ctx               168 drivers/crypto/amcc/crypto4xx_alg.c 	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
ctx               169 drivers/crypto/amcc/crypto4xx_alg.c 	sa = ctx->sa_out;
ctx               211 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               219 drivers/crypto/amcc/crypto4xx_alg.c 	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
ctx               228 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               230 drivers/crypto/amcc/crypto4xx_alg.c 		ctx->iv_nonce,
ctx               235 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
ctx               237 drivers/crypto/amcc/crypto4xx_alg.c 				  ctx->sa_out, ctx->sa_len, 0, NULL);
ctx               243 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               245 drivers/crypto/amcc/crypto4xx_alg.c 		ctx->iv_nonce,
ctx               250 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
ctx               252 drivers/crypto/amcc/crypto4xx_alg.c 				  ctx->sa_out, ctx->sa_len, 0, NULL);
ctx               259 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               272 drivers/crypto/amcc/crypto4xx_alg.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
ctx               275 drivers/crypto/amcc/crypto4xx_alg.c 		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
ctx               290 drivers/crypto/amcc/crypto4xx_alg.c static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
ctx               297 drivers/crypto/amcc/crypto4xx_alg.c 	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
ctx               299 drivers/crypto/amcc/crypto4xx_alg.c 	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
ctx               301 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
ctx               304 drivers/crypto/amcc/crypto4xx_alg.c 		crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
ctx               313 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               316 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
ctx               363 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx, bool do_decrypt)
ctx               367 drivers/crypto/amcc/crypto4xx_alg.c 	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
ctx               377 drivers/crypto/amcc/crypto4xx_alg.c static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
ctx               384 drivers/crypto/amcc/crypto4xx_alg.c 	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
ctx               385 drivers/crypto/amcc/crypto4xx_alg.c 	crypto_aead_set_flags(ctx->sw_cipher.aead,
ctx               387 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
ctx               390 drivers/crypto/amcc/crypto4xx_alg.c 		crypto_aead_get_flags(ctx->sw_cipher.aead) &
ctx               404 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               408 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
ctx               412 drivers/crypto/amcc/crypto4xx_alg.c 	if (ctx->sa_in || ctx->sa_out)
ctx               413 drivers/crypto/amcc/crypto4xx_alg.c 		crypto4xx_free_sa(ctx);
ctx               415 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
ctx               420 drivers/crypto/amcc/crypto4xx_alg.c 	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
ctx               440 drivers/crypto/amcc/crypto4xx_alg.c 	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
ctx               441 drivers/crypto/amcc/crypto4xx_alg.c 	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
ctx               462 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
ctx               474 drivers/crypto/amcc/crypto4xx_alg.c 		return crypto4xx_aead_fallback(req, ctx, decrypt);
ctx               476 drivers/crypto/amcc/crypto4xx_alg.c 	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
ctx               487 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
ctx               489 drivers/crypto/amcc/crypto4xx_alg.c 				  sa, ctx->sa_len, req->assoclen, rctx->dst);
ctx               506 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               508 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
ctx               530 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto_aes_ctx ctx;
ctx               534 drivers/crypto/amcc/crypto4xx_alg.c 	rc = aes_expandkey(&ctx, key, keylen);
ctx               540 drivers/crypto/amcc/crypto4xx_alg.c 	aes_encrypt(&ctx, src, src);
ctx               542 drivers/crypto/amcc/crypto4xx_alg.c 	memzero_explicit(&ctx, sizeof(ctx));
ctx               550 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               559 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
ctx               563 drivers/crypto/amcc/crypto4xx_alg.c 	if (ctx->sa_in || ctx->sa_out)
ctx               564 drivers/crypto/amcc/crypto4xx_alg.c 		crypto4xx_free_sa(ctx);
ctx               566 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
ctx               570 drivers/crypto/amcc/crypto4xx_alg.c 	sa  = (struct dynamic_sa_ctl *) ctx->sa_in;
ctx               597 drivers/crypto/amcc/crypto4xx_alg.c 	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
ctx               598 drivers/crypto/amcc/crypto4xx_alg.c 	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
ctx               604 drivers/crypto/amcc/crypto4xx_alg.c 	crypto4xx_free_sa(ctx);
ctx               611 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               620 drivers/crypto/amcc/crypto4xx_alg.c 		return crypto4xx_aead_fallback(req, ctx, decrypt);
ctx               625 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
ctx               627 drivers/crypto/amcc/crypto4xx_alg.c 				  decrypt ? ctx->sa_in : ctx->sa_out,
ctx               628 drivers/crypto/amcc/crypto4xx_alg.c 				  ctx->sa_len, req->assoclen, rctx->dst);
ctx               651 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               657 drivers/crypto/amcc/crypto4xx_alg.c 	ctx->dev   = my_alg->dev;
ctx               660 drivers/crypto/amcc/crypto4xx_alg.c 	if (ctx->sa_in || ctx->sa_out)
ctx               661 drivers/crypto/amcc/crypto4xx_alg.c 		crypto4xx_free_sa(ctx);
ctx               663 drivers/crypto/amcc/crypto4xx_alg.c 	rc = crypto4xx_alloc_sa(ctx, sa_len);
ctx               669 drivers/crypto/amcc/crypto4xx_alg.c 	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
ctx               689 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               693 drivers/crypto/amcc/crypto4xx_alg.c 	sa = ctx->sa_in;
ctx               705 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               711 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
ctx               712 drivers/crypto/amcc/crypto4xx_alg.c 				  req->nbytes, NULL, 0, ctx->sa_in,
ctx               713 drivers/crypto/amcc/crypto4xx_alg.c 				  ctx->sa_len, 0, NULL);
ctx               724 drivers/crypto/amcc/crypto4xx_alg.c 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               730 drivers/crypto/amcc/crypto4xx_alg.c 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
ctx               731 drivers/crypto/amcc/crypto4xx_alg.c 				  req->nbytes, NULL, 0, ctx->sa_in,
ctx               732 drivers/crypto/amcc/crypto4xx_alg.c 				  ctx->sa_len, 0, NULL);
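The crypto4xx skcipher path keeps a sync-skcipher software fallback whose key and request flags must stay mirrored with the hardware tfm's; the fragments at crypto4xx_alg.c:290-306 show the flag shuffling. A consolidated sketch of that setkey-time mirroring (the CRYPTO_TFM_RES_MASK half matches this kernel era; later kernels dropped the result flags):

static int example_setup_fallback(struct crypto4xx_ctx *ctx,
				  struct crypto_skcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	int rc;

	/* forward request flags from the outer tfm to the fallback */
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
				       crypto_skcipher_get_flags(cipher) &
				       CRYPTO_TFM_REQ_MASK);
	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);

	/* reflect whatever the fallback reported back to the caller */
	crypto_skcipher_set_flags(cipher,
		crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
		CRYPTO_TFM_RES_MASK);
	return rc;
}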
ctx               135 drivers/crypto/amcc/crypto4xx_core.c int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size)
ctx               137 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_in = kcalloc(size, 4, GFP_ATOMIC);
ctx               138 drivers/crypto/amcc/crypto4xx_core.c 	if (ctx->sa_in == NULL)
ctx               141 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_out = kcalloc(size, 4, GFP_ATOMIC);
ctx               142 drivers/crypto/amcc/crypto4xx_core.c 	if (ctx->sa_out == NULL) {
ctx               143 drivers/crypto/amcc/crypto4xx_core.c 		kfree(ctx->sa_in);
ctx               144 drivers/crypto/amcc/crypto4xx_core.c 		ctx->sa_in = NULL;
ctx               148 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_len = size;
ctx               153 drivers/crypto/amcc/crypto4xx_core.c void crypto4xx_free_sa(struct crypto4xx_ctx *ctx)
ctx               155 drivers/crypto/amcc/crypto4xx_core.c 	kfree(ctx->sa_in);
ctx               156 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_in = NULL;
ctx               157 drivers/crypto/amcc/crypto4xx_core.c 	kfree(ctx->sa_out);
ctx               158 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_out = NULL;
ctx               159 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_len = 0;
ctx               489 drivers/crypto/amcc/crypto4xx_core.c 					struct crypto4xx_ctx *ctx)
ctx               491 drivers/crypto/amcc/crypto4xx_core.c 	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *) ctx->sa_in;
ctx               555 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_ctx *ctx;
ctx               559 drivers/crypto/amcc/crypto4xx_core.c 	ctx  = crypto_tfm_ctx(ahash_req->base.tfm);
ctx               679 drivers/crypto/amcc/crypto4xx_core.c 		       struct crypto4xx_ctx *ctx,
ctx               689 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_device *dev = ctx->dev;
ctx               936 drivers/crypto/amcc/crypto4xx_core.c 			       struct crypto4xx_ctx *ctx)
ctx               938 drivers/crypto/amcc/crypto4xx_core.c 	ctx->dev = amcc_alg->dev;
ctx               939 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_in = NULL;
ctx               940 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_out = NULL;
ctx               941 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sa_len = 0;
ctx               948 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
ctx               951 drivers/crypto/amcc/crypto4xx_core.c 		ctx->sw_cipher.cipher =
ctx               954 drivers/crypto/amcc/crypto4xx_core.c 		if (IS_ERR(ctx->sw_cipher.cipher))
ctx               955 drivers/crypto/amcc/crypto4xx_core.c 			return PTR_ERR(ctx->sw_cipher.cipher);
ctx               959 drivers/crypto/amcc/crypto4xx_core.c 	crypto4xx_ctx_init(amcc_alg, ctx);
ctx               963 drivers/crypto/amcc/crypto4xx_core.c static void crypto4xx_common_exit(struct crypto4xx_ctx *ctx)
ctx               965 drivers/crypto/amcc/crypto4xx_core.c 	crypto4xx_free_sa(ctx);
ctx               970 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
ctx               972 drivers/crypto/amcc/crypto4xx_core.c 	crypto4xx_common_exit(ctx);
ctx               973 drivers/crypto/amcc/crypto4xx_core.c 	if (ctx->sw_cipher.cipher)
ctx               974 drivers/crypto/amcc/crypto4xx_core.c 		crypto_free_sync_skcipher(ctx->sw_cipher.cipher);
ctx               980 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
ctx               983 drivers/crypto/amcc/crypto4xx_core.c 	ctx->sw_cipher.aead = crypto_alloc_aead(alg->base.cra_name, 0,
ctx               986 drivers/crypto/amcc/crypto4xx_core.c 	if (IS_ERR(ctx->sw_cipher.aead))
ctx               987 drivers/crypto/amcc/crypto4xx_core.c 		return PTR_ERR(ctx->sw_cipher.aead);
ctx               990 drivers/crypto/amcc/crypto4xx_core.c 	crypto4xx_ctx_init(amcc_alg, ctx);
ctx               992 drivers/crypto/amcc/crypto4xx_core.c 				crypto_aead_reqsize(ctx->sw_cipher.aead),
ctx               999 drivers/crypto/amcc/crypto4xx_core.c 	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1001 drivers/crypto/amcc/crypto4xx_core.c 	crypto4xx_common_exit(ctx);
ctx              1002 drivers/crypto/amcc/crypto4xx_core.c 	crypto_free_aead(ctx->sw_cipher.aead);
ctx               149 drivers/crypto/amcc/crypto4xx_core.h int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
ctx               150 drivers/crypto/amcc/crypto4xx_core.h void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
ctx               151 drivers/crypto/amcc/crypto4xx_core.h void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
ctx               153 drivers/crypto/amcc/crypto4xx_core.h 		       struct crypto4xx_ctx *ctx,
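crypto4xx_alloc_sa()/crypto4xx_free_sa() manage the paired inbound/outbound security-association buffers; the size argument is in 32-bit words, hence kcalloc(size, 4, ...) above and the memcpy of sa_len * 4 bytes in the setkey paths. Every setkey begins by discarding any previous pair, a step a helper could capture as (a sketch, not a function the driver actually has):

static int example_resize_sa(struct crypto4xx_ctx *ctx, u32 words)
{
	/* setkey may run many times per tfm; never leak the old pair */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	return crypto4xx_alloc_sa(ctx, words);
}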
ctx               187 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx	*ctx;
ctx               420 drivers/crypto/atmel-aes.c static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
ctx               426 drivers/crypto/atmel-aes.c 	if (!ctx->dd) {
ctx               431 drivers/crypto/atmel-aes.c 		ctx->dd = aes_dd;
ctx               433 drivers/crypto/atmel-aes.c 		aes_dd = ctx->dd;
ctx               518 drivers/crypto/atmel-aes.c 	if (dd->ctx->is_aead)
ctx               525 drivers/crypto/atmel-aes.c 	if (!dd->ctx->is_aead)
ctx               572 drivers/crypto/atmel-aes.c 				 dd->ctx->key, dd->ctx->keylen);
ctx               644 drivers/crypto/atmel-aes.c 	if (!IS_ALIGNED(len, dd->ctx->block_size))
ctx               652 drivers/crypto/atmel-aes.c 			if (!IS_ALIGNED(len, dd->ctx->block_size))
ctx               661 drivers/crypto/atmel-aes.c 		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
ctx               706 drivers/crypto/atmel-aes.c 		padlen = atmel_aes_padlen(len, dd->ctx->block_size);
ctx               862 drivers/crypto/atmel-aes.c 	switch (dd->ctx->block_size) {
ctx               937 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx;
ctx               961 drivers/crypto/atmel-aes.c 	ctx = crypto_tfm_ctx(areq->tfm);
ctx               964 drivers/crypto/atmel-aes.c 	dd->ctx = ctx;
ctx               969 drivers/crypto/atmel-aes.c 	err = ctx->start(dd);
ctx               986 drivers/crypto/atmel-aes.c 			dd->ctx->block_size != AES_BLOCK_SIZE);
ctx              1005 drivers/crypto/atmel-aes.c atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
ctx              1007 drivers/crypto/atmel-aes.c 	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
ctx              1012 drivers/crypto/atmel-aes.c 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
ctx              1021 drivers/crypto/atmel-aes.c 	ctx->offset += dd->total;
ctx              1022 drivers/crypto/atmel-aes.c 	if (ctx->offset >= req->nbytes)
ctx              1026 drivers/crypto/atmel-aes.c 	datalen = req->nbytes - ctx->offset;
ctx              1028 drivers/crypto/atmel-aes.c 	ctr = be32_to_cpu(ctx->iv[3]);
ctx              1043 drivers/crypto/atmel-aes.c 	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
ctx              1045 drivers/crypto/atmel-aes.c 	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));
ctx              1048 drivers/crypto/atmel-aes.c 	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
ctx              1054 drivers/crypto/atmel-aes.c 		ctx->iv[3] = cpu_to_be32(ctr);
ctx              1055 drivers/crypto/atmel-aes.c 		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
ctx              1068 drivers/crypto/atmel-aes.c 	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
ctx              1079 drivers/crypto/atmel-aes.c 	memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
ctx              1080 drivers/crypto/atmel-aes.c 	ctx->offset = 0;
ctx              1088 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
ctx              1094 drivers/crypto/atmel-aes.c 		ctx->block_size = CFB8_BLOCK_SIZE;
ctx              1098 drivers/crypto/atmel-aes.c 		ctx->block_size = CFB16_BLOCK_SIZE;
ctx              1102 drivers/crypto/atmel-aes.c 		ctx->block_size = CFB32_BLOCK_SIZE;
ctx              1106 drivers/crypto/atmel-aes.c 		ctx->block_size = CFB64_BLOCK_SIZE;
ctx              1110 drivers/crypto/atmel-aes.c 		ctx->block_size = AES_BLOCK_SIZE;
ctx              1113 drivers/crypto/atmel-aes.c 	ctx->is_aead = false;
ctx              1115 drivers/crypto/atmel-aes.c 	dd = atmel_aes_find_dev(ctx);
ctx              1137 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx              1146 drivers/crypto/atmel-aes.c 	memcpy(ctx->key, key, keylen);
ctx              1147 drivers/crypto/atmel-aes.c 	ctx->keylen = keylen;
ctx              1244 drivers/crypto/atmel-aes.c 	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1247 drivers/crypto/atmel-aes.c 	ctx->base.start = atmel_aes_start;
ctx              1254 drivers/crypto/atmel-aes.c 	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1257 drivers/crypto/atmel-aes.c 	ctx->base.start = atmel_aes_ctr_start;
ctx              1464 drivers/crypto/atmel-aes.c atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
ctx              1466 drivers/crypto/atmel-aes.c 	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
ctx              1474 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1478 drivers/crypto/atmel-aes.c 	ctx->ghash_in = ghash_in;
ctx              1479 drivers/crypto/atmel-aes.c 	ctx->ghash_out = ghash_out;
ctx              1480 drivers/crypto/atmel-aes.c 	ctx->ghash_resume = resume;
ctx              1488 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1495 drivers/crypto/atmel-aes.c 	if (ctx->ghash_in)
ctx              1496 drivers/crypto/atmel-aes.c 		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);
ctx              1503 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1521 drivers/crypto/atmel-aes.c 	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);
ctx              1523 drivers/crypto/atmel-aes.c 	return ctx->ghash_resume(dd);
ctx              1529 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1546 drivers/crypto/atmel-aes.c 		memcpy(ctx->j0, iv, ivsize);
ctx              1547 drivers/crypto/atmel-aes.c 		ctx->j0[3] = cpu_to_be32(1);
ctx              1561 drivers/crypto/atmel-aes.c 				   NULL, ctx->j0, atmel_aes_gcm_process);
ctx              1566 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1574 drivers/crypto/atmel-aes.c 	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);
ctx              1580 drivers/crypto/atmel-aes.c 	if (likely(req->assoclen != 0 || ctx->textlen != 0))
ctx              1589 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1591 drivers/crypto/atmel-aes.c 	u32 j0_lsw, *j0 = ctx->j0;
ctx              1602 drivers/crypto/atmel-aes.c 	atmel_aes_write(dd, AES_CLENR, ctx->textlen);
ctx              1624 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1626 drivers/crypto/atmel-aes.c 	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
ctx              1645 drivers/crypto/atmel-aes.c 	if (unlikely(ctx->textlen == 0))
ctx              1649 drivers/crypto/atmel-aes.c 	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
ctx              1651 drivers/crypto/atmel-aes.c 	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));
ctx              1662 drivers/crypto/atmel-aes.c 		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
ctx              1666 drivers/crypto/atmel-aes.c 	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
ctx              1672 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1687 drivers/crypto/atmel-aes.c 	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);
ctx              1690 drivers/crypto/atmel-aes.c 	data[1] = cpu_to_be64(ctx->textlen * 8);
ctx              1693 drivers/crypto/atmel-aes.c 				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
ctx              1698 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1708 drivers/crypto/atmel-aes.c 	atmel_aes_write_ctrl(dd, false, ctx->j0);
ctx              1711 drivers/crypto/atmel-aes.c 	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
ctx              1717 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
ctx              1721 drivers/crypto/atmel-aes.c 	u32 offset, authsize, itag[4], *otag = ctx->tag;
ctx              1726 drivers/crypto/atmel-aes.c 		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
ctx              1728 drivers/crypto/atmel-aes.c 		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);
ctx              1730 drivers/crypto/atmel-aes.c 	offset = req->assoclen + ctx->textlen;
ctx              1746 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx;
ctx              1750 drivers/crypto/atmel-aes.c 	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx              1751 drivers/crypto/atmel-aes.c 	ctx->block_size = AES_BLOCK_SIZE;
ctx              1752 drivers/crypto/atmel-aes.c 	ctx->is_aead = true;
ctx              1754 drivers/crypto/atmel-aes.c 	dd = atmel_aes_find_dev(ctx);
ctx              1767 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1776 drivers/crypto/atmel-aes.c 	memcpy(ctx->key, key, keylen);
ctx              1777 drivers/crypto/atmel-aes.c 	ctx->keylen = keylen;
ctx              1814 drivers/crypto/atmel-aes.c 	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1817 drivers/crypto/atmel-aes.c 	ctx->base.start = atmel_aes_gcm_start;
ctx              1847 drivers/crypto/atmel-aes.c atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
ctx              1849 drivers/crypto/atmel-aes.c 	return container_of(ctx, struct atmel_aes_xts_ctx, base);
ctx              1856 drivers/crypto/atmel-aes.c 	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
ctx              1873 drivers/crypto/atmel-aes.c 				 ctx->key2, ctx->base.keylen);
ctx              1918 drivers/crypto/atmel-aes.c 	struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx              1925 drivers/crypto/atmel-aes.c 	memcpy(ctx->base.key, key, keylen/2);
ctx              1926 drivers/crypto/atmel-aes.c 	memcpy(ctx->key2, key + keylen/2, keylen/2);
ctx              1927 drivers/crypto/atmel-aes.c 	ctx->base.keylen = keylen/2;
ctx              1944 drivers/crypto/atmel-aes.c 	struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1947 drivers/crypto/atmel-aes.c 	ctx->base.start = atmel_aes_xts_start;
ctx              2000 drivers/crypto/atmel-aes.c 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2009 drivers/crypto/atmel-aes.c 	return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
ctx              2121 drivers/crypto/atmel-aes.c 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2129 drivers/crypto/atmel-aes.c 	if (keys.enckeylen > sizeof(ctx->base.key))
ctx              2134 drivers/crypto/atmel-aes.c 	err = atmel_sha_authenc_setkey(ctx->auth,
ctx              2144 drivers/crypto/atmel-aes.c 	ctx->base.keylen = keys.enckeylen;
ctx              2145 drivers/crypto/atmel-aes.c 	memcpy(ctx->base.key, keys.enckey, keys.enckeylen);
ctx              2159 drivers/crypto/atmel-aes.c 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2162 drivers/crypto/atmel-aes.c 	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
ctx              2163 drivers/crypto/atmel-aes.c 	if (IS_ERR(ctx->auth))
ctx              2164 drivers/crypto/atmel-aes.c 		return PTR_ERR(ctx->auth);
ctx              2168 drivers/crypto/atmel-aes.c 	ctx->base.start = atmel_aes_authenc_start;
ctx              2200 drivers/crypto/atmel-aes.c 	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2202 drivers/crypto/atmel-aes.c 	atmel_sha_authenc_free(ctx->auth);
ctx              2210 drivers/crypto/atmel-aes.c 	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2229 drivers/crypto/atmel-aes.c 	ctx->block_size = AES_BLOCK_SIZE;
ctx              2230 drivers/crypto/atmel-aes.c 	ctx->is_aead = true;
ctx              2232 drivers/crypto/atmel-aes.c 	dd = atmel_aes_find_dev(ctx);
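The atmel_aes_xts_ctx_cast() entries above (atmel-aes.c lines 1847-1849) show the recurring pattern in this driver family: a mode-specific context embeds the base context as its first member, and container_of() walks back from the embedded member to the enclosing struct. A minimal sketch of the same idiom, with hypothetical my_base_ctx/my_xts_ctx types standing in for the driver's structs:

#include <linux/kernel.h>	/* container_of() */
#include <linux/types.h>

struct my_base_ctx {
	u32 keylen;
};

struct my_xts_ctx {
	struct my_base_ctx base;	/* must be embedded, not pointed to */
	u8 key2[32];
};

static struct my_xts_ctx *my_xts_ctx_cast(struct my_base_ctx *ctx)
{
	/* Recover the enclosing struct from a pointer to its member. */
	return container_of(ctx, struct my_xts_ctx, base);
}

Because the cast is pure pointer arithmetic, the shared request-handling code can pass only the base context around and each mode recovers its private fields on demand.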
ctx                54 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = work_data->ctx;
ctx                62 drivers/crypto/atmel-ecc.c 	n_sz = min_t(size_t, ctx->n_sz, req->dst_len);
ctx                91 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx                98 drivers/crypto/atmel-ecc.c 	kfree(ctx->public_key);
ctx               100 drivers/crypto/atmel-ecc.c 	ctx->public_key = NULL;
ctx               103 drivers/crypto/atmel-ecc.c 		dev_err(&ctx->client->dev, "crypto_ecdh_decode_key failed\n");
ctx               107 drivers/crypto/atmel-ecc.c 	ctx->n_sz = atmel_ecdh_supported_curve(params.curve_id);
ctx               108 drivers/crypto/atmel-ecc.c 	if (!ctx->n_sz || params.key_size) {
ctx               110 drivers/crypto/atmel-ecc.c 		ctx->do_fallback = true;
ctx               111 drivers/crypto/atmel-ecc.c 		return crypto_kpp_set_secret(ctx->fallback, buf, len);
ctx               127 drivers/crypto/atmel-ecc.c 	ctx->do_fallback = false;
ctx               128 drivers/crypto/atmel-ecc.c 	ctx->curve_id = params.curve_id;
ctx               132 drivers/crypto/atmel-ecc.c 	ret = atmel_i2c_send_receive(ctx->client, cmd);
ctx               138 drivers/crypto/atmel-ecc.c 	ctx->public_key = public_key;
ctx               153 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               157 drivers/crypto/atmel-ecc.c 	if (ctx->do_fallback) {
ctx               158 drivers/crypto/atmel-ecc.c 		kpp_request_set_tfm(req, ctx->fallback);
ctx               162 drivers/crypto/atmel-ecc.c 	if (!ctx->public_key)
ctx               171 drivers/crypto/atmel-ecc.c 				     ctx->public_key, nbytes);
ctx               181 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               186 drivers/crypto/atmel-ecc.c 	if (ctx->do_fallback) {
ctx               187 drivers/crypto/atmel-ecc.c 		kpp_request_set_tfm(req, ctx->fallback);
ctx               202 drivers/crypto/atmel-ecc.c 	work_data->ctx = ctx;
ctx               203 drivers/crypto/atmel-ecc.c 	work_data->client = ctx->client;
ctx               264 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               266 drivers/crypto/atmel-ecc.c 	ctx->client = atmel_ecc_i2c_client_alloc();
ctx               267 drivers/crypto/atmel-ecc.c 	if (IS_ERR(ctx->client)) {
ctx               269 drivers/crypto/atmel-ecc.c 		return PTR_ERR(ctx->client);
ctx               274 drivers/crypto/atmel-ecc.c 		dev_err(&ctx->client->dev, "Failed to allocate transformation for '%s': %ld\n",
ctx               280 drivers/crypto/atmel-ecc.c 	ctx->fallback = fallback;
ctx               287 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               289 drivers/crypto/atmel-ecc.c 	kfree(ctx->public_key);
ctx               290 drivers/crypto/atmel-ecc.c 	crypto_free_kpp(ctx->fallback);
ctx               291 drivers/crypto/atmel-ecc.c 	atmel_ecc_i2c_client_free(ctx->client);
ctx               296 drivers/crypto/atmel-ecc.c 	struct atmel_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               298 drivers/crypto/atmel-ecc.c 	if (ctx->fallback)
ctx               299 drivers/crypto/atmel-ecc.c 		return crypto_kpp_maxsize(ctx->fallback);
ctx               161 drivers/crypto/atmel-i2c.h 	void *ctx;
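atmel-ecc.c falls back to a software kpp implementation whenever the requested curve or key size is outside what the hardware supports (ctx->do_fallback at lines 108-111 and 157-158 above). A hedged sketch of that delegation, with my_ecdh_ctx as a stand-in for the driver's context; the hardware path and error handling are elided:

#include <linux/errno.h>
#include <linux/types.h>
#include <crypto/kpp.h>

struct my_ecdh_ctx {
	struct crypto_kpp *fallback;	/* e.g. a software "ecdh" tfm */
	bool do_fallback;
};

static int my_ecdh_generate_public_key(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct my_ecdh_ctx *ctx = kpp_tfm_ctx(tfm);

	if (ctx->do_fallback) {
		/* Re-target the request at the software tfm and hand it off. */
		kpp_request_set_tfm(req, ctx->fallback);
		return crypto_kpp_generate_public_key(req);
	}

	return -EOPNOTSUPP;	/* hardware path elided in this sketch */
}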
ctx               302 drivers/crypto/atmel-sha.c static size_t atmel_sha_append_sg(struct atmel_sha_reqctx *ctx)
ctx               306 drivers/crypto/atmel-sha.c 	while ((ctx->bufcnt < ctx->buflen) && ctx->total) {
ctx               307 drivers/crypto/atmel-sha.c 		count = min(ctx->sg->length - ctx->offset, ctx->total);
ctx               308 drivers/crypto/atmel-sha.c 		count = min(count, ctx->buflen - ctx->bufcnt);
ctx               317 drivers/crypto/atmel-sha.c 			if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) {
ctx               318 drivers/crypto/atmel-sha.c 				ctx->sg = sg_next(ctx->sg);
ctx               325 drivers/crypto/atmel-sha.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg,
ctx               326 drivers/crypto/atmel-sha.c 			ctx->offset, count, 0);
ctx               328 drivers/crypto/atmel-sha.c 		ctx->bufcnt += count;
ctx               329 drivers/crypto/atmel-sha.c 		ctx->offset += count;
ctx               330 drivers/crypto/atmel-sha.c 		ctx->total -= count;
ctx               332 drivers/crypto/atmel-sha.c 		if (ctx->offset == ctx->sg->length) {
ctx               333 drivers/crypto/atmel-sha.c 			ctx->sg = sg_next(ctx->sg);
ctx               334 drivers/crypto/atmel-sha.c 			if (ctx->sg)
ctx               335 drivers/crypto/atmel-sha.c 				ctx->offset = 0;
ctx               337 drivers/crypto/atmel-sha.c 				ctx->total = 0;
ctx               360 drivers/crypto/atmel-sha.c static void atmel_sha_fill_padding(struct atmel_sha_reqctx *ctx, int length)
ctx               366 drivers/crypto/atmel-sha.c 	size[0] = ctx->digcnt[0];
ctx               367 drivers/crypto/atmel-sha.c 	size[1] = ctx->digcnt[1];
ctx               369 drivers/crypto/atmel-sha.c 	size[0] += ctx->bufcnt;
ctx               370 drivers/crypto/atmel-sha.c 	if (size[0] < ctx->bufcnt)
ctx               380 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx               383 drivers/crypto/atmel-sha.c 		index = ctx->bufcnt & 0x7f;
ctx               385 drivers/crypto/atmel-sha.c 		*(ctx->buffer + ctx->bufcnt) = 0x80;
ctx               386 drivers/crypto/atmel-sha.c 		memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen-1);
ctx               387 drivers/crypto/atmel-sha.c 		memcpy(ctx->buffer + ctx->bufcnt + padlen, bits, 16);
ctx               388 drivers/crypto/atmel-sha.c 		ctx->bufcnt += padlen + 16;
ctx               389 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_PAD;
ctx               393 drivers/crypto/atmel-sha.c 		index = ctx->bufcnt & 0x3f;
ctx               395 drivers/crypto/atmel-sha.c 		*(ctx->buffer + ctx->bufcnt) = 0x80;
ctx               396 drivers/crypto/atmel-sha.c 		memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen-1);
ctx               397 drivers/crypto/atmel-sha.c 		memcpy(ctx->buffer + ctx->bufcnt + padlen, &bits[1], 8);
ctx               398 drivers/crypto/atmel-sha.c 		ctx->bufcnt += padlen + 8;
ctx               399 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_PAD;
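The two switch arms above (atmel-sha.c lines 380-399) implement classic Merkle-Damgard padding: append 0x80, zero-fill so that the length field completes a block, then append the message bit count, 8 bytes for the 64-byte-block hashes and 16 bytes for SHA-384/512's 128-byte blocks. A self-contained sketch of the 64-byte-block arithmetic:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Pad a SHA-1/224/256 style buffer in place; returns bytes appended.
 * buf must have room for up to 72 extra bytes past bufcnt.
 */
static size_t md_pad64(uint8_t *buf, size_t bufcnt, uint64_t total_bits)
{
	size_t index = bufcnt & 0x3f;
	/* Land on 56 mod 64 so the 8-byte length completes a block. */
	size_t padlen = (index < 56) ? (56 - index) : ((64 + 56) - index);
	int i;

	buf[bufcnt] = 0x80;
	memset(buf + bufcnt + 1, 0, padlen - 1);
	for (i = 0; i < 8; i++)	/* big-endian bit count */
		buf[bufcnt + padlen + i] = (uint8_t)(total_bits >> (56 - 8 * i));
	return padlen + 8;
}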
ctx               429 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               432 drivers/crypto/atmel-sha.c 	ctx->dd = dd;
ctx               434 drivers/crypto/atmel-sha.c 	ctx->flags = 0;
ctx               441 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_SHA1;
ctx               442 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA1_BLOCK_SIZE;
ctx               445 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_SHA224;
ctx               446 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA224_BLOCK_SIZE;
ctx               449 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_SHA256;
ctx               450 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA256_BLOCK_SIZE;
ctx               453 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_SHA384;
ctx               454 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA384_BLOCK_SIZE;
ctx               457 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_SHA512;
ctx               458 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA512_BLOCK_SIZE;
ctx               465 drivers/crypto/atmel-sha.c 	ctx->bufcnt = 0;
ctx               466 drivers/crypto/atmel-sha.c 	ctx->digcnt[0] = 0;
ctx               467 drivers/crypto/atmel-sha.c 	ctx->digcnt[1] = 0;
ctx               468 drivers/crypto/atmel-sha.c 	ctx->buflen = SHA_BUFFER_LEN;
ctx               475 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               489 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx               520 drivers/crypto/atmel-sha.c 	if (!(ctx->digcnt[0] || ctx->digcnt[1])) {
ctx               522 drivers/crypto/atmel-sha.c 	} else if (dd->caps.has_uihv && (ctx->flags & SHA_FLAGS_RESTORE)) {
ctx               523 drivers/crypto/atmel-sha.c 		const u32 *hash = (const u32 *)ctx->digest;
ctx               531 drivers/crypto/atmel-sha.c 		ctx->flags &= ~SHA_FLAGS_RESTORE;
ctx               564 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               569 drivers/crypto/atmel-sha.c 		ctx->digcnt[1], ctx->digcnt[0], length, final);
ctx               574 drivers/crypto/atmel-sha.c 	ctx->digcnt[0] += length;
ctx               575 drivers/crypto/atmel-sha.c 	if (ctx->digcnt[0] < length)
ctx               576 drivers/crypto/atmel-sha.c 		ctx->digcnt[1]++;
ctx               594 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               598 drivers/crypto/atmel-sha.c 		ctx->digcnt[1], ctx->digcnt[0], length1, final);
ctx               612 drivers/crypto/atmel-sha.c 	ctx->digcnt[0] += length1;
ctx               613 drivers/crypto/atmel-sha.c 	if (ctx->digcnt[0] < length1)
ctx               614 drivers/crypto/atmel-sha.c 		ctx->digcnt[1]++;
ctx               640 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               645 drivers/crypto/atmel-sha.c 		ctx->digcnt[1], ctx->digcnt[0], length1, final);
ctx               676 drivers/crypto/atmel-sha.c 	ctx->digcnt[0] += length1;
ctx               677 drivers/crypto/atmel-sha.c 	if (ctx->digcnt[0] < length1)
ctx               678 drivers/crypto/atmel-sha.c 		ctx->digcnt[1]++;
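The digcnt[0]/digcnt[1] updates repeated above are a manual 128-bit byte counter: add into the low 64-bit word and carry into the high word when the low word wraps. The idiom in isolation:

#include <stdint.h>

/* 128-bit running count kept as two 64-bit halves, as in digcnt[]. */
static void count128_add(uint64_t cnt[2], uint64_t n)
{
	cnt[0] += n;
	if (cnt[0] < n)		/* unsigned wrap-around means a carry */
		cnt[1]++;
}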
ctx               705 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               708 drivers/crypto/atmel-sha.c 	atmel_sha_append_sg(ctx);
ctx               709 drivers/crypto/atmel-sha.c 	atmel_sha_fill_padding(ctx, 0);
ctx               710 drivers/crypto/atmel-sha.c 	bufcnt = ctx->bufcnt;
ctx               711 drivers/crypto/atmel-sha.c 	ctx->bufcnt = 0;
ctx               713 drivers/crypto/atmel-sha.c 	return atmel_sha_xmit_cpu(dd, ctx->buffer, bufcnt, 1);
ctx               717 drivers/crypto/atmel-sha.c 					struct atmel_sha_reqctx *ctx,
ctx               720 drivers/crypto/atmel-sha.c 	ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer,
ctx               721 drivers/crypto/atmel-sha.c 				ctx->buflen + ctx->block_size, DMA_TO_DEVICE);
ctx               722 drivers/crypto/atmel-sha.c 	if (dma_mapping_error(dd->dev, ctx->dma_addr)) {
ctx               723 drivers/crypto/atmel-sha.c 		dev_err(dd->dev, "dma %zu bytes error\n", ctx->buflen +
ctx               724 drivers/crypto/atmel-sha.c 				ctx->block_size);
ctx               728 drivers/crypto/atmel-sha.c 	ctx->flags &= ~SHA_FLAGS_SG;
ctx               731 drivers/crypto/atmel-sha.c 	return atmel_sha_xmit_start(dd, ctx->dma_addr, length, 0, 0, final);
ctx               736 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               740 drivers/crypto/atmel-sha.c 	atmel_sha_append_sg(ctx);
ctx               742 drivers/crypto/atmel-sha.c 	final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total;
ctx               745 drivers/crypto/atmel-sha.c 		 ctx->bufcnt, ctx->digcnt[1], ctx->digcnt[0], final);
ctx               748 drivers/crypto/atmel-sha.c 		atmel_sha_fill_padding(ctx, 0);
ctx               750 drivers/crypto/atmel-sha.c 	if (final || (ctx->bufcnt == ctx->buflen)) {
ctx               751 drivers/crypto/atmel-sha.c 		count = ctx->bufcnt;
ctx               752 drivers/crypto/atmel-sha.c 		ctx->bufcnt = 0;
ctx               753 drivers/crypto/atmel-sha.c 		return atmel_sha_xmit_dma_map(dd, ctx, count, final);
ctx               761 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               766 drivers/crypto/atmel-sha.c 	if (!ctx->total)
ctx               769 drivers/crypto/atmel-sha.c 	if (ctx->bufcnt || ctx->offset)
ctx               773 drivers/crypto/atmel-sha.c 		ctx->digcnt[1], ctx->digcnt[0], ctx->bufcnt, ctx->total);
ctx               775 drivers/crypto/atmel-sha.c 	sg = ctx->sg;
ctx               780 drivers/crypto/atmel-sha.c 	if (!sg_is_last(sg) && !IS_ALIGNED(sg->length, ctx->block_size))
ctx               784 drivers/crypto/atmel-sha.c 	length = min(ctx->total, sg->length);
ctx               787 drivers/crypto/atmel-sha.c 		if (!(ctx->flags & SHA_FLAGS_FINUP)) {
ctx               789 drivers/crypto/atmel-sha.c 			tail = length & (ctx->block_size - 1);
ctx               794 drivers/crypto/atmel-sha.c 	ctx->total -= length;
ctx               795 drivers/crypto/atmel-sha.c 	ctx->offset = length; /* offset where to start slow */
ctx               797 drivers/crypto/atmel-sha.c 	final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total;
ctx               801 drivers/crypto/atmel-sha.c 		tail = length & (ctx->block_size - 1);
ctx               803 drivers/crypto/atmel-sha.c 		ctx->total += tail;
ctx               804 drivers/crypto/atmel-sha.c 		ctx->offset = length; /* offset where to start slow */
ctx               806 drivers/crypto/atmel-sha.c 		sg = ctx->sg;
ctx               807 drivers/crypto/atmel-sha.c 		atmel_sha_append_sg(ctx);
ctx               809 drivers/crypto/atmel-sha.c 		atmel_sha_fill_padding(ctx, length);
ctx               811 drivers/crypto/atmel-sha.c 		ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer,
ctx               812 drivers/crypto/atmel-sha.c 			ctx->buflen + ctx->block_size, DMA_TO_DEVICE);
ctx               813 drivers/crypto/atmel-sha.c 		if (dma_mapping_error(dd->dev, ctx->dma_addr)) {
ctx               815 drivers/crypto/atmel-sha.c 				ctx->buflen + ctx->block_size);
ctx               820 drivers/crypto/atmel-sha.c 			ctx->flags &= ~SHA_FLAGS_SG;
ctx               821 drivers/crypto/atmel-sha.c 			count = ctx->bufcnt;
ctx               822 drivers/crypto/atmel-sha.c 			ctx->bufcnt = 0;
ctx               823 drivers/crypto/atmel-sha.c 			return atmel_sha_xmit_start(dd, ctx->dma_addr, count, 0,
ctx               826 drivers/crypto/atmel-sha.c 			ctx->sg = sg;
ctx               827 drivers/crypto/atmel-sha.c 			if (!dma_map_sg(dd->dev, ctx->sg, 1,
ctx               833 drivers/crypto/atmel-sha.c 			ctx->flags |= SHA_FLAGS_SG;
ctx               835 drivers/crypto/atmel-sha.c 			count = ctx->bufcnt;
ctx               836 drivers/crypto/atmel-sha.c 			ctx->bufcnt = 0;
ctx               837 drivers/crypto/atmel-sha.c 			return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg),
ctx               838 drivers/crypto/atmel-sha.c 					length, ctx->dma_addr, count, final);
ctx               842 drivers/crypto/atmel-sha.c 	if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
ctx               847 drivers/crypto/atmel-sha.c 	ctx->flags |= SHA_FLAGS_SG;
ctx               850 drivers/crypto/atmel-sha.c 	return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0,
ctx               856 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               858 drivers/crypto/atmel-sha.c 	if (ctx->flags & SHA_FLAGS_SG) {
ctx               859 drivers/crypto/atmel-sha.c 		dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE);
ctx               860 drivers/crypto/atmel-sha.c 		if (ctx->sg->length == ctx->offset) {
ctx               861 drivers/crypto/atmel-sha.c 			ctx->sg = sg_next(ctx->sg);
ctx               862 drivers/crypto/atmel-sha.c 			if (ctx->sg)
ctx               863 drivers/crypto/atmel-sha.c 				ctx->offset = 0;
ctx               865 drivers/crypto/atmel-sha.c 		if (ctx->flags & SHA_FLAGS_PAD) {
ctx               866 drivers/crypto/atmel-sha.c 			dma_unmap_single(dd->dev, ctx->dma_addr,
ctx               867 drivers/crypto/atmel-sha.c 				ctx->buflen + ctx->block_size, DMA_TO_DEVICE);
ctx               870 drivers/crypto/atmel-sha.c 		dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen +
ctx               871 drivers/crypto/atmel-sha.c 						ctx->block_size, DMA_TO_DEVICE);
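Each DMA submission above maps ctx->buffer (or the scatterlist) for device reads and validates the handle with dma_mapping_error() before use; the unmap paths at atmel-sha.c lines 856-871 undo it. A hedged sketch of that map/check/unmap lifecycle; dev, buf and len are placeholders:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Sketch: map a CPU buffer for a device-bound transfer. The handle must
 * be checked with dma_mapping_error(); a raw NULL test is not valid.
 */
static int my_map_for_device(struct device *dev, void *buf, size_t len,
			     dma_addr_t *handle)
{
	*handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *handle))
		return -EINVAL;

	/* ... start the transfer; on completion the driver must call:
	 * dma_unmap_single(dev, *handle, len, DMA_TO_DEVICE);
	 */
	return 0;
}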
ctx               880 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               884 drivers/crypto/atmel-sha.c 		ctx->total, ctx->digcnt[1], ctx->digcnt[0]);
ctx               886 drivers/crypto/atmel-sha.c 	if (ctx->flags & SHA_FLAGS_CPU)
ctx               893 drivers/crypto/atmel-sha.c 			err, ctx->digcnt[1], ctx->digcnt[0]);
ctx               901 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               905 drivers/crypto/atmel-sha.c 	if (ctx->bufcnt >= ATMEL_SHA_DMA_THRESHOLD) {
ctx               906 drivers/crypto/atmel-sha.c 		atmel_sha_fill_padding(ctx, 0);
ctx               907 drivers/crypto/atmel-sha.c 		count = ctx->bufcnt;
ctx               908 drivers/crypto/atmel-sha.c 		ctx->bufcnt = 0;
ctx               909 drivers/crypto/atmel-sha.c 		err = atmel_sha_xmit_dma_map(dd, ctx, count, 1);
ctx               913 drivers/crypto/atmel-sha.c 		atmel_sha_fill_padding(ctx, 0);
ctx               914 drivers/crypto/atmel-sha.c 		count = ctx->bufcnt;
ctx               915 drivers/crypto/atmel-sha.c 		ctx->bufcnt = 0;
ctx               916 drivers/crypto/atmel-sha.c 		err = atmel_sha_xmit_cpu(dd, ctx->buffer, count, 1);
ctx               926 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               927 drivers/crypto/atmel-sha.c 	u32 *hash = (u32 *)ctx->digest;
ctx               930 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx               951 drivers/crypto/atmel-sha.c 		hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i));
ctx               952 drivers/crypto/atmel-sha.c 	ctx->flags |= SHA_FLAGS_RESTORE;
ctx               957 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               962 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx               965 drivers/crypto/atmel-sha.c 		memcpy(req->result, ctx->digest, SHA1_DIGEST_SIZE);
ctx               969 drivers/crypto/atmel-sha.c 		memcpy(req->result, ctx->digest, SHA224_DIGEST_SIZE);
ctx               973 drivers/crypto/atmel-sha.c 		memcpy(req->result, ctx->digest, SHA256_DIGEST_SIZE);
ctx               977 drivers/crypto/atmel-sha.c 		memcpy(req->result, ctx->digest, SHA384_DIGEST_SIZE);
ctx               981 drivers/crypto/atmel-sha.c 		memcpy(req->result, ctx->digest, SHA512_DIGEST_SIZE);
ctx               988 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               989 drivers/crypto/atmel-sha.c 	struct atmel_sha_dev *dd = ctx->dd;
ctx               991 drivers/crypto/atmel-sha.c 	if (ctx->digcnt[0] || ctx->digcnt[1])
ctx               994 drivers/crypto/atmel-sha.c 	dev_dbg(dd->dev, "digcnt: 0x%llx 0x%llx, bufcnt: %zd\n", ctx->digcnt[1],
ctx               995 drivers/crypto/atmel-sha.c 		ctx->digcnt[0], ctx->bufcnt);
ctx              1002 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1003 drivers/crypto/atmel-sha.c 	struct atmel_sha_dev *dd = ctx->dd;
ctx              1010 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_ERROR;
ctx              1055 drivers/crypto/atmel-sha.c 	struct atmel_sha_ctx *ctx;
ctx              1082 drivers/crypto/atmel-sha.c 	ctx = crypto_tfm_ctx(async_req->tfm);
ctx              1090 drivers/crypto/atmel-sha.c 	err = ctx->start(dd);
ctx              1099 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1103 drivers/crypto/atmel-sha.c 						ctx->op, req->nbytes);
ctx              1129 drivers/crypto/atmel-sha.c 	if (ctx->op == SHA_OP_UPDATE) {
ctx              1131 drivers/crypto/atmel-sha.c 		if (!err && (ctx->flags & SHA_FLAGS_FINUP))
ctx              1134 drivers/crypto/atmel-sha.c 	} else if (ctx->op == SHA_OP_FINAL) {
ctx              1149 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1153 drivers/crypto/atmel-sha.c 	ctx->op = op;
ctx              1160 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1165 drivers/crypto/atmel-sha.c 	ctx->total = req->nbytes;
ctx              1166 drivers/crypto/atmel-sha.c 	ctx->sg = req->src;
ctx              1167 drivers/crypto/atmel-sha.c 	ctx->offset = 0;
ctx              1169 drivers/crypto/atmel-sha.c 	if (ctx->flags & SHA_FLAGS_FINUP) {
ctx              1170 drivers/crypto/atmel-sha.c 		if (ctx->bufcnt + ctx->total < ATMEL_SHA_DMA_THRESHOLD)
ctx              1172 drivers/crypto/atmel-sha.c 			ctx->flags |= SHA_FLAGS_CPU;
ctx              1173 drivers/crypto/atmel-sha.c 	} else if (ctx->bufcnt + ctx->total < ctx->buflen) {
ctx              1174 drivers/crypto/atmel-sha.c 		atmel_sha_append_sg(ctx);
ctx              1182 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1184 drivers/crypto/atmel-sha.c 	ctx->flags |= SHA_FLAGS_FINUP;
ctx              1186 drivers/crypto/atmel-sha.c 	if (ctx->flags & SHA_FLAGS_ERROR)
ctx              1189 drivers/crypto/atmel-sha.c 	if (ctx->flags & SHA_FLAGS_PAD)
ctx              1198 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1201 drivers/crypto/atmel-sha.c 	ctx->flags |= SHA_FLAGS_FINUP;
ctx              1226 drivers/crypto/atmel-sha.c 	const struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1228 drivers/crypto/atmel-sha.c 	memcpy(out, ctx, sizeof(*ctx));
ctx              1234 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1236 drivers/crypto/atmel-sha.c 	memcpy(ctx, in, sizeof(*ctx));
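atmel_sha_export()/atmel_sha_import() above (lines 1226-1236) are the simplest legal form of the ahash export/import contract: the whole request context is the saved state, so one memcpy each way suffices and .statesize is sizeof the reqctx. A sketch, assuming a reqctx that holds no live DMA or device pointers:

#include <linux/string.h>
#include <linux/types.h>
#include <crypto/hash.h>

struct my_sha_reqctx {
	u64 digcnt[2];
	u8 buffer[128];
	/* ... remaining fields, as long as they are self-contained ... */
};

static int my_sha_export(struct ahash_request *req, void *out)
{
	const struct my_sha_reqctx *ctx = ahash_request_ctx(req);

	memcpy(out, ctx, sizeof(*ctx));	/* state == entire reqctx */
	return 0;
}

static int my_sha_import(struct ahash_request *req, const void *in)
{
	struct my_sha_reqctx *ctx = ahash_request_ctx(req);

	memcpy(ctx, in, sizeof(*ctx));
	return 0;
}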
ctx              1242 drivers/crypto/atmel-sha.c 	struct atmel_sha_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1246 drivers/crypto/atmel-sha.c 	ctx->start = atmel_sha_start;
ctx              1460 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1461 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1570 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1571 drivers/crypto/atmel-sha.c 	const u32 *words = (const u32 *)ctx->buffer;
ctx              1575 drivers/crypto/atmel-sha.c 	din_inc = (ctx->flags & SHA_FLAGS_IDATAR0) ? 0 : 1;
ctx              1578 drivers/crypto/atmel-sha.c 		num_words = DIV_ROUND_UP(ctx->bufcnt, sizeof(u32));
ctx              1582 drivers/crypto/atmel-sha.c 		ctx->offset += ctx->bufcnt;
ctx              1583 drivers/crypto/atmel-sha.c 		ctx->total -= ctx->bufcnt;
ctx              1585 drivers/crypto/atmel-sha.c 		if (!ctx->total)
ctx              1596 drivers/crypto/atmel-sha.c 		ctx->bufcnt = min_t(size_t, ctx->block_size, ctx->total);
ctx              1597 drivers/crypto/atmel-sha.c 		scatterwalk_map_and_copy(ctx->buffer, ctx->sg,
ctx              1598 drivers/crypto/atmel-sha.c 					 ctx->offset, ctx->bufcnt, 0);
ctx              1610 drivers/crypto/atmel-sha.c 	if (unlikely(!(ctx->flags & SHA_FLAGS_WAIT_DATARDY)))
ctx              1624 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1629 drivers/crypto/atmel-sha.c 	ctx->flags &= ~(SHA_FLAGS_IDATAR0 | SHA_FLAGS_WAIT_DATARDY);
ctx              1632 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_IDATAR0;
ctx              1635 drivers/crypto/atmel-sha.c 		ctx->flags |= SHA_FLAGS_WAIT_DATARDY;
ctx              1637 drivers/crypto/atmel-sha.c 	ctx->sg = sg;
ctx              1638 drivers/crypto/atmel-sha.c 	ctx->total = len;
ctx              1639 drivers/crypto/atmel-sha.c 	ctx->offset = 0;
ctx              1642 drivers/crypto/atmel-sha.c 	ctx->bufcnt = min_t(size_t, ctx->block_size, ctx->total);
ctx              1643 drivers/crypto/atmel-sha.c 	scatterwalk_map_and_copy(ctx->buffer, ctx->sg,
ctx              1644 drivers/crypto/atmel-sha.c 				 ctx->offset, ctx->bufcnt, 0);
ctx              1656 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1660 drivers/crypto/atmel-sha.c 	if (!(IS_ALIGNED(datalen, ctx->block_size) || auto_padding))
ctx              1663 drivers/crypto/atmel-sha.c 	mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK);
ctx              1754 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1762 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx              1764 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA1_BLOCK_SIZE;
ctx              1765 drivers/crypto/atmel-sha.c 		ctx->hash_size = SHA1_DIGEST_SIZE;
ctx              1769 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA224_BLOCK_SIZE;
ctx              1770 drivers/crypto/atmel-sha.c 		ctx->hash_size = SHA256_DIGEST_SIZE;
ctx              1774 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA256_BLOCK_SIZE;
ctx              1775 drivers/crypto/atmel-sha.c 		ctx->hash_size = SHA256_DIGEST_SIZE;
ctx              1779 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA384_BLOCK_SIZE;
ctx              1780 drivers/crypto/atmel-sha.c 		ctx->hash_size = SHA512_DIGEST_SIZE;
ctx              1784 drivers/crypto/atmel-sha.c 		ctx->block_size = SHA512_BLOCK_SIZE;
ctx              1785 drivers/crypto/atmel-sha.c 		ctx->hash_size = SHA512_DIGEST_SIZE;
ctx              1791 drivers/crypto/atmel-sha.c 	bs = ctx->block_size;
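The switch above (atmel-sha.c lines 1754-1791) fixes, per algorithm, the two sizes the HMAC code needs: the compression block size (the key padding unit) and the internal hash size. Note that SHA-224 and SHA-384 truncate only on output and carry the full SHA-256/SHA-512 state internally. The same mapping as a small lookup table:

#include <stddef.h>

struct sha_sizes { size_t block, hash; };

/* Block size / internal hash size per algorithm, as in the switch above. */
static const struct sha_sizes sha_sizes[] = {
	/* SHA-1   */ { 64,  20 },
	/* SHA-224 */ { 64,  32 },	/* full SHA-256 state */
	/* SHA-256 */ { 64,  32 },
	/* SHA-384 */ { 128, 64 },	/* full SHA-512 state */
	/* SHA-512 */ { 128, 64 },
};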
ctx              1818 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1820 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1835 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1836 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1854 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1855 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1856 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              1870 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1871 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              1883 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1890 drivers/crypto/atmel-sha.c 	switch (ctx->op) {
ctx              1938 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1941 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1942 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              1944 drivers/crypto/atmel-sha.c 	ctx->bufcnt = 0;
ctx              1945 drivers/crypto/atmel-sha.c 	ctx->digcnt[0] = bs;
ctx              1946 drivers/crypto/atmel-sha.c 	ctx->digcnt[1] = 0;
ctx              1947 drivers/crypto/atmel-sha.c 	ctx->flags |= SHA_FLAGS_RESTORE;
ctx              1948 drivers/crypto/atmel-sha.c 	memcpy(ctx->digest, hmac->ipad, hs);
ctx              1955 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              1958 drivers/crypto/atmel-sha.c 	u32 *digest = (u32 *)ctx->digest;
ctx              1960 drivers/crypto/atmel-sha.c 	size_t bs = ctx->block_size;
ctx              1961 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              1977 drivers/crypto/atmel-sha.c 	mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK);
ctx              2014 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = ahash_request_ctx(req);
ctx              2017 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              2042 drivers/crypto/atmel-sha.c 	mr |= ctx->flags & SHA_FLAGS_ALGO_MASK;
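Lines 1944-1948 above restore a precomputed ipad digest into ctx->digest and preload digcnt with one block: the standard HMAC setup, where K xor ipad and K xor opad are hashed once at setkey time and every request then resumes from those midstates. A sketch of the pad derivation alone (the midstate compression itself is done by the SHA engine):

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Derive the HMAC inner/outer pads from a key already reduced to at
 * most one block (longer keys are hashed first, per RFC 2104).
 */
static void hmac_make_pads(const uint8_t *key, size_t keylen, size_t bs,
			   uint8_t *ipad, uint8_t *opad)
{
	size_t i;

	memset(ipad, 0, bs);
	memcpy(ipad, key, keylen);
	memcpy(opad, ipad, bs);
	for (i = 0; i < bs; i++) {
		ipad[i] ^= 0x36;
		opad[i] ^= 0x5c;
	}
	/* One compression of each pad yields the reusable midstates. */
}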
ctx              2366 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = &authctx->base;
ctx              2380 drivers/crypto/atmel-sha.c 	ctx->dd = dd;
ctx              2381 drivers/crypto/atmel-sha.c 	ctx->buflen = SHA_BUFFER_LEN;
ctx              2398 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = &authctx->base;
ctx              2401 drivers/crypto/atmel-sha.c 	struct atmel_sha_dev *dd = ctx->dd;
ctx              2412 drivers/crypto/atmel-sha.c 	ctx->flags = hmac->base.flags;
ctx              2421 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = &authctx->base;
ctx              2424 drivers/crypto/atmel-sha.c 	size_t hs = ctx->hash_size;
ctx              2439 drivers/crypto/atmel-sha.c 	mr |= ctx->flags & SHA_FLAGS_ALGO_MASK;
ctx              2468 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = &authctx->base;
ctx              2469 drivers/crypto/atmel-sha.c 	struct atmel_sha_dev *dd = ctx->dd;
ctx              2471 drivers/crypto/atmel-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MASK) {
ctx              2521 drivers/crypto/atmel-sha.c 	struct atmel_sha_reqctx *ctx = &authctx->base;
ctx              2522 drivers/crypto/atmel-sha.c 	struct atmel_sha_dev *dd = ctx->dd;
ctx                24 drivers/crypto/atmel-sha204a.c 	struct atmel_i2c_client_priv *i2c_priv = work_data->ctx;
ctx                58 drivers/crypto/atmel-sha204a.c 		work_data->ctx = i2c_priv;
ctx                95 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx	*ctx;
ctx               196 drivers/crypto/atmel-tdes.c static struct atmel_tdes_dev *atmel_tdes_find_dev(struct atmel_tdes_ctx *ctx)
ctx               202 drivers/crypto/atmel-tdes.c 	if (!ctx->dd) {
ctx               207 drivers/crypto/atmel-tdes.c 		ctx->dd = tdes_dd;
ctx               209 drivers/crypto/atmel-tdes.c 		tdes_dd = ctx->dd;
ctx               273 drivers/crypto/atmel-tdes.c 	if (dd->ctx->keylen > (DES_KEY_SIZE << 1)) {
ctx               276 drivers/crypto/atmel-tdes.c 	} else if (dd->ctx->keylen > DES_KEY_SIZE) {
ctx               306 drivers/crypto/atmel-tdes.c 	atmel_tdes_write_n(dd, TDES_KEY1W1R, dd->ctx->key,
ctx               307 drivers/crypto/atmel-tdes.c 						dd->ctx->keylen >> 2);
ctx               401 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               402 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_dev *dd = ctx->dd;
ctx               437 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               438 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_dev *dd = ctx->dd;
ctx               514 drivers/crypto/atmel-tdes.c 			IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);
ctx               516 drivers/crypto/atmel-tdes.c 			IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);
ctx               589 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx;
ctx               624 drivers/crypto/atmel-tdes.c 	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
ctx               627 drivers/crypto/atmel-tdes.c 	dd->ctx = ctx;
ctx               628 drivers/crypto/atmel-tdes.c 	ctx->dd = dd;
ctx               670 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_ablkcipher_ctx(
ctx               679 drivers/crypto/atmel-tdes.c 		ctx->block_size = CFB8_BLOCK_SIZE;
ctx               685 drivers/crypto/atmel-tdes.c 		ctx->block_size = CFB16_BLOCK_SIZE;
ctx               691 drivers/crypto/atmel-tdes.c 		ctx->block_size = CFB32_BLOCK_SIZE;
ctx               697 drivers/crypto/atmel-tdes.c 		ctx->block_size = DES_BLOCK_SIZE;
ctx               702 drivers/crypto/atmel-tdes.c 	return atmel_tdes_handle_queue(ctx->dd, req);
ctx               776 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               783 drivers/crypto/atmel-tdes.c 	memcpy(ctx->key, key, keylen);
ctx               784 drivers/crypto/atmel-tdes.c 	ctx->keylen = keylen;
ctx               792 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               799 drivers/crypto/atmel-tdes.c 	memcpy(ctx->key, key, keylen);
ctx               800 drivers/crypto/atmel-tdes.c 	ctx->keylen = keylen;
ctx               879 drivers/crypto/atmel-tdes.c 	struct atmel_tdes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               884 drivers/crypto/atmel-tdes.c 	dd = atmel_tdes_find_dev(ctx);
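atmel-tdes.c lines 273-276 above pick the engine's operating mode purely from key length: more than two DES keys (over 16 bytes) means 3-key EDE, more than one (over 8) means 2-key EDE, otherwise single DES. The same decision as a tiny helper, assuming DES_KEY_SIZE == 8:

#include <stddef.h>

#define DES_KEY_SIZE 8

enum tdes_mode { TDES_MODE_DES, TDES_MODE_EDE2, TDES_MODE_EDE3 };

/* Mirror of the keylen checks above: 24 bytes -> 3-key EDE,
 * 16 bytes -> 2-key EDE, 8 bytes -> plain DES.
 */
static enum tdes_mode tdes_mode_from_keylen(size_t keylen)
{
	if (keylen > (DES_KEY_SIZE << 1))
		return TDES_MODE_EDE3;
	if (keylen > DES_KEY_SIZE)
		return TDES_MODE_EDE2;
	return TDES_MODE_DES;
}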
ctx              1088 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
ctx              1095 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->crypto_type) {
ctx              1105 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->crypto_type) {
ctx              1134 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
ctx              1140 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->crypto_type) {
ctx              1151 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->crypto_type) {
ctx              1179 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
ctx              1198 drivers/crypto/axis/artpec6_crypto.c 		ret = crypto_sync_skcipher_setkey(ctx->fallback, ctx->aes_key,
ctx              1199 drivers/crypto/axis/artpec6_crypto.c 						  ctx->key_length);
ctx              1204 drivers/crypto/axis/artpec6_crypto.c 			SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx              1206 drivers/crypto/axis/artpec6_crypto.c 			skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx              1250 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(&tfm->base);
ctx              1257 drivers/crypto/axis/artpec6_crypto.c 	ctx->key_length = len;
ctx              1259 drivers/crypto/axis/artpec6_crypto.c 	memcpy(ctx->aes_key, key, len);
ctx              1311 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm);
ctx              1338 drivers/crypto/axis/artpec6_crypto.c 		memcpy(req_ctx->key_buffer, ctx->hmac_key,
ctx              1339 drivers/crypto/axis/artpec6_crypto.c 		       ctx->hmac_key_length);
ctx              1340 drivers/crypto/axis/artpec6_crypto.c 		memset(req_ctx->key_buffer + ctx->hmac_key_length, 0,
ctx              1341 drivers/crypto/axis/artpec6_crypto.c 		       blocksize - ctx->hmac_key_length);
ctx              1537 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1540 drivers/crypto/axis/artpec6_crypto.c 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_ECB;
ctx              1547 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1549 drivers/crypto/axis/artpec6_crypto.c 	ctx->fallback =
ctx              1552 drivers/crypto/axis/artpec6_crypto.c 	if (IS_ERR(ctx->fallback))
ctx              1553 drivers/crypto/axis/artpec6_crypto.c 		return PTR_ERR(ctx->fallback);
ctx              1556 drivers/crypto/axis/artpec6_crypto.c 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CTR;
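The CTR path in artpec6_crypto.c keeps a sync skcipher as a software fallback (allocated at lines 1549-1556, used at lines 1198-1206 above). A hedged sketch of the on-stack subrequest idiom; my_ctx and the request fields are placeholders:

#include <linux/types.h>
#include <crypto/skcipher.h>

struct my_ctx {
	struct crypto_sync_skcipher *fallback;	/* e.g. "ctr(aes)" */
	u8 key[32];
	unsigned int keylen;
};

static int my_ctr_fallback(struct my_ctx *ctx, struct skcipher_request *req)
{
	int ret;

	ret = crypto_sync_skcipher_setkey(ctx->fallback, ctx->key,
					  ctx->keylen);
	if (ret)
		return ret;

	{
		/* Stack-allocated request bound to the sync tfm. */
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);

		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
		skcipher_request_set_callback(subreq, 0, NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		ret = crypto_skcipher_encrypt(subreq);
		skcipher_request_zero(subreq);
	}
	return ret;
}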
ctx              1563 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1566 drivers/crypto/axis/artpec6_crypto.c 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CBC;
ctx              1573 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1576 drivers/crypto/axis/artpec6_crypto.c 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_XTS;
ctx              1583 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1585 drivers/crypto/axis/artpec6_crypto.c 	memset(ctx, 0, sizeof(*ctx));
ctx              1590 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
ctx              1592 drivers/crypto/axis/artpec6_crypto.c 	crypto_free_sync_skcipher(ctx->fallback);
ctx              1600 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx =
ctx              1614 drivers/crypto/axis/artpec6_crypto.c 	memcpy(ctx->aes_key, key, keylen);
ctx              1615 drivers/crypto/axis/artpec6_crypto.c 	ctx->key_length = keylen;
ctx              1623 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx =
ctx              1642 drivers/crypto/axis/artpec6_crypto.c 	memcpy(ctx->aes_key, key, keylen);
ctx              1643 drivers/crypto/axis/artpec6_crypto.c 	ctx->key_length = keylen;
ctx              1670 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
ctx              1687 drivers/crypto/axis/artpec6_crypto.c 		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, a6_regk_crypto_dlkey);
ctx              1689 drivers/crypto/axis/artpec6_crypto.c 		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, a7_regk_crypto_dlkey);
ctx              1691 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
ctx              1692 drivers/crypto/axis/artpec6_crypto.c 					     sizeof(ctx->key_md), false, false);
ctx              1696 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
ctx              1697 drivers/crypto/axis/artpec6_crypto.c 					      ctx->key_length, true, false);
ctx              1703 drivers/crypto/axis/artpec6_crypto.c 	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS)
ctx              1704 drivers/crypto/axis/artpec6_crypto.c 		cipher_klen = ctx->key_length/2;
ctx              1706 drivers/crypto/axis/artpec6_crypto.c 		cipher_klen =  ctx->key_length;
ctx              1721 drivers/crypto/axis/artpec6_crypto.c 			MODULE_NAME, ctx->key_length);
ctx              1725 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->crypto_type) {
ctx              1753 drivers/crypto/axis/artpec6_crypto.c 			MODULE_NAME, ctx->crypto_type);
ctx              1801 drivers/crypto/axis/artpec6_crypto.c 	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_CTR ||
ctx              1802 drivers/crypto/axis/artpec6_crypto.c 	    ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS) {
ctx              1837 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(areq->base.tfm);
ctx              1849 drivers/crypto/axis/artpec6_crypto.c 		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
ctx              1852 drivers/crypto/axis/artpec6_crypto.c 		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
ctx              1855 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
ctx              1856 drivers/crypto/axis/artpec6_crypto.c 					     sizeof(ctx->key_md), false, false);
ctx              1860 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
ctx              1861 drivers/crypto/axis/artpec6_crypto.c 					     ctx->key_length, true, false);
ctx              1867 drivers/crypto/axis/artpec6_crypto.c 	switch (ctx->key_length) {
ctx              2444 drivers/crypto/axis/artpec6_crypto.c 	const struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
ctx              2450 drivers/crypto/axis/artpec6_crypto.c 		     sizeof(ctx->partial_buffer));
ctx              2451 drivers/crypto/axis/artpec6_crypto.c 	BUILD_BUG_ON(sizeof(state->digeststate) != sizeof(ctx->digeststate));
ctx              2453 drivers/crypto/axis/artpec6_crypto.c 	state->digcnt = ctx->digcnt;
ctx              2454 drivers/crypto/axis/artpec6_crypto.c 	state->partial_bytes = ctx->partial_bytes;
ctx              2455 drivers/crypto/axis/artpec6_crypto.c 	state->hash_flags = ctx->hash_flags;
ctx              2458 drivers/crypto/axis/artpec6_crypto.c 		state->oper = FIELD_GET(A6_CRY_MD_OPER, ctx->hash_md);
ctx              2460 drivers/crypto/axis/artpec6_crypto.c 		state->oper = FIELD_GET(A7_CRY_MD_OPER, ctx->hash_md);
ctx              2462 drivers/crypto/axis/artpec6_crypto.c 	memcpy(state->partial_buffer, ctx->partial_buffer,
ctx              2464 drivers/crypto/axis/artpec6_crypto.c 	memcpy(state->digeststate, ctx->digeststate,
ctx              2472 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
ctx              2477 drivers/crypto/axis/artpec6_crypto.c 	memset(ctx, 0, sizeof(*ctx));
ctx              2479 drivers/crypto/axis/artpec6_crypto.c 	ctx->digcnt = state->digcnt;
ctx              2480 drivers/crypto/axis/artpec6_crypto.c 	ctx->partial_bytes = state->partial_bytes;
ctx              2481 drivers/crypto/axis/artpec6_crypto.c 	ctx->hash_flags = state->hash_flags;
ctx              2484 drivers/crypto/axis/artpec6_crypto.c 		ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, state->oper);
ctx              2486 drivers/crypto/axis/artpec6_crypto.c 		ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, state->oper);
ctx              2488 drivers/crypto/axis/artpec6_crypto.c 	memcpy(ctx->partial_buffer, state->partial_buffer,
ctx              2490 drivers/crypto/axis/artpec6_crypto.c 	memcpy(ctx->digeststate, state->digeststate,
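The export/import pair above converts between the hardware metadata word and a variant-neutral saved state with FIELD_GET()/FIELD_PREP() (lines 2458-2486), so state exported on ARTPEC-6 can be re-imported on ARTPEC-7 silicon and vice versa. A sketch of that bitfield round-trip with a made-up mask:

#include <linux/bitfield.h>
#include <linux/bits.h>
#include <linux/types.h>

#define MY_MD_OPER	GENMASK(7, 4)	/* hypothetical operation field */

/* Extract the field when exporting the state... */
static u32 my_export_oper(u32 hash_md)
{
	return FIELD_GET(MY_MD_OPER, hash_md);
}

/* ...and re-insert it, possibly into a different layout, on import. */
static u32 my_import_oper(u32 oper)
{
	return FIELD_PREP(MY_MD_OPER, oper);
}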
ctx               140 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               151 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
ctx               154 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_XTS) &&
ctx               168 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.alg == CIPHER_ALG_RC4)
ctx               207 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               220 drivers/crypto/bcm/cipher.c 		   BCM_HDR_LEN + ctx->spu_req_hdr_len);
ctx               223 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_XTS) &&
ctx               309 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               329 drivers/crypto/bcm/cipher.c 	cipher_parms.alg = ctx->cipher.alg;
ctx               330 drivers/crypto/bcm/cipher.c 	cipher_parms.mode = ctx->cipher.mode;
ctx               331 drivers/crypto/bcm/cipher.c 	cipher_parms.type = ctx->cipher_type;
ctx               332 drivers/crypto/bcm/cipher.c 	cipher_parms.key_len = ctx->enckeylen;
ctx               333 drivers/crypto/bcm/cipher.c 	cipher_parms.key_buf = ctx->enckey;
ctx               342 drivers/crypto/bcm/cipher.c 	if ((ctx->max_payload != SPU_MAX_PAYLOAD_INF) &&
ctx               343 drivers/crypto/bcm/cipher.c 	    (remaining > ctx->max_payload))
ctx               344 drivers/crypto/bcm/cipher.c 		chunksize = ctx->max_payload;
ctx               355 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_CBC) &&
ctx               371 drivers/crypto/bcm/cipher.c 		if ((ctx->cipher.mode == CIPHER_MODE_CBC) &&
ctx               380 drivers/crypto/bcm/cipher.c 		} else if (ctx->cipher.mode == CIPHER_MODE_CTR) {
ctx               394 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.alg == CIPHER_ALG_RC4) {
ctx               416 drivers/crypto/bcm/cipher.c 	if (ctx->max_payload == SPU_MAX_PAYLOAD_INF)
ctx               419 drivers/crypto/bcm/cipher.c 		flow_log("max_payload %u\n", ctx->max_payload);
ctx               425 drivers/crypto/bcm/cipher.c 	memcpy(rctx->msg_buf.bcm_spu_req_hdr, ctx->bcm_spu_req_hdr,
ctx               434 drivers/crypto/bcm/cipher.c 				   ctx->spu_req_hdr_len, !(rctx->is_encrypt),
ctx               446 drivers/crypto/bcm/cipher.c 				     0, ctx->auth.alg, ctx->auth.mode,
ctx               451 drivers/crypto/bcm/cipher.c 			      ctx->spu_req_hdr_len);
ctx               462 drivers/crypto/bcm/cipher.c 	mssg->ctx = rctx;	/* Will be returned in response */
ctx               467 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_XTS) &&
ctx               481 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_XTS) &&
ctx               509 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               519 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_XTS) &&
ctx               530 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.alg == CIPHER_ALG_RC4)
ctx               538 drivers/crypto/bcm/cipher.c 		   &iproc_priv.cipher_cnt[ctx->cipher.alg][ctx->cipher.mode]);
ctx               569 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               579 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
ctx               697 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               740 drivers/crypto/bcm/cipher.c 	hash_parms.alg = ctx->auth.alg;
ctx               741 drivers/crypto/bcm/cipher.c 	hash_parms.mode = ctx->auth.mode;
ctx               743 drivers/crypto/bcm/cipher.c 	hash_parms.key_buf = (u8 *)ctx->authkey;
ctx               744 drivers/crypto/bcm/cipher.c 	hash_parms.key_len = ctx->authkeylen;
ctx               753 drivers/crypto/bcm/cipher.c 	cipher_parms.type = ctx->cipher_type;
ctx               764 drivers/crypto/bcm/cipher.c 	if ((ctx->max_payload != SPU_MAX_PAYLOAD_INF) &&
ctx               765 drivers/crypto/bcm/cipher.c 	    (chunksize > ctx->max_payload))
ctx               766 drivers/crypto/bcm/cipher.c 		chunksize = ctx->max_payload;
ctx               814 drivers/crypto/bcm/cipher.c 	digestsize = spu->spu_digest_size(ctx->digestsize, ctx->auth.alg,
ctx               844 drivers/crypto/bcm/cipher.c 	if (ctx->max_payload == SPU_MAX_PAYLOAD_INF)
ctx               847 drivers/crypto/bcm/cipher.c 		flow_log("max_payload %u\n", ctx->max_payload);
ctx               870 drivers/crypto/bcm/cipher.c 	data_pad_len = spu->spu_gcm_ccm_pad_len(ctx->cipher.mode, chunksize);
ctx               881 drivers/crypto/bcm/cipher.c 				     hash_parms.pad_len, ctx->auth.alg,
ctx               882 drivers/crypto/bcm/cipher.c 				     ctx->auth.mode, rctx->total_sent,
ctx               899 drivers/crypto/bcm/cipher.c 	mssg->ctx = rctx;	/* Will be returned in response */
ctx               934 drivers/crypto/bcm/cipher.c 			       struct iproc_ctx_s *ctx)
ctx               941 drivers/crypto/bcm/cipher.c 	switch (ctx->auth.alg) {
ctx               943 drivers/crypto/bcm/cipher.c 		rc = do_shash("md5", req->result, ctx->opad, blocksize,
ctx               944 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
ctx               947 drivers/crypto/bcm/cipher.c 		rc = do_shash("sha1", req->result, ctx->opad, blocksize,
ctx               948 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
ctx               951 drivers/crypto/bcm/cipher.c 		rc = do_shash("sha224", req->result, ctx->opad, blocksize,
ctx               952 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
ctx               955 drivers/crypto/bcm/cipher.c 		rc = do_shash("sha256", req->result, ctx->opad, blocksize,
ctx               956 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
ctx               959 drivers/crypto/bcm/cipher.c 		rc = do_shash("sha384", req->result, ctx->opad, blocksize,
ctx               960 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
ctx               963 drivers/crypto/bcm/cipher.c 		rc = do_shash("sha512", req->result, ctx->opad, blocksize,
ctx               964 drivers/crypto/bcm/cipher.c 			      req->result, ctx->digestsize, NULL, 0);
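spu_hmac_outer_hash() above finishes a hardware-computed inner digest in software: one more hash of opad followed by the inner digest, i.e. HMAC = H(K xor opad || H(K xor ipad || msg)). The driver dispatches through its local do_shash() helper; a conceptual sketch where sha256_oneshot() is a hypothetical stand-in for whatever one-shot hash the platform provides:

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Assumed helper, not a real API: hash data[0..len) into digest. */
void sha256_oneshot(const uint8_t *data, size_t len, uint8_t *digest);

/* digest holds the inner hash on entry and the final HMAC on return. */
static void hmac_outer(const uint8_t *opad, size_t blocksize,
		       uint8_t *digest, size_t digestsize)
{
	uint8_t buf[128 + 64];	/* worst-case blocksize + digestsize */

	memcpy(buf, opad, blocksize);
	memcpy(buf + blocksize, digest, digestsize);
	sha256_oneshot(buf, blocksize + digestsize, digest);
}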
ctx               985 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx               988 drivers/crypto/bcm/cipher.c 	memcpy(req->result, rctx->msg_buf.digest, ctx->digestsize);
ctx               994 drivers/crypto/bcm/cipher.c 		if (ctx->auth.alg == HASH_ALG_MD5) {
ctx              1003 drivers/crypto/bcm/cipher.c 	flow_dump("  digest ", req->result, ctx->digestsize);
ctx              1007 drivers/crypto/bcm/cipher.c 		err = spu_hmac_outer_hash(req, ctx);
ctx              1010 drivers/crypto/bcm/cipher.c 		flow_dump("  hmac: ", req->result, ctx->digestsize);
ctx              1013 drivers/crypto/bcm/cipher.c 	if (rctx->is_sw_hmac || ctx->auth.mode == HASH_MODE_HMAC) {
ctx              1015 drivers/crypto/bcm/cipher.c 		atomic_inc(&iproc_priv.hmac_cnt[ctx->auth.alg]);
ctx              1018 drivers/crypto/bcm/cipher.c 		atomic_inc(&iproc_priv.hash_cnt[ctx->auth.alg]);
ctx              1032 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx              1047 drivers/crypto/bcm/cipher.c 		 __func__, blocksize, ctx->digestsize);
ctx              1049 drivers/crypto/bcm/cipher.c 	atomic64_add(ctx->digestsize, &iproc_priv.bytes_in);
ctx              1089 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx              1094 drivers/crypto/bcm/cipher.c 	if (ctx->is_rfc4543) {
ctx              1096 drivers/crypto/bcm/cipher.c 		data_padlen = spu->spu_gcm_ccm_pad_len(ctx->cipher.mode,
ctx              1100 drivers/crypto/bcm/cipher.c 		data_padlen = spu->spu_gcm_ccm_pad_len(ctx->cipher.mode,
ctx              1102 drivers/crypto/bcm/cipher.c 		assoc_buf_len = spu->spu_assoc_resp_len(ctx->cipher.mode,
ctx              1107 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_CCM)
ctx              1126 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
ctx              1213 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx              1250 drivers/crypto/bcm/cipher.c 	if ((chunksize > ctx->digestsize) && incl_icv)
ctx              1251 drivers/crypto/bcm/cipher.c 		datalen -= ctx->digestsize;
ctx              1269 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.digest, ctx->digestsize);
ctx              1302 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx              1317 drivers/crypto/bcm/cipher.c 	unsigned int digestsize = ctx->digestsize;
ctx              1334 drivers/crypto/bcm/cipher.c 	req_opts.auth_first = ctx->auth_first;
ctx              1336 drivers/crypto/bcm/cipher.c 	req_opts.is_esp = ctx->is_esp;
ctx              1338 drivers/crypto/bcm/cipher.c 	cipher_parms.alg = ctx->cipher.alg;
ctx              1339 drivers/crypto/bcm/cipher.c 	cipher_parms.mode = ctx->cipher.mode;
ctx              1340 drivers/crypto/bcm/cipher.c 	cipher_parms.type = ctx->cipher_type;
ctx              1341 drivers/crypto/bcm/cipher.c 	cipher_parms.key_buf = ctx->enckey;
ctx              1342 drivers/crypto/bcm/cipher.c 	cipher_parms.key_len = ctx->enckeylen;
ctx              1346 drivers/crypto/bcm/cipher.c 	hash_parms.alg = ctx->auth.alg;
ctx              1347 drivers/crypto/bcm/cipher.c 	hash_parms.mode = ctx->auth.mode;
ctx              1349 drivers/crypto/bcm/cipher.c 	hash_parms.key_buf = (u8 *)ctx->authkey;
ctx              1350 drivers/crypto/bcm/cipher.c 	hash_parms.key_len = ctx->authkeylen;
ctx              1353 drivers/crypto/bcm/cipher.c 	if ((ctx->auth.alg == HASH_ALG_SHA224) &&
ctx              1354 drivers/crypto/bcm/cipher.c 	    (ctx->authkeylen < SHA224_DIGEST_SIZE))
ctx              1358 drivers/crypto/bcm/cipher.c 	if (ctx->is_esp && !ctx->is_rfc4543) {
ctx              1391 drivers/crypto/bcm/cipher.c 	if (spu->spu_assoc_resp_len(ctx->cipher.mode,
ctx              1397 drivers/crypto/bcm/cipher.c 	aead_parms.iv_len = spu->spu_aead_ivlen(ctx->cipher.mode,
ctx              1400 drivers/crypto/bcm/cipher.c 	if (ctx->auth.alg == HASH_ALG_AES)
ctx              1401 drivers/crypto/bcm/cipher.c 		hash_parms.type = (enum hash_type)ctx->cipher_type;
ctx              1404 drivers/crypto/bcm/cipher.c 	aead_parms.aad_pad_len = spu->spu_gcm_ccm_pad_len(ctx->cipher.mode,
ctx              1408 drivers/crypto/bcm/cipher.c 	aead_parms.data_pad_len = spu->spu_gcm_ccm_pad_len(ctx->cipher.mode,
ctx              1411 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_CCM) {
ctx              1417 drivers/crypto/bcm/cipher.c 					 ctx->cipher.mode,
ctx              1426 drivers/crypto/bcm/cipher.c 				spu->spu_gcm_ccm_pad_len(ctx->cipher.mode,
ctx              1432 drivers/crypto/bcm/cipher.c 				       ctx->is_esp);
ctx              1435 drivers/crypto/bcm/cipher.c 	if (ctx->is_rfc4543) {
ctx              1443 drivers/crypto/bcm/cipher.c 					ctx->cipher.mode,
ctx              1448 drivers/crypto/bcm/cipher.c 					ctx->cipher.mode,
ctx              1454 drivers/crypto/bcm/cipher.c 	if (spu_req_incl_icv(ctx->cipher.mode, rctx->is_encrypt)) {
ctx              1489 drivers/crypto/bcm/cipher.c 				     ctx->auth.alg, ctx->auth.mode,
ctx              1507 drivers/crypto/bcm/cipher.c 	mssg->ctx = rctx;	/* Will be returned in response */
ctx              1520 drivers/crypto/bcm/cipher.c 	if (((ctx->cipher.mode == CIPHER_MODE_GCM) ||
ctx              1521 drivers/crypto/bcm/cipher.c 	     (ctx->cipher.mode == CIPHER_MODE_CCM)) && !rctx->is_encrypt) {
ctx              1526 drivers/crypto/bcm/cipher.c 		resp_len -= ctx->digestsize;
ctx              1572 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = rctx->ctx;
ctx              1596 drivers/crypto/bcm/cipher.c 		packet_dump("  ICV: ", rctx->msg_buf.digest, ctx->digestsize);
ctx              1599 drivers/crypto/bcm/cipher.c 				      ctx->digestsize, icv_offset);
ctx              1600 drivers/crypto/bcm/cipher.c 		result_len += ctx->digestsize;
ctx              1607 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.alg == CIPHER_ALG_AES) {
ctx              1608 drivers/crypto/bcm/cipher.c 		if (ctx->cipher.mode == CIPHER_MODE_CCM)
ctx              1610 drivers/crypto/bcm/cipher.c 		else if (ctx->cipher.mode == CIPHER_MODE_GCM)
ctx              1669 drivers/crypto/bcm/cipher.c 	rctx = mssg->ctx;
ctx              1687 drivers/crypto/bcm/cipher.c 	switch (rctx->ctx->alg->type) {
ctx              1710 drivers/crypto/bcm/cipher.c 		switch (rctx->ctx->alg->type) {
ctx              1753 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx =
ctx              1768 drivers/crypto/bcm/cipher.c 	rctx->ctx = ctx;
ctx              1778 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_CBC ||
ctx              1779 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_CTR ||
ctx              1780 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_OFB ||
ctx              1781 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_XTS ||
ctx              1782 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_GCM ||
ctx              1783 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_CCM) {
ctx              1804 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1811 drivers/crypto/bcm/cipher.c 	ctx->cipher_type = CIPHER_TYPE_DES;
ctx              1818 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1825 drivers/crypto/bcm/cipher.c 	ctx->cipher_type = CIPHER_TYPE_3DES;
ctx              1832 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1834 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_XTS)
ctx              1840 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES128;
ctx              1843 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES192;
ctx              1846 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES256;
ctx              1852 drivers/crypto/bcm/cipher.c 	WARN_ON((ctx->max_payload != SPU_MAX_PAYLOAD_INF) &&
ctx              1853 drivers/crypto/bcm/cipher.c 		((ctx->max_payload % AES_BLOCK_SIZE) != 0));
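Lines 1832-1853 map the AES key length to a cipher-type code, halving the length first for XTS because the API passes both XTS keys concatenated. A standalone sketch (the AES_KEYSIZE_* values are the standard 16/24/32; the helper name is made up):

#include <stdbool.h>
#include <errno.h>

#define AES_KEYSIZE_128 16
#define AES_KEYSIZE_192 24
#define AES_KEYSIZE_256 32

enum cipher_type { CIPHER_TYPE_AES128, CIPHER_TYPE_AES192, CIPHER_TYPE_AES256 };

/* Map an AES key length to a cipher-type code; for XTS the supplied
 * key is two concatenated keys, so consider only one half. */
static int aes_keylen_to_type(unsigned int keylen, bool is_xts,
			      enum cipher_type *type)
{
	if (is_xts)
		keylen /= 2;

	switch (keylen) {
	case AES_KEYSIZE_128: *type = CIPHER_TYPE_AES128; return 0;
	case AES_KEYSIZE_192: *type = CIPHER_TYPE_AES192; return 0;
	case AES_KEYSIZE_256: *type = CIPHER_TYPE_AES256; return 0;
	default: return -EINVAL;
	}
}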
ctx              1860 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1863 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = ARC4_MAX_KEY_SIZE + ARC4_STATE_SIZE;
ctx              1865 drivers/crypto/bcm/cipher.c 	ctx->enckey[0] = 0x00;	/* 0x00 */
ctx              1866 drivers/crypto/bcm/cipher.c 	ctx->enckey[1] = 0x00;	/* i    */
ctx              1867 drivers/crypto/bcm/cipher.c 	ctx->enckey[2] = 0x00;	/* 0x00 */
ctx              1868 drivers/crypto/bcm/cipher.c 	ctx->enckey[3] = 0x00;	/* j    */
ctx              1870 drivers/crypto/bcm/cipher.c 		ctx->enckey[i + ARC4_STATE_SIZE] = key[i % keylen];
ctx              1872 drivers/crypto/bcm/cipher.c 	ctx->cipher_type = CIPHER_TYPE_INIT;
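Lines 1860-1872 prime the RC4 context: a 4-byte {0x00, i, 0x00, j} preamble, then the user key replicated out to 256 bytes, with cipher_type left as CIPHER_TYPE_INIT so the hardware runs the key schedule itself. A standalone sketch of that layout (constants mirrored from the listing; the helper is hypothetical):

#include <string.h>

#define ARC4_STATE_SIZE   4	/* 0x00, i, 0x00, j preamble, as above */
#define ARC4_MAX_KEY_SIZE 256

/* Lay out RC4 key material the way the SPU expects: a zeroed i/j
 * preamble, then the user key repeated to fill 256 bytes. */
static void rc4_build_key_blob(unsigned char *enckey,
			       const unsigned char *key, unsigned int keylen)
{
	unsigned int i;

	memset(enckey, 0, ARC4_STATE_SIZE);	/* 0x00, i = 0, 0x00, j = 0 */
	for (i = 0; i < ARC4_MAX_KEY_SIZE; i++)
		enckey[i + ARC4_STATE_SIZE] = key[i % keylen];
}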
ctx              1881 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1889 drivers/crypto/bcm/cipher.c 	switch (ctx->cipher.alg) {
ctx              1910 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.alg != CIPHER_ALG_RC4) {
ctx              1911 drivers/crypto/bcm/cipher.c 		memcpy(ctx->enckey, key, keylen);
ctx              1912 drivers/crypto/bcm/cipher.c 		ctx->enckeylen = keylen;
ctx              1915 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.alg == CIPHER_ALG_AES) &&
ctx              1916 drivers/crypto/bcm/cipher.c 	    (ctx->cipher.mode == CIPHER_MODE_XTS)) {
ctx              1919 drivers/crypto/bcm/cipher.c 		memcpy(ctx->enckey, key + xts_keylen, xts_keylen);
ctx              1920 drivers/crypto/bcm/cipher.c 		memcpy(ctx->enckey + xts_keylen, key, xts_keylen);
ctx              1927 drivers/crypto/bcm/cipher.c 	memset(ctx->bcm_spu_req_hdr, 0, alloc_len);
ctx              1932 drivers/crypto/bcm/cipher.c 	cipher_parms.alg = ctx->cipher.alg;
ctx              1933 drivers/crypto/bcm/cipher.c 	cipher_parms.mode = ctx->cipher.mode;
ctx              1934 drivers/crypto/bcm/cipher.c 	cipher_parms.type = ctx->cipher_type;
ctx              1935 drivers/crypto/bcm/cipher.c 	cipher_parms.key_buf = ctx->enckey;
ctx              1936 drivers/crypto/bcm/cipher.c 	cipher_parms.key_len = ctx->enckeylen;
ctx              1939 drivers/crypto/bcm/cipher.c 	memcpy(ctx->bcm_spu_req_hdr, BCMHEADER, BCM_HDR_LEN);
ctx              1940 drivers/crypto/bcm/cipher.c 	ctx->spu_req_hdr_len =
ctx              1941 drivers/crypto/bcm/cipher.c 	    spu->spu_cipher_req_init(ctx->bcm_spu_req_hdr + BCM_HDR_LEN,
ctx              1944 drivers/crypto/bcm/cipher.c 	ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen,
ctx              1945 drivers/crypto/bcm/cipher.c 							  ctx->enckeylen,
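Lines 1915-1920 show the XTS quirk: the generic API supplies key1||key2, but this SPU wants the second half first, so setkey swaps the halves while copying into ctx->enckey. A standalone sketch of the swap:

#include <string.h>

/* For AES-XTS the caller hands us key1||key2; store the second
 * half first, exactly as the two memcpy() calls above do. */
static void xts_swap_key_halves(unsigned char *dst,
				const unsigned char *key, unsigned int keylen)
{
	unsigned int half = keylen / 2;

	memcpy(dst, key + half, half);	/* second half first */
	memcpy(dst + half, key, half);	/* then the first half */
}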
ctx              1970 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              1979 drivers/crypto/bcm/cipher.c 	rctx->ctx = ctx;
ctx              1998 drivers/crypto/bcm/cipher.c 			       NULL, 0, NULL, 0, ctx->authkey,
ctx              1999 drivers/crypto/bcm/cipher.c 			       ctx->authkeylen);
ctx              2027 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2040 drivers/crypto/bcm/cipher.c 	ctx->digestsize = crypto_ahash_digestsize(tfm);
ctx              2042 drivers/crypto/bcm/cipher.c 	WARN_ON(ctx->digestsize > MAX_DIGEST_SIZE);
ctx              2046 drivers/crypto/bcm/cipher.c 	ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen, 0,
ctx              2065 drivers/crypto/bcm/cipher.c static bool spu_no_incr_hash(struct iproc_ctx_s *ctx)
ctx              2072 drivers/crypto/bcm/cipher.c 	if ((ctx->auth.alg == HASH_ALG_AES) &&
ctx              2073 drivers/crypto/bcm/cipher.c 	    (ctx->auth.mode == HASH_MODE_XCBC))
ctx              2083 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2089 drivers/crypto/bcm/cipher.c 	if (spu_no_incr_hash(ctx)) {
ctx              2104 drivers/crypto/bcm/cipher.c 		ctx->shash = kmalloc(sizeof(*ctx->shash) +
ctx              2106 drivers/crypto/bcm/cipher.c 		if (!ctx->shash) {
ctx              2110 drivers/crypto/bcm/cipher.c 		ctx->shash->tfm = hash;
ctx              2113 drivers/crypto/bcm/cipher.c 		if (ctx->authkeylen > 0) {
ctx              2114 drivers/crypto/bcm/cipher.c 			ret = crypto_shash_setkey(hash, ctx->authkey,
ctx              2115 drivers/crypto/bcm/cipher.c 						  ctx->authkeylen);
ctx              2121 drivers/crypto/bcm/cipher.c 		ret = crypto_shash_init(ctx->shash);
ctx              2132 drivers/crypto/bcm/cipher.c 	kfree(ctx->shash);
ctx              2156 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2162 drivers/crypto/bcm/cipher.c 	if (spu_no_incr_hash(ctx)) {
ctx              2187 drivers/crypto/bcm/cipher.c 		ret = crypto_shash_update(ctx->shash, tmpbuf, req->nbytes);
ctx              2211 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2214 drivers/crypto/bcm/cipher.c 	if (spu_no_incr_hash(ctx)) {
ctx              2220 drivers/crypto/bcm/cipher.c 		ret = crypto_shash_final(ctx->shash, req->result);
ctx              2223 drivers/crypto/bcm/cipher.c 		crypto_free_shash(ctx->shash->tfm);
ctx              2224 drivers/crypto/bcm/cipher.c 		kfree(ctx->shash);
ctx              2250 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2256 drivers/crypto/bcm/cipher.c 	if (spu_no_incr_hash(ctx)) {
ctx              2285 drivers/crypto/bcm/cipher.c 		ret = crypto_shash_finup(ctx->shash, tmpbuf, req->nbytes,
ctx              2296 drivers/crypto/bcm/cipher.c 	crypto_free_shash(ctx->shash->tfm);
ctx              2297 drivers/crypto/bcm/cipher.c 	kfree(ctx->shash);
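Lines 2083-2297 implement the software fallback for hashes the SPU cannot do incrementally (spu_no_incr_hash(), e.g. AES-XCBC at 2072-2073): allocate a synchronous shash, kmalloc a descriptor sized by crypto_shash_descsize(), then drive init/update/final by hand. A hedged kernel-side sketch of the same pattern, collapsed to one-shot for brevity:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hash a single buffer in software with the synchronous shash API,
 * the same calls the driver makes around lines 2104-2224. */
static int sw_hash_one_shot(const char *alg, const u8 *data,
			    unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int ret;

	tfm = crypto_alloc_shash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}
	desc->tfm = tfm;

	ret = crypto_shash_init(desc);
	if (!ret)
		ret = crypto_shash_update(desc, data, len);
	if (!ret)
		ret = crypto_shash_final(desc, out);

	kfree(desc);
	crypto_free_shash(tfm);
	return ret;
}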
ctx              2318 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(ahash);
ctx              2324 drivers/crypto/bcm/cipher.c 	if (ctx->auth.alg == HASH_ALG_AES) {
ctx              2327 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES128;
ctx              2330 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES192;
ctx              2333 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES256;
ctx              2343 drivers/crypto/bcm/cipher.c 	memcpy(ctx->authkey, key, keylen);
ctx              2344 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = keylen;
ctx              2382 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(ahash);
ctx              2394 drivers/crypto/bcm/cipher.c 		switch (ctx->auth.alg) {
ctx              2396 drivers/crypto/bcm/cipher.c 			rc = do_shash("md5", ctx->authkey, key, keylen, NULL,
ctx              2400 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha1", ctx->authkey, key, keylen, NULL,
ctx              2404 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha224", ctx->authkey, key, keylen, NULL,
ctx              2408 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha256", ctx->authkey, key, keylen, NULL,
ctx              2412 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha384", ctx->authkey, key, keylen, NULL,
ctx              2416 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha512", ctx->authkey, key, keylen, NULL,
ctx              2420 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha3-224", ctx->authkey, key, keylen,
ctx              2424 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha3-256", ctx->authkey, key, keylen,
ctx              2428 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha3-384", ctx->authkey, key, keylen,
ctx              2432 drivers/crypto/bcm/cipher.c 			rc = do_shash("sha3-512", ctx->authkey, key, keylen,
ctx              2441 drivers/crypto/bcm/cipher.c 			       __func__, rc, hash_alg_name[ctx->auth.alg]);
ctx              2444 drivers/crypto/bcm/cipher.c 		ctx->authkeylen = digestsize;
ctx              2447 drivers/crypto/bcm/cipher.c 		flow_dump("  newkey: ", ctx->authkey, ctx->authkeylen);
ctx              2449 drivers/crypto/bcm/cipher.c 		memcpy(ctx->authkey, key, keylen);
ctx              2450 drivers/crypto/bcm/cipher.c 		ctx->authkeylen = keylen;
ctx              2459 drivers/crypto/bcm/cipher.c 		memcpy(ctx->ipad, ctx->authkey, ctx->authkeylen);
ctx              2460 drivers/crypto/bcm/cipher.c 		memset(ctx->ipad + ctx->authkeylen, 0,
ctx              2461 drivers/crypto/bcm/cipher.c 		       blocksize - ctx->authkeylen);
ctx              2462 drivers/crypto/bcm/cipher.c 		ctx->authkeylen = 0;
ctx              2463 drivers/crypto/bcm/cipher.c 		memcpy(ctx->opad, ctx->ipad, blocksize);
ctx              2466 drivers/crypto/bcm/cipher.c 			ctx->ipad[index] ^= HMAC_IPAD_VALUE;
ctx              2467 drivers/crypto/bcm/cipher.c 			ctx->opad[index] ^= HMAC_OPAD_VALUE;
ctx              2470 drivers/crypto/bcm/cipher.c 		flow_dump("  ipad: ", ctx->ipad, blocksize);
ctx              2471 drivers/crypto/bcm/cipher.c 		flow_dump("  opad: ", ctx->opad, blocksize);
ctx              2473 drivers/crypto/bcm/cipher.c 	ctx->digestsize = digestsize;
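Lines 2449-2471 are classic RFC 2104 HMAC preprocessing: a key longer than the block size is first hashed down (the do_shash() calls above), then ipad/opad are the zero-padded key XORed with 0x36/0x5c (HMAC_IPAD_VALUE/HMAC_OPAD_VALUE from include/crypto/hmac.h). Standalone sketch of the pad derivation:

#include <string.h>

#define HMAC_IPAD_VALUE 0x36	/* as in include/crypto/hmac.h */
#define HMAC_OPAD_VALUE 0x5c

/* Derive the HMAC inner/outer pads from a key already reduced to
 * at most one block (RFC 2104). */
static void hmac_make_pads(unsigned char *ipad, unsigned char *opad,
			   const unsigned char *key, unsigned int keylen,
			   unsigned int blocksize)
{
	unsigned int i;

	memcpy(ipad, key, keylen);
	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}
}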
ctx              2483 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2492 drivers/crypto/bcm/cipher.c 	if (!spu_no_incr_hash(ctx)) {
ctx              2495 drivers/crypto/bcm/cipher.c 		ctx->auth.mode = HASH_MODE_HASH;
ctx              2497 drivers/crypto/bcm/cipher.c 		memcpy(rctx->hash_carry, ctx->ipad, blocksize);
ctx              2533 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_ahash_ctx(tfm);
ctx              2552 drivers/crypto/bcm/cipher.c 		ctx->auth.mode = HASH_MODE_HMAC;
ctx              2555 drivers/crypto/bcm/cipher.c 		ctx->auth.mode = HASH_MODE_HASH;
ctx              2557 drivers/crypto/bcm/cipher.c 		memcpy(rctx->hash_carry, ctx->ipad, blocksize);
ctx              2572 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(aead);
ctx              2579 drivers/crypto/bcm/cipher.c 	if (((ctx->cipher.mode == CIPHER_MODE_GCM) ||
ctx              2580 drivers/crypto/bcm/cipher.c 	     (ctx->cipher.mode == CIPHER_MODE_CCM)) &&
ctx              2583 drivers/crypto/bcm/cipher.c 		    (!rctx->is_encrypt && (req->cryptlen == ctx->digestsize))) {
ctx              2590 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_CCM) &&
ctx              2592 drivers/crypto/bcm/cipher.c 	    (ctx->digestsize != 8) && (ctx->digestsize != 12) &&
ctx              2593 drivers/crypto/bcm/cipher.c 	    (ctx->digestsize != 16)) {
ctx              2595 drivers/crypto/bcm/cipher.c 			 __func__, ctx->digestsize);
ctx              2603 drivers/crypto/bcm/cipher.c 	if ((ctx->cipher.mode == CIPHER_MODE_CCM) &&
ctx              2615 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_GCM &&
ctx              2616 drivers/crypto/bcm/cipher.c 	    ctx->cipher.alg == CIPHER_ALG_AES &&
ctx              2630 drivers/crypto/bcm/cipher.c 	if (ctx->max_payload == SPU_MAX_PAYLOAD_INF)
ctx              2633 drivers/crypto/bcm/cipher.c 		return payload_len > ctx->max_payload;
ctx              2658 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_tfm_ctx(tfm);
ctx              2664 drivers/crypto/bcm/cipher.c 	if (ctx->fallback_cipher) {
ctx              2667 drivers/crypto/bcm/cipher.c 		aead_request_set_tfm(req, ctx->fallback_cipher);
ctx              2702 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(aead);
ctx              2724 drivers/crypto/bcm/cipher.c 	rctx->ctx = ctx;
ctx              2761 drivers/crypto/bcm/cipher.c 	if (ctx->cipher.mode == CIPHER_MODE_CBC ||
ctx              2762 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_CTR ||
ctx              2763 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_OFB ||
ctx              2764 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_XTS ||
ctx              2765 drivers/crypto/bcm/cipher.c 	    ctx->cipher.mode == CIPHER_MODE_GCM) {
ctx              2767 drivers/crypto/bcm/cipher.c 			ctx->salt_len +
ctx              2769 drivers/crypto/bcm/cipher.c 	} else if (ctx->cipher.mode == CIPHER_MODE_CCM) {
ctx              2786 drivers/crypto/bcm/cipher.c 	flow_log("  authkeylen:%u\n", ctx->authkeylen);
ctx              2787 drivers/crypto/bcm/cipher.c 	flow_log("  is_esp: %s\n", ctx->is_esp ? "yes" : "no");
ctx              2789 drivers/crypto/bcm/cipher.c 	if (ctx->max_payload == SPU_MAX_PAYLOAD_INF)
ctx              2792 drivers/crypto/bcm/cipher.c 		flow_log("  max_payload: %u\n", ctx->max_payload);
ctx              2802 drivers/crypto/bcm/cipher.c 		if (ctx->salt_len)
ctx              2803 drivers/crypto/bcm/cipher.c 			memcpy(rctx->msg_buf.iv_ctr + ctx->salt_offset,
ctx              2804 drivers/crypto/bcm/cipher.c 			       ctx->salt, ctx->salt_len);
ctx              2805 drivers/crypto/bcm/cipher.c 		memcpy(rctx->msg_buf.iv_ctr + ctx->salt_offset + ctx->salt_len,
ctx              2807 drivers/crypto/bcm/cipher.c 		       rctx->iv_ctr_len - ctx->salt_len - ctx->salt_offset);
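Lines 2799-2807 assemble the per-request nonce for the ESP AEAD modes: the salt captured at setkey time goes in at salt_offset, followed by the per-packet IV from the request. For rfc4106(gcm(aes)) that is a 4-byte salt plus an 8-byte IV giving the 12-byte GCM nonce. Standalone sketch mirroring those lines (the helper name is invented):

#include <string.h>

/* Assemble the on-the-wire nonce for an ESP AEAD request:
 * [salt_offset bytes][salt][per-packet IV]. */
static void build_esp_nonce(unsigned char *iv_ctr, unsigned int iv_ctr_len,
			    const unsigned char *salt, unsigned int salt_len,
			    unsigned int salt_offset,
			    const unsigned char *req_iv)
{
	if (salt_len)
		memcpy(iv_ctr + salt_offset, salt, salt_len);
	memcpy(iv_ctr + salt_offset + salt_len, req_iv,
	       iv_ctr_len - salt_len - salt_offset);
}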
ctx              2823 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              2840 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = keys.enckeylen;
ctx              2841 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = keys.authkeylen;
ctx              2843 drivers/crypto/bcm/cipher.c 	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
ctx              2845 drivers/crypto/bcm/cipher.c 	memset(ctx->authkey, 0, sizeof(ctx->authkey));
ctx              2846 drivers/crypto/bcm/cipher.c 	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
ctx              2848 drivers/crypto/bcm/cipher.c 	switch (ctx->alg->cipher_info.alg) {
ctx              2853 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_DES;
ctx              2859 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_3DES;
ctx              2862 drivers/crypto/bcm/cipher.c 		switch (ctx->enckeylen) {
ctx              2864 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES128;
ctx              2867 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES192;
ctx              2870 drivers/crypto/bcm/cipher.c 			ctx->cipher_type = CIPHER_TYPE_AES256;
ctx              2877 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_INIT;
ctx              2884 drivers/crypto/bcm/cipher.c 	flow_log("  enckeylen:%u authkeylen:%u\n", ctx->enckeylen,
ctx              2885 drivers/crypto/bcm/cipher.c 		 ctx->authkeylen);
ctx              2886 drivers/crypto/bcm/cipher.c 	flow_dump("  enc: ", ctx->enckey, ctx->enckeylen);
ctx              2887 drivers/crypto/bcm/cipher.c 	flow_dump("  auth: ", ctx->authkey, ctx->authkeylen);
ctx              2890 drivers/crypto/bcm/cipher.c 	if (ctx->fallback_cipher) {
ctx              2893 drivers/crypto/bcm/cipher.c 		ctx->fallback_cipher->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
ctx              2894 drivers/crypto/bcm/cipher.c 		ctx->fallback_cipher->base.crt_flags |=
ctx              2896 drivers/crypto/bcm/cipher.c 		ret = crypto_aead_setkey(ctx->fallback_cipher, key, keylen);
ctx              2901 drivers/crypto/bcm/cipher.c 			    (ctx->fallback_cipher->base.crt_flags &
ctx              2906 drivers/crypto/bcm/cipher.c 	ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen,
ctx              2907 drivers/crypto/bcm/cipher.c 							  ctx->enckeylen,
ctx              2915 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = 0;
ctx              2916 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = 0;
ctx              2917 drivers/crypto/bcm/cipher.c 	ctx->digestsize = 0;
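Lines 2823-2846 consume a combined authenc() key blob; in-tree this blob is parsed with crypto_authenc_extractkeys() from <crypto/authenc.h>, which is where the keys.enckeylen/keys.authkey fields above come from. A hedged kernel-side sketch of the split; the helper and its destination buffers are illustrative:

#include <crypto/authenc.h>
#include <linux/errno.h>
#include <linux/string.h>

/* Split a combined authenc() key into its authentication and
 * encryption parts, the step implied just above the
 * ctx->enckeylen/ctx->authkeylen stores. */
static int split_authenc_key(const u8 *key, unsigned int keylen,
			     u8 *enckey, unsigned int *enckeylen,
			     u8 *authkey, unsigned int *authkeylen)
{
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen))
		return -EINVAL;

	memcpy(enckey, keys.enckey, keys.enckeylen);
	memcpy(authkey, keys.authkey, keys.authkeylen);
	*enckeylen = keys.enckeylen;
	*authkeylen = keys.authkeylen;
	return 0;
}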
ctx              2927 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              2935 drivers/crypto/bcm/cipher.c 	if (!ctx->is_esp)
ctx              2936 drivers/crypto/bcm/cipher.c 		ctx->digestsize = keylen;
ctx              2938 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = keylen;
ctx              2939 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = 0;
ctx              2940 drivers/crypto/bcm/cipher.c 	memcpy(ctx->enckey, key, ctx->enckeylen);
ctx              2942 drivers/crypto/bcm/cipher.c 	switch (ctx->enckeylen) {
ctx              2944 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES128;
ctx              2947 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES192;
ctx              2950 drivers/crypto/bcm/cipher.c 		ctx->cipher_type = CIPHER_TYPE_AES256;
ctx              2956 drivers/crypto/bcm/cipher.c 	flow_log("  enckeylen:%u authkeylen:%u\n", ctx->enckeylen,
ctx              2957 drivers/crypto/bcm/cipher.c 		 ctx->authkeylen);
ctx              2958 drivers/crypto/bcm/cipher.c 	flow_dump("  enc: ", ctx->enckey, ctx->enckeylen);
ctx              2959 drivers/crypto/bcm/cipher.c 	flow_dump("  auth: ", ctx->authkey, ctx->authkeylen);
ctx              2962 drivers/crypto/bcm/cipher.c 	if (ctx->fallback_cipher) {
ctx              2965 drivers/crypto/bcm/cipher.c 		ctx->fallback_cipher->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
ctx              2966 drivers/crypto/bcm/cipher.c 		ctx->fallback_cipher->base.crt_flags |=
ctx              2968 drivers/crypto/bcm/cipher.c 		ret = crypto_aead_setkey(ctx->fallback_cipher, key,
ctx              2969 drivers/crypto/bcm/cipher.c 					 keylen + ctx->salt_len);
ctx              2974 drivers/crypto/bcm/cipher.c 			    (ctx->fallback_cipher->base.crt_flags &
ctx              2979 drivers/crypto/bcm/cipher.c 	ctx->spu_resp_hdr_len = spu->spu_response_hdr_len(ctx->authkeylen,
ctx              2980 drivers/crypto/bcm/cipher.c 							  ctx->enckeylen,
ctx              2985 drivers/crypto/bcm/cipher.c 	flow_log("  enckeylen:%u authkeylen:%u\n", ctx->enckeylen,
ctx              2986 drivers/crypto/bcm/cipher.c 		 ctx->authkeylen);
ctx              2991 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = 0;
ctx              2992 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = 0;
ctx              2993 drivers/crypto/bcm/cipher.c 	ctx->digestsize = 0;
ctx              3013 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              3016 drivers/crypto/bcm/cipher.c 	ctx->salt_len = GCM_ESP_SALT_SIZE;
ctx              3017 drivers/crypto/bcm/cipher.c 	ctx->salt_offset = GCM_ESP_SALT_OFFSET;
ctx              3018 drivers/crypto/bcm/cipher.c 	memcpy(ctx->salt, key + keylen - GCM_ESP_SALT_SIZE, GCM_ESP_SALT_SIZE);
ctx              3020 drivers/crypto/bcm/cipher.c 	ctx->digestsize = GCM_ESP_DIGESTSIZE;
ctx              3021 drivers/crypto/bcm/cipher.c 	ctx->is_esp = true;
ctx              3022 drivers/crypto/bcm/cipher.c 	flow_dump("salt: ", ctx->salt, GCM_ESP_SALT_SIZE);
ctx              3041 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              3044 drivers/crypto/bcm/cipher.c 	ctx->salt_len = GCM_ESP_SALT_SIZE;
ctx              3045 drivers/crypto/bcm/cipher.c 	ctx->salt_offset = GCM_ESP_SALT_OFFSET;
ctx              3046 drivers/crypto/bcm/cipher.c 	memcpy(ctx->salt, key + keylen - GCM_ESP_SALT_SIZE, GCM_ESP_SALT_SIZE);
ctx              3048 drivers/crypto/bcm/cipher.c 	ctx->digestsize = GCM_ESP_DIGESTSIZE;
ctx              3049 drivers/crypto/bcm/cipher.c 	ctx->is_esp = true;
ctx              3050 drivers/crypto/bcm/cipher.c 	ctx->is_rfc4543 = true;
ctx              3051 drivers/crypto/bcm/cipher.c 	flow_dump("salt: ", ctx->salt, GCM_ESP_SALT_SIZE);
ctx              3070 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              3073 drivers/crypto/bcm/cipher.c 	ctx->salt_len = CCM_ESP_SALT_SIZE;
ctx              3074 drivers/crypto/bcm/cipher.c 	ctx->salt_offset = CCM_ESP_SALT_OFFSET;
ctx              3075 drivers/crypto/bcm/cipher.c 	memcpy(ctx->salt, key + keylen - CCM_ESP_SALT_SIZE, CCM_ESP_SALT_SIZE);
ctx              3077 drivers/crypto/bcm/cipher.c 	ctx->is_esp = true;
ctx              3078 drivers/crypto/bcm/cipher.c 	flow_dump("salt: ", ctx->salt, CCM_ESP_SALT_SIZE);
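The rfc4106/rfc4543/rfc4309 setkey hits at 3013-3078 peel the trailing salt off the supplied key before handing the shortened key on: 4 bytes for GCM-ESP (RFC 4106) and 3 for CCM-ESP (RFC 4309). Standalone sketch of the peel; the helper is hypothetical:

#include <string.h>

/* The last salt_len bytes of an ESP AEAD key are the salt; keep
 * them in the context and shorten the cipher key accordingly. */
static void peel_esp_salt(unsigned char *salt, unsigned int salt_len,
			  const unsigned char *key, unsigned int *keylen)
{
	memcpy(salt, key + *keylen - salt_len, salt_len);
	*keylen -= salt_len;
}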
ctx              3085 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_aead_ctx(cipher);
ctx              3089 drivers/crypto/bcm/cipher.c 		 __func__, ctx->authkeylen, authsize);
ctx              3091 drivers/crypto/bcm/cipher.c 	ctx->digestsize = authsize;
ctx              3094 drivers/crypto/bcm/cipher.c 	if (ctx->fallback_cipher) {
ctx              3097 drivers/crypto/bcm/cipher.c 		ret = crypto_aead_setauthsize(ctx->fallback_cipher, authsize);
ctx              4262 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_tfm_ctx(tfm);
ctx              4267 drivers/crypto/bcm/cipher.c 	ctx->alg = cipher_alg;
ctx              4268 drivers/crypto/bcm/cipher.c 	ctx->cipher = cipher_alg->cipher_info;
ctx              4269 drivers/crypto/bcm/cipher.c 	ctx->auth = cipher_alg->auth_info;
ctx              4270 drivers/crypto/bcm/cipher.c 	ctx->auth_first = cipher_alg->auth_first;
ctx              4271 drivers/crypto/bcm/cipher.c 	ctx->max_payload = spu->spu_ctx_max_payload(ctx->cipher.alg,
ctx              4272 drivers/crypto/bcm/cipher.c 						    ctx->cipher.mode,
ctx              4274 drivers/crypto/bcm/cipher.c 	ctx->fallback_cipher = NULL;
ctx              4276 drivers/crypto/bcm/cipher.c 	ctx->enckeylen = 0;
ctx              4277 drivers/crypto/bcm/cipher.c 	ctx->authkeylen = 0;
ctx              4323 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_tfm_ctx(tfm);
ctx              4334 drivers/crypto/bcm/cipher.c 	ctx->is_esp = false;
ctx              4335 drivers/crypto/bcm/cipher.c 	ctx->salt_len = 0;
ctx              4336 drivers/crypto/bcm/cipher.c 	ctx->salt_offset = 0;
ctx              4339 drivers/crypto/bcm/cipher.c 	get_random_bytes(ctx->iv, MAX_IV_SIZE);
ctx              4340 drivers/crypto/bcm/cipher.c 	flow_dump("  iv: ", ctx->iv, MAX_IV_SIZE);
ctx              4346 drivers/crypto/bcm/cipher.c 			ctx->fallback_cipher =
ctx              4350 drivers/crypto/bcm/cipher.c 			if (IS_ERR(ctx->fallback_cipher)) {
ctx              4353 drivers/crypto/bcm/cipher.c 				return PTR_ERR(ctx->fallback_cipher);
ctx              4369 drivers/crypto/bcm/cipher.c 	struct iproc_ctx_s *ctx = crypto_tfm_ctx(tfm);
ctx              4373 drivers/crypto/bcm/cipher.c 	if (ctx->fallback_cipher) {
ctx              4374 drivers/crypto/bcm/cipher.c 		crypto_free_aead(ctx->fallback_cipher);
ctx              4375 drivers/crypto/bcm/cipher.c 		ctx->fallback_cipher = NULL;
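Lines 4323-4375 tie the fallback's lifetime to the tfm: the init hook allocates a software AEAD when the algorithm needs one, and the exit hook frees it. A sketch of that pairing; the exact type/mask flags the driver passes are not visible in the listing, so the ones below are an assumption:

#include <crypto/aead.h>
#include <linux/err.h>

/* Ask the crypto API for a software implementation of the same
 * algorithm (flags assumed; CRYPTO_ALG_NEED_FALLBACK excludes
 * drivers that themselves need a fallback). */
static struct crypto_aead *get_fallback_aead(const char *alg_name)
{
	return crypto_alloc_aead(alg_name, 0,
				 CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
}

static void put_fallback_aead(struct crypto_aead *fallback)
{
	if (!IS_ERR_OR_NULL(fallback))
		crypto_free_aead(fallback);
}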
ctx               253 drivers/crypto/bcm/cipher.h 	struct iproc_ctx_s *ctx;
ctx               190 drivers/crypto/bcm/util.c 	char ctx[];
ctx               119 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               120 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               124 drivers/crypto/caam/caamalg.c 			ctx->adata.keylen_pad;
ctx               131 drivers/crypto/caam/caamalg.c 		ctx->adata.key_inline = true;
ctx               132 drivers/crypto/caam/caamalg.c 		ctx->adata.key_virt = ctx->key;
ctx               134 drivers/crypto/caam/caamalg.c 		ctx->adata.key_inline = false;
ctx               135 drivers/crypto/caam/caamalg.c 		ctx->adata.key_dma = ctx->key_dma;
ctx               139 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               140 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
ctx               142 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               143 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               150 drivers/crypto/caam/caamalg.c 		ctx->adata.key_inline = true;
ctx               151 drivers/crypto/caam/caamalg.c 		ctx->adata.key_virt = ctx->key;
ctx               153 drivers/crypto/caam/caamalg.c 		ctx->adata.key_inline = false;
ctx               154 drivers/crypto/caam/caamalg.c 		ctx->adata.key_dma = ctx->key_dma;
ctx               158 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               159 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
ctx               161 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               162 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               172 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               173 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               179 drivers/crypto/caam/caamalg.c 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
ctx               183 drivers/crypto/caam/caamalg.c 	if (!ctx->authsize)
ctx               187 drivers/crypto/caam/caamalg.c 	if (!ctx->cdata.keylen)
ctx               204 drivers/crypto/caam/caamalg.c 		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
ctx               205 drivers/crypto/caam/caamalg.c 				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
ctx               214 drivers/crypto/caam/caamalg.c 	ctx->adata.key_virt = ctx->key;
ctx               215 drivers/crypto/caam/caamalg.c 	ctx->adata.key_dma = ctx->key_dma;
ctx               217 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
ctx               218 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
ctx               220 drivers/crypto/caam/caamalg.c 	data_len[0] = ctx->adata.keylen_pad;
ctx               221 drivers/crypto/caam/caamalg.c 	data_len[1] = ctx->cdata.keylen;
ctx               236 drivers/crypto/caam/caamalg.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               237 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               240 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               241 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               242 drivers/crypto/caam/caamalg.c 			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
ctx               244 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               245 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               258 drivers/crypto/caam/caamalg.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               259 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               262 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               263 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               264 drivers/crypto/caam/caamalg.c 			       ctx->authsize, alg->caam.geniv, is_rfc3686,
ctx               266 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               267 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               282 drivers/crypto/caam/caamalg.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               283 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               286 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               287 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               288 drivers/crypto/caam/caamalg.c 				  ctx->authsize, is_rfc3686, nonce,
ctx               290 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               291 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
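The caamalg.c descriptor-construction hits (124-162, 226-291, 319-353, ...) repeatedly choose between embedding key material inline in the shared descriptor and referencing it by DMA address, depending on remaining descriptor space (the adata/cdata key_inline, key_virt and key_dma fields). A standalone sketch of that decision; the struct and parameter names are illustrative:

#include <stdbool.h>
#include <stdint.h>

struct key_ref {
	bool inline_key;		/* true: key bytes live in the descriptor */
	const uint8_t *key_virt;	/* valid when inline_key                  */
	uint64_t key_dma;		/* valid when !inline_key                 */
};

/* Shared descriptors have a fixed size budget; embed the key only
 * if the remaining space allows, otherwise reference it by DMA
 * address, as the key_inline branches above do. */
static void choose_key_placement(struct key_ref *ref, unsigned int rem_bytes,
				 unsigned int needed_bytes,
				 const uint8_t *key_virt, uint64_t key_dma)
{
	if (rem_bytes >= needed_bytes) {
		ref->inline_key = true;
		ref->key_virt = key_virt;
	} else {
		ref->inline_key = false;
		ref->key_dma = key_dma;
	}
}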
ctx               300 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               302 drivers/crypto/caam/caamalg.c 	ctx->authsize = authsize;
ctx               310 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               311 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               315 drivers/crypto/caam/caamalg.c 			ctx->cdata.keylen;
ctx               317 drivers/crypto/caam/caamalg.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               326 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               327 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               329 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               330 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               333 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               334 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
ctx               335 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               336 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               343 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               344 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               346 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               347 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               350 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               351 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
ctx               352 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               353 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               360 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               367 drivers/crypto/caam/caamalg.c 	ctx->authsize = authsize;
ctx               375 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               376 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               380 drivers/crypto/caam/caamalg.c 			ctx->cdata.keylen;
ctx               382 drivers/crypto/caam/caamalg.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               391 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               392 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               394 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               395 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               398 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               399 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               401 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               402 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               409 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               410 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               412 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               413 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               416 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               417 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               419 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               420 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               428 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               435 drivers/crypto/caam/caamalg.c 	ctx->authsize = authsize;
ctx               443 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               444 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               448 drivers/crypto/caam/caamalg.c 			ctx->cdata.keylen;
ctx               450 drivers/crypto/caam/caamalg.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               459 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               460 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               462 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               463 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               466 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               467 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               469 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               470 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               477 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = true;
ctx               478 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_virt = ctx->key;
ctx               480 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_inline = false;
ctx               481 drivers/crypto/caam/caamalg.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               484 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               485 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               487 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               488 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               496 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               501 drivers/crypto/caam/caamalg.c 	ctx->authsize = authsize;
ctx               509 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               510 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               514 drivers/crypto/caam/caamalg.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               517 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               518 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               519 drivers/crypto/caam/caamalg.c 			       ctx->authsize, true, false);
ctx               520 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               521 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               523 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               524 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               525 drivers/crypto/caam/caamalg.c 			       ctx->authsize, false, false);
ctx               526 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               527 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               535 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               540 drivers/crypto/caam/caamalg.c 	ctx->authsize = authsize;
ctx               547 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               556 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_virt = key;
ctx               557 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen - saltlen;
ctx               565 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               566 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               585 drivers/crypto/caam/caamalg.c 		ctx->adata.keylen = keys.authkeylen;
ctx               586 drivers/crypto/caam/caamalg.c 		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
ctx               589 drivers/crypto/caam/caamalg.c 		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
ctx               592 drivers/crypto/caam/caamalg.c 		memcpy(ctx->key, keys.authkey, keys.authkeylen);
ctx               593 drivers/crypto/caam/caamalg.c 		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
ctx               595 drivers/crypto/caam/caamalg.c 		dma_sync_single_for_device(jrdev, ctx->key_dma,
ctx               596 drivers/crypto/caam/caamalg.c 					   ctx->adata.keylen_pad +
ctx               597 drivers/crypto/caam/caamalg.c 					   keys.enckeylen, ctx->dir);
ctx               601 drivers/crypto/caam/caamalg.c 	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
ctx               609 drivers/crypto/caam/caamalg.c 	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
ctx               610 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
ctx               611 drivers/crypto/caam/caamalg.c 				   keys.enckeylen, ctx->dir);
ctx               614 drivers/crypto/caam/caamalg.c 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
ctx               615 drivers/crypto/caam/caamalg.c 			     ctx->adata.keylen_pad + keys.enckeylen, 1);
ctx               618 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keys.enckeylen;
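Lines 585-618 lay the CAAM authenc keys out in one buffer: the split authentication key, padded to keylen_pad, followed immediately by the cipher key, with cdata.key_virt/key_dma offset keylen_pad past adata's (caamalg_qi.c 214-218 shows the same offsets). Standalone sketch of the layout; gen_split_key() itself and the DMA sync are elided:

#include <stdint.h>
#include <string.h>

/* One contiguous key buffer: [split auth key, padded][cipher key].
 * The cipher-key view then starts keylen_pad bytes in. */
static void layout_authenc_keys(uint8_t *buf, const uint8_t *splitkey,
				unsigned int keylen_pad,
				const uint8_t *enckey, unsigned int enckeylen)
{
	memcpy(buf, splitkey, keylen_pad);		/* class-2 (auth) part   */
	memcpy(buf + keylen_pad, enckey, enckeylen);	/* class-1 (cipher) part */
}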
ctx               647 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               648 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               660 drivers/crypto/caam/caamalg.c 	memcpy(ctx->key, key, keylen);
ctx               661 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
ctx               662 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen;
ctx               670 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               671 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               683 drivers/crypto/caam/caamalg.c 	memcpy(ctx->key, key, keylen);
ctx               689 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen - 4;
ctx               690 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
ctx               691 drivers/crypto/caam/caamalg.c 				   ctx->dir);
ctx               698 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               699 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               711 drivers/crypto/caam/caamalg.c 	memcpy(ctx->key, key, keylen);
ctx               717 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen - 4;
ctx               718 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
ctx               719 drivers/crypto/caam/caamalg.c 				   ctx->dir);
ctx               726 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               730 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               738 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen;
ctx               739 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_virt = key;
ctx               740 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_inline = true;
ctx               743 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               744 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
ctx               746 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               747 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               750 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               751 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
ctx               753 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               754 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               844 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               845 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx               854 drivers/crypto/caam/caamalg.c 	ctx->cdata.keylen = keylen;
ctx               855 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_virt = key;
ctx               856 drivers/crypto/caam/caamalg.c 	ctx->cdata.key_inline = true;
ctx               859 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_enc;
ctx               860 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
ctx               861 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
ctx               862 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx               865 drivers/crypto/caam/caamalg.c 	desc = ctx->sh_desc_dec;
ctx               866 drivers/crypto/caam/caamalg.c 	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
ctx               867 drivers/crypto/caam/caamalg.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
ctx               868 drivers/crypto/caam/caamalg.c 				   desc_bytes(desc), ctx->dir);
ctx              1093 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1094 drivers/crypto/caam/caamalg.c 	int authsize = ctx->authsize;
ctx              1102 drivers/crypto/caam/caamalg.c 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
ctx              1103 drivers/crypto/caam/caamalg.c 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
ctx              1154 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1173 drivers/crypto/caam/caamalg.c 		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
ctx              1221 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1222 drivers/crypto/caam/caamalg.c 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
ctx              1223 drivers/crypto/caam/caamalg.c 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
ctx              1270 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1271 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1288 drivers/crypto/caam/caamalg.c 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
ctx              1289 drivers/crypto/caam/caamalg.c 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
ctx              1326 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1327 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1334 drivers/crypto/caam/caamalg.c 	unsigned int authsize = ctx->authsize;
ctx              1462 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1463 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1496 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1497 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1529 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1530 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1567 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1568 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1602 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1603 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1641 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1642 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1683 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1684 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1841 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1842 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              1878 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1879 drivers/crypto/caam/caamalg.c 	struct device *jrdev = ctx->jrdev;
ctx              3408 drivers/crypto/caam/caamalg.c static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
ctx              3414 drivers/crypto/caam/caamalg.c 	ctx->jrdev = caam_jr_alloc();
ctx              3415 drivers/crypto/caam/caamalg.c 	if (IS_ERR(ctx->jrdev)) {
ctx              3417 drivers/crypto/caam/caamalg.c 		return PTR_ERR(ctx->jrdev);
ctx              3420 drivers/crypto/caam/caamalg.c 	priv = dev_get_drvdata(ctx->jrdev->parent);
ctx              3422 drivers/crypto/caam/caamalg.c 		ctx->dir = DMA_BIDIRECTIONAL;
ctx              3424 drivers/crypto/caam/caamalg.c 		ctx->dir = DMA_TO_DEVICE;
ctx              3426 drivers/crypto/caam/caamalg.c 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
ctx              3429 drivers/crypto/caam/caamalg.c 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
ctx              3430 drivers/crypto/caam/caamalg.c 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
ctx              3431 drivers/crypto/caam/caamalg.c 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
ctx              3432 drivers/crypto/caam/caamalg.c 		caam_jr_free(ctx->jrdev);
ctx              3436 drivers/crypto/caam/caamalg.c 	ctx->sh_desc_enc_dma = dma_addr;
ctx              3437 drivers/crypto/caam/caamalg.c 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
ctx              3439 drivers/crypto/caam/caamalg.c 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
ctx              3442 drivers/crypto/caam/caamalg.c 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
ctx              3443 drivers/crypto/caam/caamalg.c 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
ctx              3463 drivers/crypto/caam/caamalg.c 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
ctx              3465 drivers/crypto/caam/caamalg.c 	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
ctx              3468 drivers/crypto/caam/caamalg.c static void caam_exit_common(struct caam_ctx *ctx)
ctx              3470 drivers/crypto/caam/caamalg.c 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
ctx              3472 drivers/crypto/caam/caamalg.c 			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
ctx              3473 drivers/crypto/caam/caamalg.c 	caam_jr_free(ctx->jrdev);
ctx                76 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx                82 drivers/crypto/caam/caamalg_qi.c 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
ctx                85 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
ctx                87 drivers/crypto/caam/caamalg_qi.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               104 drivers/crypto/caam/caamalg_qi.c 		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
ctx               105 drivers/crypto/caam/caamalg_qi.c 				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
ctx               114 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.key_virt = ctx->key;
ctx               115 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.key_dma = ctx->key_dma;
ctx               117 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
ctx               118 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
ctx               120 drivers/crypto/caam/caamalg_qi.c 	data_len[0] = ctx->adata.keylen_pad;
ctx               121 drivers/crypto/caam/caamalg_qi.c 	data_len[1] = ctx->cdata.keylen;
ctx               133 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               134 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               136 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
ctx               137 drivers/crypto/caam/caamalg_qi.c 			       ivsize, ctx->authsize, is_rfc3686, nonce,
ctx               148 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               149 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               151 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
ctx               152 drivers/crypto/caam/caamalg_qi.c 			       ivsize, ctx->authsize, alg->caam.geniv,
ctx               166 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               167 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               169 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
ctx               170 drivers/crypto/caam/caamalg_qi.c 				  ivsize, ctx->authsize, is_rfc3686, nonce,
ctx               179 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               181 drivers/crypto/caam/caamalg_qi.c 	ctx->authsize = authsize;
ctx               190 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               191 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               210 drivers/crypto/caam/caamalg_qi.c 		ctx->adata.keylen = keys.authkeylen;
ctx               211 drivers/crypto/caam/caamalg_qi.c 		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
ctx               214 drivers/crypto/caam/caamalg_qi.c 		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
ctx               217 drivers/crypto/caam/caamalg_qi.c 		memcpy(ctx->key, keys.authkey, keys.authkeylen);
ctx               218 drivers/crypto/caam/caamalg_qi.c 		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
ctx               220 drivers/crypto/caam/caamalg_qi.c 		dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
ctx               221 drivers/crypto/caam/caamalg_qi.c 					   ctx->adata.keylen_pad +
ctx               222 drivers/crypto/caam/caamalg_qi.c 					   keys.enckeylen, ctx->dir);
ctx               226 drivers/crypto/caam/caamalg_qi.c 	ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
ctx               233 drivers/crypto/caam/caamalg_qi.c 	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
ctx               234 drivers/crypto/caam/caamalg_qi.c 	dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
ctx               235 drivers/crypto/caam/caamalg_qi.c 				   ctx->adata.keylen_pad + keys.enckeylen,
ctx               236 drivers/crypto/caam/caamalg_qi.c 				   ctx->dir);
ctx               239 drivers/crypto/caam/caamalg_qi.c 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
ctx               240 drivers/crypto/caam/caamalg_qi.c 			     ctx->adata.keylen_pad + keys.enckeylen, 1);
ctx               243 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keys.enckeylen;
ctx               250 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               251 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               252 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               259 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               260 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               261 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
ctx               295 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               298 drivers/crypto/caam/caamalg_qi.c 			ctx->cdata.keylen;
ctx               300 drivers/crypto/caam/caamalg_qi.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               308 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               309 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_virt = ctx->key;
ctx               311 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               312 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               315 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
ctx               316 drivers/crypto/caam/caamalg_qi.c 			      ctx->authsize, true);
ctx               323 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               324 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_virt = ctx->key;
ctx               326 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               327 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               330 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
ctx               331 drivers/crypto/caam/caamalg_qi.c 			      ctx->authsize, true);
ctx               338 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               345 drivers/crypto/caam/caamalg_qi.c 	ctx->authsize = authsize;
ctx               354 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               355 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               367 drivers/crypto/caam/caamalg_qi.c 	memcpy(ctx->key, key, keylen);
ctx               368 drivers/crypto/caam/caamalg_qi.c 	dma_sync_single_for_device(jrdev->parent, ctx->key_dma, keylen,
ctx               369 drivers/crypto/caam/caamalg_qi.c 				   ctx->dir);
ctx               370 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keylen;
ctx               377 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               378 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               379 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               386 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               387 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               388 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
ctx               400 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               403 drivers/crypto/caam/caamalg_qi.c 			ctx->cdata.keylen;
ctx               405 drivers/crypto/caam/caamalg_qi.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               408 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_virt = ctx->key;
ctx               415 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               417 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               418 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               421 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
ctx               422 drivers/crypto/caam/caamalg_qi.c 				  ctx->authsize, true);
ctx               429 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               431 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               432 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               435 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
ctx               436 drivers/crypto/caam/caamalg_qi.c 				  ctx->authsize, true);
ctx               444 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               451 drivers/crypto/caam/caamalg_qi.c 	ctx->authsize = authsize;
ctx               460 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               461 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               473 drivers/crypto/caam/caamalg_qi.c 	memcpy(ctx->key, key, keylen);
ctx               478 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keylen - 4;
ctx               479 drivers/crypto/caam/caamalg_qi.c 	dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
ctx               480 drivers/crypto/caam/caamalg_qi.c 				   ctx->cdata.keylen, ctx->dir);
ctx               487 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               488 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               489 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               496 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               497 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               498 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
ctx               510 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               513 drivers/crypto/caam/caamalg_qi.c 			ctx->cdata.keylen;
ctx               515 drivers/crypto/caam/caamalg_qi.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               518 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_virt = ctx->key;
ctx               525 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               527 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               528 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               531 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_rfc4543_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
ctx               532 drivers/crypto/caam/caamalg_qi.c 				  ctx->authsize, true);
ctx               539 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = true;
ctx               541 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_inline = false;
ctx               542 drivers/crypto/caam/caamalg_qi.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               545 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_rfc4543_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
ctx               546 drivers/crypto/caam/caamalg_qi.c 				  ctx->authsize, true);
ctx               554 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               559 drivers/crypto/caam/caamalg_qi.c 	ctx->authsize = authsize;
ctx               568 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               569 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               581 drivers/crypto/caam/caamalg_qi.c 	memcpy(ctx->key, key, keylen);
ctx               586 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keylen - 4;
ctx               587 drivers/crypto/caam/caamalg_qi.c 	dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
ctx               588 drivers/crypto/caam/caamalg_qi.c 				   ctx->cdata.keylen, ctx->dir);
ctx               595 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               596 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               597 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               604 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               605 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               606 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
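
The rfc4106/rfc4543 setkey paths above treat the last 4 bytes of the supplied key as the RFC 4106 salt: the whole blob is copied into ctx->key, but cdata.keylen is set to keylen - 4 and only that much is synced as the AES key proper. A hedged sketch of that split; unlike the driver (which keeps key and salt contiguous in ctx->key), this version separates the two fields for clarity:

#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>

static int split_rfc4106_key(const u8 *key, unsigned int keylen,
			     u8 *aes_key, u8 *salt)
{
	if (keylen <= 4)
		return -EINVAL;
	keylen -= 4;			/* trailing 4 bytes are the salt */
	memcpy(aes_key, key, keylen);
	memcpy(salt, key + keylen, 4);
	return 0;
}
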
ctx               619 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               623 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               631 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keylen;
ctx               632 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_virt = key;
ctx               633 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_inline = true;
ctx               636 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize,
ctx               638 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize,
ctx               642 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               643 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               644 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               651 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               652 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               653 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
ctx               745 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               746 drivers/crypto/caam/caamalg_qi.c 	struct device *jrdev = ctx->jrdev;
ctx               754 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.keylen = keylen;
ctx               755 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_virt = key;
ctx               756 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.key_inline = true;
ctx               759 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_xts_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata);
ctx               760 drivers/crypto/caam/caamalg_qi.c 	cnstr_shdsc_xts_skcipher_decap(ctx->sh_desc_dec, &ctx->cdata);
ctx               763 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
ctx               764 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
ctx               765 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_enc);
ctx               772 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
ctx               773 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
ctx               774 drivers/crypto/caam/caamalg_qi.c 					  ctx->sh_desc_dec);
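
Every setkey above ends the same way: after rebuilding sh_desc_enc/sh_desc_dec, any driver context that already exists is refreshed so in-flight queues pick up the new descriptor; contexts that were never used stay NULL and are built lazily later. A minimal sketch of that step, where update_hw() is a hypothetical stand-in for caam_drv_ctx_update():

#include <linux/types.h>

struct sd_pair {
	void *drv_ctx[2];	/* [0] = ENCRYPT, [1] = DECRYPT */
	u32 *sh_desc[2];	/* freshly rebuilt shared descriptors */
};

int update_hw(void *drv_ctx, u32 *sh_desc);	/* hypothetical helper */

static int refresh_live_ctxs(struct sd_pair *p)
{
	int i, ret;

	for (i = 0; i < 2; i++) {
		if (!p->drv_ctx[i])
			continue;	/* not built yet; created on first use */
		ret = update_hw(p->drv_ctx[i], p->sh_desc[i]);
		if (ret)
			return ret;	/* the drivers log and bail out here */
	}
	return 0;
}
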
ctx               831 drivers/crypto/caam/caamalg_qi.c static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx,
ctx               839 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
ctx               843 drivers/crypto/caam/caamalg_qi.c 		spin_lock(&ctx->lock);
ctx               846 drivers/crypto/caam/caamalg_qi.c 		drv_ctx = ctx->drv_ctx[type];
ctx               851 drivers/crypto/caam/caamalg_qi.c 				desc = ctx->sh_desc_enc;
ctx               853 drivers/crypto/caam/caamalg_qi.c 				desc = ctx->sh_desc_dec;
ctx               856 drivers/crypto/caam/caamalg_qi.c 			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
ctx               860 drivers/crypto/caam/caamalg_qi.c 			ctx->drv_ctx[type] = drv_ctx;
ctx               863 drivers/crypto/caam/caamalg_qi.c 		spin_unlock(&ctx->lock);
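
get_drv_ctx() above is the lazy counterpart: an unlocked fast-path read of ctx->drv_ctx[type], re-checked under ctx->lock before the expensive caam_drv_ctx_init() call, so only the first request per direction pays the init cost. A sketch of that double-checked pattern with simplified stand-in types (alloc_hw_ctx() is hypothetical):

#include <linux/err.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct drv_ctx_slot {
	spinlock_t lock;
	void *drv_ctx[2];	/* [0] = ENCRYPT, [1] = DECRYPT */
	u32 *sh_desc[2];	/* prebuilt shared descriptors */
};

void *alloc_hw_ctx(u32 *sh_desc);	/* stand-in for caam_drv_ctx_init() */

static void *get_slot(struct drv_ctx_slot *s, int type)
{
	void *d = s->drv_ctx[type];	/* fast path: already created */

	if (unlikely(!d)) {
		spin_lock(&s->lock);
		d = s->drv_ctx[type];	/* re-check under the lock */
		if (!d) {
			d = alloc_hw_ctx(s->sh_desc[type]);
			if (!IS_ERR_OR_NULL(d))
				s->drv_ctx[type] = d;
		}
		spin_unlock(&s->lock);
	}
	return d;
}
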
ctx               942 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               945 drivers/crypto/caam/caamalg_qi.c 	struct device *qidev = ctx->qidev;
ctx               953 drivers/crypto/caam/caamalg_qi.c 	unsigned int authsize = ctx->authsize;
ctx               959 drivers/crypto/caam/caamalg_qi.c 	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
ctx              1133 drivers/crypto/caam/caamalg_qi.c 		  (encrypt ? ctx->authsize : (-ctx->authsize));
ctx              1162 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1174 drivers/crypto/caam/caamalg_qi.c 	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
ctx              1178 drivers/crypto/caam/caamalg_qi.c 		aead_unmap(ctx->qidev, edesc, req);
ctx              1250 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1251 drivers/crypto/caam/caamalg_qi.c 	struct device *qidev = ctx->qidev;
ctx              1263 drivers/crypto/caam/caamalg_qi.c 	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
ctx              1402 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1416 drivers/crypto/caam/caamalg_qi.c 	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
ctx              1420 drivers/crypto/caam/caamalg_qi.c 		skcipher_unmap(ctx->qidev, edesc, req);
ctx              2416 drivers/crypto/caam/caamalg_qi.c static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
ctx              2426 drivers/crypto/caam/caamalg_qi.c 	ctx->jrdev = caam_jr_alloc();
ctx              2427 drivers/crypto/caam/caamalg_qi.c 	if (IS_ERR(ctx->jrdev)) {
ctx              2429 drivers/crypto/caam/caamalg_qi.c 		return PTR_ERR(ctx->jrdev);
ctx              2432 drivers/crypto/caam/caamalg_qi.c 	dev = ctx->jrdev->parent;
ctx              2435 drivers/crypto/caam/caamalg_qi.c 		ctx->dir = DMA_BIDIRECTIONAL;
ctx              2437 drivers/crypto/caam/caamalg_qi.c 		ctx->dir = DMA_TO_DEVICE;
ctx              2439 drivers/crypto/caam/caamalg_qi.c 	ctx->key_dma = dma_map_single(dev, ctx->key, sizeof(ctx->key),
ctx              2440 drivers/crypto/caam/caamalg_qi.c 				      ctx->dir);
ctx              2441 drivers/crypto/caam/caamalg_qi.c 	if (dma_mapping_error(dev, ctx->key_dma)) {
ctx              2443 drivers/crypto/caam/caamalg_qi.c 		caam_jr_free(ctx->jrdev);
ctx              2448 drivers/crypto/caam/caamalg_qi.c 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
ctx              2449 drivers/crypto/caam/caamalg_qi.c 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
ctx              2451 drivers/crypto/caam/caamalg_qi.c 	ctx->qidev = dev;
ctx              2453 drivers/crypto/caam/caamalg_qi.c 	spin_lock_init(&ctx->lock);
ctx              2454 drivers/crypto/caam/caamalg_qi.c 	ctx->drv_ctx[ENCRYPT] = NULL;
ctx              2455 drivers/crypto/caam/caamalg_qi.c 	ctx->drv_ctx[DECRYPT] = NULL;
ctx              2475 drivers/crypto/caam/caamalg_qi.c 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2477 drivers/crypto/caam/caamalg_qi.c 	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
ctx              2480 drivers/crypto/caam/caamalg_qi.c static void caam_exit_common(struct caam_ctx *ctx)
ctx              2482 drivers/crypto/caam/caamalg_qi.c 	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
ctx              2483 drivers/crypto/caam/caamalg_qi.c 	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
ctx              2485 drivers/crypto/caam/caamalg_qi.c 	dma_unmap_single(ctx->jrdev->parent, ctx->key_dma, sizeof(ctx->key),
ctx              2486 drivers/crypto/caam/caamalg_qi.c 			 ctx->dir);
ctx              2488 drivers/crypto/caam/caamalg_qi.c 	caam_jr_free(ctx->jrdev);
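
caam_init_common()/caam_exit_common() bracket the tfm lifetime: the key buffer is DMA-mapped once at init, bidirectionally when DKP rewrites the key in place and to-device otherwise, and unmapped with the same direction at exit after the driver contexts are released. A hedged sketch of that pairing, with struct my_ctx as a cut-down stand-in for struct caam_ctx:

#include <linux/dma-mapping.h>
#include <linux/errno.h>

struct my_ctx {
	struct device *dev;
	u8 key[64];			/* illustrative capacity */
	dma_addr_t key_dma;
	enum dma_data_direction dir;
};

static int my_init(struct my_ctx *ctx, struct device *dev, bool uses_dkp)
{
	ctx->dev = dev;
	/* DKP writes the split key back, so map both ways in that case */
	ctx->dir = uses_dkp ? DMA_BIDIRECTIONAL : DMA_TO_DEVICE;
	ctx->key_dma = dma_map_single(dev, ctx->key, sizeof(ctx->key),
				      ctx->dir);
	return dma_mapping_error(dev, ctx->key_dma) ? -ENOMEM : 0;
}

static void my_exit(struct my_ctx *ctx)
{
	/* unmap with the direction chosen at init time */
	dma_unmap_single(ctx->dev, ctx->key_dma, sizeof(ctx->key), ctx->dir);
}
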
ctx               167 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               169 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               177 drivers/crypto/caam/caamalg_qi2.c 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
ctx               181 drivers/crypto/caam/caamalg_qi2.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               198 drivers/crypto/caam/caamalg_qi2.c 		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
ctx               199 drivers/crypto/caam/caamalg_qi2.c 				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
ctx               208 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_virt = ctx->key;
ctx               209 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_dma = ctx->key_dma;
ctx               211 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
ctx               212 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
ctx               214 drivers/crypto/caam/caamalg_qi2.c 	data_len[0] = ctx->adata.keylen_pad;
ctx               215 drivers/crypto/caam/caamalg_qi2.c 	data_len[1] = ctx->cdata.keylen;
ctx               225 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               226 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               228 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               232 drivers/crypto/caam/caamalg_qi2.c 		cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata,
ctx               233 drivers/crypto/caam/caamalg_qi2.c 					  ivsize, ctx->authsize, is_rfc3686,
ctx               237 drivers/crypto/caam/caamalg_qi2.c 		cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata,
ctx               238 drivers/crypto/caam/caamalg_qi2.c 				       ivsize, ctx->authsize, is_rfc3686, nonce,
ctx               242 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               244 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               253 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_inline = !!(inl_mask & 1);
ctx               254 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_inline = !!(inl_mask & 2);
ctx               256 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               258 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata,
ctx               259 drivers/crypto/caam/caamalg_qi2.c 			       ivsize, ctx->authsize, alg->caam.geniv,
ctx               263 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               265 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
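
For CTR/RFC3686 templates, aead_set_sh_desc() above digs the 4-byte nonce out of the tail of the packed key blob: past the padded auth key and the cipher key, minus CTR_RFC3686_NONCE_SIZE. A sketch of that pointer arithmetic, assuming the auth||enc blob layout:

#include <linux/types.h>

#define CTR_RFC3686_NONCE_SIZE	4	/* as defined in crypto/ctr.h */

static const u32 *rfc3686_nonce(const u8 *key_blob, unsigned int auth_pad,
				unsigned int enc_len)
{
	/* the nonce is the last 4 bytes of the cipher-key region */
	return (const u32 *)(key_blob + auth_pad + enc_len -
			     CTR_RFC3686_NONCE_SIZE);
}
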
ctx               272 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               274 drivers/crypto/caam/caamalg_qi2.c 	ctx->authsize = authsize;
ctx               283 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               284 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               296 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.keylen = keys.authkeylen;
ctx               297 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
ctx               300 drivers/crypto/caam/caamalg_qi2.c 	if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
ctx               303 drivers/crypto/caam/caamalg_qi2.c 	memcpy(ctx->key, keys.authkey, keys.authkeylen);
ctx               304 drivers/crypto/caam/caamalg_qi2.c 	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
ctx               305 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->key_dma, ctx->adata.keylen_pad +
ctx               306 drivers/crypto/caam/caamalg_qi2.c 				   keys.enckeylen, ctx->dir);
ctx               308 drivers/crypto/caam/caamalg_qi2.c 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
ctx               309 drivers/crypto/caam/caamalg_qi2.c 			     ctx->adata.keylen_pad + keys.enckeylen, 1);
ctx               311 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keys.enckeylen;
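
The authenc setkey above lays the two keys out back to back in ctx->key: the auth key first (later expanded to adata.keylen_pad by the split-key machinery), then the cipher key at offset keylen_pad, so a single sync of keylen_pad + enckeylen covers both. A sketch of that packing; the blob size is illustrative:

#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>

struct key_blob {
	u8 buf[128];			/* illustrative capacity */
};

static int pack_authenc_keys(struct key_blob *b,
			     const u8 *authkey, unsigned int authlen,
			     unsigned int auth_pad,	/* adata.keylen_pad */
			     const u8 *enckey, unsigned int enclen)
{
	if (auth_pad + enclen > sizeof(b->buf))
		return -EINVAL;		/* mirrors the CAAM_MAX_KEY_SIZE check */
	memcpy(b->buf, authkey, authlen);
	memcpy(b->buf + auth_pad, enckey, enclen);	/* enc key after pad */
	return 0;
}
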
ctx               354 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               357 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               365 drivers/crypto/caam/caamalg_qi2.c 	unsigned int authsize = ctx->authsize;
ctx               545 drivers/crypto/caam/caamalg_qi2.c 		  (encrypt ? ctx->authsize : (-ctx->authsize));
ctx               588 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               590 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               594 drivers/crypto/caam/caamalg_qi2.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               597 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               599 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               600 drivers/crypto/caam/caamalg_qi2.c 			       ctx->authsize, true, true);
ctx               602 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               604 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               606 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               608 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
ctx               609 drivers/crypto/caam/caamalg_qi2.c 			       ctx->authsize, false, true);
ctx               611 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               613 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               621 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               626 drivers/crypto/caam/caamalg_qi2.c 	ctx->authsize = authsize;
ctx               633 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               642 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = key;
ctx               643 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen - saltlen;
ctx               650 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               651 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               656 drivers/crypto/caam/caamalg_qi2.c 			ctx->cdata.keylen;
ctx               658 drivers/crypto/caam/caamalg_qi2.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               667 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               668 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_virt = ctx->key;
ctx               670 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               671 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               674 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               676 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, true);
ctx               678 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               680 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               687 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               688 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_virt = ctx->key;
ctx               690 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               691 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               694 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               696 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, true);
ctx               698 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               700 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
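
Each *_set_sh_desc() above makes the same space trade-off twice, once per direction: if the shared descriptor still has room for the key (the rem_bytes test against the per-algorithm descriptor size), the key is embedded via key_virt; otherwise only its pre-mapped address is referenced via key_dma. A compact sketch of that decision, with the cost term as an assumption standing in for the DESC_QI_*_MAX_SIZE comparison:

#include <linux/types.h>

struct key_ref {
	const void *key_virt;	/* used when the key is inlined */
	dma_addr_t key_dma;	/* used when only a pointer fits */
	bool key_inline;
};

static void choose_key_ref(struct key_ref *ref, const void *key,
			   dma_addr_t key_dma, int rem_bytes,
			   int inline_cost)
{
	if (rem_bytes >= inline_cost) {	/* descriptor fits with key inline */
		ref->key_inline = true;
		ref->key_virt = key;
	} else {			/* reference the pre-mapped copy */
		ref->key_inline = false;
		ref->key_dma = key_dma;
	}
}
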
ctx               707 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               714 drivers/crypto/caam/caamalg_qi2.c 	ctx->authsize = authsize;
ctx               723 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               724 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               735 drivers/crypto/caam/caamalg_qi2.c 	memcpy(ctx->key, key, keylen);
ctx               736 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->key_dma, keylen, ctx->dir);
ctx               737 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen;
ctx               744 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               745 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               750 drivers/crypto/caam/caamalg_qi2.c 			ctx->cdata.keylen;
ctx               752 drivers/crypto/caam/caamalg_qi2.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               755 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = ctx->key;
ctx               763 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               765 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               766 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               769 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               771 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               774 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               776 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               783 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               785 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               786 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               789 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               791 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               794 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               796 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               804 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               811 drivers/crypto/caam/caamalg_qi2.c 	ctx->authsize = authsize;
ctx               820 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               821 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               833 drivers/crypto/caam/caamalg_qi2.c 	memcpy(ctx->key, key, keylen);
ctx               838 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen - 4;
ctx               839 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->key_dma, ctx->cdata.keylen,
ctx               840 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               847 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               848 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               853 drivers/crypto/caam/caamalg_qi2.c 			ctx->cdata.keylen;
ctx               855 drivers/crypto/caam/caamalg_qi2.c 	if (!ctx->cdata.keylen || !ctx->authsize)
ctx               858 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = ctx->key;
ctx               866 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               868 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               869 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               872 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               874 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               877 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               879 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               886 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = true;
ctx               888 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_inline = false;
ctx               889 drivers/crypto/caam/caamalg_qi2.c 		ctx->cdata.key_dma = ctx->key_dma;
ctx               892 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               894 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
ctx               897 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               899 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               907 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
ctx               912 drivers/crypto/caam/caamalg_qi2.c 	ctx->authsize = authsize;
ctx               921 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx               922 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               934 drivers/crypto/caam/caamalg_qi2.c 	memcpy(ctx->key, key, keylen);
ctx               939 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen - 4;
ctx               940 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->key_dma, ctx->cdata.keylen,
ctx               941 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               949 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               953 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx               962 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen;
ctx               963 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = key;
ctx               964 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_inline = true;
ctx               967 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx               969 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
ctx               972 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx               974 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx               977 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx               979 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
ctx               982 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx               984 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx              1080 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1081 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx              1091 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.keylen = keylen;
ctx              1092 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_virt = key;
ctx              1093 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.key_inline = true;
ctx              1096 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[ENCRYPT];
ctx              1098 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
ctx              1100 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[ENCRYPT],
ctx              1102 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx              1105 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DECRYPT];
ctx              1107 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
ctx              1109 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(dev, ctx->flc_dma[DECRYPT],
ctx              1111 drivers/crypto/caam/caamalg_qi2.c 				   ctx->dir);
ctx              1122 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1123 drivers/crypto/caam/caamalg_qi2.c 	struct device *dev = ctx->dev;
ctx              1296 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1299 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              1302 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              1304 drivers/crypto/caam/caamalg_qi2.c 	aead_unmap(ctx->dev, edesc, req);
ctx              1317 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1320 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              1323 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              1325 drivers/crypto/caam/caamalg_qi2.c 	aead_unmap(ctx->dev, edesc, req);
ctx              1334 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1343 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc = &ctx->flc[ENCRYPT];
ctx              1344 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc_dma = ctx->flc_dma[ENCRYPT];
ctx              1346 drivers/crypto/caam/caamalg_qi2.c 	caam_req->ctx = &req->base;
ctx              1348 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);
ctx              1351 drivers/crypto/caam/caamalg_qi2.c 		aead_unmap(ctx->dev, edesc, req);
ctx              1362 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
ctx              1371 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc = &ctx->flc[DECRYPT];
ctx              1372 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc_dma = ctx->flc_dma[DECRYPT];
ctx              1374 drivers/crypto/caam/caamalg_qi2.c 	caam_req->ctx = &req->base;
ctx              1376 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);
ctx              1379 drivers/crypto/caam/caamalg_qi2.c 		aead_unmap(ctx->dev, edesc, req);
ctx              1402 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1407 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              1410 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              1419 drivers/crypto/caam/caamalg_qi2.c 	skcipher_unmap(ctx->dev, edesc, req);
ctx              1440 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1445 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              1448 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              1457 drivers/crypto/caam/caamalg_qi2.c 	skcipher_unmap(ctx->dev, edesc, req);
ctx              1476 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1488 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc = &ctx->flc[ENCRYPT];
ctx              1489 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc_dma = ctx->flc_dma[ENCRYPT];
ctx              1491 drivers/crypto/caam/caamalg_qi2.c 	caam_req->ctx = &req->base;
ctx              1493 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);
ctx              1496 drivers/crypto/caam/caamalg_qi2.c 		skcipher_unmap(ctx->dev, edesc, req);
ctx              1507 drivers/crypto/caam/caamalg_qi2.c 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx              1518 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc = &ctx->flc[DECRYPT];
ctx              1519 drivers/crypto/caam/caamalg_qi2.c 	caam_req->flc_dma = ctx->flc_dma[DECRYPT];
ctx              1521 drivers/crypto/caam/caamalg_qi2.c 	caam_req->ctx = &req->base;
ctx              1523 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, caam_req);
ctx              1526 drivers/crypto/caam/caamalg_qi2.c 		skcipher_unmap(ctx->dev, edesc, req);
ctx              1533 drivers/crypto/caam/caamalg_qi2.c static int caam_cra_init(struct caam_ctx *ctx, struct caam_alg_entry *caam,
ctx              1540 drivers/crypto/caam/caamalg_qi2.c 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
ctx              1541 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
ctx              1543 drivers/crypto/caam/caamalg_qi2.c 	ctx->dev = caam->dev;
ctx              1544 drivers/crypto/caam/caamalg_qi2.c 	ctx->dir = uses_dkp ? DMA_BIDIRECTIONAL : DMA_TO_DEVICE;
ctx              1546 drivers/crypto/caam/caamalg_qi2.c 	dma_addr = dma_map_single_attrs(ctx->dev, ctx->flc,
ctx              1548 drivers/crypto/caam/caamalg_qi2.c 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
ctx              1549 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, dma_addr)) {
ctx              1550 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map key, shared descriptors\n");
ctx              1555 drivers/crypto/caam/caamalg_qi2.c 		ctx->flc_dma[i] = dma_addr + i * sizeof(ctx->flc[i]);
ctx              1556 drivers/crypto/caam/caamalg_qi2.c 	ctx->key_dma = dma_addr + NUM_OP * sizeof(ctx->flc[0]);
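
caam_cra_init() above maps the flow-context array and the key as one contiguous streaming region (DMA_ATTR_SKIP_CPU_SYNC defers coherency to the explicit dma_sync calls seen elsewhere), then derives every handle by offset: flc_dma[i] = base + i * sizeof(flc[i]) and key_dma = base + NUM_OP * sizeof(flc[0]). A sketch of that scheme with assumed sizes; note the mapping length stops at the flc_dma field, matching the offsetof(struct caam_ctx, flc_dma) unmap in caam_exit_common():

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/stddef.h>

#define NUM_OP	4			/* assumed number of operations */

struct flow_ctx { u32 words[64]; };	/* stand-in flow context */

struct qi2_ctx {
	struct flow_ctx flc[NUM_OP];
	u8 key[64];
	dma_addr_t flc_dma[NUM_OP];	/* mapped region ends here */
	dma_addr_t key_dma;
};

static int map_qi2_ctx(struct device *dev, struct qi2_ctx *ctx,
		       enum dma_data_direction dir)
{
	dma_addr_t base;
	int i;

	/* one mapping covers flc[] and key[]; syncs happen per use */
	base = dma_map_single_attrs(dev, ctx->flc,
				    offsetof(struct qi2_ctx, flc_dma),
				    dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(dev, base))
		return -ENOMEM;

	for (i = 0; i < NUM_OP; i++)
		ctx->flc_dma[i] = base + i * sizeof(ctx->flc[i]);
	ctx->key_dma = base + NUM_OP * sizeof(ctx->flc[0]);
	return 0;
}
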
ctx              1582 drivers/crypto/caam/caamalg_qi2.c static void caam_exit_common(struct caam_ctx *ctx)
ctx              1584 drivers/crypto/caam/caamalg_qi2.c 	dma_unmap_single_attrs(ctx->dev, ctx->flc_dma[0],
ctx              1585 drivers/crypto/caam/caamalg_qi2.c 			       offsetof(struct caam_ctx, flc_dma), ctx->dir,
ctx              3089 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3091 drivers/crypto/caam/caamalg_qi2.c 	struct dpaa2_caam_priv *priv = dev_get_drvdata(ctx->dev);
ctx              3096 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[UPDATE];
ctx              3098 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_UPDATE, ctx->ctx_len,
ctx              3099 drivers/crypto/caam/caamalg_qi2.c 			  ctx->ctx_len, true, priv->sec_attr.era);
ctx              3101 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(ctx->dev, ctx->flc_dma[UPDATE],
ctx              3108 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[UPDATE_FIRST];
ctx              3110 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_INIT, ctx->ctx_len,
ctx              3111 drivers/crypto/caam/caamalg_qi2.c 			  ctx->ctx_len, false, priv->sec_attr.era);
ctx              3113 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(ctx->dev, ctx->flc_dma[UPDATE_FIRST],
ctx              3120 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[FINALIZE];
ctx              3122 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_FINALIZE, digestsize,
ctx              3123 drivers/crypto/caam/caamalg_qi2.c 			  ctx->ctx_len, true, priv->sec_attr.era);
ctx              3125 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(ctx->dev, ctx->flc_dma[FINALIZE],
ctx              3132 drivers/crypto/caam/caamalg_qi2.c 	flc = &ctx->flc[DIGEST];
ctx              3134 drivers/crypto/caam/caamalg_qi2.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_INITFINAL, digestsize,
ctx              3135 drivers/crypto/caam/caamalg_qi2.c 			  ctx->ctx_len, false, priv->sec_attr.era);
ctx              3137 drivers/crypto/caam/caamalg_qi2.c 	dma_sync_single_for_device(ctx->dev, ctx->flc_dma[DIGEST],
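
ahash_set_sh_desc() above builds one descriptor per hash phase, and the boolean sixth argument of each cnstr_shdsc_ahash() call says whether the job imports a saved running context. A sketch of that mapping; the enum ordering is an assumption about the flc[] indices, and the OP_ALG_AS_* values come from the CAAM desc.h:

/*
 *   index         algorithm state       import_ctx
 *   UPDATE        OP_ALG_AS_UPDATE      true   (resume saved ctx, save again)
 *   UPDATE_FIRST  OP_ALG_AS_INIT        false  (fresh start, save ctx)
 *   FINALIZE      OP_ALG_AS_FINALIZE    true   (saved ctx -> digest)
 *   DIGEST        OP_ALG_AS_INITFINAL   false  (one-shot digest)
 */
enum hash_op { UPDATE, UPDATE_FIRST, FINALIZE, DIGEST, HASH_NUM_OP };
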
ctx              3163 drivers/crypto/caam/caamalg_qi2.c static int hash_digest_key(struct caam_hash_ctx *ctx, u32 *keylen, u8 *key,
ctx              3186 drivers/crypto/caam/caamalg_qi2.c 	key_dma = dma_map_single(ctx->dev, key, *keylen, DMA_BIDIRECTIONAL);
ctx              3187 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, key_dma)) {
ctx              3188 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map key memory\n");
ctx              3197 drivers/crypto/caam/caamalg_qi2.c 	append_operation(desc, ctx->adata.algtype | OP_ALG_ENCRYPT |
ctx              3205 drivers/crypto/caam/caamalg_qi2.c 	flc_dma = dma_map_single(ctx->dev, flc, sizeof(flc->flc) +
ctx              3207 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, flc_dma)) {
ctx              3208 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map shared descriptor\n");
ctx              3228 drivers/crypto/caam/caamalg_qi2.c 	result.dev = ctx->dev;
ctx              3233 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &result;
ctx              3235 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3245 drivers/crypto/caam/caamalg_qi2.c 	dma_unmap_single(ctx->dev, flc_dma, sizeof(flc->flc) + desc_bytes(desc),
ctx              3248 drivers/crypto/caam/caamalg_qi2.c 	dma_unmap_single(ctx->dev, key_dma, *keylen, DMA_BIDIRECTIONAL);
ctx              3262 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3268 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "keylen %d blocksize %d\n", keylen, blocksize);
ctx              3274 drivers/crypto/caam/caamalg_qi2.c 		ret = hash_digest_key(ctx, &keylen, hashed_key, digestsize);
ctx              3280 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.keylen = keylen;
ctx              3281 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
ctx              3283 drivers/crypto/caam/caamalg_qi2.c 	if (ctx->adata.keylen_pad > CAAM_MAX_HASH_KEY_SIZE)
ctx              3286 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_virt = key;
ctx              3287 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.key_inline = true;
ctx              3295 drivers/crypto/caam/caamalg_qi2.c 	if (keylen > ctx->adata.keylen_pad) {
ctx              3296 drivers/crypto/caam/caamalg_qi2.c 		memcpy(ctx->key, key, keylen);
ctx              3297 drivers/crypto/caam/caamalg_qi2.c 		dma_sync_single_for_device(ctx->dev, ctx->adata.key_dma,
ctx              3298 drivers/crypto/caam/caamalg_qi2.c 					   ctx->adata.keylen_pad,
ctx              3350 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3354 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              3357 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              3359 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
ctx              3365 drivers/crypto/caam/caamalg_qi2.c 			     ctx->ctx_len, 1);
ctx              3377 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3380 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              3383 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              3385 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
ctx              3391 drivers/crypto/caam/caamalg_qi2.c 			     ctx->ctx_len, 1);
ctx              3407 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3411 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              3414 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              3416 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
ctx              3422 drivers/crypto/caam/caamalg_qi2.c 			     ctx->ctx_len, 1);
ctx              3434 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3437 drivers/crypto/caam/caamalg_qi2.c 	dev_dbg(ctx->dev, "%s %d: err 0x%x\n", __func__, __LINE__, status);
ctx              3440 drivers/crypto/caam/caamalg_qi2.c 		ecode = caam_qi2_strstatus(ctx->dev, status);
ctx              3442 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
ctx              3448 drivers/crypto/caam/caamalg_qi2.c 			     ctx->ctx_len, 1);
ctx              3460 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3486 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              3491 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              3494 drivers/crypto/caam/caamalg_qi2.c 				dev_err(ctx->dev, "unable to DMA map source\n");
ctx              3504 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
ctx              3515 drivers/crypto/caam/caamalg_qi2.c 		ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table,
ctx              3520 drivers/crypto/caam/caamalg_qi2.c 		ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state);
ctx              3536 drivers/crypto/caam/caamalg_qi2.c 		edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
ctx              3538 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              3539 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map S/G table\n");
ctx              3549 drivers/crypto/caam/caamalg_qi2.c 		dpaa2_fl_set_len(in_fle, ctx->ctx_len + to_hash);
ctx              3552 drivers/crypto/caam/caamalg_qi2.c 		dpaa2_fl_set_len(out_fle, ctx->ctx_len);
ctx              3554 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc = &ctx->flc[UPDATE];
ctx              3555 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc_dma = ctx->flc_dma[UPDATE];
ctx              3557 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->ctx = &req->base;
ctx              3560 drivers/crypto/caam/caamalg_qi2.c 		ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3580 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
ctx              3588 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3610 drivers/crypto/caam/caamalg_qi2.c 	ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table,
ctx              3615 drivers/crypto/caam/caamalg_qi2.c 	ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state);
ctx              3621 drivers/crypto/caam/caamalg_qi2.c 	edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
ctx              3623 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              3624 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map S/G table\n");
ctx              3634 drivers/crypto/caam/caamalg_qi2.c 	dpaa2_fl_set_len(in_fle, ctx->ctx_len + buflen);
ctx              3639 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc = &ctx->flc[FINALIZE];
ctx              3640 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc_dma = ctx->flc_dma[FINALIZE];
ctx              3642 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &req->base;
ctx              3645 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3651 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
ctx              3659 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3676 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              3681 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              3684 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to DMA map source\n");
ctx              3694 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
ctx              3704 drivers/crypto/caam/caamalg_qi2.c 	ret = ctx_map_to_qm_sg(ctx->dev, state, ctx->ctx_len, sg_table,
ctx              3709 drivers/crypto/caam/caamalg_qi2.c 	ret = buf_map_to_qm_sg(ctx->dev, sg_table + 1, state);
ctx              3715 drivers/crypto/caam/caamalg_qi2.c 	edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
ctx              3717 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              3718 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map S/G table\n");
ctx              3728 drivers/crypto/caam/caamalg_qi2.c 	dpaa2_fl_set_len(in_fle, ctx->ctx_len + buflen + req->nbytes);
ctx              3733 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc = &ctx->flc[FINALIZE];
ctx              3734 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc_dma = ctx->flc_dma[FINALIZE];
ctx              3736 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &req->base;
ctx              3739 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3745 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_BIDIRECTIONAL);
ctx              3753 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3769 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              3774 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              3777 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map source for DMA\n");
ctx              3787 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
ctx              3800 drivers/crypto/caam/caamalg_qi2.c 		edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
ctx              3802 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              3803 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map S/G table\n");
ctx              3815 drivers/crypto/caam/caamalg_qi2.c 	state->ctx_dma = dma_map_single(ctx->dev, state->caam_ctx, digestsize,
ctx              3817 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, state->ctx_dma)) {
ctx              3818 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map ctx\n");
ctx              3829 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc = &ctx->flc[DIGEST];
ctx              3830 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc_dma = ctx->flc_dma[DIGEST];
ctx              3832 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &req->base;
ctx              3834 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3840 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
ctx              3848 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3867 drivers/crypto/caam/caamalg_qi2.c 		state->buf_dma = dma_map_single(ctx->dev, buf, buflen,
ctx              3869 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, state->buf_dma)) {
ctx              3870 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map src\n");
ctx              3876 drivers/crypto/caam/caamalg_qi2.c 	state->ctx_dma = dma_map_single(ctx->dev, state->caam_ctx, digestsize,
ctx              3878 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, state->ctx_dma)) {
ctx              3879 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map ctx\n");
ctx              3901 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc = &ctx->flc[DIGEST];
ctx              3902 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc_dma = ctx->flc_dma[DIGEST];
ctx              3904 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &req->base;
ctx              3907 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              3913 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
ctx              3921 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              3946 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              3951 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              3954 drivers/crypto/caam/caamalg_qi2.c 				dev_err(ctx->dev, "unable to DMA map source\n");
ctx              3964 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
ctx              3974 drivers/crypto/caam/caamalg_qi2.c 		ret = buf_map_to_qm_sg(ctx->dev, sg_table, state);
ctx              3985 drivers/crypto/caam/caamalg_qi2.c 		edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
ctx              3987 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              3988 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map S/G table\n");
ctx              3994 drivers/crypto/caam/caamalg_qi2.c 		state->ctx_dma_len = ctx->ctx_len;
ctx              3995 drivers/crypto/caam/caamalg_qi2.c 		state->ctx_dma = dma_map_single(ctx->dev, state->caam_ctx,
ctx              3996 drivers/crypto/caam/caamalg_qi2.c 						ctx->ctx_len, DMA_FROM_DEVICE);
ctx              3997 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, state->ctx_dma)) {
ctx              3998 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map ctx\n");
ctx              4011 drivers/crypto/caam/caamalg_qi2.c 		dpaa2_fl_set_len(out_fle, ctx->ctx_len);
ctx              4013 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc = &ctx->flc[UPDATE_FIRST];
ctx              4014 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc_dma = ctx->flc_dma[UPDATE_FIRST];
ctx              4016 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->ctx = &req->base;
ctx              4019 drivers/crypto/caam/caamalg_qi2.c 		ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              4043 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE);
ctx              4051 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              4067 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              4072 drivers/crypto/caam/caamalg_qi2.c 		mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              4075 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to DMA map source\n");
ctx              4085 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_sg(ctx->dev, req->src, src_nents, DMA_TO_DEVICE);
ctx              4093 drivers/crypto/caam/caamalg_qi2.c 	ret = buf_map_to_qm_sg(ctx->dev, sg_table, state);
ctx              4099 drivers/crypto/caam/caamalg_qi2.c 	edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table, qm_sg_bytes,
ctx              4101 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              4102 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map S/G table\n");
ctx              4109 drivers/crypto/caam/caamalg_qi2.c 	state->ctx_dma = dma_map_single(ctx->dev, state->caam_ctx, digestsize,
ctx              4111 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, state->ctx_dma)) {
ctx              4112 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map ctx\n");
ctx              4127 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc = &ctx->flc[DIGEST];
ctx              4128 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->flc_dma = ctx->flc_dma[DIGEST];
ctx              4130 drivers/crypto/caam/caamalg_qi2.c 	req_ctx->ctx = &req->base;
ctx              4132 drivers/crypto/caam/caamalg_qi2.c 	ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              4139 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_FROM_DEVICE);
ctx              4147 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              4171 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              4176 drivers/crypto/caam/caamalg_qi2.c 			mapped_nents = dma_map_sg(ctx->dev, req->src, src_nents,
ctx              4179 drivers/crypto/caam/caamalg_qi2.c 				dev_err(ctx->dev, "unable to map source for DMA\n");
ctx              4189 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_sg(ctx->dev, req->src, src_nents,
ctx              4207 drivers/crypto/caam/caamalg_qi2.c 			edesc->qm_sg_dma = dma_map_single(ctx->dev, sg_table,
ctx              4210 drivers/crypto/caam/caamalg_qi2.c 			if (dma_mapping_error(ctx->dev, edesc->qm_sg_dma)) {
ctx              4211 drivers/crypto/caam/caamalg_qi2.c 				dev_err(ctx->dev, "unable to map S/G table\n");
ctx              4227 drivers/crypto/caam/caamalg_qi2.c 		state->ctx_dma_len = ctx->ctx_len;
ctx              4228 drivers/crypto/caam/caamalg_qi2.c 		state->ctx_dma = dma_map_single(ctx->dev, state->caam_ctx,
ctx              4229 drivers/crypto/caam/caamalg_qi2.c 						ctx->ctx_len, DMA_FROM_DEVICE);
ctx              4230 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, state->ctx_dma)) {
ctx              4231 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map ctx\n");
ctx              4239 drivers/crypto/caam/caamalg_qi2.c 		dpaa2_fl_set_len(out_fle, ctx->ctx_len);
ctx              4241 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc = &ctx->flc[UPDATE_FIRST];
ctx              4242 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->flc_dma = ctx->flc_dma[UPDATE_FIRST];
ctx              4244 drivers/crypto/caam/caamalg_qi2.c 		req_ctx->ctx = &req->base;
ctx              4247 drivers/crypto/caam/caamalg_qi2.c 		ret = dpaa2_caam_enqueue(ctx->dev, req_ctx);
ctx              4271 drivers/crypto/caam/caamalg_qi2.c 	ahash_unmap_ctx(ctx->dev, edesc, req, DMA_TO_DEVICE);
ctx              4519 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              4530 drivers/crypto/caam/caamalg_qi2.c 	ctx->dev = caam_hash->dev;
ctx              4533 drivers/crypto/caam/caamalg_qi2.c 		ctx->adata.key_dma = dma_map_single_attrs(ctx->dev, ctx->key,
ctx              4534 drivers/crypto/caam/caamalg_qi2.c 							  ARRAY_SIZE(ctx->key),
ctx              4537 drivers/crypto/caam/caamalg_qi2.c 		if (dma_mapping_error(ctx->dev, ctx->adata.key_dma)) {
ctx              4538 drivers/crypto/caam/caamalg_qi2.c 			dev_err(ctx->dev, "unable to map key\n");
ctx              4543 drivers/crypto/caam/caamalg_qi2.c 	dma_addr = dma_map_single_attrs(ctx->dev, ctx->flc, sizeof(ctx->flc),
ctx              4546 drivers/crypto/caam/caamalg_qi2.c 	if (dma_mapping_error(ctx->dev, dma_addr)) {
ctx              4547 drivers/crypto/caam/caamalg_qi2.c 		dev_err(ctx->dev, "unable to map shared descriptors\n");
ctx              4548 drivers/crypto/caam/caamalg_qi2.c 		if (ctx->adata.key_dma)
ctx              4549 drivers/crypto/caam/caamalg_qi2.c 			dma_unmap_single_attrs(ctx->dev, ctx->adata.key_dma,
ctx              4550 drivers/crypto/caam/caamalg_qi2.c 					       ARRAY_SIZE(ctx->key),
ctx              4557 drivers/crypto/caam/caamalg_qi2.c 		ctx->flc_dma[i] = dma_addr + i * sizeof(ctx->flc[i]);
ctx              4560 drivers/crypto/caam/caamalg_qi2.c 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam_hash->alg_type;
ctx              4562 drivers/crypto/caam/caamalg_qi2.c 	ctx->ctx_len = runninglen[(ctx->adata.algtype &
ctx              4574 drivers/crypto/caam/caamalg_qi2.c 	struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              4576 drivers/crypto/caam/caamalg_qi2.c 	dma_unmap_single_attrs(ctx->dev, ctx->flc_dma[0], sizeof(ctx->flc),
ctx              4578 drivers/crypto/caam/caamalg_qi2.c 	if (ctx->adata.key_dma)
ctx              4579 drivers/crypto/caam/caamalg_qi2.c 		dma_unmap_single_attrs(ctx->dev, ctx->adata.key_dma,
ctx              4580 drivers/crypto/caam/caamalg_qi2.c 				       ARRAY_SIZE(ctx->key), DMA_TO_DEVICE,
ctx              4795 drivers/crypto/caam/caamalg_qi2.c 	req->cbk(req->ctx, dpaa2_fd_get_frc(fd));
ctx               186 drivers/crypto/caam/caamalg_qi2.h 	void (*cbk)(void *ctx, u32 err);
ctx               187 drivers/crypto/caam/caamalg_qi2.h 	void *ctx;
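
The caamalg_qi2.h fragment above declares the request's completion hook: a cbk(void *ctx, u32 err) pointer plus an opaque ctx, which the dequeue path invokes with the frame-descriptor status word verbatim (req->cbk(req->ctx, dpaa2_fd_get_frc(fd)) earlier in this listing). A minimal sketch with illustrative names:

#include <linux/types.h>

struct my_req {
	void (*cbk)(void *ctx, u32 err);
	void *ctx;		/* typically the crypto_async_request */
};

static void complete_req(struct my_req *req, u32 status)
{
	/* hardware status is forwarded unmodified to the callback */
	req->cbk(req->ctx, status);
}
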
ctx               224 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               226 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               230 drivers/crypto/caam/caamhash.c 	ctx->adata.key_virt = ctx->key;
ctx               233 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update;
ctx               234 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_UPDATE, ctx->ctx_len,
ctx               235 drivers/crypto/caam/caamhash.c 			  ctx->ctx_len, true, ctrlpriv->era);
ctx               236 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
ctx               237 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               244 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update_first;
ctx               245 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_INIT, ctx->ctx_len,
ctx               246 drivers/crypto/caam/caamhash.c 			  ctx->ctx_len, false, ctrlpriv->era);
ctx               247 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
ctx               248 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               254 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_fin;
ctx               255 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_FINALIZE, digestsize,
ctx               256 drivers/crypto/caam/caamhash.c 			  ctx->ctx_len, true, ctrlpriv->era);
ctx               257 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
ctx               258 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               265 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_digest;
ctx               266 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_ahash(desc, &ctx->adata, OP_ALG_AS_INITFINAL, digestsize,
ctx               267 drivers/crypto/caam/caamhash.c 			  ctx->ctx_len, false, ctrlpriv->era);
ctx               268 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
ctx               269 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               280 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               282 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               286 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update;
ctx               287 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_UPDATE,
ctx               288 drivers/crypto/caam/caamhash.c 			    ctx->ctx_len, ctx->ctx_len);
ctx               289 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
ctx               290 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               296 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_fin;
ctx               297 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_FINALIZE,
ctx               298 drivers/crypto/caam/caamhash.c 			    digestsize, ctx->ctx_len);
ctx               299 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
ctx               300 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               306 drivers/crypto/caam/caamhash.c 	ctx->adata.key_virt = ctx->key;
ctx               309 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update_first;
ctx               310 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_INIT, ctx->ctx_len,
ctx               311 drivers/crypto/caam/caamhash.c 			    ctx->ctx_len);
ctx               312 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
ctx               313 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               319 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_digest;
ctx               320 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_INITFINAL,
ctx               321 drivers/crypto/caam/caamhash.c 			    digestsize, ctx->ctx_len);
ctx               322 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
ctx               323 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               332 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               334 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               338 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update;
ctx               339 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_UPDATE,
ctx               340 drivers/crypto/caam/caamhash.c 			    ctx->ctx_len, ctx->ctx_len);
ctx               341 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_dma,
ctx               342 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               348 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_fin;
ctx               349 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_FINALIZE,
ctx               350 drivers/crypto/caam/caamhash.c 			    digestsize, ctx->ctx_len);
ctx               351 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_fin_dma,
ctx               352 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               358 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_update_first;
ctx               359 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_INIT, ctx->ctx_len,
ctx               360 drivers/crypto/caam/caamhash.c 			    ctx->ctx_len);
ctx               361 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_update_first_dma,
ctx               362 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               368 drivers/crypto/caam/caamhash.c 	desc = ctx->sh_desc_digest;
ctx               369 drivers/crypto/caam/caamhash.c 	cnstr_shdsc_sk_hash(desc, &ctx->adata, OP_ALG_AS_INITFINAL,
ctx               370 drivers/crypto/caam/caamhash.c 			    digestsize, ctx->ctx_len);
ctx               371 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->sh_desc_digest_dma,
ctx               372 drivers/crypto/caam/caamhash.c 				   desc_bytes(desc), ctx->dir);
ctx               381 drivers/crypto/caam/caamhash.c static int hash_digest_key(struct caam_hash_ctx *ctx, u32 *keylen, u8 *key,
ctx               384 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               406 drivers/crypto/caam/caamhash.c 	append_operation(desc, ctx->adata.algtype | OP_ALG_ENCRYPT |
ctx               446 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               447 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               450 drivers/crypto/caam/caamhash.c 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
ctx               460 drivers/crypto/caam/caamhash.c 		ret = hash_digest_key(ctx, &keylen, hashed_key, digestsize);
ctx               471 drivers/crypto/caam/caamhash.c 		ctx->adata.key_inline = true;
ctx               472 drivers/crypto/caam/caamhash.c 		ctx->adata.keylen = keylen;
ctx               473 drivers/crypto/caam/caamhash.c 		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
ctx               476 drivers/crypto/caam/caamhash.c 		if (ctx->adata.keylen_pad > CAAM_MAX_HASH_KEY_SIZE)
ctx               479 drivers/crypto/caam/caamhash.c 		memcpy(ctx->key, key, keylen);
ctx               487 drivers/crypto/caam/caamhash.c 		if (keylen > ctx->adata.keylen_pad)
ctx               488 drivers/crypto/caam/caamhash.c 			dma_sync_single_for_device(ctx->jrdev,
ctx               489 drivers/crypto/caam/caamhash.c 						   ctx->adata.key_dma,
ctx               490 drivers/crypto/caam/caamhash.c 						   ctx->adata.keylen_pad,
ctx               493 drivers/crypto/caam/caamhash.c 		ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, key,
ctx               510 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               511 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               518 drivers/crypto/caam/caamhash.c 	memcpy(ctx->key, key, keylen);
ctx               519 drivers/crypto/caam/caamhash.c 	dma_sync_single_for_device(jrdev, ctx->adata.key_dma, keylen,
ctx               521 drivers/crypto/caam/caamhash.c 	ctx->adata.keylen = keylen;
ctx               524 drivers/crypto/caam/caamhash.c 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, keylen, 1);
ctx               532 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               542 drivers/crypto/caam/caamhash.c 	ctx->adata.key_virt = key;
ctx               543 drivers/crypto/caam/caamhash.c 	ctx->adata.keylen = keylen;
ctx               608 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               623 drivers/crypto/caam/caamhash.c 			     ctx->ctx_len, 1);
ctx               634 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               645 drivers/crypto/caam/caamhash.c 	ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
ctx               651 drivers/crypto/caam/caamhash.c 			     ctx->ctx_len, 1);
ctx               668 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               683 drivers/crypto/caam/caamhash.c 			     ctx->ctx_len, 1);
ctx               694 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               705 drivers/crypto/caam/caamhash.c 	ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_FROM_DEVICE);
ctx               711 drivers/crypto/caam/caamhash.c 			     ctx->ctx_len, 1);
ctx               724 drivers/crypto/caam/caamhash.c static struct ahash_edesc *ahash_edesc_alloc(struct caam_hash_ctx *ctx,
ctx               734 drivers/crypto/caam/caamhash.c 		dev_err(ctx->jrdev, "could not allocate extended descriptor\n");
ctx               744 drivers/crypto/caam/caamhash.c static int ahash_edesc_add_src(struct caam_hash_ctx *ctx,
ctx               760 drivers/crypto/caam/caamhash.c 		src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE);
ctx               761 drivers/crypto/caam/caamhash.c 		if (dma_mapping_error(ctx->jrdev, src_dma)) {
ctx               762 drivers/crypto/caam/caamhash.c 			dev_err(ctx->jrdev, "unable to map S/G table\n");
ctx               784 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               786 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               808 drivers/crypto/caam/caamhash.c 	if ((is_xcbc_aes(ctx->adata.algtype) ||
ctx               809 drivers/crypto/caam/caamhash.c 	     is_cmac_aes(ctx->adata.algtype)) && to_hash >= blocksize &&
ctx               844 drivers/crypto/caam/caamhash.c 		edesc = ahash_edesc_alloc(ctx, pad_nents, ctx->sh_desc_update,
ctx               845 drivers/crypto/caam/caamhash.c 					  ctx->sh_desc_update_dma, flags);
ctx               854 drivers/crypto/caam/caamhash.c 		ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
ctx               886 drivers/crypto/caam/caamhash.c 		append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len +
ctx               889 drivers/crypto/caam/caamhash.c 		append_seq_out_ptr(desc, state->ctx_dma, ctx->ctx_len, 0);
ctx               915 drivers/crypto/caam/caamhash.c 	ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_BIDIRECTIONAL);
ctx               923 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               925 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx               939 drivers/crypto/caam/caamhash.c 	edesc = ahash_edesc_alloc(ctx, 4, ctx->sh_desc_fin,
ctx               940 drivers/crypto/caam/caamhash.c 				  ctx->sh_desc_fin_dma, flags);
ctx               948 drivers/crypto/caam/caamhash.c 	ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
ctx               967 drivers/crypto/caam/caamhash.c 	append_seq_in_ptr(desc, edesc->sec4_sg_dma, ctx->ctx_len + buflen,
ctx               989 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               991 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1022 drivers/crypto/caam/caamhash.c 	edesc = ahash_edesc_alloc(ctx, sec4_sg_src_index + mapped_nents,
ctx              1023 drivers/crypto/caam/caamhash.c 				  ctx->sh_desc_fin, ctx->sh_desc_fin_dma,
ctx              1034 drivers/crypto/caam/caamhash.c 	ret = ctx_map_to_sec4_sg(jrdev, state, ctx->ctx_len,
ctx              1043 drivers/crypto/caam/caamhash.c 	ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents,
ctx              1044 drivers/crypto/caam/caamhash.c 				  sec4_sg_src_index, ctx->ctx_len + buflen,
ctx              1069 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1071 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1100 drivers/crypto/caam/caamhash.c 	edesc = ahash_edesc_alloc(ctx, mapped_nents > 1 ? mapped_nents : 0,
ctx              1101 drivers/crypto/caam/caamhash.c 				  ctx->sh_desc_digest, ctx->sh_desc_digest_dma,
ctx              1110 drivers/crypto/caam/caamhash.c 	ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 0, 0,
ctx              1146 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1148 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1159 drivers/crypto/caam/caamhash.c 	edesc = ahash_edesc_alloc(ctx, 0, ctx->sh_desc_digest,
ctx              1160 drivers/crypto/caam/caamhash.c 				  ctx->sh_desc_digest_dma, flags);
ctx              1205 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1207 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1228 drivers/crypto/caam/caamhash.c 	if ((is_xcbc_aes(ctx->adata.algtype) ||
ctx              1229 drivers/crypto/caam/caamhash.c 	     is_cmac_aes(ctx->adata.algtype)) && to_hash >= blocksize &&
ctx              1263 drivers/crypto/caam/caamhash.c 		edesc = ahash_edesc_alloc(ctx, pad_nents,
ctx              1264 drivers/crypto/caam/caamhash.c 					  ctx->sh_desc_update_first,
ctx              1265 drivers/crypto/caam/caamhash.c 					  ctx->sh_desc_update_first_dma,
ctx              1300 drivers/crypto/caam/caamhash.c 		ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len);
ctx              1331 drivers/crypto/caam/caamhash.c 	ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
ctx              1340 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1342 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1374 drivers/crypto/caam/caamhash.c 	edesc = ahash_edesc_alloc(ctx, sec4_sg_src_index + mapped_nents,
ctx              1375 drivers/crypto/caam/caamhash.c 				  ctx->sh_desc_digest, ctx->sh_desc_digest_dma,
ctx              1391 drivers/crypto/caam/caamhash.c 	ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 1, buflen,
ctx              1426 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1428 drivers/crypto/caam/caamhash.c 	struct device *jrdev = ctx->jrdev;
ctx              1447 drivers/crypto/caam/caamhash.c 	if ((is_xcbc_aes(ctx->adata.algtype) ||
ctx              1448 drivers/crypto/caam/caamhash.c 	     is_cmac_aes(ctx->adata.algtype)) && to_hash >= blocksize &&
ctx              1477 drivers/crypto/caam/caamhash.c 		edesc = ahash_edesc_alloc(ctx, mapped_nents > 1 ?
ctx              1479 drivers/crypto/caam/caamhash.c 					  ctx->sh_desc_update_first,
ctx              1480 drivers/crypto/caam/caamhash.c 					  ctx->sh_desc_update_first_dma,
ctx              1489 drivers/crypto/caam/caamhash.c 		ret = ahash_edesc_add_src(ctx, edesc, req, mapped_nents, 0, 0,
ctx              1500 drivers/crypto/caam/caamhash.c 		ret = map_seq_out_ptr_ctx(desc, jrdev, state, ctx->ctx_len);
ctx              1531 drivers/crypto/caam/caamhash.c 	ahash_unmap_ctx(jrdev, edesc, req, ctx->ctx_len, DMA_TO_DEVICE);
ctx              1816 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1831 drivers/crypto/caam/caamhash.c 	ctx->jrdev = caam_jr_alloc();
ctx              1832 drivers/crypto/caam/caamhash.c 	if (IS_ERR(ctx->jrdev)) {
ctx              1834 drivers/crypto/caam/caamhash.c 		return PTR_ERR(ctx->jrdev);
ctx              1837 drivers/crypto/caam/caamhash.c 	priv = dev_get_drvdata(ctx->jrdev->parent);
ctx              1840 drivers/crypto/caam/caamhash.c 		ctx->dir = DMA_TO_DEVICE;
ctx              1841 drivers/crypto/caam/caamhash.c 		ctx->key_dir = DMA_BIDIRECTIONAL;
ctx              1842 drivers/crypto/caam/caamhash.c 		ctx->adata.algtype = OP_TYPE_CLASS1_ALG | caam_hash->alg_type;
ctx              1843 drivers/crypto/caam/caamhash.c 		ctx->ctx_len = 48;
ctx              1845 drivers/crypto/caam/caamhash.c 		ctx->dir = DMA_TO_DEVICE;
ctx              1846 drivers/crypto/caam/caamhash.c 		ctx->key_dir = DMA_NONE;
ctx              1847 drivers/crypto/caam/caamhash.c 		ctx->adata.algtype = OP_TYPE_CLASS1_ALG | caam_hash->alg_type;
ctx              1848 drivers/crypto/caam/caamhash.c 		ctx->ctx_len = 32;
ctx              1851 drivers/crypto/caam/caamhash.c 			ctx->dir = DMA_BIDIRECTIONAL;
ctx              1852 drivers/crypto/caam/caamhash.c 			ctx->key_dir = alg->setkey ? DMA_TO_DEVICE : DMA_NONE;
ctx              1854 drivers/crypto/caam/caamhash.c 			ctx->dir = DMA_TO_DEVICE;
ctx              1855 drivers/crypto/caam/caamhash.c 			ctx->key_dir = DMA_NONE;
ctx              1857 drivers/crypto/caam/caamhash.c 		ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam_hash->alg_type;
ctx              1858 drivers/crypto/caam/caamhash.c 		ctx->ctx_len = runninglen[(ctx->adata.algtype &
ctx              1863 drivers/crypto/caam/caamhash.c 	if (ctx->key_dir != DMA_NONE) {
ctx              1864 drivers/crypto/caam/caamhash.c 		ctx->adata.key_dma = dma_map_single_attrs(ctx->jrdev, ctx->key,
ctx              1865 drivers/crypto/caam/caamhash.c 							  ARRAY_SIZE(ctx->key),
ctx              1866 drivers/crypto/caam/caamhash.c 							  ctx->key_dir,
ctx              1868 drivers/crypto/caam/caamhash.c 		if (dma_mapping_error(ctx->jrdev, ctx->adata.key_dma)) {
ctx              1869 drivers/crypto/caam/caamhash.c 			dev_err(ctx->jrdev, "unable to map key\n");
ctx              1870 drivers/crypto/caam/caamhash.c 			caam_jr_free(ctx->jrdev);
ctx              1875 drivers/crypto/caam/caamhash.c 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_update,
ctx              1877 drivers/crypto/caam/caamhash.c 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
ctx              1878 drivers/crypto/caam/caamhash.c 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
ctx              1879 drivers/crypto/caam/caamhash.c 		dev_err(ctx->jrdev, "unable to map shared descriptors\n");
ctx              1881 drivers/crypto/caam/caamhash.c 		if (ctx->key_dir != DMA_NONE)
ctx              1882 drivers/crypto/caam/caamhash.c 			dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma,
ctx              1883 drivers/crypto/caam/caamhash.c 					       ARRAY_SIZE(ctx->key),
ctx              1884 drivers/crypto/caam/caamhash.c 					       ctx->key_dir,
ctx              1887 drivers/crypto/caam/caamhash.c 		caam_jr_free(ctx->jrdev);
ctx              1891 drivers/crypto/caam/caamhash.c 	ctx->sh_desc_update_dma = dma_addr;
ctx              1892 drivers/crypto/caam/caamhash.c 	ctx->sh_desc_update_first_dma = dma_addr +
ctx              1895 drivers/crypto/caam/caamhash.c 	ctx->sh_desc_fin_dma = dma_addr + offsetof(struct caam_hash_ctx,
ctx              1897 drivers/crypto/caam/caamhash.c 	ctx->sh_desc_digest_dma = dma_addr + offsetof(struct caam_hash_ctx,
ctx              1912 drivers/crypto/caam/caamhash.c 	struct caam_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1914 drivers/crypto/caam/caamhash.c 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_update_dma,
ctx              1916 drivers/crypto/caam/caamhash.c 			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
ctx              1917 drivers/crypto/caam/caamhash.c 	if (ctx->key_dir != DMA_NONE)
ctx              1918 drivers/crypto/caam/caamhash.c 		dma_unmap_single_attrs(ctx->jrdev, ctx->adata.key_dma,
ctx              1919 drivers/crypto/caam/caamhash.c 				       ARRAY_SIZE(ctx->key), ctx->key_dir,
ctx              1921 drivers/crypto/caam/caamhash.c 	caam_jr_free(ctx->jrdev);
ctx                60 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                61 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx                72 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                73 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx                84 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                85 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               101 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               102 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               247 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               248 drivers/crypto/caam/caampkc.c 	struct device *dev = ctx->dev;
ctx               250 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               320 drivers/crypto/caam/caampkc.c 		dma_to_sec4_sg_one(edesc->sec4_sg, ctx->padding_dma, diff_size,
ctx               367 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               368 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               369 drivers/crypto/caam/caampkc.c 	struct device *dev = ctx->dev;
ctx               412 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               413 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               414 drivers/crypto/caam/caampkc.c 	struct device *dev = ctx->dev;
ctx               458 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               459 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               460 drivers/crypto/caam/caampkc.c 	struct device *dev = ctx->dev;
ctx               535 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               536 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               537 drivers/crypto/caam/caampkc.c 	struct device *dev = ctx->dev;
ctx               627 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               628 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               629 drivers/crypto/caam/caampkc.c 	struct device *jrdev = ctx->dev;
ctx               670 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               671 drivers/crypto/caam/caampkc.c 	struct device *jrdev = ctx->dev;
ctx               703 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               704 drivers/crypto/caam/caampkc.c 	struct device *jrdev = ctx->dev;
ctx               736 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               737 drivers/crypto/caam/caampkc.c 	struct device *jrdev = ctx->dev;
ctx               769 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               770 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx               778 drivers/crypto/caam/caampkc.c 		dev_err(ctx->dev, "Output buffer length less than parameter n\n");
ctx               872 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               874 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *rsa_key = &ctx->key;
ctx               913 drivers/crypto/caam/caampkc.c static void caam_rsa_set_priv_key_form(struct caam_rsa_ctx *ctx,
ctx               916 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *rsa_key = &ctx->key;
ctx               974 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               976 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *rsa_key = &ctx->key;
ctx              1014 drivers/crypto/caam/caampkc.c 	caam_rsa_set_priv_key_form(ctx, &raw_key);
ctx              1025 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1027 drivers/crypto/caam/caampkc.c 	return ctx->key.n_sz;
ctx              1033 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1035 drivers/crypto/caam/caampkc.c 	ctx->dev = caam_jr_alloc();
ctx              1037 drivers/crypto/caam/caampkc.c 	if (IS_ERR(ctx->dev)) {
ctx              1039 drivers/crypto/caam/caampkc.c 		return PTR_ERR(ctx->dev);
ctx              1042 drivers/crypto/caam/caampkc.c 	ctx->padding_dma = dma_map_single(ctx->dev, zero_buffer,
ctx              1045 drivers/crypto/caam/caampkc.c 	if (dma_mapping_error(ctx->dev, ctx->padding_dma)) {
ctx              1046 drivers/crypto/caam/caampkc.c 		dev_err(ctx->dev, "unable to map padding\n");
ctx              1047 drivers/crypto/caam/caampkc.c 		caam_jr_free(ctx->dev);
ctx              1057 drivers/crypto/caam/caampkc.c 	struct caam_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1058 drivers/crypto/caam/caampkc.c 	struct caam_rsa_key *key = &ctx->key;
ctx              1060 drivers/crypto/caam/caampkc.c 	dma_unmap_single(ctx->dev, ctx->padding_dma, CAAM_RSA_MAX_INPUT_SIZE -
ctx              1063 drivers/crypto/caam/caampkc.c 	caam_jr_free(ctx->dev);
ctx                96 drivers/crypto/caam/caamrng.c static inline void rng_unmap_ctx(struct caam_rng_ctx *ctx)
ctx                98 drivers/crypto/caam/caamrng.c 	struct device *jrdev = ctx->jrdev;
ctx               100 drivers/crypto/caam/caamrng.c 	if (ctx->sh_desc_dma)
ctx               101 drivers/crypto/caam/caamrng.c 		dma_unmap_single(jrdev, ctx->sh_desc_dma,
ctx               102 drivers/crypto/caam/caamrng.c 				 desc_bytes(ctx->sh_desc), DMA_TO_DEVICE);
ctx               103 drivers/crypto/caam/caamrng.c 	rng_unmap_buf(jrdev, &ctx->bufs[0]);
ctx               104 drivers/crypto/caam/caamrng.c 	rng_unmap_buf(jrdev, &ctx->bufs[1]);
ctx               126 drivers/crypto/caam/caamrng.c static inline int submit_job(struct caam_rng_ctx *ctx, int to_current)
ctx               128 drivers/crypto/caam/caamrng.c 	struct buf_data *bd = &ctx->bufs[!(to_current ^ ctx->current_buf)];
ctx               129 drivers/crypto/caam/caamrng.c 	struct device *jrdev = ctx->jrdev;
ctx               133 drivers/crypto/caam/caamrng.c 	dev_dbg(jrdev, "submitting job %d\n", !(to_current ^ ctx->current_buf));
ctx               135 drivers/crypto/caam/caamrng.c 	err = caam_jr_enqueue(jrdev, desc, rng_done, ctx);
ctx               146 drivers/crypto/caam/caamrng.c 	struct caam_rng_ctx *ctx = rng_ctx;
ctx               147 drivers/crypto/caam/caamrng.c 	struct buf_data *bd = &ctx->bufs[ctx->current_buf];
ctx               154 drivers/crypto/caam/caamrng.c 			err = submit_job(ctx, 1);
ctx               168 drivers/crypto/caam/caamrng.c 	next_buf_idx = ctx->cur_buf_idx + max;
ctx               169 drivers/crypto/caam/caamrng.c 	dev_dbg(ctx->jrdev, "%s: start reading at buffer %d, idx %d\n",
ctx               170 drivers/crypto/caam/caamrng.c 		 __func__, ctx->current_buf, ctx->cur_buf_idx);
ctx               174 drivers/crypto/caam/caamrng.c 		memcpy(data, bd->buf + ctx->cur_buf_idx, max);
ctx               175 drivers/crypto/caam/caamrng.c 		ctx->cur_buf_idx = next_buf_idx;
ctx               180 drivers/crypto/caam/caamrng.c 	copied_idx = RN_BUF_SIZE - ctx->cur_buf_idx;
ctx               181 drivers/crypto/caam/caamrng.c 	memcpy(data, bd->buf + ctx->cur_buf_idx, copied_idx);
ctx               182 drivers/crypto/caam/caamrng.c 	ctx->cur_buf_idx = 0;
ctx               186 drivers/crypto/caam/caamrng.c 	submit_job(ctx, 1);
ctx               189 drivers/crypto/caam/caamrng.c 	ctx->current_buf = !ctx->current_buf;
ctx               190 drivers/crypto/caam/caamrng.c 	dev_dbg(ctx->jrdev, "switched to buffer %d\n", ctx->current_buf);
ctx               197 drivers/crypto/caam/caamrng.c static inline int rng_create_sh_desc(struct caam_rng_ctx *ctx)
ctx               199 drivers/crypto/caam/caamrng.c 	struct device *jrdev = ctx->jrdev;
ctx               200 drivers/crypto/caam/caamrng.c 	u32 *desc = ctx->sh_desc;
ctx               210 drivers/crypto/caam/caamrng.c 	ctx->sh_desc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
ctx               212 drivers/crypto/caam/caamrng.c 	if (dma_mapping_error(jrdev, ctx->sh_desc_dma)) {
ctx               223 drivers/crypto/caam/caamrng.c static inline int rng_create_job_desc(struct caam_rng_ctx *ctx, int buf_id)
ctx               225 drivers/crypto/caam/caamrng.c 	struct device *jrdev = ctx->jrdev;
ctx               226 drivers/crypto/caam/caamrng.c 	struct buf_data *bd = &ctx->bufs[buf_id];
ctx               228 drivers/crypto/caam/caamrng.c 	int sh_len = desc_len(ctx->sh_desc);
ctx               230 drivers/crypto/caam/caamrng.c 	init_job_desc_shared(desc, ctx->sh_desc_dma, sh_len, HDR_SHARE_DEFER |
ctx               261 drivers/crypto/caam/caamrng.c static int caam_init_buf(struct caam_rng_ctx *ctx, int buf_id)
ctx               263 drivers/crypto/caam/caamrng.c 	struct buf_data *bd = &ctx->bufs[buf_id];
ctx               266 drivers/crypto/caam/caamrng.c 	err = rng_create_job_desc(ctx, buf_id);
ctx               271 drivers/crypto/caam/caamrng.c 	submit_job(ctx, buf_id == ctx->current_buf);
ctx               277 drivers/crypto/caam/caamrng.c static int caam_init_rng(struct caam_rng_ctx *ctx, struct device *jrdev)
ctx               281 drivers/crypto/caam/caamrng.c 	ctx->jrdev = jrdev;
ctx               283 drivers/crypto/caam/caamrng.c 	err = rng_create_sh_desc(ctx);
ctx               287 drivers/crypto/caam/caamrng.c 	ctx->current_buf = 0;
ctx               288 drivers/crypto/caam/caamrng.c 	ctx->cur_buf_idx = 0;
ctx               290 drivers/crypto/caam/caamrng.c 	err = caam_init_buf(ctx, 0);
ctx               294 drivers/crypto/caam/caamrng.c 	return caam_init_buf(ctx, 1);
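
Taken together, the caamrng.c lines trace a double-buffered consumer: reads drain bufs[current_buf] starting at cur_buf_idx, and when a request spans the end of a buffer the driver copies the remaining tail, resubmits a job to refill that buffer, and flips current_buf to the other one. A userspace sketch of just that index arithmetic, with a memset standing in for the hardware refill (all names are stand-ins, and the buffer size is shrunk for demonstration):

    #include <stdio.h>
    #include <string.h>

    #define RN_BUF_SIZE 16 /* tiny stand-in for the driver's buffer size */

    struct rng_state {
        unsigned char bufs[2][RN_BUF_SIZE];
        int current_buf;    /* buffer currently being drained */
        size_t cur_buf_idx; /* next unread byte within it */
    };

    /* Stand-in for submit_job(): the real driver refills a buffer by
     * enqueuing a CAAM descriptor; here we just stamp it. */
    static void refill(struct rng_state *s, int id)
    {
        memset(s->bufs[id], 0xA5 ^ id, RN_BUF_SIZE);
    }

    static size_t rng_read(struct rng_state *s, unsigned char *data,
                           size_t max)
    {
        unsigned char *buf = s->bufs[s->current_buf];
        size_t avail = RN_BUF_SIZE - s->cur_buf_idx;

        if (max <= avail) { /* request fits in the current buffer */
            memcpy(data, buf + s->cur_buf_idx, max);
            s->cur_buf_idx += max;
            return max;
        }
        /* drain the tail, refill this buffer, switch to the other one */
        memcpy(data, buf + s->cur_buf_idx, avail);
        s->cur_buf_idx = 0;
        refill(s, s->current_buf);
        s->current_buf = !s->current_buf;
        return avail;
    }

    int main(void)
    {
        struct rng_state s = { .current_buf = 0 };
        unsigned char out[24];

        refill(&s, 0);
        refill(&s, 1);
        size_t got = rng_read(&s, out, sizeof(out));
        printf("copied %zu bytes, now on buffer %d\n", got, s.current_buf);
        return 0;
    }
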
ctx                99 drivers/crypto/cavium/cpt/cptvf_algs.c 	struct cvm_enc_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               121 drivers/crypto/cavium/cpt/cptvf_algs.c 	fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;
ctx               122 drivers/crypto/cavium/cpt/cptvf_algs.c 	fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;
ctx               125 drivers/crypto/cavium/cpt/cptvf_algs.c 	if (ctx->cipher_type == AES_XTS)
ctx               126 drivers/crypto/cavium/cpt/cptvf_algs.c 		memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);
ctx               128 drivers/crypto/cavium/cpt/cptvf_algs.c 		memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);
ctx               234 drivers/crypto/cavium/cpt/cptvf_algs.c 	struct cvm_enc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               242 drivers/crypto/cavium/cpt/cptvf_algs.c 	ctx->key_len = keylen;
ctx               243 drivers/crypto/cavium/cpt/cptvf_algs.c 	memcpy(ctx->enc_key, key1, keylen / 2);
ctx               244 drivers/crypto/cavium/cpt/cptvf_algs.c 	memcpy(ctx->enc_key + KEY2_OFFSET, key2, keylen / 2);
ctx               245 drivers/crypto/cavium/cpt/cptvf_algs.c 	ctx->cipher_type = AES_XTS;
ctx               246 drivers/crypto/cavium/cpt/cptvf_algs.c 	switch (ctx->key_len) {
ctx               248 drivers/crypto/cavium/cpt/cptvf_algs.c 		ctx->key_type = AES_128_BIT;
ctx               251 drivers/crypto/cavium/cpt/cptvf_algs.c 		ctx->key_type = AES_256_BIT;
ctx               260 drivers/crypto/cavium/cpt/cptvf_algs.c static int cvm_validate_keylen(struct cvm_enc_ctx *ctx, u32 keylen)
ctx               263 drivers/crypto/cavium/cpt/cptvf_algs.c 		ctx->key_len = keylen;
ctx               264 drivers/crypto/cavium/cpt/cptvf_algs.c 		switch (ctx->key_len) {
ctx               266 drivers/crypto/cavium/cpt/cptvf_algs.c 			ctx->key_type = AES_128_BIT;
ctx               269 drivers/crypto/cavium/cpt/cptvf_algs.c 			ctx->key_type = AES_192_BIT;
ctx               272 drivers/crypto/cavium/cpt/cptvf_algs.c 			ctx->key_type = AES_256_BIT;
ctx               278 drivers/crypto/cavium/cpt/cptvf_algs.c 		if (ctx->cipher_type == DES3_CBC)
ctx               279 drivers/crypto/cavium/cpt/cptvf_algs.c 			ctx->key_type = 0;
ctx               291 drivers/crypto/cavium/cpt/cptvf_algs.c 	struct cvm_enc_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               293 drivers/crypto/cavium/cpt/cptvf_algs.c 	ctx->cipher_type = cipher_type;
ctx               294 drivers/crypto/cavium/cpt/cptvf_algs.c 	if (!cvm_validate_keylen(ctx, keylen)) {
ctx               295 drivers/crypto/cavium/cpt/cptvf_algs.c 		memcpy(ctx->enc_key, key, keylen);
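
The cptvf_algs.c excerpts show two setkey duties: classifying an AES key length into a hardware key-type code, and, for XTS, splitting the supplied blob into cipher and tweak halves at KEY2_OFFSET. A standalone sketch of both; the KEY2_OFFSET value here is a stand-in rather than the driver's constant, and the switch's case values (elided by the index) are the standard AES key sizes:

    #include <stdio.h>
    #include <string.h>

    enum aes_key_type { AES_128_BIT, AES_192_BIT, AES_256_BIT, KEY_INVALID };

    /* Key-length classification in the shape of the cvm_validate_keylen()
     * switch quoted above. */
    static enum aes_key_type classify(unsigned int keylen)
    {
        switch (keylen) {
        case 16: return AES_128_BIT;
        case 24: return AES_192_BIT;
        case 32: return AES_256_BIT;
        default: return KEY_INVALID;
        }
    }

    /* Hypothetical stand-in for the driver's KEY2_OFFSET constant. */
    #define KEY2_OFFSET 32

    int main(void)
    {
        unsigned char blob[64] = { 0 }; /* key1 || key2, as XTS hands it in */
        unsigned char enc_key[KEY2_OFFSET + 32];
        unsigned int keylen = sizeof(blob);

        memcpy(enc_key, blob, keylen / 2);               /* cipher key */
        memcpy(enc_key + KEY2_OFFSET, blob + keylen / 2, /* tweak key  */
               keylen / 2);
        printf("per-key bytes %u, type %d\n",
               keylen / 2, classify(keylen / 2));
        return 0;
    }
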
ctx                15 drivers/crypto/cavium/nitrox/nitrox_common.h void crypto_free_context(void *ctx);
ctx               217 drivers/crypto/cavium/nitrox/nitrox_lib.c 	struct ctx_hdr *ctx;
ctx               233 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx = vaddr;
ctx               234 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx->pool = ndev->ctx_pool;
ctx               235 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx->dma = dma;
ctx               236 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctx->ctx_dma = dma + sizeof(struct ctx_hdr);
ctx               249 drivers/crypto/cavium/nitrox/nitrox_lib.c void crypto_free_context(void *ctx)
ctx               253 drivers/crypto/cavium/nitrox/nitrox_lib.c 	if (!ctx)
ctx               256 drivers/crypto/cavium/nitrox/nitrox_lib.c 	ctxp = ctx;
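
The nitrox_lib.c lines show a header-prefixed context allocation: one DMA-pool allocation whose first bytes are a struct ctx_hdr recording the owning pool and the DMA addresses, with the hardware-visible payload starting at dma + sizeof(struct ctx_hdr). A userspace analogy, with malloc standing in for the DMA pool and a uintptr_t standing in for the bus address (unlike the driver, this sketch hands the caller the payload pointer rather than the header):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct ctx_hdr {
        void *pool;        /* owning allocator, for the free path */
        uintptr_t dma;     /* "bus address" of the whole allocation */
        uintptr_t ctx_dma; /* "bus address" of the payload after the header */
    };

    static void *crypto_alloc_context(size_t payload)
    {
        struct ctx_hdr *hdr = malloc(sizeof(*hdr) + payload);

        if (!hdr)
            return NULL;
        hdr->pool = NULL; /* no pool in this sketch */
        hdr->dma = (uintptr_t)hdr;
        hdr->ctx_dma = hdr->dma + sizeof(struct ctx_hdr);
        return hdr + 1; /* caller sees only the payload */
    }

    static void crypto_free_context(void *ctx)
    {
        if (!ctx)
            return;
        free((struct ctx_hdr *)ctx - 1); /* step back to the header */
    }

    int main(void)
    {
        void *ctx = crypto_alloc_context(128);

        printf("payload at %p\n", ctx);
        crypto_free_context(ctx);
        return 0;
    }
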
ctx               289 drivers/crypto/cavium/zip/zip_crypto.c void zip_free_scomp_ctx(struct crypto_scomp *tfm, void *ctx)
ctx               291 drivers/crypto/cavium/zip/zip_crypto.c 	struct zip_kernel_ctx *zip_ctx = ctx;
ctx               299 drivers/crypto/cavium/zip/zip_crypto.c 		       u8 *dst, unsigned int *dlen, void *ctx)
ctx               302 drivers/crypto/cavium/zip/zip_crypto.c 	struct zip_kernel_ctx *zip_ctx  = ctx;
ctx               311 drivers/crypto/cavium/zip/zip_crypto.c 			 u8 *dst, unsigned int *dlen, void *ctx)
ctx               314 drivers/crypto/cavium/zip/zip_crypto.c 	struct zip_kernel_ctx *zip_ctx = ctx;
ctx                75 drivers/crypto/cavium/zip/zip_crypto.h 			 u8 *dst, unsigned int *dlen, void *ctx);
ctx                78 drivers/crypto/cavium/zip/zip_crypto.h 			   u8 *dst, unsigned int *dlen, void *ctx);
ctx                99 drivers/crypto/cavium/zip/zip_device.h 	union zip_zptr_s *ctx;
ctx                59 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	struct ccp_ctx *ctx = crypto_ahash_ctx(tfm);
ctx                69 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	if (!ctx->u.aes.key_len)
ctx               153 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		cmac_key_sg = (need_pad) ? &ctx->u.aes.k2_sg
ctx               154 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 					 : &ctx->u.aes.k1_sg;
ctx               159 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.type = ctx->u.aes.type;
ctx               160 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
ctx               162 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
ctx               163 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
ctx               170 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len;
ctx               259 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx               270 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		ctx->u.aes.type = CCP_AES_TYPE_128;
ctx               273 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		ctx->u.aes.type = CCP_AES_TYPE_192;
ctx               276 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		ctx->u.aes.type = CCP_AES_TYPE_256;
ctx               282 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->u.aes.mode = alg->mode;
ctx               285 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->u.aes.key_len = 0;
ctx               293 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
ctx               294 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	aes_encrypt(&aes, ctx->u.aes.key, ctx->u.aes.key);
ctx               298 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	k0_hi = be64_to_cpu(*((__be64 *)ctx->u.aes.key));
ctx               299 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	k0_lo = be64_to_cpu(*((__be64 *)ctx->u.aes.key + 1));
ctx               303 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	if (ctx->u.aes.key[0] & 0x80) {
ctx               307 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	gk = (__be64 *)ctx->u.aes.k1;
ctx               314 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	if (ctx->u.aes.k1[0] & 0x80) {
ctx               318 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	gk = (__be64 *)ctx->u.aes.k2;
ctx               323 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->u.aes.kn_len = sizeof(ctx->u.aes.k1);
ctx               324 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	sg_init_one(&ctx->u.aes.k1_sg, ctx->u.aes.k1, sizeof(ctx->u.aes.k1));
ctx               325 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	sg_init_one(&ctx->u.aes.k2_sg, ctx->u.aes.k2, sizeof(ctx->u.aes.k2));
ctx               328 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	memset(ctx->u.aes.key, 0, sizeof(ctx->u.aes.key));
ctx               329 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	memcpy(ctx->u.aes.key, key, key_len);
ctx               330 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->u.aes.key_len = key_len;
ctx               331 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);
ctx               338 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               341 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->complete = ccp_aes_cmac_complete;
ctx               342 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	ctx->u.aes.key_len = 0;
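
The ccp_aes_cmac_setkey() excerpts derive the CMAC subkeys K1 and K2 by doubling L (the AES encryption of the all-zero block, left in ctx->u.aes.key) in GF(2^128), working on two big-endian 64-bit halves and folding in the 0x87 reduction constant when the shifted-out bit was set. The same derivation done byte-wise, checked against the RFC 4493 test vectors; an AES implementation is deliberately left out and L is taken as given:

    #include <stdio.h>

    /* Double a 16-byte value in GF(2^128), per RFC 4493 /
     * NIST SP 800-38B. */
    static void gf128_double(unsigned char out[16],
                             const unsigned char in[16])
    {
        int carry = in[0] & 0x80; /* remember the MSB before shifting */

        for (int i = 0; i < 15; i++)
            out[i] = (unsigned char)((in[i] << 1) | (in[i + 1] >> 7));
        out[15] = (unsigned char)(in[15] << 1);
        if (carry)
            out[15] ^= 0x87; /* reduction constant Rb */
    }

    int main(void)
    {
        /* L = AES-128(K, 0^128) for the RFC 4493 sample key. */
        const unsigned char L[16] = {
            0x7d, 0xf7, 0x6b, 0x0c, 0x1a, 0xb8, 0x99, 0xb3,
            0x3e, 0x42, 0xf0, 0x47, 0xb9, 0x1b, 0x54, 0x6f
        };
        unsigned char k1[16], k2[16];

        gf128_double(k1, L);  /* RFC 4493 expects K1 = fbeed618... */
        gf128_double(k2, k1); /* and K2 = f7ddac30... */
        for (int i = 0; i < 16; i++)
            printf("%02x", k1[i]);
        putchar('\n');
        return 0;
    }
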
ctx                32 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
ctx                36 drivers/crypto/ccp/ccp-crypto-aes-galois.c 		ctx->u.aes.type = CCP_AES_TYPE_128;
ctx                39 drivers/crypto/ccp/ccp-crypto-aes-galois.c 		ctx->u.aes.type = CCP_AES_TYPE_192;
ctx                42 drivers/crypto/ccp/ccp-crypto-aes-galois.c 		ctx->u.aes.type = CCP_AES_TYPE_256;
ctx                49 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	ctx->u.aes.mode = CCP_AES_MODE_GCM;
ctx                50 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	ctx->u.aes.key_len = key_len;
ctx                52 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	memcpy(ctx->u.aes.key, key, key_len);
ctx                53 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);
ctx                80 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
ctx                87 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	if (!ctx->u.aes.key_len)
ctx                90 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	if (ctx->u.aes.mode != CCP_AES_MODE_GCM)
ctx               121 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	rctx->cmd.u.aes.type = ctx->u.aes.type;
ctx               122 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
ctx               124 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
ctx               125 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
ctx               152 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
ctx               154 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	ctx->complete = ccp_aes_gcm_complete;
ctx               155 drivers/crypto/ccp/ccp-crypto-aes-galois.c 	ctx->u.aes.key_len = 0;
ctx                79 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(xfm);
ctx                92 drivers/crypto/ccp/ccp-crypto-aes-xts.c 		memcpy(ctx->u.aes.key, key, key_len);
ctx                96 drivers/crypto/ccp/ccp-crypto-aes-xts.c 			memcpy(ctx->u.aes.key, key, key_len);
ctx                99 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	ctx->u.aes.key_len = key_len / 2;
ctx               100 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);
ctx               102 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	return crypto_sync_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len);
ctx               108 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               116 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	if (!ctx->u.aes.key_len)
ctx               142 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	    (ctx->u.aes.key_len != AES_KEYSIZE_128))
ctx               144 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	if ((ctx->u.aes.key_len != AES_KEYSIZE_128) &&
ctx               145 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	    (ctx->u.aes.key_len != AES_KEYSIZE_256))
ctx               149 drivers/crypto/ccp/ccp-crypto-aes-xts.c 					       ctx->u.aes.tfm_skcipher);
ctx               154 drivers/crypto/ccp/ccp-crypto-aes-xts.c 		skcipher_request_set_sync_tfm(subreq, ctx->u.aes.tfm_skcipher);
ctx               175 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	rctx->cmd.u.xts.key = &ctx->u.aes.key_sg;
ctx               176 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	rctx->cmd.u.xts.key_len = ctx->u.aes.key_len;
ctx               200 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               203 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	ctx->complete = ccp_aes_xts_complete;
ctx               204 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	ctx->u.aes.key_len = 0;
ctx               213 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	ctx->u.aes.tfm_skcipher = fallback_tfm;
ctx               222 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               224 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	crypto_free_sync_skcipher(ctx->u.aes.tfm_skcipher);
ctx                25 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx                31 drivers/crypto/ccp/ccp-crypto-aes.c 	if (ctx->u.aes.mode != CCP_AES_MODE_ECB)
ctx                40 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
ctx                46 drivers/crypto/ccp/ccp-crypto-aes.c 		ctx->u.aes.type = CCP_AES_TYPE_128;
ctx                49 drivers/crypto/ccp/ccp-crypto-aes.c 		ctx->u.aes.type = CCP_AES_TYPE_192;
ctx                52 drivers/crypto/ccp/ccp-crypto-aes.c 		ctx->u.aes.type = CCP_AES_TYPE_256;
ctx                58 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->u.aes.mode = alg->mode;
ctx                59 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->u.aes.key_len = key_len;
ctx                61 drivers/crypto/ccp/ccp-crypto-aes.c 	memcpy(ctx->u.aes.key, key, key_len);
ctx                62 drivers/crypto/ccp/ccp-crypto-aes.c 	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);
ctx                69 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx                75 drivers/crypto/ccp/ccp-crypto-aes.c 	if (!ctx->u.aes.key_len)
ctx                78 drivers/crypto/ccp/ccp-crypto-aes.c 	if (((ctx->u.aes.mode == CCP_AES_MODE_ECB) ||
ctx                79 drivers/crypto/ccp/ccp-crypto-aes.c 	     (ctx->u.aes.mode == CCP_AES_MODE_CBC)) &&
ctx                83 drivers/crypto/ccp/ccp-crypto-aes.c 	if (ctx->u.aes.mode != CCP_AES_MODE_ECB) {
ctx                96 drivers/crypto/ccp/ccp-crypto-aes.c 	rctx->cmd.u.aes.type = ctx->u.aes.type;
ctx                97 drivers/crypto/ccp/ccp-crypto-aes.c 	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
ctx               100 drivers/crypto/ccp/ccp-crypto-aes.c 	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
ctx               101 drivers/crypto/ccp/ccp-crypto-aes.c 	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
ctx               125 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               127 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->complete = ccp_aes_complete;
ctx               128 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->u.aes.key_len = 0;
ctx               154 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
ctx               160 drivers/crypto/ccp/ccp-crypto-aes.c 	memcpy(ctx->u.aes.nonce, key + key_len, CTR_RFC3686_NONCE_SIZE);
ctx               167 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               173 drivers/crypto/ccp/ccp-crypto-aes.c 	memcpy(iv, ctx->u.aes.nonce, CTR_RFC3686_NONCE_SIZE);
ctx               200 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               202 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->complete = ccp_aes_rfc3686_complete;
ctx               203 drivers/crypto/ccp/ccp-crypto-aes.c 	ctx->u.aes.key_len = 0;
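
The rfc3686 lines above show the nonce being peeled off the tail of the key at setkey time and later copied ahead of the per-request IV. What the driver is assembling is the RFC 3686 initial counter block: 4-byte nonce || 8-byte IV || 32-bit big-endian block counter starting at 1. A standalone sketch using values from the RFC's test vector #2:

    #include <stdio.h>
    #include <string.h>

    #define CTR_RFC3686_NONCE_SIZE 4
    #define CTR_RFC3686_IV_SIZE    8

    /* Assemble the 16-byte initial counter block from the stored nonce
     * and the caller's IV. */
    static void rfc3686_counter_block(unsigned char out[16],
                                      const unsigned char *nonce,
                                      const unsigned char *iv)
    {
        memcpy(out, nonce, CTR_RFC3686_NONCE_SIZE);
        memcpy(out + CTR_RFC3686_NONCE_SIZE, iv, CTR_RFC3686_IV_SIZE);
        out[12] = 0;
        out[13] = 0;
        out[14] = 0;
        out[15] = 1; /* initial block counter, per RFC 3686 section 4 */
    }

    int main(void)
    {
        const unsigned char nonce[4] = { 0x00, 0x6c, 0xb6, 0xdb };
        const unsigned char iv[8]    = { 0xc0, 0x54, 0x3b, 0x59,
                                         0xda, 0x48, 0xd9, 0x0b };
        unsigned char block[16];

        rfc3686_counter_block(block, nonce, iv);
        for (int i = 0; i < 16; i++)
            printf("%02x", block[i]);
        putchar('\n'); /* 006cb6dbc0543b59da48d90b00000001 */
        return 0;
    }
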
ctx                24 drivers/crypto/ccp/ccp-crypto-des3.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx                30 drivers/crypto/ccp/ccp-crypto-des3.c 	if (ctx->u.des3.mode != CCP_DES3_MODE_ECB)
ctx                39 drivers/crypto/ccp/ccp-crypto-des3.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ablkcipher_tfm(tfm));
ctx                51 drivers/crypto/ccp/ccp-crypto-des3.c 	ctx->u.des3.type = CCP_DES3_TYPE_168;
ctx                52 drivers/crypto/ccp/ccp-crypto-des3.c 	ctx->u.des3.mode = alg->mode;
ctx                53 drivers/crypto/ccp/ccp-crypto-des3.c 	ctx->u.des3.key_len = key_len;
ctx                55 drivers/crypto/ccp/ccp-crypto-des3.c 	memcpy(ctx->u.des3.key, key, key_len);
ctx                56 drivers/crypto/ccp/ccp-crypto-des3.c 	sg_init_one(&ctx->u.des3.key_sg, ctx->u.des3.key, key_len);
ctx                63 drivers/crypto/ccp/ccp-crypto-des3.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx                69 drivers/crypto/ccp/ccp-crypto-des3.c 	if (!ctx->u.des3.key_len)
ctx                72 drivers/crypto/ccp/ccp-crypto-des3.c 	if (((ctx->u.des3.mode == CCP_DES3_MODE_ECB) ||
ctx                73 drivers/crypto/ccp/ccp-crypto-des3.c 	     (ctx->u.des3.mode == CCP_DES3_MODE_CBC)) &&
ctx                77 drivers/crypto/ccp/ccp-crypto-des3.c 	if (ctx->u.des3.mode != CCP_DES3_MODE_ECB) {
ctx                90 drivers/crypto/ccp/ccp-crypto-des3.c 	rctx->cmd.u.des3.type = ctx->u.des3.type;
ctx                91 drivers/crypto/ccp/ccp-crypto-des3.c 	rctx->cmd.u.des3.mode = ctx->u.des3.mode;
ctx                95 drivers/crypto/ccp/ccp-crypto-des3.c 	rctx->cmd.u.des3.key = &ctx->u.des3.key_sg;
ctx                96 drivers/crypto/ccp/ccp-crypto-des3.c 	rctx->cmd.u.des3.key_len = ctx->u.des3.key_len;
ctx               120 drivers/crypto/ccp/ccp-crypto-des3.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               122 drivers/crypto/ccp/ccp-crypto-des3.c 	ctx->complete = ccp_des3_complete;
ctx               123 drivers/crypto/ccp/ccp-crypto-des3.c 	ctx->u.des3.key_len = 0;
ctx               149 drivers/crypto/ccp/ccp-crypto-main.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(req->tfm);
ctx               178 drivers/crypto/ccp/ccp-crypto-main.c 	if (ctx->complete)
ctx               179 drivers/crypto/ccp/ccp-crypto-main.c 		ret = ctx->complete(req, ret);
ctx               193 drivers/crypto/ccp/ccp-crypto-main.c 		ctx = crypto_tfm_ctx(held->req->tfm);
ctx               194 drivers/crypto/ccp/ccp-crypto-main.c 		if (ctx->complete)
ctx               195 drivers/crypto/ccp/ccp-crypto-main.c 			ret = ctx->complete(held->req, ret);
ctx                59 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct ccp_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                61 drivers/crypto/ccp/ccp-crypto-rsa.c 	return ctx->u.rsa.n_len;
ctx                67 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct ccp_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx                75 drivers/crypto/ccp/ccp-crypto-rsa.c 	rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */
ctx                77 drivers/crypto/ccp/ccp-crypto-rsa.c 		rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;
ctx                78 drivers/crypto/ccp/ccp-crypto-rsa.c 		rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;
ctx                80 drivers/crypto/ccp/ccp-crypto-rsa.c 		rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;
ctx                81 drivers/crypto/ccp/ccp-crypto-rsa.c 		rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len;
ctx                83 drivers/crypto/ccp/ccp-crypto-rsa.c 	rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg;
ctx                84 drivers/crypto/ccp/ccp-crypto-rsa.c 	rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len;
ctx               112 drivers/crypto/ccp/ccp-crypto-rsa.c static void ccp_rsa_free_key_bufs(struct ccp_ctx *ctx)
ctx               115 drivers/crypto/ccp/ccp-crypto-rsa.c 	kzfree(ctx->u.rsa.e_buf);
ctx               116 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.e_buf = NULL;
ctx               117 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.e_len = 0;
ctx               118 drivers/crypto/ccp/ccp-crypto-rsa.c 	kzfree(ctx->u.rsa.n_buf);
ctx               119 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.n_buf = NULL;
ctx               120 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.n_len = 0;
ctx               121 drivers/crypto/ccp/ccp-crypto-rsa.c 	kzfree(ctx->u.rsa.d_buf);
ctx               122 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.d_buf = NULL;
ctx               123 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.d_len = 0;
ctx               129 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct ccp_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               133 drivers/crypto/ccp/ccp-crypto-rsa.c 	ccp_rsa_free_key_bufs(ctx);
ctx               144 drivers/crypto/ccp/ccp-crypto-rsa.c 	ret = ccp_copy_and_save_keypart(&ctx->u.rsa.n_buf, &ctx->u.rsa.n_len,
ctx               148 drivers/crypto/ccp/ccp-crypto-rsa.c 	sg_init_one(&ctx->u.rsa.n_sg, ctx->u.rsa.n_buf, ctx->u.rsa.n_len);
ctx               150 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->u.rsa.key_len = ctx->u.rsa.n_len << 3; /* convert to bits */
ctx               151 drivers/crypto/ccp/ccp-crypto-rsa.c 	if (ccp_check_key_length(ctx->u.rsa.key_len)) {
ctx               156 drivers/crypto/ccp/ccp-crypto-rsa.c 	ret = ccp_copy_and_save_keypart(&ctx->u.rsa.e_buf, &ctx->u.rsa.e_len,
ctx               160 drivers/crypto/ccp/ccp-crypto-rsa.c 	sg_init_one(&ctx->u.rsa.e_sg, ctx->u.rsa.e_buf, ctx->u.rsa.e_len);
ctx               163 drivers/crypto/ccp/ccp-crypto-rsa.c 		ret = ccp_copy_and_save_keypart(&ctx->u.rsa.d_buf,
ctx               164 drivers/crypto/ccp/ccp-crypto-rsa.c 						&ctx->u.rsa.d_len,
ctx               168 drivers/crypto/ccp/ccp-crypto-rsa.c 		sg_init_one(&ctx->u.rsa.d_sg,
ctx               169 drivers/crypto/ccp/ccp-crypto-rsa.c 			    ctx->u.rsa.d_buf, ctx->u.rsa.d_len);
ctx               175 drivers/crypto/ccp/ccp-crypto-rsa.c 	ccp_rsa_free_key_bufs(ctx);
ctx               195 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct ccp_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               198 drivers/crypto/ccp/ccp-crypto-rsa.c 	ctx->complete = ccp_rsa_complete;
ctx               205 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(&tfm->base);
ctx               207 drivers/crypto/ccp/ccp-crypto-rsa.c 	ccp_rsa_free_key_bufs(ctx);
ctx                48 drivers/crypto/ccp/ccp-crypto-sha.c 		memcpy(req->result, rctx->ctx, digest_size);
ctx                60 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_ahash_ctx(tfm);
ctx                93 drivers/crypto/ccp/ccp-crypto-sha.c 	sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));
ctx               135 drivers/crypto/ccp/ccp-crypto-sha.c 	rctx->cmd.u.sha.ctx = &rctx->ctx_sg;
ctx               160 drivers/crypto/ccp/ccp-crypto-sha.c 	rctx->cmd.u.sha.opad = ctx->u.sha.key_len ?
ctx               161 drivers/crypto/ccp/ccp-crypto-sha.c 		&ctx->u.sha.opad_sg : NULL;
ctx               162 drivers/crypto/ccp/ccp-crypto-sha.c 	rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ?
ctx               163 drivers/crypto/ccp/ccp-crypto-sha.c 		ctx->u.sha.opad_count : 0;
ctx               183 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               195 drivers/crypto/ccp/ccp-crypto-sha.c 	if (ctx->u.sha.key_len) {
ctx               197 drivers/crypto/ccp/ccp-crypto-sha.c 		memcpy(rctx->buf, ctx->u.sha.ipad, block_size);
ctx               241 drivers/crypto/ccp/ccp-crypto-sha.c 	memcpy(state.ctx, rctx->ctx, sizeof(state.ctx));
ctx               263 drivers/crypto/ccp/ccp-crypto-sha.c 	memcpy(rctx->ctx, state.ctx, sizeof(rctx->ctx));
ctx               273 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx               274 drivers/crypto/ccp/ccp-crypto-sha.c 	struct crypto_shash *shash = ctx->u.sha.hmac_tfm;
ctx               283 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->u.sha.key_len = 0;
ctx               288 drivers/crypto/ccp/ccp-crypto-sha.c 	memset(ctx->u.sha.key, 0, sizeof(ctx->u.sha.key));
ctx               295 drivers/crypto/ccp/ccp-crypto-sha.c 					  ctx->u.sha.key);
ctx               303 drivers/crypto/ccp/ccp-crypto-sha.c 		memcpy(ctx->u.sha.key, key, key_len);
ctx               307 drivers/crypto/ccp/ccp-crypto-sha.c 		ctx->u.sha.ipad[i] = ctx->u.sha.key[i] ^ HMAC_IPAD_VALUE;
ctx               308 drivers/crypto/ccp/ccp-crypto-sha.c 		ctx->u.sha.opad[i] = ctx->u.sha.key[i] ^ HMAC_OPAD_VALUE;
ctx               311 drivers/crypto/ccp/ccp-crypto-sha.c 	sg_init_one(&ctx->u.sha.opad_sg, ctx->u.sha.opad, block_size);
ctx               312 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->u.sha.opad_count = block_size;
ctx               314 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->u.sha.key_len = key_len;
ctx               321 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               324 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->complete = ccp_sha_complete;
ctx               325 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->u.sha.key_len = 0;
ctx               338 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               349 drivers/crypto/ccp/ccp-crypto-sha.c 	ctx->u.sha.hmac_tfm = hmac_tfm;
ctx               356 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ccp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               358 drivers/crypto/ccp/ccp-crypto-sha.c 	if (ctx->u.sha.hmac_tfm)
ctx               359 drivers/crypto/ccp/ccp-crypto-sha.c 		crypto_free_shash(ctx->u.sha.hmac_tfm);
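
The ccp-crypto-sha.c setkey excerpts implement the standard HMAC key schedule: an over-long key is first hashed down to digest size (through ctx->u.sha.hmac_tfm), then ipad and opad are the zero-padded key XORed with 0x36 and 0x5c across the block. A sketch of the pad derivation alone, leaving out the hash-down step and assuming the key already fits in one block:

    #include <stdio.h>
    #include <string.h>

    #define HMAC_IPAD_VALUE 0x36
    #define HMAC_OPAD_VALUE 0x5c
    #define BLOCK_SIZE 64 /* SHA-1/SHA-256 block size */

    /* Derive the inner and outer pads from a key of at most one block;
     * the zero padding comes from the initialized scratch array. */
    static void hmac_pads(const unsigned char *key, size_t key_len,
                          unsigned char ipad[BLOCK_SIZE],
                          unsigned char opad[BLOCK_SIZE])
    {
        unsigned char k[BLOCK_SIZE] = { 0 }; /* zero-padded key */

        memcpy(k, key, key_len); /* caller guarantees key_len <= BLOCK_SIZE */
        for (size_t i = 0; i < BLOCK_SIZE; i++) {
            ipad[i] = k[i] ^ HMAC_IPAD_VALUE;
            opad[i] = k[i] ^ HMAC_OPAD_VALUE;
        }
    }

    int main(void)
    {
        unsigned char ipad[BLOCK_SIZE], opad[BLOCK_SIZE];

        hmac_pads((const unsigned char *)"key", 3, ipad, opad);
        printf("ipad[0]=%02x opad[0]=%02x\n", ipad[0], opad[0]);
        return 0;
    }
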
ctx               212 drivers/crypto/ccp/ccp-crypto.h 	u8 ctx[MAX_SHA_CONTEXT_SIZE];
ctx               229 drivers/crypto/ccp/ccp-crypto.h 	u8 ctx[MAX_SHA_CONTEXT_SIZE];
ctx               460 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea key, ctx;
ctx               528 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_dm_workarea(&ctx, cmd_q,
ctx               535 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx               538 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               557 drivers/crypto/ccp/ccp-ops.c 			ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid,
ctx               565 drivers/crypto/ccp/ccp-ops.c 			ret = ccp_set_dm_area(&ctx, 0, aes->cmac_key, 0,
ctx               569 drivers/crypto/ccp/ccp-ops.c 			ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               589 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               598 drivers/crypto/ccp/ccp-ops.c 	ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx               604 drivers/crypto/ccp/ccp-ops.c 	ccp_dm_free(&ctx);
ctx               616 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea key, ctx, final_wa, tag;
ctx               710 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_dm_workarea(&ctx, cmd_q,
ctx               717 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx               721 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               803 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               810 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx               814 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx               887 drivers/crypto/ccp/ccp-ops.c 	ccp_dm_free(&ctx);
ctx               899 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea key, ctx;
ctx               967 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_dm_workarea(&ctx, cmd_q,
ctx               976 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_set_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx               979 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1044 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1053 drivers/crypto/ccp/ccp-ops.c 		ccp_get_dm_area(&ctx, dm_offset, aes->iv, 0, aes->iv_len);
ctx              1064 drivers/crypto/ccp/ccp-ops.c 	ccp_dm_free(&ctx);
ctx              1076 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea key, ctx;
ctx              1189 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_dm_workarea(&ctx, cmd_q,
ctx              1195 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_set_dm_area(&ctx, 0, xts->iv, 0, xts->iv_len);
ctx              1198 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1245 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1254 drivers/crypto/ccp/ccp-ops.c 	ccp_get_dm_area(&ctx, dm_offset, xts->iv, 0, xts->iv_len);
ctx              1264 drivers/crypto/ccp/ccp-ops.c 	ccp_dm_free(&ctx);
ctx              1277 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea key, ctx;
ctx              1373 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_init_dm_workarea(&ctx, cmd_q,
ctx              1381 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_set_dm_area(&ctx, dm_offset, des3->iv, 0,
ctx              1386 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1441 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1449 drivers/crypto/ccp/ccp-ops.c 		ccp_get_dm_area(&ctx, dm_offset, des3->iv, 0,
ctx              1461 drivers/crypto/ccp/ccp-ops.c 		ccp_dm_free(&ctx);
ctx              1473 drivers/crypto/ccp/ccp-ops.c 	struct ccp_dm_workarea ctx;
ctx              1516 drivers/crypto/ccp/ccp-ops.c 	if (!sha->ctx)
ctx              1562 drivers/crypto/ccp/ccp-ops.c 			scatterwalk_map_and_copy((void *)sha_zero, sha->ctx, 0,
ctx              1637 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_dm_workarea(&ctx, cmd_q, sb_count * CCP_SB_BYTES,
ctx              1646 drivers/crypto/ccp/ccp-ops.c 			memcpy(ctx.address + ioffset, init, ctx_size);
ctx              1650 drivers/crypto/ccp/ccp-ops.c 			memcpy(ctx.address + ctx_size / 2, init,
ctx              1652 drivers/crypto/ccp/ccp-ops.c 			memcpy(ctx.address, init + ctx_size / 2,
ctx              1661 drivers/crypto/ccp/ccp-ops.c 		ret = ccp_set_dm_area(&ctx, 0, sha->ctx, 0,
ctx              1667 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_to_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1706 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_copy_from_sb(cmd_q, &ctx, op.jobid, op.sb_ctx,
ctx              1719 drivers/crypto/ccp/ccp-ops.c 			ccp_get_dm_area(&ctx, ooffset,
ctx              1720 drivers/crypto/ccp/ccp-ops.c 					sha->ctx, 0,
ctx              1725 drivers/crypto/ccp/ccp-ops.c 			ccp_get_dm_area(&ctx, 0,
ctx              1726 drivers/crypto/ccp/ccp-ops.c 					sha->ctx, LSB_ITEM_SIZE - ooffset,
ctx              1728 drivers/crypto/ccp/ccp-ops.c 			ccp_get_dm_area(&ctx, LSB_ITEM_SIZE + ooffset,
ctx              1729 drivers/crypto/ccp/ccp-ops.c 					sha->ctx, 0,
ctx              1738 drivers/crypto/ccp/ccp-ops.c 		ccp_get_dm_area(&ctx, 0, sha->ctx, 0,
ctx              1766 drivers/crypto/ccp/ccp-ops.c 			       ctx.address + ooffset,
ctx              1772 drivers/crypto/ccp/ccp-ops.c 			       ctx.address + LSB_ITEM_SIZE + ooffset,
ctx              1776 drivers/crypto/ccp/ccp-ops.c 			       ctx.address,
ctx              1787 drivers/crypto/ccp/ccp-ops.c 		hmac_cmd.u.sha.ctx = sha->ctx;
ctx              1809 drivers/crypto/ccp/ccp-ops.c 	ccp_dm_free(&ctx);
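Every ccp-ops.c routine indexed above follows the same workarea choreography: allocate a DMA-able ctx buffer, stage the caller's IV or key into it (ccp_set_dm_area), push it to the storage block (ccp_copy_to_sb), run the operation, pull the updated context back (ccp_copy_from_sb / ccp_get_dm_area), and free the buffer. A stripped-down sketch of the underlying bounce-buffer idea using the plain DMA API; demo_* names are hypothetical stand-ins, the real driver goes through its ccp_dm_workarea helpers:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/string.h>
#include <linux/types.h>

/* Hypothetical stand-in for the driver's ccp_dm_workarea. */
struct demo_workarea {
	void *address;
	dma_addr_t dma;
	size_t len;
};

static int demo_workarea_init(struct device *dev, struct demo_workarea *wa,
			      size_t len)
{
	wa->address = dma_alloc_coherent(dev, len, &wa->dma, GFP_KERNEL);
	if (!wa->address)
		return -ENOMEM;
	wa->len = len;
	memset(wa->address, 0, len);	/* zeroed staging area, like ctx */
	return 0;
}

/* Stage caller data in before the op; ~ ccp_set_dm_area() above. */
static void demo_stage_iv(struct demo_workarea *wa, unsigned int offset,
			  const u8 *iv, unsigned int iv_len)
{
	memcpy(wa->address + offset, iv, iv_len);
}

static void demo_workarea_free(struct device *dev, struct demo_workarea *wa)
{
	dma_free_coherent(dev, wa->len, wa->address, wa->dma);
	wa->address = NULL;
}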
ctx                70 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx                71 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx                77 drivers/crypto/ccree/cc_aead.c 	if (ctx->enckey) {
ctx                78 drivers/crypto/ccree/cc_aead.c 		dma_free_coherent(dev, AES_MAX_KEY_SIZE, ctx->enckey,
ctx                79 drivers/crypto/ccree/cc_aead.c 				  ctx->enckey_dma_addr);
ctx                81 drivers/crypto/ccree/cc_aead.c 			&ctx->enckey_dma_addr);
ctx                82 drivers/crypto/ccree/cc_aead.c 		ctx->enckey_dma_addr = 0;
ctx                83 drivers/crypto/ccree/cc_aead.c 		ctx->enckey = NULL;
ctx                86 drivers/crypto/ccree/cc_aead.c 	if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { /* XCBC authentication */
ctx                87 drivers/crypto/ccree/cc_aead.c 		struct cc_xcbc_s *xcbc = &ctx->auth_state.xcbc;
ctx                98 drivers/crypto/ccree/cc_aead.c 	} else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC auth. */
ctx                99 drivers/crypto/ccree/cc_aead.c 		struct cc_hmac_s *hmac = &ctx->auth_state.hmac;
ctx               124 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               126 drivers/crypto/ccree/cc_aead.c 	return cc_get_default_hash_len(ctx->drvdata);
ctx               132 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               137 drivers/crypto/ccree/cc_aead.c 	dev_dbg(dev, "Initializing context @%p for %s\n", ctx,
ctx               141 drivers/crypto/ccree/cc_aead.c 	ctx->cipher_mode = cc_alg->cipher_mode;
ctx               142 drivers/crypto/ccree/cc_aead.c 	ctx->flow_mode = cc_alg->flow_mode;
ctx               143 drivers/crypto/ccree/cc_aead.c 	ctx->auth_mode = cc_alg->auth_mode;
ctx               144 drivers/crypto/ccree/cc_aead.c 	ctx->drvdata = cc_alg->drvdata;
ctx               148 drivers/crypto/ccree/cc_aead.c 	ctx->enckey = dma_alloc_coherent(dev, AES_MAX_KEY_SIZE,
ctx               149 drivers/crypto/ccree/cc_aead.c 					 &ctx->enckey_dma_addr, GFP_KERNEL);
ctx               150 drivers/crypto/ccree/cc_aead.c 	if (!ctx->enckey) {
ctx               155 drivers/crypto/ccree/cc_aead.c 		ctx->enckey);
ctx               159 drivers/crypto/ccree/cc_aead.c 	if (ctx->auth_mode == DRV_HASH_XCBC_MAC) { /* XCBC authentication */
ctx               160 drivers/crypto/ccree/cc_aead.c 		struct cc_xcbc_s *xcbc = &ctx->auth_state.xcbc;
ctx               172 drivers/crypto/ccree/cc_aead.c 	} else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC authentication */
ctx               173 drivers/crypto/ccree/cc_aead.c 		struct cc_hmac_s *hmac = &ctx->auth_state.hmac;
ctx               200 drivers/crypto/ccree/cc_aead.c 		ctx->auth_state.hmac.ipad_opad = NULL;
ctx               201 drivers/crypto/ccree/cc_aead.c 		ctx->auth_state.hmac.padded_authkey = NULL;
ctx               203 drivers/crypto/ccree/cc_aead.c 	ctx->hash_len = cc_get_aead_hash_len(tfm);
ctx               217 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               233 drivers/crypto/ccree/cc_aead.c 			   ctx->authsize) != 0) {
ctx               235 drivers/crypto/ccree/cc_aead.c 				ctx->authsize, ctx->cipher_mode);
ctx               248 drivers/crypto/ccree/cc_aead.c 				   skip, (skip + ctx->authsize),
ctx               256 drivers/crypto/ccree/cc_aead.c 				struct cc_aead_ctx *ctx)
ctx               265 drivers/crypto/ccree/cc_aead.c 		     ctx->auth_state.xcbc.xcbc_keys_dma_addr, ctx->auth_keylen,
ctx               269 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[0], ctx->auth_keylen);
ctx               276 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[1], ctx->auth_state.xcbc.xcbc_keys_dma_addr,
ctx               282 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[2], (ctx->auth_state.xcbc.xcbc_keys_dma_addr
ctx               289 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[3], (ctx->auth_state.xcbc.xcbc_keys_dma_addr
ctx               296 drivers/crypto/ccree/cc_aead.c static int hmac_setkey(struct cc_hw_desc *desc, struct cc_aead_ctx *ctx)
ctx               300 drivers/crypto/ccree/cc_aead.c 	unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx               302 drivers/crypto/ccree/cc_aead.c 	unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx               304 drivers/crypto/ccree/cc_aead.c 	struct cc_hmac_s *hmac = &ctx->auth_state.hmac;
ctx               315 drivers/crypto/ccree/cc_aead.c 			     cc_larval_digest_addr(ctx->drvdata,
ctx               316 drivers/crypto/ccree/cc_aead.c 						   ctx->auth_mode),
ctx               325 drivers/crypto/ccree/cc_aead.c 		set_din_const(&desc[idx], 0, ctx->hash_len);
ctx               365 drivers/crypto/ccree/cc_aead.c static int validate_keys_sizes(struct cc_aead_ctx *ctx)
ctx               367 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               370 drivers/crypto/ccree/cc_aead.c 		ctx->enc_keylen, ctx->auth_keylen);
ctx               372 drivers/crypto/ccree/cc_aead.c 	switch (ctx->auth_mode) {
ctx               377 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_keylen != AES_KEYSIZE_128 &&
ctx               378 drivers/crypto/ccree/cc_aead.c 		    ctx->auth_keylen != AES_KEYSIZE_192 &&
ctx               379 drivers/crypto/ccree/cc_aead.c 		    ctx->auth_keylen != AES_KEYSIZE_256)
ctx               383 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_keylen > 0)
ctx               387 drivers/crypto/ccree/cc_aead.c 		dev_err(dev, "Invalid auth_mode=%d\n", ctx->auth_mode);
ctx               391 drivers/crypto/ccree/cc_aead.c 	if (ctx->flow_mode == S_DIN_to_DES) {
ctx               392 drivers/crypto/ccree/cc_aead.c 		if (ctx->enc_keylen != DES3_EDE_KEY_SIZE) {
ctx               394 drivers/crypto/ccree/cc_aead.c 				ctx->enc_keylen);
ctx               398 drivers/crypto/ccree/cc_aead.c 		if (ctx->enc_keylen != AES_KEYSIZE_128 &&
ctx               399 drivers/crypto/ccree/cc_aead.c 		    ctx->enc_keylen != AES_KEYSIZE_192 &&
ctx               400 drivers/crypto/ccree/cc_aead.c 		    ctx->enc_keylen != AES_KEYSIZE_256) {
ctx               402 drivers/crypto/ccree/cc_aead.c 				ctx->enc_keylen);
ctx               417 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               418 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               419 drivers/crypto/ccree/cc_aead.c 	u32 larval_addr = cc_larval_digest_addr(ctx->drvdata, ctx->auth_mode);
ctx               429 drivers/crypto/ccree/cc_aead.c 		ctx->auth_state.hmac.padded_authkey_dma_addr;
ctx               431 drivers/crypto/ccree/cc_aead.c 	switch (ctx->auth_mode) { /* auth_key required and >0 */
ctx               470 drivers/crypto/ccree/cc_aead.c 			set_din_const(&desc[idx], 0, ctx->hash_len);
ctx               531 drivers/crypto/ccree/cc_aead.c 	rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, idx);
ctx               546 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               550 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               555 drivers/crypto/ccree/cc_aead.c 		ctx, crypto_tfm_alg_name(crypto_aead_tfm(tfm)), key, keylen);
ctx               559 drivers/crypto/ccree/cc_aead.c 	if (ctx->auth_mode != DRV_HASH_NULL) { /* authenc() alg. */
ctx               567 drivers/crypto/ccree/cc_aead.c 		ctx->enc_keylen = keys.enckeylen;
ctx               568 drivers/crypto/ccree/cc_aead.c 		ctx->auth_keylen = keys.authkeylen;
ctx               570 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_CTR) {
ctx               573 drivers/crypto/ccree/cc_aead.c 			if (ctx->enc_keylen <
ctx               579 drivers/crypto/ccree/cc_aead.c 			memcpy(ctx->ctr_nonce, enckey + ctx->enc_keylen -
ctx               582 drivers/crypto/ccree/cc_aead.c 			ctx->enc_keylen -= CTR_RFC3686_NONCE_SIZE;
ctx               587 drivers/crypto/ccree/cc_aead.c 		ctx->enc_keylen = keylen;
ctx               588 drivers/crypto/ccree/cc_aead.c 		ctx->auth_keylen = 0;
ctx               591 drivers/crypto/ccree/cc_aead.c 	rc = validate_keys_sizes(ctx);
ctx               598 drivers/crypto/ccree/cc_aead.c 	memcpy(ctx->enckey, enckey, ctx->enc_keylen);
ctx               599 drivers/crypto/ccree/cc_aead.c 	if (ctx->enc_keylen == 24)
ctx               600 drivers/crypto/ccree/cc_aead.c 		memset(ctx->enckey + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
ctx               601 drivers/crypto/ccree/cc_aead.c 	if (ctx->auth_mode == DRV_HASH_XCBC_MAC) {
ctx               602 drivers/crypto/ccree/cc_aead.c 		memcpy(ctx->auth_state.xcbc.xcbc_keys, authkey,
ctx               603 drivers/crypto/ccree/cc_aead.c 		       ctx->auth_keylen);
ctx               604 drivers/crypto/ccree/cc_aead.c 	} else if (ctx->auth_mode != DRV_HASH_NULL) { /* HMAC */
ctx               605 drivers/crypto/ccree/cc_aead.c 		rc = cc_get_plain_hmac_key(tfm, authkey, ctx->auth_keylen);
ctx               612 drivers/crypto/ccree/cc_aead.c 	switch (ctx->auth_mode) {
ctx               615 drivers/crypto/ccree/cc_aead.c 		seq_len = hmac_setkey(desc, ctx);
ctx               618 drivers/crypto/ccree/cc_aead.c 		seq_len = xcbc_setkey(desc, ctx);
ctx               623 drivers/crypto/ccree/cc_aead.c 		dev_err(dev, "Unsupported authenc (%d)\n", ctx->auth_mode);
ctx               631 drivers/crypto/ccree/cc_aead.c 		rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, seq_len);
ctx               668 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               674 drivers/crypto/ccree/cc_aead.c 	memcpy(ctx->ctr_nonce, key + keylen, 3);
ctx               682 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(authenc);
ctx               683 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               691 drivers/crypto/ccree/cc_aead.c 	ctx->authsize = authsize;
ctx               692 drivers/crypto/ccree/cc_aead.c 	dev_dbg(dev, "authlen=%d\n", ctx->authsize);
ctx               735 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               739 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               748 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_mode == DRV_HASH_XCBC_MAC &&
ctx               758 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_mode == DRV_HASH_XCBC_MAC &&
ctx               779 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               780 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               843 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               844 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               885 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               888 drivers/crypto/ccree/cc_aead.c 	unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx               897 drivers/crypto/ccree/cc_aead.c 		set_dout_dlli(&desc[idx], req_ctx->icv_dma_addr, ctx->authsize,
ctx               899 drivers/crypto/ccree/cc_aead.c 		set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx               900 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_mode == DRV_HASH_XCBC_MAC) {
ctx               914 drivers/crypto/ccree/cc_aead.c 			      ctx->authsize, NS_BIT, 1);
ctx               915 drivers/crypto/ccree/cc_aead.c 		set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx               919 drivers/crypto/ccree/cc_aead.c 		if (ctx->auth_mode == DRV_HASH_XCBC_MAC) {
ctx               935 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               944 drivers/crypto/ccree/cc_aead.c 	set_flow_mode(&desc[idx], ctx->flow_mode);
ctx               947 drivers/crypto/ccree/cc_aead.c 	if (ctx->cipher_mode == DRV_CIPHER_CTR)
ctx               951 drivers/crypto/ccree/cc_aead.c 	set_cipher_mode(&desc[idx], ctx->cipher_mode);
ctx               958 drivers/crypto/ccree/cc_aead.c 	set_flow_mode(&desc[idx], ctx->flow_mode);
ctx               959 drivers/crypto/ccree/cc_aead.c 	if (ctx->flow_mode == S_DIN_to_AES) {
ctx               960 drivers/crypto/ccree/cc_aead.c 		set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx               961 drivers/crypto/ccree/cc_aead.c 			     ((ctx->enc_keylen == 24) ? CC_AES_KEY_SIZE_MAX :
ctx               962 drivers/crypto/ccree/cc_aead.c 			      ctx->enc_keylen), NS_BIT);
ctx               963 drivers/crypto/ccree/cc_aead.c 		set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx               965 drivers/crypto/ccree/cc_aead.c 		set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx               966 drivers/crypto/ccree/cc_aead.c 			     ctx->enc_keylen, NS_BIT);
ctx               967 drivers/crypto/ccree/cc_aead.c 		set_key_size_des(&desc[idx], ctx->enc_keylen);
ctx               969 drivers/crypto/ccree/cc_aead.c 	set_cipher_mode(&desc[idx], ctx->cipher_mode);
ctx              1002 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1003 drivers/crypto/ccree/cc_aead.c 	unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx              1005 drivers/crypto/ccree/cc_aead.c 	unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx              1013 drivers/crypto/ccree/cc_aead.c 		     ctx->auth_state.hmac.ipad_opad_dma_addr, digest_size,
ctx              1022 drivers/crypto/ccree/cc_aead.c 	set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode),
ctx              1023 drivers/crypto/ccree/cc_aead.c 		     ctx->hash_len);
ctx              1035 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1052 drivers/crypto/ccree/cc_aead.c 		     ctx->auth_state.xcbc.xcbc_keys_dma_addr,
ctx              1065 drivers/crypto/ccree/cc_aead.c 		     (ctx->auth_state.xcbc.xcbc_keys_dma_addr +
ctx              1078 drivers/crypto/ccree/cc_aead.c 		     (ctx->auth_state.xcbc.xcbc_keys_dma_addr +
ctx              1111 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1112 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_handle *aead_handle = ctx->drvdata->aead_handle;
ctx              1113 drivers/crypto/ccree/cc_aead.c 	unsigned int hash_mode = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx              1115 drivers/crypto/ccree/cc_aead.c 	unsigned int digest_size = (ctx->auth_mode == DRV_HASH_SHA1) ?
ctx              1122 drivers/crypto/ccree/cc_aead.c 		      ctx->hash_len);
ctx              1142 drivers/crypto/ccree/cc_aead.c 		     (ctx->auth_state.hmac.ipad_opad_dma_addr + digest_size),
ctx              1151 drivers/crypto/ccree/cc_aead.c 	set_din_sram(&desc[idx], cc_digest_len_addr(ctx->drvdata, hash_mode),
ctx              1152 drivers/crypto/ccree/cc_aead.c 		     ctx->hash_len);
ctx              1173 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1174 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1180 drivers/crypto/ccree/cc_aead.c 			(unsigned int)ctx->drvdata->mlli_sram_addr,
ctx              1188 drivers/crypto/ccree/cc_aead.c 			      ctx->drvdata->mlli_sram_addr,
ctx              1224 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1228 drivers/crypto/ccree/cc_aead.c 		cc_get_data_flow(direct, ctx->flow_mode,
ctx              1277 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1281 drivers/crypto/ccree/cc_aead.c 		cc_get_data_flow(direct, ctx->flow_mode,
ctx              1321 drivers/crypto/ccree/cc_aead.c static int validate_data_size(struct cc_aead_ctx *ctx,
ctx              1326 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1329 drivers/crypto/ccree/cc_aead.c 			(req->cryptlen - ctx->authsize) : req->cryptlen;
ctx              1332 drivers/crypto/ccree/cc_aead.c 	    req->cryptlen < ctx->authsize)
ctx              1337 drivers/crypto/ccree/cc_aead.c 	switch (ctx->flow_mode) {
ctx              1339 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_CBC &&
ctx              1342 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_CCM)
ctx              1344 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_GCTR) {
ctx              1353 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_CTR &&
ctx              1365 drivers/crypto/ccree/cc_aead.c 		dev_err(dev, "Unexpected flow mode (%d)\n", ctx->flow_mode);
ctx              1423 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1440 drivers/crypto/ccree/cc_aead.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx              1441 drivers/crypto/ccree/cc_aead.c 		     ((ctx->enc_keylen == 24) ?  CC_AES_KEY_SIZE_MAX :
ctx              1442 drivers/crypto/ccree/cc_aead.c 		      ctx->enc_keylen), NS_BIT);
ctx              1443 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1452 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1463 drivers/crypto/ccree/cc_aead.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx              1464 drivers/crypto/ccree/cc_aead.c 		     ((ctx->enc_keylen == 24) ?  CC_AES_KEY_SIZE_MAX :
ctx              1465 drivers/crypto/ccree/cc_aead.c 		      ctx->enc_keylen), NS_BIT);
ctx              1466 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1476 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1504 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[idx], req_ctx->mac_buf_dma_addr, ctx->authsize,
ctx              1518 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1531 drivers/crypto/ccree/cc_aead.c 		     ctx->authsize, NS_BIT);
ctx              1532 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[idx], mac_result, ctx->authsize, NS_BIT, 1);
ctx              1533 drivers/crypto/ccree/cc_aead.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1544 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1545 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1553 drivers/crypto/ccree/cc_aead.c 	unsigned int m = ctx->authsize;  /* This is M' of RFC 3610. */
ctx              1560 drivers/crypto/ccree/cc_aead.c 				(req->cryptlen - ctx->authsize);
ctx              1603 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1616 drivers/crypto/ccree/cc_aead.c 	memcpy(areq_ctx->ctr_iv + CCM_BLOCK_NONCE_OFFSET, ctx->ctr_nonce,
ctx              1628 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1636 drivers/crypto/ccree/cc_aead.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx              1637 drivers/crypto/ccree/cc_aead.c 		     ctx->enc_keylen, NS_BIT);
ctx              1638 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1706 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1714 drivers/crypto/ccree/cc_aead.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->enckey_dma_addr,
ctx              1715 drivers/crypto/ccree/cc_aead.c 		     ctx->enc_keylen, NS_BIT);
ctx              1716 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1725 drivers/crypto/ccree/cc_aead.c 		set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1743 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1776 drivers/crypto/ccree/cc_aead.c 	set_key_size_aes(&desc[idx], ctx->enc_keylen);
ctx              1795 drivers/crypto/ccree/cc_aead.c 	set_dout_dlli(&desc[idx], mac_result, ctx->authsize, NS_BIT, 1);
ctx              1796 drivers/crypto/ccree/cc_aead.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1843 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1845 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1850 drivers/crypto/ccree/cc_aead.c 				(req->cryptlen - ctx->authsize);
ctx              1854 drivers/crypto/ccree/cc_aead.c 		__func__, cryptlen, req_ctx->assoclen, ctx->authsize);
ctx              1893 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1897 drivers/crypto/ccree/cc_aead.c 	       ctx->ctr_nonce, GCM_BLOCK_RFC4_NONCE_SIZE);
ctx              1911 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1913 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1918 drivers/crypto/ccree/cc_aead.c 		ctx, req, req->iv, sg_virt(req->src), req->src->offset,
ctx              1924 drivers/crypto/ccree/cc_aead.c 	if (validate_data_size(ctx, direct, req)) {
ctx              1937 drivers/crypto/ccree/cc_aead.c 	areq_ctx->req_authsize = ctx->authsize;
ctx              1938 drivers/crypto/ccree/cc_aead.c 	areq_ctx->cipher_mode = ctx->cipher_mode;
ctx              1942 drivers/crypto/ccree/cc_aead.c 	if (ctx->cipher_mode == DRV_CIPHER_CTR) {
ctx              1946 drivers/crypto/ccree/cc_aead.c 		memcpy(areq_ctx->ctr_iv, ctx->ctr_nonce,
ctx              1957 drivers/crypto/ccree/cc_aead.c 	} else if ((ctx->cipher_mode == DRV_CIPHER_CCM) ||
ctx              1958 drivers/crypto/ccree/cc_aead.c 		   (ctx->cipher_mode == DRV_CIPHER_GCTR)) {
ctx              1969 drivers/crypto/ccree/cc_aead.c 	if (ctx->cipher_mode == DRV_CIPHER_CCM) {
ctx              1980 drivers/crypto/ccree/cc_aead.c 	if (ctx->cipher_mode == DRV_CIPHER_GCTR) {
ctx              1989 drivers/crypto/ccree/cc_aead.c 	rc = cc_map_aead_request(ctx->drvdata, req);
ctx              2001 drivers/crypto/ccree/cc_aead.c 	switch (ctx->auth_mode) {
ctx              2010 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_CCM)
ctx              2012 drivers/crypto/ccree/cc_aead.c 		if (ctx->cipher_mode == DRV_CIPHER_GCTR)
ctx              2016 drivers/crypto/ccree/cc_aead.c 		dev_err(dev, "Unsupported authenc (%d)\n", ctx->auth_mode);
ctx              2024 drivers/crypto/ccree/cc_aead.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, seq_len, &req->base);
ctx              2062 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2063 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2111 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2112 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2141 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2142 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2150 drivers/crypto/ccree/cc_aead.c 	memcpy(ctx->ctr_nonce, key + keylen, 4);
ctx              2158 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2159 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2167 drivers/crypto/ccree/cc_aead.c 	memcpy(ctx->ctr_nonce, key + keylen, 4);
ctx              2194 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(authenc);
ctx              2195 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2214 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(authenc);
ctx              2215 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2230 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2231 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2261 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2262 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2295 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2296 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2326 drivers/crypto/ccree/cc_aead.c 	struct cc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              2327 drivers/crypto/ccree/cc_aead.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
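The cc_aead_setkey() lines above (around cc_aead.c:559-588) split an authenc() key blob into its cipher and auth halves, and for CTR mode additionally peel the RFC 3686 nonce off the tail of the cipher key. The split itself is a standard helper from crypto/authenc.h; a minimal sketch, assuming an authenc-style key layout (the demo_* wrapper is illustrative):

#include <crypto/authenc.h>
#include <linux/errno.h>
#include <linux/types.h>

static int demo_split_authenc_key(const u8 *key, unsigned int keylen)
{
	struct crypto_authenc_keys keys;

	/* Parses the rtattr-encoded blob into authkey/enckey pointers. */
	if (crypto_authenc_extractkeys(&keys, key, keylen))
		return -EINVAL;

	/*
	 * keys.authkey/keys.authkeylen feed the HMAC or XCBC setup;
	 * keys.enckey/keys.enckeylen program the block cipher. For
	 * rfc3686(ctr(aes)) the last CTR_RFC3686_NONCE_SIZE bytes of
	 * the enc key are the per-key nonce, as in the driver above.
	 */
	return 0;
}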
ctx               369 drivers/crypto/ccree/cc_buffer_mgr.c void cc_unmap_cipher_request(struct device *dev, void *ctx,
ctx               373 drivers/crypto/ccree/cc_buffer_mgr.c 	struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx;
ctx               398 drivers/crypto/ccree/cc_buffer_mgr.c int cc_map_cipher_request(struct cc_drvdata *drvdata, void *ctx,
ctx               403 drivers/crypto/ccree/cc_buffer_mgr.c 	struct cipher_req_ctx *req_ctx = (struct cipher_req_ctx *)ctx;
ctx              1202 drivers/crypto/ccree/cc_buffer_mgr.c int cc_map_hash_request_final(struct cc_drvdata *drvdata, void *ctx,
ctx              1206 drivers/crypto/ccree/cc_buffer_mgr.c 	struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
ctx              1283 drivers/crypto/ccree/cc_buffer_mgr.c int cc_map_hash_request_update(struct cc_drvdata *drvdata, void *ctx,
ctx              1287 drivers/crypto/ccree/cc_buffer_mgr.c 	struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
ctx              1395 drivers/crypto/ccree/cc_buffer_mgr.c void cc_unmap_hash_request(struct device *dev, void *ctx,
ctx              1398 drivers/crypto/ccree/cc_buffer_mgr.c 	struct ahash_req_ctx *areq_ctx = (struct ahash_req_ctx *)ctx;
ctx                44 drivers/crypto/ccree/cc_buffer_mgr.h int cc_map_cipher_request(struct cc_drvdata *drvdata, void *ctx,
ctx                49 drivers/crypto/ccree/cc_buffer_mgr.h void cc_unmap_cipher_request(struct device *dev, void *ctx, unsigned int ivsize,
ctx                56 drivers/crypto/ccree/cc_buffer_mgr.h int cc_map_hash_request_final(struct cc_drvdata *drvdata, void *ctx,
ctx                60 drivers/crypto/ccree/cc_buffer_mgr.h int cc_map_hash_request_update(struct cc_drvdata *drvdata, void *ctx,
ctx                64 drivers/crypto/ccree/cc_buffer_mgr.h void cc_unmap_hash_request(struct device *dev, void *ctx,
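The cc_buffer_mgr entries take an opaque void *ctx, cast it back to the per-request context, and map the request's scatterlists for DMA before the descriptors run. The core mapping step, sketched with the generic DMA API; the demo_* names are placeholders for the driver's richer map/unmap logic:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

static int demo_map_req_sg(struct device *dev, struct scatterlist *sg,
			   int nents)
{
	/* dma_map_sg() returns the number of mapped entries, 0 on failure. */
	int mapped = dma_map_sg(dev, sg, nents, DMA_BIDIRECTIONAL);

	return mapped ? mapped : -ENOMEM;
}

static void demo_unmap_req_sg(struct device *dev, struct scatterlist *sg,
			      int nents)
{
	dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
}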
ctx                70 drivers/crypto/ccree/cc_debugfs.c 	struct cc_debugfs_ctx *ctx;
ctx                73 drivers/crypto/ccree/cc_debugfs.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx                74 drivers/crypto/ccree/cc_debugfs.c 	if (!ctx)
ctx                85 drivers/crypto/ccree/cc_debugfs.c 	ctx->dir = debugfs_create_dir(drvdata->plat_dev->name, cc_debugfs_dir);
ctx                87 drivers/crypto/ccree/cc_debugfs.c 	debugfs_create_regset32("regs", 0400, ctx->dir, regset);
ctx                88 drivers/crypto/ccree/cc_debugfs.c 	debugfs_create_bool("coherent", 0400, ctx->dir, &drvdata->coherent);
ctx               106 drivers/crypto/ccree/cc_debugfs.c 	debugfs_create_regset32("version", 0400, ctx->dir, verset);
ctx               109 drivers/crypto/ccree/cc_debugfs.c 	drvdata->debugfs = ctx;
ctx               115 drivers/crypto/ccree/cc_debugfs.c 	struct cc_debugfs_ctx *ctx = (struct cc_debugfs_ctx *)drvdata->debugfs;
ctx               117 drivers/crypto/ccree/cc_debugfs.c 	debugfs_remove_recursive(ctx->dir);
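The cc_debugfs.c entries show the conventional devm + debugfs setup: allocate a small ctx, create a directory named after the platform device, hang regset and bool files off it, and tear the whole tree down with one recursive remove. A minimal sketch with the real debugfs API; the ctx struct and demo_* names are illustrative:

#include <linux/debugfs.h>
#include <linux/device.h>

struct demo_debugfs_ctx {
	struct dentry *dir;
};

static void demo_debugfs_init(struct demo_debugfs_ctx *ctx, const char *name,
			      struct debugfs_regset32 *regset, bool *coherent)
{
	/* NULL parent places the directory at the debugfs root. */
	ctx->dir = debugfs_create_dir(name, NULL);
	debugfs_create_regset32("regs", 0400, ctx->dir, regset);
	debugfs_create_bool("coherent", 0400, ctx->dir, coherent);
}

static void demo_debugfs_fini(struct demo_debugfs_ctx *ctx)
{
	/* Removes the directory and every file created under it. */
	debugfs_remove_recursive(ctx->dir);
}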
ctx                96 drivers/crypto/ccree/cc_hash.c static void cc_set_desc(struct ahash_req_ctx *areq_ctx, struct cc_hash_ctx *ctx,
ctx               129 drivers/crypto/ccree/cc_hash.c 			struct cc_hash_ctx *ctx)
ctx               131 drivers/crypto/ccree/cc_hash.c 	bool is_hmac = ctx->is_hmac;
ctx               136 drivers/crypto/ccree/cc_hash.c 		if (ctx->hw_mode != DRV_CIPHER_XCBC_MAC &&
ctx               137 drivers/crypto/ccree/cc_hash.c 		    ctx->hw_mode != DRV_CIPHER_CMAC) {
ctx               138 drivers/crypto/ccree/cc_hash.c 			dma_sync_single_for_cpu(dev, ctx->digest_buff_dma_addr,
ctx               139 drivers/crypto/ccree/cc_hash.c 						ctx->inter_digestsize,
ctx               142 drivers/crypto/ccree/cc_hash.c 			memcpy(state->digest_buff, ctx->digest_buff,
ctx               143 drivers/crypto/ccree/cc_hash.c 			       ctx->inter_digestsize);
ctx               144 drivers/crypto/ccree/cc_hash.c 			if (ctx->hash_mode == DRV_HASH_SHA512 ||
ctx               145 drivers/crypto/ccree/cc_hash.c 			    ctx->hash_mode == DRV_HASH_SHA384)
ctx               148 drivers/crypto/ccree/cc_hash.c 				       ctx->hash_len);
ctx               152 drivers/crypto/ccree/cc_hash.c 				       ctx->hash_len);
ctx               155 drivers/crypto/ccree/cc_hash.c 		if (ctx->hash_mode != DRV_HASH_NULL) {
ctx               157 drivers/crypto/ccree/cc_hash.c 						ctx->opad_tmp_keys_dma_addr,
ctx               158 drivers/crypto/ccree/cc_hash.c 						ctx->inter_digestsize,
ctx               161 drivers/crypto/ccree/cc_hash.c 			       ctx->opad_tmp_keys_buff, ctx->inter_digestsize);
ctx               165 drivers/crypto/ccree/cc_hash.c 		const void *larval = cc_larval_digest(dev, ctx->hash_mode);
ctx               167 drivers/crypto/ccree/cc_hash.c 		memcpy(state->digest_buff, larval, ctx->inter_digestsize);
ctx               172 drivers/crypto/ccree/cc_hash.c 		      struct cc_hash_ctx *ctx)
ctx               174 drivers/crypto/ccree/cc_hash.c 	bool is_hmac = ctx->is_hmac;
ctx               178 drivers/crypto/ccree/cc_hash.c 			       ctx->inter_digestsize, DMA_BIDIRECTIONAL);
ctx               181 drivers/crypto/ccree/cc_hash.c 			ctx->inter_digestsize, state->digest_buff);
ctx               185 drivers/crypto/ccree/cc_hash.c 		ctx->inter_digestsize, state->digest_buff,
ctx               188 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode != DRV_CIPHER_XCBC_MAC) {
ctx               202 drivers/crypto/ccree/cc_hash.c 	if (is_hmac && ctx->hash_mode != DRV_HASH_NULL) {
ctx               205 drivers/crypto/ccree/cc_hash.c 				       ctx->inter_digestsize,
ctx               209 drivers/crypto/ccree/cc_hash.c 				ctx->inter_digestsize,
ctx               214 drivers/crypto/ccree/cc_hash.c 			ctx->inter_digestsize, state->opad_digest_buff,
ctx               229 drivers/crypto/ccree/cc_hash.c 				 ctx->inter_digestsize, DMA_BIDIRECTIONAL);
ctx               237 drivers/crypto/ccree/cc_hash.c 			 struct cc_hash_ctx *ctx)
ctx               241 drivers/crypto/ccree/cc_hash.c 				 ctx->inter_digestsize, DMA_BIDIRECTIONAL);
ctx               255 drivers/crypto/ccree/cc_hash.c 				 ctx->inter_digestsize, DMA_BIDIRECTIONAL);
ctx               281 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               288 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               299 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               308 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               319 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               328 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               339 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               344 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               348 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx               352 drivers/crypto/ccree/cc_hash.c 	cc_set_endianity(ctx->hash_mode, &desc[idx]);
ctx               363 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               368 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               372 drivers/crypto/ccree/cc_hash.c 	cc_set_endianity(ctx->hash_mode, &desc[idx]);
ctx               378 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               380 drivers/crypto/ccree/cc_hash.c 		     ctx->inter_digestsize, NS_BIT);
ctx               387 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               389 drivers/crypto/ccree/cc_hash.c 		     cc_digest_len_addr(ctx->drvdata, ctx->hash_mode),
ctx               390 drivers/crypto/ccree/cc_hash.c 		     ctx->hash_len);
ctx               416 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               421 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               422 drivers/crypto/ccree/cc_hash.c 	bool is_hmac = ctx->is_hmac;
ctx               426 drivers/crypto/ccree/cc_hash.c 		cc_larval_digest_addr(ctx->drvdata, ctx->hash_mode);
ctx               434 drivers/crypto/ccree/cc_hash.c 	cc_init_req(dev, state, ctx);
ctx               436 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx               443 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               447 drivers/crypto/ccree/cc_hash.c 	if (cc_map_hash_request_final(ctx->drvdata, state, src, nbytes, 1,
ctx               451 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               463 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               466 drivers/crypto/ccree/cc_hash.c 			     ctx->inter_digestsize, NS_BIT);
ctx               469 drivers/crypto/ccree/cc_hash.c 			     ctx->inter_digestsize);
ctx               477 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               482 drivers/crypto/ccree/cc_hash.c 			     ctx->hash_len, NS_BIT);
ctx               484 drivers/crypto/ccree/cc_hash.c 		set_din_const(&desc[idx], 0, ctx->hash_len);
ctx               494 drivers/crypto/ccree/cc_hash.c 	cc_set_desc(state, ctx, DIN_HASH, desc, false, &idx);
ctx               499 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               501 drivers/crypto/ccree/cc_hash.c 			      ctx->hash_len, NS_BIT, 0);
ctx               512 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx               517 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               522 drivers/crypto/ccree/cc_hash.c static int cc_restore_hash(struct cc_hw_desc *desc, struct cc_hash_ctx *ctx,
ctx               527 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               529 drivers/crypto/ccree/cc_hash.c 		     ctx->inter_digestsize, NS_BIT);
ctx               536 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               539 drivers/crypto/ccree/cc_hash.c 		     ctx->hash_len, NS_BIT);
ctx               544 drivers/crypto/ccree/cc_hash.c 	cc_set_desc(state, ctx, DIN_HASH, desc, false, &idx);
ctx               553 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               557 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               564 drivers/crypto/ccree/cc_hash.c 	dev_dbg(dev, "===== %s-update (%d) ====\n", ctx->is_hmac ?
ctx               572 drivers/crypto/ccree/cc_hash.c 	rc = cc_map_hash_request_update(ctx->drvdata, state, src, nbytes,
ctx               585 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx               595 drivers/crypto/ccree/cc_hash.c 	idx = cc_restore_hash(desc, ctx, state, idx);
ctx               599 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               601 drivers/crypto/ccree/cc_hash.c 		      ctx->inter_digestsize, NS_BIT, 0);
ctx               608 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               610 drivers/crypto/ccree/cc_hash.c 		      ctx->hash_len, NS_BIT, 1);
ctx               611 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx               616 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx               620 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               629 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               634 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               635 drivers/crypto/ccree/cc_hash.c 	bool is_hmac = ctx->is_hmac;
ctx               645 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx               650 drivers/crypto/ccree/cc_hash.c 	if (cc_map_hash_request_final(ctx->drvdata, state, src, nbytes, update,
ctx               653 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               659 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               667 drivers/crypto/ccree/cc_hash.c 	idx = cc_restore_hash(desc, ctx, state, idx);
ctx               672 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], ctx->hw_mode, ctx->hash_mode);
ctx               674 drivers/crypto/ccree/cc_hash.c 		      ctx->hash_len, NS_BIT, 0);
ctx               684 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx               689 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx               709 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               710 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               714 drivers/crypto/ccree/cc_hash.c 	cc_init_req(dev, state, ctx);
ctx               724 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = NULL;
ctx               732 drivers/crypto/ccree/cc_hash.c 	ctx = crypto_ahash_ctx(ahash);
ctx               733 drivers/crypto/ccree/cc_hash.c 	dev = drvdata_to_dev(ctx->drvdata);
ctx               739 drivers/crypto/ccree/cc_hash.c 	larval_addr = cc_larval_digest_addr(ctx->drvdata, ctx->hash_mode);
ctx               744 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = keylen;
ctx               745 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.key_dma_addr = 0;
ctx               746 drivers/crypto/ccree/cc_hash.c 	ctx->is_hmac = true;
ctx               747 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.key = NULL;
ctx               750 drivers/crypto/ccree/cc_hash.c 		ctx->key_params.key = kmemdup(key, keylen, GFP_KERNEL);
ctx               751 drivers/crypto/ccree/cc_hash.c 		if (!ctx->key_params.key)
ctx               754 drivers/crypto/ccree/cc_hash.c 		ctx->key_params.key_dma_addr =
ctx               755 drivers/crypto/ccree/cc_hash.c 			dma_map_single(dev, (void *)ctx->key_params.key, keylen,
ctx               757 drivers/crypto/ccree/cc_hash.c 		if (dma_mapping_error(dev, ctx->key_params.key_dma_addr)) {
ctx               759 drivers/crypto/ccree/cc_hash.c 				ctx->key_params.key, keylen);
ctx               760 drivers/crypto/ccree/cc_hash.c 			kzfree(ctx->key_params.key);
ctx               764 drivers/crypto/ccree/cc_hash.c 			&ctx->key_params.key_dma_addr, ctx->key_params.keylen);
ctx               769 drivers/crypto/ccree/cc_hash.c 			set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               771 drivers/crypto/ccree/cc_hash.c 				     ctx->inter_digestsize);
ctx               778 drivers/crypto/ccree/cc_hash.c 			set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               779 drivers/crypto/ccree/cc_hash.c 			set_din_const(&desc[idx], 0, ctx->hash_len);
ctx               787 drivers/crypto/ccree/cc_hash.c 				     ctx->key_params.key_dma_addr, keylen,
ctx               794 drivers/crypto/ccree/cc_hash.c 			set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               795 drivers/crypto/ccree/cc_hash.c 			set_dout_dlli(&desc[idx], ctx->opad_tmp_keys_dma_addr,
ctx               800 drivers/crypto/ccree/cc_hash.c 			cc_set_endianity(ctx->hash_mode, &desc[idx]);
ctx               807 drivers/crypto/ccree/cc_hash.c 				      (ctx->opad_tmp_keys_dma_addr +
ctx               814 drivers/crypto/ccree/cc_hash.c 				     ctx->key_params.key_dma_addr, keylen,
ctx               817 drivers/crypto/ccree/cc_hash.c 			set_dout_dlli(&desc[idx], ctx->opad_tmp_keys_dma_addr,
ctx               827 drivers/crypto/ccree/cc_hash.c 					      (ctx->opad_tmp_keys_dma_addr +
ctx               837 drivers/crypto/ccree/cc_hash.c 		set_dout_dlli(&desc[idx], (ctx->opad_tmp_keys_dma_addr),
ctx               842 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, idx);
ctx               852 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               853 drivers/crypto/ccree/cc_hash.c 		set_din_sram(&desc[idx], larval_addr, ctx->inter_digestsize);
ctx               860 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               861 drivers/crypto/ccree/cc_hash.c 		set_din_const(&desc[idx], 0, ctx->hash_len);
ctx               869 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               876 drivers/crypto/ccree/cc_hash.c 		set_din_type(&desc[idx], DMA_DLLI, ctx->opad_tmp_keys_dma_addr,
ctx               878 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               887 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx               889 drivers/crypto/ccree/cc_hash.c 			set_dout_dlli(&desc[idx], ctx->opad_tmp_keys_dma_addr,
ctx               890 drivers/crypto/ccree/cc_hash.c 				      ctx->inter_digestsize, NS_BIT, 0);
ctx               892 drivers/crypto/ccree/cc_hash.c 			set_dout_dlli(&desc[idx], ctx->digest_buff_dma_addr,
ctx               893 drivers/crypto/ccree/cc_hash.c 				      ctx->inter_digestsize, NS_BIT, 0);
ctx               899 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, idx);
ctx               905 drivers/crypto/ccree/cc_hash.c 	if (ctx->key_params.key_dma_addr) {
ctx               906 drivers/crypto/ccree/cc_hash.c 		dma_unmap_single(dev, ctx->key_params.key_dma_addr,
ctx               907 drivers/crypto/ccree/cc_hash.c 				 ctx->key_params.keylen, DMA_TO_DEVICE);
ctx               909 drivers/crypto/ccree/cc_hash.c 			&ctx->key_params.key_dma_addr, ctx->key_params.keylen);
ctx               912 drivers/crypto/ccree/cc_hash.c 	kzfree(ctx->key_params.key);
ctx               921 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               922 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx               938 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = keylen;
ctx               940 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.key = kmemdup(key, keylen, GFP_KERNEL);
ctx               941 drivers/crypto/ccree/cc_hash.c 	if (!ctx->key_params.key)
ctx               944 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.key_dma_addr =
ctx               945 drivers/crypto/ccree/cc_hash.c 		dma_map_single(dev, ctx->key_params.key, keylen, DMA_TO_DEVICE);
ctx               946 drivers/crypto/ccree/cc_hash.c 	if (dma_mapping_error(dev, ctx->key_params.key_dma_addr)) {
ctx               949 drivers/crypto/ccree/cc_hash.c 		kzfree(ctx->key_params.key);
ctx               953 drivers/crypto/ccree/cc_hash.c 		&ctx->key_params.key_dma_addr, ctx->key_params.keylen);
ctx               955 drivers/crypto/ccree/cc_hash.c 	ctx->is_hmac = true;
ctx               958 drivers/crypto/ccree/cc_hash.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->key_params.key_dma_addr,
ctx               971 drivers/crypto/ccree/cc_hash.c 		      (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K1_OFFSET),
ctx               979 drivers/crypto/ccree/cc_hash.c 		      (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K2_OFFSET),
ctx               987 drivers/crypto/ccree/cc_hash.c 		      (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K3_OFFSET),
ctx               991 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_sync_request(ctx->drvdata, &cc_req, desc, idx);
ctx               996 drivers/crypto/ccree/cc_hash.c 	dma_unmap_single(dev, ctx->key_params.key_dma_addr,
ctx               997 drivers/crypto/ccree/cc_hash.c 			 ctx->key_params.keylen, DMA_TO_DEVICE);
ctx               999 drivers/crypto/ccree/cc_hash.c 		&ctx->key_params.key_dma_addr, ctx->key_params.keylen);
ctx              1001 drivers/crypto/ccree/cc_hash.c 	kzfree(ctx->key_params.key);
ctx              1009 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1010 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1014 drivers/crypto/ccree/cc_hash.c 	ctx->is_hmac = true;
ctx              1025 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = keylen;
ctx              1029 drivers/crypto/ccree/cc_hash.c 	dma_sync_single_for_cpu(dev, ctx->opad_tmp_keys_dma_addr,
ctx              1032 drivers/crypto/ccree/cc_hash.c 	memcpy(ctx->opad_tmp_keys_buff, key, keylen);
ctx              1034 drivers/crypto/ccree/cc_hash.c 		memset(ctx->opad_tmp_keys_buff + 24, 0,
ctx              1038 drivers/crypto/ccree/cc_hash.c 	dma_sync_single_for_device(dev, ctx->opad_tmp_keys_dma_addr,
ctx              1041 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = keylen;
ctx              1046 drivers/crypto/ccree/cc_hash.c static void cc_free_ctx(struct cc_hash_ctx *ctx)
ctx              1048 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1050 drivers/crypto/ccree/cc_hash.c 	if (ctx->digest_buff_dma_addr) {
ctx              1051 drivers/crypto/ccree/cc_hash.c 		dma_unmap_single(dev, ctx->digest_buff_dma_addr,
ctx              1052 drivers/crypto/ccree/cc_hash.c 				 sizeof(ctx->digest_buff), DMA_BIDIRECTIONAL);
ctx              1054 drivers/crypto/ccree/cc_hash.c 			&ctx->digest_buff_dma_addr);
ctx              1055 drivers/crypto/ccree/cc_hash.c 		ctx->digest_buff_dma_addr = 0;
ctx              1057 drivers/crypto/ccree/cc_hash.c 	if (ctx->opad_tmp_keys_dma_addr) {
ctx              1058 drivers/crypto/ccree/cc_hash.c 		dma_unmap_single(dev, ctx->opad_tmp_keys_dma_addr,
ctx              1059 drivers/crypto/ccree/cc_hash.c 				 sizeof(ctx->opad_tmp_keys_buff),
ctx              1062 drivers/crypto/ccree/cc_hash.c 			&ctx->opad_tmp_keys_dma_addr);
ctx              1063 drivers/crypto/ccree/cc_hash.c 		ctx->opad_tmp_keys_dma_addr = 0;
ctx              1066 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = 0;
ctx              1069 drivers/crypto/ccree/cc_hash.c static int cc_alloc_ctx(struct cc_hash_ctx *ctx)
ctx              1071 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1073 drivers/crypto/ccree/cc_hash.c 	ctx->key_params.keylen = 0;
ctx              1075 drivers/crypto/ccree/cc_hash.c 	ctx->digest_buff_dma_addr =
ctx              1076 drivers/crypto/ccree/cc_hash.c 		dma_map_single(dev, (void *)ctx->digest_buff,
ctx              1077 drivers/crypto/ccree/cc_hash.c 			       sizeof(ctx->digest_buff), DMA_BIDIRECTIONAL);
ctx              1078 drivers/crypto/ccree/cc_hash.c 	if (dma_mapping_error(dev, ctx->digest_buff_dma_addr)) {
ctx              1080 drivers/crypto/ccree/cc_hash.c 			sizeof(ctx->digest_buff), ctx->digest_buff);
ctx              1084 drivers/crypto/ccree/cc_hash.c 		sizeof(ctx->digest_buff), ctx->digest_buff,
ctx              1085 drivers/crypto/ccree/cc_hash.c 		&ctx->digest_buff_dma_addr);
ctx              1087 drivers/crypto/ccree/cc_hash.c 	ctx->opad_tmp_keys_dma_addr =
ctx              1088 drivers/crypto/ccree/cc_hash.c 		dma_map_single(dev, (void *)ctx->opad_tmp_keys_buff,
ctx              1089 drivers/crypto/ccree/cc_hash.c 			       sizeof(ctx->opad_tmp_keys_buff),
ctx              1091 drivers/crypto/ccree/cc_hash.c 	if (dma_mapping_error(dev, ctx->opad_tmp_keys_dma_addr)) {
ctx              1093 drivers/crypto/ccree/cc_hash.c 			sizeof(ctx->opad_tmp_keys_buff),
ctx              1094 drivers/crypto/ccree/cc_hash.c 			ctx->opad_tmp_keys_buff);
ctx              1098 drivers/crypto/ccree/cc_hash.c 		sizeof(ctx->opad_tmp_keys_buff), ctx->opad_tmp_keys_buff,
ctx              1099 drivers/crypto/ccree/cc_hash.c 		&ctx->opad_tmp_keys_dma_addr);
ctx              1101 drivers/crypto/ccree/cc_hash.c 	ctx->is_hmac = false;
ctx              1105 drivers/crypto/ccree/cc_hash.c 	cc_free_ctx(ctx);
ctx              1111 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1113 drivers/crypto/ccree/cc_hash.c 	if (ctx->hash_mode == DRV_HASH_SM3)
ctx              1116 drivers/crypto/ccree/cc_hash.c 		return cc_get_default_hash_len(ctx->drvdata);
ctx              1121 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1132 drivers/crypto/ccree/cc_hash.c 	ctx->hash_mode = cc_alg->hash_mode;
ctx              1133 drivers/crypto/ccree/cc_hash.c 	ctx->hw_mode = cc_alg->hw_mode;
ctx              1134 drivers/crypto/ccree/cc_hash.c 	ctx->inter_digestsize = cc_alg->inter_digestsize;
ctx              1135 drivers/crypto/ccree/cc_hash.c 	ctx->drvdata = cc_alg->drvdata;
ctx              1136 drivers/crypto/ccree/cc_hash.c 	ctx->hash_len = cc_get_hash_len(tfm);
ctx              1137 drivers/crypto/ccree/cc_hash.c 	return cc_alloc_ctx(ctx);
ctx              1142 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1143 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1146 drivers/crypto/ccree/cc_hash.c 	cc_free_ctx(ctx);
ctx              1153 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1154 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1169 drivers/crypto/ccree/cc_hash.c 	rc = cc_map_hash_request_update(ctx->drvdata, state, req->src,
ctx              1182 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx              1187 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode == DRV_CIPHER_XCBC_MAC)
ctx              1192 drivers/crypto/ccree/cc_hash.c 	cc_set_desc(state, ctx, DIN_AES_DOUT, desc, true, &idx);
ctx              1196 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1198 drivers/crypto/ccree/cc_hash.c 		      ctx->inter_digestsize, NS_BIT, 1);
ctx              1199 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1208 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx              1212 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1221 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1222 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1232 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode == DRV_CIPHER_XCBC_MAC) {
ctx              1236 drivers/crypto/ccree/cc_hash.c 		key_size = (ctx->key_params.keylen == 24) ? AES_MAX_KEY_SIZE :
ctx              1237 drivers/crypto/ccree/cc_hash.c 			ctx->key_params.keylen;
ctx              1238 drivers/crypto/ccree/cc_hash.c 		key_len =  ctx->key_params.keylen;
ctx              1243 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx              1248 drivers/crypto/ccree/cc_hash.c 	if (cc_map_hash_request_final(ctx->drvdata, state, req->src,
ctx              1251 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1258 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1272 drivers/crypto/ccree/cc_hash.c 			     (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K1_OFFSET),
ctx              1297 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode == DRV_CIPHER_XCBC_MAC)
ctx              1304 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1310 drivers/crypto/ccree/cc_hash.c 		cc_set_desc(state, ctx, DIN_AES_DOUT, desc, false, &idx);
ctx              1323 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1326 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1329 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx              1334 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1343 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1344 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1359 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx              1364 drivers/crypto/ccree/cc_hash.c 	if (cc_map_hash_request_final(ctx->drvdata, state, req->src,
ctx              1367 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1373 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1381 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode == DRV_CIPHER_XCBC_MAC) {
ctx              1385 drivers/crypto/ccree/cc_hash.c 		key_len = ctx->key_params.keylen;
ctx              1391 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1397 drivers/crypto/ccree/cc_hash.c 		cc_set_desc(state, ctx, DIN_AES_DOUT, desc, false, &idx);
ctx              1405 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1408 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1411 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx              1416 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1425 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1426 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1437 drivers/crypto/ccree/cc_hash.c 	cc_init_req(dev, state, ctx);
ctx              1439 drivers/crypto/ccree/cc_hash.c 	if (cc_map_req(dev, state, ctx)) {
ctx              1445 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1449 drivers/crypto/ccree/cc_hash.c 	if (cc_map_hash_request_final(ctx->drvdata, state, req->src,
ctx              1452 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1460 drivers/crypto/ccree/cc_hash.c 	if (ctx->hw_mode == DRV_CIPHER_XCBC_MAC) {
ctx              1464 drivers/crypto/ccree/cc_hash.c 		key_len = ctx->key_params.keylen;
ctx              1470 drivers/crypto/ccree/cc_hash.c 		set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1476 drivers/crypto/ccree/cc_hash.c 		cc_set_desc(state, ctx, DIN_AES_DOUT, desc, false, &idx);
ctx              1483 drivers/crypto/ccree/cc_hash.c 	set_queue_last_ind(ctx->drvdata, &desc[idx]);
ctx              1487 drivers/crypto/ccree/cc_hash.c 	set_cipher_mode(&desc[idx], ctx->hw_mode);
ctx              1490 drivers/crypto/ccree/cc_hash.c 	rc = cc_send_request(ctx->drvdata, &cc_req, desc, idx, &req->base);
ctx              1495 drivers/crypto/ccree/cc_hash.c 		cc_unmap_req(dev, state, ctx);
ctx              1503 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1512 drivers/crypto/ccree/cc_hash.c 	memcpy(out, state->digest_buff, ctx->inter_digestsize);
ctx              1513 drivers/crypto/ccree/cc_hash.c 	out += ctx->inter_digestsize;
ctx              1515 drivers/crypto/ccree/cc_hash.c 	memcpy(out, state->digest_bytes_len, ctx->hash_len);
ctx              1516 drivers/crypto/ccree/cc_hash.c 	out += ctx->hash_len;
ctx              1529 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx              1530 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              1539 drivers/crypto/ccree/cc_hash.c 	cc_init_req(dev, state, ctx);
ctx              1541 drivers/crypto/ccree/cc_hash.c 	memcpy(state->digest_buff, in, ctx->inter_digestsize);
ctx              1542 drivers/crypto/ccree/cc_hash.c 	in += ctx->inter_digestsize;
ctx              1544 drivers/crypto/ccree/cc_hash.c 	memcpy(state->digest_bytes_len, in, ctx->hash_len);
ctx              1545 drivers/crypto/ccree/cc_hash.c 	in += ctx->hash_len;
ctx              2130 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              2134 drivers/crypto/ccree/cc_hash.c 	set_din_type(&desc[idx], DMA_DLLI, (ctx->opad_tmp_keys_dma_addr +
ctx              2138 drivers/crypto/ccree/cc_hash.c 	set_hash_cipher_mode(&desc[idx], DRV_CIPHER_XCBC_MAC, ctx->hash_mode);
ctx              2147 drivers/crypto/ccree/cc_hash.c 		     (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K2_OFFSET),
ctx              2159 drivers/crypto/ccree/cc_hash.c 		     (ctx->opad_tmp_keys_dma_addr + XCBC_MAC_K3_OFFSET),
ctx              2187 drivers/crypto/ccree/cc_hash.c 	struct cc_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              2191 drivers/crypto/ccree/cc_hash.c 	set_din_type(&desc[idx], DMA_DLLI, ctx->opad_tmp_keys_dma_addr,
ctx              2192 drivers/crypto/ccree/cc_hash.c 		     ((ctx->key_params.keylen == 24) ? AES_MAX_KEY_SIZE :
ctx              2193 drivers/crypto/ccree/cc_hash.c 		      ctx->key_params.keylen), NS_BIT);
ctx              2197 drivers/crypto/ccree/cc_hash.c 	set_key_size_aes(&desc[idx], ctx->key_params.keylen);
ctx              2208 drivers/crypto/ccree/cc_hash.c 	set_key_size_aes(&desc[idx], ctx->key_params.keylen);
ctx              2215 drivers/crypto/ccree/cc_hash.c 			struct cc_hash_ctx *ctx, unsigned int flow_mode,
ctx              2220 drivers/crypto/ccree/cc_hash.c 	struct device *dev = drvdata_to_dev(ctx->drvdata);
ctx              2240 drivers/crypto/ccree/cc_hash.c 		set_dout_sram(&desc[idx], ctx->drvdata->mlli_sram_addr,
ctx              2247 drivers/crypto/ccree/cc_hash.c 			     ctx->drvdata->mlli_sram_addr,
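
Taken together, the cc_hash hits above trace one request shape: cc_map_req() maps the state, descriptors are built and pushed with cc_send_request(), and every failure after the mapping bounces through cc_unmap_req(). A minimal userspace sketch of that map/send/unwind shape, using hypothetical stand-in names rather than the driver's real API:

    #include <stdlib.h>

    /* Hypothetical stand-ins for cc_map_req()/cc_send_request()/cc_unmap_req(). */
    static int map_req(void **state)
    {
        *state = malloc(64);
        return *state ? 0 : -1;
    }

    static int send_request(void *state)
    {
        return state ? 0 : -1;      /* pretend the queue accepted it */
    }

    static void unmap_req(void *state)
    {
        free(state);
    }

    static int do_final(void)
    {
        void *state;
        int rc;

        if (map_req(&state))
            return -1;              /* nothing mapped yet: plain return */

        rc = send_request(state);
        if (rc) {
            unmap_req(state);       /* failure after mapping: unwind first */
            return rc;
        }
        unmap_req(state);           /* the real driver defers this to its
                                     * completion callback */
        return 0;
    }

    int main(void)
    {
        return do_final() ? 1 : 0;
    }
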
ctx                34 drivers/crypto/ccree/cc_sram_mgr.c 	struct cc_sram_ctx *ctx;
ctx                50 drivers/crypto/ccree/cc_sram_mgr.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx                52 drivers/crypto/ccree/cc_sram_mgr.c 	if (!ctx)
ctx                55 drivers/crypto/ccree/cc_sram_mgr.c 	ctx->sram_free_offset = start;
ctx                56 drivers/crypto/ccree/cc_sram_mgr.c 	drvdata->sram_mgr_handle = ctx;
ctx                99 drivers/crypto/chelsio/chcr_algo.c static inline  struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
ctx               101 drivers/crypto/chelsio/chcr_algo.c 	return ctx->crypto_ctx->aeadctx;
ctx               104 drivers/crypto/chelsio/chcr_algo.c static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
ctx               106 drivers/crypto/chelsio/chcr_algo.c 	return ctx->crypto_ctx->ablkctx;
ctx               109 drivers/crypto/chelsio/chcr_algo.c static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
ctx               111 drivers/crypto/chelsio/chcr_algo.c 	return ctx->crypto_ctx->hmacctx;
ctx               116 drivers/crypto/chelsio/chcr_algo.c 	return gctx->ctx->gcm;
ctx               121 drivers/crypto/chelsio/chcr_algo.c 	return gctx->ctx->authenc;
ctx               124 drivers/crypto/chelsio/chcr_algo.c static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
ctx               126 drivers/crypto/chelsio/chcr_algo.c 	return container_of(ctx->dev, struct uld_ctx, dev);
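
These chcr inlines are worth a second look: AEAD_CTX(), ABLK_CTX() and HMAC_CTX() are typed views into one shared crypto context, while ULD_CTX() walks back from an embedded member to its enclosing uld_ctx with container_of(). A compilable sketch of both idioms, all names hypothetical:

    #include <stddef.h>

    /* container_of(), reduced to what this sketch needs. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct aead_state   { int authsize; };
    struct cipher_state { int key_len; };

    struct crypto_state {               /* one allocation, several typed views */
        struct aead_state   aead;
        struct cipher_state ablk;
    };

    struct dev_core { int state; };

    struct uld_like_ctx {               /* the accessor's real target type */
        struct dev_core dev;
        int             lldi;
    };

    struct chcr_like_ctx {
        struct dev_core     *dev;       /* points at uld_like_ctx.dev */
        struct crypto_state *crypto_ctx;
    };

    static inline struct aead_state *AEAD_STATE(struct chcr_like_ctx *ctx)
    {
        return &ctx->crypto_ctx->aead;  /* typed view, like AEAD_CTX() */
    }

    static inline struct uld_like_ctx *ULD_LIKE_CTX(struct chcr_like_ctx *ctx)
    {
        return container_of(ctx->dev, struct uld_like_ctx, dev);
    }

    int main(void)
    {
        struct crypto_state cs = { .aead = { .authsize = 16 } };
        struct uld_like_ctx u  = { .lldi = 7 };
        struct chcr_like_ctx c = { .dev = &u.dev, .crypto_ctx = &cs };

        return (AEAD_STATE(&c)->authsize == 16 &&
                ULD_LIKE_CTX(&c)->lldi == 7) ? 0 : 1;
    }
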
ctx               718 drivers/crypto/chelsio/chcr_algo.c static inline void create_wreq(struct chcr_context *ctx,
ctx               727 drivers/crypto/chelsio/chcr_algo.c 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
ctx               728 drivers/crypto/chelsio/chcr_algo.c 	int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];
ctx               738 drivers/crypto/chelsio/chcr_algo.c 		FILL_WR_RX_Q_ID(ctx->tx_chan_id, qid,
ctx               739 drivers/crypto/chelsio/chcr_algo.c 				!!lcb, ctx->tx_qidx);
ctx               741 drivers/crypto/chelsio/chcr_algo.c 	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->tx_chan_id,
ctx              1361 drivers/crypto/chelsio/chcr_algo.c static int chcr_device_init(struct chcr_context *ctx)
ctx              1369 drivers/crypto/chelsio/chcr_algo.c 	if (!ctx->dev) {
ctx              1376 drivers/crypto/chelsio/chcr_algo.c 		ctx->dev = &u_ctx->dev;
ctx              1380 drivers/crypto/chelsio/chcr_algo.c 		spin_lock(&ctx->dev->lock_chcr_dev);
ctx              1381 drivers/crypto/chelsio/chcr_algo.c 		ctx->tx_chan_id = ctx->dev->tx_channel_id;
ctx              1382 drivers/crypto/chelsio/chcr_algo.c 		ctx->dev->tx_channel_id = !ctx->dev->tx_channel_id;
ctx              1383 drivers/crypto/chelsio/chcr_algo.c 		spin_unlock(&ctx->dev->lock_chcr_dev);
ctx              1384 drivers/crypto/chelsio/chcr_algo.c 		rxq_idx = ctx->tx_chan_id * rxq_perchan;
ctx              1386 drivers/crypto/chelsio/chcr_algo.c 		txq_idx = ctx->tx_chan_id * txq_perchan;
ctx              1388 drivers/crypto/chelsio/chcr_algo.c 		ctx->rx_qidx = rxq_idx;
ctx              1389 drivers/crypto/chelsio/chcr_algo.c 		ctx->tx_qidx = txq_idx;
ctx              1395 drivers/crypto/chelsio/chcr_algo.c 		ctx->pci_chan_id = txq_idx / txq_perchan;
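
chcr_device_init() above load-balances by flipping tx_channel_id under lock_chcr_dev, so consecutive tfms alternate channels, and then derives the rx/tx queue indices from the channel it drew. A userspace analogue of that toggle-under-lock assignment (all names hypothetical):

    #include <pthread.h>
    #include <stdio.h>

    static pthread_mutex_t dev_lock = PTHREAD_MUTEX_INITIALIZER;
    static int tx_channel_id;               /* flips 0 -> 1 -> 0 -> ... */

    struct chan_assign { int chan, rx_qidx, tx_qidx; };

    static struct chan_assign assign_channel(int rxq_perchan, int txq_perchan)
    {
        struct chan_assign a;

        pthread_mutex_lock(&dev_lock);
        a.chan = tx_channel_id;
        tx_channel_id = !tx_channel_id;     /* same toggle as chcr_device_init() */
        pthread_mutex_unlock(&dev_lock);

        a.rx_qidx = a.chan * rxq_perchan;   /* first queue of the chosen channel */
        a.tx_qidx = a.chan * txq_perchan;
        return a;
    }

    int main(void)
    {
        for (int i = 0; i < 4; i++) {
            struct chan_assign a = assign_channel(8, 8);
            printf("tfm %d -> chan %d rxq %d txq %d\n",
                   i, a.chan, a.rx_qidx, a.tx_qidx);
        }
        return 0;
    }
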
ctx              1404 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              1405 drivers/crypto/chelsio/chcr_algo.c 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
ctx              1421 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              1422 drivers/crypto/chelsio/chcr_algo.c 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
ctx              1440 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              1441 drivers/crypto/chelsio/chcr_algo.c 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
ctx              2051 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              2052 drivers/crypto/chelsio/chcr_algo.c 	struct adapter *adap = padap(ctx->dev);
ctx              2229 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              2230 drivers/crypto/chelsio/chcr_algo.c 	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
ctx              2244 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
ctx              2245 drivers/crypto/chelsio/chcr_algo.c 	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
ctx              2568 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = a_ctx(tfm);
ctx              2576 drivers/crypto/chelsio/chcr_algo.c 	dsgl_walk_end(&dsgl_walk, qid, ctx->pci_chan_id);
ctx              2609 drivers/crypto/chelsio/chcr_algo.c 	struct chcr_context *ctx = c_ctx(tfm);
ctx              2618 drivers/crypto/chelsio/chcr_algo.c 	dsgl_walk_end(&dsgl_walk, qid, ctx->pci_chan_id);
ctx               236 drivers/crypto/chelsio/chcr_crypto.h 	struct	__aead_ctx ctx[0];
ctx               599 drivers/crypto/chelsio/chtls/chtls_cm.c 	struct listen_ctx *ctx;
ctx               622 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               623 drivers/crypto/chelsio/chtls/chtls_cm.c 	if (!ctx)
ctx               627 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx->lsk = sk;
ctx               628 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx->cdev = cdev;
ctx               629 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx->state = T4_LISTEN_START_PENDING;
ctx               630 drivers/crypto/chelsio/chtls/chtls_cm.c 	skb_queue_head_init(&ctx->synq);
ctx               632 drivers/crypto/chelsio/chtls/chtls_cm.c 	stid = cxgb4_alloc_stid(cdev->tids, sk->sk_family, ctx);
ctx               655 drivers/crypto/chelsio/chtls/chtls_cm.c 	kfree(ctx);
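
The chtls listen-start lines show the usual cookie lifetime: kmalloc the listen_ctx, initialise its fields, hand it to cxgb4_alloc_stid() as the lookup cookie, and kfree() it if registration fails. A compilable sketch of that register-or-free shape, with a trivial table standing in for the stid allocator:

    #include <stdlib.h>

    struct listen_state {
        int  state;
        void *owner;
    };

    #define NSTIDS 4
    static void *stid_tbl[NSTIDS];

    /* Stand-in for cxgb4_alloc_stid(): store the cookie, return its slot. */
    static int alloc_stid(void *cookie)
    {
        for (int i = 0; i < NSTIDS; i++)
            if (!stid_tbl[i]) { stid_tbl[i] = cookie; return i; }
        return -1;
    }

    static int listen_start(void *owner)
    {
        struct listen_state *ctx = malloc(sizeof(*ctx));
        int stid;

        if (!ctx)
            return -1;
        ctx->state = 0;
        ctx->owner = owner;

        stid = alloc_stid(ctx);
        if (stid < 0) {
            free(ctx);      /* registration failed; nothing else owns ctx yet */
            return -1;
        }
        return stid;        /* a later lookup_stid(stid) returns ctx */
    }

    int main(void) { return listen_start(stid_tbl) < 0; }
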
ctx              1029 drivers/crypto/chelsio/chtls/chtls_cm.c 	struct tls_context *ctx;
ctx              1078 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx = tls_get_ctx(lsk);
ctx              1079 drivers/crypto/chelsio/chtls/chtls_cm.c 	newsk->sk_destruct = ctx->sk_destruct;
ctx              1273 drivers/crypto/chelsio/chtls/chtls_cm.c 	struct listen_ctx *ctx;
ctx              1286 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx = (struct listen_ctx *)data;
ctx              1287 drivers/crypto/chelsio/chtls/chtls_cm.c 	lsk = ctx->lsk;
ctx              1939 drivers/crypto/chelsio/chtls/chtls_cm.c 	void *ctx;
ctx              1948 drivers/crypto/chelsio/chtls/chtls_cm.c 	ctx = lookup_stid(cdev->tids, oreq->ts_recent);
ctx              1949 drivers/crypto/chelsio/chtls/chtls_cm.c 	if (!ctx)
ctx              1952 drivers/crypto/chelsio/chtls/chtls_cm.c 	listen_ctx = (struct listen_ctx *)ctx;
ctx               474 drivers/crypto/chelsio/chtls/chtls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               477 drivers/crypto/chelsio/chtls/chtls_main.c 		return ctx->sk_proto->getsockopt(sk, level,
ctx               542 drivers/crypto/chelsio/chtls/chtls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               545 drivers/crypto/chelsio/chtls/chtls_main.c 		return ctx->sk_proto->setsockopt(sk, level,
ctx               198 drivers/crypto/exynos-rng.c 	struct exynos_rng_ctx *ctx = crypto_rng_ctx(tfm);
ctx               199 drivers/crypto/exynos-rng.c 	struct exynos_rng_dev *rng = ctx->rng;
ctx               228 drivers/crypto/exynos-rng.c 	struct exynos_rng_ctx *ctx = crypto_rng_ctx(tfm);
ctx               229 drivers/crypto/exynos-rng.c 	struct exynos_rng_dev *rng = ctx->rng;
ctx               237 drivers/crypto/exynos-rng.c 	ret = exynos_rng_set_seed(ctx->rng, seed, slen);
ctx               247 drivers/crypto/exynos-rng.c 	struct exynos_rng_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               249 drivers/crypto/exynos-rng.c 	ctx->rng = exynos_rng_dev;
ctx              1112 drivers/crypto/hifn_795x.c 		struct hifn_context *ctx, struct hifn_request_context *rctx,
ctx              1144 drivers/crypto/hifn_795x.c 		if (ctx->keysize)
ctx              1168 drivers/crypto/hifn_795x.c 			if (ctx->keysize != 16)
ctx              1174 drivers/crypto/hifn_795x.c 			if (ctx->keysize != 24)
ctx              1180 drivers/crypto/hifn_795x.c 			if (ctx->keysize != 32)
ctx              1186 drivers/crypto/hifn_795x.c 			if (ctx->keysize != 24)
ctx              1191 drivers/crypto/hifn_795x.c 			if (ctx->keysize != 8)
ctx              1200 drivers/crypto/hifn_795x.c 				nbytes, nbytes, ctx->key, ctx->keysize,
ctx              1318 drivers/crypto/hifn_795x.c 		struct hifn_context *ctx, struct hifn_request_context *rctx,
ctx              1362 drivers/crypto/hifn_795x.c 	hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes);
ctx              1523 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              1525 drivers/crypto/hifn_795x.c 	struct hifn_device *dev = ctx->dev;
ctx              1566 drivers/crypto/hifn_795x.c 	err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req);
ctx              1584 drivers/crypto/hifn_795x.c 			 ctx->key, ctx->keysize,
ctx              1942 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1943 drivers/crypto/hifn_795x.c 	struct hifn_device *dev = ctx->dev;
ctx              1952 drivers/crypto/hifn_795x.c 	memcpy(ctx->key, key, len);
ctx              1953 drivers/crypto/hifn_795x.c 	ctx->keysize = len;
ctx              1961 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1962 drivers/crypto/hifn_795x.c 	struct hifn_device *dev = ctx->dev;
ctx              1971 drivers/crypto/hifn_795x.c 	memcpy(ctx->key, key, len);
ctx              1972 drivers/crypto/hifn_795x.c 	ctx->keysize = len;
ctx              1979 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              1980 drivers/crypto/hifn_795x.c 	struct hifn_device *dev = ctx->dev;
ctx              2000 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              2015 drivers/crypto/hifn_795x.c 	if (ctx->keysize != 16 && type == ACRYPTO_TYPE_AES_128) {
ctx              2016 drivers/crypto/hifn_795x.c 		if (ctx->keysize == 24)
ctx              2018 drivers/crypto/hifn_795x.c 		else if (ctx->keysize == 32)
ctx              2070 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              2071 drivers/crypto/hifn_795x.c 	struct hifn_device *dev = ctx->dev;
ctx              2373 drivers/crypto/hifn_795x.c 	struct hifn_context *ctx = crypto_tfm_ctx(tfm);
ctx              2375 drivers/crypto/hifn_795x.c 	ctx->dev = ha->dev;
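
Most of the hifn hits are one recurring guard: the keysize stored at setkey time must agree with the mode a request selects (16/24/32 bytes for AES-128/192/256, 24 for 3DES, 8 for DES), and the setup path even upgrades an AES-128 request to AES-192/256 when the stored key is longer. A small sketch of the validation half, with the enum made up for the sketch and the sizes taken from the checks above:

    #include <stdio.h>

    enum cipher { C_AES128, C_AES192, C_AES256, C_3DES, C_DES };

    static int keysize_ok(enum cipher c, unsigned keysize)
    {
        switch (c) {
        case C_AES128: return keysize == 16;
        case C_AES192: return keysize == 24;
        case C_AES256: return keysize == 32;
        case C_3DES:   return keysize == 24;
        case C_DES:    return keysize == 8;
        }
        return 0;
    }

    int main(void)
    {
        printf("%d %d\n", keysize_ok(C_AES256, 32), keysize_ok(C_DES, 16));
        return 0;
    }
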
ctx               126 drivers/crypto/hisilicon/sec/sec_algs.c static void sec_alg_skcipher_init_template(struct sec_alg_tfm_ctx *ctx,
ctx               138 drivers/crypto/hisilicon/sec/sec_algs.c 	req->cipher_key_addr_lo = lower_32_bits(ctx->pkey);
ctx               139 drivers/crypto/hisilicon/sec/sec_algs.c 	req->cipher_key_addr_hi = upper_32_bits(ctx->pkey);
ctx               148 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               150 drivers/crypto/hisilicon/sec/sec_algs.c 	ctx->cipher_alg = alg;
ctx               151 drivers/crypto/hisilicon/sec/sec_algs.c 	memcpy(ctx->key, key, keylen);
ctx               152 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_alg_skcipher_init_template(ctx, &ctx->req_template,
ctx               153 drivers/crypto/hisilicon/sec/sec_algs.c 				       ctx->cipher_alg);
ctx               230 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               231 drivers/crypto/hisilicon/sec/sec_algs.c 	struct device *dev = ctx->queue->dev_info->dev;
ctx               233 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_lock(&ctx->lock);
ctx               234 drivers/crypto/hisilicon/sec/sec_algs.c 	if (ctx->key) {
ctx               236 drivers/crypto/hisilicon/sec/sec_algs.c 		memset(ctx->key, 0, SEC_MAX_CIPHER_KEY);
ctx               239 drivers/crypto/hisilicon/sec/sec_algs.c 		ctx->key = dma_alloc_coherent(dev, SEC_MAX_CIPHER_KEY,
ctx               240 drivers/crypto/hisilicon/sec/sec_algs.c 					      &ctx->pkey, GFP_KERNEL);
ctx               241 drivers/crypto/hisilicon/sec/sec_algs.c 		if (!ctx->key) {
ctx               242 drivers/crypto/hisilicon/sec/sec_algs.c 			mutex_unlock(&ctx->lock);
ctx               246 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_unlock(&ctx->lock);
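
The sec setkey lines above reuse one DMA-coherent key buffer across rekeys: under ctx->lock the old key is wiped if the buffer already exists, otherwise the buffer is allocated and its DMA handle recorded in pkey. A userspace analogue with malloc standing in for dma_alloc_coherent() (names hypothetical):

    #include <pthread.h>
    #include <stdlib.h>
    #include <string.h>

    #define MAX_KEY 64

    struct tfm_ctx {
        pthread_mutex_t lock;
        unsigned char  *key;            /* lazily allocated, then reused */
    };

    static int set_key(struct tfm_ctx *ctx, const void *key, size_t len)
    {
        if (len > MAX_KEY)
            return -1;

        pthread_mutex_lock(&ctx->lock);
        if (ctx->key) {
            memset(ctx->key, 0, MAX_KEY);   /* scrub the previous key */
        } else {
            ctx->key = malloc(MAX_KEY);     /* dma_alloc_coherent() here */
            if (!ctx->key) {
                pthread_mutex_unlock(&ctx->lock);
                return -1;
            }
        }
        memcpy(ctx->key, key, len);
        pthread_mutex_unlock(&ctx->lock);
        return 0;
    }

    int main(void)
    {
        struct tfm_ctx ctx = { .lock = PTHREAD_MUTEX_INITIALIZER };
        int rc = set_key(&ctx, "0123456789abcdef", 16);

        free(ctx.key);
        return rc ? 1 : 0;
    }
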
ctx               429 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = sec_req->tfm_ctx;
ctx               431 drivers/crypto/hisilicon/sec/sec_algs.c 	struct device *dev = ctx->queue->dev_info->dev;
ctx               451 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_lock(&ctx->queue->queuelock);
ctx               453 drivers/crypto/hisilicon/sec/sec_algs.c 	switch (ctx->cipher_alg) {
ctx               483 drivers/crypto/hisilicon/sec/sec_algs.c 	if (ctx->queue->havesoftqueue &&
ctx               484 drivers/crypto/hisilicon/sec/sec_algs.c 	    !kfifo_is_empty(&ctx->queue->softqueue) &&
ctx               485 drivers/crypto/hisilicon/sec/sec_algs.c 	    sec_queue_empty(ctx->queue)) {
ctx               486 drivers/crypto/hisilicon/sec/sec_algs.c 		ret = kfifo_get(&ctx->queue->softqueue, &nextrequest);
ctx               493 drivers/crypto/hisilicon/sec/sec_algs.c 			sec_queue_send(ctx->queue, &nextrequest->req,
ctx               495 drivers/crypto/hisilicon/sec/sec_algs.c 	} else if (!list_empty(&ctx->backlog)) {
ctx               497 drivers/crypto/hisilicon/sec/sec_algs.c 		backlog_req = list_first_entry(&ctx->backlog,
ctx               500 drivers/crypto/hisilicon/sec/sec_algs.c 		if (sec_queue_can_enqueue(ctx->queue,
ctx               502 drivers/crypto/hisilicon/sec/sec_algs.c 		    (ctx->queue->havesoftqueue &&
ctx               503 drivers/crypto/hisilicon/sec/sec_algs.c 		     kfifo_avail(&ctx->queue->softqueue) >
ctx               505 drivers/crypto/hisilicon/sec/sec_algs.c 			sec_send_request(backlog_req, ctx->queue);
ctx               511 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_unlock(&ctx->queue->queuelock);
ctx               516 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_alg_free_el(sec_req_el, ctx->queue->dev_info);
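
The completion path shown above doubles as the queue pump: with queuelock held it first promotes a soft-queued element into the now-empty hardware queue, and only otherwise retries the head of the backlog list. A compact userspace model of that promote-then-backlog ordering (depths and names invented for the sketch):

    #include <stdio.h>

    #define HW_DEPTH   2
    #define SOFT_DEPTH 8

    static int hw_inflight;                     /* hardware queue occupancy */
    static int soft_q[SOFT_DEPTH], soft_head, soft_tail;
    static int backlog[SOFT_DEPTH], bl_head, bl_tail;

    static void hw_send(int req)
    {
        hw_inflight++;
        printf("hw <- req %d\n", req);
    }

    /* Called once per completed request, mirroring the driver's order:
     * soft queue first, backlog only when the soft queue had nothing. */
    static void on_complete(void)
    {
        hw_inflight--;
        if (soft_head != soft_tail && hw_inflight == 0)
            hw_send(soft_q[soft_head++ % SOFT_DEPTH]);
        else if (bl_head != bl_tail && hw_inflight < HW_DEPTH)
            hw_send(backlog[bl_head++ % SOFT_DEPTH]);
    }

    int main(void)
    {
        soft_q[soft_tail++] = 1;
        backlog[bl_tail++]  = 2;
        hw_inflight = 1;
        on_complete();              /* promotes req 1 from the soft queue */
        on_complete();              /* then req 2 from the backlog */
        return 0;
    }
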
ctx               711 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               712 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_queue *queue = ctx->queue;
ctx               750 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->tfm_ctx = ctx;
ctx               771 drivers/crypto/hisilicon/sec/sec_algs.c 		el = sec_alg_alloc_and_fill_el(&ctx->req_template,
ctx               811 drivers/crypto/hisilicon/sec/sec_algs.c 	    !list_empty(&ctx->backlog)) {
ctx               814 drivers/crypto/hisilicon/sec/sec_algs.c 			list_add_tail(&sec_req->backlog_head, &ctx->backlog);
ctx               872 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               874 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_init(&ctx->lock);
ctx               875 drivers/crypto/hisilicon/sec/sec_algs.c 	INIT_LIST_HEAD(&ctx->backlog);
ctx               878 drivers/crypto/hisilicon/sec/sec_algs.c 	ctx->queue = sec_queue_alloc_start_safe();
ctx               879 drivers/crypto/hisilicon/sec/sec_algs.c 	if (IS_ERR(ctx->queue))
ctx               880 drivers/crypto/hisilicon/sec/sec_algs.c 		return PTR_ERR(ctx->queue);
ctx               882 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_init(&ctx->queue->queuelock);
ctx               883 drivers/crypto/hisilicon/sec/sec_algs.c 	ctx->queue->havesoftqueue = false;
ctx               890 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               891 drivers/crypto/hisilicon/sec/sec_algs.c 	struct device *dev = ctx->queue->dev_info->dev;
ctx               893 drivers/crypto/hisilicon/sec/sec_algs.c 	if (ctx->key) {
ctx               894 drivers/crypto/hisilicon/sec/sec_algs.c 		memzero_explicit(ctx->key, SEC_MAX_CIPHER_KEY);
ctx               895 drivers/crypto/hisilicon/sec/sec_algs.c 		dma_free_coherent(dev, SEC_MAX_CIPHER_KEY, ctx->key,
ctx               896 drivers/crypto/hisilicon/sec/sec_algs.c 				  ctx->pkey);
ctx               898 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_queue_stop_release(ctx->queue);
ctx               903 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               910 drivers/crypto/hisilicon/sec/sec_algs.c 	INIT_KFIFO(ctx->queue->softqueue);
ctx               911 drivers/crypto/hisilicon/sec/sec_algs.c 	ret = kfifo_alloc(&ctx->queue->softqueue, 512, GFP_KERNEL);
ctx               916 drivers/crypto/hisilicon/sec/sec_algs.c 	ctx->queue->havesoftqueue = true;
ctx               923 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               925 drivers/crypto/hisilicon/sec/sec_algs.c 	kfifo_free(&ctx->queue->softqueue);
ctx               857 drivers/crypto/hisilicon/sec/sec_drv.c int sec_queue_send(struct sec_queue *queue, struct sec_bd_info *msg, void *ctx)
ctx               871 drivers/crypto/hisilicon/sec/sec_drv.c 	queue->shadow[write] = ctx;
ctx               185 drivers/crypto/hisilicon/sec/sec_drv.h 	void (*callback)(struct sec_bd_info *resp, void *ctx);
ctx               417 drivers/crypto/hisilicon/sec/sec_drv.h int sec_queue_send(struct sec_queue *queue, struct sec_bd_info *msg, void *ctx);
ctx               424 drivers/crypto/hisilicon/sec/sec_drv.h void sec_alg_callback(struct sec_bd_info *resp, void *ctx);
ctx                70 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_ctx *ctx;
ctx               110 drivers/crypto/hisilicon/zip/zip_crypto.c static int hisi_zip_create_qp(struct hisi_qm *qm, struct hisi_zip_qp_ctx *ctx,
ctx               121 drivers/crypto/hisilicon/zip/zip_crypto.c 	qp->qp_ctx = ctx;
ctx               122 drivers/crypto/hisilicon/zip/zip_crypto.c 	ctx->qp = qp;
ctx               135 drivers/crypto/hisilicon/zip/zip_crypto.c static void hisi_zip_release_qp(struct hisi_zip_qp_ctx *ctx)
ctx               137 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_qm_stop_qp(ctx->qp);
ctx               138 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_qm_release_qp(ctx->qp);
ctx               213 drivers/crypto/hisilicon/zip/zip_crypto.c static int hisi_zip_create_req_q(struct hisi_zip_ctx *ctx)
ctx               219 drivers/crypto/hisilicon/zip/zip_crypto.c 		req_q = &ctx->qp_ctx[i].req_q;
ctx               247 drivers/crypto/hisilicon/zip/zip_crypto.c 	kfree(ctx->qp_ctx[QPC_DECOMP].req_q.req_bitmap);
ctx               249 drivers/crypto/hisilicon/zip/zip_crypto.c 	kfree(ctx->qp_ctx[QPC_COMP].req_q.q);
ctx               251 drivers/crypto/hisilicon/zip/zip_crypto.c 	kfree(ctx->qp_ctx[QPC_COMP].req_q.req_bitmap);
ctx               255 drivers/crypto/hisilicon/zip/zip_crypto.c static void hisi_zip_release_req_q(struct hisi_zip_ctx *ctx)
ctx               260 drivers/crypto/hisilicon/zip/zip_crypto.c 		kfree(ctx->qp_ctx[i].req_q.q);
ctx               261 drivers/crypto/hisilicon/zip/zip_crypto.c 		kfree(ctx->qp_ctx[i].req_q.req_bitmap);
ctx               265 drivers/crypto/hisilicon/zip/zip_crypto.c static int hisi_zip_create_sgl_pool(struct hisi_zip_ctx *ctx)
ctx               271 drivers/crypto/hisilicon/zip/zip_crypto.c 		tmp = &ctx->qp_ctx[i];
ctx               285 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_acc_free_sgl_pool(&ctx->qp_ctx[QPC_COMP].qp->qm->pdev->dev,
ctx               286 drivers/crypto/hisilicon/zip/zip_crypto.c 			       &ctx->qp_ctx[QPC_COMP].sgl_pool);
ctx               290 drivers/crypto/hisilicon/zip/zip_crypto.c static void hisi_zip_release_sgl_pool(struct hisi_zip_ctx *ctx)
ctx               295 drivers/crypto/hisilicon/zip/zip_crypto.c 		hisi_acc_free_sgl_pool(&ctx->qp_ctx[i].qp->qm->pdev->dev,
ctx               296 drivers/crypto/hisilicon/zip/zip_crypto.c 				       &ctx->qp_ctx[i].sgl_pool);
ctx               343 drivers/crypto/hisilicon/zip/zip_crypto.c static void hisi_zip_set_acomp_cb(struct hisi_zip_ctx *ctx,
ctx               349 drivers/crypto/hisilicon/zip/zip_crypto.c 		ctx->qp_ctx[i].qp->req_cb = fn;
ctx               355 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_ctx *ctx = crypto_tfm_ctx(&tfm->base);
ctx               358 drivers/crypto/hisilicon/zip/zip_crypto.c 	ret = hisi_zip_ctx_init(ctx, COMP_NAME_TO_TYPE(alg_name));
ctx               362 drivers/crypto/hisilicon/zip/zip_crypto.c 	ret = hisi_zip_create_req_q(ctx);
ctx               366 drivers/crypto/hisilicon/zip/zip_crypto.c 	ret = hisi_zip_create_sgl_pool(ctx);
ctx               370 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_set_acomp_cb(ctx, hisi_zip_acomp_cb);
ctx               375 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_release_req_q(ctx);
ctx               377 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_ctx_exit(ctx);
ctx               383 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_ctx *ctx = crypto_tfm_ctx(&tfm->base);
ctx               385 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_set_acomp_cb(ctx, NULL);
ctx               386 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_release_sgl_pool(ctx);
ctx               387 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_release_req_q(ctx);
ctx               388 drivers/crypto/hisilicon/zip/zip_crypto.c 	hisi_zip_ctx_exit(ctx);
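
hisi_zip_acomp_init() above stacks three steps (ctx init, request-queue creation, sgl-pool creation) and its error labels release them in reverse, while the exit path tears all of them down unconditionally. The classic goto-unwind shape, as a runnable sketch with hypothetical step names:

    #include <stdio.h>

    static int  step_a(void) { return 0; }
    static int  step_b(void) { return 0; }
    static int  step_c(void) { return -1; /* force the error path for the demo */ }
    static void undo_a(void) { puts("undo a"); }
    static void undo_b(void) { puts("undo b"); }

    static int init_all(void)
    {
        int ret = step_a();
        if (ret)
            return ret;
        ret = step_b();
        if (ret)
            goto err_a;
        ret = step_c();
        if (ret)
            goto err_b;
        return 0;

    err_b:
        undo_b();       /* unwind in reverse order of construction */
    err_a:
        undo_a();
        return ret;
    }

    int main(void)
    {
        init_all();     /* demo: prints "undo b" then "undo a" */
        return 0;
    }
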
ctx               513 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_ctx *ctx = crypto_tfm_ctx(acomp_req->base.tfm);
ctx               514 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_qp_ctx *qp_ctx = &ctx->qp_ctx[QPC_COMP];
ctx               537 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_ctx *ctx = crypto_tfm_ctx(acomp_req->base.tfm);
ctx               538 drivers/crypto/hisilicon/zip/zip_crypto.c 	struct hisi_zip_qp_ctx *qp_ctx = &ctx->qp_ctx[QPC_DECOMP];
ctx               165 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               168 drivers/crypto/img-hash.c 	if (ctx->flags & DRIVER_FLAGS_MD5)
ctx               170 drivers/crypto/img-hash.c 	else if (ctx->flags & DRIVER_FLAGS_SHA1)
ctx               172 drivers/crypto/img-hash.c 	else if (ctx->flags & DRIVER_FLAGS_SHA224)
ctx               174 drivers/crypto/img-hash.c 	else if (ctx->flags & DRIVER_FLAGS_SHA256)
ctx               211 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               213 drivers/crypto/img-hash.c 	if (ctx->bufcnt) {
ctx               214 drivers/crypto/img-hash.c 		img_hash_xmit_cpu(hdev, ctx->buffer, ctx->bufcnt, 0);
ctx               215 drivers/crypto/img-hash.c 		ctx->bufcnt = 0;
ctx               217 drivers/crypto/img-hash.c 	if (ctx->sg)
ctx               224 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               226 drivers/crypto/img-hash.c 	ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
ctx               227 drivers/crypto/img-hash.c 	if (ctx->dma_ct == 0) {
ctx               235 drivers/crypto/img-hash.c 				       ctx->dma_ct,
ctx               254 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               256 drivers/crypto/img-hash.c 	ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg),
ctx               257 drivers/crypto/img-hash.c 					ctx->buffer, hdev->req->nbytes);
ctx               259 drivers/crypto/img-hash.c 	ctx->total = hdev->req->nbytes;
ctx               260 drivers/crypto/img-hash.c 	ctx->bufcnt = 0;
ctx               266 drivers/crypto/img-hash.c 	return img_hash_xmit_cpu(hdev, ctx->buffer, ctx->total, 1);
ctx               271 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(req);
ctx               276 drivers/crypto/img-hash.c 	memcpy(req->result, ctx->digest, ctx->digsize);
ctx               283 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(req);
ctx               284 drivers/crypto/img-hash.c 	u32 *hash = (u32 *)ctx->digest;
ctx               287 drivers/crypto/img-hash.c 	for (i = (ctx->digsize / sizeof(u32)) - 1; i >= 0; i--)
ctx               288 drivers/crypto/img-hash.c 		hash[i] = img_hash_read_result_queue(ctx->hdev);
ctx               293 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(req);
ctx               294 drivers/crypto/img-hash.c 	struct img_hash_dev *hdev = ctx->hdev;
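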
ctx               302 drivers/crypto/img-hash.c 		ctx->flags |= DRIVER_FLAGS_ERROR;
ctx               314 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               318 drivers/crypto/img-hash.c 	dev_dbg(hdev->dev, "xmit dma size: %d\n", ctx->total);
ctx               320 drivers/crypto/img-hash.c 	if (!ctx->total)
ctx               359 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               364 drivers/crypto/img-hash.c 	if (!hdev->req || !ctx->sg)
ctx               367 drivers/crypto/img-hash.c 	addr = sg_virt(ctx->sg);
ctx               368 drivers/crypto/img-hash.c 	nbytes = ctx->sg->length - ctx->offset;
ctx               384 drivers/crypto/img-hash.c 		sg_init_one(&tsg, addr + ctx->offset, wsend * 4);
ctx               387 drivers/crypto/img-hash.c 			ctx->flags |= DRIVER_FLAGS_CPU;
ctx               389 drivers/crypto/img-hash.c 			img_hash_xmit_cpu(hdev, addr + ctx->offset,
ctx               391 drivers/crypto/img-hash.c 			ctx->sent += wsend * 4;
ctx               394 drivers/crypto/img-hash.c 			ctx->sent += wsend * 4;
ctx               399 drivers/crypto/img-hash.c 		ctx->bufcnt = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents,
ctx               400 drivers/crypto/img-hash.c 						 ctx->buffer, bleft, ctx->sent);
ctx               402 drivers/crypto/img-hash.c 		ctx->sg = sg_next(ctx->sg);
ctx               403 drivers/crypto/img-hash.c 		while (ctx->sg && (ctx->bufcnt < 4)) {
ctx               404 drivers/crypto/img-hash.c 			len = ctx->sg->length;
ctx               405 drivers/crypto/img-hash.c 			if (likely(len > (4 - ctx->bufcnt)))
ctx               406 drivers/crypto/img-hash.c 				len = 4 - ctx->bufcnt;
ctx               407 drivers/crypto/img-hash.c 			tbc = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents,
ctx               408 drivers/crypto/img-hash.c 						 ctx->buffer + ctx->bufcnt, len,
ctx               409 drivers/crypto/img-hash.c 					ctx->sent + ctx->bufcnt);
ctx               410 drivers/crypto/img-hash.c 			ctx->bufcnt += tbc;
ctx               411 drivers/crypto/img-hash.c 			if (tbc >= ctx->sg->length) {
ctx               412 drivers/crypto/img-hash.c 				ctx->sg = sg_next(ctx->sg);
ctx               417 drivers/crypto/img-hash.c 		ctx->sent += ctx->bufcnt;
ctx               418 drivers/crypto/img-hash.c 		ctx->offset = tbc;
ctx               423 drivers/crypto/img-hash.c 		ctx->offset = 0;
ctx               424 drivers/crypto/img-hash.c 		ctx->sg = sg_next(ctx->sg);
ctx               430 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(hdev->req);
ctx               432 drivers/crypto/img-hash.c 	if (ctx->flags & DRIVER_FLAGS_SG)
ctx               433 drivers/crypto/img-hash.c 		dma_unmap_sg(hdev->dev, ctx->sg, ctx->dma_ct, DMA_TO_DEVICE);
ctx               441 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(req);
ctx               444 drivers/crypto/img-hash.c 	ctx->bufcnt = 0;
ctx               485 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               487 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               498 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx;
ctx               528 drivers/crypto/img-hash.c 	ctx = ahash_request_ctx(req);
ctx               531 drivers/crypto/img-hash.c 		 ctx->op, req->nbytes);
ctx               549 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               551 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               564 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               566 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               578 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               580 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               594 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               596 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               607 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               609 drivers/crypto/img-hash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
ctx               620 drivers/crypto/img-hash.c 	struct img_hash_request_ctx *ctx = ahash_request_ctx(req);
ctx               638 drivers/crypto/img-hash.c 	ctx->hdev = hdev;
ctx               639 drivers/crypto/img-hash.c 	ctx->flags = 0;
ctx               640 drivers/crypto/img-hash.c 	ctx->digsize = crypto_ahash_digestsize(tfm);
ctx               642 drivers/crypto/img-hash.c 	switch (ctx->digsize) {
ctx               644 drivers/crypto/img-hash.c 		ctx->flags |= DRIVER_FLAGS_SHA1;
ctx               647 drivers/crypto/img-hash.c 		ctx->flags |= DRIVER_FLAGS_SHA256;
ctx               650 drivers/crypto/img-hash.c 		ctx->flags |= DRIVER_FLAGS_SHA224;
ctx               653 drivers/crypto/img-hash.c 		ctx->flags |= DRIVER_FLAGS_MD5;
ctx               659 drivers/crypto/img-hash.c 	ctx->bufcnt = 0;
ctx               660 drivers/crypto/img-hash.c 	ctx->offset = 0;
ctx               661 drivers/crypto/img-hash.c 	ctx->sent = 0;
ctx               662 drivers/crypto/img-hash.c 	ctx->total = req->nbytes;
ctx               663 drivers/crypto/img-hash.c 	ctx->sg = req->src;
ctx               664 drivers/crypto/img-hash.c 	ctx->sgfirst = req->src;
ctx               665 drivers/crypto/img-hash.c 	ctx->nents = sg_nents(ctx->sg);
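
The img-hash init lines above select the hardware mode purely from crypto_ahash_digestsize(): 20 bytes means SHA-1, 32 SHA-256, 28 SHA-224, 16 MD5. The same dispatch as a standalone sketch, with the flag values made up for the example:

    #include <stdio.h>

    #define F_MD5    0x01
    #define F_SHA1   0x02
    #define F_SHA224 0x04
    #define F_SHA256 0x08

    /* The digest size alone picks the mode bit, as in the switch above. */
    static int flag_for_digestsize(unsigned dsize)
    {
        switch (dsize) {
        case 16: return F_MD5;      /* MD5_DIGEST_SIZE */
        case 20: return F_SHA1;     /* SHA1_DIGEST_SIZE */
        case 28: return F_SHA224;   /* SHA224_DIGEST_SIZE */
        case 32: return F_SHA256;   /* SHA256_DIGEST_SIZE */
        default: return -1;         /* unsupported digest size */
        }
    }

    int main(void)
    {
        printf("%d\n", flag_for_digestsize(32));
        return 0;
    }
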
ctx               674 drivers/crypto/img-hash.c 	struct img_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               677 drivers/crypto/img-hash.c 	ctx->fallback = crypto_alloc_ahash(alg_name, 0,
ctx               679 drivers/crypto/img-hash.c 	if (IS_ERR(ctx->fallback)) {
ctx               681 drivers/crypto/img-hash.c 		err = PTR_ERR(ctx->fallback);
ctx               686 drivers/crypto/img-hash.c 				 crypto_ahash_reqsize(ctx->fallback) +
ctx               772 drivers/crypto/inside-secure/safexcel.c 	struct safexcel_context *ctx;
ctx               796 drivers/crypto/inside-secure/safexcel.c 		ctx = crypto_tfm_ctx(req->tfm);
ctx               797 drivers/crypto/inside-secure/safexcel.c 		ret = ctx->send(req, ring, &commands, &results);
ctx               968 drivers/crypto/inside-secure/safexcel.c 	struct safexcel_context *ctx;
ctx               984 drivers/crypto/inside-secure/safexcel.c 		ctx = crypto_tfm_ctx(req->tfm);
ctx               985 drivers/crypto/inside-secure/safexcel.c 		ndesc = ctx->handle_result(priv, ring, req,
ctx                68 drivers/crypto/inside-secure/safexcel_cipher.c static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
ctx                73 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
ctx                77 drivers/crypto/inside-secure/safexcel_cipher.c 		cdesc->control_data.token[0] = ctx->nonce;
ctx                84 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
ctx                93 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
ctx               105 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
ctx               106 drivers/crypto/inside-secure/safexcel_cipher.c 		switch (ctx->alg) {
ctx               124 drivers/crypto/inside-secure/safexcel_cipher.c static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
ctx               130 drivers/crypto/inside-secure/safexcel_cipher.c 	safexcel_cipher_token(ctx, iv, cdesc);
ctx               144 drivers/crypto/inside-secure/safexcel_cipher.c static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
ctx               151 drivers/crypto/inside-secure/safexcel_cipher.c 	safexcel_cipher_token(ctx, iv, cdesc);
ctx               198 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
ctx               204 drivers/crypto/inside-secure/safexcel_cipher.c 	if (!ctx->xcm)
ctx               216 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->xcm == EIP197_XCM_MODE_GCM) {
ctx               274 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               275 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               285 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx               287 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx               288 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx               295 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx               297 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = len;
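
Every safexcel setkey in this listing guards the engine's record cache the same way: if EIP197_TRC_CACHE is active and a context record was already pushed (ctxr_dma is live), the new key is compared word-for-word against the cached copy and needs_inv is raised on the first mismatch, before the new key overwrites it. A self-contained sketch of that compare-then-mark step, with stand-in flags for the DMA state:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define KEY_WORDS 8

    struct eng_ctx {
        uint32_t key[KEY_WORDS];
        bool     cached;        /* stand-in for ctx->base.ctxr_dma being live */
        bool     needs_inv;     /* stand-in for ctx->base.needs_inv */
    };

    static void setkey(struct eng_ctx *ctx, const uint32_t *key)
    {
        if (ctx->cached) {
            for (int i = 0; i < KEY_WORDS; i++) {
                if (ctx->key[i] != key[i]) {
                    ctx->needs_inv = true;  /* engine record is now stale */
                    break;
                }
            }
        }
        memcpy(ctx->key, key, sizeof(ctx->key));
    }

    int main(void)
    {
        struct eng_ctx ctx = { .cached = true };
        uint32_t k[KEY_WORDS] = { 1 };

        setkey(&ctx, k);
        printf("needs_inv=%d\n", ctx.needs_inv);    /* prints needs_inv=1 */
        return 0;
    }
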
ctx               307 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               309 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               317 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
ctx               323 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
ctx               330 drivers/crypto/inside-secure/safexcel_cipher.c 	switch (ctx->alg) {
ctx               346 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
ctx               347 drivers/crypto/inside-secure/safexcel_cipher.c 	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
ctx               348 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->base.needs_inv = true;
ctx               351 drivers/crypto/inside-secure/safexcel_cipher.c 	switch (ctx->hash_alg) {
ctx               385 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
ctx               386 drivers/crypto/inside-secure/safexcel_cipher.c 	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
ctx               387 drivers/crypto/inside-secure/safexcel_cipher.c 	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
ctx               388 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->base.needs_inv = true;
ctx               391 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->key, keys.enckey, keys.enckeylen);
ctx               392 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = keys.enckeylen;
ctx               394 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
ctx               395 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->opad, &ostate.state, ctx->state_sz);
ctx               407 drivers/crypto/inside-secure/safexcel_cipher.c static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
ctx               412 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               413 drivers/crypto/inside-secure/safexcel_cipher.c 	int ctrl_size = ctx->key_len / sizeof(u32);
ctx               415 drivers/crypto/inside-secure/safexcel_cipher.c 	cdesc->control_data.control1 = ctx->mode;
ctx               417 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->aead) {
ctx               419 drivers/crypto/inside-secure/safexcel_cipher.c 		if (ctx->xcm) {
ctx               420 drivers/crypto/inside-secure/safexcel_cipher.c 			ctrl_size += ctx->state_sz / sizeof(u32);
ctx               424 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->hash_alg |
ctx               427 drivers/crypto/inside-secure/safexcel_cipher.c 			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
ctx               431 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->hash_alg |
ctx               436 drivers/crypto/inside-secure/safexcel_cipher.c 				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
ctx               442 drivers/crypto/inside-secure/safexcel_cipher.c 				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
ctx               458 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->alg == SAFEXCEL_DES) {
ctx               461 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if (ctx->alg == SAFEXCEL_3DES) {
ctx               464 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if (ctx->alg == SAFEXCEL_AES) {
ctx               465 drivers/crypto/inside-secure/safexcel_cipher.c 		switch (ctx->key_len >> ctx->xts) {
ctx               480 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->key_len >> ctx->xts);
ctx               498 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
ctx               534 drivers/crypto/inside-secure/safexcel_cipher.c 	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
ctx               557 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
ctx               558 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               572 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->aead) {
ctx               582 drivers/crypto/inside-secure/safexcel_cipher.c 		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
ctx               583 drivers/crypto/inside-secure/safexcel_cipher.c 		       ctx->ipad, ctx->state_sz);
ctx               584 drivers/crypto/inside-secure/safexcel_cipher.c 		if (!ctx->xcm)
ctx               585 drivers/crypto/inside-secure/safexcel_cipher.c 			memcpy(ctx->base.ctxr->data + (ctx->key_len +
ctx               586 drivers/crypto/inside-secure/safexcel_cipher.c 			       ctx->state_sz) / sizeof(u32), ctx->opad,
ctx               587 drivers/crypto/inside-secure/safexcel_cipher.c 			       ctx->state_sz);
ctx               588 drivers/crypto/inside-secure/safexcel_cipher.c 	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
ctx               638 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
ctx               655 drivers/crypto/inside-secure/safexcel_cipher.c 					   ctx->base.ctxr_dma);
ctx               678 drivers/crypto/inside-secure/safexcel_cipher.c 						 ctx->base.ctxr_dma);
ctx               683 drivers/crypto/inside-secure/safexcel_cipher.c 	safexcel_context_control(ctx, base, sreq, first_cdesc);
ctx               684 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->aead)
ctx               685 drivers/crypto/inside-secure/safexcel_cipher.c 		safexcel_aead_token(ctx, iv, first_cdesc,
ctx               689 drivers/crypto/inside-secure/safexcel_cipher.c 		safexcel_skcipher_token(ctx, iv, first_cdesc,
ctx               778 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
ctx               804 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->base.exit_inv) {
ctx               805 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx               806 drivers/crypto/inside-secure/safexcel_cipher.c 			      ctx->base.ctxr_dma);
ctx               814 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.ring = ring;
ctx               880 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
ctx               881 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               884 drivers/crypto/inside-secure/safexcel_cipher.c 	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
ctx               898 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               900 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               931 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               933 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               954 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               955 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               956 drivers/crypto/inside-secure/safexcel_cipher.c 	int ring = ctx->base.ring;
ctx               960 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx = crypto_tfm_ctx(base->tfm);
ctx               961 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.exit_inv = true;
ctx              1017 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
ctx              1018 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1024 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->base.ctxr) {
ctx              1025 drivers/crypto/inside-secure/safexcel_cipher.c 		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
ctx              1027 drivers/crypto/inside-secure/safexcel_cipher.c 			ctx->base.needs_inv = false;
ctx              1030 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->base.ring = safexcel_select_ring(priv);
ctx              1031 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
ctx              1033 drivers/crypto/inside-secure/safexcel_cipher.c 						 &ctx->base.ctxr_dma);
ctx              1034 drivers/crypto/inside-secure/safexcel_cipher.c 		if (!ctx->base.ctxr)
ctx              1038 drivers/crypto/inside-secure/safexcel_cipher.c 	ring = ctx->base.ring;
ctx              1064 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1072 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->priv = tmpl->priv;
ctx              1074 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.send = safexcel_skcipher_send;
ctx              1075 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.handle_result = safexcel_skcipher_handle_result;
ctx              1081 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1083 drivers/crypto/inside-secure/safexcel_cipher.c 	memzero_explicit(ctx->key, sizeof(ctx->key));
ctx              1086 drivers/crypto/inside-secure/safexcel_cipher.c 	if (!ctx->base.ctxr)
ctx              1089 drivers/crypto/inside-secure/safexcel_cipher.c 	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
ctx              1095 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1096 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1108 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx              1109 drivers/crypto/inside-secure/safexcel_cipher.c 			      ctx->base.ctxr_dma);
ctx              1115 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1116 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1128 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx              1129 drivers/crypto/inside-secure/safexcel_cipher.c 			      ctx->base.ctxr_dma);
ctx              1135 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1138 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              1139 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
ctx              1170 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1173 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              1174 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
ctx              1206 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1209 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              1210 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
ctx              1242 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1245 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              1246 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
ctx              1280 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1281 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1287 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
ctx              1296 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              1298 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx              1299 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              1306 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx              1308 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = keylen;
ctx              1316 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1319 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              1320 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
ctx              1354 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
ctx              1362 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->base.ctxr_dma)
ctx              1363 drivers/crypto/inside-secure/safexcel_cipher.c 		if (memcmp(ctx->key, key, len))
ctx              1364 drivers/crypto/inside-secure/safexcel_cipher.c 			ctx->base.needs_inv = true;
ctx              1366 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->key, key, len);
ctx              1367 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = len;
ctx              1374 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1377 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_DES;
ctx              1378 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
ctx              1410 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1413 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_DES;
ctx              1414 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
ctx              1446 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
ctx              1454 drivers/crypto/inside-secure/safexcel_cipher.c 	if (ctx->base.ctxr_dma) {
ctx              1455 drivers/crypto/inside-secure/safexcel_cipher.c 		if (memcmp(ctx->key, key, len))
ctx              1456 drivers/crypto/inside-secure/safexcel_cipher.c 			ctx->base.needs_inv = true;
ctx              1459 drivers/crypto/inside-secure/safexcel_cipher.c 	memcpy(ctx->key, key, len);
ctx              1461 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = len;
ctx              1468 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1471 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_3DES;
ctx              1472 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
ctx              1504 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1507 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_3DES;
ctx              1508 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
ctx              1553 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1561 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->priv = tmpl->priv;
ctx              1563 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES; /* default */
ctx              1564 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
ctx              1565 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->aead = true;
ctx              1566 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.send = safexcel_aead_send;
ctx              1567 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->base.handle_result = safexcel_aead_handle_result;
ctx              1573 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1576 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
ctx              1577 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = SHA1_DIGEST_SIZE;
ctx              1608 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1611 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
ctx              1612 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = SHA256_DIGEST_SIZE;
ctx              1643 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1646 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
ctx              1647 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = SHA256_DIGEST_SIZE;
ctx              1678 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1681 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
ctx              1682 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = SHA512_DIGEST_SIZE;
ctx              1713 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1716 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
ctx              1717 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = SHA512_DIGEST_SIZE;
ctx              1748 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1751 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg = SAFEXCEL_3DES; /* override default */
ctx              1782 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1785 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
ctx              1816 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1819 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
ctx              1850 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1853 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
ctx              1884 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1887 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
ctx              1918 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1921 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
ctx              1954 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1955 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1973 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              1975 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx              1976 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              1983 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx              1992 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              1994 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i + keylen / sizeof(u32)] !=
ctx              1996 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              2003 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i + keylen / sizeof(u32)] =
ctx              2006 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = keylen << 1;
ctx              2014 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2017 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->alg  = SAFEXCEL_AES;
ctx              2018 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->xts  = 1;
ctx              2019 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
ctx              2070 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2071 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              2083 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              2085 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx              2086 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              2093 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx              2095 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = len;
ctx              2098 drivers/crypto/inside-secure/safexcel_cipher.c 	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
ctx              2099 drivers/crypto/inside-secure/safexcel_cipher.c 	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
ctx              2101 drivers/crypto/inside-secure/safexcel_cipher.c 	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
ctx              2102 drivers/crypto/inside-secure/safexcel_cipher.c 	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
ctx              2108 drivers/crypto/inside-secure/safexcel_cipher.c 	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
ctx              2110 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              2112 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
ctx              2113 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              2120 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
ctx              2129 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2132 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
ctx              2133 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = GHASH_BLOCK_SIZE;
ctx              2134 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->xcm = EIP197_XCM_MODE_GCM;
ctx              2135 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
ctx              2137 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
ctx              2138 drivers/crypto/inside-secure/safexcel_cipher.c 	if (IS_ERR(ctx->hkaes))
ctx              2139 drivers/crypto/inside-secure/safexcel_cipher.c 		return PTR_ERR(ctx->hkaes);
ctx              2146 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2148 drivers/crypto/inside-secure/safexcel_cipher.c 	crypto_free_cipher(ctx->hkaes);
ctx              2188 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2189 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              2200 drivers/crypto/inside-secure/safexcel_cipher.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
ctx              2202 drivers/crypto/inside-secure/safexcel_cipher.c 			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
ctx              2203 drivers/crypto/inside-secure/safexcel_cipher.c 				ctx->base.needs_inv = true;
ctx              2210 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
ctx              2211 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
ctx              2215 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->key_len = len;
ctx              2216 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
ctx              2219 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
ctx              2221 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
ctx              2223 drivers/crypto/inside-secure/safexcel_cipher.c 		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
ctx              2231 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2234 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
ctx              2235 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = 3 * AES_BLOCK_SIZE;
ctx              2236 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->xcm = EIP197_XCM_MODE_CCM;
ctx              2237 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
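Note: the safexcel_cipher.c entries above cluster around one setkey pattern: when a context record is already live on the engine (ctx->base.ctxr_dma non-zero), a key that differs from the cached copy flags the record for invalidation before it is overwritten. Below is a minimal user-space sketch of that compare-then-invalidate step, using invented names (toy_ctx, toy_setkey) rather than the driver's types.

	#include <stdbool.h>
	#include <stdint.h>
	#include <string.h>

	#define TOY_MAX_KEY 64

	struct toy_ctx {
		uint8_t key[TOY_MAX_KEY];
		unsigned int key_len;
		uintptr_t ctxr_dma;	/* non-zero once the record sits on the engine */
		bool needs_inv;
	};

	static int toy_setkey(struct toy_ctx *ctx, const uint8_t *key, unsigned int len)
	{
		if (len > TOY_MAX_KEY)
			return -1;	/* -EINVAL analogue */
		/* A live record goes stale only if the key actually changed. */
		if (ctx->ctxr_dma && (ctx->key_len != len || memcmp(ctx->key, key, len)))
			ctx->needs_inv = true;
		memcpy(ctx->key, key, len);
		ctx->key_len = len;
		return 0;
	}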
ctx                78 drivers/crypto/inside-secure/safexcel_hash.c static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
ctx                82 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx                85 drivers/crypto/inside-secure/safexcel_hash.c 	cdesc->control_data.control0 |= ctx->alg;
ctx               112 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
ctx               148 drivers/crypto/inside-secure/safexcel_hash.c 			ctx->base.ctxr->data[req->state_sz >> 2] =
ctx               156 drivers/crypto/inside-secure/safexcel_hash.c 			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
ctx               157 drivers/crypto/inside-secure/safexcel_hash.c 			       ctx->opad, req->state_sz);
ctx               185 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               226 drivers/crypto/inside-secure/safexcel_hash.c 			memcpy(sreq->state, ctx->opad, sreq->state_sz);
ctx               233 drivers/crypto/inside-secure/safexcel_hash.c 			ctx->base.needs_inv = true;
ctx               259 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               260 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               310 drivers/crypto/inside-secure/safexcel_hash.c 						 ctx->base.ctxr_dma);
ctx               346 drivers/crypto/inside-secure/safexcel_hash.c 					   sglen, len, ctx->base.ctxr_dma);
ctx               363 drivers/crypto/inside-secure/safexcel_hash.c 	safexcel_context_control(ctx, req, first_cdesc);
ctx               418 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               434 drivers/crypto/inside-secure/safexcel_hash.c 	if (ctx->base.exit_inv) {
ctx               435 drivers/crypto/inside-secure/safexcel_hash.c 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx               436 drivers/crypto/inside-secure/safexcel_hash.c 			      ctx->base.ctxr_dma);
ctx               443 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->base.ring = ring;
ctx               486 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               489 drivers/crypto/inside-secure/safexcel_hash.c 	ret = safexcel_invalidate_cache(async, ctx->priv,
ctx               490 drivers/crypto/inside-secure/safexcel_hash.c 					ctx->base.ctxr_dma, ring);
ctx               517 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               518 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               522 drivers/crypto/inside-secure/safexcel_hash.c 	int ring = ctx->base.ring;
ctx               532 drivers/crypto/inside-secure/safexcel_hash.c 	ctx = crypto_tfm_ctx(req->base.tfm);
ctx               533 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->base.exit_inv = true;
ctx               584 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               586 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               591 drivers/crypto/inside-secure/safexcel_hash.c 	if (ctx->base.ctxr) {
ctx               592 drivers/crypto/inside-secure/safexcel_hash.c 		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
ctx               598 drivers/crypto/inside-secure/safexcel_hash.c 		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
ctx               603 drivers/crypto/inside-secure/safexcel_hash.c 		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
ctx               604 drivers/crypto/inside-secure/safexcel_hash.c 			     ctx->opad, req->state_sz))))
ctx               611 drivers/crypto/inside-secure/safexcel_hash.c 			ctx->base.needs_inv = true;
ctx               613 drivers/crypto/inside-secure/safexcel_hash.c 		if (ctx->base.needs_inv) {
ctx               614 drivers/crypto/inside-secure/safexcel_hash.c 			ctx->base.needs_inv = false;
ctx               618 drivers/crypto/inside-secure/safexcel_hash.c 		ctx->base.ring = safexcel_select_ring(priv);
ctx               619 drivers/crypto/inside-secure/safexcel_hash.c 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
ctx               621 drivers/crypto/inside-secure/safexcel_hash.c 						 &ctx->base.ctxr_dma);
ctx               622 drivers/crypto/inside-secure/safexcel_hash.c 		if (!ctx->base.ctxr)
ctx               626 drivers/crypto/inside-secure/safexcel_hash.c 	ring = ctx->base.ring;
ctx               666 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               676 drivers/crypto/inside-secure/safexcel_hash.c 		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
ctx               679 drivers/crypto/inside-secure/safexcel_hash.c 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
ctx               682 drivers/crypto/inside-secure/safexcel_hash.c 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
ctx               685 drivers/crypto/inside-secure/safexcel_hash.c 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
ctx               688 drivers/crypto/inside-secure/safexcel_hash.c 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
ctx               691 drivers/crypto/inside-secure/safexcel_hash.c 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
ctx               787 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               792 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->priv = tmpl->priv;
ctx               793 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->base.send = safexcel_ahash_send;
ctx               794 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->base.handle_result = safexcel_handle_result;
ctx               803 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               808 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
ctx               828 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               829 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx               833 drivers/crypto/inside-secure/safexcel_hash.c 	if (!ctx->base.ctxr)
ctx               841 drivers/crypto/inside-secure/safexcel_hash.c 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
ctx               842 drivers/crypto/inside-secure/safexcel_hash.c 			      ctx->base.ctxr_dma);
ctx               878 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx               884 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
ctx               889 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
ctx              1060 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              1061 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_crypto_priv *priv = ctx->priv;
ctx              1069 drivers/crypto/inside-secure/safexcel_hash.c 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
ctx              1070 drivers/crypto/inside-secure/safexcel_hash.c 	    (memcmp(ctx->ipad, istate.state, state_sz) ||
ctx              1071 drivers/crypto/inside-secure/safexcel_hash.c 	     memcmp(ctx->opad, ostate.state, state_sz)))
ctx              1072 drivers/crypto/inside-secure/safexcel_hash.c 		ctx->base.needs_inv = true;
ctx              1074 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(ctx->ipad, &istate.state, state_sz);
ctx              1075 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(ctx->opad, &ostate.state, state_sz);
ctx              1120 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1125 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
ctx              1175 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1180 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
ctx              1237 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1243 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
ctx              1248 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
ctx              1307 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1313 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
ctx              1318 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
ctx              1370 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1375 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
ctx              1425 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1430 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
ctx              1487 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1493 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
ctx              1498 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
ctx              1557 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1563 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
ctx              1568 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
ctx              1620 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1625 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
ctx              1675 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
ctx              1681 drivers/crypto/inside-secure/safexcel_hash.c 	memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
ctx              1686 drivers/crypto/inside-secure/safexcel_hash.c 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
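Note: the safexcel_hash.c entries show the HMAC variant of the same caching: setkey derives fresh inner/outer states (istate/ostate), compares them against the cached ctx->ipad/ctx->opad, and marks the context for invalidation on mismatch. The driver caches partially hashed pad states; the sketch below shows only the standard RFC 2104 pad derivation those states start from, with hypothetical names and a 64-byte block size assumed.

	#include <stdint.h>
	#include <string.h>

	#define TOY_BLK 64	/* SHA-1/SHA-224/SHA-256 block size */

	static void toy_hmac_pads(const uint8_t *key, size_t klen,
				  uint8_t ipad[TOY_BLK], uint8_t opad[TOY_BLK])
	{
		size_t i;

		if (klen > TOY_BLK)
			klen = TOY_BLK;	/* real HMAC hashes long keys down first */
		memset(ipad, 0, TOY_BLK);
		memcpy(ipad, key, klen);
		for (i = 0; i < TOY_BLK; i++) {
			opad[i] = ipad[i] ^ 0x5c;
			ipad[i] ^= 0x36;
		}
	}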
ctx               360 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx;
ctx               392 drivers/crypto/ixp4xx_crypto.c 		ctx = crypto_tfm_ctx(crypt->data.tfm);
ctx               396 drivers/crypto/ixp4xx_crypto.c 		if (atomic_dec_and_test(&ctx->configuring))
ctx               397 drivers/crypto/ixp4xx_crypto.c 			complete(&ctx->completion);
ctx               400 drivers/crypto/ixp4xx_crypto.c 		ctx = crypto_tfm_ctx(crypt->data.tfm);
ctx               401 drivers/crypto/ixp4xx_crypto.c 		*(u32*)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR);
ctx               402 drivers/crypto/ixp4xx_crypto.c 		if (atomic_dec_and_test(&ctx->configuring))
ctx               403 drivers/crypto/ixp4xx_crypto.c 			complete(&ctx->completion);
ctx               560 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               563 drivers/crypto/ixp4xx_crypto.c 	atomic_set(&ctx->configuring, 0);
ctx               564 drivers/crypto/ixp4xx_crypto.c 	ret = init_sa_dir(&ctx->encrypt);
ctx               567 drivers/crypto/ixp4xx_crypto.c 	ret = init_sa_dir(&ctx->decrypt);
ctx               569 drivers/crypto/ixp4xx_crypto.c 		free_sa_dir(&ctx->encrypt);
ctx               588 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               589 drivers/crypto/ixp4xx_crypto.c 	free_sa_dir(&ctx->encrypt);
ctx               590 drivers/crypto/ixp4xx_crypto.c 	free_sa_dir(&ctx->decrypt);
ctx               601 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               648 drivers/crypto/ixp4xx_crypto.c 	atomic_inc(&ctx->configuring);
ctx               662 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               665 drivers/crypto/ixp4xx_crypto.c 	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
ctx               704 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               705 drivers/crypto/ixp4xx_crypto.c 	struct ix_sa_dir *dir = &ctx->decrypt;
ctx               723 drivers/crypto/ixp4xx_crypto.c 	atomic_inc(&ctx->configuring);
ctx               736 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               739 drivers/crypto/ixp4xx_crypto.c 	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
ctx               815 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               819 drivers/crypto/ixp4xx_crypto.c 	init_completion(&ctx->completion);
ctx               820 drivers/crypto/ixp4xx_crypto.c 	atomic_inc(&ctx->configuring);
ctx               822 drivers/crypto/ixp4xx_crypto.c 	reset_sa_dir(&ctx->encrypt);
ctx               823 drivers/crypto/ixp4xx_crypto.c 	reset_sa_dir(&ctx->decrypt);
ctx               825 drivers/crypto/ixp4xx_crypto.c 	ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE;
ctx               826 drivers/crypto/ixp4xx_crypto.c 	ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE;
ctx               843 drivers/crypto/ixp4xx_crypto.c 	if (!atomic_dec_and_test(&ctx->configuring))
ctx               844 drivers/crypto/ixp4xx_crypto.c 		wait_for_completion(&ctx->completion);
ctx               858 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               864 drivers/crypto/ixp4xx_crypto.c 	memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE),
ctx               874 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               888 drivers/crypto/ixp4xx_crypto.c 	if (atomic_read(&ctx->configuring))
ctx               891 drivers/crypto/ixp4xx_crypto.c 	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
ctx               957 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               963 drivers/crypto/ixp4xx_crypto.c 	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
ctx               980 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
ctx               996 drivers/crypto/ixp4xx_crypto.c 	if (atomic_read(&ctx->configuring))
ctx              1000 drivers/crypto/ixp4xx_crypto.c 		dir = &ctx->encrypt;
ctx              1003 drivers/crypto/ixp4xx_crypto.c 		dir = &ctx->decrypt;
ctx              1092 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1097 drivers/crypto/ixp4xx_crypto.c 	if (!ctx->enckey_len && !ctx->authkey_len)
ctx              1099 drivers/crypto/ixp4xx_crypto.c 	init_completion(&ctx->completion);
ctx              1100 drivers/crypto/ixp4xx_crypto.c 	atomic_inc(&ctx->configuring);
ctx              1102 drivers/crypto/ixp4xx_crypto.c 	reset_sa_dir(&ctx->encrypt);
ctx              1103 drivers/crypto/ixp4xx_crypto.c 	reset_sa_dir(&ctx->decrypt);
ctx              1105 drivers/crypto/ixp4xx_crypto.c 	ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len);
ctx              1108 drivers/crypto/ixp4xx_crypto.c 	ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len);
ctx              1111 drivers/crypto/ixp4xx_crypto.c 	ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
ctx              1112 drivers/crypto/ixp4xx_crypto.c 			ctx->authkey_len, digest_len);
ctx              1115 drivers/crypto/ixp4xx_crypto.c 	ret = setup_auth(&tfm->base, 1, authsize, ctx->authkey,
ctx              1116 drivers/crypto/ixp4xx_crypto.c 			ctx->authkey_len, digest_len);
ctx              1129 drivers/crypto/ixp4xx_crypto.c 	if (!atomic_dec_and_test(&ctx->configuring))
ctx              1130 drivers/crypto/ixp4xx_crypto.c 		wait_for_completion(&ctx->completion);
ctx              1146 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1152 drivers/crypto/ixp4xx_crypto.c 	if (keys.authkeylen > sizeof(ctx->authkey))
ctx              1155 drivers/crypto/ixp4xx_crypto.c 	if (keys.enckeylen > sizeof(ctx->enckey))
ctx              1158 drivers/crypto/ixp4xx_crypto.c 	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
ctx              1159 drivers/crypto/ixp4xx_crypto.c 	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
ctx              1160 drivers/crypto/ixp4xx_crypto.c 	ctx->authkey_len = keys.authkeylen;
ctx              1161 drivers/crypto/ixp4xx_crypto.c 	ctx->enckey_len = keys.enckeylen;
ctx              1174 drivers/crypto/ixp4xx_crypto.c 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1183 drivers/crypto/ixp4xx_crypto.c 	if (keys.authkeylen > sizeof(ctx->authkey))
ctx              1190 drivers/crypto/ixp4xx_crypto.c 	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
ctx              1191 drivers/crypto/ixp4xx_crypto.c 	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
ctx              1192 drivers/crypto/ixp4xx_crypto.c 	ctx->authkey_len = keys.authkeylen;
ctx              1193 drivers/crypto/ixp4xx_crypto.c 	ctx->enckey_len = keys.enckeylen;
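Note: the ixp4xx_crypto.c entries revolve around a handshake between setkey and the NPE firmware: each async config message bumps the atomic ctx->configuring count, the message-done callback drops it, and the thread that started configuration waits on ctx->completion unless its own final decrement was the last one. A rough C11 analogue follows, with busy-waiting standing in for wait_for_completion() and all names invented.

	#include <stdatomic.h>
	#include <stdbool.h>

	struct toy_cfg {
		atomic_int configuring;
		atomic_bool done;
	};

	static void toy_cfg_start(struct toy_cfg *c)
	{
		atomic_store(&c->done, false);
		atomic_fetch_add(&c->configuring, 1);	/* one per in-flight message */
	}

	static void toy_cfg_msg_done(struct toy_cfg *c)
	{
		/* atomic_dec_and_test() analogue: the last decrement signals the waiter */
		if (atomic_fetch_sub(&c->configuring, 1) == 1)
			atomic_store(&c->done, true);
	}

	static void toy_cfg_wait(struct toy_cfg *c)
	{
		/* setkey drops its own reference, then waits if messages remain */
		toy_cfg_msg_done(c);
		while (!atomic_load(&c->done))
			;	/* wait_for_completion() stand-in */
	}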
ctx                55 drivers/crypto/marvell/cesa.c 	struct mv_cesa_ctx *ctx;
ctx                71 drivers/crypto/marvell/cesa.c 	ctx = crypto_tfm_ctx(req->tfm);
ctx                72 drivers/crypto/marvell/cesa.c 	ctx->ops->step(req);
ctx                78 drivers/crypto/marvell/cesa.c 	struct mv_cesa_ctx *ctx;
ctx                82 drivers/crypto/marvell/cesa.c 	ctx = crypto_tfm_ctx(req->tfm);
ctx                83 drivers/crypto/marvell/cesa.c 	res = ctx->ops->process(req, status);
ctx                86 drivers/crypto/marvell/cesa.c 		ctx->ops->complete(req);
ctx                89 drivers/crypto/marvell/cesa.c 		ctx->ops->step(req);
ctx               104 drivers/crypto/marvell/cesa.c mv_cesa_complete_req(struct mv_cesa_ctx *ctx, struct crypto_async_request *req,
ctx               107 drivers/crypto/marvell/cesa.c 	ctx->ops->cleanup(req);
ctx               117 drivers/crypto/marvell/cesa.c 	struct mv_cesa_ctx *ctx;
ctx               147 drivers/crypto/marvell/cesa.c 		ctx = crypto_tfm_ctx(req->tfm);
ctx               150 drivers/crypto/marvell/cesa.c 			mv_cesa_complete_req(ctx, req, res);
ctx               161 drivers/crypto/marvell/cesa.c 			ctx = crypto_tfm_ctx(req->tfm);
ctx               162 drivers/crypto/marvell/cesa.c 			mv_cesa_complete_req(ctx, req, 0);
ctx               270 drivers/crypto/marvell/cesa.h 	} ctx;
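Note: the marvell/cesa.c entries all route through one per-request ops table: the interrupt path calls ctx->ops->process() on the completed request, then either finishes it (complete plus cleanup) or re-steps it when more engine work is queued. A compact sketch of that dispatch, with toy types and a non-zero process() return standing in for -EINPROGRESS:

	struct toy_req;

	struct toy_ops {
		void (*step)(struct toy_req *req);
		int  (*process)(struct toy_req *req, int status);
		void (*complete)(struct toy_req *req);
		void (*cleanup)(struct toy_req *req);
	};

	struct toy_req {
		const struct toy_ops *ops;
	};

	static void toy_handle_irq(struct toy_req *req, int status)
	{
		int res = req->ops->process(req, status);

		if (res) {
			req->ops->step(req);	/* more work pending on the engine */
			return;
		}
		req->ops->complete(req);
		req->ops->cleanup(req);
	}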
ctx               212 drivers/crypto/marvell/cipher.c 		memcpy(skreq->iv, basereq->chain.last->op->ctx.blkcipher.iv,
ctx               230 drivers/crypto/marvell/cipher.c 	void *ctx = crypto_tfm_ctx(tfm);
ctx               232 drivers/crypto/marvell/cipher.c 	memzero_explicit(ctx, tfm->__crt_alg->cra_ctxsize);
ctx               237 drivers/crypto/marvell/cipher.c 	struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               239 drivers/crypto/marvell/cipher.c 	ctx->ops = &mv_cesa_skcipher_req_ops;
ctx               251 drivers/crypto/marvell/cipher.c 	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               257 drivers/crypto/marvell/cipher.c 	ret = aes_expandkey(&ctx->aes, key, len);
ctx               263 drivers/crypto/marvell/cipher.c 	remaining = (ctx->aes.key_length - 16) / 4;
ctx               264 drivers/crypto/marvell/cipher.c 	offset = ctx->aes.key_length + 24 - remaining;
ctx               266 drivers/crypto/marvell/cipher.c 		ctx->aes.key_dec[4 + i] =
ctx               267 drivers/crypto/marvell/cipher.c 			cpu_to_le32(ctx->aes.key_enc[offset + i]);
ctx               275 drivers/crypto/marvell/cipher.c 	struct mv_cesa_des_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               282 drivers/crypto/marvell/cipher.c 	memcpy(ctx->key, key, DES_KEY_SIZE);
ctx               290 drivers/crypto/marvell/cipher.c 	struct mv_cesa_des_ctx *ctx = crypto_skcipher_ctx(cipher);
ctx               297 drivers/crypto/marvell/cipher.c 	memcpy(ctx->key, key, DES3_EDE_KEY_SIZE);
ctx               468 drivers/crypto/marvell/cipher.c 	struct mv_cesa_des_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               473 drivers/crypto/marvell/cipher.c 	memcpy(tmpl->ctx.blkcipher.key, ctx->key, DES_KEY_SIZE);
ctx               526 drivers/crypto/marvell/cipher.c 	memcpy(tmpl->ctx.blkcipher.iv, req->iv, DES_BLOCK_SIZE);
ctx               573 drivers/crypto/marvell/cipher.c 	struct mv_cesa_des3_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               578 drivers/crypto/marvell/cipher.c 	memcpy(tmpl->ctx.blkcipher.key, ctx->key, DES3_EDE_KEY_SIZE);
ctx               631 drivers/crypto/marvell/cipher.c 	memcpy(tmpl->ctx.blkcipher.iv, req->iv, DES3_EDE_BLOCK_SIZE);
ctx               684 drivers/crypto/marvell/cipher.c 	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               692 drivers/crypto/marvell/cipher.c 		key = ctx->aes.key_dec;
ctx               694 drivers/crypto/marvell/cipher.c 		key = ctx->aes.key_enc;
ctx               696 drivers/crypto/marvell/cipher.c 	for (i = 0; i < ctx->aes.key_length / sizeof(u32); i++)
ctx               697 drivers/crypto/marvell/cipher.c 		tmpl->ctx.blkcipher.key[i] = cpu_to_le32(key[i]);
ctx               699 drivers/crypto/marvell/cipher.c 	if (ctx->aes.key_length == 24)
ctx               701 drivers/crypto/marvell/cipher.c 	else if (ctx->aes.key_length == 32)
ctx               758 drivers/crypto/marvell/cipher.c 	memcpy(tmpl->ctx.blkcipher.iv, req->iv, AES_BLOCK_SIZE);
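Note: one detail worth calling out in the marvell/cipher.c entries is the exit path wiping the whole transform context with memzero_explicit(), so key material cannot be discarded by the compiler's dead-store elimination. Below is a GCC/Clang user-space analogue of that barrier trick (a sketch only; C11's memset_s or explicit_bzero() serve the same purpose where available).

	#include <string.h>

	static void toy_memzero_explicit(void *p, size_t n)
	{
		memset(p, 0, n);
		/* The empty asm with a memory clobber pins the store so the
		 * optimizer cannot prove the buffer is dead and drop the wipe. */
		__asm__ __volatile__("" : : "r"(p) : "memory");
	}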
ctx               339 drivers/crypto/marvell/hash.c 		data = creq->base.chain.last->op->ctx.hash.hash;
ctx               428 drivers/crypto/marvell/hash.c 	struct mv_cesa_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               430 drivers/crypto/marvell/hash.c 	ctx->base.ops = &mv_cesa_ahash_req_ops;
ctx              1225 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1227 drivers/crypto/marvell/hash.c 	ctx->base.ops = &mv_cesa_ahash_req_ops;
ctx              1236 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              1240 drivers/crypto/marvell/hash.c 	memcpy(tmpl.ctx.hash.iv, ctx->iv, sizeof(ctx->iv));
ctx              1250 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              1259 drivers/crypto/marvell/hash.c 		ctx->iv[i] = be32_to_cpu(istate.hash[i]);
ctx              1262 drivers/crypto/marvell/hash.c 		ctx->iv[i + 8] = be32_to_cpu(ostate.hash[i]);
ctx              1306 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              1310 drivers/crypto/marvell/hash.c 	memcpy(tmpl.ctx.hash.iv, ctx->iv, sizeof(ctx->iv));
ctx              1320 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              1329 drivers/crypto/marvell/hash.c 		ctx->iv[i] = be32_to_cpu(istate.state[i]);
ctx              1332 drivers/crypto/marvell/hash.c 		ctx->iv[i + 8] = be32_to_cpu(ostate.state[i]);
ctx              1377 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              1386 drivers/crypto/marvell/hash.c 		ctx->iv[i] = be32_to_cpu(istate.state[i]);
ctx              1389 drivers/crypto/marvell/hash.c 		ctx->iv[i + 8] = be32_to_cpu(ostate.state[i]);
ctx              1396 drivers/crypto/marvell/hash.c 	struct mv_cesa_hmac_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx              1400 drivers/crypto/marvell/hash.c 	memcpy(tmpl.ctx.hash.iv, ctx->iv, sizeof(ctx->iv));
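Note: the marvell/hash.c HMAC setkey entries share a layout: the precomputed inner digest lands in ctx->iv[0..7] and the outer digest in ctx->iv[8..15], each word byte-swapped out of the hash's big-endian state. A sketch of that packing for an 8-word (SHA-256-sized) state, helper names invented:

	#include <stdint.h>

	static uint32_t toy_get_be32(const uint8_t *p)
	{
		return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
		       ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
	}

	static void toy_pack_hmac_iv(uint32_t iv[16], const uint8_t inner[32],
				     const uint8_t outer[32])
	{
		int i;

		for (i = 0; i < 8; i++) {
			iv[i]     = toy_get_be32(inner + 4 * i);	/* istate */
			iv[i + 8] = toy_get_be32(outer + 4 * i);	/* ostate */
		}
	}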
ctx               137 drivers/crypto/marvell/tdma.c 			struct mv_cesa_ctx *ctx;
ctx               160 drivers/crypto/marvell/tdma.c 			ctx = crypto_tfm_ctx(req->tfm);
ctx               163 drivers/crypto/marvell/tdma.c 			res = ctx->ops->process(req, current_status);
ctx               164 drivers/crypto/marvell/tdma.c 			ctx->ops->complete(req);
ctx               165 drivers/crypto/mediatek/mtk-aes.c static struct mtk_cryp *mtk_aes_find_dev(struct mtk_aes_base_ctx *ctx)
ctx               171 drivers/crypto/mediatek/mtk-aes.c 	if (!ctx->cryp) {
ctx               176 drivers/crypto/mediatek/mtk-aes.c 		ctx->cryp = cryp;
ctx               178 drivers/crypto/mediatek/mtk-aes.c 		cryp = ctx->cryp;
ctx               293 drivers/crypto/mediatek/mtk-aes.c 				    MTK_DESC_CT_LEN(aes->ctx->ct_size);
ctx               294 drivers/crypto/mediatek/mtk-aes.c 			cmd->ct = cpu_to_le32(aes->ctx->ct_dma);
ctx               295 drivers/crypto/mediatek/mtk-aes.c 			cmd->ct_hdr = aes->ctx->ct_hdr;
ctx               296 drivers/crypto/mediatek/mtk-aes.c 			cmd->tfm = cpu_to_le32(aes->ctx->tfm_dma);
ctx               341 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = aes->ctx;
ctx               343 drivers/crypto/mediatek/mtk-aes.c 	dma_unmap_single(cryp->dev, ctx->ct_dma, sizeof(ctx->info),
ctx               373 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = aes->ctx;
ctx               374 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_info *info = &ctx->info;
ctx               376 drivers/crypto/mediatek/mtk-aes.c 	ctx->ct_dma = dma_map_single(cryp->dev, info, sizeof(*info),
ctx               378 drivers/crypto/mediatek/mtk-aes.c 	if (unlikely(dma_mapping_error(cryp->dev, ctx->ct_dma)))
ctx               381 drivers/crypto/mediatek/mtk-aes.c 	ctx->tfm_dma = ctx->ct_dma + sizeof(info->cmd);
ctx               408 drivers/crypto/mediatek/mtk-aes.c 	dma_unmap_single(cryp->dev, ctx->ct_dma, sizeof(*info), DMA_TO_DEVICE);
ctx               418 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = aes->ctx;
ctx               419 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_info *info = &ctx->info;
ctx               422 drivers/crypto/mediatek/mtk-aes.c 	ctx->ct_hdr = AES_CT_CTRL_HDR | cpu_to_le32(len);
ctx               426 drivers/crypto/mediatek/mtk-aes.c 	info->tfm[0] = AES_TFM_SIZE(ctx->keylen) | ctx->keymode;
ctx               453 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_le(info->state + ctx->keylen, req->info,
ctx               460 drivers/crypto/mediatek/mtk-aes.c 	ctx->ct_size = cnt;
ctx               514 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx;
ctx               537 drivers/crypto/mediatek/mtk-aes.c 	ctx = crypto_tfm_ctx(areq->tfm);
ctx               539 drivers/crypto/mediatek/mtk-aes.c 	memcpy(ctx->info.state, ctx->key, sizeof(ctx->key));
ctx               542 drivers/crypto/mediatek/mtk-aes.c 	aes->ctx = ctx;
ctx               544 drivers/crypto/mediatek/mtk-aes.c 	return ctx->start(cryp, aes);
ctx               565 drivers/crypto/mediatek/mtk-aes.c mtk_aes_ctr_ctx_cast(struct mtk_aes_base_ctx *ctx)
ctx               567 drivers/crypto/mediatek/mtk-aes.c 	return container_of(ctx, struct mtk_aes_ctr_ctx, base);
ctx               572 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = aes->ctx;
ctx               573 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_ctr_ctx *cctx = mtk_aes_ctr_ctx_cast(ctx);
ctx               605 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_le(ctx->info.state + ctx->keylen, cctx->iv,
ctx               622 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_ctr_ctx *cctx = mtk_aes_ctr_ctx_cast(aes->ctx);
ctx               640 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               644 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_128BITS;
ctx               647 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_192BITS;
ctx               650 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_256BITS;
ctx               658 drivers/crypto/mediatek/mtk-aes.c 	ctx->keylen = SIZE_IN_WORDS(keylen);
ctx               659 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_le(ctx->key, (const u32 *)key, keylen);
ctx               667 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
ctx               671 drivers/crypto/mediatek/mtk-aes.c 	cryp = mtk_aes_find_dev(ctx);
ctx               734 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               737 drivers/crypto/mediatek/mtk-aes.c 	ctx->base.start = mtk_aes_start;
ctx               743 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               746 drivers/crypto/mediatek/mtk-aes.c 	ctx->base.start = mtk_aes_ctr_start;
ctx               858 drivers/crypto/mediatek/mtk-aes.c mtk_aes_gcm_ctx_cast(struct mtk_aes_base_ctx *ctx)
ctx               860 drivers/crypto/mediatek/mtk-aes.c 	return container_of(ctx, struct mtk_aes_gcm_ctx, base);
ctx               882 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = aes->ctx;
ctx               883 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);
ctx               884 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_info *info = &ctx->info;
ctx               888 drivers/crypto/mediatek/mtk-aes.c 	ctx->ct_hdr = AES_CT_CTRL_HDR | len;
ctx               903 drivers/crypto/mediatek/mtk-aes.c 	ctx->ct_size = cnt;
ctx               906 drivers/crypto/mediatek/mtk-aes.c 			ctx->keylen + SIZE_IN_WORDS(AES_BLOCK_SIZE + ivsize)) |
ctx               907 drivers/crypto/mediatek/mtk-aes.c 			ctx->keymode;
ctx               911 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_le(info->state + ctx->keylen + SIZE_IN_WORDS(
ctx               960 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(aes->ctx);
ctx               985 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               986 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);
ctx               991 drivers/crypto/mediatek/mtk-aes.c 	cryp = mtk_aes_find_dev(ctx);
ctx              1015 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = crypto_aead_ctx(aead);
ctx              1016 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);
ctx              1031 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_128BITS;
ctx              1034 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_192BITS;
ctx              1037 drivers/crypto/mediatek/mtk-aes.c 		ctx->keymode = AES_TFM_256BITS;
ctx              1045 drivers/crypto/mediatek/mtk-aes.c 	ctx->keylen = SIZE_IN_WORDS(keylen);
ctx              1076 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_le(ctx->key, (const u32 *)key, keylen);
ctx              1077 drivers/crypto/mediatek/mtk-aes.c 	mtk_aes_write_state_be(ctx->key + ctx->keylen, data->hash,
ctx              1087 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_base_ctx *ctx = crypto_aead_ctx(aead);
ctx              1088 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *gctx = mtk_aes_gcm_ctx_cast(ctx);
ctx              1116 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *ctx = crypto_aead_ctx(aead);
ctx              1118 drivers/crypto/mediatek/mtk-aes.c 	ctx->ctr = crypto_alloc_skcipher("ctr(aes)", 0,
ctx              1120 drivers/crypto/mediatek/mtk-aes.c 	if (IS_ERR(ctx->ctr)) {
ctx              1122 drivers/crypto/mediatek/mtk-aes.c 		return PTR_ERR(ctx->ctr);
ctx              1126 drivers/crypto/mediatek/mtk-aes.c 	ctx->base.start = mtk_aes_gcm_start;
ctx              1132 drivers/crypto/mediatek/mtk-aes.c 	struct mtk_aes_gcm_ctx *ctx = crypto_aead_ctx(aead);
ctx              1134 drivers/crypto/mediatek/mtk-aes.c 	crypto_free_skcipher(ctx->ctr);
ctx               151 drivers/crypto/mediatek/mtk-platform.h 	struct mtk_aes_base_ctx *ctx;
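Note: across the mediatek/mtk-aes.c entries, setkey reduces to the same shape: validate the key length, record the engine's key-size mode, convert the length to words with SIZE_IN_WORDS(), and copy the key into the transform state. A minimal sketch of the length-to-mode step, using toy constants in place of AES_TFM_*BITS:

	#define TOY_SIZE_IN_WORDS(x)	((x) >> 2)

	enum toy_keymode { TOY_128BITS, TOY_192BITS, TOY_256BITS };

	static int toy_pick_keymode(unsigned int keylen, enum toy_keymode *mode,
				    unsigned int *keylen_words)
	{
		switch (keylen) {
		case 16: *mode = TOY_128BITS; break;
		case 24: *mode = TOY_192BITS; break;
		case 32: *mode = TOY_256BITS; break;
		default: return -1;	/* -EINVAL analogue */
		}
		*keylen_words = TOY_SIZE_IN_WORDS(keylen);
		return 0;
	}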
ctx               181 drivers/crypto/mediatek/mtk-sha.c static int mtk_sha_append_sg(struct mtk_sha_reqctx *ctx)
ctx               185 drivers/crypto/mediatek/mtk-sha.c 	while ((ctx->bufcnt < SHA_BUF_SIZE) && ctx->total) {
ctx               186 drivers/crypto/mediatek/mtk-sha.c 		count = min(ctx->sg->length - ctx->offset, ctx->total);
ctx               187 drivers/crypto/mediatek/mtk-sha.c 		count = min(count, SHA_BUF_SIZE - ctx->bufcnt);
ctx               196 drivers/crypto/mediatek/mtk-sha.c 			if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) {
ctx               197 drivers/crypto/mediatek/mtk-sha.c 				ctx->sg = sg_next(ctx->sg);
ctx               204 drivers/crypto/mediatek/mtk-sha.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg,
ctx               205 drivers/crypto/mediatek/mtk-sha.c 					 ctx->offset, count, 0);
ctx               207 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt += count;
ctx               208 drivers/crypto/mediatek/mtk-sha.c 		ctx->offset += count;
ctx               209 drivers/crypto/mediatek/mtk-sha.c 		ctx->total -= count;
ctx               211 drivers/crypto/mediatek/mtk-sha.c 		if (ctx->offset == ctx->sg->length) {
ctx               212 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg_next(ctx->sg);
ctx               213 drivers/crypto/mediatek/mtk-sha.c 			if (ctx->sg)
ctx               214 drivers/crypto/mediatek/mtk-sha.c 				ctx->offset = 0;
ctx               216 drivers/crypto/mediatek/mtk-sha.c 				ctx->total = 0;
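Note: mtk_sha_append_sg() above drains the request's scatterlist into a linear staging buffer, clamping each copy to both the remaining scatterlist entry and the free buffer space. The double-min pattern in plain C, with a flat source array standing in for the scatterlist walk and invented names throughout:

	#include <stddef.h>
	#include <string.h>

	#define TOY_BUF_SIZE 512

	struct toy_hash_state {
		unsigned char buffer[TOY_BUF_SIZE];
		size_t bufcnt;		/* bytes staged so far */
		const unsigned char *src;
		size_t total;		/* bytes still to consume */
	};

	static void toy_append(struct toy_hash_state *s)
	{
		while (s->bufcnt < TOY_BUF_SIZE && s->total) {
			size_t count = s->total;

			if (count > TOY_BUF_SIZE - s->bufcnt)
				count = TOY_BUF_SIZE - s->bufcnt;	/* the two min()s, flattened */
			memcpy(s->buffer + s->bufcnt, s->src, count);
			s->bufcnt += count;
			s->src += count;
			s->total -= count;
		}
	}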
ctx               239 drivers/crypto/mediatek/mtk-sha.c static void mtk_sha_fill_padding(struct mtk_sha_reqctx *ctx, u32 len)
ctx               243 drivers/crypto/mediatek/mtk-sha.c 	u64 size = ctx->digcnt;
ctx               245 drivers/crypto/mediatek/mtk-sha.c 	size += ctx->bufcnt;
ctx               251 drivers/crypto/mediatek/mtk-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MSK) {
ctx               254 drivers/crypto/mediatek/mtk-sha.c 		index = ctx->bufcnt & 0x7f;
ctx               256 drivers/crypto/mediatek/mtk-sha.c 		*(ctx->buffer + ctx->bufcnt) = 0x80;
ctx               257 drivers/crypto/mediatek/mtk-sha.c 		memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen - 1);
ctx               258 drivers/crypto/mediatek/mtk-sha.c 		memcpy(ctx->buffer + ctx->bufcnt + padlen, bits, 16);
ctx               259 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt += padlen + 16;
ctx               260 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_PAD;
ctx               264 drivers/crypto/mediatek/mtk-sha.c 		index = ctx->bufcnt & 0x3f;
ctx               266 drivers/crypto/mediatek/mtk-sha.c 		*(ctx->buffer + ctx->bufcnt) = 0x80;
ctx               267 drivers/crypto/mediatek/mtk-sha.c 		memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen - 1);
ctx               268 drivers/crypto/mediatek/mtk-sha.c 		memcpy(ctx->buffer + ctx->bufcnt + padlen, &bits[1], 8);
ctx               269 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt += padlen + 8;
ctx               270 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_PAD;
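Note: mtk_sha_fill_padding() above is plain Merkle-Damgard padding: append 0x80, zero-fill, then store the message bit length at the block boundary (16 length bytes in the 0x7f/128-byte-block branch for SHA-384/512, 8 bytes in the 0x3f/64-byte-block branch). A self-contained sketch of the 64-byte-block branch, writing the length big-endian by hand:

	#include <stdint.h>
	#include <string.h>

	/* Returns the new buffer count; buf needs room for up to 72 extra bytes. */
	static size_t toy_sha_pad64(uint8_t *buf, size_t bufcnt, uint64_t total_bytes)
	{
		size_t index = bufcnt & 0x3f;
		size_t padlen = (index < 56) ? (56 - index) : (120 - index);
		uint64_t bits = total_bytes << 3;
		int i;

		buf[bufcnt] = 0x80;
		memset(buf + bufcnt + 1, 0, padlen - 1);
		for (i = 0; i < 8; i++)	/* cpu_to_be64 analogue */
			buf[bufcnt + padlen + i] = (uint8_t)(bits >> (56 - 8 * i));
		return bufcnt + padlen + 8;
	}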
ctx               276 drivers/crypto/mediatek/mtk-sha.c static void mtk_sha_info_init(struct mtk_sha_reqctx *ctx)
ctx               278 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_info *info = &ctx->info;
ctx               280 drivers/crypto/mediatek/mtk-sha.c 	ctx->ct_hdr = SHA_CT_CTRL_HDR;
ctx               281 drivers/crypto/mediatek/mtk-sha.c 	ctx->ct_size = SHA_CT_SIZE;
ctx               283 drivers/crypto/mediatek/mtk-sha.c 	info->tfm[0] = SHA_TFM_HASH | SHA_TFM_SIZE(SIZE_IN_WORDS(ctx->ds));
ctx               285 drivers/crypto/mediatek/mtk-sha.c 	switch (ctx->flags & SHA_FLAGS_ALGO_MSK) {
ctx               313 drivers/crypto/mediatek/mtk-sha.c 	info->cmd[2] = SHA_CMD2 | SHA_TFM_DIGEST(SIZE_IN_WORDS(ctx->ds));
ctx               324 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               325 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_info *info = &ctx->info;
ctx               327 drivers/crypto/mediatek/mtk-sha.c 	ctx->ct_hdr &= ~SHA_DATA_LEN_MSK;
ctx               328 drivers/crypto/mediatek/mtk-sha.c 	ctx->ct_hdr |= cpu_to_le32(len1 + len2);
ctx               333 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->digcnt)
ctx               336 drivers/crypto/mediatek/mtk-sha.c 	ctx->digcnt += len1;
ctx               338 drivers/crypto/mediatek/mtk-sha.c 	ctx->ct_dma = dma_map_single(cryp->dev, info, sizeof(*info),
ctx               340 drivers/crypto/mediatek/mtk-sha.c 	if (unlikely(dma_mapping_error(cryp->dev, ctx->ct_dma))) {
ctx               345 drivers/crypto/mediatek/mtk-sha.c 	ctx->tfm_dma = ctx->ct_dma + sizeof(info->ctrl) + sizeof(info->cmd);
ctx               360 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               367 drivers/crypto/mediatek/mtk-sha.c 	       crypto_shash_update(shash, bctx->opad, ctx->bs) ?:
ctx               368 drivers/crypto/mediatek/mtk-sha.c 	       crypto_shash_finup(shash, req->result, ctx->ds, req->result);
ctx               376 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               378 drivers/crypto/mediatek/mtk-sha.c 	ctx->flags = 0;
ctx               379 drivers/crypto/mediatek/mtk-sha.c 	ctx->ds = crypto_ahash_digestsize(tfm);
ctx               381 drivers/crypto/mediatek/mtk-sha.c 	switch (ctx->ds) {
ctx               383 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_SHA1;
ctx               384 drivers/crypto/mediatek/mtk-sha.c 		ctx->bs = SHA1_BLOCK_SIZE;
ctx               387 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_SHA224;
ctx               388 drivers/crypto/mediatek/mtk-sha.c 		ctx->bs = SHA224_BLOCK_SIZE;
ctx               391 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_SHA256;
ctx               392 drivers/crypto/mediatek/mtk-sha.c 		ctx->bs = SHA256_BLOCK_SIZE;
ctx               395 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_SHA384;
ctx               396 drivers/crypto/mediatek/mtk-sha.c 		ctx->bs = SHA384_BLOCK_SIZE;
ctx               399 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_SHA512;
ctx               400 drivers/crypto/mediatek/mtk-sha.c 		ctx->bs = SHA512_BLOCK_SIZE;
ctx               406 drivers/crypto/mediatek/mtk-sha.c 	ctx->bufcnt = 0;
ctx               407 drivers/crypto/mediatek/mtk-sha.c 	ctx->digcnt = 0;
ctx               408 drivers/crypto/mediatek/mtk-sha.c 	ctx->buffer = tctx->buf;
ctx               413 drivers/crypto/mediatek/mtk-sha.c 		memcpy(ctx->buffer, bctx->ipad, ctx->bs);
ctx               414 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt = ctx->bs;
ctx               415 drivers/crypto/mediatek/mtk-sha.c 		ctx->flags |= SHA_FLAGS_HMAC;
ctx               425 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               439 drivers/crypto/mediatek/mtk-sha.c 		   MTK_DESC_CT_LEN(ctx->ct_size);
ctx               441 drivers/crypto/mediatek/mtk-sha.c 	cmd->ct = cpu_to_le32(ctx->ct_dma);
ctx               442 drivers/crypto/mediatek/mtk-sha.c 	cmd->ct_hdr = ctx->ct_hdr;
ctx               443 drivers/crypto/mediatek/mtk-sha.c 	cmd->tfm = cpu_to_le32(ctx->tfm_dma);
ctx               470 drivers/crypto/mediatek/mtk-sha.c 			   struct mtk_sha_reqctx *ctx,
ctx               473 drivers/crypto/mediatek/mtk-sha.c 	ctx->dma_addr = dma_map_single(cryp->dev, ctx->buffer,
ctx               475 drivers/crypto/mediatek/mtk-sha.c 	if (unlikely(dma_mapping_error(cryp->dev, ctx->dma_addr))) {
ctx               480 drivers/crypto/mediatek/mtk-sha.c 	ctx->flags &= ~SHA_FLAGS_SG;
ctx               482 drivers/crypto/mediatek/mtk-sha.c 	return mtk_sha_xmit(cryp, sha, ctx->dma_addr, count, 0, 0);
ctx               488 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               492 drivers/crypto/mediatek/mtk-sha.c 	mtk_sha_append_sg(ctx);
ctx               494 drivers/crypto/mediatek/mtk-sha.c 	final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total;
ctx               496 drivers/crypto/mediatek/mtk-sha.c 	dev_dbg(cryp->dev, "slow: bufcnt: %zu\n", ctx->bufcnt);
ctx               500 drivers/crypto/mediatek/mtk-sha.c 		mtk_sha_fill_padding(ctx, 0);
ctx               503 drivers/crypto/mediatek/mtk-sha.c 	if (final || (ctx->bufcnt == SHA_BUF_SIZE && ctx->total)) {
ctx               504 drivers/crypto/mediatek/mtk-sha.c 		count = ctx->bufcnt;
ctx               505 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt = 0;
ctx               507 drivers/crypto/mediatek/mtk-sha.c 		return mtk_sha_dma_map(cryp, sha, ctx, count);
ctx               515 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               519 drivers/crypto/mediatek/mtk-sha.c 	if (!ctx->total)
ctx               522 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->bufcnt || ctx->offset)
ctx               525 drivers/crypto/mediatek/mtk-sha.c 	sg = ctx->sg;
ctx               530 drivers/crypto/mediatek/mtk-sha.c 	if (!sg_is_last(sg) && !IS_ALIGNED(sg->length, ctx->bs))
ctx               534 drivers/crypto/mediatek/mtk-sha.c 	len = min(ctx->total, sg->length);
ctx               537 drivers/crypto/mediatek/mtk-sha.c 		if (!(ctx->flags & SHA_FLAGS_FINUP)) {
ctx               539 drivers/crypto/mediatek/mtk-sha.c 			tail = len & (ctx->bs - 1);
ctx               544 drivers/crypto/mediatek/mtk-sha.c 	ctx->total -= len;
ctx               545 drivers/crypto/mediatek/mtk-sha.c 	ctx->offset = len; /* offset where to start slow */
ctx               547 drivers/crypto/mediatek/mtk-sha.c 	final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total;
ctx               553 drivers/crypto/mediatek/mtk-sha.c 		tail = len & (ctx->bs - 1);
ctx               555 drivers/crypto/mediatek/mtk-sha.c 		ctx->total += tail;
ctx               556 drivers/crypto/mediatek/mtk-sha.c 		ctx->offset = len; /* offset where to start slow */
ctx               558 drivers/crypto/mediatek/mtk-sha.c 		sg = ctx->sg;
ctx               559 drivers/crypto/mediatek/mtk-sha.c 		mtk_sha_append_sg(ctx);
ctx               560 drivers/crypto/mediatek/mtk-sha.c 		mtk_sha_fill_padding(ctx, len);
ctx               562 drivers/crypto/mediatek/mtk-sha.c 		ctx->dma_addr = dma_map_single(cryp->dev, ctx->buffer,
ctx               564 drivers/crypto/mediatek/mtk-sha.c 		if (unlikely(dma_mapping_error(cryp->dev, ctx->dma_addr))) {
ctx               570 drivers/crypto/mediatek/mtk-sha.c 		count = ctx->bufcnt;
ctx               571 drivers/crypto/mediatek/mtk-sha.c 		ctx->bufcnt = 0;
ctx               574 drivers/crypto/mediatek/mtk-sha.c 			ctx->flags &= ~SHA_FLAGS_SG;
ctx               575 drivers/crypto/mediatek/mtk-sha.c 			return mtk_sha_xmit(cryp, sha, ctx->dma_addr,
ctx               579 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg;
ctx               580 drivers/crypto/mediatek/mtk-sha.c 			if (!dma_map_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
ctx               585 drivers/crypto/mediatek/mtk-sha.c 			ctx->flags |= SHA_FLAGS_SG;
ctx               586 drivers/crypto/mediatek/mtk-sha.c 			return mtk_sha_xmit(cryp, sha, sg_dma_address(ctx->sg),
ctx               587 drivers/crypto/mediatek/mtk-sha.c 					    len, ctx->dma_addr, count);
ctx               591 drivers/crypto/mediatek/mtk-sha.c 	if (!dma_map_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
ctx               596 drivers/crypto/mediatek/mtk-sha.c 	ctx->flags |= SHA_FLAGS_SG;
ctx               598 drivers/crypto/mediatek/mtk-sha.c 	return mtk_sha_xmit(cryp, sha, sg_dma_address(ctx->sg),
ctx               605 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               608 drivers/crypto/mediatek/mtk-sha.c 	mtk_sha_fill_padding(ctx, 0);
ctx               611 drivers/crypto/mediatek/mtk-sha.c 	count = ctx->bufcnt;
ctx               612 drivers/crypto/mediatek/mtk-sha.c 	ctx->bufcnt = 0;
ctx               614 drivers/crypto/mediatek/mtk-sha.c 	return mtk_sha_dma_map(cryp, sha, ctx, count);
ctx               620 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               621 drivers/crypto/mediatek/mtk-sha.c 	__le32 *digest = ctx->info.digest;
ctx               626 drivers/crypto/mediatek/mtk-sha.c 	for (i = 0; i < SIZE_IN_WORDS(ctx->ds); i++)
ctx               629 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->flags & SHA_FLAGS_HMAC)
ctx               655 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx;
ctx               681 drivers/crypto/mediatek/mtk-sha.c 	ctx = ahash_request_ctx(req);
ctx               685 drivers/crypto/mediatek/mtk-sha.c 	mtk_sha_info_init(ctx);
ctx               687 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->op == SHA_OP_UPDATE) {
ctx               689 drivers/crypto/mediatek/mtk-sha.c 		if (err != -EINPROGRESS && (ctx->flags & SHA_FLAGS_FINUP))
ctx               692 drivers/crypto/mediatek/mtk-sha.c 	} else if (ctx->op == SHA_OP_FINAL) {
ctx               705 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               708 drivers/crypto/mediatek/mtk-sha.c 	ctx->op = op;
ctx               715 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(sha->req);
ctx               717 drivers/crypto/mediatek/mtk-sha.c 	dma_unmap_single(cryp->dev, ctx->ct_dma, sizeof(ctx->info),
ctx               720 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->flags & SHA_FLAGS_SG) {
ctx               721 drivers/crypto/mediatek/mtk-sha.c 		dma_unmap_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE);
ctx               722 drivers/crypto/mediatek/mtk-sha.c 		if (ctx->sg->length == ctx->offset) {
ctx               723 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg_next(ctx->sg);
ctx               724 drivers/crypto/mediatek/mtk-sha.c 			if (ctx->sg)
ctx               725 drivers/crypto/mediatek/mtk-sha.c 				ctx->offset = 0;
ctx               727 drivers/crypto/mediatek/mtk-sha.c 		if (ctx->flags & SHA_FLAGS_PAD) {
ctx               728 drivers/crypto/mediatek/mtk-sha.c 			dma_unmap_single(cryp->dev, ctx->dma_addr,
ctx               732 drivers/crypto/mediatek/mtk-sha.c 		dma_unmap_single(cryp->dev, ctx->dma_addr,
ctx               748 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               750 drivers/crypto/mediatek/mtk-sha.c 	ctx->total = req->nbytes;
ctx               751 drivers/crypto/mediatek/mtk-sha.c 	ctx->sg = req->src;
ctx               752 drivers/crypto/mediatek/mtk-sha.c 	ctx->offset = 0;
ctx               754 drivers/crypto/mediatek/mtk-sha.c 	if ((ctx->bufcnt + ctx->total < SHA_BUF_SIZE) &&
ctx               755 drivers/crypto/mediatek/mtk-sha.c 	    !(ctx->flags & SHA_FLAGS_FINUP))
ctx               756 drivers/crypto/mediatek/mtk-sha.c 		return mtk_sha_append_sg(ctx);
ctx               763 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               765 drivers/crypto/mediatek/mtk-sha.c 	ctx->flags |= SHA_FLAGS_FINUP;
ctx               767 drivers/crypto/mediatek/mtk-sha.c 	if (ctx->flags & SHA_FLAGS_PAD)
ctx               775 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               778 drivers/crypto/mediatek/mtk-sha.c 	ctx->flags |= SHA_FLAGS_FINUP;
ctx               834 drivers/crypto/mediatek/mtk-sha.c 	const struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               836 drivers/crypto/mediatek/mtk-sha.c 	memcpy(out, ctx, sizeof(*ctx));
ctx               842 drivers/crypto/mediatek/mtk-sha.c 	struct mtk_sha_reqctx *ctx = ahash_request_ctx(req);
ctx               844 drivers/crypto/mediatek/mtk-sha.c 	memcpy(ctx, in, sizeof(*ctx));
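
[Annotation] The mtk-sha lines above (src 834-844) implement .export/.import by copying the whole driver request context. A minimal sketch of that pattern, assuming a hypothetical driver-private struct my_sha_reqctx (real drivers carry DMA and buffer state in it):

	#include <crypto/internal/hash.h>
	#include <linux/string.h>
	#include <linux/types.h>

	/* Hypothetical request context; stands in for mtk_sha_reqctx. */
	struct my_sha_reqctx {
		u64	digcnt;
		u8	buffer[64];
		u32	bufcnt;
	};

	static int my_sha_export(struct ahash_request *req, void *out)
	{
		const struct my_sha_reqctx *rctx = ahash_request_ctx(req);

		memcpy(out, rctx, sizeof(*rctx));	/* snapshot hash state */
		return 0;
	}

	static int my_sha_import(struct ahash_request *req, const void *in)
	{
		struct my_sha_reqctx *rctx = ahash_request_ctx(req);

		memcpy(rctx, in, sizeof(*rctx));	/* restore snapshot */
		return 0;
	}

This only works when the reqctx holds no live pointers into per-request resources; anything like a DMA mapping would have to be re-established on import.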
ctx               429 drivers/crypto/mxs-dcp.c 	struct dcp_async_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               430 drivers/crypto/mxs-dcp.c 	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               433 drivers/crypto/mxs-dcp.c 	skcipher_request_set_sync_tfm(subreq, ctx->fallback);
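
[Annotation] The mxs-dcp lines above (src 429-433) show the on-stack sync-skcipher fallback idiom that recurs throughout this index. A hedged sketch of the full call pattern, assuming the fallback tfm was allocated earlier with crypto_alloc_sync_skcipher():

	#include <crypto/skcipher.h>

	static int my_fallback_crypt(struct crypto_sync_skcipher *fallback,
				     struct skcipher_request *req, bool enc)
	{
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, fallback);

		skcipher_request_set_sync_tfm(subreq, fallback);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);
	}

Because the subrequest lives on the stack, this path is only safe with a sync tfm that never returns -EINPROGRESS.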
ctx               313 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               315 drivers/crypto/n2_core.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               325 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               327 drivers/crypto/n2_core.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               339 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               341 drivers/crypto/n2_core.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               352 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               354 drivers/crypto/n2_core.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               377 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               393 drivers/crypto/n2_core.c 	ctx->fallback_tfm = fallback_tfm;
ctx               403 drivers/crypto/n2_core.c 	struct n2_hash_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               405 drivers/crypto/n2_core.c 	crypto_free_ahash(ctx->fallback_tfm);
ctx               412 drivers/crypto/n2_core.c 	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               438 drivers/crypto/n2_core.c 	ctx->child_shash = child_shash;
ctx               439 drivers/crypto/n2_core.c 	ctx->base.fallback_tfm = fallback_tfm;
ctx               452 drivers/crypto/n2_core.c 	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(ahash);
ctx               454 drivers/crypto/n2_core.c 	crypto_free_ahash(ctx->base.fallback_tfm);
ctx               455 drivers/crypto/n2_core.c 	crypto_free_shash(ctx->child_shash);
ctx               461 drivers/crypto/n2_core.c 	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               462 drivers/crypto/n2_core.c 	struct crypto_shash *child_shash = ctx->child_shash;
ctx               467 drivers/crypto/n2_core.c 	fallback_tfm = ctx->base.fallback_tfm;
ctx               479 drivers/crypto/n2_core.c 					  ctx->hash_key);
ctx               484 drivers/crypto/n2_core.c 		memcpy(ctx->hash_key, key, keylen);
ctx               486 drivers/crypto/n2_core.c 	ctx->hash_key_len = keylen;
ctx               538 drivers/crypto/n2_core.c 		struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               540 drivers/crypto/n2_core.c 		ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               632 drivers/crypto/n2_core.c 	struct n2_hmac_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               637 drivers/crypto/n2_core.c 	    unlikely(ctx->hash_key_len > N2_HASH_KEY_MAX)) {
ctx               639 drivers/crypto/n2_core.c 		struct n2_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               641 drivers/crypto/n2_core.c 		ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               656 drivers/crypto/n2_core.c 				  __pa(&ctx->hash_key),
ctx               657 drivers/crypto/n2_core.c 				  ctx->hash_key_len);
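
[Annotation] The n2_core HMAC setkey lines above (src 461-486) shorten keys longer than the block size by digesting them with the child shash; shorter keys are copied verbatim. A hedged sketch of that key-conditioning step (helper name hypothetical):

	#include <crypto/hash.h>
	#include <linux/string.h>

	static int my_hmac_shorten_key(struct crypto_shash *child,
				       const u8 *key, unsigned int keylen,
				       u8 *hash_key, unsigned int *hash_key_len)
	{
		int err = 0;

		if (keylen > crypto_shash_blocksize(child)) {
			SHASH_DESC_ON_STACK(desc, child);

			desc->tfm = child;
			/* replace the oversized key with its digest */
			err = crypto_shash_digest(desc, key, keylen, hash_key);
			*hash_key_len = crypto_shash_digestsize(child);
		} else {
			memcpy(hash_key, key, keylen);
			*hash_key_len = keylen;
		}
		return err;
	}

This matches the RFC 2104 rule that HMAC keys longer than one block are first hashed down to the digest size.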
ctx               732 drivers/crypto/n2_core.c 	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
ctx               735 drivers/crypto/n2_core.c 	ctx->enc_type = (n2alg->enc_type & ENC_TYPE_CHAINING_MASK);
ctx               739 drivers/crypto/n2_core.c 		ctx->enc_type |= ENC_TYPE_ALG_AES128;
ctx               742 drivers/crypto/n2_core.c 		ctx->enc_type |= ENC_TYPE_ALG_AES192;
ctx               745 drivers/crypto/n2_core.c 		ctx->enc_type |= ENC_TYPE_ALG_AES256;
ctx               752 drivers/crypto/n2_core.c 	ctx->key_len = keylen;
ctx               753 drivers/crypto/n2_core.c 	memcpy(ctx->key.aes, key, keylen);
ctx               761 drivers/crypto/n2_core.c 	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
ctx               769 drivers/crypto/n2_core.c 	ctx->enc_type = n2alg->enc_type;
ctx               771 drivers/crypto/n2_core.c 	ctx->key_len = keylen;
ctx               772 drivers/crypto/n2_core.c 	memcpy(ctx->key.des, key, keylen);
ctx               780 drivers/crypto/n2_core.c 	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
ctx               788 drivers/crypto/n2_core.c 	ctx->enc_type = n2alg->enc_type;
ctx               790 drivers/crypto/n2_core.c 	ctx->key_len = keylen;
ctx               791 drivers/crypto/n2_core.c 	memcpy(ctx->key.des3, key, keylen);
ctx               799 drivers/crypto/n2_core.c 	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
ctx               801 drivers/crypto/n2_core.c 	u8 *s = ctx->key.arc4;
ctx               806 drivers/crypto/n2_core.c 	ctx->enc_type = n2alg->enc_type;
ctx               836 drivers/crypto/n2_core.c 	struct n2_cipher_context *ctx = crypto_tfm_ctx(tfm);
ctx               851 drivers/crypto/n2_core.c 					 0, ctx->enc_type, 0, 0,
ctx               859 drivers/crypto/n2_core.c 	ent->enc_key_addr = __pa(&ctx->key);
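
[Annotation] The n2_core AES setkey lines above (src 732-753) map the key length onto a hardware algorithm code before stashing the key. A hedged sketch of that switch; the ENC_TYPE_* values here are hypothetical stand-ins for the driver's hardware encoding:

	#include <crypto/aes.h>
	#include <linux/errno.h>
	#include <linux/types.h>

	static int my_aes_pick_mode(unsigned int keylen, u32 *enc_type)
	{
		switch (keylen) {
		case AES_KEYSIZE_128:
			*enc_type |= 0x1;	/* e.g. ENC_TYPE_ALG_AES128 */
			break;
		case AES_KEYSIZE_192:
			*enc_type |= 0x2;	/* e.g. ENC_TYPE_ALG_AES192 */
			break;
		case AES_KEYSIZE_256:
			*enc_type |= 0x3;	/* e.g. ENC_TYPE_ALG_AES256 */
			break;
		default:
			return -EINVAL;	/* reject unsupported key sizes */
		}
		return 0;
	}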
ctx               106 drivers/crypto/nx/nx-842.c 	struct nx842_crypto_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               108 drivers/crypto/nx/nx-842.c 	spin_lock_init(&ctx->lock);
ctx               109 drivers/crypto/nx/nx-842.c 	ctx->driver = driver;
ctx               110 drivers/crypto/nx/nx-842.c 	ctx->wmem = kmalloc(driver->workmem_size, GFP_KERNEL);
ctx               111 drivers/crypto/nx/nx-842.c 	ctx->sbounce = (u8 *)__get_free_pages(GFP_KERNEL, BOUNCE_BUFFER_ORDER);
ctx               112 drivers/crypto/nx/nx-842.c 	ctx->dbounce = (u8 *)__get_free_pages(GFP_KERNEL, BOUNCE_BUFFER_ORDER);
ctx               113 drivers/crypto/nx/nx-842.c 	if (!ctx->wmem || !ctx->sbounce || !ctx->dbounce) {
ctx               114 drivers/crypto/nx/nx-842.c 		kfree(ctx->wmem);
ctx               115 drivers/crypto/nx/nx-842.c 		free_page((unsigned long)ctx->sbounce);
ctx               116 drivers/crypto/nx/nx-842.c 		free_page((unsigned long)ctx->dbounce);
ctx               126 drivers/crypto/nx/nx-842.c 	struct nx842_crypto_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               128 drivers/crypto/nx/nx-842.c 	kfree(ctx->wmem);
ctx               129 drivers/crypto/nx/nx-842.c 	free_page((unsigned long)ctx->sbounce);
ctx               130 drivers/crypto/nx/nx-842.c 	free_page((unsigned long)ctx->dbounce);
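
[Annotation] The nx-842 init/exit lines above (src 106-130) allocate a workmem buffer plus source/destination bounce pages and unwind everything if any allocation fails. A hedged sketch of that all-or-nothing setup; the order constant is hypothetical (the driver's is BOUNCE_BUFFER_ORDER), and here free_pages() is passed the matching order:

	#include <linux/gfp.h>
	#include <linux/slab.h>

	#define MY_BOUNCE_ORDER 2	/* hypothetical page order */

	struct my_ctx {
		void *wmem;
		u8 *sbounce, *dbounce;
	};

	static int my_ctx_init(struct my_ctx *ctx, size_t wmem_size)
	{
		ctx->wmem = kmalloc(wmem_size, GFP_KERNEL);
		ctx->sbounce = (u8 *)__get_free_pages(GFP_KERNEL, MY_BOUNCE_ORDER);
		ctx->dbounce = (u8 *)__get_free_pages(GFP_KERNEL, MY_BOUNCE_ORDER);
		if (!ctx->wmem || !ctx->sbounce || !ctx->dbounce) {
			/* kfree(NULL) and free_pages(0, n) are no-ops,
			 * so the error path can free unconditionally */
			kfree(ctx->wmem);
			free_pages((unsigned long)ctx->sbounce, MY_BOUNCE_ORDER);
			free_pages((unsigned long)ctx->dbounce, MY_BOUNCE_ORDER);
			return -ENOMEM;
		}
		return 0;
	}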
ctx               158 drivers/crypto/nx/nx-842.c static int compress(struct nx842_crypto_ctx *ctx,
ctx               187 drivers/crypto/nx/nx-842.c 			memset(ctx->sbounce + slen, 0, adj_slen - slen);
ctx               188 drivers/crypto/nx/nx-842.c 		memcpy(ctx->sbounce, src, slen);
ctx               189 drivers/crypto/nx/nx-842.c 		src = ctx->sbounce;
ctx               206 drivers/crypto/nx/nx-842.c 		dst = ctx->dbounce;
ctx               219 drivers/crypto/nx/nx-842.c 		ret = ctx->driver->compress(src, slen, dst, &dlen, ctx->wmem);
ctx               223 drivers/crypto/nx/nx-842.c 		if (ret == -ENOSPC && dst != ctx->dbounce)
ctx               231 drivers/crypto/nx/nx-842.c 	if (dst == ctx->dbounce)
ctx               253 drivers/crypto/nx/nx-842.c 	struct nx842_crypto_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               254 drivers/crypto/nx/nx-842.c 	struct nx842_crypto_header *hdr = &ctx->header;
ctx               256 drivers/crypto/nx/nx-842.c 	struct nx842_constraints c = *ctx->driver->constraints;
ctx               276 drivers/crypto/nx/nx-842.c 	spin_lock_bh(&ctx->lock);
ctx               304 drivers/crypto/nx/nx-842.c 		ret = compress(ctx, &p, &hdr->group[n], &c, &ignore, h);
ctx               330 drivers/crypto/nx/nx-842.c 	spin_unlock_bh(&ctx->lock);
ctx               335 drivers/crypto/nx/nx-842.c static int decompress(struct nx842_crypto_ctx *ctx,
ctx               373 drivers/crypto/nx/nx-842.c 			memset(ctx->sbounce + slen, 0, adj_slen - slen);
ctx               374 drivers/crypto/nx/nx-842.c 		memcpy(ctx->sbounce, src, slen);
ctx               375 drivers/crypto/nx/nx-842.c 		src = ctx->sbounce;
ctx               384 drivers/crypto/nx/nx-842.c 		dst = ctx->dbounce;
ctx               397 drivers/crypto/nx/nx-842.c 		ret = ctx->driver->decompress(src, slen, dst, &dlen, ctx->wmem);
ctx               408 drivers/crypto/nx/nx-842.c 			dst = ctx->dbounce;
ctx               423 drivers/crypto/nx/nx-842.c 	if (dst == ctx->dbounce)
ctx               436 drivers/crypto/nx/nx-842.c 	struct nx842_crypto_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               439 drivers/crypto/nx/nx-842.c 	struct nx842_constraints c = *ctx->driver->constraints;
ctx               455 drivers/crypto/nx/nx-842.c 	spin_lock_bh(&ctx->lock);
ctx               467 drivers/crypto/nx/nx-842.c 		ret = decompress(ctx, &p, &g, &c, 0);
ctx               492 drivers/crypto/nx/nx-842.c 	memcpy(&ctx->header, src, hdr_len);
ctx               493 drivers/crypto/nx/nx-842.c 	hdr = &ctx->header;
ctx               500 drivers/crypto/nx/nx-842.c 		ret = decompress(ctx, &p, &hdr->group[n], &c, ignore);
ctx               513 drivers/crypto/nx/nx-842.c 	spin_unlock_bh(&ctx->lock);
ctx               155 drivers/crypto/nx/nx.h int nx_hcall_sync(struct nx_crypto_ctx *ctx, struct vio_pfo_op *op,
ctx               180 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               183 drivers/crypto/omap-aes-gcm.c 	sk_req = skcipher_request_alloc(ctx->ctr, GFP_KERNEL);
ctx               195 drivers/crypto/omap-aes-gcm.c 	ret = crypto_skcipher_setkey(ctx->ctr, (u8 *)ctx->key, ctx->keylen);
ctx               249 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx;
ctx               275 drivers/crypto/omap-aes-gcm.c 	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               278 drivers/crypto/omap-aes-gcm.c 	dd->ctx = ctx;
ctx               353 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               356 drivers/crypto/omap-aes-gcm.c 	memcpy(rctx->iv, ctx->nonce, 4);
ctx               364 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               367 drivers/crypto/omap-aes-gcm.c 	memcpy(rctx->iv, ctx->nonce, 4);
ctx               375 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               381 drivers/crypto/omap-aes-gcm.c 	memcpy(ctx->key, key, keylen);
ctx               382 drivers/crypto/omap-aes-gcm.c 	ctx->keylen = keylen;
ctx               390 drivers/crypto/omap-aes-gcm.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               400 drivers/crypto/omap-aes-gcm.c 	memcpy(ctx->key, key, keylen);
ctx               401 drivers/crypto/omap-aes-gcm.c 	memcpy(ctx->nonce, key + keylen, 4);
ctx               402 drivers/crypto/omap-aes-gcm.c 	ctx->keylen = keylen;
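
[Annotation] The omap-aes-gcm setkey lines above (src 390-402) show the RFC 4106 key layout: the final four bytes of the supplied key are the fixed nonce, the remainder is the AES key. A minimal sketch of that split (helper name hypothetical):

	#include <linux/errno.h>
	#include <linux/string.h>
	#include <linux/types.h>

	static int my_rfc4106_split_key(const u8 *key, unsigned int keylen,
					u8 *aes_key, unsigned int *aes_keylen,
					u8 nonce[4])
	{
		if (keylen < 4)
			return -EINVAL;

		keylen -= 4;			/* trailing 4 bytes = nonce */
		memcpy(aes_key, key, keylen);
		memcpy(nonce, key + keylen, 4);
		*aes_keylen = keylen;
		return 0;
	}

The nonce is later prepended to the per-request IV (see the memcpy(rctx->iv, ctx->nonce, 4) lines above) to form the full 96-bit GCM IV.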
ctx               133 drivers/crypto/omap-aes.c 	key32 = dd->ctx->keylen / sizeof(u32);
ctx               142 drivers/crypto/omap-aes.c 			__le32_to_cpu(dd->ctx->key[i]));
ctx               153 drivers/crypto/omap-aes.c 	val = FLD_VAL(((dd->ctx->keylen >> 3) - 1), 4, 3);
ctx               418 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_ablkcipher_ctx(
ctx               463 drivers/crypto/omap-aes.c 	dd->ctx = ctx;
ctx               510 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_ablkcipher_ctx(
ctx               521 drivers/crypto/omap-aes.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               523 drivers/crypto/omap-aes.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               551 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               560 drivers/crypto/omap-aes.c 	memcpy(ctx->key, key, keylen);
ctx               561 drivers/crypto/omap-aes.c 	ctx->keylen = keylen;
ctx               563 drivers/crypto/omap-aes.c 	crypto_sync_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
ctx               564 drivers/crypto/omap-aes.c 	crypto_sync_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
ctx               567 drivers/crypto/omap-aes.c 	ret = crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
ctx               612 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               619 drivers/crypto/omap-aes.c 	ctx->fallback = blk;
ctx               623 drivers/crypto/omap-aes.c 	ctx->enginectx.op.prepare_request = omap_aes_prepare_req;
ctx               624 drivers/crypto/omap-aes.c 	ctx->enginectx.op.unprepare_request = NULL;
ctx               625 drivers/crypto/omap-aes.c 	ctx->enginectx.op.do_one_request = omap_aes_crypt_req;
ctx               633 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               651 drivers/crypto/omap-aes.c 	ctx->ctr = crypto_alloc_skcipher("ecb(aes)", 0, 0);
ctx               652 drivers/crypto/omap-aes.c 	if (IS_ERR(ctx->ctr)) {
ctx               654 drivers/crypto/omap-aes.c 		return PTR_ERR(ctx->ctr);
ctx               662 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               664 drivers/crypto/omap-aes.c 	if (ctx->fallback)
ctx               665 drivers/crypto/omap-aes.c 		crypto_free_sync_skcipher(ctx->fallback);
ctx               667 drivers/crypto/omap-aes.c 	ctx->fallback = NULL;
ctx               672 drivers/crypto/omap-aes.c 	struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm);
ctx               676 drivers/crypto/omap-aes.c 	if (ctx->ctr)
ctx               677 drivers/crypto/omap-aes.c 		crypto_free_skcipher(ctx->ctr);
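
[Annotation] The omap-aes setkey lines above (src 551-567) keep a software fallback in lock-step with the hardware tfm: request flags are mirrored onto the fallback before it is keyed. A hedged sketch of that propagation (helper name hypothetical):

	#include <crypto/skcipher.h>
	#include <linux/crypto.h>

	static int my_setkey_with_fallback(struct crypto_sync_skcipher *fallback,
					   u32 tfm_flags, const u8 *key,
					   unsigned int keylen)
	{
		/* mirror only the request-scope flags onto the fallback */
		crypto_sync_skcipher_clear_flags(fallback, CRYPTO_TFM_REQ_MASK);
		crypto_sync_skcipher_set_flags(fallback,
					       tfm_flags & CRYPTO_TFM_REQ_MASK);
		return crypto_sync_skcipher_setkey(fallback, key, keylen);
	}

If the fallback rejects the key, the hardware tfm must report the same failure, so callers typically return this result directly.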
ctx               156 drivers/crypto/omap-aes.h 	struct omap_aes_ctx	*ctx;
ctx               135 drivers/crypto/omap-des.c 	struct omap_des_ctx	*ctx;
ctx               256 drivers/crypto/omap-des.c 	key32 = dd->ctx->keylen / sizeof(u32);
ctx               261 drivers/crypto/omap-des.c 			       __le32_to_cpu(dd->ctx->key[i]));
ctx               310 drivers/crypto/omap-des.c static struct omap_des_dev *omap_des_find_dev(struct omap_des_ctx *ctx)
ctx               315 drivers/crypto/omap-des.c 	if (!ctx->dd) {
ctx               321 drivers/crypto/omap-des.c 		ctx->dd = dd;
ctx               324 drivers/crypto/omap-des.c 		dd = ctx->dd;
ctx               380 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               381 drivers/crypto/omap-des.c 	struct omap_des_dev *dd = ctx->dd;
ctx               529 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_ablkcipher_ctx(
ctx               531 drivers/crypto/omap-des.c 	struct omap_des_dev *dd = omap_des_find_dev(ctx);
ctx               572 drivers/crypto/omap-des.c 	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
ctx               576 drivers/crypto/omap-des.c 	dd->ctx = ctx;
ctx               577 drivers/crypto/omap-des.c 	ctx->dd = dd;
ctx               586 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_ablkcipher_ctx(
ctx               588 drivers/crypto/omap-des.c 	struct omap_des_dev *dd = omap_des_find_dev(ctx);
ctx               624 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_ablkcipher_ctx(
ctx               638 drivers/crypto/omap-des.c 	dd = omap_des_find_dev(ctx);
ctx               652 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               661 drivers/crypto/omap-des.c 	memcpy(ctx->key, key, keylen);
ctx               662 drivers/crypto/omap-des.c 	ctx->keylen = keylen;
ctx               670 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               679 drivers/crypto/omap-des.c 	memcpy(ctx->key, key, keylen);
ctx               680 drivers/crypto/omap-des.c 	ctx->keylen = keylen;
ctx               712 drivers/crypto/omap-des.c 	struct omap_des_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               718 drivers/crypto/omap-des.c 	ctx->enginectx.op.prepare_request = omap_des_prepare_req;
ctx               719 drivers/crypto/omap-des.c 	ctx->enginectx.op.unprepare_request = NULL;
ctx               720 drivers/crypto/omap-des.c 	ctx->enginectx.op.do_one_request = omap_des_crypt_req;
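
[Annotation] The omap-des cra_init lines above (src 712-720) wire the tfm into the crypto_engine by filling in per-tfm operation callbacks. A hedged sketch of that hookup, assuming the pre-5.19 crypto_engine_ctx API shown in the listing; function names are hypothetical:

	#include <crypto/engine.h>

	struct my_tfm_ctx {
		struct crypto_engine_ctx enginectx;	/* engine expects this first */
		/* ... keys, device pointer ... */
	};

	static int my_prepare_req(struct crypto_engine *engine, void *areq);
	static int my_do_one_req(struct crypto_engine *engine, void *areq);

	static int my_cra_init(struct crypto_tfm *tfm)
	{
		struct my_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

		ctx->enginectx.op.prepare_request = my_prepare_req;
		ctx->enginectx.op.unprepare_request = NULL;	/* optional hook */
		ctx->enginectx.op.do_one_request = my_do_one_req;
		return 0;
	}

The engine dequeues requests serially and calls do_one_request for each, which is what lets these drivers avoid their own queueing locks.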
ctx               283 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx               284 drivers/crypto/omap-sham.c 	struct omap_sham_dev *dd = ctx->dd;
ctx               285 drivers/crypto/omap-sham.c 	u32 *hash = (u32 *)ctx->digest;
ctx               298 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx               299 drivers/crypto/omap-sham.c 	struct omap_sham_dev *dd = ctx->dd;
ctx               302 drivers/crypto/omap-sham.c 	if (ctx->flags & BIT(FLAGS_HMAC)) {
ctx               323 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx               324 drivers/crypto/omap-sham.c 	u32 *in = (u32 *)ctx->digest;
ctx               331 drivers/crypto/omap-sham.c 	switch (ctx->flags & FLAGS_MODE_MASK) {
ctx               337 drivers/crypto/omap-sham.c 		if (test_bit(FLAGS_BE32_SHA1, &ctx->dd->flags))
ctx               386 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               389 drivers/crypto/omap-sham.c 	if (likely(ctx->digcnt))
ctx               390 drivers/crypto/omap-sham.c 		omap_sham_write(dd, SHA_REG_DIGCNT(dd), ctx->digcnt);
ctx               399 drivers/crypto/omap-sham.c 	if ((ctx->flags & FLAGS_MODE_MASK) == FLAGS_MODE_SHA1)
ctx               401 drivers/crypto/omap-sham.c 	if (!ctx->digcnt)
ctx               421 drivers/crypto/omap-sham.c static int get_block_size(struct omap_sham_reqctx *ctx)
ctx               425 drivers/crypto/omap-sham.c 	switch (ctx->flags & FLAGS_MODE_MASK) {
ctx               455 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               463 drivers/crypto/omap-sham.c 	val = (ctx->flags & FLAGS_MODE_MASK) >> (FLAGS_MODE_SHIFT);
ctx               464 drivers/crypto/omap-sham.c 	if (!ctx->digcnt) {
ctx               472 drivers/crypto/omap-sham.c 		if (ctx->flags & BIT(FLAGS_HMAC)) {
ctx               473 drivers/crypto/omap-sham.c 			bs = get_block_size(ctx);
ctx               480 drivers/crypto/omap-sham.c 			ctx->digcnt += bs;
ctx               487 drivers/crypto/omap-sham.c 		if (ctx->flags & BIT(FLAGS_HMAC))
ctx               495 drivers/crypto/omap-sham.c 	dev_dbg(dd->dev, "ctrl: %08x, flags: %08lx\n", val, ctx->flags);
ctx               518 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               525 drivers/crypto/omap-sham.c 						ctx->digcnt, length, final);
ctx               531 drivers/crypto/omap-sham.c 	ctx->digcnt += length;
ctx               532 drivers/crypto/omap-sham.c 	ctx->total -= length;
ctx               540 drivers/crypto/omap-sham.c 	bs32 = get_block_size(ctx) / sizeof(u32);
ctx               542 drivers/crypto/omap-sham.c 	sg_miter_start(&mi, ctx->sg, ctx->sg_len,
ctx               585 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               591 drivers/crypto/omap-sham.c 						ctx->digcnt, length, final);
ctx               593 drivers/crypto/omap-sham.c 	if (!dma_map_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE)) {
ctx               602 drivers/crypto/omap-sham.c 	cfg.dst_maxburst = get_block_size(ctx) / DMA_SLAVE_BUSWIDTH_4_BYTES;
ctx               610 drivers/crypto/omap-sham.c 	tx = dmaengine_prep_slave_sg(dd->dma_lch, ctx->sg, ctx->sg_len,
ctx               624 drivers/crypto/omap-sham.c 	ctx->digcnt += length;
ctx               625 drivers/crypto/omap-sham.c 	ctx->total -= length;
ctx               640 drivers/crypto/omap-sham.c static int omap_sham_copy_sg_lists(struct omap_sham_reqctx *ctx,
ctx               645 drivers/crypto/omap-sham.c 	int offset = ctx->offset;
ctx               647 drivers/crypto/omap-sham.c 	if (ctx->bufcnt)
ctx               650 drivers/crypto/omap-sham.c 	ctx->sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);
ctx               651 drivers/crypto/omap-sham.c 	if (!ctx->sg)
ctx               654 drivers/crypto/omap-sham.c 	sg_init_table(ctx->sg, n);
ctx               656 drivers/crypto/omap-sham.c 	tmp = ctx->sg;
ctx               658 drivers/crypto/omap-sham.c 	ctx->sg_len = 0;
ctx               660 drivers/crypto/omap-sham.c 	if (ctx->bufcnt) {
ctx               661 drivers/crypto/omap-sham.c 		sg_set_buf(tmp, ctx->dd->xmit_buf, ctx->bufcnt);
ctx               663 drivers/crypto/omap-sham.c 		ctx->sg_len++;
ctx               684 drivers/crypto/omap-sham.c 			ctx->sg_len++;
ctx               690 drivers/crypto/omap-sham.c 	set_bit(FLAGS_SGS_ALLOCED, &ctx->dd->flags);
ctx               692 drivers/crypto/omap-sham.c 	ctx->bufcnt = 0;
ctx               697 drivers/crypto/omap-sham.c static int omap_sham_copy_sgs(struct omap_sham_reqctx *ctx,
ctx               704 drivers/crypto/omap-sham.c 	len = new_len + ctx->bufcnt;
ctx               706 drivers/crypto/omap-sham.c 	pages = get_order(ctx->total);
ctx               714 drivers/crypto/omap-sham.c 	if (ctx->bufcnt)
ctx               715 drivers/crypto/omap-sham.c 		memcpy(buf, ctx->dd->xmit_buf, ctx->bufcnt);
ctx               717 drivers/crypto/omap-sham.c 	scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->offset,
ctx               718 drivers/crypto/omap-sham.c 				 ctx->total - ctx->bufcnt, 0);
ctx               719 drivers/crypto/omap-sham.c 	sg_init_table(ctx->sgl, 1);
ctx               720 drivers/crypto/omap-sham.c 	sg_set_buf(ctx->sgl, buf, len);
ctx               721 drivers/crypto/omap-sham.c 	ctx->sg = ctx->sgl;
ctx               722 drivers/crypto/omap-sham.c 	set_bit(FLAGS_SGS_COPIED, &ctx->dd->flags);
ctx               723 drivers/crypto/omap-sham.c 	ctx->sg_len = 1;
ctx               724 drivers/crypto/omap-sham.c 	ctx->bufcnt = 0;
ctx               725 drivers/crypto/omap-sham.c 	ctx->offset = 0;
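
[Annotation] omap_sham_copy_sgs() above (src 697-725) flattens an SG list the DMA engine cannot handle into one freshly allocated bounce buffer presented as a single-entry SG table. A hedged sketch of the core of that move (helper name hypothetical):

	#include <crypto/scatterwalk.h>
	#include <linux/gfp.h>
	#include <linux/scatterlist.h>

	static int my_flatten_sg(struct scatterlist *src, unsigned int offset,
				 unsigned int len, struct scatterlist *out_sgl)
	{
		void *buf = (void *)__get_free_pages(GFP_ATOMIC, get_order(len));

		if (!buf)
			return -ENOMEM;

		/* gather: copy len bytes out of the SG chain into buf */
		scatterwalk_map_and_copy(buf, src, offset, len, 0);
		sg_init_table(out_sgl, 1);
		sg_set_buf(out_sgl, buf, len);
		return 0;
	}

The caller must remember that the buffer was page-allocated and free it on completion, which is what the FLAGS_SGS_COPIED bookkeeping above is for.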
ctx               912 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req);
ctx               914 drivers/crypto/omap-sham.c 	dma_unmap_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
ctx               925 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx               941 drivers/crypto/omap-sham.c 	ctx->dd = dd;
ctx               943 drivers/crypto/omap-sham.c 	ctx->flags = 0;
ctx               950 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_MD5;
ctx               954 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_SHA1;
ctx               958 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_SHA224;
ctx               962 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_SHA256;
ctx               966 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_SHA384;
ctx               970 drivers/crypto/omap-sham.c 		ctx->flags |= FLAGS_MODE_SHA512;
ctx               975 drivers/crypto/omap-sham.c 	ctx->bufcnt = 0;
ctx               976 drivers/crypto/omap-sham.c 	ctx->digcnt = 0;
ctx               977 drivers/crypto/omap-sham.c 	ctx->total = 0;
ctx               978 drivers/crypto/omap-sham.c 	ctx->offset = 0;
ctx               979 drivers/crypto/omap-sham.c 	ctx->buflen = BUFLEN;
ctx               985 drivers/crypto/omap-sham.c 			memcpy(ctx->buffer, bctx->ipad, bs);
ctx               986 drivers/crypto/omap-sham.c 			ctx->bufcnt = bs;
ctx               989 drivers/crypto/omap-sham.c 		ctx->flags |= BIT(FLAGS_HMAC);
ctx               999 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1001 drivers/crypto/omap-sham.c 	bool final = ctx->flags & BIT(FLAGS_FINUP);
ctx              1004 drivers/crypto/omap-sham.c 		 ctx->total, ctx->digcnt, (ctx->flags & BIT(FLAGS_FINUP)) != 0);
ctx              1006 drivers/crypto/omap-sham.c 	if (ctx->total < get_block_size(ctx) ||
ctx              1007 drivers/crypto/omap-sham.c 	    ctx->total < dd->fallback_sz)
ctx              1008 drivers/crypto/omap-sham.c 		ctx->flags |= BIT(FLAGS_CPU);
ctx              1010 drivers/crypto/omap-sham.c 	if (ctx->flags & BIT(FLAGS_CPU))
ctx              1011 drivers/crypto/omap-sham.c 		err = omap_sham_xmit_cpu(dd, ctx->total, final);
ctx              1013 drivers/crypto/omap-sham.c 		err = omap_sham_xmit_dma(dd, ctx->total, final);
ctx              1016 drivers/crypto/omap-sham.c 	dev_dbg(dd->dev, "update: err: %d, digcnt: %d\n", err, ctx->digcnt);
ctx              1024 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1027 drivers/crypto/omap-sham.c 	if ((ctx->total <= get_block_size(ctx)) || dd->polling_mode)
ctx              1035 drivers/crypto/omap-sham.c 		err = omap_sham_xmit_dma(dd, ctx->total, 1);
ctx              1037 drivers/crypto/omap-sham.c 		err = omap_sham_xmit_cpu(dd, ctx->total, 1);
ctx              1039 drivers/crypto/omap-sham.c 	ctx->bufcnt = 0;
ctx              1063 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1064 drivers/crypto/omap-sham.c 	struct omap_sham_dev *dd = ctx->dd;
ctx              1067 drivers/crypto/omap-sham.c 	if (ctx->digcnt) {
ctx              1069 drivers/crypto/omap-sham.c 		if ((ctx->flags & BIT(FLAGS_HMAC)) &&
ctx              1074 drivers/crypto/omap-sham.c 	dev_dbg(dd->dev, "digcnt: %d, bufcnt: %d\n", ctx->digcnt, ctx->bufcnt);
ctx              1081 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1082 drivers/crypto/omap-sham.c 	struct omap_sham_dev *dd = ctx->dd;
ctx              1085 drivers/crypto/omap-sham.c 		free_pages((unsigned long)sg_virt(ctx->sg),
ctx              1086 drivers/crypto/omap-sham.c 			   get_order(ctx->sg->length + ctx->bufcnt));
ctx              1089 drivers/crypto/omap-sham.c 		kfree(ctx->sg);
ctx              1091 drivers/crypto/omap-sham.c 	ctx->sg = NULL;
ctx              1100 drivers/crypto/omap-sham.c 		ctx->flags |= BIT(FLAGS_ERROR);
ctx              1118 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx;
ctx              1144 drivers/crypto/omap-sham.c 	ctx = ahash_request_ctx(req);
ctx              1146 drivers/crypto/omap-sham.c 	err = omap_sham_prepare_request(req, ctx->op == OP_UPDATE);
ctx              1147 drivers/crypto/omap-sham.c 	if (err || !ctx->total)
ctx              1151 drivers/crypto/omap-sham.c 						ctx->op, req->nbytes);
ctx              1157 drivers/crypto/omap-sham.c 	if (ctx->digcnt)
ctx              1161 drivers/crypto/omap-sham.c 	if (ctx->op == OP_UPDATE) {
ctx              1163 drivers/crypto/omap-sham.c 		if (err != -EINPROGRESS && (ctx->flags & BIT(FLAGS_FINUP)))
ctx              1166 drivers/crypto/omap-sham.c 	} else if (ctx->op == OP_FINAL) {
ctx              1189 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1193 drivers/crypto/omap-sham.c 	ctx->op = op;
ctx              1200 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1201 drivers/crypto/omap-sham.c 	struct omap_sham_dev *dd = ctx->dd;
ctx              1206 drivers/crypto/omap-sham.c 	if (ctx->bufcnt + req->nbytes <= ctx->buflen) {
ctx              1207 drivers/crypto/omap-sham.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, req->src,
ctx              1209 drivers/crypto/omap-sham.c 		ctx->bufcnt += req->nbytes;
ctx              1214 drivers/crypto/omap-sham.c 		ctx->flags |= BIT(FLAGS_CPU);
ctx              1232 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1240 drivers/crypto/omap-sham.c 	if (test_bit(FLAGS_HMAC, &ctx->flags) &&
ctx              1241 drivers/crypto/omap-sham.c 	    !test_bit(FLAGS_AUTO_XOR, &ctx->dd->flags))
ctx              1242 drivers/crypto/omap-sham.c 		offset = get_block_size(ctx);
ctx              1245 drivers/crypto/omap-sham.c 				      ctx->buffer + offset,
ctx              1246 drivers/crypto/omap-sham.c 				      ctx->bufcnt - offset, req->result);
ctx              1251 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1253 drivers/crypto/omap-sham.c 	ctx->flags |= BIT(FLAGS_FINUP);
ctx              1255 drivers/crypto/omap-sham.c 	if (ctx->flags & BIT(FLAGS_ERROR))
ctx              1265 drivers/crypto/omap-sham.c 	if (!ctx->digcnt && ctx->bufcnt < ctx->dd->fallback_sz)
ctx              1267 drivers/crypto/omap-sham.c 	else if (ctx->bufcnt)
ctx              1276 drivers/crypto/omap-sham.c 	struct omap_sham_reqctx *ctx = ahash_request_ctx(req);
ctx              1279 drivers/crypto/omap-sham.c 	ctx->flags |= BIT(FLAGS_FINUP);
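
[Annotation] The omap-sham update lines above (src 1200-1214) show the small-update fast path: data that still fits in the context buffer is copied out of the SG list and the request completes without touching the hardware. A hedged sketch under a hypothetical reqctx layout:

	#include <crypto/internal/hash.h>
	#include <crypto/scatterwalk.h>
	#include <linux/types.h>

	struct my_reqctx {
		u8 buffer[512];			/* hypothetical staging buffer */
		unsigned int bufcnt, buflen;
	};

	/* Returns true when the data was absorbed into the buffer. */
	static bool my_sham_try_buffer(struct my_reqctx *rctx,
				       struct ahash_request *req)
	{
		if (rctx->bufcnt + req->nbytes > rctx->buflen)
			return false;	/* too big: must go to the engine */

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt,
					 req->src, 0, req->nbytes, 0);
		rctx->bufcnt += req->nbytes;
		return true;
	}

Deferring tiny updates this way amortises the per-request hardware setup cost, which is also why the driver compares against fallback_sz before choosing the DMA path.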
ctx                85 drivers/crypto/padlock-aes.c static inline struct aes_ctx *aes_ctx_common(void *ctx)
ctx                87 drivers/crypto/padlock-aes.c 	unsigned long addr = (unsigned long)ctx;
ctx               108 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = aes_ctx(tfm);
ctx               124 drivers/crypto/padlock-aes.c 	ctx->D = ctx->E;
ctx               126 drivers/crypto/padlock-aes.c 	ctx->E[0] = le32_to_cpu(key[0]);
ctx               127 drivers/crypto/padlock-aes.c 	ctx->E[1] = le32_to_cpu(key[1]);
ctx               128 drivers/crypto/padlock-aes.c 	ctx->E[2] = le32_to_cpu(key[2]);
ctx               129 drivers/crypto/padlock-aes.c 	ctx->E[3] = le32_to_cpu(key[3]);
ctx               132 drivers/crypto/padlock-aes.c 	memset(&ctx->cword, 0, sizeof(ctx->cword));
ctx               134 drivers/crypto/padlock-aes.c 	ctx->cword.decrypt.encdec = 1;
ctx               135 drivers/crypto/padlock-aes.c 	ctx->cword.encrypt.rounds = 10 + (key_len - 16) / 4;
ctx               136 drivers/crypto/padlock-aes.c 	ctx->cword.decrypt.rounds = ctx->cword.encrypt.rounds;
ctx               137 drivers/crypto/padlock-aes.c 	ctx->cword.encrypt.ksize = (key_len - 16) / 8;
ctx               138 drivers/crypto/padlock-aes.c 	ctx->cword.decrypt.ksize = ctx->cword.encrypt.ksize;
ctx               144 drivers/crypto/padlock-aes.c 	ctx->D = ctx->d_data;
ctx               145 drivers/crypto/padlock-aes.c 	ctx->cword.encrypt.keygen = 1;
ctx               146 drivers/crypto/padlock-aes.c 	ctx->cword.decrypt.keygen = 1;
ctx               153 drivers/crypto/padlock-aes.c 	memcpy(ctx->E, gen_aes.key_enc, AES_MAX_KEYLENGTH);
ctx               154 drivers/crypto/padlock-aes.c 	memcpy(ctx->D, gen_aes.key_dec, AES_MAX_KEYLENGTH);
ctx               158 drivers/crypto/padlock-aes.c 		if (&ctx->cword.encrypt == per_cpu(paes_last_cword, cpu) ||
ctx               159 drivers/crypto/padlock-aes.c 		    &ctx->cword.decrypt == per_cpu(paes_last_cword, cpu))
ctx               305 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = aes_ctx(tfm);
ctx               307 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.encrypt);
ctx               308 drivers/crypto/padlock-aes.c 	ecb_crypt(in, out, ctx->E, &ctx->cword.encrypt, 1);
ctx               309 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.encrypt);
ctx               314 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = aes_ctx(tfm);
ctx               316 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.encrypt);
ctx               317 drivers/crypto/padlock-aes.c 	ecb_crypt(in, out, ctx->D, &ctx->cword.decrypt, 1);
ctx               318 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.encrypt);
ctx               345 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
ctx               349 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.encrypt);
ctx               356 drivers/crypto/padlock-aes.c 				   ctx->E, &ctx->cword.encrypt,
ctx               362 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.encrypt);
ctx               371 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
ctx               375 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.decrypt);
ctx               382 drivers/crypto/padlock-aes.c 				   ctx->D, &ctx->cword.decrypt,
ctx               388 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.encrypt);
ctx               418 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
ctx               422 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.encrypt);
ctx               429 drivers/crypto/padlock-aes.c 					    walk.dst.virt.addr, ctx->E,
ctx               430 drivers/crypto/padlock-aes.c 					    walk.iv, &ctx->cword.encrypt,
ctx               437 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.decrypt);
ctx               446 drivers/crypto/padlock-aes.c 	struct aes_ctx *ctx = blk_aes_ctx(desc->tfm);
ctx               450 drivers/crypto/padlock-aes.c 	padlock_reset_key(&ctx->cword.encrypt);
ctx               457 drivers/crypto/padlock-aes.c 				   ctx->D, walk.iv, &ctx->cword.decrypt,
ctx               463 drivers/crypto/padlock-aes.c 	padlock_store_cword(&ctx->cword.encrypt);
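
[Annotation] aes_ctx_common() above (src 85-87) derives an aligned context pointer from the arbitrarily aligned tfm context, since the PadLock engine requires aligned key/control-word storage. A hedged sketch of that rounding trick, assuming a 16-byte requirement (the driver's constant is PADLOCK_ALIGNMENT):

	#include <linux/kernel.h>	/* ALIGN() */

	#define MY_ALIGNMENT 16		/* assumed PadLock alignment */

	static inline void *my_aligned_ctx(void *ctx)
	{
		unsigned long addr = (unsigned long)ctx;

		/* round the context pointer up to the next aligned boundary */
		return (void *)ALIGN(addr, MY_ALIGNMENT);
	}

The tfm context must of course be allocated with enough slack for the worst-case rounding, which the driver accounts for in its cra_ctxsize.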
ctx                34 drivers/crypto/padlock-sha.c 	struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                36 drivers/crypto/padlock-sha.c 	dctx->fallback.tfm = ctx->fallback;
ctx                58 drivers/crypto/padlock-sha.c 	struct padlock_sha_ctx *ctx = crypto_shash_ctx(desc->tfm);
ctx                60 drivers/crypto/padlock-sha.c 	dctx->fallback.tfm = ctx->fallback;
ctx               197 drivers/crypto/padlock-sha.c 	struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               211 drivers/crypto/padlock-sha.c 	ctx->fallback = fallback_tfm;
ctx               221 drivers/crypto/padlock-sha.c 	struct padlock_sha_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               223 drivers/crypto/padlock-sha.c 	crypto_free_shash(ctx->fallback);
ctx               200 drivers/crypto/picoxcell_crypto.c static inline void __iomem *spacc_ctx_page_addr(struct spacc_generic_ctx *ctx,
ctx               204 drivers/crypto/picoxcell_crypto.c 	return is_cipher_ctx ? ctx->engine->cipher_ctx_base +
ctx               205 drivers/crypto/picoxcell_crypto.c 			(indx * ctx->engine->cipher_pg_sz) :
ctx               206 drivers/crypto/picoxcell_crypto.c 		ctx->engine->hash_key_base + (indx * ctx->engine->hash_pg_sz);
ctx               219 drivers/crypto/picoxcell_crypto.c static void spacc_cipher_write_ctx(struct spacc_generic_ctx *ctx,
ctx               223 drivers/crypto/picoxcell_crypto.c 	void __iomem *key_ptr = page_addr + ctx->key_offs;
ctx               224 drivers/crypto/picoxcell_crypto.c 	void __iomem *iv_ptr = page_addr + ctx->iv_offs;
ctx               235 drivers/crypto/picoxcell_crypto.c static unsigned spacc_load_ctx(struct spacc_generic_ctx *ctx,
ctx               240 drivers/crypto/picoxcell_crypto.c 	unsigned indx = ctx->engine->next_ctx++;
ctx               243 drivers/crypto/picoxcell_crypto.c 	ciph_page_addr = spacc_ctx_page_addr(ctx, indx, 1);
ctx               244 drivers/crypto/picoxcell_crypto.c 	hash_page_addr = spacc_ctx_page_addr(ctx, indx, 0);
ctx               246 drivers/crypto/picoxcell_crypto.c 	ctx->engine->next_ctx &= ctx->engine->fifo_sz - 1;
ctx               247 drivers/crypto/picoxcell_crypto.c 	spacc_cipher_write_ctx(ctx, ciph_page_addr, ciph_key, ciph_len, iv,
ctx               251 drivers/crypto/picoxcell_crypto.c 	       ctx->engine->regs + SPA_KEY_SZ_REG_OFFSET);
ctx               256 drivers/crypto/picoxcell_crypto.c 		       ctx->engine->regs + SPA_KEY_SZ_REG_OFFSET);
ctx               460 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               464 drivers/crypto/picoxcell_crypto.c 	crypto_aead_clear_flags(ctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
ctx               465 drivers/crypto/picoxcell_crypto.c 	crypto_aead_set_flags(ctx->sw_cipher, crypto_aead_get_flags(tfm) &
ctx               467 drivers/crypto/picoxcell_crypto.c 	err = crypto_aead_setkey(ctx->sw_cipher, key, keylen);
ctx               469 drivers/crypto/picoxcell_crypto.c 	crypto_aead_set_flags(tfm, crypto_aead_get_flags(ctx->sw_cipher) &
ctx               480 drivers/crypto/picoxcell_crypto.c 	if (keys.authkeylen > sizeof(ctx->hash_ctx))
ctx               483 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->cipher_key, keys.enckey, keys.enckeylen);
ctx               484 drivers/crypto/picoxcell_crypto.c 	ctx->cipher_key_len = keys.enckeylen;
ctx               486 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->hash_ctx, keys.authkey, keys.authkeylen);
ctx               487 drivers/crypto/picoxcell_crypto.c 	ctx->hash_key_len = keys.authkeylen;
ctx               501 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
ctx               503 drivers/crypto/picoxcell_crypto.c 	return crypto_aead_setauthsize(ctx->sw_cipher, authsize);
ctx               516 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_aead_ctx(aead);
ctx               524 drivers/crypto/picoxcell_crypto.c 	    ctx->cipher_key_len != AES_KEYSIZE_128 &&
ctx               525 drivers/crypto/picoxcell_crypto.c 	    ctx->cipher_key_len != AES_KEYSIZE_256)
ctx               535 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_tfm_ctx(old_tfm);
ctx               538 drivers/crypto/picoxcell_crypto.c 	aead_request_set_tfm(subreq, ctx->sw_cipher);
ctx               561 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_aead_ctx(aead);
ctx               564 drivers/crypto/picoxcell_crypto.c 	struct spacc_engine *engine = ctx->generic.engine;
ctx               568 drivers/crypto/picoxcell_crypto.c 	req->ctx_id = spacc_load_ctx(&ctx->generic, ctx->cipher_key,
ctx               569 drivers/crypto/picoxcell_crypto.c 		ctx->cipher_key_len, aead_req->iv, crypto_aead_ivsize(aead),
ctx               570 drivers/crypto/picoxcell_crypto.c 		ctx->hash_ctx, ctx->hash_key_len);
ctx               698 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               703 drivers/crypto/picoxcell_crypto.c 	ctx->generic.flags = spacc_alg->type;
ctx               704 drivers/crypto/picoxcell_crypto.c 	ctx->generic.engine = engine;
ctx               705 drivers/crypto/picoxcell_crypto.c 	ctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
ctx               707 drivers/crypto/picoxcell_crypto.c 	if (IS_ERR(ctx->sw_cipher))
ctx               708 drivers/crypto/picoxcell_crypto.c 		return PTR_ERR(ctx->sw_cipher);
ctx               709 drivers/crypto/picoxcell_crypto.c 	ctx->generic.key_offs = spacc_alg->key_offs;
ctx               710 drivers/crypto/picoxcell_crypto.c 	ctx->generic.iv_offs = spacc_alg->iv_offs;
ctx               716 drivers/crypto/picoxcell_crypto.c 		    crypto_aead_reqsize(ctx->sw_cipher)));
ctx               727 drivers/crypto/picoxcell_crypto.c 	struct spacc_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               729 drivers/crypto/picoxcell_crypto.c 	crypto_free_aead(ctx->sw_cipher);
ctx               739 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               746 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->key, key, len);
ctx               747 drivers/crypto/picoxcell_crypto.c 	ctx->key_len = len;
ctx               759 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               766 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->key, key, len);
ctx               767 drivers/crypto/picoxcell_crypto.c 	ctx->key_len = len;
ctx               780 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               794 drivers/crypto/picoxcell_crypto.c 		if (!ctx->sw_cipher)
ctx               801 drivers/crypto/picoxcell_crypto.c 		crypto_sync_skcipher_clear_flags(ctx->sw_cipher,
ctx               803 drivers/crypto/picoxcell_crypto.c 		crypto_sync_skcipher_set_flags(ctx->sw_cipher,
ctx               807 drivers/crypto/picoxcell_crypto.c 		err = crypto_sync_skcipher_setkey(ctx->sw_cipher, key, len);
ctx               811 drivers/crypto/picoxcell_crypto.c 			crypto_sync_skcipher_get_flags(ctx->sw_cipher) &
ctx               818 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->key, key, len);
ctx               819 drivers/crypto/picoxcell_crypto.c 	ctx->key_len = len;
ctx               829 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               838 drivers/crypto/picoxcell_crypto.c 	memcpy(ctx->key, key, len);
ctx               839 drivers/crypto/picoxcell_crypto.c 	ctx->key_len = len;
ctx               847 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx;
ctx               852 drivers/crypto/picoxcell_crypto.c 	ctx = crypto_tfm_ctx(tfm);
ctx               856 drivers/crypto/picoxcell_crypto.c 			ctx->key_len != AES_KEYSIZE_128 &&
ctx               857 drivers/crypto/picoxcell_crypto.c 			ctx->key_len != AES_KEYSIZE_256;
ctx               879 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               883 drivers/crypto/picoxcell_crypto.c 	struct spacc_engine *engine = ctx->generic.engine;
ctx               886 drivers/crypto/picoxcell_crypto.c 	req->ctx_id = spacc_load_ctx(&ctx->generic, ctx->key,
ctx               887 drivers/crypto/picoxcell_crypto.c 		ctx->key_len, ablk_req->info, alg->cra_ablkcipher.ivsize,
ctx               915 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(old_tfm);
ctx               916 drivers/crypto/picoxcell_crypto.c 	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher);
ctx               924 drivers/crypto/picoxcell_crypto.c 	skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher);
ctx              1014 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1019 drivers/crypto/picoxcell_crypto.c 	ctx->generic.flags = spacc_alg->type;
ctx              1020 drivers/crypto/picoxcell_crypto.c 	ctx->generic.engine = engine;
ctx              1022 drivers/crypto/picoxcell_crypto.c 		ctx->sw_cipher = crypto_alloc_sync_skcipher(
ctx              1024 drivers/crypto/picoxcell_crypto.c 		if (IS_ERR(ctx->sw_cipher)) {
ctx              1027 drivers/crypto/picoxcell_crypto.c 			return PTR_ERR(ctx->sw_cipher);
ctx              1030 drivers/crypto/picoxcell_crypto.c 	ctx->generic.key_offs = spacc_alg->key_offs;
ctx              1031 drivers/crypto/picoxcell_crypto.c 	ctx->generic.iv_offs = spacc_alg->iv_offs;
ctx              1040 drivers/crypto/picoxcell_crypto.c 	struct spacc_ablk_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1042 drivers/crypto/picoxcell_crypto.c 	crypto_free_sync_skcipher(ctx->sw_cipher);
ctx               301 drivers/crypto/qat/qat_common/icp_qat_uclo.h 	unsigned char ctx;
ctx               152 drivers/crypto/qat/qat_common/qat_algs.c 				  struct qat_alg_aead_ctx *ctx,
ctx               156 drivers/crypto/qat/qat_common/qat_algs.c 	SHASH_DESC_ON_STACK(shash, ctx->hash_tfm);
ctx               157 drivers/crypto/qat/qat_common/qat_algs.c 	int block_size = crypto_shash_blocksize(ctx->hash_tfm);
ctx               158 drivers/crypto/qat/qat_common/qat_algs.c 	int digest_size = crypto_shash_digestsize(ctx->hash_tfm);
ctx               163 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->ipad, 0, block_size);
ctx               164 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->opad, 0, block_size);
ctx               165 drivers/crypto/qat/qat_common/qat_algs.c 	shash->tfm = ctx->hash_tfm;
ctx               169 drivers/crypto/qat/qat_common/qat_algs.c 					      auth_keylen, ctx->ipad);
ctx               173 drivers/crypto/qat/qat_common/qat_algs.c 		memcpy(ctx->opad, ctx->ipad, digest_size);
ctx               175 drivers/crypto/qat/qat_common/qat_algs.c 		memcpy(ctx->ipad, auth_key, auth_keylen);
ctx               176 drivers/crypto/qat/qat_common/qat_algs.c 		memcpy(ctx->opad, auth_key, auth_keylen);
ctx               180 drivers/crypto/qat/qat_common/qat_algs.c 		char *ipad_ptr = ctx->ipad + i;
ctx               181 drivers/crypto/qat/qat_common/qat_algs.c 		char *opad_ptr = ctx->opad + i;
ctx               189 drivers/crypto/qat/qat_common/qat_algs.c 	if (crypto_shash_update(shash, ctx->ipad, block_size))
ctx               195 drivers/crypto/qat/qat_common/qat_algs.c 	switch (ctx->qat_hash_alg) {
ctx               197 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha1))
ctx               200 drivers/crypto/qat/qat_common/qat_algs.c 			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
ctx               203 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha256))
ctx               206 drivers/crypto/qat/qat_common/qat_algs.c 			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
ctx               209 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha512))
ctx               212 drivers/crypto/qat/qat_common/qat_algs.c 			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
ctx               221 drivers/crypto/qat/qat_common/qat_algs.c 	if (crypto_shash_update(shash, ctx->opad, block_size))
ctx               224 drivers/crypto/qat/qat_common/qat_algs.c 	offset = round_up(qat_get_inter_state_size(ctx->qat_hash_alg), 8);
ctx               231 drivers/crypto/qat/qat_common/qat_algs.c 	switch (ctx->qat_hash_alg) {
ctx               233 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha1))
ctx               236 drivers/crypto/qat/qat_common/qat_algs.c 			*hash_state_out = cpu_to_be32(ctx->sha1.state[i]);
ctx               239 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha256))
ctx               242 drivers/crypto/qat/qat_common/qat_algs.c 			*hash_state_out = cpu_to_be32(ctx->sha256.state[i]);
ctx               245 drivers/crypto/qat/qat_common/qat_algs.c 		if (crypto_shash_export(shash, &ctx->sha512))
ctx               248 drivers/crypto/qat/qat_common/qat_algs.c 			*hash512_state_out = cpu_to_be64(ctx->sha512.state[i]);
ctx               253 drivers/crypto/qat/qat_common/qat_algs.c 	memzero_explicit(ctx->ipad, block_size);
ctx               254 drivers/crypto/qat/qat_common/qat_algs.c 	memzero_explicit(ctx->opad, block_size);
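
[Annotation] qat_alg_do_precomputes() above (src 152-254) performs the classic HMAC precompute: zero-pad the (already block-size-or-shorter) key into ipad/opad, XOR with 0x36/0x5c, hash one block of each, then export the partial digest state for the hardware. A minimal sketch of the pad derivation step (helper name hypothetical; the constants match RFC 2104):

	#include <linux/string.h>
	#include <linux/types.h>

	#define MY_HMAC_IPAD 0x36
	#define MY_HMAC_OPAD 0x5c

	static void my_hmac_make_pads(const u8 *key, unsigned int keylen,
				      u8 *ipad, u8 *opad,
				      unsigned int block_size)
	{
		unsigned int i;

		memset(ipad, 0, block_size);	/* zero-pad short keys */
		memset(opad, 0, block_size);
		memcpy(ipad, key, keylen);
		memcpy(opad, key, keylen);

		for (i = 0; i < block_size; i++) {
			ipad[i] ^= MY_HMAC_IPAD;
			opad[i] ^= MY_HMAC_OPAD;
		}
	}

As the memzero_explicit() calls above show, both pads are key material and must be wiped once the partial states have been exported.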
ctx               298 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(aead_tfm);
ctx               300 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_enc *enc_ctx = &ctx->enc_cd->qat_enc_cd;
ctx               305 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_fw_la_bulk_req *req_tmpl = &ctx->enc_fw_req;
ctx               317 drivers/crypto/qat/qat_common/qat_algs.c 					     ctx->qat_hash_alg, digestsize);
ctx               319 drivers/crypto/qat/qat_common/qat_algs.c 		cpu_to_be32(crypto_shash_blocksize(ctx->hash_tfm));
ctx               321 drivers/crypto/qat/qat_common/qat_algs.c 	if (qat_alg_do_precomputes(hash, ctx, keys->authkey, keys->authkeylen))
ctx               333 drivers/crypto/qat/qat_common/qat_algs.c 	cd_pars->u.s.content_desc_addr = ctx->enc_cd_paddr;
ctx               348 drivers/crypto/qat/qat_common/qat_algs.c 	switch (ctx->qat_hash_alg) {
ctx               379 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(aead_tfm);
ctx               381 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_dec *dec_ctx = &ctx->dec_cd->qat_dec_cd;
ctx               386 drivers/crypto/qat/qat_common/qat_algs.c 		roundup(crypto_shash_digestsize(ctx->hash_tfm), 8) * 2);
ctx               387 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_fw_la_bulk_req *req_tmpl = &ctx->dec_fw_req;
ctx               403 drivers/crypto/qat/qat_common/qat_algs.c 					     ctx->qat_hash_alg,
ctx               406 drivers/crypto/qat/qat_common/qat_algs.c 		cpu_to_be32(crypto_shash_blocksize(ctx->hash_tfm));
ctx               408 drivers/crypto/qat/qat_common/qat_algs.c 	if (qat_alg_do_precomputes(hash, ctx, keys->authkey, keys->authkeylen))
ctx               420 drivers/crypto/qat/qat_common/qat_algs.c 	cd_pars->u.s.content_desc_addr = ctx->dec_cd_paddr;
ctx               428 drivers/crypto/qat/qat_common/qat_algs.c 		 roundup(crypto_shash_digestsize(ctx->hash_tfm), 8) * 2) >> 3;
ctx               438 drivers/crypto/qat/qat_common/qat_algs.c 	switch (ctx->qat_hash_alg) {
ctx               466 drivers/crypto/qat/qat_common/qat_algs.c static void qat_alg_ablkcipher_init_com(struct qat_alg_ablkcipher_ctx *ctx,
ctx               488 drivers/crypto/qat/qat_common/qat_algs.c static void qat_alg_ablkcipher_init_enc(struct qat_alg_ablkcipher_ctx *ctx,
ctx               492 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_hw_cipher_algo_blk *enc_cd = ctx->enc_cd;
ctx               493 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_fw_la_bulk_req *req = &ctx->enc_fw_req;
ctx               496 drivers/crypto/qat/qat_common/qat_algs.c 	qat_alg_ablkcipher_init_com(ctx, req, enc_cd, key, keylen);
ctx               497 drivers/crypto/qat/qat_common/qat_algs.c 	cd_pars->u.s.content_desc_addr = ctx->enc_cd_paddr;
ctx               501 drivers/crypto/qat/qat_common/qat_algs.c static void qat_alg_ablkcipher_init_dec(struct qat_alg_ablkcipher_ctx *ctx,
ctx               505 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_hw_cipher_algo_blk *dec_cd = ctx->dec_cd;
ctx               506 drivers/crypto/qat/qat_common/qat_algs.c 	struct icp_qat_fw_la_bulk_req *req = &ctx->dec_fw_req;
ctx               509 drivers/crypto/qat/qat_common/qat_algs.c 	qat_alg_ablkcipher_init_com(ctx, req, dec_cd, key, keylen);
ctx               510 drivers/crypto/qat/qat_common/qat_algs.c 	cd_pars->u.s.content_desc_addr = ctx->dec_cd_paddr;
ctx               580 drivers/crypto/qat/qat_common/qat_algs.c static int qat_alg_ablkcipher_init_sessions(struct qat_alg_ablkcipher_ctx *ctx,
ctx               590 drivers/crypto/qat/qat_common/qat_algs.c 	qat_alg_ablkcipher_init_enc(ctx, alg, key, keylen, mode);
ctx               591 drivers/crypto/qat/qat_common/qat_algs.c 	qat_alg_ablkcipher_init_dec(ctx, alg, key, keylen, mode);
ctx               594 drivers/crypto/qat/qat_common/qat_algs.c 	crypto_tfm_set_flags(ctx->tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
ctx               601 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               603 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
ctx               604 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
ctx               605 drivers/crypto/qat/qat_common/qat_algs.c 	memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
ctx               606 drivers/crypto/qat/qat_common/qat_algs.c 	memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));
ctx               615 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               625 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->inst = inst;
ctx               626 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->enc_cd = dma_alloc_coherent(dev, sizeof(*ctx->enc_cd),
ctx               627 drivers/crypto/qat/qat_common/qat_algs.c 					 &ctx->enc_cd_paddr,
ctx               629 drivers/crypto/qat/qat_common/qat_algs.c 	if (!ctx->enc_cd) {
ctx               633 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->dec_cd = dma_alloc_coherent(dev, sizeof(*ctx->dec_cd),
ctx               634 drivers/crypto/qat/qat_common/qat_algs.c 					 &ctx->dec_cd_paddr,
ctx               636 drivers/crypto/qat/qat_common/qat_algs.c 	if (!ctx->dec_cd) {
ctx               649 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
ctx               651 drivers/crypto/qat/qat_common/qat_algs.c 			  ctx->dec_cd, ctx->dec_cd_paddr);
ctx               652 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->dec_cd = NULL;
ctx               654 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
ctx               656 drivers/crypto/qat/qat_common/qat_algs.c 			  ctx->enc_cd, ctx->enc_cd_paddr);
ctx               657 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->enc_cd = NULL;
ctx               659 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->inst = NULL;
ctx               667 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx               669 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->enc_cd)
ctx               823 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = qat_req->aead_ctx;
ctx               824 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               838 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = qat_req->ablkcipher_ctx;
ctx               839 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               842 drivers/crypto/qat/qat_common/qat_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx               869 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               877 drivers/crypto/qat/qat_common/qat_algs.c 	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req);
ctx               882 drivers/crypto/qat/qat_common/qat_algs.c 	*msg = ctx->dec_fw_req;
ctx               883 drivers/crypto/qat/qat_common/qat_algs.c 	qat_req->aead_ctx = ctx;
ctx               897 drivers/crypto/qat/qat_common/qat_algs.c 		ret = adf_send_message(ctx->inst->sym_tx, (uint32_t *)msg);
ctx               901 drivers/crypto/qat/qat_common/qat_algs.c 		qat_alg_free_bufl(ctx->inst, qat_req);
ctx               911 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               919 drivers/crypto/qat/qat_common/qat_algs.c 	ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req);
ctx               924 drivers/crypto/qat/qat_common/qat_algs.c 	*msg = ctx->enc_fw_req;
ctx               925 drivers/crypto/qat/qat_common/qat_algs.c 	qat_req->aead_ctx = ctx;
ctx               942 drivers/crypto/qat/qat_common/qat_algs.c 		ret = adf_send_message(ctx->inst->sym_tx, (uint32_t *)msg);
ctx               946 drivers/crypto/qat/qat_common/qat_algs.c 		qat_alg_free_bufl(ctx->inst, qat_req);
ctx               952 drivers/crypto/qat/qat_common/qat_algs.c static int qat_alg_ablkcipher_rekey(struct qat_alg_ablkcipher_ctx *ctx,
ctx               956 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
ctx               957 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
ctx               958 drivers/crypto/qat/qat_common/qat_algs.c 	memset(&ctx->enc_fw_req, 0, sizeof(ctx->enc_fw_req));
ctx               959 drivers/crypto/qat/qat_common/qat_algs.c 	memset(&ctx->dec_fw_req, 0, sizeof(ctx->dec_fw_req));
ctx               961 drivers/crypto/qat/qat_common/qat_algs.c 	return qat_alg_ablkcipher_init_sessions(ctx, key, keylen, mode);
ctx               964 drivers/crypto/qat/qat_common/qat_algs.c static int qat_alg_ablkcipher_newkey(struct qat_alg_ablkcipher_ctx *ctx,
ctx               977 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->inst = inst;
ctx               978 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->enc_cd = dma_alloc_coherent(dev, sizeof(*ctx->enc_cd),
ctx               979 drivers/crypto/qat/qat_common/qat_algs.c 					 &ctx->enc_cd_paddr,
ctx               981 drivers/crypto/qat/qat_common/qat_algs.c 	if (!ctx->enc_cd) {
ctx               985 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->dec_cd = dma_alloc_coherent(dev, sizeof(*ctx->dec_cd),
ctx               986 drivers/crypto/qat/qat_common/qat_algs.c 					 &ctx->dec_cd_paddr,
ctx               988 drivers/crypto/qat/qat_common/qat_algs.c 	if (!ctx->dec_cd) {
ctx               993 drivers/crypto/qat/qat_common/qat_algs.c 	ret = qat_alg_ablkcipher_init_sessions(ctx, key, keylen, mode);
ctx              1000 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->dec_cd, 0, sizeof(*ctx->dec_cd));
ctx              1001 drivers/crypto/qat/qat_common/qat_algs.c 	dma_free_coherent(dev, sizeof(*ctx->dec_cd),
ctx              1002 drivers/crypto/qat/qat_common/qat_algs.c 			  ctx->dec_cd, ctx->dec_cd_paddr);
ctx              1003 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->dec_cd = NULL;
ctx              1005 drivers/crypto/qat/qat_common/qat_algs.c 	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));
ctx              1006 drivers/crypto/qat/qat_common/qat_algs.c 	dma_free_coherent(dev, sizeof(*ctx->enc_cd),
ctx              1007 drivers/crypto/qat/qat_common/qat_algs.c 			  ctx->enc_cd, ctx->enc_cd_paddr);
ctx              1008 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->enc_cd = NULL;
ctx              1010 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->inst = NULL;
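qat_alg_ablkcipher_newkey above pairs each dma_alloc_coherent() with a reverse-order unwind that scrubs the buffer before freeing it and NULLs the pointer, so the enc_cd test in the setkey dispatcher below sees a clean context after a failure. The skeleton of that pairing, trimmed to the two descriptor blobs (the label name and gfp flag are assumptions):

	/* Sketch of the alloc/unwind pairing; error label is illustrative. */
	ctx->enc_cd = dma_alloc_coherent(dev, sizeof(*ctx->enc_cd),
					 &ctx->enc_cd_paddr, GFP_ATOMIC);
	if (!ctx->enc_cd)
		return -ENOMEM;
	ctx->dec_cd = dma_alloc_coherent(dev, sizeof(*ctx->dec_cd),
					 &ctx->dec_cd_paddr, GFP_ATOMIC);
	if (!ctx->dec_cd)
		goto out_free_enc;
	return 0;

out_free_enc:
	memset(ctx->enc_cd, 0, sizeof(*ctx->enc_cd));	/* scrub key state */
	dma_free_coherent(dev, sizeof(*ctx->enc_cd),
			  ctx->enc_cd, ctx->enc_cd_paddr);
	ctx->enc_cd = NULL;
	return -ENOMEM;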
ctx              1019 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx              1021 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->enc_cd)
ctx              1022 drivers/crypto/qat/qat_common/qat_algs.c 		return qat_alg_ablkcipher_rekey(ctx, key, keylen, mode);
ctx              1024 drivers/crypto/qat/qat_common/qat_algs.c 		return qat_alg_ablkcipher_newkey(ctx, key, keylen, mode);
ctx              1052 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1056 drivers/crypto/qat/qat_common/qat_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx              1067 drivers/crypto/qat/qat_common/qat_algs.c 	ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req);
ctx              1075 drivers/crypto/qat/qat_common/qat_algs.c 	*msg = ctx->enc_fw_req;
ctx              1076 drivers/crypto/qat/qat_common/qat_algs.c 	qat_req->ablkcipher_ctx = ctx;
ctx              1088 drivers/crypto/qat/qat_common/qat_algs.c 		ret = adf_send_message(ctx->inst->sym_tx, (uint32_t *)msg);
ctx              1092 drivers/crypto/qat/qat_common/qat_algs.c 		qat_alg_free_bufl(ctx->inst, qat_req);
ctx              1112 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1116 drivers/crypto/qat/qat_common/qat_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx              1127 drivers/crypto/qat/qat_common/qat_algs.c 	ret = qat_alg_sgl_to_bufl(ctx->inst, req->src, req->dst, qat_req);
ctx              1135 drivers/crypto/qat/qat_common/qat_algs.c 	*msg = ctx->dec_fw_req;
ctx              1136 drivers/crypto/qat/qat_common/qat_algs.c 	qat_req->ablkcipher_ctx = ctx;
ctx              1148 drivers/crypto/qat/qat_common/qat_algs.c 		ret = adf_send_message(ctx->inst->sym_tx, (uint32_t *)msg);
ctx              1152 drivers/crypto/qat/qat_common/qat_algs.c 		qat_alg_free_bufl(ctx->inst, qat_req);
ctx              1171 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1173 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->hash_tfm = crypto_alloc_shash(hash_name, 0, 0);
ctx              1174 drivers/crypto/qat/qat_common/qat_algs.c 	if (IS_ERR(ctx->hash_tfm))
ctx              1175 drivers/crypto/qat/qat_common/qat_algs.c 		return PTR_ERR(ctx->hash_tfm);
ctx              1176 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->qat_hash_alg = hash;
ctx              1198 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_aead_ctx *ctx = crypto_aead_ctx(tfm);
ctx              1199 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx              1202 drivers/crypto/qat/qat_common/qat_algs.c 	crypto_free_shash(ctx->hash_tfm);
ctx              1208 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->enc_cd) {
ctx              1209 drivers/crypto/qat/qat_common/qat_algs.c 		memset(ctx->enc_cd, 0, sizeof(struct qat_alg_cd));
ctx              1211 drivers/crypto/qat/qat_common/qat_algs.c 				  ctx->enc_cd, ctx->enc_cd_paddr);
ctx              1213 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->dec_cd) {
ctx              1214 drivers/crypto/qat/qat_common/qat_algs.c 		memset(ctx->dec_cd, 0, sizeof(struct qat_alg_cd));
ctx              1216 drivers/crypto/qat/qat_common/qat_algs.c 				  ctx->dec_cd, ctx->dec_cd_paddr);
ctx              1223 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1226 drivers/crypto/qat/qat_common/qat_algs.c 	ctx->tfm = tfm;
ctx              1232 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_alg_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1233 drivers/crypto/qat/qat_common/qat_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx              1240 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->enc_cd) {
ctx              1241 drivers/crypto/qat/qat_common/qat_algs.c 		memset(ctx->enc_cd, 0,
ctx              1245 drivers/crypto/qat/qat_common/qat_algs.c 				  ctx->enc_cd, ctx->enc_cd_paddr);
ctx              1247 drivers/crypto/qat/qat_common/qat_algs.c 	if (ctx->dec_cd) {
ctx              1248 drivers/crypto/qat/qat_common/qat_algs.c 		memset(ctx->dec_cd, 0,
ctx              1252 drivers/crypto/qat/qat_common/qat_algs.c 				  ctx->dec_cd, ctx->dec_cd_paddr);
ctx               176 drivers/crypto/qat/qat_common/qat_asym_algs.c 	} ctx;
ctx               189 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(req->ctx.dh->inst->accel_dev);
ctx               197 drivers/crypto/qat/qat_common/qat_asym_algs.c 			dma_free_coherent(dev, req->ctx.dh->p_size,
ctx               201 drivers/crypto/qat/qat_common/qat_asym_algs.c 					 req->ctx.dh->p_size, DMA_TO_DEVICE);
ctx               204 drivers/crypto/qat/qat_common/qat_asym_algs.c 	areq->dst_len = req->ctx.dh->p_size;
ctx               209 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, req->ctx.dh->p_size, req->dst_align,
ctx               212 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_unmap_single(dev, req->out.dh.r, req->ctx.dh->p_size,
ctx               260 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               261 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               269 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (unlikely(!ctx->xa))
ctx               272 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (req->dst_len < ctx->p_size) {
ctx               273 drivers/crypto/qat/qat_common/qat_asym_algs.c 		req->dst_len = ctx->p_size;
ctx               280 drivers/crypto/qat/qat_common/qat_asym_algs.c 	msg->pke_hdr.cd_pars.func_id = qat_dh_fn_id(ctx->p_size,
ctx               281 drivers/crypto/qat/qat_common/qat_asym_algs.c 						    !req->src && ctx->g2);
ctx               286 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_req->ctx.dh = ctx;
ctx               297 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.dh.in.xa = ctx->dma_xa;
ctx               298 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.dh.in.p = ctx->dma_p;
ctx               301 drivers/crypto/qat/qat_common/qat_asym_algs.c 		if (ctx->g2) {
ctx               302 drivers/crypto/qat/qat_common/qat_asym_algs.c 			qat_req->in.dh.in_g2.xa = ctx->dma_xa;
ctx               303 drivers/crypto/qat/qat_common/qat_asym_algs.c 			qat_req->in.dh.in_g2.p = ctx->dma_p;
ctx               306 drivers/crypto/qat/qat_common/qat_asym_algs.c 			qat_req->in.dh.in.b = ctx->dma_g;
ctx               307 drivers/crypto/qat/qat_common/qat_asym_algs.c 			qat_req->in.dh.in.xa = ctx->dma_xa;
ctx               308 drivers/crypto/qat/qat_common/qat_asym_algs.c 			qat_req->in.dh.in.p = ctx->dma_p;
ctx               322 drivers/crypto/qat/qat_common/qat_asym_algs.c 		if (sg_is_last(req->src) && req->src_len == ctx->p_size) {
ctx               333 drivers/crypto/qat/qat_common/qat_asym_algs.c 			int shift = ctx->p_size - req->src_len;
ctx               336 drivers/crypto/qat/qat_common/qat_asym_algs.c 								ctx->p_size,
ctx               353 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (sg_is_last(req->dst) && req->dst_len == ctx->p_size) {
ctx               363 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->dst_align = dma_alloc_coherent(dev, ctx->p_size,
ctx               392 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ret = adf_send_message(ctx->inst->pke_tx, (uint32_t *)msg);
ctx               409 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->p_size, qat_req->dst_align,
ctx               413 drivers/crypto/qat/qat_common/qat_asym_algs.c 			dma_unmap_single(dev, qat_req->out.dh.r, ctx->p_size,
ctx               418 drivers/crypto/qat/qat_common/qat_asym_algs.c 			dma_free_coherent(dev, ctx->p_size, qat_req->src_align,
ctx               423 drivers/crypto/qat/qat_common/qat_asym_algs.c 						 ctx->p_size,
ctx               441 drivers/crypto/qat/qat_common/qat_asym_algs.c static int qat_dh_set_params(struct qat_dh_ctx *ctx, struct dh *params)
ctx               443 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               449 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p_size = params->p_size;
ctx               450 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p = dma_alloc_coherent(dev, ctx->p_size, &ctx->dma_p, GFP_KERNEL);
ctx               451 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->p)
ctx               453 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->p, params->p, ctx->p_size);
ctx               457 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ctx->g2 = true;
ctx               461 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->g = dma_alloc_coherent(dev, ctx->p_size, &ctx->dma_g, GFP_KERNEL);
ctx               462 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->g)
ctx               464 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->g + (ctx->p_size - params->g_size), params->g,
ctx               470 drivers/crypto/qat/qat_common/qat_asym_algs.c static void qat_dh_clear_ctx(struct device *dev, struct qat_dh_ctx *ctx)
ctx               472 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->g) {
ctx               473 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->p_size, ctx->g, ctx->dma_g);
ctx               474 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ctx->g = NULL;
ctx               476 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->xa) {
ctx               477 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->p_size, ctx->xa, ctx->dma_xa);
ctx               478 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ctx->xa = NULL;
ctx               480 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->p) {
ctx               481 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->p_size, ctx->p, ctx->dma_p);
ctx               482 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ctx->p = NULL;
ctx               484 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p_size = 0;
ctx               485 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->g2 = false;
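Throughout qat_asym_algs.c a big-endian operand shorter than the modulus is copied to the tail of its fixed-size buffer, as in memcpy(ctx->g + (ctx->p_size - params->g_size), ...) above, so the leading bytes read as zero padding (the kernel side gets the zeroing from the freshly allocated coherent buffer). A standalone model of that right-aligned copy, with an explicit memset standing in for the pre-zeroed allocation:

#include <stdio.h>
#include <string.h>

/* Copy a src_len-byte big-endian value into a dst_len-byte buffer,
 * right-aligned, with zero padding on the left. */
static void pad_copy_be(unsigned char *dst, size_t dst_len,
			const unsigned char *src, size_t src_len)
{
	memset(dst, 0, dst_len - src_len);
	memcpy(dst + (dst_len - src_len), src, src_len);
}

int main(void)
{
	unsigned char buf[8];
	unsigned char g = 0x02;		/* e.g. the DH generator g = 2 */
	size_t i;

	pad_copy_be(buf, sizeof(buf), &g, sizeof(g));
	for (i = 0; i < sizeof(buf); i++)
		printf("%02x", buf[i]);
	printf("\n");			/* prints 0000000000000002 */
	return 0;
}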
ctx               491 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               492 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx               500 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_dh_clear_ctx(dev, ctx);
ctx               502 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ret = qat_dh_set_params(ctx, &params);
ctx               506 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->xa = dma_alloc_coherent(dev, ctx->p_size, &ctx->dma_xa,
ctx               508 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->xa) {
ctx               512 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->xa + (ctx->p_size - params.key_size), params.key,
ctx               518 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_dh_clear_ctx(dev, ctx);
ctx               524 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               526 drivers/crypto/qat/qat_common/qat_asym_algs.c 	return ctx->p_size;
ctx               531 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               538 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p_size = 0;
ctx               539 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->g2 = false;
ctx               540 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->inst = inst;
ctx               546 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_dh_ctx *ctx = kpp_tfm_ctx(tfm);
ctx               547 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx               549 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_dh_clear_ctx(dev, ctx);
ctx               550 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_crypto_put_instance(ctx->inst);
ctx               557 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(req->ctx.rsa->inst->accel_dev);
ctx               564 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, req->ctx.rsa->key_sz, req->src_align,
ctx               567 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_unmap_single(dev, req->in.rsa.enc.m, req->ctx.rsa->key_sz,
ctx               570 drivers/crypto/qat/qat_common/qat_asym_algs.c 	areq->dst_len = req->ctx.rsa->key_sz;
ctx               575 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, req->ctx.rsa->key_sz, req->dst_align,
ctx               578 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_unmap_single(dev, req->out.rsa.enc.c, req->ctx.rsa->key_sz,
ctx               689 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               690 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               697 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (unlikely(!ctx->n || !ctx->e))
ctx               700 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (req->dst_len < ctx->key_sz) {
ctx               701 drivers/crypto/qat/qat_common/qat_asym_algs.c 		req->dst_len = ctx->key_sz;
ctx               707 drivers/crypto/qat/qat_common/qat_asym_algs.c 	msg->pke_hdr.cd_pars.func_id = qat_rsa_enc_fn_id(ctx->key_sz);
ctx               712 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_req->ctx.rsa = ctx;
ctx               719 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_req->in.rsa.enc.e = ctx->dma_e;
ctx               720 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_req->in.rsa.enc.n = ctx->dma_n;
ctx               730 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (sg_is_last(req->src) && req->src_len == ctx->key_sz) {
ctx               738 drivers/crypto/qat/qat_common/qat_asym_algs.c 		int shift = ctx->key_sz - req->src_len;
ctx               740 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->src_align = dma_alloc_coherent(dev, ctx->key_sz,
ctx               749 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (sg_is_last(req->dst) && req->dst_len == ctx->key_sz) {
ctx               759 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->dst_align = dma_alloc_coherent(dev, ctx->key_sz,
ctx               786 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ret = adf_send_message(ctx->inst->pke_tx, (uint32_t *)msg);
ctx               803 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, qat_req->dst_align,
ctx               808 drivers/crypto/qat/qat_common/qat_asym_algs.c 					 ctx->key_sz, DMA_FROM_DEVICE);
ctx               811 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, qat_req->src_align,
ctx               816 drivers/crypto/qat/qat_common/qat_asym_algs.c 					 ctx->key_sz, DMA_TO_DEVICE);
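The RSA encrypt path hands the caller's memory straight to the device only when the source is a single scatterlist entry of exactly key_sz bytes; anything else goes through a zero-padded coherent bounce buffer. A reconstruction of that decision around the lines shown (error handling reduced to the allocation check; the gfp flag is an assumption):

	if (sg_is_last(req->src) && req->src_len == ctx->key_sz) {
		/* Fast path: map the caller's buffer directly. */
		qat_req->src_align = NULL;
		qat_req->in.rsa.enc.m = dma_map_single(dev, sg_virt(req->src),
						       req->src_len,
						       DMA_TO_DEVICE);
	} else {
		/* Bounce: key_sz-byte buffer, input right-aligned. */
		int shift = ctx->key_sz - req->src_len;

		qat_req->src_align = dma_alloc_coherent(dev, ctx->key_sz,
							&qat_req->in.rsa.enc.m,
							GFP_KERNEL);
		if (unlikely(!qat_req->src_align))
			return -ENOMEM;
		scatterwalk_map_and_copy(qat_req->src_align + shift,
					 req->src, 0, req->src_len, 0);
	}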
ctx               823 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx               824 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               831 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (unlikely(!ctx->n || !ctx->d))
ctx               834 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (req->dst_len < ctx->key_sz) {
ctx               835 drivers/crypto/qat/qat_common/qat_asym_algs.c 		req->dst_len = ctx->key_sz;
ctx               841 drivers/crypto/qat/qat_common/qat_asym_algs.c 	msg->pke_hdr.cd_pars.func_id = ctx->crt_mode ?
ctx               842 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_rsa_dec_fn_id_crt(ctx->key_sz) :
ctx               843 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_rsa_dec_fn_id(ctx->key_sz);
ctx               848 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_req->ctx.rsa = ctx;
ctx               855 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->crt_mode) {
ctx               856 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec_crt.p = ctx->dma_p;
ctx               857 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec_crt.q = ctx->dma_q;
ctx               858 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec_crt.dp = ctx->dma_dp;
ctx               859 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec_crt.dq = ctx->dma_dq;
ctx               860 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec_crt.qinv = ctx->dma_qinv;
ctx               862 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec.d = ctx->dma_d;
ctx               863 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->in.rsa.dec.n = ctx->dma_n;
ctx               874 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (sg_is_last(req->src) && req->src_len == ctx->key_sz) {
ctx               882 drivers/crypto/qat/qat_common/qat_asym_algs.c 		int shift = ctx->key_sz - req->src_len;
ctx               884 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->src_align = dma_alloc_coherent(dev, ctx->key_sz,
ctx               893 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (sg_is_last(req->dst) && req->dst_len == ctx->key_sz) {
ctx               903 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_req->dst_align = dma_alloc_coherent(dev, ctx->key_sz,
ctx               911 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->crt_mode)
ctx               931 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->crt_mode)
ctx               938 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ret = adf_send_message(ctx->inst->pke_tx, (uint32_t *)msg);
ctx               955 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, qat_req->dst_align,
ctx               960 drivers/crypto/qat/qat_common/qat_asym_algs.c 					 ctx->key_sz, DMA_FROM_DEVICE);
ctx               963 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, qat_req->src_align,
ctx               968 drivers/crypto/qat/qat_common/qat_asym_algs.c 					 ctx->key_sz, DMA_TO_DEVICE);
ctx               972 drivers/crypto/qat/qat_common/qat_asym_algs.c static int qat_rsa_set_n(struct qat_rsa_ctx *ctx, const char *value,
ctx               975 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx               985 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->key_sz = vlen;
ctx               988 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!qat_rsa_enc_fn_id(ctx->key_sz))
ctx               992 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->n = dma_alloc_coherent(dev, ctx->key_sz, &ctx->dma_n, GFP_KERNEL);
ctx               993 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->n)
ctx               996 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->n, ptr, ctx->key_sz);
ctx               999 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->key_sz = 0;
ctx              1000 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->n = NULL;
ctx              1004 drivers/crypto/qat/qat_common/qat_asym_algs.c static int qat_rsa_set_e(struct qat_rsa_ctx *ctx, const char *value,
ctx              1007 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx              1016 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) {
ctx              1017 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ctx->e = NULL;
ctx              1021 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->e = dma_alloc_coherent(dev, ctx->key_sz, &ctx->dma_e, GFP_KERNEL);
ctx              1022 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->e)
ctx              1025 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->e + (ctx->key_sz - vlen), ptr, vlen);
ctx              1029 drivers/crypto/qat/qat_common/qat_asym_algs.c static int qat_rsa_set_d(struct qat_rsa_ctx *ctx, const char *value,
ctx              1032 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx              1043 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
ctx              1047 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = dma_alloc_coherent(dev, ctx->key_sz, &ctx->dma_d, GFP_KERNEL);
ctx              1048 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->d)
ctx              1051 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->d + (ctx->key_sz - vlen), ptr, vlen);
ctx              1054 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
ctx              1066 drivers/crypto/qat/qat_common/qat_asym_algs.c static void qat_rsa_setkey_crt(struct qat_rsa_ctx *ctx, struct rsa_key *rsa_key)
ctx              1068 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_crypto_instance *inst = ctx->inst;
ctx              1072 drivers/crypto/qat/qat_common/qat_asym_algs.c 	unsigned int half_key_sz = ctx->key_sz / 2;
ctx              1080 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_p, GFP_KERNEL);
ctx              1081 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->p)
ctx              1083 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->p + (half_key_sz - len), ptr, len);
ctx              1091 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->q = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_q, GFP_KERNEL);
ctx              1092 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->q)
ctx              1094 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->q + (half_key_sz - len), ptr, len);
ctx              1102 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dp = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_dp,
ctx              1104 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->dp)
ctx              1106 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->dp + (half_key_sz - len), ptr, len);
ctx              1114 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dq = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_dq,
ctx              1116 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->dq)
ctx              1118 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->dq + (half_key_sz - len), ptr, len);
ctx              1126 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->qinv = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_qinv,
ctx              1128 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->qinv)
ctx              1130 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memcpy(ctx->qinv + (half_key_sz - len), ptr, len);
ctx              1132 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->crt_mode = true;
ctx              1136 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memset(ctx->dq, '\0', half_key_sz);
ctx              1137 drivers/crypto/qat/qat_common/qat_asym_algs.c 	dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq);
ctx              1138 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dq = NULL;
ctx              1140 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memset(ctx->dp, '\0', half_key_sz);
ctx              1141 drivers/crypto/qat/qat_common/qat_asym_algs.c 	dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp);
ctx              1142 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dp = NULL;
ctx              1144 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memset(ctx->q, '\0', half_key_sz);
ctx              1145 drivers/crypto/qat/qat_common/qat_asym_algs.c 	dma_free_coherent(dev, half_key_sz, ctx->q, ctx->dma_q);
ctx              1146 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->q = NULL;
ctx              1148 drivers/crypto/qat/qat_common/qat_asym_algs.c 	memset(ctx->p, '\0', half_key_sz);
ctx              1149 drivers/crypto/qat/qat_common/qat_asym_algs.c 	dma_free_coherent(dev, half_key_sz, ctx->p, ctx->dma_p);
ctx              1150 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p = NULL;
ctx              1152 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->crt_mode = false;
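Each CRT half (p, q, dp, dq, qinv) is wiped with memset before dma_free_coherent() so private-key material never survives in the coherent pool; the same scrub-then-free sequence repeats in qat_rsa_clear_ctx below. A hypothetical helper condensing the idiom (the function name is not from the driver):

static void qat_free_secret(struct device *dev, size_t len,
			    void **cpu_addr, dma_addr_t dma_addr)
{
	if (!*cpu_addr)
		return;
	memset(*cpu_addr, 0, len);	/* wipe key material first */
	dma_free_coherent(dev, len, *cpu_addr, dma_addr);
	*cpu_addr = NULL;
}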
ctx              1155 drivers/crypto/qat/qat_common/qat_asym_algs.c static void qat_rsa_clear_ctx(struct device *dev, struct qat_rsa_ctx *ctx)
ctx              1157 drivers/crypto/qat/qat_common/qat_asym_algs.c 	unsigned int half_key_sz = ctx->key_sz / 2;
ctx              1160 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->n)
ctx              1161 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->n, ctx->dma_n);
ctx              1162 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->e)
ctx              1163 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->e, ctx->dma_e);
ctx              1164 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->d) {
ctx              1165 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->d, '\0', ctx->key_sz);
ctx              1166 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
ctx              1168 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->p) {
ctx              1169 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->p, '\0', half_key_sz);
ctx              1170 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, half_key_sz, ctx->p, ctx->dma_p);
ctx              1172 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->q) {
ctx              1173 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->q, '\0', half_key_sz);
ctx              1174 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, half_key_sz, ctx->q, ctx->dma_q);
ctx              1176 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->dp) {
ctx              1177 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->dp, '\0', half_key_sz);
ctx              1178 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp);
ctx              1180 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->dq) {
ctx              1181 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->dq, '\0', half_key_sz);
ctx              1182 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, half_key_sz, ctx->dq, ctx->dma_dq);
ctx              1184 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->qinv) {
ctx              1185 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->qinv, '\0', half_key_sz);
ctx              1186 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, half_key_sz, ctx->qinv, ctx->dma_qinv);
ctx              1189 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->n = NULL;
ctx              1190 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->e = NULL;
ctx              1191 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
ctx              1192 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->p = NULL;
ctx              1193 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->q = NULL;
ctx              1194 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dp = NULL;
ctx              1195 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->dq = NULL;
ctx              1196 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->qinv = NULL;
ctx              1197 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->crt_mode = false;
ctx              1198 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->key_sz = 0;
ctx              1204 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1205 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx              1209 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_rsa_clear_ctx(dev, ctx);
ctx              1218 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ret = qat_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz);
ctx              1221 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ret = qat_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);
ctx              1225 drivers/crypto/qat/qat_common/qat_asym_algs.c 		ret = qat_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
ctx              1228 drivers/crypto/qat/qat_common/qat_asym_algs.c 		qat_rsa_setkey_crt(ctx, &rsa_key);
ctx              1231 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (!ctx->n || !ctx->e) {
ctx              1236 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (private && !ctx->d) {
ctx              1244 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_rsa_clear_ctx(dev, ctx);
ctx              1262 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1264 drivers/crypto/qat/qat_common/qat_asym_algs.c 	return ctx->key_sz;
ctx              1269 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1276 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->key_sz = 0;
ctx              1277 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->inst = inst;
ctx              1283 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct qat_rsa_ctx *ctx = akcipher_tfm_ctx(tfm);
ctx              1284 drivers/crypto/qat/qat_common/qat_asym_algs.c 	struct device *dev = &GET_DEV(ctx->inst->accel_dev);
ctx              1286 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->n)
ctx              1287 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->n, ctx->dma_n);
ctx              1288 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->e)
ctx              1289 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->e, ctx->dma_e);
ctx              1290 drivers/crypto/qat/qat_common/qat_asym_algs.c 	if (ctx->d) {
ctx              1291 drivers/crypto/qat/qat_common/qat_asym_algs.c 		memset(ctx->d, '\0', ctx->key_sz);
ctx              1292 drivers/crypto/qat/qat_common/qat_asym_algs.c 		dma_free_coherent(dev, ctx->key_sz, ctx->d, ctx->dma_d);
ctx              1294 drivers/crypto/qat/qat_common/qat_asym_algs.c 	qat_crypto_put_instance(ctx->inst);
ctx              1295 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->n = NULL;
ctx              1296 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->e = NULL;
ctx              1297 drivers/crypto/qat/qat_common/qat_asym_algs.c 	ctx->d = NULL;
ctx               152 drivers/crypto/qat/qat_common/qat_hal.c 				     unsigned char ae, unsigned char ctx,
ctx               158 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx);
ctx               329 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned int ctx, cur_ctx;
ctx               333 drivers/crypto/qat/qat_common/qat_hal.c 	for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) {
ctx               334 drivers/crypto/qat/qat_common/qat_hal.c 		if (!(ctx_mask & (1 << ctx)))
ctx               336 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx);
ctx               344 drivers/crypto/qat/qat_common/qat_hal.c 				unsigned char ae, unsigned char ctx,
ctx               350 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx);
ctx               361 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned int ctx, cur_ctx;
ctx               364 drivers/crypto/qat/qat_common/qat_hal.c 	for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) {
ctx               365 drivers/crypto/qat/qat_common/qat_hal.c 		if (!(ctx_mask & (1 << ctx)))
ctx               367 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx);
ctx               377 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned int ctx, cur_ctx;
ctx               380 drivers/crypto/qat/qat_common/qat_hal.c 	for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) {
ctx               381 drivers/crypto/qat/qat_common/qat_hal.c 		if (!(ctx_mask & (1 << ctx)))
ctx               383 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_wr_ae_csr(handle, ae, CSR_CTX_POINTER, ctx);
ctx               541 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned int ctx;
ctx               543 drivers/crypto/qat/qat_common/qat_hal.c 	ctx = qat_hal_rd_ae_csr(handle, ae, CTX_ENABLES);
ctx               544 drivers/crypto/qat/qat_common/qat_hal.c 	ctx &= IGNORE_W1C_MASK &
ctx               546 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_ae_csr(handle, ae, CTX_ENABLES, ctx);
ctx               605 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned int ctx;
ctx               607 drivers/crypto/qat/qat_common/qat_hal.c 	ctx = qat_hal_rd_ae_csr(handle, ae, CTX_ENABLES);
ctx               608 drivers/crypto/qat/qat_common/qat_hal.c 	ctx &= IGNORE_W1C_MASK;
ctx               609 drivers/crypto/qat/qat_common/qat_hal.c 	ctx_mask &= (ctx & CE_INUSE_CONTEXTS) ? 0x55 : 0xFF;
ctx               610 drivers/crypto/qat/qat_common/qat_hal.c 	ctx |= (ctx_mask << CE_ENABLE_BITPOS);
ctx               611 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_ae_csr(handle, ae, CTX_ENABLES, ctx);
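CTX_ENABLES mixes enable bits with write-1-to-clear status bits, so qat_hal_enable_ctx masks the readback with IGNORE_W1C_MASK before OR-ing in the new enables, and narrows the mask to the even contexts (0x55) when the AE runs in four-context mode. A standalone model of that read-modify-write; the bit values below are illustrative, not the hardware's:

#include <stdio.h>

#define IGNORE_W1C_MASK   0xEFFFFFFFu	/* assumed: drop the W1C bit */
#define CE_INUSE_CONTEXTS 0x00000001u	/* assumed: 4-context mode flag */
#define CE_ENABLE_BITPOS  8

static unsigned int enable_ctx(unsigned int csr, unsigned int ctx_mask)
{
	csr &= IGNORE_W1C_MASK;		/* never re-write W1C status bits */
	ctx_mask &= (csr & CE_INUSE_CONTEXTS) ? 0x55 : 0xFF;
	return csr | (ctx_mask << CE_ENABLE_BITPOS);
}

int main(void)
{
	/* 4-ctx mode: a request for all 8 contexts enables only 0,2,4,6. */
	printf("%#x\n", enable_ctx(CE_INUSE_CONTEXTS, 0xFF));
	return 0;
}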
ctx               873 drivers/crypto/qat/qat_common/qat_hal.c 				   unsigned char ae, unsigned char ctx,
ctx               893 drivers/crypto/qat/qat_common/qat_hal.c 	ind_lm_addr0 = qat_hal_rd_indr_csr(handle, ae, ctx, LM_ADDR_0_INDIRECT);
ctx               894 drivers/crypto/qat/qat_common/qat_hal.c 	ind_lm_addr1 = qat_hal_rd_indr_csr(handle, ae, ctx, LM_ADDR_1_INDIRECT);
ctx               895 drivers/crypto/qat/qat_common/qat_hal.c 	ind_lm_addr_byte0 = qat_hal_rd_indr_csr(handle, ae, ctx,
ctx               897 drivers/crypto/qat/qat_common/qat_hal.c 	ind_lm_addr_byte1 = qat_hal_rd_indr_csr(handle, ae, ctx,
ctx               901 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_get_wakeup_event(handle, ae, ctx, &wakeup_events);
ctx               902 drivers/crypto/qat/qat_common/qat_hal.c 	savpc = qat_hal_rd_indr_csr(handle, ae, ctx, CTX_STS_INDIRECT);
ctx               909 drivers/crypto/qat/qat_common/qat_hal.c 	ind_cnt_sig = qat_hal_rd_indr_csr(handle, ae, ctx,
ctx               911 drivers/crypto/qat/qat_common/qat_hal.c 	ind_sig = qat_hal_rd_indr_csr(handle, ae, ctx,
ctx               917 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx), CTX_STS_INDIRECT, 0);
ctx               918 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_ae_csr(handle, ae, ACTIVE_CTX_STATUS, ctx & ACS_ACNO);
ctx               921 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_put_wakeup_event(handle, ae, (1 << ctx), XCWE_VOLUNTARY);
ctx               922 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx), CTX_SIG_EVENTS_INDIRECT, 0);
ctx               924 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_enable_ctx(handle, ae, (1 << ctx));
ctx               931 drivers/crypto/qat/qat_common/qat_hal.c 		ctx_status = qat_hal_rd_indr_csr(handle, ae, ctx,
ctx               936 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_disable_ctx(handle, ae, (1 << ctx));
ctx               939 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_put_wakeup_event(handle, ae, (1 << ctx), wakeup_events);
ctx               940 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx), CTX_STS_INDIRECT,
ctx               948 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
ctx               950 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
ctx               952 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
ctx               954 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
ctx               956 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
ctx               958 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_indr_csr(handle, ae, (1 << ctx),
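qat_hal_exec_micro_inst brackets the borrowed context with a full save and restore: the indirect LM-address, byte-index, wakeup-event, PC and signal CSRs are read first, the context is pointed at the scratch program and enabled, polled until it halts, then every CSR is written back. A skeleton of that bracket, trimmed to the program counter; CTX_DONE_BIT, MAX_EXEC_POLLS and the helper name are assumptions, while the qat_hal_* calls mirror the listing:

#define CTX_DONE_BIT	(1u << 31)	/* assumed halt/status bit */
#define MAX_EXEC_POLLS	0x4000		/* assumed poll bound */

static int exec_with_state_save(struct icp_qat_fw_loader_handle *handle,
				unsigned char ae, unsigned char ctx)
{
	unsigned int saved_pc, status;
	int retries = 0;

	saved_pc = qat_hal_rd_indr_csr(handle, ae, ctx, CTX_STS_INDIRECT);

	/* Aim the context at address 0 of the scratch code and run it. */
	qat_hal_wr_indr_csr(handle, ae, 1 << ctx, CTX_STS_INDIRECT, 0);
	qat_hal_enable_ctx(handle, ae, 1 << ctx);
	do {
		status = qat_hal_rd_indr_csr(handle, ae, ctx,
					     CTX_STS_INDIRECT);
	} while (!(status & CTX_DONE_BIT) && retries++ < MAX_EXEC_POLLS);
	qat_hal_disable_ctx(handle, ae, 1 << ctx);

	/* Put the context back exactly where it was. */
	qat_hal_wr_indr_csr(handle, ae, 1 << ctx, CTX_STS_INDIRECT, saved_pc);
	return (retries > MAX_EXEC_POLLS) ? -ETIMEDOUT : 0;
}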
ctx               967 drivers/crypto/qat/qat_common/qat_hal.c 			      unsigned char ae, unsigned char ctx,
ctx               994 drivers/crypto/qat/qat_common/qat_hal.c 	if (ctx != (savctx & ACS_ACNO))
ctx               996 drivers/crypto/qat/qat_common/qat_hal.c 				  ctx & ACS_ACNO);
ctx              1018 drivers/crypto/qat/qat_common/qat_hal.c 	if (ctx != (savctx & ACS_ACNO))
ctx              1028 drivers/crypto/qat/qat_common/qat_hal.c 			      unsigned char ae, unsigned char ctx,
ctx              1070 drivers/crypto/qat/qat_common/qat_hal.c 	return qat_hal_exec_micro_inst(handle, ae, ctx, insts, num_inst,
ctx              1109 drivers/crypto/qat/qat_common/qat_hal.c 				      unsigned char ae, unsigned char ctx,
ctx              1118 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0, &gpra0);
ctx              1119 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0x1, &gpra1);
ctx              1120 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0x2, &gpra2);
ctx              1121 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPB_REL, 0, &gprb0);
ctx              1122 drivers/crypto/qat/qat_common/qat_hal.c 		qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPB_REL, 0x1, &gprb1);
ctx              1125 drivers/crypto/qat/qat_common/qat_hal.c 	stat = qat_hal_exec_micro_inst(handle, ae, ctx, micro_inst, inst_num, 1,
ctx              1129 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0, gpra0);
ctx              1130 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0x1, gpra1);
ctx              1131 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPA_REL, 0x2, gpra2);
ctx              1132 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPB_REL, 0, gprb0);
ctx              1133 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPB_REL, 0x1, gprb1);
ctx              1182 drivers/crypto/qat/qat_common/qat_hal.c 				   unsigned char ae, unsigned char ctx,
ctx              1194 drivers/crypto/qat/qat_common/qat_hal.c 		if (ctx & 0x1) {
ctx              1195 drivers/crypto/qat/qat_common/qat_hal.c 			pr_err("QAT: bad 4-ctx mode,ctx=0x%x\n", ctx);
ctx              1205 drivers/crypto/qat/qat_common/qat_hal.c 	reg_addr = reg_num + (ctx << 0x5);
ctx              1223 drivers/crypto/qat/qat_common/qat_hal.c 				   unsigned char ae, unsigned char ctx,
ctx              1244 drivers/crypto/qat/qat_common/qat_hal.c 		if (ctx & 0x1) {
ctx              1245 drivers/crypto/qat/qat_common/qat_hal.c 			pr_err("QAT: 4-ctx mode,ctx=0x%x\n", ctx);
ctx              1259 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_rd_rel_reg(handle, ae, ctx, ICP_GPB_REL, gprnum, &gprval);
ctx              1273 drivers/crypto/qat/qat_common/qat_hal.c 	status = qat_hal_exec_micro_inst(handle, ae, ctx, micro_inst, num_inst,
ctx              1275 drivers/crypto/qat/qat_common/qat_hal.c 	qat_hal_wr_rel_reg(handle, ae, ctx, ICP_GPB_REL, gprnum, gprval);
ctx              1280 drivers/crypto/qat/qat_common/qat_hal.c 			      unsigned char ae, unsigned char ctx,
ctx              1290 drivers/crypto/qat/qat_common/qat_hal.c 	stat = qat_hal_put_rel_wr_xfer(handle, ae, ctx, ICP_NEIGH_REL, nn, val);
ctx              1299 drivers/crypto/qat/qat_common/qat_hal.c 				      unsigned char *ctx)
ctx              1307 drivers/crypto/qat/qat_common/qat_hal.c 		*ctx = (absreg_num >> 0x4) & 0x6;
ctx              1311 drivers/crypto/qat/qat_common/qat_hal.c 		*ctx = (absreg_num >> 0x4) & 0x7;
ctx              1323 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned char ctx = 0;
ctx              1332 drivers/crypto/qat/qat_common/qat_hal.c 						   &ctx);
ctx              1337 drivers/crypto/qat/qat_common/qat_hal.c 			if (!test_bit(ctx, (unsigned long *)&ctx_mask))
ctx              1340 drivers/crypto/qat/qat_common/qat_hal.c 		stat = qat_hal_wr_rel_reg(handle, ae, ctx, type, reg, regdata);
ctx              1345 drivers/crypto/qat/qat_common/qat_hal.c 	} while (ctx_mask && (ctx++ < ICP_QAT_UCLO_MAX_CTX));
ctx              1357 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned char ctx = 0;
ctx              1366 drivers/crypto/qat/qat_common/qat_hal.c 						   &ctx);
ctx              1371 drivers/crypto/qat/qat_common/qat_hal.c 			if (!test_bit(ctx, (unsigned long *)&ctx_mask))
ctx              1374 drivers/crypto/qat/qat_common/qat_hal.c 		stat = qat_hal_put_rel_wr_xfer(handle, ae, ctx, type, reg,
ctx              1380 drivers/crypto/qat/qat_common/qat_hal.c 	} while (ctx_mask && (ctx++ < ICP_QAT_UCLO_MAX_CTX));
ctx              1392 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned char ctx = 0;
ctx              1401 drivers/crypto/qat/qat_common/qat_hal.c 						   &ctx);
ctx              1406 drivers/crypto/qat/qat_common/qat_hal.c 			if (!test_bit(ctx, (unsigned long *)&ctx_mask))
ctx              1409 drivers/crypto/qat/qat_common/qat_hal.c 		stat = qat_hal_put_rel_rd_xfer(handle, ae, ctx, type, reg,
ctx              1415 drivers/crypto/qat/qat_common/qat_hal.c 	} while (ctx_mask && (ctx++ < ICP_QAT_UCLO_MAX_CTX));
ctx              1425 drivers/crypto/qat/qat_common/qat_hal.c 	unsigned char ctx;
ctx              1430 drivers/crypto/qat/qat_common/qat_hal.c 	for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++) {
ctx              1431 drivers/crypto/qat/qat_common/qat_hal.c 		if (!test_bit(ctx, (unsigned long *)&ctx_mask))
ctx              1433 drivers/crypto/qat/qat_common/qat_hal.c 		stat = qat_hal_put_rel_nn(handle, ae, ctx, reg_num, regdata);
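The batch writers (qat_hal_init_gpr, the wr/rd xfer variants and qat_hal_init_nn) all walk contexts the same way: derive the owning context from the absolute register number in relative mode, then skip any context whose bit is clear in ctx_mask. A standalone model of the skip logic:

#include <stdio.h>

#define MAX_CTX 8	/* stands in for ICP_QAT_UCLO_MAX_CTX */

int main(void)
{
	unsigned long ctx_mask = 0x55;	/* even contexts, 4-ctx mode */
	unsigned int ctx;

	for (ctx = 0; ctx < MAX_CTX; ctx++) {
		if (!(ctx_mask & (1UL << ctx)))
			continue;	/* context not selected */
		printf("write context %u\n", ctx);
	}
	return 0;
}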
ctx               857 drivers/crypto/qat/qat_common/qat_uclo.c 			if (!((1 << init_regsym->ctx) & ctx_mask)) {
ctx               859 drivers/crypto/qat/qat_common/qat_uclo.c 				       init_regsym->ctx);
ctx               864 drivers/crypto/qat/qat_common/qat_uclo.c 					  (1 << init_regsym->ctx),
ctx              1590 drivers/crypto/qat/qat_common/qat_uclo.c 	int ctx;
ctx              1615 drivers/crypto/qat/qat_common/qat_uclo.c 		for (ctx = 0; ctx < ICP_QAT_UCLO_MAX_CTX; ctx++)
ctx              1616 drivers/crypto/qat/qat_common/qat_uclo.c 			obj_handle->ae_data[ae].ae_slices[s].cur_page[ctx] =
ctx              1617 drivers/crypto/qat/qat_common/qat_uclo.c 					(ctx_mask & (1 << ctx)) ? page : NULL;
ctx               156 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               170 drivers/crypto/qce/ablkcipher.c 	ctx->enc_keylen = keylen;
ctx               171 drivers/crypto/qce/ablkcipher.c 	memcpy(ctx->enc_key, key, keylen);
ctx               174 drivers/crypto/qce/ablkcipher.c 	ret = crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
ctx               176 drivers/crypto/qce/ablkcipher.c 		ctx->enc_keylen = keylen;
ctx               183 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_ablkcipher_ctx(ablk);
ctx               190 drivers/crypto/qce/ablkcipher.c 	ctx->enc_keylen = keylen;
ctx               191 drivers/crypto/qce/ablkcipher.c 	memcpy(ctx->enc_key, key, keylen);
ctx               198 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_ablkcipher_ctx(ablk);
ctx               205 drivers/crypto/qce/ablkcipher.c 	ctx->enc_keylen = keylen;
ctx               206 drivers/crypto/qce/ablkcipher.c 	memcpy(ctx->enc_key, key, keylen);
ctx               214 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               222 drivers/crypto/qce/ablkcipher.c 	if (IS_AES(rctx->flags) && ctx->enc_keylen != AES_KEYSIZE_128 &&
ctx               223 drivers/crypto/qce/ablkcipher.c 	    ctx->enc_keylen != AES_KEYSIZE_256) {
ctx               224 drivers/crypto/qce/ablkcipher.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               226 drivers/crypto/qce/ablkcipher.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               252 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               254 drivers/crypto/qce/ablkcipher.c 	memset(ctx, 0, sizeof(*ctx));
ctx               257 drivers/crypto/qce/ablkcipher.c 	ctx->fallback = crypto_alloc_sync_skcipher(crypto_tfm_alg_name(tfm),
ctx               259 drivers/crypto/qce/ablkcipher.c 	return PTR_ERR_OR_ZERO(ctx->fallback);
ctx               264 drivers/crypto/qce/ablkcipher.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               266 drivers/crypto/qce/ablkcipher.c 	crypto_free_sync_skcipher(ctx->fallback);
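The qce cipher keeps a sync-skcipher fallback, allocated in init_tfm by the algorithm's own name, and routes requests through it whenever the AES key length is neither 128 nor 256 bits (the engine lacks AES-192). A reconstruction of that delegation around the lines shown; the helper name is hypothetical and the encrypt flag stands for the direction bit carried by the surrounding function:

static int qce_do_fallback(struct ablkcipher_request *req,
			   struct qce_cipher_ctx *ctx, int encrypt)
{
	int ret;

	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);

	skcipher_request_set_sync_tfm(subreq, ctx->fallback);
	skcipher_request_set_callback(subreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->nbytes, req->info);
	ret = encrypt ? crypto_skcipher_encrypt(subreq) :
			crypto_skcipher_decrypt(subreq);
	skcipher_request_zero(subreq);	/* wipe the on-stack request */
	return ret;
}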
ctx               312 drivers/crypto/qce/common.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
ctx               326 drivers/crypto/qce/common.c 		keylen = ctx->enc_keylen / 2;
ctx               328 drivers/crypto/qce/common.c 		keylen = ctx->enc_keylen;
ctx               330 drivers/crypto/qce/common.c 	qce_cpu_to_be32p_array(enckey, ctx->enc_key, keylen);
ctx               345 drivers/crypto/qce/common.c 			qce_xtskey(qce, ctx->enc_key, ctx->enc_keylen,
ctx                72 drivers/crypto/qce/sha.c 	struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
ctx                79 drivers/crypto/qce/sha.c 		rctx->authkey = ctx->authkey;
ctx                82 drivers/crypto/qce/sha.c 		rctx->authkey = ctx->authkey;
ctx               348 drivers/crypto/qce/sha.c 	struct qce_sha_ctx *ctx = crypto_tfm_ctx(&tfm->base);
ctx               359 drivers/crypto/qce/sha.c 	memset(ctx->authkey, 0, sizeof(ctx->authkey));
ctx               362 drivers/crypto/qce/sha.c 		memcpy(ctx->authkey, key, keylen);
ctx               396 drivers/crypto/qce/sha.c 	ahash_request_set_crypt(req, &sg, ctx->authkey, keylen);
ctx               413 drivers/crypto/qce/sha.c 	struct qce_sha_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               416 drivers/crypto/qce/sha.c 	memset(ctx, 0, sizeof(*ctx));
ctx                74 drivers/crypto/qcom-rng.c 	struct qcom_rng_ctx *ctx = crypto_rng_ctx(tfm);
ctx                75 drivers/crypto/qcom-rng.c 	struct qcom_rng *rng = ctx->rng;
ctx               129 drivers/crypto/qcom-rng.c 	struct qcom_rng_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               131 drivers/crypto/qcom-rng.c 	ctx->rng = qcom_rng_dev;
ctx               133 drivers/crypto/qcom-rng.c 	if (!ctx->rng->skip_init)
ctx               134 drivers/crypto/qcom-rng.c 		return qcom_rng_enable(ctx->rng);
ctx                34 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                41 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->keylen = keylen;
ctx                42 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
ctx                49 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx                56 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->keylen = keylen;
ctx                57 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
ctx                64 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx                71 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->keylen = keylen;
ctx                72 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
ctx                79 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx                80 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx                82 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_AES_ECB_MODE;
ctx                89 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx                90 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx                92 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
ctx                99 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               100 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               102 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_AES_CBC_MODE;
ctx               109 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               110 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               112 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
ctx               119 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               120 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               122 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = 0;
ctx               129 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               130 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               132 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_DEC;
ctx               139 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               140 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               142 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
ctx               149 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               150 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               152 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
ctx               159 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               160 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               162 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_SELECT;
ctx               169 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               170 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               172 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
ctx               179 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               180 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               182 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
ctx               189 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               190 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_crypto_info *dev = ctx->dev;
ctx               192 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
ctx               203 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               210 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
ctx               213 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
ctx               217 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
ctx               221 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		if (ctx->keylen == AES_KEYSIZE_192)
ctx               222 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
ctx               223 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		else if (ctx->keylen == AES_KEYSIZE_256)
ctx               224 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
ctx               225 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
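Each rk3288 entry point records only direction and chaining bits in ctx->mode; the start path above then ORs in the FIFO flag and, for AES, a key-width field before a single control-register write. A standalone model of that two-stage composition; the bit values are illustrative only, the real RK_CRYPTO_* masks live in the driver header:

#include <stdio.h>

#define MODE_CBC	(1u << 0)	/* assumed positions */
#define MODE_DEC	(1u << 1)
#define FIFO_MODE	(1u << 2)
#define KEY_192		(1u << 3)
#define KEY_256		(1u << 4)

int main(void)
{
	unsigned int mode = MODE_CBC | MODE_DEC;  /* set per request */
	unsigned int keylen = 32;		  /* from setkey */

	mode |= FIFO_MODE;			  /* added at start time */
	if (keylen == 24)
		mode |= KEY_192;
	else if (keylen == 32)
		mode |= KEY_256;
	printf("ctrl word: %#x\n", mode);	  /* prints 0x17 */
	return 0;
}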
ctx               250 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               258 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	if (ctx->mode & RK_CRYPTO_DEC) {
ctx               259 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		memcpy(ctx->iv, src_last_blk, ivsize);
ctx               298 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               302 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	if (!(ctx->mode & RK_CRYPTO_DEC)) {
ctx               318 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               322 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	if (ctx->mode & RK_CRYPTO_DEC) {
ctx               323 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		new_iv = ctx->iv;
ctx               380 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               386 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev = algt->dev;
ctx               387 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
ctx               388 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->start = rk_ablk_start;
ctx               389 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->update = rk_ablk_rx;
ctx               390 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->complete = rk_crypto_complete;
ctx               391 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
ctx               393 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
ctx               398 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               400 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	free_page((unsigned long)ctx->dev->addr_vir);
ctx               401 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 	ctx->dev->disable_clk(ctx->dev);
ctx                83 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx                85 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx                96 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx                98 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               111 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               113 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               125 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               127 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               142 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               144 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
ctx               155 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               157 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
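
Every rk3288_crypto_ahash.c entry above repeats one delegation idiom: the per-request fallback_req is re-targeted at the software fallback_tfm allocated at init time, then the call is forwarded. A sketch of the idiom for the init entry point, assuming the request context embeds the fallback request as the listing suggests:

#include <crypto/hash.h>

struct demo_ctx  { struct crypto_ahash *fallback_tfm; };
struct demo_rctx { struct ahash_request fallback_req; };

static int demo_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct demo_ctx *ctx = crypto_ahash_ctx(tfm);
	struct demo_rctx *rctx = ahash_request_ctx(req);

	/* point the embedded request at the software implementation */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	return crypto_ahash_init(&rctx->fallback_req);
}
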
ctx               307 drivers/crypto/s5p-sss.c 	struct s5p_aes_ctx		*ctx;
ctx               788 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx               789 drivers/crypto/s5p-sss.c 	struct s5p_aes_dev *dd = ctx->dd;
ctx               790 drivers/crypto/s5p-sss.c 	u32 *hash = (u32 *)ctx->digest;
ctx               793 drivers/crypto/s5p-sss.c 	for (i = 0; i < ctx->nregs; i++)
ctx               803 drivers/crypto/s5p-sss.c 				  const struct s5p_hash_reqctx *ctx)
ctx               805 drivers/crypto/s5p-sss.c 	const u32 *hash = (const u32 *)ctx->digest;
ctx               808 drivers/crypto/s5p-sss.c 	for (i = 0; i < ctx->nregs; i++)
ctx               818 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx               820 drivers/crypto/s5p-sss.c 	s5p_hash_write_ctx_iv(ctx->dd, ctx);
ctx               829 drivers/crypto/s5p-sss.c 	const struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx               834 drivers/crypto/s5p-sss.c 	memcpy(req->result, ctx->digest, ctx->nregs * HASH_REG_SIZEOF);
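
The s5p-sss.c helpers above move the digest between engine and memory: a register-read loop fills ctx->digest, a mirrored write loop reloads it as an IV when an operation resumes, and the final memcpy hands ctx->nregs 32-bit words back to the caller. A sketch of the read side against a hypothetical register window:

#include <linux/io.h>

#define DEMO_HASH_REG_SIZEOF 4	/* one 32-bit result register */

static void demo_read_digest(void __iomem *base, u32 *digest,
			     unsigned int nregs)
{
	unsigned int i;

	for (i = 0; i < nregs; i++)
		digest[i] = readl_relaxed(base + i * DEMO_HASH_REG_SIZEOF);
}
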
ctx               936 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(dd->hash_req);
ctx               941 drivers/crypto/s5p-sss.c 	configflags = ctx->engine | SSS_HASH_INIT_BIT;
ctx               943 drivers/crypto/s5p-sss.c 	if (likely(ctx->digcnt)) {
ctx               944 drivers/crypto/s5p-sss.c 		s5p_hash_write_ctx_iv(dd, ctx);
ctx               953 drivers/crypto/s5p-sss.c 		tmplen = ctx->digcnt * 8;
ctx               986 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(dd->hash_req);
ctx               989 drivers/crypto/s5p-sss.c 	cnt = dma_map_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
ctx               992 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx               997 drivers/crypto/s5p-sss.c 	dd->hash_sg_iter = ctx->sg;
ctx              1000 drivers/crypto/s5p-sss.c 	ctx->digcnt += length;
ctx              1001 drivers/crypto/s5p-sss.c 	ctx->total -= length;
ctx              1024 drivers/crypto/s5p-sss.c static int s5p_hash_copy_sgs(struct s5p_hash_reqctx *ctx,
ctx              1030 drivers/crypto/s5p-sss.c 	len = new_len + ctx->bufcnt;
ctx              1035 drivers/crypto/s5p-sss.c 		dev_err(ctx->dd->dev, "alloc pages for unaligned case.\n");
ctx              1036 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx              1040 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt)
ctx              1041 drivers/crypto/s5p-sss.c 		memcpy(buf, ctx->dd->xmit_buf, ctx->bufcnt);
ctx              1043 drivers/crypto/s5p-sss.c 	scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->skip,
ctx              1045 drivers/crypto/s5p-sss.c 	sg_init_table(ctx->sgl, 1);
ctx              1046 drivers/crypto/s5p-sss.c 	sg_set_buf(ctx->sgl, buf, len);
ctx              1047 drivers/crypto/s5p-sss.c 	ctx->sg = ctx->sgl;
ctx              1048 drivers/crypto/s5p-sss.c 	ctx->sg_len = 1;
ctx              1049 drivers/crypto/s5p-sss.c 	ctx->bufcnt = 0;
ctx              1050 drivers/crypto/s5p-sss.c 	ctx->skip = 0;
ctx              1051 drivers/crypto/s5p-sss.c 	set_bit(HASH_FLAGS_SGS_COPIED, &ctx->dd->hash_flags);
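
s5p_hash_copy_sgs() handles the unaligned case by linearizing everything into one freshly allocated region: buffered bytes first, then the scatterlist payload, described afterwards by a single-entry table. A sketch under those assumptions, with allocation flags and error handling reduced to the essentials:

#include <crypto/scatterwalk.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static void *demo_flatten(struct scatterlist *src, unsigned int skip,
			  const void *buffered, unsigned int bufcnt,
			  unsigned int new_len, struct scatterlist *out)
{
	unsigned int len = bufcnt + new_len;
	void *buf = (void *)__get_free_pages(GFP_ATOMIC, get_order(len));

	if (!buf)
		return NULL;
	if (bufcnt)
		memcpy(buf, buffered, bufcnt);
	scatterwalk_map_and_copy(buf + bufcnt, src, skip, new_len, 0);
	sg_init_table(out, 1);
	sg_set_buf(out, buf, len);
	return buf;
}
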
ctx              1070 drivers/crypto/s5p-sss.c static int s5p_hash_copy_sg_lists(struct s5p_hash_reqctx *ctx,
ctx              1073 drivers/crypto/s5p-sss.c 	unsigned int skip = ctx->skip, n = sg_nents(sg);
ctx              1077 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt)
ctx              1080 drivers/crypto/s5p-sss.c 	ctx->sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);
ctx              1081 drivers/crypto/s5p-sss.c 	if (!ctx->sg) {
ctx              1082 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx              1086 drivers/crypto/s5p-sss.c 	sg_init_table(ctx->sg, n);
ctx              1088 drivers/crypto/s5p-sss.c 	tmp = ctx->sg;
ctx              1090 drivers/crypto/s5p-sss.c 	ctx->sg_len = 0;
ctx              1092 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt) {
ctx              1093 drivers/crypto/s5p-sss.c 		sg_set_buf(tmp, ctx->dd->xmit_buf, ctx->bufcnt);
ctx              1095 drivers/crypto/s5p-sss.c 		ctx->sg_len++;
ctx              1115 drivers/crypto/s5p-sss.c 		ctx->sg_len++;
ctx              1119 drivers/crypto/s5p-sss.c 	set_bit(HASH_FLAGS_SGS_ALLOCED, &ctx->dd->hash_flags);
ctx              1140 drivers/crypto/s5p-sss.c static int s5p_hash_prepare_sgs(struct s5p_hash_reqctx *ctx,
ctx              1144 drivers/crypto/s5p-sss.c 	unsigned int skip = ctx->skip, nbytes = new_len, n = 0;
ctx              1181 drivers/crypto/s5p-sss.c 		return s5p_hash_copy_sgs(ctx, sg, new_len);
ctx              1183 drivers/crypto/s5p-sss.c 		return s5p_hash_copy_sg_lists(ctx, sg, new_len);
ctx              1189 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt) {
ctx              1190 drivers/crypto/s5p-sss.c 		ctx->sg_len = n;
ctx              1191 drivers/crypto/s5p-sss.c 		sg_init_table(ctx->sgl, 2);
ctx              1192 drivers/crypto/s5p-sss.c 		sg_set_buf(ctx->sgl, ctx->dd->xmit_buf, ctx->bufcnt);
ctx              1193 drivers/crypto/s5p-sss.c 		sg_chain(ctx->sgl, 2, sg);
ctx              1194 drivers/crypto/s5p-sss.c 		ctx->sg = ctx->sgl;
ctx              1195 drivers/crypto/s5p-sss.c 		ctx->sg_len++;
ctx              1197 drivers/crypto/s5p-sss.c 		ctx->sg = sg;
ctx              1198 drivers/crypto/s5p-sss.c 		ctx->sg_len = n;
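
When the data is already aligned, s5p_hash_prepare_sgs() avoids the copy entirely: a two-slot table holds the buffered bytes in slot one and chains slot two onto the caller's list. A sketch of that zero-copy prefix:

#include <linux/scatterlist.h>

static void demo_prefix_chain(struct scatterlist sgl[2], void *xmit_buf,
			      unsigned int bufcnt, struct scatterlist *data)
{
	sg_init_table(sgl, 2);
	sg_set_buf(sgl, xmit_buf, bufcnt);
	sg_chain(sgl, 2, data);	/* slot 2 becomes a link, not an entry */
}

The chained list then counts one extra entry via ctx->sg_len++, as in the listing.
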
ctx              1216 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1217 drivers/crypto/s5p-sss.c 	bool final = ctx->finup;
ctx              1226 drivers/crypto/s5p-sss.c 	ctx->total = nbytes + ctx->bufcnt;
ctx              1227 drivers/crypto/s5p-sss.c 	if (!ctx->total)
ctx              1230 drivers/crypto/s5p-sss.c 	if (nbytes && (!IS_ALIGNED(ctx->bufcnt, BUFLEN))) {
ctx              1232 drivers/crypto/s5p-sss.c 		int len = BUFLEN - ctx->bufcnt % BUFLEN;
ctx              1237 drivers/crypto/s5p-sss.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, req->src,
ctx              1239 drivers/crypto/s5p-sss.c 		ctx->bufcnt += len;
ctx              1241 drivers/crypto/s5p-sss.c 		ctx->skip = len;
ctx              1243 drivers/crypto/s5p-sss.c 		ctx->skip = 0;
ctx              1246 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt)
ctx              1247 drivers/crypto/s5p-sss.c 		memcpy(ctx->dd->xmit_buf, ctx->buffer, ctx->bufcnt);
ctx              1249 drivers/crypto/s5p-sss.c 	xmit_len = ctx->total;
ctx              1258 drivers/crypto/s5p-sss.c 		hash_later = ctx->total - xmit_len;
ctx              1261 drivers/crypto/s5p-sss.c 		scatterwalk_map_and_copy(ctx->buffer, req->src,
ctx              1267 drivers/crypto/s5p-sss.c 		ret = s5p_hash_prepare_sgs(ctx, req->src, nbytes - hash_later,
ctx              1273 drivers/crypto/s5p-sss.c 		if (unlikely(!ctx->bufcnt)) {
ctx              1275 drivers/crypto/s5p-sss.c 			scatterwalk_map_and_copy(ctx->dd->xmit_buf, req->src,
ctx              1279 drivers/crypto/s5p-sss.c 		sg_init_table(ctx->sgl, 1);
ctx              1280 drivers/crypto/s5p-sss.c 		sg_set_buf(ctx->sgl, ctx->dd->xmit_buf, xmit_len);
ctx              1282 drivers/crypto/s5p-sss.c 		ctx->sg = ctx->sgl;
ctx              1283 drivers/crypto/s5p-sss.c 		ctx->sg_len = 1;
ctx              1286 drivers/crypto/s5p-sss.c 	ctx->bufcnt = hash_later;
ctx              1288 drivers/crypto/s5p-sss.c 		ctx->total = xmit_len;
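
s5p_hash_prepare_request() splits the running total so the engine only ever sees whole BUFLEN-sized chunks; the unaligned tail is parked in ctx->buffer for the next update. A rough sketch of that split (the driver's exact rounding may differ; buflen is a placeholder here):

#include <linux/types.h>

static void demo_split(unsigned int total, bool final, unsigned int buflen,
		       unsigned int *xmit_len, unsigned int *hash_later)
{
	*xmit_len = total;
	if (!final)
		*xmit_len -= total % buflen;	/* hold back the tail */
	*hash_later = total - *xmit_len;
}
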
ctx              1301 drivers/crypto/s5p-sss.c 	const struct s5p_hash_reqctx *ctx = ahash_request_ctx(dd->hash_req);
ctx              1303 drivers/crypto/s5p-sss.c 	dma_unmap_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
ctx              1313 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1314 drivers/crypto/s5p-sss.c 	struct s5p_aes_dev *dd = ctx->dd;
ctx              1316 drivers/crypto/s5p-sss.c 	if (ctx->digcnt)
ctx              1319 drivers/crypto/s5p-sss.c 	dev_dbg(dd->dev, "hash_finish digcnt: %lld\n", ctx->digcnt);
ctx              1329 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1330 drivers/crypto/s5p-sss.c 	struct s5p_aes_dev *dd = ctx->dd;
ctx              1334 drivers/crypto/s5p-sss.c 		free_pages((unsigned long)sg_virt(ctx->sg),
ctx              1335 drivers/crypto/s5p-sss.c 			   get_order(ctx->sg->length));
ctx              1338 drivers/crypto/s5p-sss.c 		kfree(ctx->sg);
ctx              1340 drivers/crypto/s5p-sss.c 	ctx->sg = NULL;
ctx              1344 drivers/crypto/s5p-sss.c 	if (!err && !ctx->error) {
ctx              1349 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx              1376 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx;
ctx              1405 drivers/crypto/s5p-sss.c 	ctx = ahash_request_ctx(req);
ctx              1407 drivers/crypto/s5p-sss.c 	err = s5p_hash_prepare_request(req, ctx->op_update);
ctx              1408 drivers/crypto/s5p-sss.c 	if (err || !ctx->total)
ctx              1412 drivers/crypto/s5p-sss.c 		ctx->op_update, req->nbytes);
ctx              1415 drivers/crypto/s5p-sss.c 	if (ctx->digcnt)
ctx              1418 drivers/crypto/s5p-sss.c 	if (ctx->op_update) { /* HASH_OP_UPDATE */
ctx              1419 drivers/crypto/s5p-sss.c 		err = s5p_hash_xmit_dma(dd, ctx->total, ctx->finup);
ctx              1420 drivers/crypto/s5p-sss.c 		if (err != -EINPROGRESS && ctx->finup && !ctx->error)
ctx              1422 drivers/crypto/s5p-sss.c 			err = s5p_hash_xmit_dma(dd, ctx->total, true);
ctx              1424 drivers/crypto/s5p-sss.c 		err = s5p_hash_xmit_dma(dd, ctx->total, true);
ctx              1489 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1492 drivers/crypto/s5p-sss.c 	ctx->op_update = op;
ctx              1508 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1513 drivers/crypto/s5p-sss.c 	if (ctx->bufcnt + req->nbytes <= BUFLEN) {
ctx              1514 drivers/crypto/s5p-sss.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, req->src,
ctx              1516 drivers/crypto/s5p-sss.c 		ctx->bufcnt += req->nbytes;
ctx              1548 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1551 drivers/crypto/s5p-sss.c 				     ctx->buffer, ctx->bufcnt, req->result);
ctx              1579 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1581 drivers/crypto/s5p-sss.c 	ctx->finup = true;
ctx              1582 drivers/crypto/s5p-sss.c 	if (ctx->error)
ctx              1585 drivers/crypto/s5p-sss.c 	if (!ctx->digcnt && ctx->bufcnt < BUFLEN)
ctx              1599 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1602 drivers/crypto/s5p-sss.c 	ctx->finup = true;
ctx              1626 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1630 drivers/crypto/s5p-sss.c 	ctx->dd = tctx->dd;
ctx              1631 drivers/crypto/s5p-sss.c 	ctx->error = false;
ctx              1632 drivers/crypto/s5p-sss.c 	ctx->finup = false;
ctx              1633 drivers/crypto/s5p-sss.c 	ctx->bufcnt = 0;
ctx              1634 drivers/crypto/s5p-sss.c 	ctx->digcnt = 0;
ctx              1635 drivers/crypto/s5p-sss.c 	ctx->total = 0;
ctx              1636 drivers/crypto/s5p-sss.c 	ctx->skip = 0;
ctx              1643 drivers/crypto/s5p-sss.c 		ctx->engine = SSS_HASH_ENGINE_MD5;
ctx              1644 drivers/crypto/s5p-sss.c 		ctx->nregs = HASH_MD5_MAX_REG;
ctx              1647 drivers/crypto/s5p-sss.c 		ctx->engine = SSS_HASH_ENGINE_SHA1;
ctx              1648 drivers/crypto/s5p-sss.c 		ctx->nregs = HASH_SHA1_MAX_REG;
ctx              1651 drivers/crypto/s5p-sss.c 		ctx->engine = SSS_HASH_ENGINE_SHA256;
ctx              1652 drivers/crypto/s5p-sss.c 		ctx->nregs = HASH_SHA256_MAX_REG;
ctx              1655 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx              1727 drivers/crypto/s5p-sss.c 	const struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1729 drivers/crypto/s5p-sss.c 	memcpy(out, ctx, sizeof(*ctx) + ctx->bufcnt);
ctx              1741 drivers/crypto/s5p-sss.c 	struct s5p_hash_reqctx *ctx = ahash_request_ctx(req);
ctx              1746 drivers/crypto/s5p-sss.c 	memcpy(ctx, in, sizeof(*ctx) + BUFLEN);
ctx              1748 drivers/crypto/s5p-sss.c 		ctx->error = true;
ctx              1752 drivers/crypto/s5p-sss.c 	ctx->dd = tctx->dd;
ctx              1753 drivers/crypto/s5p-sss.c 	ctx->error = false;
ctx              1952 drivers/crypto/s5p-sss.c 	if (dev->ctx->keylen == AES_KEYSIZE_192)
ctx              1954 drivers/crypto/s5p-sss.c 	else if (dev->ctx->keylen == AES_KEYSIZE_256)
ctx              1981 drivers/crypto/s5p-sss.c 	s5p_set_aes(dev, dev->ctx->aes_key, iv, ctr, dev->ctx->keylen);
ctx              2025 drivers/crypto/s5p-sss.c 	dev->ctx = crypto_tfm_ctx(dev->req->base.tfm);
ctx              2056 drivers/crypto/s5p-sss.c 	struct s5p_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx              2057 drivers/crypto/s5p-sss.c 	struct s5p_aes_dev *dev = ctx->dev;
ctx              2077 drivers/crypto/s5p-sss.c 	struct s5p_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2084 drivers/crypto/s5p-sss.c 	memcpy(ctx->aes_key, key, keylen);
ctx              2085 drivers/crypto/s5p-sss.c 	ctx->keylen = keylen;
ctx              2117 drivers/crypto/s5p-sss.c 	struct s5p_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              2119 drivers/crypto/s5p-sss.c 	ctx->dev = s5p_dev;
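
The s5p AES half of the file ends with the usual software-side setkey: validate the length, cache key and length in the tfm context, and defer programming the engine until the request actually runs (s5p_set_aes above). A sketch of that caching step:

#include <crypto/aes.h>
#include <linux/errno.h>
#include <linux/string.h>

struct demo_aes_ctx { u8 aes_key[AES_MAX_KEY_SIZE]; int keylen; };

static int demo_setkey(struct demo_aes_ctx *ctx, const u8 *key,
		       unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->aes_key, key, keylen);
	ctx->keylen = keylen;
	return 0;
}
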
ctx               201 drivers/crypto/sahara.c 	struct sahara_ctx	*ctx;
ctx               443 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = dev->ctx;
ctx               450 drivers/crypto/sahara.c 	if (ctx->flags & FLAGS_NEW_KEY) {
ctx               451 drivers/crypto/sahara.c 		memcpy(dev->key_base, ctx->key, ctx->keylen);
ctx               452 drivers/crypto/sahara.c 		ctx->flags &= ~FLAGS_NEW_KEY;
ctx               461 drivers/crypto/sahara.c 		dev->hw_desc[idx]->len2 = ctx->keylen;
ctx               553 drivers/crypto/sahara.c 	struct sahara_ctx *ctx;
ctx               569 drivers/crypto/sahara.c 	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
ctx               577 drivers/crypto/sahara.c 	dev->ctx = ctx;
ctx               603 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               606 drivers/crypto/sahara.c 	ctx->keylen = keylen;
ctx               610 drivers/crypto/sahara.c 		memcpy(ctx->key, key, keylen);
ctx               611 drivers/crypto/sahara.c 		ctx->flags |= FLAGS_NEW_KEY;
ctx               621 drivers/crypto/sahara.c 	crypto_sync_skcipher_clear_flags(ctx->fallback, CRYPTO_TFM_REQ_MASK);
ctx               622 drivers/crypto/sahara.c 	crypto_sync_skcipher_set_flags(ctx->fallback, tfm->base.crt_flags &
ctx               625 drivers/crypto/sahara.c 	ret = crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
ctx               628 drivers/crypto/sahara.c 	tfm->base.crt_flags |= crypto_sync_skcipher_get_flags(ctx->fallback) &
ctx               661 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
ctx               665 drivers/crypto/sahara.c 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
ctx               666 drivers/crypto/sahara.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               668 drivers/crypto/sahara.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               683 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
ctx               687 drivers/crypto/sahara.c 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
ctx               688 drivers/crypto/sahara.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               690 drivers/crypto/sahara.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               705 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
ctx               709 drivers/crypto/sahara.c 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
ctx               710 drivers/crypto/sahara.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               712 drivers/crypto/sahara.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               727 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_ablkcipher_ctx(
ctx               731 drivers/crypto/sahara.c 	if (unlikely(ctx->keylen != AES_KEYSIZE_128)) {
ctx               732 drivers/crypto/sahara.c 		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
ctx               734 drivers/crypto/sahara.c 		skcipher_request_set_sync_tfm(subreq, ctx->fallback);
ctx               750 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               752 drivers/crypto/sahara.c 	ctx->fallback = crypto_alloc_sync_skcipher(name, 0,
ctx               754 drivers/crypto/sahara.c 	if (IS_ERR(ctx->fallback)) {
ctx               756 drivers/crypto/sahara.c 		return PTR_ERR(ctx->fallback);
ctx               766 drivers/crypto/sahara.c 	struct sahara_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               768 drivers/crypto/sahara.c 	crypto_free_sync_skcipher(ctx->fallback);
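
sahara.c only accelerates AES-128; every other key length is bounced to a sync-skcipher fallback, as the four near-identical blocks above show. A sketch of one such entry point, recast with the skcipher request type for brevity:

#include <crypto/aes.h>
#include <crypto/skcipher.h>

static int demo_encrypt(struct skcipher_request *req,
			struct crypto_sync_skcipher *fallback,
			unsigned int keylen)
{
	if (unlikely(keylen != AES_KEYSIZE_128)) {
		int err;
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, fallback);

		skcipher_request_set_sync_tfm(subreq, fallback);
		skcipher_request_set_callback(subreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		err = crypto_skcipher_encrypt(subreq);
		skcipher_request_zero(subreq);
		return err;
	}

	return -EINPROGRESS;	/* hardware path queues the request */
}
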
ctx                99 drivers/crypto/stm32/stm32-crc32.c 	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               105 drivers/crypto/stm32/stm32-crc32.c 		ctx->crc = crc;
ctx               110 drivers/crypto/stm32/stm32-crc32.c 	pm_runtime_get_sync(ctx->crc->dev);
ctx               113 drivers/crypto/stm32/stm32-crc32.c 	writel_relaxed(bitrev32(mctx->key), ctx->crc->regs + CRC_INIT);
ctx               114 drivers/crypto/stm32/stm32-crc32.c 	writel_relaxed(bitrev32(mctx->poly), ctx->crc->regs + CRC_POL);
ctx               115 drivers/crypto/stm32/stm32-crc32.c 	writel_relaxed(CRC_CR_RESET | CRC_CR_REVERSE, ctx->crc->regs + CRC_CR);
ctx               118 drivers/crypto/stm32/stm32-crc32.c 	ctx->partial = readl_relaxed(ctx->crc->regs + CRC_DR);
ctx               119 drivers/crypto/stm32/stm32-crc32.c 	ctx->crc->nb_pending_bytes = 0;
ctx               121 drivers/crypto/stm32/stm32-crc32.c 	pm_runtime_mark_last_busy(ctx->crc->dev);
ctx               122 drivers/crypto/stm32/stm32-crc32.c 	pm_runtime_put_autosuspend(ctx->crc->dev);
ctx               130 drivers/crypto/stm32/stm32-crc32.c 	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               131 drivers/crypto/stm32/stm32-crc32.c 	struct stm32_crc *crc = ctx->crc;
ctx               158 drivers/crypto/stm32/stm32-crc32.c 	ctx->partial = readl_relaxed(crc->regs + CRC_DR);
ctx               184 drivers/crypto/stm32/stm32-crc32.c 	struct stm32_crc_desc_ctx *ctx = shash_desc_ctx(desc);
ctx               189 drivers/crypto/stm32/stm32-crc32.c 			   ~ctx->partial : ctx->partial, out);
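
stm32-crc32.c programs the seed and polynomial bit-reversed (the engine's REVERSE mode expects them that way), resets the unit, and snapshots DR as the initial partial value. A sketch with placeholder offsets and control bits standing in for the real CRC_* definitions:

#include <linux/bitrev.h>
#include <linux/bits.h>
#include <linux/io.h>

#define DEMO_CRC_DR	0x00	/* placeholder register offsets */
#define DEMO_CRC_CR	0x08
#define DEMO_CRC_INIT	0x10
#define DEMO_CRC_POL	0x14
#define DEMO_CR_RESET	BIT(0)	/* placeholder control bits */
#define DEMO_CR_REVERSE	BIT(5)

static u32 demo_crc_begin(void __iomem *regs, u32 key, u32 poly)
{
	writel_relaxed(bitrev32(key), regs + DEMO_CRC_INIT);
	writel_relaxed(bitrev32(poly), regs + DEMO_CRC_POL);
	writel_relaxed(DEMO_CR_RESET | DEMO_CR_REVERSE, regs + DEMO_CRC_CR);

	/* DR right after reset yields the initial partial CRC */
	return readl_relaxed(regs + DEMO_CRC_DR);
}
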
ctx               136 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx   *ctx;
ctx               265 drivers/crypto/stm32/stm32-cryp.c static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
ctx               270 drivers/crypto/stm32/stm32-cryp.c 	if (!ctx->cryp) {
ctx               275 drivers/crypto/stm32/stm32-cryp.c 		ctx->cryp = cryp;
ctx               277 drivers/crypto/stm32/stm32-cryp.c 		cryp = ctx->cryp;
ctx               419 drivers/crypto/stm32/stm32-cryp.c 		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
ctx               420 drivers/crypto/stm32/stm32-cryp.c 		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
ctx               423 drivers/crypto/stm32/stm32-cryp.c 		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
ctx               425 drivers/crypto/stm32/stm32-cryp.c 					 cpu_to_be32(c->ctx->key[i - 1]));
ctx               551 drivers/crypto/stm32/stm32-cryp.c 	switch (cryp->ctx->keylen) {
ctx               673 drivers/crypto/stm32/stm32-cryp.c 	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
ctx               690 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               694 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
ctx               695 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
ctx               696 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.unprepare_request = NULL;
ctx               706 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
ctx               710 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
ctx               711 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
ctx               712 drivers/crypto/stm32/stm32-cryp.c 	ctx->enginectx.op.unprepare_request = NULL;
ctx               719 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
ctx               722 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
ctx               734 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx               736 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
ctx               749 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               751 drivers/crypto/stm32/stm32-cryp.c 	memcpy(ctx->key, key, keylen);
ctx               752 drivers/crypto/stm32/stm32-cryp.c 	ctx->keylen = keylen;
ctx               784 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
ctx               790 drivers/crypto/stm32/stm32-cryp.c 	memcpy(ctx->key, key, keylen);
ctx               791 drivers/crypto/stm32/stm32-cryp.c 	ctx->keylen = keylen;
ctx               914 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx;
ctx               922 drivers/crypto/stm32/stm32-cryp.c 	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
ctx               925 drivers/crypto/stm32/stm32-cryp.c 	cryp = ctx->cryp;
ctx               933 drivers/crypto/stm32/stm32-cryp.c 	ctx->cryp = cryp;
ctx               937 drivers/crypto/stm32/stm32-cryp.c 	cryp->ctx = ctx;
ctx              1031 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
ctx              1033 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp *cryp = ctx->cryp;
ctx              1053 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
ctx              1054 drivers/crypto/stm32/stm32-cryp.c 	struct stm32_cryp *cryp = ctx->cryp;
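
stm32_cryp_find_dev() implements the usual find-or-reuse binding: the first request from a context grabs any registered device off a global list and caches it; later requests reuse the cached pointer. A self-contained sketch of the idiom:

#include <linux/list.h>
#include <linux/spinlock.h>

struct demo_dev { struct list_head list; };
struct demo_ctx { struct demo_dev *cryp; };

static LIST_HEAD(demo_dev_list);
static DEFINE_SPINLOCK(demo_list_lock);

static struct demo_dev *demo_find_dev(struct demo_ctx *ctx)
{
	struct demo_dev *cryp = NULL, *tmp;

	spin_lock_bh(&demo_list_lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &demo_dev_list, list) {
			cryp = tmp;
			break;	/* first registered device wins */
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}
	spin_unlock_bh(&demo_list_lock);

	return cryp;
}
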
ctx               226 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               228 drivers/crypto/stm32/stm32-hash.c 	int keylen = ctx->keylen;
ctx               229 drivers/crypto/stm32/stm32-hash.c 	void *key = ctx->key;
ctx               254 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               281 drivers/crypto/stm32/stm32-hash.c 			if (ctx->keylen > HASH_LONG_KEY)
ctx               480 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               483 drivers/crypto/stm32/stm32-hash.c 	if (ctx->keylen < HASH_DMA_THRESHOLD || (hdev->dma_mode == 1)) {
ctx               489 drivers/crypto/stm32/stm32-hash.c 			sg_init_one(&rctx->sg_key, ctx->key,
ctx               490 drivers/crypto/stm32/stm32-hash.c 				    ALIGN(ctx->keylen, sizeof(u32)));
ctx               499 drivers/crypto/stm32/stm32-hash.c 		err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);
ctx               633 drivers/crypto/stm32/stm32-hash.c static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
ctx               638 drivers/crypto/stm32/stm32-hash.c 	if (!ctx->hdev) {
ctx               643 drivers/crypto/stm32/stm32-hash.c 		ctx->hdev = hdev;
ctx               645 drivers/crypto/stm32/stm32-hash.c 		hdev = ctx->hdev;
ctx               656 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               657 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               682 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               684 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               716 drivers/crypto/stm32/stm32-hash.c 	if (ctx->flags & HASH_FLAGS_HMAC)
ctx               839 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               840 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               860 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               861 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               887 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
ctx               888 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = ctx->hdev;
ctx               926 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               927 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               957 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               958 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx               990 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
ctx               991 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
ctx              1022 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1025 drivers/crypto/stm32/stm32-hash.c 		memcpy(ctx->key, key, keylen);
ctx              1026 drivers/crypto/stm32/stm32-hash.c 		ctx->keylen = keylen;
ctx              1037 drivers/crypto/stm32/stm32-hash.c 	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1042 drivers/crypto/stm32/stm32-hash.c 	ctx->keylen = 0;
ctx              1045 drivers/crypto/stm32/stm32-hash.c 		ctx->flags |= HASH_FLAGS_HMAC;
ctx              1047 drivers/crypto/stm32/stm32-hash.c 	ctx->enginectx.op.do_one_request = stm32_hash_one_request;
ctx              1048 drivers/crypto/stm32/stm32-hash.c 	ctx->enginectx.op.prepare_request = stm32_hash_prepare_req;
ctx              1049 drivers/crypto/stm32/stm32-hash.c 	ctx->enginectx.op.unprepare_request = NULL;
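
Both stm32 drivers hook into crypto_engine the same way: the tfm-init callback fills the enginectx ops so the engine's queue can prepare and run requests later. A sketch of that wiring, with the two callbacks left as bare prototypes:

#include <crypto/engine.h>

struct demo_ctx { struct crypto_engine_ctx enginectx; };

static int demo_one_request(struct crypto_engine *engine, void *areq);
static int demo_prepare_req(struct crypto_engine *engine, void *areq);

static void demo_cra_init(struct demo_ctx *ctx)
{
	ctx->enginectx.op.do_one_request = demo_one_request;
	ctx->enginectx.op.prepare_request = demo_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;
}
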
ctx                21 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
ctx                22 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	u32 mode = ctx->mode;
ctx               124 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
ctx               133 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	if (ctx->mode & SS_DECRYPTION)
ctx               152 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
ctx               155 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	u32 mode = ctx->mode;
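
sun4i-ss keeps the request direction in the per-request context: the encrypt/decrypt entry points only set or clear a decryption bit in the mode word, and a shared worker branches on it later. A sketch with a placeholder bit standing in for SS_DECRYPTION:

#include <crypto/internal/skcipher.h>
#include <linux/bits.h>

#define DEMO_DECRYPTION	BIT(0)	/* placeholder for SS_DECRYPTION */

struct demo_req_ctx { u32 mode; };

static int demo_run(struct skcipher_request *areq);	/* shared worker */

static int demo_decrypt(struct skcipher_request *areq, u32 op)
{
	struct demo_req_ctx *rctx = skcipher_request_ctx(areq);

	rctx->mode = op | DEMO_DECRYPTION;
	return demo_run(areq);	/* worker inspects rctx->mode */
}
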
ctx               891 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
ctx               892 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx               901 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx               902 drivers/crypto/talitos.c 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
ctx               904 drivers/crypto/talitos.c 	memcpy(ctx->key, keys.authkey, keys.authkeylen);
ctx               905 drivers/crypto/talitos.c 	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
ctx               907 drivers/crypto/talitos.c 	ctx->keylen = keys.authkeylen + keys.enckeylen;
ctx               908 drivers/crypto/talitos.c 	ctx->enckeylen = keys.enckeylen;
ctx               909 drivers/crypto/talitos.c 	ctx->authkeylen = keys.authkeylen;
ctx               910 drivers/crypto/talitos.c 	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
ctx               925 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
ctx               926 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx               942 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx               943 drivers/crypto/talitos.c 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
ctx               945 drivers/crypto/talitos.c 	memcpy(ctx->key, keys.authkey, keys.authkeylen);
ctx               946 drivers/crypto/talitos.c 	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
ctx               948 drivers/crypto/talitos.c 	ctx->keylen = keys.authkeylen + keys.enckeylen;
ctx               949 drivers/crypto/talitos.c 	ctx->enckeylen = keys.enckeylen;
ctx               950 drivers/crypto/talitos.c 	ctx->authkeylen = keys.authkeylen;
ctx               951 drivers/crypto/talitos.c 	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
ctx               996 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
ctx              1018 drivers/crypto/talitos.c 		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
ctx              1203 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
ctx              1204 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              1220 drivers/crypto/talitos.c 	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
ctx              1244 drivers/crypto/talitos.c 	to_talitos_ptr(ckey_ptr, ctx->dma_key  + ctx->authkeylen,
ctx              1245 drivers/crypto/talitos.c 		       ctx->enckeylen, is_sec1);
ctx              1302 drivers/crypto/talitos.c 		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
ctx              1310 drivers/crypto/talitos.c 	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
ctx              1424 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
ctx              1428 drivers/crypto/talitos.c 	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
ctx              1437 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
ctx              1446 drivers/crypto/talitos.c 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
ctx              1455 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
ctx              1456 drivers/crypto/talitos.c 	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
ctx              1471 drivers/crypto/talitos.c 		edesc->desc.hdr = ctx->desc_hdr_template |
ctx              1482 drivers/crypto/talitos.c 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
ctx              1496 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1497 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              1499 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx              1500 drivers/crypto/talitos.c 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
ctx              1502 drivers/crypto/talitos.c 	memcpy(&ctx->key, key, keylen);
ctx              1503 drivers/crypto/talitos.c 	ctx->keylen = keylen;
ctx              1505 drivers/crypto/talitos.c 	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
ctx              1556 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1563 drivers/crypto/talitos.c 	memcpy(areq->info, ctx->iv, ivsize);
ctx              1577 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1578 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              1593 drivers/crypto/talitos.c 	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
ctx              1624 drivers/crypto/talitos.c 	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
ctx              1633 drivers/crypto/talitos.c 	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
ctx              1645 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1648 drivers/crypto/talitos.c 	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
ctx              1656 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1673 drivers/crypto/talitos.c 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
ctx              1681 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1697 drivers/crypto/talitos.c 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
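
The talitos ablkcipher encrypt/decrypt pair differs only in the header word: both start from the desc_hdr_template cached at init and OR in a direction bit. A sketch with placeholder bit values in place of DESC_HDR_MODE0_ENCRYPT and DESC_HDR_DIR_INBOUND:

#include <linux/bits.h>
#include <linux/types.h>

#define DEMO_HDR_ENCRYPT	BIT(20)	/* placeholder bit positions */
#define DEMO_HDR_INBOUND	BIT(1)

static u32 demo_desc_hdr(u32 hdr_template, bool encrypt)
{
	return hdr_template |
	       (encrypt ? DEMO_HDR_ENCRYPT : DEMO_HDR_INBOUND);
}
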
ctx              1767 drivers/crypto/talitos.c static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
ctx              1780 drivers/crypto/talitos.c 	map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
ctx              1791 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1793 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              1815 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx              1816 drivers/crypto/talitos.c 		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
ctx              1858 drivers/crypto/talitos.c 		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
ctx              1902 drivers/crypto/talitos.c 	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
ctx              1914 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1916 drivers/crypto/talitos.c 	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
ctx              1922 drivers/crypto/talitos.c 	return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
ctx              1929 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1930 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              1982 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1991 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              2000 drivers/crypto/talitos.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              2041 drivers/crypto/talitos.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              2055 drivers/crypto/talitos.c 			dev_err(ctx->dev, "Invalid number of src SG.\n");
ctx              2070 drivers/crypto/talitos.c 	edesc->desc.hdr = ctx->desc_hdr_template;
ctx              2085 drivers/crypto/talitos.c 	if (ctx->keylen && (req_ctx->first || req_ctx->last))
ctx              2134 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              2135 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              2158 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              2159 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              2187 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              2201 drivers/crypto/talitos.c 	ctx->keylen = 0;
ctx              2218 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
ctx              2219 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              2228 drivers/crypto/talitos.c 		memcpy(ctx->key, key, keysize);
ctx              2239 drivers/crypto/talitos.c 		memcpy(ctx->key, hash, digestsize);
ctx              2242 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx              2243 drivers/crypto/talitos.c 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
ctx              2245 drivers/crypto/talitos.c 	ctx->keylen = keysize;
ctx              2246 drivers/crypto/talitos.c 	ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
ctx              3013 drivers/crypto/talitos.c static int talitos_init_common(struct talitos_ctx *ctx,
ctx              3019 drivers/crypto/talitos.c 	ctx->dev = talitos_alg->dev;
ctx              3022 drivers/crypto/talitos.c 	priv = dev_get_drvdata(ctx->dev);
ctx              3023 drivers/crypto/talitos.c 	ctx->ch = atomic_inc_return(&priv->last_chan) &
ctx              3027 drivers/crypto/talitos.c 	ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
ctx              3030 drivers/crypto/talitos.c 	ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
ctx              3039 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              3049 drivers/crypto/talitos.c 	return talitos_init_common(ctx, talitos_alg);
ctx              3056 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
ctx              3061 drivers/crypto/talitos.c 	return talitos_init_common(ctx, talitos_alg);
ctx              3066 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              3070 drivers/crypto/talitos.c 	ctx->keylen = 0;
ctx              3079 drivers/crypto/talitos.c 	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              3080 drivers/crypto/talitos.c 	struct device *dev = ctx->dev;
ctx              3082 drivers/crypto/talitos.c 	if (ctx->keylen)
ctx              3083 drivers/crypto/talitos.c 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
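
All talitos setkey paths above follow one convention for the DMA-visible key: unmap whatever was mapped before, copy the new material into the context buffer, and map it once so descriptors can point at a stable bus address. A sketch of that sequence:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/string.h>

static int demo_cache_key(struct device *dev, u8 *key_buf,
			  dma_addr_t *dma_key, unsigned int *cur_len,
			  const u8 *key, unsigned int keylen)
{
	if (*cur_len)
		dma_unmap_single(dev, *dma_key, *cur_len, DMA_TO_DEVICE);

	memcpy(key_buf, key, keylen);
	*cur_len = keylen;
	*dma_key = dma_map_single(dev, key_buf, keylen, DMA_TO_DEVICE);

	return dma_mapping_error(dev, *dma_key) ? -ENOMEM : 0;
}
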
ctx               290 drivers/crypto/ux500/cryp/cryp.c 			      struct cryp_device_context *ctx,
ctx               309 drivers/crypto/ux500/cryp/cryp.c 		ctx->din = readl_relaxed(&src_reg->din);
ctx               311 drivers/crypto/ux500/cryp/cryp.c 	ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;
ctx               315 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
ctx               316 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
ctx               320 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
ctx               321 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
ctx               325 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
ctx               326 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
ctx               330 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
ctx               331 drivers/crypto/ux500/cryp/cryp.c 		ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
ctx               335 drivers/crypto/ux500/cryp/cryp.c 	algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
ctx               339 drivers/crypto/ux500/cryp/cryp.c 		ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
ctx               340 drivers/crypto/ux500/cryp/cryp.c 		ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
ctx               341 drivers/crypto/ux500/cryp/cryp.c 		ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
ctx               342 drivers/crypto/ux500/cryp/cryp.c 		ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
ctx               353 drivers/crypto/ux500/cryp/cryp.c 				 struct cryp_device_context *ctx)
ctx               365 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_4_l, &reg->key_4_l);
ctx               366 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_4_r, &reg->key_4_r);
ctx               370 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_3_l, &reg->key_3_l);
ctx               371 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_3_r, &reg->key_3_r);
ctx               375 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_2_l, &reg->key_2_l);
ctx               376 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_2_r, &reg->key_2_r);
ctx               380 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_1_l, &reg->key_1_l);
ctx               381 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->key_1_r, &reg->key_1_r);
ctx               388 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->init_vect_0_l, &reg->init_vect_0_l);
ctx               389 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->init_vect_0_r, &reg->init_vect_0_r);
ctx               390 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->init_vect_1_l, &reg->init_vect_1_l);
ctx               391 drivers/crypto/ux500/cryp/cryp.c 		writel_relaxed(ctx->init_vect_1_r, &reg->init_vect_1_r);
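
The ux500 save/restore pair above is deliberately symmetric: every key and IV register read into the software context on save has a matching relaxed write on restore, so a session can migrate off the hardware between requests. A sketch of the symmetry for one key pair (the real code repeats this per key size and for both IV pairs; the register layout here is hypothetical):

#include <linux/io.h>
#include <linux/types.h>

struct demo_regs  { u32 key_1_l, key_1_r; };
struct demo_saved { u32 key_1_l, key_1_r; };

static void demo_save(struct demo_regs __iomem *reg, struct demo_saved *ctx)
{
	ctx->key_1_l = readl_relaxed(&reg->key_1_l);
	ctx->key_1_r = readl_relaxed(&reg->key_1_r);
}

static void demo_restore(struct demo_regs __iomem *reg,
			 const struct demo_saved *ctx)
{
	writel_relaxed(ctx->key_1_l, &reg->key_1_l);
	writel_relaxed(ctx->key_1_r, &reg->key_1_r);
}
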
ctx               286 drivers/crypto/ux500/cryp/cryp.h 			      struct cryp_device_context *ctx,
ctx               290 drivers/crypto/ux500/cryp/cryp.h 				 struct cryp_device_context *ctx);
ctx               179 drivers/crypto/ux500/cryp/cryp_core.c static void add_session_id(struct cryp_ctx *ctx)
ctx               188 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->session_id = atomic_read(&session_id);
ctx               193 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx;
ctx               205 drivers/crypto/ux500/cryp/cryp_core.c 	ctx = device_data->current_ctx;
ctx               207 drivers/crypto/ux500/cryp/cryp_core.c 	if (ctx == NULL) {
ctx               208 drivers/crypto/ux500/cryp/cryp_core.c 		BUG_ON(!ctx);
ctx               212 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
ctx               218 drivers/crypto/ux500/cryp/cryp_core.c 		if (ctx->outlen / ctx->blocksize > 0) {
ctx               219 drivers/crypto/ux500/cryp/cryp_core.c 			count = ctx->blocksize / 4;
ctx               221 drivers/crypto/ux500/cryp/cryp_core.c 			readsl(&device_data->base->dout, ctx->outdata, count);
ctx               222 drivers/crypto/ux500/cryp/cryp_core.c 			ctx->outdata += count;
ctx               223 drivers/crypto/ux500/cryp/cryp_core.c 			ctx->outlen -= count;
ctx               225 drivers/crypto/ux500/cryp/cryp_core.c 			if (ctx->outlen == 0) {
ctx               232 drivers/crypto/ux500/cryp/cryp_core.c 		if (ctx->datalen / ctx->blocksize > 0) {
ctx               233 drivers/crypto/ux500/cryp/cryp_core.c 			count = ctx->blocksize / 4;
ctx               235 drivers/crypto/ux500/cryp/cryp_core.c 			writesl(&device_data->base->din, ctx->indata, count);
ctx               237 drivers/crypto/ux500/cryp/cryp_core.c 			ctx->indata += count;
ctx               238 drivers/crypto/ux500/cryp/cryp_core.c 			ctx->datalen -= count;
ctx               240 drivers/crypto/ux500/cryp/cryp_core.c 			if (ctx->datalen == 0)
ctx               244 drivers/crypto/ux500/cryp/cryp_core.c 			if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
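
The ux500 interrupt handler services the FIFOs one block at a time: blocksize/4 words out of dout, then blocksize/4 words into din, with the counts subtracted from the running lengths. A sketch of the drain half, using the ARM string-I/O helper and mirroring the listing's bookkeeping:

#include <linux/io.h>
#include <linux/types.h>

static void demo_drain_block(void __iomem *dout, u32 **outdata,
			     int *outlen, int blocksize)
{
	int count = blocksize / 4;	/* 32-bit words per block */

	readsl(dout, *outdata, count);
	*outdata += count;
	*outlen  -= count;
}
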
ctx               281 drivers/crypto/ux500/cryp/cryp_core.c static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
ctx               285 drivers/crypto/ux500/cryp/cryp_core.c 	int num_of_regs = ctx->blocksize / 8;
ctx               297 drivers/crypto/ux500/cryp/cryp_core.c 			__func__, ctx->blocksize);
ctx               301 drivers/crypto/ux500/cryp/cryp_core.c 	for (i = 0; i < ctx->blocksize / 4; i++)
ctx               302 drivers/crypto/ux500/cryp/cryp_core.c 		iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);
ctx               336 drivers/crypto/ux500/cryp/cryp_core.c static int cfg_keys(struct cryp_ctx *ctx)
ctx               339 drivers/crypto/ux500/cryp/cryp_core.c 	int num_of_regs = ctx->keylen / 8;
ctx               343 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s]", __func__);
ctx               345 drivers/crypto/ux500/cryp/cryp_core.c 	if (mode_is_aes(ctx->config.algomode)) {
ctx               346 drivers/crypto/ux500/cryp/cryp_core.c 		swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
ctx               348 drivers/crypto/ux500/cryp/cryp_core.c 						   ctx->keylen);
ctx               350 drivers/crypto/ux500/cryp/cryp_core.c 		for (i = 0; i < ctx->keylen / 4; i++)
ctx               351 drivers/crypto/ux500/cryp/cryp_core.c 			swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
ctx               355 drivers/crypto/ux500/cryp/cryp_core.c 		cryp_error = set_key(ctx->device,
ctx               361 drivers/crypto/ux500/cryp/cryp_core.c 			dev_err(ctx->device->dev, "[%s]: set_key() failed!",
ctx               369 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_setup_context(struct cryp_ctx *ctx,
ctx               387 drivers/crypto/ux500/cryp/cryp_core.c 	if (ctx->updated == 0) {
ctx               389 drivers/crypto/ux500/cryp/cryp_core.c 		if (cfg_keys(ctx) != 0) {
ctx               390 drivers/crypto/ux500/cryp/cryp_core.c 			dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
ctx               395 drivers/crypto/ux500/cryp/cryp_core.c 		if (ctx->iv &&
ctx               396 drivers/crypto/ux500/cryp/cryp_core.c 		    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
ctx               397 drivers/crypto/ux500/cryp/cryp_core.c 		    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
ctx               398 drivers/crypto/ux500/cryp/cryp_core.c 		    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
ctx               399 drivers/crypto/ux500/cryp/cryp_core.c 			if (cfg_ivs(device_data, ctx) != 0)
ctx               403 drivers/crypto/ux500/cryp/cryp_core.c 		cryp_set_configuration(device_data, &ctx->config,
ctx               405 drivers/crypto/ux500/cryp/cryp_core.c 		add_session_id(ctx);
ctx               406 drivers/crypto/ux500/cryp/cryp_core.c 	} else if (ctx->updated == 1 &&
ctx               407 drivers/crypto/ux500/cryp/cryp_core.c 		   ctx->session_id != atomic_read(&session_id)) {
ctx               409 drivers/crypto/ux500/cryp/cryp_core.c 		cryp_restore_device_context(device_data, &ctx->dev_ctx);
ctx               411 drivers/crypto/ux500/cryp/cryp_core.c 		add_session_id(ctx);
ctx               412 drivers/crypto/ux500/cryp/cryp_core.c 		control_register = ctx->dev_ctx.cr;
ctx               414 drivers/crypto/ux500/cryp/cryp_core.c 		control_register = ctx->dev_ctx.cr;
ctx               423 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_get_device_data(struct cryp_ctx *ctx,
ctx               449 drivers/crypto/ux500/cryp/cryp_core.c 			local_device_data->current_ctx = ctx;
ctx               450 drivers/crypto/ux500/cryp/cryp_core.c 			ctx->device = local_device_data;
ctx               514 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = (struct cryp_ctx *) data;
ctx               515 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);
ctx               517 drivers/crypto/ux500/cryp/cryp_core.c 	complete(&ctx->device->dma.cryp_dma_complete);
ctx               520 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
ctx               529 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);
ctx               532 drivers/crypto/ux500/cryp/cryp_core.c 		dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
ctx               539 drivers/crypto/ux500/cryp/cryp_core.c 		channel = ctx->device->dma.chan_mem2cryp;
ctx               540 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_src = sg;
ctx               541 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
ctx               542 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.sg_src,
ctx               543 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.nents_src,
ctx               546 drivers/crypto/ux500/cryp/cryp_core.c 		if (!ctx->device->dma.sg_src_len) {
ctx               547 drivers/crypto/ux500/cryp/cryp_core.c 			dev_dbg(ctx->device->dev,
ctx               553 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
ctx               557 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_src,
ctx               558 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_src_len,
ctx               563 drivers/crypto/ux500/cryp/cryp_core.c 		channel = ctx->device->dma.chan_cryp2mem;
ctx               564 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_dst = sg;
ctx               565 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
ctx               566 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.sg_dst,
ctx               567 drivers/crypto/ux500/cryp/cryp_core.c 						 ctx->device->dma.nents_dst,
ctx               570 drivers/crypto/ux500/cryp/cryp_core.c 		if (!ctx->device->dma.sg_dst_len) {
ctx               571 drivers/crypto/ux500/cryp/cryp_core.c 			dev_dbg(ctx->device->dev,
ctx               577 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
ctx               581 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_dst,
ctx               582 drivers/crypto/ux500/cryp/cryp_core.c 				ctx->device->dma.sg_dst_len,
ctx               588 drivers/crypto/ux500/cryp/cryp_core.c 		desc->callback_param = ctx;
ctx               592 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
ctx               599 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
ctx               609 drivers/crypto/ux500/cryp/cryp_core.c static void cryp_dma_done(struct cryp_ctx *ctx)
ctx               613 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);
ctx               615 drivers/crypto/ux500/cryp/cryp_core.c 	chan = ctx->device->dma.chan_mem2cryp;
ctx               617 drivers/crypto/ux500/cryp/cryp_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
ctx               618 drivers/crypto/ux500/cryp/cryp_core.c 		     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
ctx               620 drivers/crypto/ux500/cryp/cryp_core.c 	chan = ctx->device->dma.chan_cryp2mem;
ctx               622 drivers/crypto/ux500/cryp/cryp_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
ctx               623 drivers/crypto/ux500/cryp/cryp_core.c 		     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
ctx               626 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
ctx               629 drivers/crypto/ux500/cryp/cryp_core.c 	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
ctx               630 drivers/crypto/ux500/cryp/cryp_core.c 	dev_dbg(ctx->device->dev, "[%s]: ", __func__);
ctx               633 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
ctx               641 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
ctx               643 drivers/crypto/ux500/cryp/cryp_core.c 	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
ctx               645 drivers/crypto/ux500/cryp/cryp_core.c 		dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
ctx               653 drivers/crypto/ux500/cryp/cryp_core.c static void cryp_polling_mode(struct cryp_ctx *ctx,
ctx               656 drivers/crypto/ux500/cryp/cryp_core.c 	int len = ctx->blocksize / BYTES_PER_WORD;
ctx               657 drivers/crypto/ux500/cryp/cryp_core.c 	int remaining_length = ctx->datalen;
ctx               658 drivers/crypto/ux500/cryp/cryp_core.c 	u32 *indata = (u32 *)ctx->indata;
ctx               659 drivers/crypto/ux500/cryp/cryp_core.c 	u32 *outdata = (u32 *)ctx->outdata;
ctx               752 drivers/crypto/ux500/cryp/cryp_core.c static int hw_crypt_noxts(struct cryp_ctx *ctx,
ctx               757 drivers/crypto/ux500/cryp/cryp_core.c 	const u8 *indata = ctx->indata;
ctx               758 drivers/crypto/ux500/cryp/cryp_core.c 	u8 *outdata = ctx->outdata;
ctx               759 drivers/crypto/ux500/cryp/cryp_core.c 	u32 datalen = ctx->datalen;
ctx               764 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->outlen = ctx->datalen;
ctx               772 drivers/crypto/ux500/cryp/cryp_core.c 	ret = cryp_setup_context(ctx, device_data);
ctx               786 drivers/crypto/ux500/cryp/cryp_core.c 		while (ctx->outlen > 0)
ctx               798 drivers/crypto/ux500/cryp/cryp_core.c 		cryp_polling_mode(ctx, device_data);
ctx               800 drivers/crypto/ux500/cryp/cryp_core.c 		dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
ctx               806 drivers/crypto/ux500/cryp/cryp_core.c 	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
ctx               807 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->updated = 1;
ctx               810 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->indata = indata;
ctx               811 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->outdata = outdata;
ctx               812 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->datalen = datalen;
ctx               813 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->outlen = outlen;
ctx               834 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               843 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->datalen = areq->nbytes;
ctx               844 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->outlen = areq->nbytes;
ctx               846 drivers/crypto/ux500/cryp/cryp_core.c 	ret = cryp_get_device_data(ctx, &device_data);
ctx               850 drivers/crypto/ux500/cryp/cryp_core.c 	ret = cryp_setup_context(ctx, device_data);
ctx               855 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
ctx               856 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
ctx               861 drivers/crypto/ux500/cryp/cryp_core.c 	bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
ctx               862 drivers/crypto/ux500/cryp/cryp_core.c 	bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);
ctx               864 drivers/crypto/ux500/cryp/cryp_core.c 	wait_for_completion(&ctx->device->dma.cryp_dma_complete);
ctx               865 drivers/crypto/ux500/cryp/cryp_core.c 	cryp_dma_done(ctx);
ctx               867 drivers/crypto/ux500/cryp/cryp_core.c 	cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
ctx               868 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->updated = 1;
ctx               873 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device = NULL;
ctx               892 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               901 drivers/crypto/ux500/cryp/cryp_core.c 	ret = cryp_get_device_data(ctx, &device_data);
ctx               915 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->iv = walk.iv;
ctx               917 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->indata = phys_to_virt(src_paddr);
ctx               920 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->outdata = phys_to_virt(dst_paddr);
ctx               922 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->datalen = nbytes - (nbytes % ctx->blocksize);
ctx               924 drivers/crypto/ux500/cryp/cryp_core.c 		ret = hw_crypt_noxts(ctx, device_data);
ctx               928 drivers/crypto/ux500/cryp/cryp_core.c 		nbytes -= ctx->datalen;
ctx               939 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->device = NULL;
ctx               954 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               961 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->config.keysize = CRYP_KEY_SIZE_128;
ctx               965 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->config.keysize = CRYP_KEY_SIZE_192;
ctx               969 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->config.keysize = CRYP_KEY_SIZE_256;
ctx               978 drivers/crypto/ux500/cryp/cryp_core.c 	memcpy(ctx->key, key, keylen);
ctx               979 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->keylen = keylen;
ctx               981 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->updated = 0;
ctx               989 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx               998 drivers/crypto/ux500/cryp/cryp_core.c 	memcpy(ctx->key, key, keylen);
ctx               999 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->keylen = keylen;
ctx              1001 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->updated = 0;
ctx              1008 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1017 drivers/crypto/ux500/cryp/cryp_core.c 	memcpy(ctx->key, key, keylen);
ctx              1018 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->keylen = keylen;
ctx              1020 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->updated = 0;
ctx              1027 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1031 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;
ctx              1035 drivers/crypto/ux500/cryp/cryp_core.c 	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
ctx              1045 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
ctx              1049 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;
ctx              1052 drivers/crypto/ux500/cryp/cryp_core.c 	if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
ctx              1066 drivers/crypto/ux500/cryp/cryp_core.c 	struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1072 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->config.algomode = cryp_alg->algomode;
ctx              1073 drivers/crypto/ux500/cryp/cryp_core.c 	ctx->blocksize = crypto_tfm_alg_blocksize(tfm);
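
A pattern worth noting in the cryp_core.c lines above: the setkey paths only cache the key bytes and clear ctx->updated, and the hardware context is saved/restored lazily around the next request, after which updated is set again. Below is a minimal userspace sketch of that lazy-reprogram flag; the struct layout and the program_hw() helper are illustrative stand-ins, not the driver's own names.

#include <stdio.h>
#include <string.h>

struct cipher_ctx {
        unsigned char key[32];
        unsigned int keylen;
        int updated;            /* 0: hardware must be (re)programmed */
};

/* stand-in for loading key/config into the crypto block's registers */
static void program_hw(struct cipher_ctx *ctx)
{
        printf("programming hw with %u-byte key\n", ctx->keylen);
}

static void cipher_setkey(struct cipher_ctx *ctx,
                          const unsigned char *key, unsigned int keylen)
{
        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;
        ctx->updated = 0;       /* invalidate any previously loaded context */
}

static void cipher_crypt(struct cipher_ctx *ctx)
{
        if (!ctx->updated) {    /* lazy reprogram on first use after setkey */
                program_hw(ctx);
                ctx->updated = 1;
        }
        /* ... issue the actual transfer ... */
}

int main(void)
{
        struct cipher_ctx ctx = { .updated = 0 };

        cipher_setkey(&ctx, (const unsigned char *)"0123456789abcdef", 16);
        cipher_crypt(&ctx);     /* programs the hardware once */
        cipher_crypt(&ctx);     /* reuses the loaded context */
        return 0;
}
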
ctx               385 drivers/crypto/ux500/hash/hash_alg.h void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx);
ctx               133 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = data;
ctx               135 drivers/crypto/ux500/hash/hash_core.c 	complete(&ctx->device->dma.complete);
ctx               138 drivers/crypto/ux500/hash/hash_core.c static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
ctx               146 drivers/crypto/ux500/hash/hash_core.c 		dev_err(ctx->device->dev, "%s: Invalid DMA direction\n",
ctx               153 drivers/crypto/ux500/hash/hash_core.c 	channel = ctx->device->dma.chan_mem2hash;
ctx               154 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.sg = sg;
ctx               155 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.sg_len = dma_map_sg(channel->device->dev,
ctx               156 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.nents,
ctx               159 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->device->dma.sg_len) {
ctx               160 drivers/crypto/ux500/hash/hash_core.c 		dev_err(ctx->device->dev, "%s: Could not map the sg list (TO_DEVICE)\n",
ctx               165 drivers/crypto/ux500/hash/hash_core.c 	dev_dbg(ctx->device->dev, "%s: Setting up DMA for buffer (TO_DEVICE)\n",
ctx               168 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.sg_len,
ctx               171 drivers/crypto/ux500/hash/hash_core.c 		dev_err(ctx->device->dev,
ctx               177 drivers/crypto/ux500/hash/hash_core.c 	desc->callback_param = ctx;
ctx               185 drivers/crypto/ux500/hash/hash_core.c static void hash_dma_done(struct hash_ctx *ctx)
ctx               189 drivers/crypto/ux500/hash/hash_core.c 	chan = ctx->device->dma.chan_mem2hash;
ctx               191 drivers/crypto/ux500/hash/hash_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg,
ctx               192 drivers/crypto/ux500/hash/hash_core.c 		     ctx->device->dma.sg_len, DMA_TO_DEVICE);
ctx               195 drivers/crypto/ux500/hash/hash_core.c static int hash_dma_write(struct hash_ctx *ctx,
ctx               198 drivers/crypto/ux500/hash/hash_core.c 	int error = hash_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
ctx               200 drivers/crypto/ux500/hash/hash_core.c 		dev_dbg(ctx->device->dev,
ctx               221 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = device_data->current_ctx;
ctx               228 drivers/crypto/ux500/hash/hash_core.c 	if (HASH_OPER_MODE_HASH == ctx->config.oper_mode) {
ctx               229 drivers/crypto/ux500/hash/hash_core.c 		if (HASH_ALGO_SHA1 == ctx->config.algorithm) {
ctx               235 drivers/crypto/ux500/hash/hash_core.c 				ctx->config.algorithm) {
ctx               246 drivers/crypto/ux500/hash/hash_core.c 	} else if (HASH_OPER_MODE_HMAC == ctx->config.oper_mode) {
ctx               247 drivers/crypto/ux500/hash/hash_core.c 		if (!ctx->keylen) {
ctx               248 drivers/crypto/ux500/hash/hash_core.c 			if (HASH_ALGO_SHA1 == ctx->config.algorithm) {
ctx               253 drivers/crypto/ux500/hash/hash_core.c 			} else if (HASH_ALGO_SHA256 == ctx->config.algorithm) {
ctx               365 drivers/crypto/ux500/hash/hash_core.c static int hash_get_device_data(struct hash_ctx *ctx,
ctx               389 drivers/crypto/ux500/hash/hash_core.c 			local_device_data->current_ctx = ctx;
ctx               390 drivers/crypto/ux500/hash/hash_core.c 			ctx->device = local_device_data;
ctx               471 drivers/crypto/ux500/hash/hash_core.c 			struct hash_ctx *ctx)
ctx               475 drivers/crypto/ux500/hash/hash_core.c 	ret = hash_setconfiguration(device_data, &ctx->config);
ctx               482 drivers/crypto/ux500/hash/hash_core.c 	hash_begin(device_data, ctx);
ctx               484 drivers/crypto/ux500/hash/hash_core.c 	if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC)
ctx               485 drivers/crypto/ux500/hash/hash_core.c 		hash_hw_write_key(device_data, ctx->key, ctx->keylen);
ctx               552 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               555 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->key)
ctx               556 drivers/crypto/ux500/hash/hash_core.c 		ctx->keylen = 0;
ctx               660 drivers/crypto/ux500/hash/hash_core.c static void hash_incrementlength(struct hash_req_ctx *ctx, u32 incr)
ctx               662 drivers/crypto/ux500/hash/hash_core.c 	ctx->state.length.low_word += incr;
ctx               665 drivers/crypto/ux500/hash/hash_core.c 	if (ctx->state.length.low_word < incr)
ctx               666 drivers/crypto/ux500/hash/hash_core.c 		ctx->state.length.high_word++;
ctx               742 drivers/crypto/ux500/hash/hash_core.c void hash_begin(struct hash_device_data *device_data, struct hash_ctx *ctx)
ctx               764 drivers/crypto/ux500/hash/hash_core.c 			     struct hash_ctx *ctx, struct hash_req_ctx *req_ctx,
ctx               793 drivers/crypto/ux500/hash/hash_core.c 				ret = init_hash_hw(device_data, ctx);
ctx               857 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               863 drivers/crypto/ux500/hash/hash_core.c 	ret = hash_get_device_data(ctx, &device_data);
ctx               868 drivers/crypto/ux500/hash/hash_core.c 		(unsigned long)ctx);
ctx               881 drivers/crypto/ux500/hash/hash_core.c 		ret = hash_setconfiguration(device_data, &ctx->config);
ctx               902 drivers/crypto/ux500/hash/hash_core.c 		if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC)
ctx               903 drivers/crypto/ux500/hash/hash_core.c 			hash_hw_write_key(device_data, ctx->key, ctx->keylen);
ctx               911 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.nents = hash_get_nents(req->src, req->nbytes, NULL);
ctx               912 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->device->dma.nents) {
ctx               915 drivers/crypto/ux500/hash/hash_core.c 		ret = ctx->device->dma.nents;
ctx               919 drivers/crypto/ux500/hash/hash_core.c 	bytes_written = hash_dma_write(ctx, req->src, req->nbytes);
ctx               927 drivers/crypto/ux500/hash/hash_core.c 	wait_for_completion(&ctx->device->dma.complete);
ctx               928 drivers/crypto/ux500/hash/hash_core.c 	hash_dma_done(ctx);
ctx               933 drivers/crypto/ux500/hash/hash_core.c 	if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC && ctx->key) {
ctx               934 drivers/crypto/ux500/hash/hash_core.c 		unsigned int keylen = ctx->keylen;
ctx               935 drivers/crypto/ux500/hash/hash_core.c 		u8 *key = ctx->key;
ctx               938 drivers/crypto/ux500/hash/hash_core.c 			__func__, ctx->keylen);
ctx               942 drivers/crypto/ux500/hash/hash_core.c 	hash_get_digest(device_data, digest, ctx->config.algorithm);
ctx               943 drivers/crypto/ux500/hash/hash_core.c 	memcpy(req->result, digest, ctx->digestsize);
ctx               951 drivers/crypto/ux500/hash/hash_core.c 	kfree(ctx->key);
ctx               964 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx               969 drivers/crypto/ux500/hash/hash_core.c 	ret = hash_get_device_data(ctx, &device_data);
ctx               974 drivers/crypto/ux500/hash/hash_core.c 		(unsigned long)ctx);
ctx               984 drivers/crypto/ux500/hash/hash_core.c 	} else if (req->nbytes == 0 && ctx->keylen == 0) {
ctx               994 drivers/crypto/ux500/hash/hash_core.c 		if (!ret && likely(zero_hash_size == ctx->digestsize) &&
ctx               996 drivers/crypto/ux500/hash/hash_core.c 			memcpy(req->result, &zero_hash[0], ctx->digestsize);
ctx              1006 drivers/crypto/ux500/hash/hash_core.c 				zero_hash_size == ctx->digestsize ?
ctx              1011 drivers/crypto/ux500/hash/hash_core.c 	} else if (req->nbytes == 0 && ctx->keylen > 0) {
ctx              1018 drivers/crypto/ux500/hash/hash_core.c 		ret = init_hash_hw(device_data, ctx);
ctx              1035 drivers/crypto/ux500/hash/hash_core.c 	if (ctx->config.oper_mode == HASH_OPER_MODE_HMAC && ctx->key) {
ctx              1036 drivers/crypto/ux500/hash/hash_core.c 		unsigned int keylen = ctx->keylen;
ctx              1037 drivers/crypto/ux500/hash/hash_core.c 		u8 *key = ctx->key;
ctx              1040 drivers/crypto/ux500/hash/hash_core.c 			__func__, ctx->keylen);
ctx              1044 drivers/crypto/ux500/hash/hash_core.c 	hash_get_digest(device_data, digest, ctx->config.algorithm);
ctx              1045 drivers/crypto/ux500/hash/hash_core.c 	memcpy(req->result, digest, ctx->digestsize);
ctx              1053 drivers/crypto/ux500/hash/hash_core.c 	kfree(ctx->key);
ctx              1072 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1092 drivers/crypto/ux500/hash/hash_core.c 	ret = hash_get_device_data(ctx, &device_data);
ctx              1099 drivers/crypto/ux500/hash/hash_core.c 		ret = hash_process_data(device_data, ctx, req_ctx, msg_length,
ctx              1337 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1342 drivers/crypto/ux500/hash/hash_core.c 	ctx->key = kmemdup(key, keylen, GFP_KERNEL);
ctx              1343 drivers/crypto/ux500/hash/hash_core.c 	if (!ctx->key) {
ctx              1348 drivers/crypto/ux500/hash/hash_core.c 	ctx->keylen = keylen;
ctx              1356 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1358 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.data_format = HASH_DATA_8_BITS;
ctx              1359 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.algorithm = HASH_ALGO_SHA1;
ctx              1360 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.oper_mode = HASH_OPER_MODE_HASH;
ctx              1361 drivers/crypto/ux500/hash/hash_core.c 	ctx->digestsize = SHA1_DIGEST_SIZE;
ctx              1369 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1371 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.data_format = HASH_DATA_8_BITS;
ctx              1372 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.algorithm = HASH_ALGO_SHA256;
ctx              1373 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.oper_mode = HASH_OPER_MODE_HASH;
ctx              1374 drivers/crypto/ux500/hash/hash_core.c 	ctx->digestsize = SHA256_DIGEST_SIZE;
ctx              1422 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1424 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.data_format	= HASH_DATA_8_BITS;
ctx              1425 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.algorithm	= HASH_ALGO_SHA1;
ctx              1426 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.oper_mode	= HASH_OPER_MODE_HMAC;
ctx              1427 drivers/crypto/ux500/hash/hash_core.c 	ctx->digestsize		= SHA1_DIGEST_SIZE;
ctx              1435 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_ahash_ctx(tfm);
ctx              1437 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.data_format	= HASH_DATA_8_BITS;
ctx              1438 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.algorithm	= HASH_ALGO_SHA256;
ctx              1439 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.oper_mode	= HASH_OPER_MODE_HMAC;
ctx              1440 drivers/crypto/ux500/hash/hash_core.c 	ctx->digestsize		= SHA256_DIGEST_SIZE;
ctx              1494 drivers/crypto/ux500/hash/hash_core.c 	struct hash_ctx *ctx = crypto_tfm_ctx(tfm);
ctx              1505 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.data_format = HASH_DATA_8_BITS;
ctx              1506 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.algorithm = hash_alg->conf.algorithm;
ctx              1507 drivers/crypto/ux500/hash/hash_core.c 	ctx->config.oper_mode = hash_alg->conf.oper_mode;
ctx              1509 drivers/crypto/ux500/hash/hash_core.c 	ctx->digestsize = hash_alg->hash.halg.digestsize;
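
The hash_incrementlength() lines above keep the running message length as two 32-bit words and detect carry out of the low word via unsigned wraparound: after low += incr, the sum is smaller than the addend if and only if it overflowed. A self-contained restatement:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* 64-bit running length held as two 32-bit words, as in the req ctx state */
struct hash_len {
        uint32_t low_word;
        uint32_t high_word;
};

static void hash_incrementlength(struct hash_len *len, uint32_t incr)
{
        len->low_word += incr;
        /* unsigned wraparound: sum < addend exactly when a carry occurred */
        if (len->low_word < incr)
                len->high_word++;
}

int main(void)
{
        struct hash_len len = { 0xFFFFFF00u, 0 };

        hash_incrementlength(&len, 0x200);      /* crosses the 2^32 boundary */
        printf("high=%" PRIu32 " low=0x%08" PRIx32 "\n",
               len.high_word, len.low_word);
        return 0;
}
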
ctx               114 drivers/crypto/virtio/virtio_crypto_algs.c 		struct virtio_crypto_ablkcipher_ctx *ctx,
ctx               121 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               191 drivers/crypto/virtio/virtio_crypto_algs.c 		ctx->enc_sess_info.session_id =
ctx               194 drivers/crypto/virtio/virtio_crypto_algs.c 		ctx->dec_sess_info.session_id =
ctx               204 drivers/crypto/virtio/virtio_crypto_algs.c 		struct virtio_crypto_ablkcipher_ctx *ctx,
ctx               210 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               226 drivers/crypto/virtio/virtio_crypto_algs.c 			cpu_to_le64(ctx->enc_sess_info.session_id);
ctx               229 drivers/crypto/virtio/virtio_crypto_algs.c 			cpu_to_le64(ctx->dec_sess_info.session_id);
ctx               265 drivers/crypto/virtio/virtio_crypto_algs.c 		struct virtio_crypto_ablkcipher_ctx *ctx,
ctx               270 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               281 drivers/crypto/virtio/virtio_crypto_algs.c 	ret = virtio_crypto_alg_ablkcipher_init_session(ctx,
ctx               286 drivers/crypto/virtio/virtio_crypto_algs.c 	ret = virtio_crypto_alg_ablkcipher_init_session(ctx,
ctx               289 drivers/crypto/virtio/virtio_crypto_algs.c 		virtio_crypto_alg_ablkcipher_close_session(ctx, 1);
ctx               295 drivers/crypto/virtio/virtio_crypto_algs.c 	crypto_tfm_set_flags(ctx->tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
ctx               304 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
ctx               312 drivers/crypto/virtio/virtio_crypto_algs.c 	if (!ctx->vcrypto) {
ctx               323 drivers/crypto/virtio/virtio_crypto_algs.c 		ctx->vcrypto = vcrypto;
ctx               326 drivers/crypto/virtio/virtio_crypto_algs.c 		virtio_crypto_alg_ablkcipher_close_session(ctx, 1);
ctx               327 drivers/crypto/virtio/virtio_crypto_algs.c 		virtio_crypto_alg_ablkcipher_close_session(ctx, 0);
ctx               330 drivers/crypto/virtio/virtio_crypto_algs.c 	ret = virtio_crypto_alg_ablkcipher_init_sessions(ctx, key, keylen);
ctx               332 drivers/crypto/virtio/virtio_crypto_algs.c 		virtcrypto_dev_put(ctx->vcrypto);
ctx               333 drivers/crypto/virtio/virtio_crypto_algs.c 		ctx->vcrypto = NULL;
ctx               347 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = vc_sym_req->ablkcipher_ctx;
ctx               350 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               392 drivers/crypto/virtio/virtio_crypto_algs.c 			cpu_to_le64(ctx->enc_sess_info.session_id);
ctx               397 drivers/crypto/virtio/virtio_crypto_algs.c 			cpu_to_le64(ctx->dec_sess_info.session_id);
ctx               488 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = crypto_ablkcipher_ctx(atfm);
ctx               492 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               503 drivers/crypto/virtio/virtio_crypto_algs.c 	vc_sym_req->ablkcipher_ctx = ctx;
ctx               513 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = crypto_ablkcipher_ctx(atfm);
ctx               517 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto *vcrypto = ctx->vcrypto;
ctx               528 drivers/crypto/virtio/virtio_crypto_algs.c 	vc_sym_req->ablkcipher_ctx = ctx;
ctx               537 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               540 drivers/crypto/virtio/virtio_crypto_algs.c 	ctx->tfm = tfm;
ctx               542 drivers/crypto/virtio/virtio_crypto_algs.c 	ctx->enginectx.op.do_one_request = virtio_crypto_ablkcipher_crypt_req;
ctx               543 drivers/crypto/virtio/virtio_crypto_algs.c 	ctx->enginectx.op.prepare_request = NULL;
ctx               544 drivers/crypto/virtio/virtio_crypto_algs.c 	ctx->enginectx.op.unprepare_request = NULL;
ctx               550 drivers/crypto/virtio/virtio_crypto_algs.c 	struct virtio_crypto_ablkcipher_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               552 drivers/crypto/virtio/virtio_crypto_algs.c 	if (!ctx->vcrypto)
ctx               555 drivers/crypto/virtio/virtio_crypto_algs.c 	virtio_crypto_alg_ablkcipher_close_session(ctx, 1);
ctx               556 drivers/crypto/virtio/virtio_crypto_algs.c 	virtio_crypto_alg_ablkcipher_close_session(ctx, 0);
ctx               557 drivers/crypto/virtio/virtio_crypto_algs.c 	virtcrypto_dev_put(ctx->vcrypto);
ctx               558 drivers/crypto/virtio/virtio_crypto_algs.c 	ctx->vcrypto = NULL;
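
The virtio_crypto_algs.c excerpts show a paired-session lifecycle: setkey creates the encrypt session, then the decrypt session, and a failure on the second closes the first so no half-initialized key state survives (re-keying an already bound tfm likewise closes both sessions first). A toy sketch of that create-then-rollback shape; init_session() and close_session() are stand-ins, not virtio calls.

#include <stdio.h>

/* toy stand-ins for creating/destroying a device-side crypto session */
static int init_session(const char *which, int fail)
{
        if (fail)
                return -1;
        printf("opened %s session\n", which);
        return 0;
}

static void close_session(const char *which)
{
        printf("closed %s session\n", which);
}

/* encrypt session first, decrypt second, rollback on partial failure */
static int init_sessions(int fail_decrypt)
{
        int ret;

        ret = init_session("encrypt", 0);
        if (ret)
                return ret;

        ret = init_session("decrypt", fail_decrypt);
        if (ret) {
                close_session("encrypt");       /* undo the first session */
                return ret;
        }
        return 0;
}

int main(void)
{
        if (init_sessions(1))
                printf("setkey failed cleanly\n");
        return 0;
}
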
ctx                31 drivers/crypto/vmx/aes.c 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                45 drivers/crypto/vmx/aes.c 	ctx->fallback = fallback;
ctx                52 drivers/crypto/vmx/aes.c 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                54 drivers/crypto/vmx/aes.c 	if (ctx->fallback) {
ctx                55 drivers/crypto/vmx/aes.c 		crypto_free_cipher(ctx->fallback);
ctx                56 drivers/crypto/vmx/aes.c 		ctx->fallback = NULL;
ctx                64 drivers/crypto/vmx/aes.c 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                69 drivers/crypto/vmx/aes.c 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
ctx                70 drivers/crypto/vmx/aes.c 	ret |= aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
ctx                75 drivers/crypto/vmx/aes.c 	ret |= crypto_cipher_setkey(ctx->fallback, key, keylen);
ctx                82 drivers/crypto/vmx/aes.c 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx                85 drivers/crypto/vmx/aes.c 		crypto_cipher_encrypt_one(ctx->fallback, dst, src);
ctx                90 drivers/crypto/vmx/aes.c 		aes_p8_encrypt(src, dst, &ctx->enc_key);
ctx                99 drivers/crypto/vmx/aes.c 	struct p8_aes_ctx *ctx = crypto_tfm_ctx(tfm);
ctx               102 drivers/crypto/vmx/aes.c 		crypto_cipher_decrypt_one(ctx->fallback, dst, src);
ctx               107 drivers/crypto/vmx/aes.c 		aes_p8_decrypt(src, dst, &ctx->dec_key);
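
The p8_aes_* cipher above allocates a generic fallback at init time and frees it at exit; each encrypt/decrypt call then takes either the accelerated aes_p8_* path or crypto_cipher_encrypt_one() on the fallback, depending on whether the vector unit may be used in the current context. A userspace sketch of that runtime dispatch; simd_usable() here is a stand-in predicate, not a kernel API.

#include <stdio.h>

/* stand-in for "may the vector unit be used in this context?" */
static int simd_usable(void)
{
        return 0;               /* pretend VSX is off-limits right now */
}

static void aes_accel_encrypt(const unsigned char *src, unsigned char *dst)
{
        (void)src; (void)dst;
        printf("accelerated path\n");
}

static void aes_generic_encrypt(const unsigned char *src, unsigned char *dst)
{
        (void)src; (void)dst;
        printf("fallback path\n");
}

/* per-call dispatch, in the spirit of the p8_aes encrypt/decrypt hooks */
static void aes_encrypt(const unsigned char *src, unsigned char *dst)
{
        if (!simd_usable())
                aes_generic_encrypt(src, dst);
        else
                aes_accel_encrypt(src, dst);
}

int main(void)
{
        unsigned char in[16] = { 0 }, out[16];

        aes_encrypt(in, out);
        return 0;
}
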
ctx                26 drivers/crypto/vmx/aes_cbc.c 	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                40 drivers/crypto/vmx/aes_cbc.c 	ctx->fallback = fallback;
ctx                46 drivers/crypto/vmx/aes_cbc.c 	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                48 drivers/crypto/vmx/aes_cbc.c 	crypto_free_skcipher(ctx->fallback);
ctx                54 drivers/crypto/vmx/aes_cbc.c 	struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                60 drivers/crypto/vmx/aes_cbc.c 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
ctx                61 drivers/crypto/vmx/aes_cbc.c 	ret |= aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
ctx                66 drivers/crypto/vmx/aes_cbc.c 	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
ctx                74 drivers/crypto/vmx/aes_cbc.c 	const struct p8_aes_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                83 drivers/crypto/vmx/aes_cbc.c 		skcipher_request_set_tfm(subreq, ctx->fallback);
ctx                96 drivers/crypto/vmx/aes_cbc.c 				   enc ? &ctx->enc_key : &ctx->dec_key,
ctx                25 drivers/crypto/vmx/aes_ctr.c 	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                39 drivers/crypto/vmx/aes_ctr.c 	ctx->fallback = fallback;
ctx                45 drivers/crypto/vmx/aes_ctr.c 	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                47 drivers/crypto/vmx/aes_ctr.c 	crypto_free_skcipher(ctx->fallback);
ctx                53 drivers/crypto/vmx/aes_ctr.c 	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                59 drivers/crypto/vmx/aes_ctr.c 	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
ctx                64 drivers/crypto/vmx/aes_ctr.c 	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
ctx                69 drivers/crypto/vmx/aes_ctr.c static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
ctx                81 drivers/crypto/vmx/aes_ctr.c 	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
ctx                93 drivers/crypto/vmx/aes_ctr.c 	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx               102 drivers/crypto/vmx/aes_ctr.c 		skcipher_request_set_tfm(subreq, ctx->fallback);
ctx               114 drivers/crypto/vmx/aes_ctr.c 					    &ctx->enc_key, walk.iv);
ctx               126 drivers/crypto/vmx/aes_ctr.c 		p8_aes_ctr_final(ctx, &walk);
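
p8_aes_ctr_final() above handles the trailing partial block by encrypting the counter block into a keystream buffer and XORing only the remaining bytes, so CTR output never grows past the input. A compact restatement with a toy block function standing in for aes_p8_encrypt() (it is not AES; it merely produces bytes to XOR with).

#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* stand-in for aes_p8_encrypt(ctrblk, keystream, &key) */
static void toy_encrypt_block(const unsigned char *in, unsigned char *out)
{
        for (int i = 0; i < AES_BLOCK_SIZE; i++)
                out[i] = in[i] ^ 0xAA;
}

/* final partial block: encrypt the counter once, XOR only nbytes bytes */
static void ctr_final(const unsigned char *ctrblk,
                      const unsigned char *src, unsigned char *dst,
                      unsigned int nbytes)
{
        unsigned char keystream[AES_BLOCK_SIZE];

        toy_encrypt_block(ctrblk, keystream);
        for (unsigned int i = 0; i < nbytes; i++)
                dst[i] = src[i] ^ keystream[i];
}

int main(void)
{
        unsigned char ctr[AES_BLOCK_SIZE] = { 0 };
        unsigned char msg[5] = "tail", out[5];

        ctr_final(ctr, msg, out, sizeof(msg));  /* 5-byte tail, no padding */
        printf("%02x %02x\n", out[0], out[4]);
        return 0;
}
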
ctx                28 drivers/crypto/vmx/aes_xts.c 	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                42 drivers/crypto/vmx/aes_xts.c 	ctx->fallback = fallback;
ctx                48 drivers/crypto/vmx/aes_xts.c 	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                50 drivers/crypto/vmx/aes_xts.c 	crypto_free_skcipher(ctx->fallback);
ctx                56 drivers/crypto/vmx/aes_xts.c 	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                66 drivers/crypto/vmx/aes_xts.c 	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
ctx                67 drivers/crypto/vmx/aes_xts.c 	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
ctx                68 drivers/crypto/vmx/aes_xts.c 	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
ctx                73 drivers/crypto/vmx/aes_xts.c 	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);
ctx                81 drivers/crypto/vmx/aes_xts.c 	const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx                94 drivers/crypto/vmx/aes_xts.c 		skcipher_request_set_tfm(subreq, ctx->fallback);
ctx               107 drivers/crypto/vmx/aes_xts.c 	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);
ctx               121 drivers/crypto/vmx/aes_xts.c 					   &ctx->enc_key, NULL, tweak);
ctx               126 drivers/crypto/vmx/aes_xts.c 					   &ctx->dec_key, NULL, tweak);
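
The XTS setkey lines show the double-length key convention: the caller supplies twice the AES key size, the second half expands into the tweak key, and the first half into the data enc/dec keys. A sketch of that split; set_key() is a placeholder for the aes_p8_set_*_key() expansion routines.

#include <stdio.h>

/* placeholder for aes_p8_set_encrypt_key()/aes_p8_set_decrypt_key() */
static int set_key(const unsigned char *key, int bits, const char *which)
{
        (void)key;
        printf("expanding %d-bit %s key\n", bits, which);
        return 0;
}

/* double-length key: first half = data key, second half = tweak key */
static int xts_setkey(const unsigned char *key, unsigned int keylen)
{
        int ret;

        if (keylen != 32 && keylen != 64)   /* 2 x AES-128 or 2 x AES-256 */
                return -1;

        ret  = set_key(key + keylen / 2, (keylen / 2) * 8, "tweak");
        ret |= set_key(key, (keylen / 2) * 8, "data");
        return ret ? -1 : 0;
}

int main(void)
{
        unsigned char key[32] = { 0 };

        return xts_setkey(key, sizeof(key));
}
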
ctx                57 drivers/crypto/vmx/ghash.c 	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(tfm));
ctx                65 drivers/crypto/vmx/ghash.c 	gcm_init_p8(ctx->htable, (const u64 *) key);
ctx                70 drivers/crypto/vmx/ghash.c 	memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);
ctx                75 drivers/crypto/vmx/ghash.c static inline void __ghash_block(struct p8_ghash_ctx *ctx,
ctx                82 drivers/crypto/vmx/ghash.c 		gcm_ghash_p8(dctx->shash, ctx->htable,
ctx                89 drivers/crypto/vmx/ghash.c 		gf128mul_lle((be128 *)dctx->shash, &ctx->key);
ctx                93 drivers/crypto/vmx/ghash.c static inline void __ghash_blocks(struct p8_ghash_ctx *ctx,
ctx               101 drivers/crypto/vmx/ghash.c 		gcm_ghash_p8(dctx->shash, ctx->htable,
ctx               109 drivers/crypto/vmx/ghash.c 			gf128mul_lle((be128 *)dctx->shash, &ctx->key);
ctx               120 drivers/crypto/vmx/ghash.c 	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
ctx               133 drivers/crypto/vmx/ghash.c 		__ghash_block(ctx, dctx);
ctx               141 drivers/crypto/vmx/ghash.c 		__ghash_blocks(ctx, dctx, src, len);
ctx               155 drivers/crypto/vmx/ghash.c 	struct p8_ghash_ctx *ctx = crypto_tfm_ctx(crypto_shash_tfm(desc->tfm));
ctx               161 drivers/crypto/vmx/ghash.c 		__ghash_block(ctx, dctx);
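
The ghash update path follows the usual shash update shape: top up a partially filled block buffer, process it once full, run the bulk of whole blocks, and stash any tail for the next update or final. A runnable restatement with a stub in place of the gcm_ghash_p8()/gf128mul_lle() block step.

#include <stdio.h>
#include <string.h>

#define BLK 16

struct digest_ctx {
        unsigned char buf[BLK];
        unsigned int nbuf;              /* bytes currently buffered */
};

/* stand-in for one GHASH block step */
static void process_block(const unsigned char *blk)
{
        (void)blk;
        printf("processing one %d-byte block\n", BLK);
}

static void ghash_update(struct digest_ctx *d,
                         const unsigned char *src, unsigned int len)
{
        if (d->nbuf) {                  /* finish a previously buffered block */
                unsigned int take = BLK - d->nbuf;

                if (take > len)
                        take = len;
                memcpy(d->buf + d->nbuf, src, take);
                d->nbuf += take;
                src += take;
                len -= take;
                if (d->nbuf == BLK) {
                        process_block(d->buf);
                        d->nbuf = 0;
                }
        }
        while (len >= BLK) {            /* bulk full blocks */
                process_block(src);
                src += BLK;
                len -= BLK;
        }
        if (len) {                      /* keep the tail for later */
                memcpy(d->buf, src, len);
                d->nbuf = len;
        }
}

int main(void)
{
        struct digest_ctx d = { .nbuf = 0 };
        unsigned char data[40] = { 0 };

        ghash_update(&d, data, 10);     /* just buffered */
        ghash_update(&d, data, 30);     /* flushes buffer + one full block */
        printf("%u bytes left buffered\n", d.nbuf);
        return 0;
}
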
ctx               475 drivers/dax/super.c 	struct pseudo_fs_context *ctx = init_pseudo(fc, DAXFS_MAGIC);
ctx               476 drivers/dax/super.c 	if (!ctx)
ctx               478 drivers/dax/super.c 	ctx->ops = &dax_sops;
ctx                65 drivers/dma-buf/dma-buf.c 	struct pseudo_fs_context *ctx;
ctx                67 drivers/dma-buf/dma-buf.c 	ctx = init_pseudo(fc, DMA_BUF_MAGIC);
ctx                68 drivers/dma-buf/dma-buf.c 	if (!ctx)
ctx                70 drivers/dma-buf/dma-buf.c 	ctx->dops = &dma_buf_dentry_ops;
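
The dax and dma-buf lines above use the same libfs idiom: the filesystem's init_fs_context hook calls init_pseudo() with its magic number and then customizes the returned pseudo_fs_context (super_operations for dax, dentry_operations for dma-buf). A minimal kernel-side sketch of that idiom under the same API; the example_* names and magic value are hypothetical.

#include <linux/fs.h>
#include <linux/fs_context.h>
#include <linux/pseudo_fs.h>

#define EXAMPLE_MAGIC 0x45584d50        /* hypothetical magic */

static const struct dentry_operations example_dops = {
        /* e.g. a .d_dname callback for anonymous inodes */
};

static int example_init_fs_context(struct fs_context *fc)
{
        struct pseudo_fs_context *ctx = init_pseudo(fc, EXAMPLE_MAGIC);

        if (!ctx)
                return -ENOMEM;
        ctx->dops = &example_dops;      /* optional override, as dma-buf does */
        return 0;
}

static struct file_system_type example_fs_type = {
        .name            = "example",
        .init_fs_context = example_init_fs_context,
        .kill_sb         = kill_anon_super,
};
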
ctx               618 drivers/dma/bcm-sba-raid.c 	msg->ctx = req;
ctx               706 drivers/dma/bcm-sba-raid.c 	msg->ctx = req;
ctx               845 drivers/dma/bcm-sba-raid.c 	msg->ctx = req;
ctx              1056 drivers/dma/bcm-sba-raid.c 	msg->ctx = req;
ctx              1310 drivers/dma/bcm-sba-raid.c 	msg->ctx = req;
ctx              1445 drivers/dma/bcm-sba-raid.c 	struct sba_request *req = m->ctx;
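
Every submission site in bcm-sba-raid.c stashes the request pointer in msg->ctx before handing the message to the mailbox, and the completion callback recovers it with req = m->ctx. That opaque-context round-trip in isolation:

#include <stdio.h>

/* toy mailbox message carrying an opaque client back-pointer */
struct mbox_msg {
        void *ctx;
        int payload;
};

struct request {
        int id;
};

/* completion path: recover the request from the opaque pointer */
static void msg_done(struct mbox_msg *m)
{
        struct request *req = m->ctx;

        printf("completed request %d\n", req->id);
}

int main(void)
{
        struct request req = { .id = 42 };
        struct mbox_msg msg = { .ctx = &req, .payload = 0 };

        msg_done(&msg);         /* as in: struct sba_request *req = m->ctx */
        return 0;
}
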
ctx               290 drivers/dma/bestcomm/bestcomm.c 	bcom_eng->ctx = bcom_sram_alloc(ctx_size, BCOM_CTX_ALIGN, &ctx_pa);
ctx               294 drivers/dma/bestcomm/bestcomm.c 	if (!bcom_eng->tdt || !bcom_eng->ctx || !bcom_eng->var || !bcom_eng->fdt) {
ctx               298 drivers/dma/bestcomm/bestcomm.c 		bcom_sram_free(bcom_eng->ctx);
ctx               306 drivers/dma/bestcomm/bestcomm.c 	memset(bcom_eng->ctx, 0x00, ctx_size);
ctx               358 drivers/dma/bestcomm/bestcomm.c 	bcom_sram_free(bcom_eng->ctx);
ctx               204 drivers/edac/aspeed_edac.c static int config_irq(void *ctx, struct platform_device *pdev)
ctx               216 drivers/edac/aspeed_edac.c 			      DRV_NAME, ctx);
ctx               551 drivers/edac/thunderx_edac.c 	struct lmc_err_ctx *ctx = &lmc->err_ctx[head];
ctx               557 drivers/edac/thunderx_edac.c 	ctx->reg_int = readq(lmc->regs + LMC_INT);
ctx               558 drivers/edac/thunderx_edac.c 	ctx->reg_fadr = readq(lmc->regs + LMC_FADR);
ctx               559 drivers/edac/thunderx_edac.c 	ctx->reg_nxm_fadr = readq(lmc->regs + LMC_NXM_FADR);
ctx               560 drivers/edac/thunderx_edac.c 	ctx->reg_scram_fadr = readq(lmc->regs + LMC_SCRAM_FADR);
ctx               561 drivers/edac/thunderx_edac.c 	ctx->reg_ecc_synd = readq(lmc->regs + LMC_ECC_SYND);
ctx               568 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_int, lmc->regs + LMC_INT);
ctx               580 drivers/edac/thunderx_edac.c 	struct lmc_err_ctx *ctx;
ctx               597 drivers/edac/thunderx_edac.c 		ctx = &lmc->err_ctx[tail];
ctx               600 drivers/edac/thunderx_edac.c 			ctx->reg_int);
ctx               602 drivers/edac/thunderx_edac.c 			ctx->reg_fadr);
ctx               604 drivers/edac/thunderx_edac.c 			ctx->reg_nxm_fadr);
ctx               606 drivers/edac/thunderx_edac.c 			ctx->reg_scram_fadr);
ctx               608 drivers/edac/thunderx_edac.c 			ctx->reg_ecc_synd);
ctx               612 drivers/edac/thunderx_edac.c 			 LMC_FADR_FDIMM(ctx->reg_scram_fadr),
ctx               613 drivers/edac/thunderx_edac.c 			 LMC_FADR_FBUNK(ctx->reg_scram_fadr),
ctx               614 drivers/edac/thunderx_edac.c 			 LMC_FADR_FBANK(ctx->reg_scram_fadr),
ctx               615 drivers/edac/thunderx_edac.c 			 LMC_FADR_FROW(ctx->reg_scram_fadr),
ctx               616 drivers/edac/thunderx_edac.c 			 LMC_FADR_FCOL(ctx->reg_scram_fadr));
ctx               619 drivers/edac/thunderx_edac.c 				ctx->reg_int);
ctx               621 drivers/edac/thunderx_edac.c 		phys_addr = thunderx_faddr_to_phys(ctx->reg_fadr, lmc);
ctx               623 drivers/edac/thunderx_edac.c 		if (ctx->reg_int & LMC_INT_UE)
ctx               628 drivers/edac/thunderx_edac.c 		else if (ctx->reg_int & LMC_INT_CE)
ctx              1084 drivers/edac/thunderx_edac.c 	struct ocx_com_err_ctx *ctx = &ocx->com_err_ctx[head];
ctx              1086 drivers/edac/thunderx_edac.c 	ctx->reg_com_int = readq(ocx->regs + OCX_COM_INT);
ctx              1089 drivers/edac/thunderx_edac.c 		ctx->reg_lane_int[lane] =
ctx              1091 drivers/edac/thunderx_edac.c 		ctx->reg_lane_stat11[lane] =
ctx              1094 drivers/edac/thunderx_edac.c 		writeq(ctx->reg_lane_int[lane], ocx->regs + OCX_LNE_INT(lane));
ctx              1097 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_com_int, ocx->regs + OCX_COM_INT);
ctx              1113 drivers/edac/thunderx_edac.c 	struct ocx_com_err_ctx *ctx;
ctx              1128 drivers/edac/thunderx_edac.c 		ctx = &ocx->com_err_ctx[tail];
ctx              1131 drivers/edac/thunderx_edac.c 			ocx->edac_dev->ctl_name, ctx->reg_com_int);
ctx              1134 drivers/edac/thunderx_edac.c 				ocx_com_errors, ctx->reg_com_int);
ctx              1139 drivers/edac/thunderx_edac.c 			if (ctx->reg_com_int & BIT(lane)) {
ctx              1142 drivers/edac/thunderx_edac.c 					 lane, ctx->reg_lane_int[lane],
ctx              1143 drivers/edac/thunderx_edac.c 					 lane, ctx->reg_lane_stat11[lane]);
ctx              1149 drivers/edac/thunderx_edac.c 						ctx->reg_lane_int[lane]);
ctx              1153 drivers/edac/thunderx_edac.c 		if (ctx->reg_com_int & OCX_COM_INT_CE)
ctx              1175 drivers/edac/thunderx_edac.c 	struct ocx_link_err_ctx *ctx = &ocx->link_err_ctx[head];
ctx              1177 drivers/edac/thunderx_edac.c 	ctx->link = msix->entry;
ctx              1178 drivers/edac/thunderx_edac.c 	ctx->reg_com_link_int = readq(ocx->regs + OCX_COM_LINKX_INT(ctx->link));
ctx              1180 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_com_link_int, ocx->regs + OCX_COM_LINKX_INT(ctx->link));
ctx              1194 drivers/edac/thunderx_edac.c 	struct ocx_link_err_ctx *ctx;
ctx              1210 drivers/edac/thunderx_edac.c 		ctx = &ocx->link_err_ctx[tail];
ctx              1215 drivers/edac/thunderx_edac.c 			 ctx->link, ctx->reg_com_link_int);
ctx              1218 drivers/edac/thunderx_edac.c 				ocx_com_link_errors, ctx->reg_com_link_int);
ctx              1222 drivers/edac/thunderx_edac.c 		if (ctx->reg_com_link_int & OCX_COM_LINK_INT_UE)
ctx              1224 drivers/edac/thunderx_edac.c 		else if (ctx->reg_com_link_int & OCX_COM_LINK_INT_CE)
ctx              1767 drivers/edac/thunderx_edac.c 	struct l2c_err_ctx *ctx = &tad->err_ctx[head];
ctx              1769 drivers/edac/thunderx_edac.c 	ctx->reg_int = readq(tad->regs + L2C_TAD_INT_W1C);
ctx              1771 drivers/edac/thunderx_edac.c 	if (ctx->reg_int & L2C_TAD_INT_ECC) {
ctx              1772 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "TQD_ERR";
ctx              1773 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(tad->regs + L2C_TAD_TQD_ERR);
ctx              1774 drivers/edac/thunderx_edac.c 	} else if (ctx->reg_int & L2C_TAD_INT_TAG) {
ctx              1775 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "TTG_ERR";
ctx              1776 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(tad->regs + L2C_TAD_TTG_ERR);
ctx              1777 drivers/edac/thunderx_edac.c 	} else if (ctx->reg_int & L2C_TAD_INT_LFBTO) {
ctx              1778 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "TIMEOUT";
ctx              1779 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(tad->regs + L2C_TAD_TIMEOUT);
ctx              1780 drivers/edac/thunderx_edac.c 	} else if (ctx->reg_int & L2C_TAD_INT_DISOCI) {
ctx              1781 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "ERR";
ctx              1782 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(tad->regs + L2C_TAD_ERR);
ctx              1785 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_int, tad->regs + L2C_TAD_INT_W1C);
ctx              1799 drivers/edac/thunderx_edac.c 	struct l2c_err_ctx *ctx = &cbc->err_ctx[head];
ctx              1801 drivers/edac/thunderx_edac.c 	ctx->reg_int = readq(cbc->regs + L2C_CBC_INT_W1C);
ctx              1803 drivers/edac/thunderx_edac.c 	if (ctx->reg_int & L2C_CBC_INT_RSD) {
ctx              1804 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "RSDERR";
ctx              1805 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(cbc->regs + L2C_CBC_RSDERR);
ctx              1806 drivers/edac/thunderx_edac.c 	} else if (ctx->reg_int & L2C_CBC_INT_MIB) {
ctx              1807 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "MIBERR";
ctx              1808 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(cbc->regs + L2C_CBC_MIBERR);
ctx              1809 drivers/edac/thunderx_edac.c 	} else if (ctx->reg_int & L2C_CBC_INT_IODISOCI) {
ctx              1810 drivers/edac/thunderx_edac.c 		ctx->reg_ext_name = "IODISOCIERR";
ctx              1811 drivers/edac/thunderx_edac.c 		ctx->reg_ext = readq(cbc->regs + L2C_CBC_IODISOCIERR);
ctx              1814 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_int, cbc->regs + L2C_CBC_INT_W1C);
ctx              1828 drivers/edac/thunderx_edac.c 	struct l2c_err_ctx *ctx = &mci->err_ctx[head];
ctx              1830 drivers/edac/thunderx_edac.c 	ctx->reg_int = readq(mci->regs + L2C_MCI_INT_W1C);
ctx              1831 drivers/edac/thunderx_edac.c 	ctx->reg_ext = readq(mci->regs + L2C_MCI_ERR);
ctx              1833 drivers/edac/thunderx_edac.c 	writeq(ctx->reg_int, mci->regs + L2C_MCI_INT_W1C);
ctx              1835 drivers/edac/thunderx_edac.c 	ctx->reg_ext_name = "ERR";
ctx              1849 drivers/edac/thunderx_edac.c 	struct l2c_err_ctx *ctx = &l2c->err_ctx[tail];
ctx              1894 drivers/edac/thunderx_edac.c 			 l2c->edac_dev->ctl_name, reg_int_name, ctx->reg_int,
ctx              1895 drivers/edac/thunderx_edac.c 			 ctx->reg_ext_name, ctx->reg_ext);
ctx              1897 drivers/edac/thunderx_edac.c 		decode_register(other, L2C_OTHER_SIZE, l2_errors, ctx->reg_int);
ctx              1901 drivers/edac/thunderx_edac.c 		if (ctx->reg_int & mask_ue)
ctx              1903 drivers/edac/thunderx_edac.c 		else if (ctx->reg_int & mask_ce)
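
All three thunderx blocks (LMC, OCX, L2C) share one structure: the hard-IRQ half snapshots the volatile error registers into err_ctx[head] and acknowledges the interrupt, while the threaded half later decodes the saved context from err_ctx[tail]. A stripped-down model of that producer/consumer ring; the real driver sizes the ring per block and uses atomic head/tail updates with barriers, elided here.

#include <stdio.h>

#define RING_SIZE 4

struct err_ctx {
        unsigned long reg_int;          /* registers captured at IRQ time */
};

static struct err_ctx ring[RING_SIZE];
static unsigned int ring_head, ring_tail;

/* hard-IRQ half: capture quickly, then bump head */
static void isr_top(unsigned long reg_int)
{
        struct err_ctx *ctx = &ring[ring_head % RING_SIZE];

        ctx->reg_int = reg_int;
        ring_head++;
        /* ... write reg_int back to the W1C register to acknowledge ... */
}

/* threaded half: drain and report at leisure from tail */
static void isr_thread(void)
{
        while (ring_tail != ring_head) {
                struct err_ctx *ctx = &ring[ring_tail % RING_SIZE];

                printf("error context: int=%#lx\n", ctx->reg_int);
                ring_tail++;
        }
}

int main(void)
{
        isr_top(0x1);
        isr_top(0x5);
        isr_thread();                   /* decodes both captured contexts */
        return 0;
}
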
ctx               145 drivers/edac/xgene_edac.c 	struct xgene_edac_mc_ctx *ctx = mci->pvt_info;
ctx               151 drivers/edac/xgene_edac.c 		       ctx->mcu_csr + MCUESRRA0 + i * MCU_RANK_STRIDE);
ctx               176 drivers/edac/xgene_edac.c 	struct xgene_edac_mc_ctx *ctx = mci->pvt_info;
ctx               185 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, PCPHPERRINTSTS, &pcp_hp_stat);
ctx               186 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, PCPLPERRINTSTS, &pcp_lp_stat);
ctx               193 drivers/edac/xgene_edac.c 		reg = readl(ctx->mcu_csr + MCUESRR0 + rank * MCU_RANK_STRIDE);
ctx               208 drivers/edac/xgene_edac.c 			bank = readl(ctx->mcu_csr + MCUEBLRR0 +
ctx               210 drivers/edac/xgene_edac.c 			col_row = readl(ctx->mcu_csr + MCUERCRR0 +
ctx               212 drivers/edac/xgene_edac.c 			count = readl(ctx->mcu_csr + MCUSBECNT0 +
ctx               226 drivers/edac/xgene_edac.c 		writel(0x0, ctx->mcu_csr + MCUEBLRR0 + rank * MCU_RANK_STRIDE);
ctx               227 drivers/edac/xgene_edac.c 		writel(0x0, ctx->mcu_csr + MCUERCRR0 + rank * MCU_RANK_STRIDE);
ctx               228 drivers/edac/xgene_edac.c 		writel(0x0, ctx->mcu_csr + MCUSBECNT0 +
ctx               230 drivers/edac/xgene_edac.c 		writel(reg, ctx->mcu_csr + MCUESRR0 + rank * MCU_RANK_STRIDE);
ctx               234 drivers/edac/xgene_edac.c 	reg = readl(ctx->mcu_csr + MCUGESR);
ctx               243 drivers/edac/xgene_edac.c 		writel(reg, ctx->mcu_csr + MCUGESR);
ctx               249 drivers/edac/xgene_edac.c 	struct xgene_edac_mc_ctx *ctx = mci->pvt_info;
ctx               255 drivers/edac/xgene_edac.c 	mutex_lock(&ctx->edac->mc_lock);
ctx               267 drivers/edac/xgene_edac.c 		ctx->edac->mc_registered_mask |= 1 << ctx->mcu_id;
ctx               270 drivers/edac/xgene_edac.c 		if (ctx->edac->mc_registered_mask ==
ctx               271 drivers/edac/xgene_edac.c 		    ctx->edac->mc_active_mask) {
ctx               273 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPHPERRINTMSK,
ctx               276 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPLPERRINTMSK,
ctx               281 drivers/edac/xgene_edac.c 		val = readl(ctx->mcu_csr + MCUGECR);
ctx               286 drivers/edac/xgene_edac.c 		writel(val, ctx->mcu_csr + MCUGECR);
ctx               289 drivers/edac/xgene_edac.c 		val = readl(ctx->mcu_csr + MCUGECR);
ctx               294 drivers/edac/xgene_edac.c 		writel(val, ctx->mcu_csr + MCUGECR);
ctx               297 drivers/edac/xgene_edac.c 		xgene_edac_pcp_setbits(ctx->edac, PCPHPERRINTMSK,
ctx               299 drivers/edac/xgene_edac.c 		xgene_edac_pcp_setbits(ctx->edac, PCPLPERRINTMSK,
ctx               303 drivers/edac/xgene_edac.c 		ctx->edac->mc_registered_mask &= ~(1 << ctx->mcu_id);
ctx               306 drivers/edac/xgene_edac.c 	mutex_unlock(&ctx->edac->mc_lock);
ctx               309 drivers/edac/xgene_edac.c static int xgene_edac_mc_is_active(struct xgene_edac_mc_ctx *ctx, int mc_idx)
ctx               314 drivers/edac/xgene_edac.c 	if (regmap_read(ctx->edac->csw_map, CSW_CSWCR, &reg))
ctx               322 drivers/edac/xgene_edac.c 		if (regmap_read(ctx->edac->mcbb_map, MCBADDRMR, &reg))
ctx               330 drivers/edac/xgene_edac.c 		if (regmap_read(ctx->edac->mcba_map, MCBADDRMR, &reg))
ctx               336 drivers/edac/xgene_edac.c 	if (!ctx->edac->mc_active_mask)
ctx               337 drivers/edac/xgene_edac.c 		ctx->edac->mc_active_mask = mcu_mask;
ctx               347 drivers/edac/xgene_edac.c 	struct xgene_edac_mc_ctx *ctx;
ctx               387 drivers/edac/xgene_edac.c 			    sizeof(*ctx));
ctx               393 drivers/edac/xgene_edac.c 	ctx = mci->pvt_info;
ctx               394 drivers/edac/xgene_edac.c 	*ctx = tmp_ctx;		/* Copy over resource value */
ctx               395 drivers/edac/xgene_edac.c 	ctx->name = "xgene_edac_mc_err";
ctx               396 drivers/edac/xgene_edac.c 	ctx->mci = mci;
ctx               398 drivers/edac/xgene_edac.c 	mci->ctl_name = ctx->name;
ctx               399 drivers/edac/xgene_edac.c 	mci->dev_name = ctx->name;
ctx               421 drivers/edac/xgene_edac.c 	list_add(&ctx->next, &edac->mcus);
ctx               522 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               526 drivers/edac/xgene_edac.c 	pg_f = ctx->pmd_csr + cpu_idx * CPU_CSR_STRIDE + CPU_MEMERR_CPU_PAGE;
ctx               533 drivers/edac/xgene_edac.c 		ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
ctx               573 drivers/edac/xgene_edac.c 		ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
ctx               617 drivers/edac/xgene_edac.c 		ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
ctx               661 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               669 drivers/edac/xgene_edac.c 	pg_e = ctx->pmd_csr + CPU_MEMERR_L2C_PAGE;
ctx               677 drivers/edac/xgene_edac.c 		ctx->pmd, val, val_hi, val_lo);
ctx               722 drivers/edac/xgene_edac.c 	pg_d = ctx->pmd_csr + CPU_L2C_PAGE;
ctx               729 drivers/edac/xgene_edac.c 			ctx->pmd, val, val_hi, val_lo);
ctx               736 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               740 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, PCPHPERRINTSTS, &pcp_hp_stat);
ctx               741 drivers/edac/xgene_edac.c 	if (!((PMD0_MERR_MASK << ctx->pmd) & pcp_hp_stat))
ctx               755 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               756 drivers/edac/xgene_edac.c 	void __iomem *pg_f = ctx->pmd_csr + cpu * CPU_CSR_STRIDE +
ctx               770 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               771 drivers/edac/xgene_edac.c 	void __iomem *pg_d = ctx->pmd_csr + CPU_L2C_PAGE;
ctx               772 drivers/edac/xgene_edac.c 	void __iomem *pg_e = ctx->pmd_csr + CPU_MEMERR_L2C_PAGE;
ctx               777 drivers/edac/xgene_edac.c 	if (ctx->version > 1)
ctx               784 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               790 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPHPERRINTMSK,
ctx               791 drivers/edac/xgene_edac.c 					       PMD0_MERR_MASK << ctx->pmd);
ctx               793 drivers/edac/xgene_edac.c 			xgene_edac_pcp_setbits(ctx->edac, PCPHPERRINTMSK,
ctx               794 drivers/edac/xgene_edac.c 					       PMD0_MERR_MASK << ctx->pmd);
ctx               811 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               816 drivers/edac/xgene_edac.c 		cpux_pg_f = ctx->pmd_csr + i * CPU_CSR_STRIDE +
ctx               837 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               838 drivers/edac/xgene_edac.c 	void __iomem *pg_e = ctx->pmd_csr + CPU_MEMERR_L2C_PAGE;
ctx               863 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx = edac_dev->pvt_info;
ctx               867 drivers/edac/xgene_edac.c 	if (!IS_ENABLED(CONFIG_EDAC_DEBUG) || !ctx->edac->dfs)
ctx               870 drivers/edac/xgene_edac.c 	snprintf(name, sizeof(name), "PMD%d", ctx->pmd);
ctx               871 drivers/edac/xgene_edac.c 	dbgfs_dir = edac_debugfs_create_dir_at(name, ctx->edac->dfs);
ctx               890 drivers/edac/xgene_edac.c 	struct xgene_edac_pmd_ctx *ctx;
ctx               915 drivers/edac/xgene_edac.c 	edac_dev = edac_device_alloc_ctl_info(sizeof(*ctx),
ctx               923 drivers/edac/xgene_edac.c 	ctx = edac_dev->pvt_info;
ctx               924 drivers/edac/xgene_edac.c 	ctx->name = "xgene_pmd_err";
ctx               925 drivers/edac/xgene_edac.c 	ctx->pmd = pmd;
ctx               926 drivers/edac/xgene_edac.c 	ctx->edac = edac;
ctx               927 drivers/edac/xgene_edac.c 	ctx->edac_dev = edac_dev;
ctx               928 drivers/edac/xgene_edac.c 	ctx->ddev = *edac->dev;
ctx               929 drivers/edac/xgene_edac.c 	ctx->version = version;
ctx               930 drivers/edac/xgene_edac.c 	edac_dev->dev = &ctx->ddev;
ctx               931 drivers/edac/xgene_edac.c 	edac_dev->ctl_name = ctx->name;
ctx               932 drivers/edac/xgene_edac.c 	edac_dev->dev_name = ctx->name;
ctx               940 drivers/edac/xgene_edac.c 	ctx->pmd_csr = devm_ioremap_resource(edac->dev, &res);
ctx               941 drivers/edac/xgene_edac.c 	if (IS_ERR(ctx->pmd_csr)) {
ctx               944 drivers/edac/xgene_edac.c 		rc = PTR_ERR(ctx->pmd_csr);
ctx               963 drivers/edac/xgene_edac.c 	list_add(&ctx->next, &edac->pmds);
ctx               969 drivers/edac/xgene_edac.c 	dev_info(edac->dev, "X-Gene EDAC PMD%d registered\n", ctx->pmd);
ctx              1055 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1061 drivers/edac/xgene_edac.c 	l3cesr = readl(ctx->dev_csr + L3C_ESR);
ctx              1070 drivers/edac/xgene_edac.c 	l3celr = readl(ctx->dev_csr + L3C_ELR);
ctx              1071 drivers/edac/xgene_edac.c 	l3caelr = readl(ctx->dev_csr + L3C_AELR);
ctx              1072 drivers/edac/xgene_edac.c 	l3cbelr = readl(ctx->dev_csr + L3C_BELR);
ctx              1100 drivers/edac/xgene_edac.c 	writel(0, ctx->dev_csr + L3C_ESR);
ctx              1102 drivers/edac/xgene_edac.c 	if (ctx->version <= 1 &&
ctx              1116 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1119 drivers/edac/xgene_edac.c 	val = readl(ctx->dev_csr + L3C_ECR);
ctx              1128 drivers/edac/xgene_edac.c 	writel(val, ctx->dev_csr + L3C_ECR);
ctx              1133 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPHPERRINTMSK,
ctx              1135 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPLPERRINTMSK,
ctx              1138 drivers/edac/xgene_edac.c 			xgene_edac_pcp_setbits(ctx->edac, PCPHPERRINTMSK,
ctx              1140 drivers/edac/xgene_edac.c 			xgene_edac_pcp_setbits(ctx->edac, PCPLPERRINTMSK,
ctx              1151 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1154 drivers/edac/xgene_edac.c 	writel(0xFFFFFFFF, ctx->dev_csr + L3C_ESR);
ctx              1167 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1171 drivers/edac/xgene_edac.c 	if (!IS_ENABLED(CONFIG_EDAC_DEBUG) || !ctx->edac->dfs)
ctx              1174 drivers/edac/xgene_edac.c 	snprintf(name, sizeof(name), "l3c%d", ctx->edac_idx);
ctx              1175 drivers/edac/xgene_edac.c 	dbgfs_dir = edac_debugfs_create_dir_at(name, ctx->edac->dfs);
ctx              1187 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx;
ctx              1210 drivers/edac/xgene_edac.c 	edac_dev = edac_device_alloc_ctl_info(sizeof(*ctx),
ctx              1218 drivers/edac/xgene_edac.c 	ctx = edac_dev->pvt_info;
ctx              1219 drivers/edac/xgene_edac.c 	ctx->dev_csr = dev_csr;
ctx              1220 drivers/edac/xgene_edac.c 	ctx->name = "xgene_l3_err";
ctx              1221 drivers/edac/xgene_edac.c 	ctx->edac_idx = edac_idx;
ctx              1222 drivers/edac/xgene_edac.c 	ctx->edac = edac;
ctx              1223 drivers/edac/xgene_edac.c 	ctx->edac_dev = edac_dev;
ctx              1224 drivers/edac/xgene_edac.c 	ctx->ddev = *edac->dev;
ctx              1225 drivers/edac/xgene_edac.c 	ctx->version = version;
ctx              1226 drivers/edac/xgene_edac.c 	edac_dev->dev = &ctx->ddev;
ctx              1227 drivers/edac/xgene_edac.c 	edac_dev->ctl_name = ctx->name;
ctx              1228 drivers/edac/xgene_edac.c 	edac_dev->dev_name = ctx->name;
ctx              1246 drivers/edac/xgene_edac.c 	list_add(&ctx->next, &edac->l3s);
ctx              1391 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1398 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + XGICTRANSERRINTSTS);
ctx              1410 drivers/edac/xgene_edac.c 	info = readl(ctx->dev_csr + XGICTRANSERRREQINFO);
ctx              1414 drivers/edac/xgene_edac.c 	writel(reg, ctx->dev_csr + XGICTRANSERRINTSTS);
ctx              1418 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + GLBL_ERR_STS);
ctx              1422 drivers/edac/xgene_edac.c 		err_addr_lo = readl(ctx->dev_csr + GLBL_SEC_ERRL);
ctx              1423 drivers/edac/xgene_edac.c 		err_addr_hi = readl(ctx->dev_csr + GLBL_SEC_ERRH);
ctx              1427 drivers/edac/xgene_edac.c 		writel(err_addr_lo, ctx->dev_csr + GLBL_SEC_ERRL);
ctx              1428 drivers/edac/xgene_edac.c 		writel(err_addr_hi, ctx->dev_csr + GLBL_SEC_ERRH);
ctx              1431 drivers/edac/xgene_edac.c 		err_addr_lo = readl(ctx->dev_csr + GLBL_MSEC_ERRL);
ctx              1432 drivers/edac/xgene_edac.c 		err_addr_hi = readl(ctx->dev_csr + GLBL_MSEC_ERRH);
ctx              1436 drivers/edac/xgene_edac.c 		writel(err_addr_lo, ctx->dev_csr + GLBL_MSEC_ERRL);
ctx              1437 drivers/edac/xgene_edac.c 		writel(err_addr_hi, ctx->dev_csr + GLBL_MSEC_ERRH);
ctx              1443 drivers/edac/xgene_edac.c 		err_addr_lo = readl(ctx->dev_csr + GLBL_DED_ERRL);
ctx              1444 drivers/edac/xgene_edac.c 		err_addr_hi = readl(ctx->dev_csr + GLBL_DED_ERRH);
ctx              1448 drivers/edac/xgene_edac.c 		writel(err_addr_lo, ctx->dev_csr + GLBL_DED_ERRL);
ctx              1449 drivers/edac/xgene_edac.c 		writel(err_addr_hi, ctx->dev_csr + GLBL_DED_ERRH);
ctx              1452 drivers/edac/xgene_edac.c 		err_addr_lo = readl(ctx->dev_csr + GLBL_MDED_ERRL);
ctx              1453 drivers/edac/xgene_edac.c 		err_addr_hi = readl(ctx->dev_csr + GLBL_MDED_ERRH);
ctx              1457 drivers/edac/xgene_edac.c 		writel(err_addr_lo, ctx->dev_csr + GLBL_MDED_ERRL);
ctx              1458 drivers/edac/xgene_edac.c 		writel(err_addr_hi, ctx->dev_csr + GLBL_MDED_ERRH);
ctx              1466 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1472 drivers/edac/xgene_edac.c 	if (!ctx->edac->rb_map)
ctx              1482 drivers/edac/xgene_edac.c 	if (regmap_read(ctx->edac->rb_map, RBCSR, &reg))
ctx              1489 drivers/edac/xgene_edac.c 		if (regmap_read(ctx->edac->rb_map, RBEIR, &reg))
ctx              1509 drivers/edac/xgene_edac.c 		if (regmap_write(ctx->edac->rb_map, RBEIR, 0))
ctx              1511 drivers/edac/xgene_edac.c 		if (regmap_write(ctx->edac->rb_map, RBCSR, 0))
ctx              1517 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + IOBBATRANSERRINTSTS);
ctx              1564 drivers/edac/xgene_edac.c 	err_addr_lo = readl(ctx->dev_csr + IOBBATRANSERRREQINFOL);
ctx              1565 drivers/edac/xgene_edac.c 	err_addr_hi = readl(ctx->dev_csr + IOBBATRANSERRREQINFOH);
ctx              1571 drivers/edac/xgene_edac.c 			readl(ctx->dev_csr + IOBBATRANSERRCSWREQID));
ctx              1572 drivers/edac/xgene_edac.c 	writel(reg, ctx->dev_csr + IOBBATRANSERRINTSTS);
ctx              1577 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1583 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + IOBPATRANSERRINTSTS);
ctx              1606 drivers/edac/xgene_edac.c 	writel(reg, ctx->dev_csr + IOBPATRANSERRINTSTS);
ctx              1610 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + IOBAXIS0TRANSERRINTSTS);
ctx              1613 drivers/edac/xgene_edac.c 	err_addr_lo = readl(ctx->dev_csr + IOBAXIS0TRANSERRREQINFOL);
ctx              1614 drivers/edac/xgene_edac.c 	err_addr_hi = readl(ctx->dev_csr + IOBAXIS0TRANSERRREQINFOH);
ctx              1620 drivers/edac/xgene_edac.c 	writel(reg, ctx->dev_csr + IOBAXIS0TRANSERRINTSTS);
ctx              1624 drivers/edac/xgene_edac.c 	reg = readl(ctx->dev_csr + IOBAXIS1TRANSERRINTSTS);
ctx              1627 drivers/edac/xgene_edac.c 	err_addr_lo = readl(ctx->dev_csr + IOBAXIS1TRANSERRREQINFOL);
ctx              1628 drivers/edac/xgene_edac.c 	err_addr_hi = readl(ctx->dev_csr + IOBAXIS1TRANSERRREQINFOH);
ctx              1634 drivers/edac/xgene_edac.c 	writel(reg, ctx->dev_csr + IOBAXIS1TRANSERRINTSTS);
ctx              1639 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1646 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, PCPHPERRINTSTS, &pcp_hp_stat);
ctx              1647 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, PCPLPERRINTSTS, &pcp_lp_stat);
ctx              1648 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx->edac, MEMERRINTSTS, &reg);
ctx              1671 drivers/edac/xgene_edac.c 	if (ctx->version == 1)
ctx              1692 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx = edac_dev->pvt_info;
ctx              1697 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPHPERRINTMSK,
ctx              1702 drivers/edac/xgene_edac.c 			xgene_edac_pcp_clrbits(ctx->edac, PCPLPERRINTMSK,
ctx              1705 drivers/edac/xgene_edac.c 			xgene_edac_pcp_setbits(ctx->edac, PCPHPERRINTMSK,
ctx              1710 drivers/edac/xgene_edac.c 			xgene_edac_pcp_setbits(ctx->edac, PCPLPERRINTMSK,
ctx              1715 drivers/edac/xgene_edac.c 		       ctx->dev_csr + IOBAXIS0TRANSERRINTMSK);
ctx              1717 drivers/edac/xgene_edac.c 		       ctx->dev_csr + IOBAXIS1TRANSERRINTMSK);
ctx              1719 drivers/edac/xgene_edac.c 		       ctx->dev_csr + XGICTRANSERRINTMSK);
ctx              1721 drivers/edac/xgene_edac.c 		xgene_edac_pcp_setbits(ctx->edac, MEMERRINTMSK,
ctx              1730 drivers/edac/xgene_edac.c 	struct xgene_edac_dev_ctx *ctx;
ctx              1753 drivers/edac/xgene_edac.c 	edac_dev = edac_device_alloc_ctl_info(sizeof(*ctx),
ctx              1761 drivers/edac/xgene_edac.c 	ctx = edac_dev->pvt_info;
ctx              1762 drivers/edac/xgene_edac.c 	ctx->dev_csr = dev_csr;
ctx              1763 drivers/edac/xgene_edac.c 	ctx->name = "xgene_soc_err";
ctx              1764 drivers/edac/xgene_edac.c 	ctx->edac_idx = edac_idx;
ctx              1765 drivers/edac/xgene_edac.c 	ctx->edac = edac;
ctx              1766 drivers/edac/xgene_edac.c 	ctx->edac_dev = edac_dev;
ctx              1767 drivers/edac/xgene_edac.c 	ctx->ddev = *edac->dev;
ctx              1768 drivers/edac/xgene_edac.c 	ctx->version = version;
ctx              1769 drivers/edac/xgene_edac.c 	edac_dev->dev = &ctx->ddev;
ctx              1770 drivers/edac/xgene_edac.c 	edac_dev->ctl_name = ctx->name;
ctx              1771 drivers/edac/xgene_edac.c 	edac_dev->dev_name = ctx->name;
ctx              1787 drivers/edac/xgene_edac.c 	list_add(&ctx->next, &edac->socs);
ctx              1816 drivers/edac/xgene_edac.c 	struct xgene_edac *ctx = dev_id;
ctx              1822 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx, PCPHPERRINTSTS, &pcp_hp_stat);
ctx              1823 drivers/edac/xgene_edac.c 	xgene_edac_pcp_rd(ctx, PCPLPERRINTSTS, &pcp_lp_stat);
ctx              1829 drivers/edac/xgene_edac.c 		list_for_each_entry(mcu, &ctx->mcus, next)
ctx              1833 drivers/edac/xgene_edac.c 	list_for_each_entry(pmd, &ctx->pmds, next) {
ctx              1838 drivers/edac/xgene_edac.c 	list_for_each_entry(node, &ctx->l3s, next)
ctx              1841 drivers/edac/xgene_edac.c 	list_for_each_entry(node, &ctx->socs, next)
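
xgene_edac_isr() reads the shared PCP status words once and then walks the lists of registered MCU, PMD, L3 and SoC blocks, letting each check whether its status bits are set. A toy model of that single-interrupt fan-out; it uses a fixed array and invented masks where the driver keeps linked lists per block type.

#include <stdio.h>

#define MAX_BLOCKS 4

struct edac_block {
        const char *name;
        unsigned long mask;             /* status bits this block owns */
        void (*check)(struct edac_block *blk);
};

static void block_check(struct edac_block *blk)
{
        printf("%s: checking for errors\n", blk->name);
}

static struct edac_block blocks[MAX_BLOCKS] = {
        { "MCU0", 1UL << 0, block_check },
        { "PMD0", 1UL << 8, block_check },
};

/* one shared interrupt fans out to every block whose bits are raised */
static void edac_isr(unsigned long pcp_stat)
{
        for (int i = 0; i < MAX_BLOCKS; i++) {
                struct edac_block *blk = &blocks[i];

                if (blk->check && (pcp_stat & blk->mask))
                        blk->check(blk);
        }
}

int main(void)
{
        edac_isr((1UL << 0) | (1UL << 8));      /* both blocks report */
        return 0;
}
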
ctx               619 drivers/firewire/core-card.c static int dummy_start_iso(struct fw_iso_context *ctx,
ctx               625 drivers/firewire/core-card.c static int dummy_set_iso_channels(struct fw_iso_context *ctx, u64 *channels)
ctx               630 drivers/firewire/core-card.c static int dummy_queue_iso(struct fw_iso_context *ctx, struct fw_iso_packet *p,
ctx               636 drivers/firewire/core-card.c static void dummy_flush_queue_iso(struct fw_iso_context *ctx)
ctx               640 drivers/firewire/core-card.c static int dummy_flush_iso_completions(struct fw_iso_context *ctx)
ctx              1031 drivers/firewire/core-cdev.c 	struct fw_iso_context *ctx = client->iso_context;
ctx              1033 drivers/firewire/core-cdev.c 	if (ctx == NULL || a->handle != 0)
ctx              1036 drivers/firewire/core-cdev.c 	return fw_iso_context_set_channels(ctx, &a->channels);
ctx              1051 drivers/firewire/core-cdev.c 	struct fw_iso_context *ctx = client->iso_context;
ctx              1060 drivers/firewire/core-cdev.c 	if (ctx == NULL || a->handle != 0)
ctx              1080 drivers/firewire/core-cdev.c 	if (ctx->type == FW_ISO_CONTEXT_RECEIVE_MULTICHANNEL && payload & 3)
ctx              1099 drivers/firewire/core-cdev.c 		switch (ctx->type) {
ctx              1108 drivers/firewire/core-cdev.c 			    u.packet.header_length % ctx->header_size != 0)
ctx              1126 drivers/firewire/core-cdev.c 		if (u.packet.skip && ctx->type == FW_ISO_CONTEXT_TRANSMIT &&
ctx              1132 drivers/firewire/core-cdev.c 		if (fw_iso_context_queue(ctx, &u.packet,
ctx              1140 drivers/firewire/core-cdev.c 	fw_iso_context_queue_flush(ctx);
ctx               143 drivers/firewire/core-iso.c 	struct fw_iso_context *ctx;
ctx               145 drivers/firewire/core-iso.c 	ctx = card->driver->allocate_iso_context(card,
ctx               147 drivers/firewire/core-iso.c 	if (IS_ERR(ctx))
ctx               148 drivers/firewire/core-iso.c 		return ctx;
ctx               150 drivers/firewire/core-iso.c 	ctx->card = card;
ctx               151 drivers/firewire/core-iso.c 	ctx->type = type;
ctx               152 drivers/firewire/core-iso.c 	ctx->channel = channel;
ctx               153 drivers/firewire/core-iso.c 	ctx->speed = speed;
ctx               154 drivers/firewire/core-iso.c 	ctx->header_size = header_size;
ctx               155 drivers/firewire/core-iso.c 	ctx->callback.sc = callback;
ctx               156 drivers/firewire/core-iso.c 	ctx->callback_data = callback_data;
ctx               158 drivers/firewire/core-iso.c 	return ctx;
ctx               162 drivers/firewire/core-iso.c void fw_iso_context_destroy(struct fw_iso_context *ctx)
ctx               164 drivers/firewire/core-iso.c 	ctx->card->driver->free_iso_context(ctx);
ctx               168 drivers/firewire/core-iso.c int fw_iso_context_start(struct fw_iso_context *ctx,
ctx               171 drivers/firewire/core-iso.c 	return ctx->card->driver->start_iso(ctx, cycle, sync, tags);
ctx               175 drivers/firewire/core-iso.c int fw_iso_context_set_channels(struct fw_iso_context *ctx, u64 *channels)
ctx               177 drivers/firewire/core-iso.c 	return ctx->card->driver->set_iso_channels(ctx, channels);
ctx               180 drivers/firewire/core-iso.c int fw_iso_context_queue(struct fw_iso_context *ctx,
ctx               185 drivers/firewire/core-iso.c 	return ctx->card->driver->queue_iso(ctx, packet, buffer, payload);
ctx               189 drivers/firewire/core-iso.c void fw_iso_context_queue_flush(struct fw_iso_context *ctx)
ctx               191 drivers/firewire/core-iso.c 	ctx->card->driver->flush_queue_iso(ctx);
ctx               195 drivers/firewire/core-iso.c int fw_iso_context_flush_completions(struct fw_iso_context *ctx)
ctx               197 drivers/firewire/core-iso.c 	return ctx->card->driver->flush_iso_completions(ctx);
ctx               201 drivers/firewire/core-iso.c int fw_iso_context_stop(struct fw_iso_context *ctx)
ctx               203 drivers/firewire/core-iso.c 	return ctx->card->driver->stop_iso(ctx);
ctx                97 drivers/firewire/core.h 	void (*free_iso_context)(struct fw_iso_context *ctx);
ctx                99 drivers/firewire/core.h 	int (*start_iso)(struct fw_iso_context *ctx,
ctx               102 drivers/firewire/core.h 	int (*set_iso_channels)(struct fw_iso_context *ctx, u64 *channels);
ctx               104 drivers/firewire/core.h 	int (*queue_iso)(struct fw_iso_context *ctx,
ctx               109 drivers/firewire/core.h 	void (*flush_queue_iso)(struct fw_iso_context *ctx);
ctx               111 drivers/firewire/core.h 	int (*flush_iso_completions)(struct fw_iso_context *ctx);
ctx               113 drivers/firewire/core.h 	int (*stop_iso)(struct fw_iso_context *ctx);
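
core-iso.c and core.h together form a thin dispatch layer: every fw_iso_context_* entry point forwards through ctx->card->driver, and core-card.c installs dummy ops that merely fail when no controller is bound. A self-contained sketch of that vtable pattern; the struct names (driver_ops, iso_ctx, card) are illustrative, not the firewire ones:

    #include <stdio.h>

    struct iso_ctx;                                  /* forward declaration */

    struct driver_ops {                              /* per-hardware vtable */
        int  (*start)(struct iso_ctx *ctx, int cycle);
        void (*stop)(struct iso_ctx *ctx);
    };

    struct card    { const struct driver_ops *driver; };
    struct iso_ctx { struct card *card; int channel; };

    /* Generic wrappers: no hardware knowledge, pure forwarding. */
    static int iso_ctx_start(struct iso_ctx *ctx, int cycle)
    {
        return ctx->card->driver->start(ctx, cycle);
    }
    static void iso_ctx_stop(struct iso_ctx *ctx)
    {
        ctx->card->driver->stop(ctx);
    }

    /* A "dummy" backend, like the ones core-card.c installs when no
     * controller driver is bound: every operation just refuses. */
    static int  dummy_start(struct iso_ctx *ctx, int cycle) { (void)ctx; (void)cycle; return -1; }
    static void dummy_stop(struct iso_ctx *ctx)             { (void)ctx; }

    static const struct driver_ops dummy_driver = { dummy_start, dummy_stop };

    int main(void)
    {
        struct card c = { &dummy_driver };
        struct iso_ctx ctx = { &c, 7 };
        printf("start -> %d\n", iso_ctx_start(&ctx, 0));  /* dummy refuses: -1 */
        iso_ctx_stop(&ctx);
        return 0;
    }

The point of the shape is that the core layer never needs to know which controller is present; swapping the ops pointer swaps the whole backend.
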
ctx               101 drivers/firewire/ohci.c typedef int (*descriptor_callback_t)(struct context *ctx,
ctx               652 drivers/firewire/ohci.c static inline dma_addr_t ar_buffer_bus(struct ar_context *ctx, unsigned int i)
ctx               654 drivers/firewire/ohci.c 	return page_private(ctx->pages[i]);
ctx               657 drivers/firewire/ohci.c static void ar_context_link_page(struct ar_context *ctx, unsigned int index)
ctx               661 drivers/firewire/ohci.c 	d = &ctx->descriptors[index];
ctx               667 drivers/firewire/ohci.c 	d = &ctx->descriptors[ctx->last_buffer_index];
ctx               670 drivers/firewire/ohci.c 	ctx->last_buffer_index = index;
ctx               672 drivers/firewire/ohci.c 	reg_write(ctx->ohci, CONTROL_SET(ctx->regs), CONTEXT_WAKE);
ctx               675 drivers/firewire/ohci.c static void ar_context_release(struct ar_context *ctx)
ctx               679 drivers/firewire/ohci.c 	vunmap(ctx->buffer);
ctx               682 drivers/firewire/ohci.c 		if (ctx->pages[i]) {
ctx               683 drivers/firewire/ohci.c 			dma_unmap_page(ctx->ohci->card.device,
ctx               684 drivers/firewire/ohci.c 				       ar_buffer_bus(ctx, i),
ctx               686 drivers/firewire/ohci.c 			__free_page(ctx->pages[i]);
ctx               690 drivers/firewire/ohci.c static void ar_context_abort(struct ar_context *ctx, const char *error_msg)
ctx               692 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->ohci;
ctx               694 drivers/firewire/ohci.c 	if (reg_read(ohci, CONTROL_CLEAR(ctx->regs)) & CONTEXT_RUN) {
ctx               695 drivers/firewire/ohci.c 		reg_write(ohci, CONTROL_CLEAR(ctx->regs), CONTEXT_RUN);
ctx               708 drivers/firewire/ohci.c static inline unsigned int ar_first_buffer_index(struct ar_context *ctx)
ctx               710 drivers/firewire/ohci.c 	return ar_next_buffer_index(ctx->last_buffer_index);
ctx               717 drivers/firewire/ohci.c static unsigned int ar_search_last_active_buffer(struct ar_context *ctx,
ctx               720 drivers/firewire/ohci.c 	unsigned int i, next_i, last = ctx->last_buffer_index;
ctx               723 drivers/firewire/ohci.c 	i = ar_first_buffer_index(ctx);
ctx               724 drivers/firewire/ohci.c 	res_count = READ_ONCE(ctx->descriptors[i].res_count);
ctx               732 drivers/firewire/ohci.c 		next_res_count = READ_ONCE(ctx->descriptors[next_i].res_count);
ctx               748 drivers/firewire/ohci.c 				next_res_count = READ_ONCE(ctx->descriptors[next_i].res_count);
ctx               766 drivers/firewire/ohci.c 		ar_context_abort(ctx, "corrupted descriptor");
ctx               772 drivers/firewire/ohci.c static void ar_sync_buffers_for_cpu(struct ar_context *ctx,
ctx               778 drivers/firewire/ohci.c 	i = ar_first_buffer_index(ctx);
ctx               780 drivers/firewire/ohci.c 		dma_sync_single_for_cpu(ctx->ohci->card.device,
ctx               781 drivers/firewire/ohci.c 					ar_buffer_bus(ctx, i),
ctx               786 drivers/firewire/ohci.c 		dma_sync_single_for_cpu(ctx->ohci->card.device,
ctx               787 drivers/firewire/ohci.c 					ar_buffer_bus(ctx, i),
ctx               798 drivers/firewire/ohci.c static __le32 *handle_ar_packet(struct ar_context *ctx, __le32 *buffer)
ctx               800 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->ohci;
ctx               832 drivers/firewire/ohci.c 			ar_context_abort(ctx, "invalid packet length");
ctx               845 drivers/firewire/ohci.c 		ar_context_abort(ctx, "invalid tcode");
ctx               887 drivers/firewire/ohci.c 	} else if (ctx == &ohci->ar_request_ctx) {
ctx               896 drivers/firewire/ohci.c static void *handle_ar_packets(struct ar_context *ctx, void *p, void *end)
ctx               901 drivers/firewire/ohci.c 		next = handle_ar_packet(ctx, p);
ctx               910 drivers/firewire/ohci.c static void ar_recycle_buffers(struct ar_context *ctx, unsigned int end_buffer)
ctx               914 drivers/firewire/ohci.c 	i = ar_first_buffer_index(ctx);
ctx               916 drivers/firewire/ohci.c 		dma_sync_single_for_device(ctx->ohci->card.device,
ctx               917 drivers/firewire/ohci.c 					   ar_buffer_bus(ctx, i),
ctx               919 drivers/firewire/ohci.c 		ar_context_link_page(ctx, i);
ctx               926 drivers/firewire/ohci.c 	struct ar_context *ctx = (struct ar_context *)data;
ctx               930 drivers/firewire/ohci.c 	p = ctx->pointer;
ctx               934 drivers/firewire/ohci.c 	end_buffer_index = ar_search_last_active_buffer(ctx,
ctx               936 drivers/firewire/ohci.c 	ar_sync_buffers_for_cpu(ctx, end_buffer_index, end_buffer_offset);
ctx               937 drivers/firewire/ohci.c 	end = ctx->buffer + end_buffer_index * PAGE_SIZE + end_buffer_offset;
ctx               939 drivers/firewire/ohci.c 	if (end_buffer_index < ar_first_buffer_index(ctx)) {
ctx               946 drivers/firewire/ohci.c 		void *buffer_end = ctx->buffer + AR_BUFFERS * PAGE_SIZE;
ctx               947 drivers/firewire/ohci.c 		p = handle_ar_packets(ctx, p, buffer_end);
ctx               954 drivers/firewire/ohci.c 	p = handle_ar_packets(ctx, p, end);
ctx               957 drivers/firewire/ohci.c 			ar_context_abort(ctx, "inconsistent descriptor");
ctx               961 drivers/firewire/ohci.c 	ctx->pointer = p;
ctx               962 drivers/firewire/ohci.c 	ar_recycle_buffers(ctx, end_buffer_index);
ctx               967 drivers/firewire/ohci.c 	ctx->pointer = NULL;
ctx               970 drivers/firewire/ohci.c static int ar_context_init(struct ar_context *ctx, struct fw_ohci *ohci,
ctx               978 drivers/firewire/ohci.c 	ctx->regs        = regs;
ctx               979 drivers/firewire/ohci.c 	ctx->ohci        = ohci;
ctx               980 drivers/firewire/ohci.c 	tasklet_init(&ctx->tasklet, ar_context_tasklet, (unsigned long)ctx);
ctx               983 drivers/firewire/ohci.c 		ctx->pages[i] = alloc_page(GFP_KERNEL | GFP_DMA32);
ctx               984 drivers/firewire/ohci.c 		if (!ctx->pages[i])
ctx               986 drivers/firewire/ohci.c 		dma_addr = dma_map_page(ohci->card.device, ctx->pages[i],
ctx               989 drivers/firewire/ohci.c 			__free_page(ctx->pages[i]);
ctx               990 drivers/firewire/ohci.c 			ctx->pages[i] = NULL;
ctx               993 drivers/firewire/ohci.c 		set_page_private(ctx->pages[i], dma_addr);
ctx               997 drivers/firewire/ohci.c 		pages[i]              = ctx->pages[i];
ctx               999 drivers/firewire/ohci.c 		pages[AR_BUFFERS + i] = ctx->pages[i];
ctx              1000 drivers/firewire/ohci.c 	ctx->buffer = vmap(pages, ARRAY_SIZE(pages), VM_MAP, PAGE_KERNEL);
ctx              1001 drivers/firewire/ohci.c 	if (!ctx->buffer)
ctx              1004 drivers/firewire/ohci.c 	ctx->descriptors     = ohci->misc_buffer     + descriptors_offset;
ctx              1005 drivers/firewire/ohci.c 	ctx->descriptors_bus = ohci->misc_buffer_bus + descriptors_offset;
ctx              1008 drivers/firewire/ohci.c 		d = &ctx->descriptors[i];
ctx              1013 drivers/firewire/ohci.c 		d->data_address   = cpu_to_le32(ar_buffer_bus(ctx, i));
ctx              1014 drivers/firewire/ohci.c 		d->branch_address = cpu_to_le32(ctx->descriptors_bus +
ctx              1021 drivers/firewire/ohci.c 	ar_context_release(ctx);
ctx              1026 drivers/firewire/ohci.c static void ar_context_run(struct ar_context *ctx)
ctx              1031 drivers/firewire/ohci.c 		ar_context_link_page(ctx, i);
ctx              1033 drivers/firewire/ohci.c 	ctx->pointer = ctx->buffer;
ctx              1035 drivers/firewire/ohci.c 	reg_write(ctx->ohci, COMMAND_PTR(ctx->regs), ctx->descriptors_bus | 1);
ctx              1036 drivers/firewire/ohci.c 	reg_write(ctx->ohci, CONTROL_SET(ctx->regs), CONTEXT_RUN);
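
The AR (asynchronous receive) context in ohci.c keeps AR_BUFFERS pages in a logical ring: ar_first_buffer_index() is simply the index after last_buffer_index, and ar_search_last_active_buffer() walks forward until it finds a descriptor whose res_count (bytes the controller has not yet written) is nonzero. A toy model of that walk, assuming a 16-byte "page" and hand-set res_count values; the real code additionally re-reads res_count with barriers to cope with the controller racing ahead, which the toy skips:

    #include <stdio.h>

    #define AR_BUFFERS 4
    #define PAGE_SZ    16

    static unsigned next_index(unsigned i) { return (i + 1) % AR_BUFFERS; }

    int main(void)
    {
        /* res_count == 0 means the DMA engine filled the whole page;
         * a nonzero value marks the page where the data currently ends. */
        unsigned res_count[AR_BUFFERS] = { 0, 0, 5, PAGE_SZ };
        unsigned last = 3;                 /* newest page handed to the hardware */
        unsigned i = next_index(last);     /* oldest page comes right after it */

        while (res_count[i] == 0 && i != last)
            i = next_index(i);

        printf("data ends in buffer %u at offset %u\n",
               i, PAGE_SZ - res_count[i]);
        return 0;
    }
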
ctx              1054 drivers/firewire/ohci.c 	struct context *ctx = (struct context *) data;
ctx              1060 drivers/firewire/ohci.c 	desc = list_entry(ctx->buffer_list.next,
ctx              1062 drivers/firewire/ohci.c 	last = ctx->last;
ctx              1068 drivers/firewire/ohci.c 		ctx->current_bus = address;
ctx              1079 drivers/firewire/ohci.c 		if (!ctx->callback(ctx, d, last))
ctx              1087 drivers/firewire/ohci.c 			spin_lock_irqsave(&ctx->ohci->lock, flags);
ctx              1088 drivers/firewire/ohci.c 			list_move_tail(&old_desc->list, &ctx->buffer_list);
ctx              1089 drivers/firewire/ohci.c 			spin_unlock_irqrestore(&ctx->ohci->lock, flags);
ctx              1091 drivers/firewire/ohci.c 		ctx->last = last;
ctx              1099 drivers/firewire/ohci.c static int context_add_buffer(struct context *ctx)
ctx              1109 drivers/firewire/ohci.c 	if (ctx->total_allocation >= 16*1024*1024)
ctx              1112 drivers/firewire/ohci.c 	desc = dma_alloc_coherent(ctx->ohci->card.device, PAGE_SIZE,
ctx              1128 drivers/firewire/ohci.c 	list_add_tail(&desc->list, &ctx->buffer_list);
ctx              1129 drivers/firewire/ohci.c 	ctx->total_allocation += PAGE_SIZE;
ctx              1134 drivers/firewire/ohci.c static int context_init(struct context *ctx, struct fw_ohci *ohci,
ctx              1137 drivers/firewire/ohci.c 	ctx->ohci = ohci;
ctx              1138 drivers/firewire/ohci.c 	ctx->regs = regs;
ctx              1139 drivers/firewire/ohci.c 	ctx->total_allocation = 0;
ctx              1141 drivers/firewire/ohci.c 	INIT_LIST_HEAD(&ctx->buffer_list);
ctx              1142 drivers/firewire/ohci.c 	if (context_add_buffer(ctx) < 0)
ctx              1145 drivers/firewire/ohci.c 	ctx->buffer_tail = list_entry(ctx->buffer_list.next,
ctx              1148 drivers/firewire/ohci.c 	tasklet_init(&ctx->tasklet, context_tasklet, (unsigned long)ctx);
ctx              1149 drivers/firewire/ohci.c 	ctx->callback = callback;
ctx              1156 drivers/firewire/ohci.c 	memset(ctx->buffer_tail->buffer, 0, sizeof(*ctx->buffer_tail->buffer));
ctx              1157 drivers/firewire/ohci.c 	ctx->buffer_tail->buffer->control = cpu_to_le16(DESCRIPTOR_OUTPUT_LAST);
ctx              1158 drivers/firewire/ohci.c 	ctx->buffer_tail->buffer->transfer_status = cpu_to_le16(0x8011);
ctx              1159 drivers/firewire/ohci.c 	ctx->buffer_tail->used += sizeof(*ctx->buffer_tail->buffer);
ctx              1160 drivers/firewire/ohci.c 	ctx->last = ctx->buffer_tail->buffer;
ctx              1161 drivers/firewire/ohci.c 	ctx->prev = ctx->buffer_tail->buffer;
ctx              1162 drivers/firewire/ohci.c 	ctx->prev_z = 1;
ctx              1167 drivers/firewire/ohci.c static void context_release(struct context *ctx)
ctx              1169 drivers/firewire/ohci.c 	struct fw_card *card = &ctx->ohci->card;
ctx              1172 drivers/firewire/ohci.c 	list_for_each_entry_safe(desc, tmp, &ctx->buffer_list, list)
ctx              1179 drivers/firewire/ohci.c static struct descriptor *context_get_descriptors(struct context *ctx,
ctx              1183 drivers/firewire/ohci.c 	struct descriptor_buffer *desc = ctx->buffer_tail;
ctx              1192 drivers/firewire/ohci.c 		if (desc->list.next == &ctx->buffer_list) {
ctx              1195 drivers/firewire/ohci.c 			if (context_add_buffer(ctx) < 0)
ctx              1200 drivers/firewire/ohci.c 		ctx->buffer_tail = desc;
ctx              1210 drivers/firewire/ohci.c static void context_run(struct context *ctx, u32 extra)
ctx              1212 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->ohci;
ctx              1214 drivers/firewire/ohci.c 	reg_write(ohci, COMMAND_PTR(ctx->regs),
ctx              1215 drivers/firewire/ohci.c 		  le32_to_cpu(ctx->last->branch_address));
ctx              1216 drivers/firewire/ohci.c 	reg_write(ohci, CONTROL_CLEAR(ctx->regs), ~0);
ctx              1217 drivers/firewire/ohci.c 	reg_write(ohci, CONTROL_SET(ctx->regs), CONTEXT_RUN | extra);
ctx              1218 drivers/firewire/ohci.c 	ctx->running = true;
ctx              1222 drivers/firewire/ohci.c static void context_append(struct context *ctx,
ctx              1226 drivers/firewire/ohci.c 	struct descriptor_buffer *desc = ctx->buffer_tail;
ctx              1235 drivers/firewire/ohci.c 	d_branch = find_branch_descriptor(ctx->prev, ctx->prev_z);
ctx              1247 drivers/firewire/ohci.c 	if (unlikely(ctx->ohci->quirks & QUIRK_IR_WAKE) &&
ctx              1248 drivers/firewire/ohci.c 	    d_branch != ctx->prev &&
ctx              1249 drivers/firewire/ohci.c 	    (ctx->prev->control & cpu_to_le16(DESCRIPTOR_CMD)) ==
ctx              1251 drivers/firewire/ohci.c 		ctx->prev->branch_address = cpu_to_le32(d_bus | z);
ctx              1254 drivers/firewire/ohci.c 	ctx->prev = d;
ctx              1255 drivers/firewire/ohci.c 	ctx->prev_z = z;
ctx              1258 drivers/firewire/ohci.c static void context_stop(struct context *ctx)
ctx              1260 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->ohci;
ctx              1264 drivers/firewire/ohci.c 	reg_write(ohci, CONTROL_CLEAR(ctx->regs), CONTEXT_RUN);
ctx              1265 drivers/firewire/ohci.c 	ctx->running = false;
ctx              1268 drivers/firewire/ohci.c 		reg = reg_read(ohci, CONTROL_SET(ctx->regs));
ctx              1288 drivers/firewire/ohci.c static int at_context_queue_packet(struct context *ctx,
ctx              1291 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->ohci;
ctx              1298 drivers/firewire/ohci.c 	d = context_get_descriptors(ctx, 4, &d_bus);
ctx              1408 drivers/firewire/ohci.c 	context_append(ctx, d, z, 4 - z);
ctx              1410 drivers/firewire/ohci.c 	if (ctx->running)
ctx              1411 drivers/firewire/ohci.c 		reg_write(ohci, CONTROL_SET(ctx->regs), CONTEXT_WAKE);
ctx              1413 drivers/firewire/ohci.c 		context_run(ctx, 0);
ctx              1418 drivers/firewire/ohci.c static void at_context_flush(struct context *ctx)
ctx              1420 drivers/firewire/ohci.c 	tasklet_disable(&ctx->tasklet);
ctx              1422 drivers/firewire/ohci.c 	ctx->flushing = true;
ctx              1423 drivers/firewire/ohci.c 	context_tasklet((unsigned long)ctx);
ctx              1424 drivers/firewire/ohci.c 	ctx->flushing = false;
ctx              1426 drivers/firewire/ohci.c 	tasklet_enable(&ctx->tasklet);
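
context_get_descriptors() and context_add_buffer() amount to a bump allocator over a list of coherent buffers: descriptors are carved out of the tail buffer, a new buffer is appended when the tail cannot hold the request, and total allocation is capped (16 MiB in the driver). The same shape in plain C, with a tiny buffer size and cap standing in for PAGE_SIZE and the real limit:

    #include <stdio.h>
    #include <stdlib.h>

    #define BUF_DESCS 4   /* descriptors per buffer; stands in for PAGE_SIZE */
    #define MAX_BUFS  2   /* stands in for the 16 MiB total-allocation cap */

    struct dbuf {
        struct dbuf *next;
        int used;                 /* descriptors already handed out */
        int desc[BUF_DESCS];
    };

    struct context {
        struct dbuf *head, *tail;
        int nbufs;
    };

    static int add_buffer(struct context *ctx)
    {
        struct dbuf *b;

        if (ctx->nbufs >= MAX_BUFS)
            return -1;            /* honour the cap, as context_add_buffer() does */
        b = calloc(1, sizeof(*b));
        if (!b)
            return -1;
        if (ctx->tail)
            ctx->tail->next = b;
        else
            ctx->head = b;
        ctx->tail = b;
        ctx->nbufs++;
        return 0;
    }

    /* Hand out z contiguous descriptors, growing the buffer list on demand. */
    static int *get_descriptors(struct context *ctx, int z)
    {
        if (!ctx->tail || ctx->tail->used + z > BUF_DESCS)
            if (add_buffer(ctx) < 0)
                return NULL;
        int *d = &ctx->tail->desc[ctx->tail->used];
        ctx->tail->used += z;
        return d;
    }

    int main(void)
    {
        struct context ctx = { 0 };
        for (int i = 0; ; i++) {
            if (!get_descriptors(&ctx, 3)) {
                printf("cap reached after %d allocations\n", i);
                break;
            }
        }
        return 0;
    }
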
ctx              1591 drivers/firewire/ohci.c static void handle_local_request(struct context *ctx, struct fw_packet *packet)
ctx              1595 drivers/firewire/ohci.c 	if (ctx == &ctx->ohci->at_request_ctx) {
ctx              1597 drivers/firewire/ohci.c 		packet->callback(packet, &ctx->ohci->card, packet->ack);
ctx              1608 drivers/firewire/ohci.c 		handle_local_rom(ctx->ohci, packet, csr);
ctx              1614 drivers/firewire/ohci.c 		handle_local_lock(ctx->ohci, packet, csr);
ctx              1617 drivers/firewire/ohci.c 		if (ctx == &ctx->ohci->at_request_ctx)
ctx              1618 drivers/firewire/ohci.c 			fw_core_handle_request(&ctx->ohci->card, packet);
ctx              1620 drivers/firewire/ohci.c 			fw_core_handle_response(&ctx->ohci->card, packet);
ctx              1624 drivers/firewire/ohci.c 	if (ctx == &ctx->ohci->at_response_ctx) {
ctx              1626 drivers/firewire/ohci.c 		packet->callback(packet, &ctx->ohci->card, packet->ack);
ctx              1630 drivers/firewire/ohci.c static void at_context_transmit(struct context *ctx, struct fw_packet *packet)
ctx              1635 drivers/firewire/ohci.c 	spin_lock_irqsave(&ctx->ohci->lock, flags);
ctx              1637 drivers/firewire/ohci.c 	if (HEADER_GET_DESTINATION(packet->header[0]) == ctx->ohci->node_id &&
ctx              1638 drivers/firewire/ohci.c 	    ctx->ohci->generation == packet->generation) {
ctx              1639 drivers/firewire/ohci.c 		spin_unlock_irqrestore(&ctx->ohci->lock, flags);
ctx              1640 drivers/firewire/ohci.c 		handle_local_request(ctx, packet);
ctx              1644 drivers/firewire/ohci.c 	ret = at_context_queue_packet(ctx, packet);
ctx              1645 drivers/firewire/ohci.c 	spin_unlock_irqrestore(&ctx->ohci->lock, flags);
ctx              1648 drivers/firewire/ohci.c 		packet->callback(packet, &ctx->ohci->card, packet->ack);
ctx              2548 drivers/firewire/ohci.c 	struct context *ctx = &ohci->at_request_ctx;
ctx              2552 drivers/firewire/ohci.c 	tasklet_disable(&ctx->tasklet);
ctx              2567 drivers/firewire/ohci.c 	tasklet_enable(&ctx->tasklet);
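
at_context_transmit() short-circuits requests addressed to the local node in the current bus generation: they are handed to handle_local_request() in software and never queued to the AT DMA context. A minimal sketch of that routing decision; the spinlock and real node-ID encoding are simplified away:

    #include <stdio.h>
    #include <stdbool.h>

    struct packet { unsigned dest; unsigned generation; };
    struct at_ctx { unsigned node_id; unsigned generation; };

    /* Route a request either back into the local stack or to the
     * hardware queue, mirroring the destination/generation test above. */
    static void transmit(struct at_ctx *ctx, struct packet *p)
    {
        bool local = (p->dest == ctx->node_id) &&
                     (p->generation == ctx->generation);
        if (local)
            puts("handled locally, no bus traffic");
        else
            puts("queued to the AT DMA context");
    }

    int main(void)
    {
        struct at_ctx ctx = { .node_id = 0xffc0, .generation = 3 };
        struct packet to_self = { 0xffc0, 3 };
        struct packet to_peer = { 0xffc1, 3 };
        transmit(&ctx, &to_self);
        transmit(&ctx, &to_peer);
        return 0;
    }
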
ctx              2726 drivers/firewire/ohci.c static void flush_iso_completions(struct iso_context *ctx)
ctx              2728 drivers/firewire/ohci.c 	ctx->base.callback.sc(&ctx->base, ctx->last_timestamp,
ctx              2729 drivers/firewire/ohci.c 			      ctx->header_length, ctx->header,
ctx              2730 drivers/firewire/ohci.c 			      ctx->base.callback_data);
ctx              2731 drivers/firewire/ohci.c 	ctx->header_length = 0;
ctx              2734 drivers/firewire/ohci.c static void copy_iso_headers(struct iso_context *ctx, const u32 *dma_hdr)
ctx              2738 drivers/firewire/ohci.c 	if (ctx->header_length + ctx->base.header_size > PAGE_SIZE) {
ctx              2739 drivers/firewire/ohci.c 		if (ctx->base.drop_overflow_headers)
ctx              2741 drivers/firewire/ohci.c 		flush_iso_completions(ctx);
ctx              2744 drivers/firewire/ohci.c 	ctx_hdr = ctx->header + ctx->header_length;
ctx              2745 drivers/firewire/ohci.c 	ctx->last_timestamp = (u16)le32_to_cpu((__force __le32)dma_hdr[0]);
ctx              2752 drivers/firewire/ohci.c 	if (ctx->base.header_size > 0)
ctx              2754 drivers/firewire/ohci.c 	if (ctx->base.header_size > 4)
ctx              2756 drivers/firewire/ohci.c 	if (ctx->base.header_size > 8)
ctx              2757 drivers/firewire/ohci.c 		memcpy(&ctx_hdr[2], &dma_hdr[2], ctx->base.header_size - 8);
ctx              2758 drivers/firewire/ohci.c 	ctx->header_length += ctx->base.header_size;
ctx              2765 drivers/firewire/ohci.c 	struct iso_context *ctx =
ctx              2787 drivers/firewire/ohci.c 	copy_iso_headers(ctx, (u32 *) (last + 1));
ctx              2790 drivers/firewire/ohci.c 		flush_iso_completions(ctx);
ctx              2800 drivers/firewire/ohci.c 	struct iso_context *ctx =
ctx              2811 drivers/firewire/ohci.c 		ctx->mc_buffer_bus = buffer_dma;
ctx              2812 drivers/firewire/ohci.c 		ctx->mc_completed = completed;
ctx              2825 drivers/firewire/ohci.c 		ctx->base.callback.mc(&ctx->base,
ctx              2827 drivers/firewire/ohci.c 				      ctx->base.callback_data);
ctx              2828 drivers/firewire/ohci.c 		ctx->mc_completed = 0;
ctx              2834 drivers/firewire/ohci.c static void flush_ir_buffer_fill(struct iso_context *ctx)
ctx              2836 drivers/firewire/ohci.c 	dma_sync_single_range_for_cpu(ctx->context.ohci->card.device,
ctx              2837 drivers/firewire/ohci.c 				      ctx->mc_buffer_bus & PAGE_MASK,
ctx              2838 drivers/firewire/ohci.c 				      ctx->mc_buffer_bus & ~PAGE_MASK,
ctx              2839 drivers/firewire/ohci.c 				      ctx->mc_completed, DMA_FROM_DEVICE);
ctx              2841 drivers/firewire/ohci.c 	ctx->base.callback.mc(&ctx->base,
ctx              2842 drivers/firewire/ohci.c 			      ctx->mc_buffer_bus + ctx->mc_completed,
ctx              2843 drivers/firewire/ohci.c 			      ctx->base.callback_data);
ctx              2844 drivers/firewire/ohci.c 	ctx->mc_completed = 0;
ctx              2888 drivers/firewire/ohci.c 	struct iso_context *ctx =
ctx              2902 drivers/firewire/ohci.c 	if (ctx->header_length + 4 > PAGE_SIZE) {
ctx              2903 drivers/firewire/ohci.c 		if (ctx->base.drop_overflow_headers)
ctx              2905 drivers/firewire/ohci.c 		flush_iso_completions(ctx);
ctx              2908 drivers/firewire/ohci.c 	ctx_hdr = ctx->header + ctx->header_length;
ctx              2909 drivers/firewire/ohci.c 	ctx->last_timestamp = le16_to_cpu(last->res_count);
ctx              2913 drivers/firewire/ohci.c 	ctx->header_length += 4;
ctx              2916 drivers/firewire/ohci.c 		flush_iso_completions(ctx);
ctx              2936 drivers/firewire/ohci.c 	struct iso_context *uninitialized_var(ctx);
ctx              2952 drivers/firewire/ohci.c 			ctx  = &ohci->it_context_list[index];
ctx              2965 drivers/firewire/ohci.c 			ctx  = &ohci->ir_context_list[index];
ctx              2977 drivers/firewire/ohci.c 			ctx  = &ohci->ir_context_list[index];
ctx              2991 drivers/firewire/ohci.c 	memset(ctx, 0, sizeof(*ctx));
ctx              2992 drivers/firewire/ohci.c 	ctx->header_length = 0;
ctx              2993 drivers/firewire/ohci.c 	ctx->header = (void *) __get_free_page(GFP_KERNEL);
ctx              2994 drivers/firewire/ohci.c 	if (ctx->header == NULL) {
ctx              2998 drivers/firewire/ohci.c 	ret = context_init(&ctx->context, ohci, regs, callback);
ctx              3004 drivers/firewire/ohci.c 		ctx->mc_completed = 0;
ctx              3007 drivers/firewire/ohci.c 	return &ctx->base;
ctx              3010 drivers/firewire/ohci.c 	free_page((unsigned long)ctx->header);
ctx              3033 drivers/firewire/ohci.c 	struct iso_context *ctx = container_of(base, struct iso_context, base);
ctx              3034 drivers/firewire/ohci.c 	struct fw_ohci *ohci = ctx->context.ohci;
ctx              3039 drivers/firewire/ohci.c 	if (ctx->context.last->branch_address == 0)
ctx              3042 drivers/firewire/ohci.c 	switch (ctx->base.type) {
ctx              3044 drivers/firewire/ohci.c 		index = ctx - ohci->it_context_list;
ctx              3052 drivers/firewire/ohci.c 		context_run(&ctx->context, match);
ctx              3059 drivers/firewire/ohci.c 		index = ctx - ohci->ir_context_list;
ctx              3060 drivers/firewire/ohci.c 		match = (tags << 28) | (sync << 8) | ctx->base.channel;
ctx              3068 drivers/firewire/ohci.c 		reg_write(ohci, CONTEXT_MATCH(ctx->context.regs), match);
ctx              3069 drivers/firewire/ohci.c 		context_run(&ctx->context, control);
ctx              3071 drivers/firewire/ohci.c 		ctx->sync = sync;
ctx              3072 drivers/firewire/ohci.c 		ctx->tags = tags;
ctx              3083 drivers/firewire/ohci.c 	struct iso_context *ctx = container_of(base, struct iso_context, base);
ctx              3086 drivers/firewire/ohci.c 	switch (ctx->base.type) {
ctx              3088 drivers/firewire/ohci.c 		index = ctx - ohci->it_context_list;
ctx              3094 drivers/firewire/ohci.c 		index = ctx - ohci->ir_context_list;
ctx              3099 drivers/firewire/ohci.c 	context_stop(&ctx->context);
ctx              3100 drivers/firewire/ohci.c 	tasklet_kill(&ctx->context.tasklet);
ctx              3108 drivers/firewire/ohci.c 	struct iso_context *ctx = container_of(base, struct iso_context, base);
ctx              3113 drivers/firewire/ohci.c 	context_release(&ctx->context);
ctx              3114 drivers/firewire/ohci.c 	free_page((unsigned long)ctx->header);
ctx              3120 drivers/firewire/ohci.c 		index = ctx - ohci->it_context_list;
ctx              3125 drivers/firewire/ohci.c 		index = ctx - ohci->ir_context_list;
ctx              3131 drivers/firewire/ohci.c 		index = ctx - ohci->ir_context_list;
ctx              3176 drivers/firewire/ohci.c 	struct iso_context *ctx;
ctx              3179 drivers/firewire/ohci.c 		ctx = &ohci->ir_context_list[i];
ctx              3180 drivers/firewire/ohci.c 		if (ctx->context.running)
ctx              3181 drivers/firewire/ohci.c 			ohci_start_iso(&ctx->base, 0, ctx->sync, ctx->tags);
ctx              3185 drivers/firewire/ohci.c 		ctx = &ohci->it_context_list[i];
ctx              3186 drivers/firewire/ohci.c 		if (ctx->context.running)
ctx              3187 drivers/firewire/ohci.c 			ohci_start_iso(&ctx->base, 0, ctx->sync, ctx->tags);
ctx              3192 drivers/firewire/ohci.c static int queue_iso_transmit(struct iso_context *ctx,
ctx              3227 drivers/firewire/ohci.c 	d = context_get_descriptors(&ctx->context, z + header_z, &d_bus);
ctx              3247 drivers/firewire/ohci.c 					IT_HEADER_CHANNEL(ctx->base.channel) |
ctx              3248 drivers/firewire/ohci.c 					IT_HEADER_SPEED(ctx->base.speed));
ctx              3273 drivers/firewire/ohci.c 		dma_sync_single_range_for_device(ctx->context.ohci->card.device,
ctx              3291 drivers/firewire/ohci.c 	context_append(&ctx->context, d, z, header_z);
ctx              3296 drivers/firewire/ohci.c static int queue_iso_packet_per_buffer(struct iso_context *ctx,
ctx              3301 drivers/firewire/ohci.c 	struct device *device = ctx->context.ohci->card.device;
ctx              3312 drivers/firewire/ohci.c 	packet_count = packet->header_length / ctx->base.header_size;
ctx              3313 drivers/firewire/ohci.c 	header_size  = max(ctx->base.header_size, (size_t)8);
ctx              3324 drivers/firewire/ohci.c 		d = context_get_descriptors(&ctx->context,
ctx              3371 drivers/firewire/ohci.c 		context_append(&ctx->context, d, z, header_z);
ctx              3377 drivers/firewire/ohci.c static int queue_iso_buffer_fill(struct iso_context *ctx,
ctx              3397 drivers/firewire/ohci.c 		d = context_get_descriptors(&ctx->context, 1, &d_bus);
ctx              3419 drivers/firewire/ohci.c 		dma_sync_single_range_for_device(ctx->context.ohci->card.device,
ctx              3427 drivers/firewire/ohci.c 		context_append(&ctx->context, d, 1, 0);
ctx              3438 drivers/firewire/ohci.c 	struct iso_context *ctx = container_of(base, struct iso_context, base);
ctx              3442 drivers/firewire/ohci.c 	spin_lock_irqsave(&ctx->context.ohci->lock, flags);
ctx              3445 drivers/firewire/ohci.c 		ret = queue_iso_transmit(ctx, packet, buffer, payload);
ctx              3448 drivers/firewire/ohci.c 		ret = queue_iso_packet_per_buffer(ctx, packet, buffer, payload);
ctx              3451 drivers/firewire/ohci.c 		ret = queue_iso_buffer_fill(ctx, packet, buffer, payload);
ctx              3454 drivers/firewire/ohci.c 	spin_unlock_irqrestore(&ctx->context.ohci->lock, flags);
ctx              3461 drivers/firewire/ohci.c 	struct context *ctx =
ctx              3464 drivers/firewire/ohci.c 	reg_write(ctx->ohci, CONTROL_SET(ctx->regs), CONTEXT_WAKE);
ctx              3469 drivers/firewire/ohci.c 	struct iso_context *ctx = container_of(base, struct iso_context, base);
ctx              3472 drivers/firewire/ohci.c 	tasklet_disable(&ctx->context.tasklet);
ctx              3474 drivers/firewire/ohci.c 	if (!test_and_set_bit_lock(0, &ctx->flushing_completions)) {
ctx              3475 drivers/firewire/ohci.c 		context_tasklet((unsigned long)&ctx->context);
ctx              3480 drivers/firewire/ohci.c 			if (ctx->header_length != 0)
ctx              3481 drivers/firewire/ohci.c 				flush_iso_completions(ctx);
ctx              3484 drivers/firewire/ohci.c 			if (ctx->mc_completed != 0)
ctx              3485 drivers/firewire/ohci.c 				flush_ir_buffer_fill(ctx);
ctx              3491 drivers/firewire/ohci.c 		clear_bit_unlock(0, &ctx->flushing_completions);
ctx              3495 drivers/firewire/ohci.c 	tasklet_enable(&ctx->context.tasklet);
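
The isochronous receive paths accumulate per-packet headers into a page-sized buffer hanging off the context and invoke the completion callback when the buffer would overflow (or when an IRQ-flagged packet arrives), after which header_length resets to zero. A compact model of the accumulate-and-flush logic, using a 32-byte buffer in place of PAGE_SIZE:

    #include <stdio.h>
    #include <string.h>

    #define HDR_PAGE 32           /* stand-in for the PAGE_SIZE header buffer */

    struct iso_ctx {
        unsigned char header[HDR_PAGE];
        size_t header_length;     /* bytes accumulated since the last flush */
        size_t header_size;       /* per-packet header size */
    };

    static void flush_completions(struct iso_ctx *ctx)
    {
        printf("callback: %zu bytes of headers delivered\n", ctx->header_length);
        ctx->header_length = 0;   /* buffer is reusable again */
    }

    /* Append one packet's header, flushing first if the page would overflow. */
    static void copy_header(struct iso_ctx *ctx, const unsigned char *hdr)
    {
        if (ctx->header_length + ctx->header_size > HDR_PAGE)
            flush_completions(ctx);
        memcpy(ctx->header + ctx->header_length, hdr, ctx->header_size);
        ctx->header_length += ctx->header_size;
    }

    int main(void)
    {
        struct iso_ctx ctx = { .header_size = 12 };
        unsigned char hdr[12] = { 0 };
        for (int i = 0; i < 4; i++)
            copy_header(&ctx, hdr);       /* flushes once, after 24 bytes */
        if (ctx.header_length)
            flush_completions(&ctx);      /* final partial flush */
        return 0;
    }
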
ctx                45 drivers/firmware/qcom_scm.c 	__le64 ctx;
ctx               486 drivers/firmware/qcom_scm.c 		destvm->ctx = 0;
ctx                87 drivers/gpio/gpio-dwapb.c 	struct dwapb_context	*ctx;
ctx               330 drivers/gpio/gpio-dwapb.c 	struct dwapb_context *ctx = gpio->ports[0].ctx;
ctx               333 drivers/gpio/gpio-dwapb.c 		ctx->wake_en |= BIT(d->hwirq);
ctx               335 drivers/gpio/gpio-dwapb.c 		ctx->wake_en &= ~BIT(d->hwirq);
ctx               503 drivers/gpio/gpio-dwapb.c 	port->ctx = devm_kzalloc(gpio->dev, sizeof(*port->ctx), GFP_KERNEL);
ctx               504 drivers/gpio/gpio-dwapb.c 	if (!port->ctx)
ctx               756 drivers/gpio/gpio-dwapb.c 		struct dwapb_context *ctx = gpio->ports[i].ctx;
ctx               758 drivers/gpio/gpio-dwapb.c 		BUG_ON(!ctx);
ctx               761 drivers/gpio/gpio-dwapb.c 		ctx->dir = dwapb_read(gpio, offset);
ctx               764 drivers/gpio/gpio-dwapb.c 		ctx->data = dwapb_read(gpio, offset);
ctx               767 drivers/gpio/gpio-dwapb.c 		ctx->ext = dwapb_read(gpio, offset);
ctx               771 drivers/gpio/gpio-dwapb.c 			ctx->int_mask	= dwapb_read(gpio, GPIO_INTMASK);
ctx               772 drivers/gpio/gpio-dwapb.c 			ctx->int_en	= dwapb_read(gpio, GPIO_INTEN);
ctx               773 drivers/gpio/gpio-dwapb.c 			ctx->int_pol	= dwapb_read(gpio, GPIO_INT_POLARITY);
ctx               774 drivers/gpio/gpio-dwapb.c 			ctx->int_type	= dwapb_read(gpio, GPIO_INTTYPE_LEVEL);
ctx               775 drivers/gpio/gpio-dwapb.c 			ctx->int_deb	= dwapb_read(gpio, GPIO_PORTA_DEBOUNCE);
ctx               779 drivers/gpio/gpio-dwapb.c 				    0xffffffff & ~ctx->wake_en);
ctx               803 drivers/gpio/gpio-dwapb.c 		struct dwapb_context *ctx = gpio->ports[i].ctx;
ctx               805 drivers/gpio/gpio-dwapb.c 		BUG_ON(!ctx);
ctx               808 drivers/gpio/gpio-dwapb.c 		dwapb_write(gpio, offset, ctx->data);
ctx               811 drivers/gpio/gpio-dwapb.c 		dwapb_write(gpio, offset, ctx->dir);
ctx               814 drivers/gpio/gpio-dwapb.c 		dwapb_write(gpio, offset, ctx->ext);
ctx               818 drivers/gpio/gpio-dwapb.c 			dwapb_write(gpio, GPIO_INTTYPE_LEVEL, ctx->int_type);
ctx               819 drivers/gpio/gpio-dwapb.c 			dwapb_write(gpio, GPIO_INT_POLARITY, ctx->int_pol);
ctx               820 drivers/gpio/gpio-dwapb.c 			dwapb_write(gpio, GPIO_PORTA_DEBOUNCE, ctx->int_deb);
ctx               821 drivers/gpio/gpio-dwapb.c 			dwapb_write(gpio, GPIO_INTEN, ctx->int_en);
ctx               822 drivers/gpio/gpio-dwapb.c 			dwapb_write(gpio, GPIO_INTMASK, ctx->int_mask);
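
gpio-dwapb uses its per-port ctx purely as a register shadow: suspend reads every live register into the struct, and resume writes them back after the power domain may have lost its contents. The pattern in miniature, with a three-word fake register file in place of the MMIO window:

    #include <stdio.h>
    #include <stdint.h>

    /* Fake register file standing in for the GPIO block's MMIO window. */
    static uint32_t regs[3];      /* [0]=data [1]=direction [2]=int_en */

    struct gpio_ctx { uint32_t data, dir, int_en; };

    static void save_context(struct gpio_ctx *ctx)
    {
        ctx->data   = regs[0];
        ctx->dir    = regs[1];
        ctx->int_en = regs[2];
    }

    static void restore_context(const struct gpio_ctx *ctx)
    {
        regs[0] = ctx->data;
        regs[1] = ctx->dir;
        regs[2] = ctx->int_en;
    }

    int main(void)
    {
        struct gpio_ctx ctx;

        regs[0] = 0xa5; regs[1] = 0x0f; regs[2] = 0x01;
        save_context(&ctx);                /* suspend: snapshot hardware state */
        regs[0] = regs[1] = regs[2] = 0;   /* power domain lost its contents */
        restore_context(&ctx);             /* resume: replay the snapshot */
        printf("data=%#x dir=%#x int_en=%#x\n", regs[0], regs[1], regs[2]);
        return 0;
    }
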
ctx               459 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_ctx	*ctx;
ctx               277 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               286 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               489 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               519 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               563 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			      struct bo_vm_reservation_context *ctx)
ctx               570 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->reserved = false;
ctx               571 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->n_vms = 1;
ctx               572 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->sync = &mem->sync;
ctx               574 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx->list);
ctx               575 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx->duplicates);
ctx               577 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->vm_pd = kcalloc(ctx->n_vms, sizeof(*ctx->vm_pd), GFP_KERNEL);
ctx               578 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!ctx->vm_pd)
ctx               581 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.priority = 0;
ctx               582 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.bo = &bo->tbo;
ctx               583 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.num_shared = 1;
ctx               584 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_add(&ctx->kfd_bo.tv.head, &ctx->list);
ctx               586 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	amdgpu_vm_get_pd_bo(vm, &ctx->list, &ctx->vm_pd[0]);
ctx               588 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_eu_reserve_buffers(&ctx->ticket, &ctx->list,
ctx               589 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				     false, &ctx->duplicates, true);
ctx               591 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->reserved = true;
ctx               594 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(ctx->vm_pd);
ctx               595 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->vm_pd = NULL;
ctx               613 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				struct bo_vm_reservation_context *ctx)
ctx               620 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->reserved = false;
ctx               621 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->n_vms = 0;
ctx               622 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->vm_pd = NULL;
ctx               623 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->sync = &mem->sync;
ctx               625 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx->list);
ctx               626 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx->duplicates);
ctx               634 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->n_vms++;
ctx               637 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (ctx->n_vms != 0) {
ctx               638 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->vm_pd = kcalloc(ctx->n_vms, sizeof(*ctx->vm_pd),
ctx               640 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (!ctx->vm_pd)
ctx               644 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.priority = 0;
ctx               645 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.bo = &bo->tbo;
ctx               646 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->kfd_bo.tv.num_shared = 1;
ctx               647 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	list_add(&ctx->kfd_bo.tv.head, &ctx->list);
ctx               656 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_vm_get_pd_bo(entry->bo_va->base.vm, &ctx->list,
ctx               657 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				&ctx->vm_pd[i]);
ctx               661 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_eu_reserve_buffers(&ctx->ticket, &ctx->list,
ctx               662 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 				     false, &ctx->duplicates, true);
ctx               664 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->reserved = true;
ctx               669 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(ctx->vm_pd);
ctx               670 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ctx->vm_pd = NULL;
ctx               686 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c static int unreserve_bo_and_vms(struct bo_vm_reservation_context *ctx,
ctx               692 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = amdgpu_sync_wait(ctx->sync, intr);
ctx               694 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (ctx->reserved)
ctx               695 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ttm_eu_backoff_reservation(&ctx->ticket, &ctx->list);
ctx               696 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	kfree(ctx->vm_pd);
ctx               698 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->sync = NULL;
ctx               700 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->reserved = false;
ctx               701 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ctx->vm_pd = NULL;
ctx              1232 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct bo_vm_reservation_context ctx;
ctx              1259 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_cond_vms(mem, NULL, BO_VM_ALL, &ctx);
ctx              1277 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = unreserve_bo_and_vms(&ctx, false, false);
ctx              1307 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct bo_vm_reservation_context ctx;
ctx              1345 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_vm(mem, vm, &ctx);
ctx              1394 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = map_bo_to_gpuvm(adev, entry, ctx.sync,
ctx              1401 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = vm_update_pds(vm, ctx.sync);
ctx              1418 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = unreserve_bo_and_vms(&ctx, false, false);
ctx              1429 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unreserve_bo_and_vms(&ctx, false, false);
ctx              1444 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct bo_vm_reservation_context ctx;
ctx              1449 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = reserve_bo_and_cond_vms(mem, vm, BO_VM_MAPPED, &ctx);
ctx              1453 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (ctx.n_vms == 0) {
ctx              1474 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = unmap_bo_from_gpuvm(adev, entry, ctx.sync);
ctx              1498 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unreserve_bo_and_vms(&ctx, false, false);
ctx              1689 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1706 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx              1771 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1824 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 			ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx              1966 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct bo_vm_reservation_context ctx;
ctx              1973 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx.list);
ctx              1974 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	INIT_LIST_HEAD(&ctx.duplicates);
ctx              1986 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		amdgpu_vm_get_pd_bo(peer_vm, &ctx.list, &pd_bo_list[i++]);
ctx              1994 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		list_add_tail(&mem->resv_list.head, &ctx.list);
ctx              1999 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = ttm_eu_reserve_buffers(&ctx.ticket, &ctx.list,
ctx              2091 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ttm_eu_backoff_reservation(&ctx.ticket, &ctx.list);
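
Two context flavours appear in amdgpu_amdkfd_gpuvm.c: the on-stack ttm_operation_ctx, whose two leading bools select interruptible/no-wait behaviour for ttm_bo_validate() and whose bytes_moved field reports work back to the caller, and the larger bo_vm_reservation_context that carries lists across reserve/unreserve. A sketch of the first, caller-owned flavour, showing options flowing down and results flowing back up; the field names mirror TTM's but the body is illustrative:

    #include <stdbool.h>
    #include <stdio.h>
    #include <errno.h>

    /* Caller-owned knobs plus result counters, passed by pointer down the stack. */
    struct op_ctx {
        bool interruptible;        /* may the wait be interrupted? */
        bool no_wait;              /* fail instead of blocking */
        unsigned long bytes_moved; /* filled in by the callee */
    };

    static int validate(struct op_ctx *ctx, bool busy, unsigned long size)
    {
        if (busy) {
            if (ctx->no_wait)
                return -EBUSY;     /* caller asked for a non-blocking attempt */
            /* ... otherwise we would sleep here ... */
        }
        ctx->bytes_moved += size;  /* report work back through the context */
        return 0;
    }

    int main(void)
    {
        struct op_ctx ctx = { false, false, 0 };   /* like "{ false, false }" above */
        int r = validate(&ctx, false, 4096);
        printf("r=%d moved=%lu\n", r, ctx.bytes_moved);
        return 0;
    }
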
ctx                92 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx               103 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (amdgpu_atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               104 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 		i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
ctx               128 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx               137 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (amdgpu_atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               138 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 		i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
ctx               163 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx               174 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (amdgpu_atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               175 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 		gpio_info = (struct _ATOM_GPIO_PIN_LUT *)(ctx->bios + data_offset);
ctx               267 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = mode_info->atom_context;
ctx               274 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (!amdgpu_atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
ctx               280 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	obj_header = (ATOM_OBJECT_HEADER *) (ctx->bios + data_offset);
ctx               282 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	    (ctx->bios + data_offset +
ctx               294 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = mode_info->atom_context;
ctx               311 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (!amdgpu_atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
ctx               317 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	obj_header = (ATOM_OBJECT_HEADER *) (ctx->bios + data_offset);
ctx               319 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	    (ctx->bios + data_offset +
ctx               322 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	    (ctx->bios + data_offset +
ctx               325 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	    (ctx->bios + data_offset +
ctx               328 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 		(ctx->bios + data_offset +
ctx               377 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 								(ctx->bios + data_offset +
ctx               405 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 								(ctx->bios + data_offset +
ctx               413 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 								(ctx->bios + data_offset +
ctx               483 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 						    (ctx->bios + data_offset +
ctx              1767 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx              1775 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (amdgpu_atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset)) {
ctx              1776 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 		firmware_usage = (struct _ATOM_VRAM_USAGE_BY_FIRMWARE *)(ctx->bios + data_offset);
ctx              1798 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	ctx->scratch_size_bytes = 0;
ctx              1802 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
ctx              1803 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	if (!ctx->scratch)
ctx              1805 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	ctx->scratch_size_bytes = usage_bytes;
ctx              1948 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx              1950 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	return snprintf(buf, PAGE_SIZE, "%s\n", ctx->vbios_version);
ctx                69 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx                77 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	if (amdgpu_atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset)) {
ctx                78 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 		firmware_usage = (struct vram_usagebyfirmware_v2_1 *)(ctx->bios + data_offset);
ctx               100 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	ctx->scratch_size_bytes = 0;
ctx               104 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
ctx               105 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	if (!ctx->scratch)
ctx               107 drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c 	ctx->scratch_size_bytes = usage_bytes;
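
The atombios/atomfirmware code treats ctx->bios as an opaque blob: amdgpu_atom_parse_data_header() yields a data_offset, and each table is then read in place at ctx->bios + data_offset. A toy version of locating a structure inside a byte image via a stored offset, with a made-up directory layout and a bounds check; the real parser, of course, decodes actual ATOM headers:

    #include <stdio.h>
    #include <stdint.h>
    #include <string.h>

    /* Toy "BIOS image": a directory entry at a fixed place points at a data
     * table elsewhere in the blob, which we then read in place. */
    static uint8_t bios[64];

    struct fw_table { uint16_t start; uint16_t size; };  /* made-up layout */

    static int parse_data_header(const uint8_t *img, uint16_t *data_offset)
    {
        uint16_t off;
        memcpy(&off, img, sizeof(off));   /* the directory lives at offset 0 here */
        if (off == 0 || off >= sizeof(bios) - sizeof(struct fw_table))
            return 0;                     /* table absent or out of bounds */
        *data_offset = off;
        return 1;
    }

    int main(void)
    {
        uint16_t data_offset, dir = 16;
        struct fw_table t = { 0x1000, 0x200 }, out;

        memcpy(bios + 16, &t, sizeof(t)); /* plant the table at offset 16 */
        memcpy(bios, &dir, sizeof(dir));  /* point the directory at it */

        if (parse_data_header(bios, &data_offset)) {
            memcpy(&out, bios + data_offset, sizeof(out)); /* ctx->bios + data_offset */
            printf("table at %u: start=%#x size=%#x\n",
                   data_offset, out.start, out.size);
        }
        return 0;
    }
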
ctx               123 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->ctx = amdgpu_ctx_get(fpriv, cs->in.ctx_id);
ctx               124 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (!p->ctx) {
ctx               129 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	mutex_lock(&p->ctx->lock);
ctx               132 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (atomic_read(&p->ctx->guilty) == 1) {
ctx               233 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (p->ctx->vram_lost_counter != p->job->vram_lost_counter) {
ctx               402 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct ttm_operation_ctx ctx = {
ctx               437 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               439 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->bytes_moved += ctx.bytes_moved;
ctx               442 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->bytes_moved_vis += ctx.bytes_moved;
ctx               457 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               496 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               497 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->bytes_moved += ctx.bytes_moved;
ctx               499 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			p->bytes_moved_vis += ctx.bytes_moved;
ctx               533 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               549 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               765 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (parser->ctx) {
ctx               766 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		mutex_unlock(&parser->ctx->lock);
ctx               767 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_ctx_put(parser->ctx);
ctx               976 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_ctx_get_entity(parser->ctx, chunk_ib->ip_type,
ctx              1011 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	return amdgpu_ctx_wait_prev_fence(parser->ctx, parser->entity);
ctx              1027 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		struct amdgpu_ctx *ctx;
ctx              1031 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		ctx = amdgpu_ctx_get(fpriv, deps[i].ctx_id);
ctx              1032 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (ctx == NULL)
ctx              1035 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = amdgpu_ctx_get_entity(ctx, deps[i].ip_type,
ctx              1039 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			amdgpu_ctx_put(ctx);
ctx              1043 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		fence = amdgpu_ctx_get_fence(ctx, entity, deps[i].handle);
ctx              1044 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_ctx_put(ctx);
ctx              1312 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	amdgpu_ctx_add_fence(p->ctx, entity, p->fence, &seq);
ctx              1316 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	    !p->ctx->preamble_presented) {
ctx              1318 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->ctx->preamble_presented = true;
ctx              1421 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct amdgpu_ctx *ctx;
ctx              1425 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	ctx = amdgpu_ctx_get(filp->driver_priv, wait->in.ctx_id);
ctx              1426 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (ctx == NULL)
ctx              1429 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	r = amdgpu_ctx_get_entity(ctx, wait->in.ip_type, wait->in.ip_instance,
ctx              1432 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_ctx_put(ctx);
ctx              1436 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	fence = amdgpu_ctx_get_fence(ctx, entity, wait->in.handle);
ctx              1447 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	amdgpu_ctx_put(ctx);
ctx              1469 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct amdgpu_ctx *ctx;
ctx              1473 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	ctx = amdgpu_ctx_get(filp->driver_priv, user->ctx_id);
ctx              1474 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (ctx == NULL)
ctx              1477 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	r = amdgpu_ctx_get_entity(ctx, user->ip_type, user->ip_instance,
ctx              1480 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		amdgpu_ctx_put(ctx);
ctx              1484 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	fence = amdgpu_ctx_get_fence(ctx, entity, user->seq_no);
ctx              1485 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	amdgpu_ctx_put(ctx);
ctx              1716 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1737 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		r = ttm_bo_validate(&(*bo)->tbo, &(*bo)->placement, &ctx);
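
Every ioctl in amdgpu_cs.c follows the same handle discipline: amdgpu_ctx_get() translates the user's ctx_id into a referenced pointer, and every exit path, success or error, ends in amdgpu_ctx_put(). A reduced model of the wait-fence shape, with an array standing in for the IDR and a plain counter for the kref:

    #include <stdio.h>
    #include <errno.h>
    #include <stddef.h>

    struct uctx { int refs; int id; };

    static struct uctx table[4];            /* toy stand-in for the IDR */

    static struct uctx *ctx_get(int id)     /* look up and take a reference */
    {
        if (id < 0 || id >= 4 || table[id].refs == 0)
            return NULL;
        table[id].refs++;
        return &table[id];
    }

    static void ctx_put(struct uctx *ctx)   /* drop the reference taken above */
    {
        if (ctx)
            ctx->refs--;
    }

    static int wait_fence_ioctl(int ctx_id, unsigned seq)
    {
        struct uctx *ctx = ctx_get(ctx_id);
        if (!ctx)
            return -EINVAL;                 /* stale or bogus handle */
        printf("waiting on fence %u of ctx %d\n", seq, ctx->id);
        ctx_put(ctx);                       /* every successful get is paired */
        return 0;
    }

    int main(void)
    {
        table[1] = (struct uctx){ .refs = 1, .id = 1 };  /* a "created" context */
        printf("%d\n", wait_fence_ioctl(1, 42));   /* 0 */
        printf("%d\n", wait_fence_ioctl(3, 42));   /* -EINVAL */
        return 0;
    }
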
ctx                74 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			   struct amdgpu_ctx *ctx)
ctx                87 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	memset(ctx, 0, sizeof(*ctx));
ctx                88 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->adev = adev;
ctx                90 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->fences = kcalloc(amdgpu_sched_jobs * num_entities,
ctx                92 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (!ctx->fences)
ctx                95 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->entities[0] = kcalloc(num_entities,
ctx                98 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (!ctx->entities[0]) {
ctx               104 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		struct amdgpu_ctx_entity *entity = &ctx->entities[0][i];
ctx               107 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		entity->fences = &ctx->fences[amdgpu_sched_jobs * i];
ctx               110 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		ctx->entities[i] = ctx->entities[i - 1] +
ctx               113 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kref_init(&ctx->refcount);
ctx               114 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_lock_init(&ctx->ring_lock);
ctx               115 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	mutex_init(&ctx->lock);
ctx               117 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->reset_counter = atomic_read(&adev->gpu_reset_counter);
ctx               118 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->reset_counter_query = ctx->reset_counter;
ctx               119 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->vram_lost_counter = atomic_read(&adev->vram_lost_counter);
ctx               120 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->init_priority = priority;
ctx               121 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->override_priority = DRM_SCHED_PRIORITY_UNSET;
ctx               188 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			r = drm_sched_entity_init(&ctx->entities[i][j].entity,
ctx               189 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 						  rqs, num_rqs, &ctx->guilty);
ctx               198 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		drm_sched_entity_destroy(&ctx->entities[0][i].entity);
ctx               199 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kfree(ctx->entities[0]);
ctx               202 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kfree(ctx->fences);
ctx               203 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->fences = NULL;
ctx               209 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx = container_of(ref, struct amdgpu_ctx, refcount);
ctx               211 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_device *adev = ctx->adev;
ctx               219 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			dma_fence_put(ctx->entities[0][i].fences[j]);
ctx               220 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kfree(ctx->fences);
ctx               221 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kfree(ctx->entities[0]);
ctx               223 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	mutex_destroy(&ctx->lock);
ctx               225 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kfree(ctx);
ctx               228 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c int amdgpu_ctx_get_entity(struct amdgpu_ctx *ctx, u32 hw_ip, u32 instance,
ctx               247 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	*entity = &ctx->entities[hw_ip][ring].entity;
ctx               258 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               261 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               262 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (!ctx)
ctx               266 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	r = idr_alloc(&mgr->ctx_handles, ctx, 1, AMDGPU_VM_MAX_NUM_CTX, GFP_KERNEL);
ctx               269 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		kfree(ctx);
ctx               274 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	r = amdgpu_ctx_init(adev, priority, filp, ctx);
ctx               278 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		kfree(ctx);
ctx               286 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               290 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = container_of(ref, struct amdgpu_ctx, refcount);
ctx               294 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		drm_sched_entity_destroy(&ctx->entities[0][i].entity);
ctx               302 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               305 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = idr_remove(&mgr->ctx_handles, id);
ctx               306 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx)
ctx               307 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		kref_put(&ctx->refcount, amdgpu_ctx_do_release);
ctx               309 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	return ctx ? 0 : -EINVAL;
ctx               316 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               325 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = idr_find(&mgr->ctx_handles, id);
ctx               326 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (!ctx) {
ctx               338 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx->reset_counter_query == reset_counter)
ctx               342 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->reset_counter_query = reset_counter;
ctx               352 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               361 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = idr_find(&mgr->ctx_handles, id);
ctx               362 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (!ctx) {
ctx               370 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx->reset_counter != atomic_read(&adev->gpu_reset_counter))
ctx               373 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx->vram_lost_counter != atomic_read(&adev->vram_lost_counter))
ctx               376 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (atomic_read(&ctx->guilty))
ctx               382 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ras_counter != ctx->ras_counter_ue) {
ctx               384 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		ctx->ras_counter_ue = ras_counter;
ctx               389 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ras_counter != ctx->ras_counter_ce) {
ctx               391 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		ctx->ras_counter_ce = ras_counter;
ctx               441 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               450 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx = idr_find(&mgr->ctx_handles, id);
ctx               451 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx)
ctx               452 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		kref_get(&ctx->refcount);
ctx               454 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	return ctx;
ctx               457 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c int amdgpu_ctx_put(struct amdgpu_ctx *ctx)
ctx               459 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	if (ctx == NULL)
ctx               462 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	kref_put(&ctx->refcount, amdgpu_ctx_do_release);
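
amdgpu_ctx_get()/amdgpu_ctx_put() above pair that lifecycle with lookup-and-reference: kref_get() is only safe because it runs under the same manager lock that amdgpu_ctx_free() holds while unpublishing, so a lookup can never race with the final put. A sketch of that invariant with hypothetical names, using a mutex where the kernel uses the manager lock:

#include <pthread.h>
#include <stddef.h>

struct toy_obj {
	int refcount;			/* guarded by table_lock here */
};

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;
static struct toy_obj *table[16];

static struct toy_obj *toy_obj_get(int id)
{
	struct toy_obj *o;

	pthread_mutex_lock(&table_lock);
	o = (id >= 0 && id < 16) ? table[id] : NULL;
	if (o)
		o->refcount++;	/* safe: unpublish holds the same lock */
	pthread_mutex_unlock(&table_lock);
	return o;		/* caller now owns a reference, or NULL */
}
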
ctx               466 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c void amdgpu_ctx_add_fence(struct amdgpu_ctx *ctx,
ctx               482 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_lock(&ctx->ring_lock);
ctx               485 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_unlock(&ctx->ring_lock);
ctx               492 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c struct dma_fence *amdgpu_ctx_get_fence(struct amdgpu_ctx *ctx,
ctx               499 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_lock(&ctx->ring_lock);
ctx               505 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		spin_unlock(&ctx->ring_lock);
ctx               511 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		spin_unlock(&ctx->ring_lock);
ctx               516 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_unlock(&ctx->ring_lock);
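
amdgpu_ctx_add_fence()/amdgpu_ctx_get_fence() above guard a per-entity ring of fences with ctx->ring_lock: add stores a fence under a monotonically increasing sequence number, and get rejects sequence numbers that are still in the future or already overwritten by the ring wrapping. A simplified standalone model of that window check (toy names; a pthread mutex stands in for the spinlock):

#include <pthread.h>
#include <stdint.h>

#define RING_SLOTS 64			/* must be a power of two */

struct fence_ring {
	pthread_mutex_t lock;		/* plays the role of ctx->ring_lock */
	uint64_t next_seq;		/* next sequence number to hand out */
	void *slot[RING_SLOTS];		/* fence pointers, indexed seq % size */
};

static struct fence_ring ring = { .lock = PTHREAD_MUTEX_INITIALIZER };

static uint64_t ring_add(struct fence_ring *r, void *fence)
{
	uint64_t seq;

	pthread_mutex_lock(&r->lock);
	seq = r->next_seq++;
	r->slot[seq & (RING_SLOTS - 1)] = fence;	/* may overwrite old */
	pthread_mutex_unlock(&r->lock);
	return seq;			/* the caller's handle to the fence */
}

static void *ring_get(struct fence_ring *r, uint64_t seq)
{
	void *fence = NULL;

	pthread_mutex_lock(&r->lock);
	/* Reject future seqs and seqs the wrap has already overwritten. */
	if (seq < r->next_seq && seq + RING_SLOTS >= r->next_seq)
		fence = r->slot[seq & (RING_SLOTS - 1)];
	pthread_mutex_unlock(&r->lock);
	return fence;
}
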
ctx               521 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c void amdgpu_ctx_priority_override(struct amdgpu_ctx *ctx,
ctx               528 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx->override_priority = priority;
ctx               530 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	ctx_prio = (ctx->override_priority == DRM_SCHED_PRIORITY_UNSET) ?
ctx               531 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			ctx->init_priority : ctx->override_priority;
ctx               534 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		struct drm_sched_entity *entity = &ctx->entities[0][i].entity;
ctx               540 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c int amdgpu_ctx_wait_prev_fence(struct amdgpu_ctx *ctx,
ctx               548 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_lock(&ctx->ring_lock);
ctx               551 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	spin_unlock(&ctx->ring_lock);
ctx               573 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               580 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	idr_for_each_entry(idp, ctx, id) {
ctx               584 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			entity = &ctx->entities[0][i].entity;
ctx               595 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               601 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	idr_for_each_entry(idp, ctx, id) {
ctx               602 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		if (kref_read(&ctx->refcount) != 1) {
ctx               603 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			DRM_ERROR("ctx %p is still alive\n", ctx);
ctx               608 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			mutex_lock(&ctx->adev->lock_reset);
ctx               609 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			drm_sched_entity_fini(&ctx->entities[0][i].entity);
ctx               610 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			mutex_unlock(&ctx->adev->lock_reset);
ctx               617 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	struct amdgpu_ctx *ctx;
ctx               625 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 	idr_for_each_entry(idp, ctx, id) {
ctx               626 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 		if (kref_put(&ctx->refcount, amdgpu_ctx_fini) != 1)
ctx               627 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c 			DRM_ERROR("ctx %p is still alive\n", ctx);
ctx                66 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h int amdgpu_ctx_put(struct amdgpu_ctx *ctx);
ctx                68 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h int amdgpu_ctx_get_entity(struct amdgpu_ctx *ctx, u32 hw_ip, u32 instance,
ctx                70 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h void amdgpu_ctx_add_fence(struct amdgpu_ctx *ctx,
ctx                73 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h struct dma_fence *amdgpu_ctx_get_fence(struct amdgpu_ctx *ctx,
ctx                76 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h void amdgpu_ctx_priority_override(struct amdgpu_ctx *ctx,
ctx                82 drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.h int amdgpu_ctx_wait_prev_fence(struct amdgpu_ctx *ctx,
ctx               152 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c 				struct drm_modeset_acquire_ctx *ctx)
ctx               270 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c 				   struct drm_modeset_acquire_ctx *ctx)
ctx               287 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c 	ret = drm_crtc_helper_set_config(set, ctx);
ctx               287 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               303 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               286 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               341 drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx              1231 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 	struct atom_context *ctx = adev->mode_info.atom_context;
ctx              1397 drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c 	seq_printf(m, "VBIOS version: %s\n", ctx->vbios_version);
ctx               628 drivers/gpu/drm/amd/amdgpu/amdgpu_mode.h 				   struct drm_modeset_acquire_ctx *ctx);
ctx               633 drivers/gpu/drm/amd/amdgpu/amdgpu_mode.h 				struct drm_modeset_acquire_ctx *ctx);
ctx               364 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               387 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 			     &(*bo_ptr)->tbo.mem, &ctx);
ctx               513 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = {
ctx               576 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 				 &bo->placement, page_align, &ctx, acc_size,
ctx               584 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_cs_report_moved_bytes(adev, ctx.bytes_moved,
ctx               585 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 					     ctx.bytes_moved);
ctx               587 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		amdgpu_cs_report_moved_bytes(adev, ctx.bytes_moved, 0);
ctx               707 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               718 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               886 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               944 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               995 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1012 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx              1333 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1367 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 	r = ttm_bo_validate(bo, &abo->placement, &ctx);
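
The many `struct ttm_operation_ctx ctx = { false, false };` initializers above use positional initialization of the structure's first two fields, which in this kernel are interruptible and no_wait_gpu; amdgpu_object.c line 513 spells the same thing with designated initializers. A toy model of the options-struct idiom (toy_* names are invented; only the two flag names mirror the real structure):

#include <stdbool.h>

struct toy_op_ctx {
	bool interruptible;		/* may waits be interrupted by signals? */
	bool no_wait_gpu;		/* fail rather than block on the GPU */
	unsigned long bytes_moved;	/* accumulated by the callee */
};

static int toy_validate(struct toy_op_ctx *ctx)
{
	/* A real validate would (re)place the buffer; just pretend some
	 * bytes moved so the caller has something to report. */
	ctx->bytes_moved += 4096;
	return 0;
}

static long toy_example(void)
{
	/* Equivalent to the positional { true, false } seen above, but
	 * self-documenting. */
	struct toy_op_ctx ctx = { .interruptible = true, .no_wait_gpu = false };

	if (toy_validate(&ctx))
		return -1;
	return (long)ctx.bytes_moved;	/* cf. amdgpu_cs_report_moved_bytes */
}
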
ctx                61 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	struct amdgpu_ctx *ctx;
ctx                74 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	idr_for_each_entry(&fpriv->ctx_mgr.ctx_handles, ctx, id)
ctx                75 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 		amdgpu_ctx_priority_override(ctx, priority);
ctx                88 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	struct amdgpu_ctx *ctx;
ctx               100 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	ctx = amdgpu_ctx_get(fpriv, ctx_id);
ctx               102 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	if (!ctx) {
ctx               107 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	amdgpu_ctx_priority_override(ctx, priority);
ctx               108 drivers/gpu/drm/amd/amdgpu/amdgpu_sched.c 	amdgpu_ctx_put(ctx);
ctx               474 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			     __field(uint64_t, ctx)
ctx               482 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 			   __entry->ctx = fence->context;
ctx               487 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h 		      __entry->fence, __entry->ctx,
ctx               485 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				struct ttm_operation_ctx *ctx,
ctx               507 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, ctx);
ctx               520 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, ctx);
ctx               526 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu, &tmp_mem, old_mem);
ctx               532 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_move_ttm(bo, ctx, new_mem);
ctx               544 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 				struct ttm_operation_ctx *ctx,
ctx               566 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, ctx);
ctx               573 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = ttm_bo_move_ttm(bo, ctx, &tmp_mem);
ctx               579 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu, new_mem, old_mem);
ctx               618 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			  struct ttm_operation_ctx *ctx,
ctx               663 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_vram_ram(bo, evict, ctx, new_mem);
ctx               666 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_ram_vram(bo, evict, ctx, new_mem);
ctx               668 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = amdgpu_move_blit(bo, evict, ctx->no_wait_gpu,
ctx               681 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_move_memcpy(bo, ctx, new_mem);
ctx              1097 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct ttm_operation_ctx ctx = { false, false };
ctx              1125 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		r = ttm_bo_mem_space(bo, &placement, &tmp, &ctx);
ctx              1251 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			struct ttm_operation_ctx *ctx)
ctx              1278 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		return ttm_dma_populate(&gtt->ttm, adev->dev, ctx);
ctx              1284 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	return ttm_populate_and_map_pages(adev->dev, &gtt->ttm, ctx);
ctx               455 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static u64 amdgpu_uvd_get_addr_from_ctx(struct amdgpu_uvd_cs_ctx *ctx)
ctx               460 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	lo = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->data0);
ctx               461 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	hi = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->data1);
ctx               475 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_cs_pass1(struct amdgpu_uvd_cs_ctx *ctx)
ctx               481 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	uint64_t addr = amdgpu_uvd_get_addr_from_ctx(ctx);
ctx               484 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_cs_find_mapping(ctx->parser, addr, &bo, &mapping);
ctx               490 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	if (!ctx->parser->adev->uvd.address_64_bit) {
ctx               492 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		cmd = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->idx) >> 1;
ctx               710 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_cs_msg(struct amdgpu_uvd_cs_ctx *ctx,
ctx               713 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_device *adev = ctx->parser->adev;
ctx               754 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				adev->uvd.filp[i] = ctx->parser->filp;
ctx               764 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = amdgpu_uvd_cs_msg_decode(adev, msg, ctx->buf_sizes);
ctx               772 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				if (adev->uvd.filp[i] != ctx->parser->filp) {
ctx               805 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_cs_pass2(struct amdgpu_uvd_cs_ctx *ctx)
ctx               811 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	uint64_t addr = amdgpu_uvd_get_addr_from_ctx(ctx);
ctx               814 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_cs_find_mapping(ctx->parser, addr, &bo, &mapping);
ctx               828 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_set_ib_value(ctx->parser, ctx->ib_idx, ctx->data0,
ctx               830 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	amdgpu_set_ib_value(ctx->parser, ctx->ib_idx, ctx->data1,
ctx               833 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	cmd = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->idx) >> 1;
ctx               835 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		if ((end - start) < ctx->buf_sizes[cmd]) {
ctx               838 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				  ctx->buf_sizes[cmd]);
ctx               843 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		if ((end - start) < ctx->buf_sizes[4]) {
ctx               846 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 					  ctx->buf_sizes[4]);
ctx               854 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	if (!ctx->parser->adev->uvd.address_64_bit) {
ctx               862 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		    (start >> 28) != (ctx->parser->adev->uvd.inst->gpu_addr >> 28)) {
ctx               870 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		ctx->has_msg_cmd = true;
ctx               871 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = amdgpu_uvd_cs_msg(ctx, bo, addr);
ctx               874 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	} else if (!ctx->has_msg_cmd) {
ctx               890 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_cs_reg(struct amdgpu_uvd_cs_ctx *ctx,
ctx               891 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			     int (*cb)(struct amdgpu_uvd_cs_ctx *ctx))
ctx               893 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib = &ctx->parser->job->ibs[ctx->ib_idx];
ctx               896 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ctx->idx++;
ctx               897 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	for (i = 0; i <= ctx->count; ++i) {
ctx               898 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		unsigned reg = ctx->reg + i;
ctx               900 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		if (ctx->idx >= ib->length_dw) {
ctx               907 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			ctx->data0 = ctx->idx;
ctx               910 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			ctx->data1 = ctx->idx;
ctx               913 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			r = cb(ctx);
ctx               924 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		ctx->idx++;
ctx               937 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c static int amdgpu_uvd_cs_packets(struct amdgpu_uvd_cs_ctx *ctx,
ctx               938 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 				 int (*cb)(struct amdgpu_uvd_cs_ctx *ctx))
ctx               940 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_ib *ib = &ctx->parser->job->ibs[ctx->ib_idx];
ctx               943 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	for (ctx->idx = 0 ; ctx->idx < ib->length_dw; ) {
ctx               944 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		uint32_t cmd = amdgpu_get_ib_value(ctx->parser, ctx->ib_idx, ctx->idx);
ctx               948 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			ctx->reg = CP_PACKET0_GET_REG(cmd);
ctx               949 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			ctx->count = CP_PACKET_GET_COUNT(cmd);
ctx               950 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			r = amdgpu_uvd_cs_reg(ctx, cb);
ctx               955 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 			++ctx->idx;
ctx               974 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	struct amdgpu_uvd_cs_ctx ctx = {};
ctx               994 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ctx.parser = parser;
ctx               995 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ctx.buf_sizes = buf_sizes;
ctx               996 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	ctx.ib_idx = ib_idx;
ctx              1001 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = amdgpu_uvd_cs_packets(&ctx, amdgpu_uvd_cs_pass1);
ctx              1007 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	r = amdgpu_uvd_cs_packets(&ctx, amdgpu_uvd_cs_pass2);
ctx              1011 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 	if (!ctx.has_msg_cmd) {
ctx              1037 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		struct ttm_operation_ctx ctx = { true, false };
ctx              1041 drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
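
amdgpu_uvd_cs_packets()/amdgpu_uvd_cs_reg() above implement the classic command-stream walk: iterate 32-bit words, decode a packet header into a base register and a count, then hand each register write to a per-pass callback, bailing out on truncated or unknown packets. An illustrative standalone walker with a made-up header layout (the real UVD/PM4 encoding differs):

#include <stddef.h>
#include <stdint.h>

struct toy_cs_ctx {
	const uint32_t *ib;	/* indirect-buffer words */
	size_t len;		/* word count */
	size_t idx;		/* current word */
	uint32_t reg;		/* register targeted by the current write */
	uint32_t count;		/* extra payload words in this packet */
};

/* Made-up header: [31:24] type, [23:8] count, [7:0] base register. */
static int walk_packets(struct toy_cs_ctx *c, int (*cb)(struct toy_cs_ctx *))
{
	for (c->idx = 0; c->idx < c->len; ) {
		uint32_t hdr = c->ib[c->idx];

		switch (hdr >> 24) {
		case 0:			/* register-write packet */
			c->reg = hdr & 0xff;
			c->count = (hdr >> 8) & 0xffff;
			c->idx++;
			for (uint32_t i = 0; i <= c->count; i++, c->reg++) {
				int r;

				if (c->idx >= c->len)
					return -1;	/* truncated packet */
				r = cb(c);		/* validate one write */
				if (r)
					return r;
				c->idx++;
			}
			break;
		case 2:			/* NOP-style padding */
			c->idx++;
			break;
		default:
			return -1;	/* unknown packet type: reject */
		}
	}
	return 0;
}
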
ctx               580 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               610 drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c 	return ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               706 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               745 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               751 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 				    &ctx);
ctx                58 drivers/gpu/drm/amd/amdgpu/atom.c 	struct atom_context *ctx;
ctx                68 drivers/gpu/drm/amd/amdgpu/atom.c static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params);
ctx                69 drivers/gpu/drm/amd/amdgpu/atom.c int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t * params);
ctx               104 drivers/gpu/drm/amd/amdgpu/atom.c static uint32_t atom_iio_execute(struct atom_context *ctx, int base,
ctx               115 drivers/gpu/drm/amd/amdgpu/atom.c 			temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1));
ctx               119 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp);
ctx               159 drivers/gpu/drm/amd/amdgpu/atom.c 			    ((ctx->
ctx               176 drivers/gpu/drm/amd/amdgpu/atom.c static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
ctx               180 drivers/gpu/drm/amd/amdgpu/atom.c 	struct atom_context *gctx = ctx->ctx;
ctx               221 drivers/gpu/drm/amd/amdgpu/atom.c 		val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
ctx               259 drivers/gpu/drm/amd/amdgpu/atom.c 			val = ctx->ws[idx];
ctx               361 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_skip_src_int(atom_exec_context *ctx, uint8_t attr, int *ptr)
ctx               397 drivers/gpu/drm/amd/amdgpu/atom.c static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
ctx               399 drivers/gpu/drm/amd/amdgpu/atom.c 	return atom_get_src_int(ctx, attr, ptr, NULL, 1);
ctx               402 drivers/gpu/drm/amd/amdgpu/atom.c static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
ctx               428 drivers/gpu/drm/amd/amdgpu/atom.c static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
ctx               431 drivers/gpu/drm/amd/amdgpu/atom.c 	return atom_get_src_int(ctx,
ctx               437 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_skip_dst(atom_exec_context *ctx, int arg, uint8_t attr, int *ptr)
ctx               439 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_skip_src_int(ctx,
ctx               444 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
ctx               450 drivers/gpu/drm/amd/amdgpu/atom.c 	struct atom_context *gctx = ctx->ctx;
ctx               494 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ps[idx] = cpu_to_le32(val);
ctx               526 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->ws[idx] = val;
ctx               580 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_add(atom_exec_context *ctx, int *ptr, int arg)
ctx               586 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               588 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               591 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               594 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_and(atom_exec_context *ctx, int *ptr, int arg)
ctx               600 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               602 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               605 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               608 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
ctx               613 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
ctx               622 drivers/gpu/drm/amd/amdgpu/atom.c 	if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
ctx               623 drivers/gpu/drm/amd/amdgpu/atom.c 		r = amdgpu_atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift);
ctx               625 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->abort = true;
ctx               629 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
ctx               636 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
ctx               638 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, 0, saved);
ctx               641 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_compare(atom_exec_context *ctx, int *ptr, int arg)
ctx               646 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               648 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               649 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->cs_equal = (dst == src);
ctx               650 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->cs_above = (dst > src);
ctx               651 drivers/gpu/drm/amd/amdgpu/atom.c 	SDEBUG("   result: %s %s\n", ctx->ctx->cs_equal ? "EQ" : "NE",
ctx               652 drivers/gpu/drm/amd/amdgpu/atom.c 	       ctx->ctx->cs_above ? "GT" : "LE");
ctx               655 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
ctx               667 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_div(atom_exec_context *ctx, int *ptr, int arg)
ctx               672 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               674 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               676 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[0] = dst / src;
ctx               677 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[1] = dst % src;
ctx               679 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[0] = 0;
ctx               680 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[1] = 0;
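
atom_op_div() above fixes the interpreter's division convention: quotient in divmul[0], remainder in divmul[1], and both zeroed on divide-by-zero rather than faulting mid-table. The same convention as a standalone helper:

#include <stdint.h>

static void toy_div(uint32_t dst, uint32_t src, uint32_t divmul[2])
{
	if (src) {
		divmul[0] = dst / src;	/* quotient */
		divmul[1] = dst % src;	/* remainder */
	} else {
		divmul[0] = 0;		/* tables expect zeros, not a trap */
		divmul[1] = 0;
	}
}
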
ctx               684 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_div32(atom_exec_context *ctx, int *ptr, int arg)
ctx               690 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               692 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               695 drivers/gpu/drm/amd/amdgpu/atom.c 		val64 |= ((uint64_t)ctx->ctx->divmul[1]) << 32;
ctx               697 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[0] = lower_32_bits(val64);
ctx               698 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[1] = upper_32_bits(val64);
ctx               700 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[0] = 0;
ctx               701 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->divmul[1] = 0;
ctx               705 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
ctx               710 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
ctx               718 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = ctx->ctx->cs_above;
ctx               721 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = ctx->ctx->cs_above || ctx->ctx->cs_equal;
ctx               727 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = !(ctx->ctx->cs_above || ctx->ctx->cs_equal);
ctx               730 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = !ctx->ctx->cs_above;
ctx               733 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = ctx->ctx->cs_equal;
ctx               736 drivers/gpu/drm/amd/amdgpu/atom.c 		execute = !ctx->ctx->cs_equal;
ctx               743 drivers/gpu/drm/amd/amdgpu/atom.c 		if (ctx->last_jump == (ctx->start + target)) {
ctx               745 drivers/gpu/drm/amd/amdgpu/atom.c 			if (time_after(cjiffies, ctx->last_jump_jiffies)) {
ctx               746 drivers/gpu/drm/amd/amdgpu/atom.c 				cjiffies -= ctx->last_jump_jiffies;
ctx               749 drivers/gpu/drm/amd/amdgpu/atom.c 					ctx->abort = true;
ctx               753 drivers/gpu/drm/amd/amdgpu/atom.c 				ctx->last_jump_jiffies = jiffies;
ctx               756 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->last_jump = ctx->start + target;
ctx               757 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->last_jump_jiffies = jiffies;
ctx               759 drivers/gpu/drm/amd/amdgpu/atom.c 		*ptr = ctx->start + target;
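
atom_op_jump() above carries a watchdog: if execution keeps jumping to the same target, the handler compares jiffies timestamps and sets ctx->abort once it has been spinning there too long, so a buggy BIOS table cannot hang the kernel. A userspace sketch of that guard, with time() standing in for jiffies and an assumed timeout constant:

#include <stdbool.h>
#include <time.h>

#define JUMP_TIMEOUT_SEC 5	/* assumed; the kernel uses a jiffies budget */

struct toy_exec {
	int start;		/* table base offset */
	int last_jump;		/* last jump target seen */
	time_t last_jump_time;	/* when we first saw it */
	bool abort;
};

static void toy_note_jump(struct toy_exec *ec, int target)
{
	if (ec->last_jump == ec->start + target) {
		/* Same target again: how long have we been looping here? */
		if (time(NULL) - ec->last_jump_time > JUMP_TIMEOUT_SEC)
			ec->abort = true;	/* likely an infinite loop */
	} else {
		/* New target: restart the watchdog. */
		ec->last_jump = ec->start + target;
		ec->last_jump_time = time(NULL);
	}
}
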
ctx               763 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
ctx               769 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               770 drivers/gpu/drm/amd/amdgpu/atom.c 	mask = atom_get_src_direct(ctx, ((attr >> 3) & 7), ptr);
ctx               773 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               777 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               780 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_move(atom_exec_context *ctx, int *ptr, int arg)
ctx               786 drivers/gpu/drm/amd/amdgpu/atom.c 		atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
ctx               788 drivers/gpu/drm/amd/amdgpu/atom.c 		atom_skip_dst(ctx, arg, attr, ptr);
ctx               792 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               794 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, src, saved);
ctx               797 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_mul(atom_exec_context *ctx, int *ptr, int arg)
ctx               802 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               804 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               805 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->divmul[0] = dst * src;
ctx               808 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_mul32(atom_exec_context *ctx, int *ptr, int arg)
ctx               814 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               816 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               818 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->divmul[0] = lower_32_bits(val64);
ctx               819 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->divmul[1] = upper_32_bits(val64);
ctx               822 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_nop(atom_exec_context *ctx, int *ptr, int arg)
ctx               827 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_or(atom_exec_context *ctx, int *ptr, int arg)
ctx               833 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               835 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               838 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               841 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_postcard(atom_exec_context *ctx, int *ptr, int arg)
ctx               847 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_repeat(atom_exec_context *ctx, int *ptr, int arg)
ctx               852 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_restorereg(atom_exec_context *ctx, int *ptr, int arg)
ctx               857 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_savereg(atom_exec_context *ctx, int *ptr, int arg)
ctx               862 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_setdatablock(atom_exec_context *ctx, int *ptr, int arg)
ctx               868 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->data_block = 0;
ctx               870 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->data_block = ctx->start;
ctx               872 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx);
ctx               873 drivers/gpu/drm/amd/amdgpu/atom.c 	SDEBUG("   base: 0x%04X\n", ctx->ctx->data_block);
ctx               876 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_setfbbase(atom_exec_context *ctx, int *ptr, int arg)
ctx               880 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->fb_base = atom_get_src(ctx, attr, ptr);
ctx               883 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_setport(atom_exec_context *ctx, int *ptr, int arg)
ctx               894 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->ctx->io_mode = ATOM_IO_MM;
ctx               896 drivers/gpu/drm/amd/amdgpu/atom.c 			ctx->ctx->io_mode = ATOM_IO_IIO | port;
ctx               900 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->io_mode = ATOM_IO_PCI;
ctx               904 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->ctx->io_mode = ATOM_IO_SYSIO;
ctx               910 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
ctx               912 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->reg_block = U16(*ptr);
ctx               914 drivers/gpu/drm/amd/amdgpu/atom.c 	SDEBUG("   base: 0x%04X\n", ctx->ctx->reg_block);
ctx               917 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
ctx               925 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               926 drivers/gpu/drm/amd/amdgpu/atom.c 	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
ctx               930 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               933 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
ctx               941 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               942 drivers/gpu/drm/amd/amdgpu/atom.c 	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
ctx               946 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               949 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
ctx               956 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               959 drivers/gpu/drm/amd/amdgpu/atom.c 	shift = atom_get_src(ctx, attr, ptr);
ctx               965 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               968 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
ctx               975 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               978 drivers/gpu/drm/amd/amdgpu/atom.c 	shift = atom_get_src(ctx, attr, ptr);
ctx               984 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               987 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_sub(atom_exec_context *ctx, int *ptr, int arg)
ctx               993 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               995 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               998 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx              1001 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_switch(atom_exec_context *ctx, int *ptr, int arg)
ctx              1006 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx              1012 drivers/gpu/drm/amd/amdgpu/atom.c 			    atom_get_src(ctx, (attr & 0x38) | ATOM_ARG_IMM,
ctx              1017 drivers/gpu/drm/amd/amdgpu/atom.c 				*ptr = ctx->start + target;
ctx              1028 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_test(atom_exec_context *ctx, int *ptr, int arg)
ctx              1033 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx              1035 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx              1036 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->ctx->cs_equal = ((dst & src) == 0);
ctx              1037 drivers/gpu/drm/amd/amdgpu/atom.c 	SDEBUG("   result: %s\n", ctx->ctx->cs_equal ? "EQ" : "NE");
ctx              1040 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_xor(atom_exec_context *ctx, int *ptr, int arg)
ctx              1046 drivers/gpu/drm/amd/amdgpu/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx              1048 drivers/gpu/drm/amd/amdgpu/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx              1051 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx              1054 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_debug(atom_exec_context *ctx, int *ptr, int arg)
ctx              1060 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_op_processds(atom_exec_context *ctx, int *ptr, int arg)
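
Every handler above shares the atom_op_*(ctx, ptr, arg) signature, which is the shape needed for an opcode-indexed dispatch table: the interpreter loop reads an opcode and calls the matching handler, which advances the instruction pointer itself. A minimal interpreter of that shape (opcodes, handlers, and encoding below are invented, and well-formed bytecode is assumed):

#include <stdint.h>

typedef struct {
	int pc;			/* instruction pointer, -1 terminates */
	uint32_t acc;		/* one scratch register */
} toy_ec;

typedef void (*toy_op)(toy_ec *ec, const uint8_t *code);

static void op_nop(toy_ec *ec, const uint8_t *code) { (void)code; ec->pc += 1; }
static void op_add(toy_ec *ec, const uint8_t *code) { ec->acc += code[ec->pc + 1]; ec->pc += 2; }
static void op_eot(toy_ec *ec, const uint8_t *code) { (void)code; ec->pc = -1; }

static const toy_op optable[] = { op_nop, op_add, op_eot };

static uint32_t toy_run(const uint8_t *code)
{
	toy_ec ec = { .pc = 0, .acc = 0 };

	while (ec.pc >= 0)
		optable[code[ec.pc]](&ec, code);	/* dispatch on opcode */
	return ec.acc;		/* e.g. {1, 5, 2} yields 5 */
}
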
ctx              1201 drivers/gpu/drm/amd/amdgpu/atom.c static int amdgpu_atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params)
ctx              1203 drivers/gpu/drm/amd/amdgpu/atom.c 	int base = CU16(ctx->cmd_table + 4 + 2 * index);
ctx              1219 drivers/gpu/drm/amd/amdgpu/atom.c 	ectx.ctx = ctx;
ctx              1262 drivers/gpu/drm/amd/amdgpu/atom.c int amdgpu_atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
ctx              1266 drivers/gpu/drm/amd/amdgpu/atom.c 	mutex_lock(&ctx->mutex);
ctx              1268 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->data_block = 0;
ctx              1270 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->reg_block = 0;
ctx              1272 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->fb_base = 0;
ctx              1274 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->io_mode = ATOM_IO_MM;
ctx              1276 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->divmul[0] = 0;
ctx              1277 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->divmul[1] = 0;
ctx              1278 drivers/gpu/drm/amd/amdgpu/atom.c 	r = amdgpu_atom_execute_table_locked(ctx, index, params);
ctx              1279 drivers/gpu/drm/amd/amdgpu/atom.c 	mutex_unlock(&ctx->mutex);
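
amdgpu_atom_execute_table() above is the locked public entry point: it takes ctx->mutex so only one table runs at a time, resets the per-run scratch state (data_block, reg_block, fb_base, io_mode, divmul), and only then calls the _locked worker, so no execution can observe leftovers from the previous one. The same pattern in miniature (toy names; io_mode omitted):

#include <pthread.h>
#include <stdint.h>

struct toy_atom_ctx {
	pthread_mutex_t mutex;	/* one table executes at a time */
	uint32_t data_block;	/* per-run scratch state follows */
	uint32_t reg_block;
	uint32_t fb_base;
	uint32_t divmul[2];
};

static int toy_execute_table_locked(struct toy_atom_ctx *c, int index)
{
	(void)c;
	(void)index;
	return 0;		/* the bytecode loop is elided */
}

static int toy_execute_table(struct toy_atom_ctx *c, int index)
{
	int r;

	pthread_mutex_lock(&c->mutex);
	c->data_block = 0;	/* reset scratch state on every entry */
	c->reg_block = 0;
	c->fb_base = 0;
	c->divmul[0] = 0;
	c->divmul[1] = 0;
	r = toy_execute_table_locked(c, index);
	pthread_mutex_unlock(&c->mutex);
	return r;
}
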
ctx              1285 drivers/gpu/drm/amd/amdgpu/atom.c static void atom_index_iio(struct atom_context *ctx, int base)
ctx              1287 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->iio = kzalloc(2 * 256, GFP_KERNEL);
ctx              1288 drivers/gpu/drm/amd/amdgpu/atom.c 	if (!ctx->iio)
ctx              1291 drivers/gpu/drm/amd/amdgpu/atom.c 		ctx->iio[CU8(base + 1)] = base + 2;
ctx              1302 drivers/gpu/drm/amd/amdgpu/atom.c 	struct atom_context *ctx =
ctx              1307 drivers/gpu/drm/amd/amdgpu/atom.c 	if (!ctx)
ctx              1310 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->card = card;
ctx              1311 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->bios = bios;
ctx              1315 drivers/gpu/drm/amd/amdgpu/atom.c 		kfree(ctx);
ctx              1322 drivers/gpu/drm/amd/amdgpu/atom.c 		kfree(ctx);
ctx              1331 drivers/gpu/drm/amd/amdgpu/atom.c 		kfree(ctx);
ctx              1335 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->cmd_table = CU16(base + ATOM_ROM_CMD_PTR);
ctx              1336 drivers/gpu/drm/amd/amdgpu/atom.c 	ctx->data_table = CU16(base + ATOM_ROM_DATA_PTR);
ctx              1337 drivers/gpu/drm/amd/amdgpu/atom.c 	atom_index_iio(ctx, CU16(ctx->data_table + ATOM_DATA_IIO_PTR) + 4);
ctx              1338 drivers/gpu/drm/amd/amdgpu/atom.c 	if (!ctx->iio) {
ctx              1339 drivers/gpu/drm/amd/amdgpu/atom.c 		amdgpu_atom_destroy(ctx);
ctx              1350 drivers/gpu/drm/amd/amdgpu/atom.c 		strlcpy(ctx->vbios_version, str, sizeof(ctx->vbios_version));
ctx              1354 drivers/gpu/drm/amd/amdgpu/atom.c 	return ctx;
ctx              1357 drivers/gpu/drm/amd/amdgpu/atom.c int amdgpu_atom_asic_init(struct atom_context *ctx)
ctx              1359 drivers/gpu/drm/amd/amdgpu/atom.c 	int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
ctx              1370 drivers/gpu/drm/amd/amdgpu/atom.c 	if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
ctx              1372 drivers/gpu/drm/amd/amdgpu/atom.c 	ret = amdgpu_atom_execute_table(ctx, ATOM_CMD_INIT, ps);
ctx              1381 drivers/gpu/drm/amd/amdgpu/atom.c void amdgpu_atom_destroy(struct atom_context *ctx)
ctx              1383 drivers/gpu/drm/amd/amdgpu/atom.c 	kfree(ctx->iio);
ctx              1384 drivers/gpu/drm/amd/amdgpu/atom.c 	kfree(ctx);
ctx              1387 drivers/gpu/drm/amd/amdgpu/atom.c bool amdgpu_atom_parse_data_header(struct atom_context *ctx, int index,
ctx              1392 drivers/gpu/drm/amd/amdgpu/atom.c 	int idx = CU16(ctx->data_table + offset);
ctx              1393 drivers/gpu/drm/amd/amdgpu/atom.c 	u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);
ctx              1408 drivers/gpu/drm/amd/amdgpu/atom.c bool amdgpu_atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t * frev,
ctx              1412 drivers/gpu/drm/amd/amdgpu/atom.c 	int idx = CU16(ctx->cmd_table + offset);
ctx              1413 drivers/gpu/drm/amd/amdgpu/atom.c 	u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);
ctx               153 drivers/gpu/drm/amd/amdgpu/atom.h bool amdgpu_atom_parse_data_header(struct atom_context *ctx, int index, uint16_t *size,
ctx               155 drivers/gpu/drm/amd/amdgpu/atom.h bool amdgpu_atom_parse_cmd_header(struct atom_context *ctx, int index,
ctx              2457 drivers/gpu/drm/amd/amdgpu/dce_v10_0.c 				    struct drm_modeset_acquire_ctx *ctx)
ctx              2536 drivers/gpu/drm/amd/amdgpu/dce_v11_0.c 				    struct drm_modeset_acquire_ctx *ctx)
ctx              2349 drivers/gpu/drm/amd/amdgpu/dce_v6_0.c 				   struct drm_modeset_acquire_ctx *ctx)
ctx              2358 drivers/gpu/drm/amd/amdgpu/dce_v8_0.c 				   struct drm_modeset_acquire_ctx *ctx)
ctx               106 drivers/gpu/drm/amd/amdgpu/dce_virtual.c 				      struct drm_modeset_acquire_ctx *ctx)
ctx              1200 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	struct amdgpu_ras *ctx;
ctx              1351 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	ctx = amdgpu_ras_get_context((struct amdgpu_device *)(dev->gpu->kgd));
ctx              1352 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 	if (ctx) {
ctx              1355 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 			(((ctx->features & BIT(AMDGPU_RAS_BLOCK__SDMA)) != 0) ||
ctx              1356 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 			 ((ctx->features & BIT(AMDGPU_RAS_BLOCK__GFX)) != 0)) ?
ctx              1358 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		dev->node_props.capability |= ((ctx->features & BIT(AMDGPU_RAS_BLOCK__UMC)) != 0) ?
ctx              1361 drivers/gpu/drm/amd/amdkfd/kfd_topology.c 		dev->node_props.capability |= (ctx->features != 0) ?
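
The kfd_topology.c lines above fold the per-block RAS features bitmask into coarse node capability flags: one flag if SDMA or GFX is covered, one for the memory controller (UMC), one if anything at all is covered. A standalone model of that derivation (bit positions and flag names below are invented stand-ins for the real AMDGPU_RAS_BLOCK__* and capability values):

#include <stdint.h>

#define RAS_BLOCK_SDMA	(1u << 0)
#define RAS_BLOCK_GFX	(1u << 1)
#define RAS_BLOCK_UMC	(1u << 2)

#define CAP_COMPUTE_RAS	(1u << 0)	/* SDMA or GFX covered */
#define CAP_MEM_RAS	(1u << 1)	/* memory controller covered */
#define CAP_ANY_RAS	(1u << 2)	/* anything at all covered */

static uint32_t ras_capabilities(uint32_t features)
{
	uint32_t cap = 0;

	if (features & (RAS_BLOCK_SDMA | RAS_BLOCK_GFX))
		cap |= CAP_COMPUTE_RAS;
	if (features & RAS_BLOCK_UMC)
		cap |= CAP_MEM_RAS;
	if (features)
		cap |= CAP_ANY_RAS;
	return cap;
}
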
ctx               505 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			adev->dm.dc->ctx->fbc_gpu_addr = compressor->gpu_addr;
ctx              1087 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	struct dc_context *dc_ctx = link->ctx;
ctx              1155 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			link->ctx,
ctx              3646 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			if (dc_dsc_compute_config(aconnector->dc_link->ctx->dc,
ctx              5139 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 			ddc_service->ctx->dc,
ctx              5163 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c 	struct amdgpu_device *adev = ddc_service->ctx->driver_context;
ctx               117 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.c 		if (!dc_stream_configure_crc(stream_state->ctx->dc,
ctx               308 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.c 		if (!dc_stream_get_crc(stream_state->ctx->dc, stream_state,
ctx               851 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c 	dm_helpers_dp_write_dpcd(link->ctx, link,
ctx               873 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c 	dm_helpers_dp_read_dpcd(link->ctx, link,
ctx                53 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               172 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	struct dc_context *ctx,
ctx               180 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               267 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	struct dc_context *ctx,
ctx               275 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	struct dc_context *ctx,
ctx               284 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               310 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               339 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c void dm_dtn_log_begin(struct dc_context *ctx,
ctx               349 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	dm_dtn_log_append_v(ctx, log_ctx, "%s", msg);
ctx               352 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c void dm_dtn_log_append_v(struct dc_context *ctx,
ctx               413 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c void dm_dtn_log_end(struct dc_context *ctx,
ctx               423 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	dm_dtn_log_append_v(ctx, log_ctx, "%s", msg);
ctx               427 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               451 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               469 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               488 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               506 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               541 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               548 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 	return dm_helpers_dp_write_dpcd(ctx, stream->sink->link, DP_DSC_ENABLE, &enable_dsc, 1);
ctx               569 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		struct dc_context *ctx,
ctx               601 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 						ctx,
ctx               615 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		dm_helpers_dp_read_dpcd(ctx,
ctx               626 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		dm_helpers_dp_write_dpcd(ctx,
ctx               632 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c 		dm_helpers_dp_write_dpcd(ctx,
ctx               643 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_helpers.c void dm_set_dcn_clocks(struct dc_context *ctx, struct dc_clocks *clks)
ctx                39 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		const struct dc_context *ctx,
ctx                42 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               332 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		const struct dc_context *ctx,
ctx               336 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               418 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               422 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               448 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               452 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               477 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               485 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               493 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               496 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               520 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx,
ctx               523 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               546 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               547 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               601 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               602 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               614 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               615 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               627 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               628 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               640 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               641 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               653 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               654 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               667 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               668 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               727 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               728 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               743 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               744 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               759 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               760 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               776 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               777 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               798 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               799 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               821 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               822 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               834 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               835 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               869 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               870 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               888 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	const struct dc_context *ctx = pp->dm;
ctx               889 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	struct amdgpu_device *adev = ctx->driver_context;
ctx               906 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		struct dc_context *ctx,
ctx               909 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 	switch (ctx->dce_version) {
ctx               912 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		funcs->ctx.ver = PP_SMU_VER_RV;
ctx               913 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		funcs->rv_funcs.pp_smu.dm = ctx;
ctx               927 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		funcs->ctx.ver = PP_SMU_VER_NV;
ctx               928 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_pp_smu.c 		funcs->nv_funcs.pp_smu.dm = ctx;
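
The amdgpu_dm_pp_smu.c hooks above all start the same way: pp->dm holds an opaque back-pointer to the dc_context that was stored when the function table was bound, and each hook walks ctx->driver_context to recover the amdgpu_device. A sketch of that back-pointer chain with toy stand-in types:

#include <stddef.h>

struct toy_device {
	int id;
};

struct toy_dc_context {
	struct toy_device *driver_context;	/* back to the owning device */
};

struct toy_pp_smu {
	const struct toy_dc_context *dm;	/* opaque handle, set at bind */
};

static void toy_bind_hooks(struct toy_pp_smu *pp, struct toy_dc_context *ctx)
{
	pp->dm = ctx;		/* mirrors funcs->..._funcs.pp_smu.dm = ctx */
}

static int toy_hook(struct toy_pp_smu *pp)
{
	const struct toy_dc_context *ctx = pp->dm;
	struct toy_device *adev = ctx->driver_context;

	return adev ? 0 : -1;	/* a real hook would program the SMU */
}
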
ctx                39 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_services.c unsigned long long dm_get_elapse_time_in_ns(struct dc_context *ctx,
ctx                50 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_services.c bool dm_write_persistent_data(struct dc_context *ctx,
ctx                62 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_services.c bool dm_read_persistent_data(struct dc_context *ctx,
ctx                33 drivers/gpu/drm/amd/display/dc/basics/vector.c 	struct dc_context *ctx,
ctx                51 drivers/gpu/drm/amd/display/dc/basics/vector.c 	vector->ctx = ctx;
ctx                57 drivers/gpu/drm/amd/display/dc/basics/vector.c 	struct dc_context *ctx,
ctx                95 drivers/gpu/drm/amd/display/dc/basics/vector.c 	struct dc_context *ctx,
ctx               106 drivers/gpu/drm/amd/display/dc/basics/vector.c 		vector, ctx, size, initial_value, struct_size))
ctx               115 drivers/gpu/drm/amd/display/dc/basics/vector.c 	struct dc_context *ctx,
ctx               124 drivers/gpu/drm/amd/display/dc/basics/vector.c 	if (dal_vector_construct(vector, ctx, capacity, struct_size))
ctx               257 drivers/gpu/drm/amd/display/dc/basics/vector.c 			vector->ctx,
ctx               266 drivers/gpu/drm/amd/display/dc/basics/vector.c 		vec_cloned = dal_vector_presized_create(vector->ctx, count,
ctx                55 drivers/gpu/drm/amd/display/dc/bios/bios_parser.c 	bp->base.ctx->logger
ctx              2864 drivers/gpu/drm/amd/display/dc/bios/bios_parser.c 	bp->base.ctx = init->ctx;
ctx                69 drivers/gpu/drm/amd/display/dc/bios/bios_parser2.c 	bp->base.ctx->logger
ctx              1941 drivers/gpu/drm/amd/display/dc/bios/bios_parser2.c 	bp->base.ctx = init->ctx;
ctx                49 drivers/gpu/drm/amd/display/dc/bios/bios_parser_helper.c 	bios->ctx
ctx                38 drivers/gpu/drm/amd/display/dc/bios/command_table.c 	(amdgpu_atom_execute_table(((struct amdgpu_device *)bp->base.ctx->driver_context)->mode_info.atom_context, \
ctx                43 drivers/gpu/drm/amd/display/dc/bios/command_table.c 	amdgpu_atom_parse_cmd_header(((struct amdgpu_device *)bp->base.ctx->driver_context)->mode_info.atom_context, \
ctx                47 drivers/gpu/drm/amd/display/dc/bios/command_table.c 	bios_cmd_table_para_revision(bp->base.ctx->driver_context, \
ctx                42 drivers/gpu/drm/amd/display/dc/bios/command_table2.c 	bp->base.ctx->logger
ctx                50 drivers/gpu/drm/amd/display/dc/bios/command_table2.c 	(amdgpu_atom_execute_table(((struct amdgpu_device *)bp->base.ctx->driver_context)->mode_info.atom_context, \
ctx                55 drivers/gpu/drm/amd/display/dc/bios/command_table2.c 	amdgpu_atom_parse_cmd_header(((struct amdgpu_device *)bp->base.ctx->driver_context)->mode_info.atom_context, \
ctx                59 drivers/gpu/drm/amd/display/dc/bios/command_table2.c 	bios_cmd_table_para_revision(bp->base.ctx->driver_context, \
ctx                28 drivers/gpu/drm/amd/display/dc/calcs/calcs_logger.h #define DC_LOGGER ctx->logger
ctx                30 drivers/gpu/drm/amd/display/dc/calcs/calcs_logger.h static void print_bw_calcs_dceip(struct dc_context *ctx, const struct bw_calcs_dceip *dceip)
ctx               124 drivers/gpu/drm/amd/display/dc/calcs/calcs_logger.h static void print_bw_calcs_vbios(struct dc_context *ctx, const struct bw_calcs_vbios *vbios)
ctx               183 drivers/gpu/drm/amd/display/dc/calcs/calcs_logger.h static void print_bw_calcs_data(struct dc_context *ctx, struct bw_calcs_data *data)
ctx              3014 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c bool bw_calcs(struct dc_context *ctx,
ctx              3028 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 	if (ctx->dc->config.multi_mon_pp_mclk_switch)
ctx              3047 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->debug.bandwidth_calcs_trace) {
ctx              3048 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			print_bw_calcs_dceip(ctx, dceip);
ctx              3049 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			print_bw_calcs_vbios(ctx, vbios);
ctx              3050 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			print_bw_calcs_data(ctx, data);
ctx              3098 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3128 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3156 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3184 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3219 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3249 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3277 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3305 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3339 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3368 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3396 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3424 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 			if (ctx->dc->caps.max_slave_planes) {
ctx              3472 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3500 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3528 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx              3556 drivers/gpu/drm/amd/display/dc/calcs/dce_calcs.c 		if (ctx->dc->caps.max_slave_planes) {
ctx                39 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 	dc->ctx->logger
ctx               313 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 	if (pipe->plane_res.dpp->ctx->dc->debug.optimized_watermark) {
ctx               328 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 		input->src.dcc = pipe->plane_res.dpp->ctx->dc->res_pool->hubbub->funcs->
ctx              1283 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 	if (bw_limit_pass && v->voltage_level <= get_highest_allowed_voltage_level(dc->ctx->asic_id.hw_internal_rev))
ctx              1438 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 	struct dc_context *ctx = dc->ctx;
ctx              1445 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 			ctx, DM_PP_CLOCK_TYPE_FCLK, &fclks);
ctx              1481 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c 			ctx, DM_PP_CLOCK_TYPE_DCFCLK, &dcfclks);
ctx                69 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg)
ctx                71 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 	struct hw_asic_id asic_id = ctx->asic_id;
ctx                83 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 		dce_clk_mgr_construct(ctx, clk_mgr);
ctx                86 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 		dce110_clk_mgr_construct(ctx, clk_mgr);
ctx                91 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			dce_clk_mgr_construct(ctx, clk_mgr);
ctx                97 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			dce112_clk_mgr_construct(ctx, clk_mgr);
ctx               101 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			dce112_clk_mgr_construct(ctx, clk_mgr);
ctx               107 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			dce121_clk_mgr_construct(ctx, clk_mgr);
ctx               109 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			dce120_clk_mgr_construct(ctx, clk_mgr);
ctx               116 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			rn_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
ctx               121 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			rv2_clk_mgr_construct(ctx, clk_mgr, pp_smu);
ctx               126 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 			rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
ctx               134 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c 		dcn20_clk_mgr_construct(ctx, clk_mgr, pp_smu, dccg);
ctx               235 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_base->ctx->dc_bios;
ctx               237 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	struct dmcu *dmcu = clk_mgr_dce->base.ctx->dc->res_pool->dmcu;
ctx               274 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	struct dc_debug_options *debug = &clk_mgr_dce->base.ctx->dc->debug;
ctx               275 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               327 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               392 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 		dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
ctx               411 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr_base->ctx, &level_change_req))
ctx               419 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	dce_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
ctx               435 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 		struct dc_context *ctx,
ctx               445 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	base->ctx = ctx;
ctx               457 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c 	if (dm_pp_get_static_clocks(ctx, &static_clk_info))
ctx                43 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.h 		struct dc_context *ctx,
ctx               198 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 	if (ASICREV_IS_VEGA20_P(dc->ctx->asic_id.hw_internal_rev) && (context->stream_count >= 2)) {
ctx               245 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 		dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
ctx               264 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr_base->ctx, &level_change_req))
ctx               272 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
ctx               281 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 		struct dc_context *ctx,
ctx               284 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c 	dce_clk_mgr_construct(ctx, clk_mgr);
ctx                30 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h 		struct dc_context *ctx,
ctx                74 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	struct dc_bios *bp = clk_mgr_base->ctx->dc_bios;
ctx                75 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	struct dc *core_dc = clk_mgr_base->ctx->dc;
ctx               104 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	if (!ASICREV_IS_VEGA20_P(clk_mgr_base->ctx->asic_id.hw_internal_rev))
ctx               113 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               128 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	struct dc_bios *bp = clk_mgr->base.ctx->dc_bios;
ctx               129 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	struct dc *core_dc = clk_mgr->base.ctx->dc;
ctx               155 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               171 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	struct dc_bios *bp = clk_mgr->base.ctx->dc_bios;
ctx               180 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	if (!ASICREV_IS_VEGA20_P(clk_mgr->base.ctx->asic_id.hw_internal_rev))
ctx               209 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr_base->ctx, &level_change_req))
ctx               217 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
ctx               226 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 		struct dc_context *ctx,
ctx               229 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c 	dce_clk_mgr_construct(ctx, clk_mgr);
ctx                31 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.h 		struct dc_context *ctx,
ctx                60 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               109 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 		dm_pp_apply_clock_for_voltage_request(clk_mgr_base->ctx, &clock_voltage_req);
ctx               117 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 		dm_pp_apply_clock_for_voltage_request(clk_mgr_base->ctx, &clock_voltage_req);
ctx               119 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
ctx               128 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c void dce120_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr)
ctx               130 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 	dce_clk_mgr_construct(ctx, clk_mgr);
ctx               140 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c void dce121_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr)
ctx               142 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 	dce120_clk_mgr_construct(ctx, clk_mgr);
ctx               149 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c 	if (dce121_xgmi_enabled(ctx->dc->hwseq))
ctx                29 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.h void dce120_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr);
ctx                30 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.h void dce121_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr);
ctx               130 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c 	struct dc *dc = clk_mgr_base->ctx->dc;
ctx               251 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c void rv1_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr, struct pp_smu_funcs *pp_smu)
ctx               253 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c 	struct dc_debug_options *debug = &ctx->dc->debug;
ctx               254 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c 	struct dc_bios *bp = ctx->dc_bios;
ctx               256 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c 	clk_mgr->base.ctx = ctx;
ctx                29 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.h void rv1_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr, struct pp_smu_funcs *pp_smu);
ctx                91 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr_vbios_smu.c 	struct dc *core_dc = clk_mgr->base.ctx->dc;
ctx               103 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr_vbios_smu.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx                37 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv2_clk_mgr.c void rv2_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr, struct pp_smu_funcs *pp_smu)
ctx                40 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv2_clk_mgr.c 	rv1_clk_mgr_construct(ctx, clk_mgr, pp_smu);
ctx                29 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv2_clk_mgr.h void rv2_clk_mgr_construct(struct dc_context *ctx, struct clk_mgr_internal *clk_mgr, struct pp_smu_funcs *pp_smu);
ctx               107 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
ctx               146 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	struct dc *dc = clk_mgr_base->ctx->dc;
ctx               167 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	struct dc *dc = clk_mgr_base->ctx->dc;
ctx               192 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	struct dc *dc = clk_mgr_base->ctx->dc;
ctx               197 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;
ctx               281 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 			for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
ctx               298 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 			for (i = 0; i < clk_mgr->base.ctx->dc->res_pool->pipe_count; i++) {
ctx               374 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	dm_set_dcn_clocks(clk_mgr->ctx, &clk_mgr->clks);
ctx               428 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 		struct dc_context *ctx,
ctx               433 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	clk_mgr->base.ctx = ctx;
ctx               449 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c 	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) {
ctx                41 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h void dcn20_clk_mgr_construct(struct dc_context *ctx,
ctx                61 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	struct dc *dc = clk_mgr_base->ctx->dc;
ctx                67 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	struct dmcu *dmcu = clk_mgr_base->ctx->dc->res_pool->dmcu;
ctx                95 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	if (!IS_DIAG_DC(dc->ctx->dce_environment)) {
ctx               517 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 		struct dc_context *ctx,
ctx               522 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	struct dc_debug_options *debug = &ctx->dc->debug;
ctx               526 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	clk_mgr->base.ctx = ctx;
ctx               541 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment)) {
ctx               574 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 		clk_mgr_helper_populate_bw_params(clk_mgr->base.bw_params, &clock_table, &ctx->asic_id);
ctx                34 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.h void rn_clk_mgr_construct(struct dc_context *ctx,
ctx                85 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr_vbios_smu.c 	struct dc *core_dc = clk_mgr->base.ctx->dc;
ctx                98 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr_vbios_smu.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx                73 drivers/gpu/drm/amd/display/dc/core/dc.c 	dc->ctx->logger
ctx               152 drivers/gpu/drm/amd/display/dc/core/dc.c 	struct dc_bios *bios = dc->ctx->dc_bios;
ctx               176 drivers/gpu/drm/amd/display/dc/core/dc.c 		link_init_params.ctx = dc->ctx;
ctx               220 drivers/gpu/drm/amd/display/dc/core/dc.c 		link->ctx = dc->ctx;
ctx               235 drivers/gpu/drm/amd/display/dc/core/dc.c 		enc_init.ctx = dc->ctx;
ctx               536 drivers/gpu/drm/amd/display/dc/core/dc.c 	if (dc->ctx->gpio_service)
ctx               537 drivers/gpu/drm/amd/display/dc/core/dc.c 		dal_gpio_service_destroy(&dc->ctx->gpio_service);
ctx               539 drivers/gpu/drm/amd/display/dc/core/dc.c 	if (dc->ctx->created_bios)
ctx               540 drivers/gpu/drm/amd/display/dc/core/dc.c 		dal_bios_parser_destroy(&dc->ctx->dc_bios);
ctx               542 drivers/gpu/drm/amd/display/dc/core/dc.c 	dc_perf_trace_destroy(&dc->ctx->perf_trace);
ctx               544 drivers/gpu/drm/amd/display/dc/core/dc.c 	kfree(dc->ctx);
ctx               545 drivers/gpu/drm/amd/display/dc/core/dc.c 	dc->ctx = NULL;
ctx               641 drivers/gpu/drm/amd/display/dc/core/dc.c 	dc->ctx = dc_ctx;
ctx               659 drivers/gpu/drm/amd/display/dc/core/dc.c 		bp_init_data.ctx = dc_ctx;
ctx               694 drivers/gpu/drm/amd/display/dc/core/dc.c 	dc->clk_mgr = dc_clk_mgr_create(dc->ctx, dc->res_pool->pp_smu, dc->res_pool->dccg);
ctx               843 drivers/gpu/drm/amd/display/dc/core/dc.c 		struct dc_state *ctx)
ctx               850 drivers/gpu/drm/amd/display/dc/core/dc.c 		if (!ctx->res_ctx.pipe_ctx[i].stream ||
ctx               851 drivers/gpu/drm/amd/display/dc/core/dc.c 				!ctx->res_ctx.pipe_ctx[i].stream->triggered_crtc_reset.enabled)
ctx               853 drivers/gpu/drm/amd/display/dc/core/dc.c 		if (ctx->res_ctx.pipe_ctx[i].stream == ctx->res_ctx.pipe_ctx[i].stream->triggered_crtc_reset.event_source)
ctx               855 drivers/gpu/drm/amd/display/dc/core/dc.c 		multisync_pipes[multisync_count] = &ctx->res_ctx.pipe_ctx[i];
ctx               867 drivers/gpu/drm/amd/display/dc/core/dc.c 		struct dc_state *ctx)
ctx               876 drivers/gpu/drm/amd/display/dc/core/dc.c 		if (!ctx->res_ctx.pipe_ctx[i].stream || ctx->res_ctx.pipe_ctx[i].top_pipe)
ctx               879 drivers/gpu/drm/amd/display/dc/core/dc.c 		unsynced_pipes[i] = &ctx->res_ctx.pipe_ctx[i];
ctx               925 drivers/gpu/drm/amd/display/dc/core/dc.c 			struct dc_stream_status *status = dc_stream_get_status_from_state(ctx, pipe_set[k]->stream);
ctx              1057 drivers/gpu/drm/amd/display/dc/core/dc.c 	struct dc_bios *dcb = dc->ctx->dc_bios;
ctx              1665 drivers/gpu/drm/amd/display/dc/core/dc.c 	struct dc_state *ctx,
ctx              1670 drivers/gpu/drm/amd/display/dc/core/dc.c 	for (i = 0; i < ctx->stream_count; i++) {
ctx              1671 drivers/gpu/drm/amd/display/dc/core/dc.c 		if (stream == ctx->streams[i]) {
ctx              1672 drivers/gpu/drm/amd/display/dc/core/dc.c 			return &ctx->stream_status[i];
ctx              2037 drivers/gpu/drm/amd/display/dc/core/dc.c 	if (!IS_DIAG_DC(dc->ctx->dce_environment)) {
ctx              2179 drivers/gpu/drm/amd/display/dc/core/dc.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              2463 drivers/gpu/drm/amd/display/dc/core/dc.c 			link->ctx,
ctx                64 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               184 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               302 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               317 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               351 drivers/gpu/drm/amd/display/dc/core/dc_debug.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               178 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	hpd = get_hpd_gpio(link->ctx->dc_bios, link->link_id, link->ctx->gpio_service);
ctx               229 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	hpd_pin = get_hpd_gpio(link->ctx->dc_bios, link->link_id, link->ctx->gpio_service);
ctx               635 drivers/gpu/drm/amd/display/dc/core/dc_link.c 				link->ctx,
ctx               639 drivers/gpu/drm/amd/display/dc/core/dc_link.c 				link->ctx,
ctx               698 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(link->ctx->logger);
ctx               709 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	enter_timestamp = dm_get_timestamp(link->ctx);
ctx               716 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			finish_timestamp = dm_get_timestamp(link->ctx);
ctx               718 drivers/gpu/drm/amd/display/dc/core/dc_link.c 				link->ctx, finish_timestamp, enter_timestamp);
ctx               725 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	finish_timestamp = dm_get_timestamp(link->ctx);
ctx               726 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	time_taken_in_ns = dm_get_elapse_time_in_ns(link->ctx, finish_timestamp,
ctx               750 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc_context *dc_ctx = link->ctx;
ctx               756 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(link->ctx->logger);
ctx               916 drivers/gpu/drm/amd/display/dc/core/dc_link.c 				link->ctx,
ctx              1023 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			dm_helpers_dp_mst_stop_top_mgr(link->ctx, link);
ctx              1068 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	hpd = get_hpd_gpio(link->ctx->dc_bios, link->link_id, link->ctx->gpio_service);
ctx              1214 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc_context *dc_ctx = init_params->ctx;
ctx              1217 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc_bios *bios = init_params->dc->ctx->dc_bios;
ctx              1227 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	link->ctx = dc_ctx;
ctx              1245 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	link->hpd_gpio = get_hpd_gpio(link->ctx->dc_bios, link->link_id, link->ctx->gpio_service);
ctx              1296 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	ddc_service_init_data.ctx = link->ctx;
ctx              1310 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	enc_init_data.ctx = dc_ctx;
ctx              1568 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	dm_helpers_dp_mst_clear_payload_allocation_table(link->ctx, link);
ctx              1573 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	dm_helpers_dp_mst_poll_pending_down_reply(link->ctx, link);
ctx              1588 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			pipe_ctx->stream->ctx->dc_bios->integrated_info;
ctx              1689 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	cmd.speed = pipe_ctx->stream->ctx->dc->caps.i2c_speed_in_khz;
ctx              1697 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	if (dm_helpers_submit_i2c(pipe_ctx->stream->ctx,
ctx              1717 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
ctx              1878 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
ctx              2004 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
ctx              2309 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct abm *abm = link->ctx->dc->res_pool->abm;
ctx              2321 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc  *core_dc = link->ctx->dc;
ctx              2327 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(link->ctx->logger);
ctx              2374 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc  *core_dc = link->ctx->dc;
ctx              2387 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc  *core_dc = link->ctx->dc;
ctx              2525 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(link->ctx->logger);
ctx              2534 drivers/gpu/drm/amd/display/dc/core/dc_link.c 		stream->ctx,
ctx              2573 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			stream->ctx,
ctx              2577 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			stream->ctx,
ctx              2604 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(link->ctx->logger);
ctx              2621 drivers/gpu/drm/amd/display/dc/core/dc_link.c 				stream->ctx,
ctx              2660 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			stream->ctx,
ctx              2664 drivers/gpu/drm/amd/display/dc/core/dc_link.c 			stream->ctx,
ctx              2676 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc *core_dc = pipe_ctx->stream->ctx->dc;
ctx              2679 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
ctx              2719 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx              2810 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc  *core_dc = pipe_ctx->stream->ctx->dc;
ctx              2856 drivers/gpu/drm/amd/display/dc/core/dc_link.c 	struct dc  *core_dc = pipe_ctx->stream->ctx->dc;
ctx              2890 drivers/gpu/drm/amd/display/dc/core/dc_link.c 		hpd = get_hpd_gpio(link->ctx->dc_bios, link->link_id, link->ctx->gpio_service);
ctx               129 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c static struct i2c_payloads *dal_ddc_i2c_payloads_create(struct dc_context *ctx, uint32_t count)
ctx               139 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 		&payloads->payloads, ctx, count, sizeof(struct i2c_payload)))
ctx               197 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 	struct gpio_service *gpio_service = init_data->ctx->gpio_service;
ctx               200 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 	struct dc_bios *dcb = init_data->ctx->dc_bios;
ctx               203 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 	ddc_service->ctx = init_data->ctx;
ctx               360 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 		.speed = ddc->ctx->dc->caps.i2c_speed_in_khz };
ctx               363 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 			ddc->ctx,
ctx               560 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 			dal_ddc_i2c_payloads_create(ddc->ctx, payloads_num);
ctx               566 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 			.speed = ddc->ctx->dc->caps.i2c_speed_in_khz };
ctx               578 drivers/gpu/drm/amd/display/dc/core/dc_link_ddc.c 				ddc->ctx,
ctx                22 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 	link->ctx->logger
ctx              1230 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 		link->ctx->dc->debug_data.ltFailCount++;
ctx              2068 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 		link->ctx,
ctx              2081 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 			link->ctx,
ctx              2096 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 				link->ctx,
ctx              3464 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 			IS_FPGA_MAXIMUS_DC(link->ctx->dce_environment))
ctx              3500 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c 			IS_FPGA_MAXIMUS_DC(link->ctx->dce_environment))
ctx                29 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 			!dm_helpers_dp_read_dpcd(link->ctx,
ctx                44 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 			!dm_helpers_dp_write_dpcd(link->ctx,
ctx                72 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	struct dc  *core_dc = link->ctx->dc;
ctx               177 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	struct dc  *core_dc = link->ctx->dc;
ctx               344 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	dsc->ctx->logger
ctx               368 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	struct dc *core_dc = pipe_ctx->stream->ctx->dc;
ctx               372 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	if (IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment))
ctx               375 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 		result = dm_helpers_dp_write_dsc_enable(core_dc->ctx, stream, enable);
ctx               385 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	struct dc *core_dc = pipe_ctx->stream->ctx->dc;
ctx               421 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 		if (dc_is_dp_signal(stream->signal) && !IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               446 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 		if (dc_is_dp_signal(stream->signal) && !IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               489 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 	struct dc *core_dc = pipe_ctx->stream->ctx->dc;
ctx               508 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 		if (dc_is_dp_signal(stream->signal) && !IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               517 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c 		if (dc_is_dp_signal(stream->signal) && !IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               189 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 		if (dc->ctx->dc_bios->fw_info_valid) {
ctx               191 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 				dc->ctx->dc_bios->fw_info.pll_info.crystal_frequency;
ctx               252 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	struct dc_context *ctx = dc->ctx;
ctx               259 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 		create_funcs->read_dce_straps(dc->ctx, &straps);
ctx               272 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 			struct audio *aud = create_funcs->create_audio(ctx, i);
ctx               290 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
ctx               304 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 					ctx, ctx->dc_bios);
ctx               312 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	dc->hwseq = create_funcs->create_hwseq(ctx);
ctx               416 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	if (stream1->ctx->dc->caps.disable_dp_clk_share)
ctx               967 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
ctx              1690 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1717 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              1912 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              1915 drivers/gpu/drm/amd/display/dc/core/dc_resource.c 	struct dc_bios *dcb = dc->ctx->dc_bios;
ctx                54 drivers/gpu/drm/amd/display/dc/core/dc_sink.c 	sink->ctx = link->ctx;
ctx                58 drivers/gpu/drm/amd/display/dc/core/dc_sink.c 	sink->sink_id = init_params->link->ctx->dc_sink_id_count;
ctx                61 drivers/gpu/drm/amd/display/dc/core/dc_sink.c 	init_params->link->ctx->dc_sink_id_count++;
ctx                39 drivers/gpu/drm/amd/display/dc/core/dc_stream.c #define DC_LOGGER dc->ctx->logger
ctx                52 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 		if (stream->ctx->dc->caps.dual_link_dvi &&
ctx                69 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	stream->ctx = dc_sink_data->ctx;
ctx               126 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	stream->out_transfer_func->ctx = stream->ctx;
ctx               128 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	stream->stream_id = stream->ctx->dc_stream_id_count;
ctx               129 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	stream->ctx->dc_stream_id_count++;
ctx               194 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	new_stream->stream_id = new_stream->ctx->dc_stream_id_count;
ctx               195 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	new_stream->ctx->dc_stream_id_count++;
ctx               234 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	struct dc *dc = stream->ctx->dc;
ctx               246 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	if (stream->ctx->asic_id.chip_family == FAMILY_RV &&
ctx               247 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 			ASICREV_IS_RAVEN(stream->ctx->asic_id.hw_internal_rev)) {
ctx               293 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	core_dc = stream->ctx->dc;
ctx               340 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	core_dc = stream->ctx->dc;
ctx               488 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	struct dc  *core_dc = stream->ctx->dc;
ctx               517 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	dc = stream->ctx->dc;
ctx               547 drivers/gpu/drm/amd/display/dc/core/dc_stream.c 	struct dc  *core_dc = stream->ctx->dc;
ctx                40 drivers/gpu/drm/amd/display/dc/core/dc_surface.c static void construct(struct dc_context *ctx, struct dc_plane_state *plane_state)
ctx                42 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 	plane_state->ctx = ctx;
ctx                51 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		plane_state->in_transfer_func->ctx = ctx;
ctx                57 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		plane_state->in_shaper_func->ctx = ctx;
ctx                62 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		plane_state->lut3d_func->ctx = ctx;
ctx                67 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		plane_state->blend_tf->ctx = ctx;
ctx               124 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 	construct(core_dc->ctx, plane_state);
ctx               148 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		!plane_state->ctx ||
ctx               149 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 		!plane_state->ctx->dc) {
ctx               155 drivers/gpu/drm/amd/display/dc/core/dc_surface.c 	core_dc = plane_state->ctx->dc;
ctx               294 drivers/gpu/drm/amd/display/dc/dc.h 				dm_get_timestamp(dc->ctx) : 0
ctx               303 drivers/gpu/drm/amd/display/dc/dc.h 				voltage_level_tick = dm_get_timestamp(dc->ctx); \
ctx               309 drivers/gpu/drm/amd/display/dc/dc.h 			voltage_level_tick = dm_get_timestamp(dc->ctx)
ctx               313 drivers/gpu/drm/amd/display/dc/dc.h 			watermark_tick = dm_get_timestamp(dc->ctx)
ctx               317 drivers/gpu/drm/amd/display/dc/dc.h 			end_tick = dm_get_timestamp(dc->ctx); \
ctx               468 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_context *ctx;
ctx               633 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_context *ctx;
ctx               662 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_context *ctx;
ctx               762 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_context *ctx;
ctx              1011 drivers/gpu/drm/amd/display/dc/dc.h 	struct dc_context *ctx;
ctx               149 drivers/gpu/drm/amd/display/dc/dc_bios_types.h 	struct dc_context *ctx;
ctx               122 drivers/gpu/drm/amd/display/dc/dc_ddc_types.h 	struct dc_context *ctx;
ctx               168 drivers/gpu/drm/amd/display/dc/dc_ddc_types.h 	struct dc_context *ctx;
ctx                80 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_update_ex(const struct dc_context *ctx,
ctx                97 drivers/gpu/drm/amd/display/dc/dc_helper.c 	reg_val = dm_read_reg(ctx, addr);
ctx                99 drivers/gpu/drm/amd/display/dc/dc_helper.c 	dm_write_reg(ctx, addr, reg_val);
ctx               103 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_set_ex(const struct dc_context *ctx,
ctx               121 drivers/gpu/drm/amd/display/dc/dc_helper.c 	dm_write_reg(ctx, addr, reg_val);
ctx               126 drivers/gpu/drm/amd/display/dc/dc_helper.c 	const struct dc_context *ctx,
ctx               137 drivers/gpu/drm/amd/display/dc/dc_helper.c 	value = cgs_read_register(ctx->cgs_device, address);
ctx               138 drivers/gpu/drm/amd/display/dc/dc_helper.c 	trace_amdgpu_dc_rreg(&ctx->perf_trace->read_count, address, value);
ctx               143 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get(const struct dc_context *ctx, uint32_t addr,
ctx               146 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               151 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get2(const struct dc_context *ctx, uint32_t addr,
ctx               155 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               161 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get3(const struct dc_context *ctx, uint32_t addr,
ctx               166 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               173 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get4(const struct dc_context *ctx, uint32_t addr,
ctx               179 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               187 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get5(const struct dc_context *ctx, uint32_t addr,
ctx               194 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               203 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get6(const struct dc_context *ctx, uint32_t addr,
ctx               211 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               221 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get7(const struct dc_context *ctx, uint32_t addr,
ctx               230 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               241 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_reg_get8(const struct dc_context *ctx, uint32_t addr,
ctx               251 drivers/gpu/drm/amd/display/dc/dc_helper.c 	uint32_t reg_val = dm_read_reg(ctx, addr);
ctx               293 drivers/gpu/drm/amd/display/dc/dc_helper.c void generic_reg_wait(const struct dc_context *ctx,
ctx               313 drivers/gpu/drm/amd/display/dc/dc_helper.c 		reg_val = dm_read_reg(ctx, addr);
ctx               319 drivers/gpu/drm/amd/display/dc/dc_helper.c 					!IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
ctx               331 drivers/gpu/drm/amd/display/dc/dc_helper.c 	if (!IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
ctx               335 drivers/gpu/drm/amd/display/dc/dc_helper.c void generic_write_indirect_reg(const struct dc_context *ctx,
ctx               339 drivers/gpu/drm/amd/display/dc/dc_helper.c 	dm_write_reg(ctx, addr_index, index);
ctx               340 drivers/gpu/drm/amd/display/dc/dc_helper.c 	dm_write_reg(ctx, addr_data, data);
ctx               343 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_read_indirect_reg(const struct dc_context *ctx,
ctx               349 drivers/gpu/drm/amd/display/dc/dc_helper.c 	dm_write_reg(ctx, addr_index, index);
ctx               350 drivers/gpu/drm/amd/display/dc/dc_helper.c 	value = dm_read_reg(ctx, addr_data);
ctx               356 drivers/gpu/drm/amd/display/dc/dc_helper.c uint32_t generic_indirect_reg_update_ex(const struct dc_context *ctx,
ctx               380 drivers/gpu/drm/amd/display/dc/dc_helper.c 	generic_write_indirect_reg(ctx, addr_index, addr_data, index, reg_val);
ctx               477 drivers/gpu/drm/amd/display/dc/dc_hw_types.h 	struct dc_context *ctx;
ctx               118 drivers/gpu/drm/amd/display/dc/dc_link.h 	struct dc_context *ctx;
ctx               170 drivers/gpu/drm/amd/display/dc/dc_stream.h 	struct dc_context *ctx;
ctx                48 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	abm->ctx->logger
ctx                50 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	abm_dce->base.ctx
ctx               451 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	struct dc_context *ctx,
ctx               458 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	base->ctx = ctx;
ctx               472 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	struct dc_context *ctx,
ctx               484 drivers/gpu/drm/amd/display/dc/dce/dce_abm.c 	dce_abm_construct(abm_dce, ctx, regs, abm_shift, abm_mask);
ctx               237 drivers/gpu/drm/amd/display/dc/dce/dce_abm.h 	struct dc_context *ctx,
ctx                37 drivers/gpu/drm/amd/display/dc/dce/dce_audio.c 	aud->base.ctx
ctx               936 drivers/gpu/drm/amd/display/dc/dce/dce_audio.c 		struct dc_context *ctx,
ctx               950 drivers/gpu/drm/amd/display/dc/dce/dce_audio.c 	audio->base.ctx = ctx;
ctx               132 drivers/gpu/drm/amd/display/dc/dce/dce_audio.h 		struct dc_context *ctx,
ctx                36 drivers/gpu/drm/amd/display/dc/dce/dce_aux.c 	aux110->base.ctx
ctx                41 drivers/gpu/drm/amd/display/dc/dce/dce_aux.c 	engine->ctx->logger
ctx               418 drivers/gpu/drm/amd/display/dc/dce/dce_aux.c 		struct dc_context *ctx,
ctx               424 drivers/gpu/drm/amd/display/dc/dce/dce_aux.c 	aux_engine110->base.ctx = ctx;
ctx               466 drivers/gpu/drm/amd/display/dc/dce/dce_aux.c 	aux_engine = ddc->ctx->dc->res_pool->engines[ddc_pin->pin_data->en];
ctx                94 drivers/gpu/drm/amd/display/dc/dce/dce_aux.h 	struct dc_context *ctx;
ctx               119 drivers/gpu/drm/amd/display/dc/dce/dce_aux.h 	struct dc_context *ctx;
ctx               125 drivers/gpu/drm/amd/display/dc/dce/dce_aux.h 		struct dc_context *ctx,
ctx                46 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	clk_mgr_dce->base.ctx
ctx                48 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	clk_mgr->ctx->logger
ctx               252 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr->ctx->dc_bios;
ctx               254 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dmcu *dmcu = clk_mgr_dce->base.ctx->dc->res_pool->dmcu;
ctx               292 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr->ctx->dc_bios;
ctx               293 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc *core_dc = clk_mgr->ctx->dc;
ctx               320 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	if (!ASICREV_IS_VEGA20_P(clk_mgr->ctx->asic_id.hw_internal_rev))
ctx               329 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
ctx               343 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_debug_options *debug = &clk_mgr_dce->base.ctx->dc->debug;
ctx               344 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               401 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               469 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_bios *bp = clk_mgr_dce->base.ctx->dc_bios;
ctx               605 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
ctx               665 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		dm_pp_apply_display_requirements(dc->ctx, pp_display_cfg);
ctx               684 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr->ctx, &level_change_req))
ctx               692 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	dce_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
ctx               711 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr->ctx, &level_change_req))
ctx               719 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
ctx               738 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		if (dm_pp_apply_power_level_change_request(clk_mgr->ctx, &level_change_req))
ctx               746 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
ctx               774 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		dm_pp_apply_clock_for_voltage_request(clk_mgr->ctx, &clock_voltage_req);
ctx               782 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		dm_pp_apply_clock_for_voltage_request(clk_mgr->ctx, &clock_voltage_req);
ctx               784 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
ctx               809 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_context *ctx,
ctx               817 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	base->ctx = ctx;
ctx               831 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	if (dm_pp_get_static_clocks(ctx, &static_clk_info))
ctx               842 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_context *ctx,
ctx               859 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		clk_mgr_dce, ctx, regs, clk_shift, clk_mask);
ctx               865 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_context *ctx,
ctx               882 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		clk_mgr_dce, ctx, regs, clk_shift, clk_mask);
ctx               890 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	struct dc_context *ctx,
ctx               907 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		clk_mgr_dce, ctx, regs, clk_shift, clk_mask);
ctx               914 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct clk_mgr *dce120_clk_mgr_create(struct dc_context *ctx)
ctx               928 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 		clk_mgr_dce, ctx, NULL, NULL, NULL);
ctx               936 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct clk_mgr *dce121_clk_mgr_create(struct dc_context *ctx)
ctx               949 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c 	dce_clk_mgr_construct(clk_mgr_dce, ctx, NULL, NULL, NULL);
ctx                46 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	clk_src->base.ctx
ctx               855 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 			clock_source->ctx->dce_version <= DCE_VERSION_11_0)
ctx               909 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	if (IS_FPGA_MAXIMUS_DC(clock_source->ctx->dce_environment)) {
ctx               929 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 			clock_source->ctx->dce_version <= DCE_VERSION_11_0)
ctx              1248 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	calc_pll_cs->ctx = init_data->ctx;
ctx              1312 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	struct dc_context *ctx,
ctx              1322 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	clk_src->base.ctx = ctx;
ctx              1355 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	calc_pll_cs_init_data.ctx =	ctx;
ctx              1374 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	calc_pll_cs_init_data_hdmi.ctx = ctx;
ctx              1412 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	struct dc_context *ctx,
ctx              1419 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	clk_src->base.ctx = ctx;
ctx              1441 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	struct dc_context *ctx,
ctx              1448 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.c 	bool ret = dce112_clk_src_construct(clk_src, ctx, bios, id, regs, cs_shift, cs_mask);
ctx               188 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.h 	struct dc_context *ctx,
ctx               197 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.h 	struct dc_context *ctx,
ctx               207 drivers/gpu/drm/amd/display/dc/dce/dce_clock_source.h 	struct dc_context *ctx,
ctx                48 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dmcu_dce->base.ctx
ctx               240 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
ctx               252 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
ctx               257 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
ctx               305 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);
ctx               373 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	const struct dc_config *config = &dmcu->ctx->dc->config;
ctx               653 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1),
ctx               665 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG2),
ctx               670 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG3),
ctx               705 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	dm_write_reg(dmcu->ctx, REG(MASTER_COMM_DATA_REG1), masterCmdData1.u32);
ctx               823 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	struct dc_context *ctx,
ctx               830 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	base->ctx = ctx;
ctx               840 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	struct dc_context *ctx,
ctx               853 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);
ctx               862 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	struct dc_context *ctx,
ctx               875 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);
ctx               885 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 	struct dc_context *ctx,
ctx               898 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.c 		dmcu_dce, ctx, regs, dmcu_shift, dmcu_mask);
ctx               253 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.h 	struct dc_context *ctx,
ctx               259 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.h 	struct dc_context *ctx,
ctx               266 drivers/gpu/drm/amd/display/dc/dce/dce_dmcu.h 	struct dc_context *ctx,
ctx                32 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c 	hws->ctx
ctx                35 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	dce_i2c_hw->ctx
ctx               617 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	struct dc_context *ctx,
ctx               623 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	dce_i2c_hw->ctx = ctx;
ctx               625 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	dce_i2c_hw->reference_frequency = (ctx->dc_bios->fw_info.pll_info.crystal_frequency) >> 1;
ctx               641 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	struct dc_context *ctx,
ctx               651 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 			ctx,
ctx               676 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	struct dc_context *ctx,
ctx               683 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 			ctx,
ctx               693 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	struct dc_context *ctx,
ctx               700 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 			ctx,
ctx               711 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	struct dc_context *ctx,
ctx               718 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 			ctx,
ctx               724 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c 	if (ctx->dc->debug.scl_reset_length10)
ctx               276 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx;
ctx               285 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx,
ctx               293 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx,
ctx               301 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx,
ctx               309 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx,
ctx               318 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h 	struct dc_context *ctx,
ctx                36 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx)
ctx                38 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	dce_i2c_sw->ctx = ctx;
ctx               102 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               123 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               142 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               163 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               180 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               197 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               226 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               240 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               260 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               277 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               286 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	if (!write_byte_sw(ctx, ddc_handle, clock_delay_div_4, address))
ctx               290 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!write_byte_sw(ctx, ddc_handle, clock_delay_div_4, data[i]))
ctx               299 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               308 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	if (!write_byte_sw(ctx, ddc_handle, clock_delay_div_4, address))
ctx               312 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!read_byte_sw(ctx, ddc_handle, clock_delay_div_4, data + i,
ctx               324 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	struct dc_context *ctx,
ctx               350 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!wait_for_scl_high_sw(ctx, ddc_handle, clock_delay_div_4))
ctx               434 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 	bool result = start_sync_sw(engine->ctx, ddc, clock_delay_div_4);
ctx               442 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 			result = i2c_write_sw(engine->ctx, ddc, clock_delay_div_4,
ctx               447 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 			result = i2c_read_sw(engine->ctx, ddc, clock_delay_div_4,
ctx               461 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c 		if (!stop_sync_sw(engine->ctx, ddc, clock_delay_div_4))
ctx                37 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.h 	struct dc_context *ctx;
ctx                44 drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.h 	struct dc_context *ctx);
ctx                40 drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c 	ipp_dce->base.ctx
ctx               248 drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c 	struct dc_context *ctx,
ctx               254 drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c 	ipp_dce->base.ctx = ctx;
ctx               230 drivers/gpu/drm/amd/display/dc/dce/dce_ipp.h 	struct dc_context *ctx,
ctx                61 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	enc110->base.ctx
ctx                63 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	enc110->base.ctx->logger
ctx               129 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	struct dc_bios *bp = enc110->base.ctx->dc_bios;
ctx               263 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	struct dc_context *ctx = enc110->base.ctx;
ctx               266 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	if (ctx->dc->caps.psp_setup_panel_mode)
ctx               495 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	struct dc_context *ctx = enc110->base.ctx;
ctx               498 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	uint32_t value = dm_read_reg(ctx, addr);
ctx               502 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	dm_write_reg(ctx, addr, value);
ctx               505 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	value = dm_read_reg(ctx, addr);
ctx               510 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	dm_write_reg(ctx, addr, value);
ctx               658 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	if (enc110->base.ctx->dc->debug.hdmi20_disable &&
ctx               683 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	const struct dc_vbios_funcs *bp_funcs = init_data->ctx->dc_bios->funcs;
ctx               687 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	enc110->base.ctx = init_data->ctx;
ctx               762 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	result = bp_funcs->get_encoder_cap_info(enc110->base.ctx->dc_bios,
ctx               777 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	if (enc110->base.ctx->dc->debug.hdmi20_disable) {
ctx              1372 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	struct dc_context *ctx = enc110->base.ctx;
ctx              1375 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	uint32_t value = dm_read_reg(ctx, addr);
ctx              1386 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	struct dc_context *ctx = enc110->base.ctx;
ctx              1388 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c 	uint32_t value = dm_read_reg(ctx, addr);
ctx                31 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	dce_mi->base.ctx
ctx               272 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
ctx               297 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
ctx               330 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	uint32_t stutter_en = mi->ctx->dc->debug.disable_stutter ? 0 : 1;
ctx               770 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	struct dc_context *ctx,
ctx               776 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	dce_mi->base.ctx = ctx;
ctx               788 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	struct dc_context *ctx,
ctx               794 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
ctx               800 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	struct dc_context *ctx,
ctx               806 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.c 	dce_mem_input_construct(dce_mi, ctx, inst, regs, mi_shift, mi_mask);
ctx               342 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.h 	struct dc_context *ctx,
ctx               350 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.h 	struct dc_context *ctx,
ctx               358 drivers/gpu/drm/amd/display/dc/dce/dce_mem_input.h 	struct dc_context *ctx,
ctx                43 drivers/gpu/drm/amd/display/dc/dce/dce_opp.c 	opp110->base.ctx
ctx               545 drivers/gpu/drm/amd/display/dc/dce/dce_opp.c 	struct dc_context *ctx,
ctx               553 drivers/gpu/drm/amd/display/dc/dce/dce_opp.c 	opp110->base.ctx = ctx;
ctx               276 drivers/gpu/drm/amd/display/dc/dce/dce_opp.h 	struct dc_context *ctx,
ctx                34 drivers/gpu/drm/amd/display/dc/dce/dce_stream_encoder.c 		enc110->base.ctx->logger
ctx                63 drivers/gpu/drm/amd/display/dc/dce/dce_stream_encoder.c 	enc110->base.ctx
ctx              1656 drivers/gpu/drm/amd/display/dc/dce/dce_stream_encoder.c 	struct dc_context *ctx,
ctx              1664 drivers/gpu/drm/amd/display/dc/dce/dce_stream_encoder.c 	enc110->base.ctx = ctx;
ctx               706 drivers/gpu/drm/amd/display/dc/dce/dce_stream_encoder.h 	struct dc_context *ctx,
ctx                40 drivers/gpu/drm/amd/display/dc/dce/dce_transform.c 	xfm_dce->base.ctx
ctx                42 drivers/gpu/drm/amd/display/dc/dce/dce_transform.c 	xfm_dce->base.ctx->logger
ctx               158 drivers/gpu/drm/amd/display/dc/dce/dce_transform.c 	if (xfm_dce->base.ctx->dc->debug.visual_confirm != VISUAL_CONFIRM_DISABLE) {
ctx              1344 drivers/gpu/drm/amd/display/dc/dce/dce_transform.c 	struct dc_context *ctx,
ctx              1350 drivers/gpu/drm/amd/display/dc/dce/dce_transform.c 	xfm_dce->base.ctx = ctx;
ctx               486 drivers/gpu/drm/amd/display/dc/dce/dce_transform.h 	struct dc_context *ctx,
ctx                81 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c 	struct dc_context *ctx = dc->ctx;
ctx                98 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c 		dm_write_reg(ctx,
ctx               391 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c #define CTX  ctx
ctx               403 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               414 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               416 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	return dce_audio_create(ctx, inst,
ctx               421 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		struct dc_context *ctx,
ctx               431 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce110_timing_generator_construct(tg110, ctx, instance, offsets);
ctx               437 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx)
ctx               445 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
ctx               466 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx)
ctx               471 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		hws->ctx = ctx;
ctx               510 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               521 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce_mem_input_construct(dce_mi, ctx, inst, &mi_regs[inst], &mi_shifts, &mi_masks);
ctx               533 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               542 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce_transform_construct(transform, ctx, inst,
ctx               548 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               557 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce_ipp_construct(ipp, ctx, inst,
ctx               588 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               598 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 			     ctx, inst, &opp_regs[inst], &opp_shift, &opp_mask);
ctx               603 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               612 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               638 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               647 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	dce100_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               653 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx,
ctx               665 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	if (dce110_clk_src_construct(clk_src, ctx, bios, id,
ctx               913 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	struct dc_context *ctx = dc->ctx;
ctx               916 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx               922 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	bp = ctx->dc_bios;
ctx               926 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_EXTERNAL, NULL, true);
ctx               929 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], false);
ctx               931 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx               933 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx               938 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], true);
ctx               941 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx               943 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				dce100_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx               961 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx               971 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx               983 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		init_data.ctx = dc->ctx;
ctx              1003 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 				ctx,
ctx              1012 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.mis[i] = dce100_mem_input_create(ctx, i);
ctx              1020 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.ipps[i] = dce100_ipp_create(ctx, i);
ctx              1028 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.transforms[i] = dce100_transform_create(ctx, i);
ctx              1036 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.opps[i] = dce100_opp_create(ctx, i);
ctx              1046 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.engines[i] = dce100_aux_engine_create(ctx, i);
ctx              1053 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c 		pool->base.hw_i2cs[i] = dce100_i2c_hw_create(ctx, i);
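dce100_resource.c shows the other half of the pattern: construct fetches ctx once from dc->ctx (line 913) and threads it into every *_create helper, so each block in the pool ends up holding the same context. A hedged outline of that per-pipe fan-out, assuming the create helpers listed above; the kernel version also creates clock sources, DMCU, ABM, AUX engines and I2C blocks, and unwinds fully on any failure:

/* Outline only: error paths and non-pipe resources omitted. */
static bool construct_pipes_sketch(struct dc *dc,
				   struct dce100_resource_pool *pool,
				   unsigned int pipe_count)
{
	struct dc_context *ctx = dc->ctx;	/* fetched once, reused below */
	unsigned int i;

	for (i = 0; i < pipe_count; i++) {
		pool->base.mis[i] = dce100_mem_input_create(ctx, i);
		pool->base.ipps[i] = dce100_ipp_create(ctx, i);
		pool->base.transforms[i] = dce100_transform_create(ctx, i);
		pool->base.opps[i] = dce100_opp_create(ctx, i);
		if (!pool->base.mis[i] || !pool->base.ipps[i] ||
		    !pool->base.transforms[i] || !pool->base.opps[i])
			return false;	/* caller tears the pool down */
	}
	return true;
}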
ctx                41 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		cp110->base.ctx->logger
ctx                83 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	status_pos = dm_read_reg(compressor->ctx, DCP_REG(mmCRTC_STATUS_POSITION));
ctx                87 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	if (status_pos != dm_read_reg(compressor->ctx, DCP_REG(mmCRTC_STATUS_POSITION))) {
ctx                89 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		value = dm_read_reg(compressor->ctx, DCP_REG(mmLB_SYNC_RESET_SEL));
ctx                92 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, DCP_REG(mmLB_SYNC_RESET_SEL), value);
ctx                94 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		frame_count = dm_read_reg(compressor->ctx, DCP_REG(mmCRTC_STATUS_FRAME_COUNT));
ctx                98 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 			if (frame_count != dm_read_reg(compressor->ctx, DCP_REG(mmCRTC_STATUS_FRAME_COUNT)))
ctx               106 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		value = dm_read_reg(compressor->ctx, DCP_REG(mmLB_SYNC_RESET_SEL));
ctx               109 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, DCP_REG(mmLB_SYNC_RESET_SEL), value);
ctx               122 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		value = dm_read_reg(cp110->base.ctx, addr);
ctx               148 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               160 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               163 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               167 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               170 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               172 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               178 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               182 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, mmFBC_IND_LUT0, value);
ctx               185 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, mmFBC_IND_LUT1, value);
ctx               201 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		value = dm_read_reg(compressor->ctx, addr);
ctx               208 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               219 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               222 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		misc_value = dm_read_reg(compressor->ctx, mmFBC_MISC);
ctx               231 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, mmFBC_MISC, misc_value);
ctx               235 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               250 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 			reg_data = dm_read_reg(compressor->ctx, mmFBC_CNTL);
ctx               252 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 			dm_write_reg(compressor->ctx, mmFBC_CNTL, reg_data);
ctx               275 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, mmFBC_STATUS);
ctx               282 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, mmFBC_MISC);
ctx               284 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		value = dm_read_reg(compressor->ctx, mmFBC_CNTL);
ctx               311 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 		compressor->ctx,
ctx               314 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx,
ctx               318 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx,
ctx               321 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx,
ctx               334 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, DCP_REG(mmGRPH_COMPRESS_PITCH), 0);
ctx               342 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, DCP_REG(mmGRPH_COMPRESS_PITCH), value);
ctx               354 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	uint32_t value = dm_read_reg(compressor->ctx, addr);
ctx               361 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               388 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               394 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               397 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c struct compressor *dce110_compressor_create(struct dc_context *ctx)
ctx               405 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	dce110_compressor_construct(cp110, ctx);
ctx               494 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	struct dc_context *ctx)
ctx               512 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	compressor->base.ctx = ctx;
ctx               515 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.c 	compressor->base.memory_bus_width = ctx->asic_id.vram_width;
ctx                43 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.h struct compressor *dce110_compressor_create(struct dc_context *ctx);
ctx                46 drivers/gpu/drm/amd/display/dc/dce110/dce110_compressor.h 	struct dc_context *ctx);
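Nearly every dce110_compressor hit above is one instance of the same register idiom: read through the stored context, update a field, write back through the context. Sketched below with a placeholder bitmask; the kernel uses per-register field macros (set_reg_field_value and friends) instead of the literal mask shown here:

/* Read-modify-write through compressor->ctx; the ENABLE bit below is
 * a stand-in for the real register field definitions. */
static void set_fbc_enable_sketch(struct compressor *compressor,
				  uint32_t addr, bool enable)
{
	uint32_t value = dm_read_reg(compressor->ctx, addr);

	if (enable)
		value |= 1u;	/* placeholder for the ENABLE field */
	else
		value &= ~1u;

	dm_write_reg(compressor->ctx, addr, value);
}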
ctx                74 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	hws->ctx
ctx               117 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c static void dce110_init_pte(struct dc_context *ctx)
ctx               125 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	value = dm_read_reg(ctx, addr);
ctx               145 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	dm_write_reg(ctx, addr, value);
ctx               148 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	value = dm_read_reg(ctx, addr);
ctx               180 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		dm_write_reg(ctx, addr, value);
ctx               186 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx,
ctx               200 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx = dc->ctx;
ctx               203 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
ctx               229 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 			dm_write_reg(ctx,
ctx               235 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		dce110_init_pte(ctx);
ctx               738 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx = link->ctx;
ctx               766 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	hpd = get_hpd_gpio(ctx->dc_bios, connector, ctx->gpio_service);
ctx               806 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx = link->ctx;
ctx               807 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dce_hwseq *hwseq = ctx->dc->hwseq;
ctx               821 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 			unsigned long long current_ts = dm_get_timestamp(ctx);
ctx               824 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 							ctx,
ctx               860 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		bp_result = link_transmitter_control(ctx->dc_bios, &cntl);
ctx               864 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 			link->link_trace.time_stamp.edp_poweroff = dm_get_timestamp(ctx);
ctx               866 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 			link->link_trace.time_stamp.edp_poweron = dm_get_timestamp(ctx);
ctx               888 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx = link->ctx;
ctx               889 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dce_hwseq *hws = ctx->dc->hwseq;
ctx               937 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	link_transmitter_control(ctx->dc_bios, &cntl);
ctx               954 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	core_dc = pipe_ctx->stream->ctx->dc;
ctx               993 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	dc = pipe_ctx->stream->ctx->dc;
ctx              1030 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc *dc = pipe_ctx->stream->ctx->dc;
ctx              1500 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *ctx = dc->ctx;
ctx              1511 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		enable_display_pipe_clock_gating(ctx,
ctx              1586 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	if (edp_link && dc->ctx->dce_version != DCE_VERSION_8_0 &&
ctx              1587 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		    dc->ctx->dce_version != DCE_VERSION_8_1 &&
ctx              1588 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		    dc->ctx->dce_version != DCE_VERSION_8_3) {
ctx              1626 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	bios_set_scratch_acc_mode_change(dc->ctx->dc_bios);
ctx              1776 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		struct dc *dc = pipe_ctx[0]->stream->ctx->dc;
ctx              1803 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	if (!dc->ctx->fbc_gpu_addr)
ctx              1869 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		compr->compr_surface_address.quad_part = dc->ctx->fbc_gpu_addr;
ctx              2033 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_bios *dcb = dc->ctx->dc_bios;
ctx              2065 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 				dc, i, dc->ctx->dc_bios,
ctx              2288 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              2333 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              2374 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 	bp = dc->ctx->dc_bios;
ctx              2386 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 			dc->ctx,
ctx              2647 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		dc, fe_idx, dc->ctx->dc_bios, PIPE_GATING_CONTROL_ENABLE);
ctx              2690 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c 		.ref_clk_khz = pipe_ctx->stream->ctx->dc->res_pool->ref_clocks.xtalin_clock_inKhz,
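The hw_sequencer hits show how code that starts from a link, stream, or pipe recovers the context by walking the object graph: link->ctx (lines 738/806/888), pipe_ctx->stream->ctx->dc (lines 954/993/1030), and from there dc->ctx and ctx->dc_bios. A small sketch of that navigation, assuming only the pointer chain visible above:

/* Sketch only: recover dc, ctx and the BIOS object from a pipe, as the
 * sequencer functions above do, then use ctx for a timestamp. */
static void from_pipe_sketch(struct pipe_ctx *pipe_ctx)
{
	struct dc *dc = pipe_ctx->stream->ctx->dc;
	struct dc_context *ctx = dc->ctx;
	struct dc_bios *dcb = ctx->dc_bios;
	unsigned long long ts = dm_get_timestamp(ctx);

	(void)dcb;	/* would be passed to BIOS command-table calls */
	(void)ts;	/* used for edp power-sequencing deltas above */
}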
ctx                45 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 			mem_input110->base.ctx,
ctx                53 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 			mem_input110->base.ctx,
ctx                74 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx                88 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               110 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               124 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               157 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_GRPH_ENABLE);
ctx               159 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx,
ctx               202 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               224 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               255 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               263 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               271 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               279 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               287 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               295 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               304 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               313 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               322 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               331 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               356 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input110->base.ctx,
ctx               371 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 				mem_input110->base.ctx,
ctx               418 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 				mem_input110->base.ctx,
ctx               423 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 				mem_input110->base.ctx,
ctx               433 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 				mem_input110->base.ctx,
ctx               443 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 				mem_input110->base.ctx,
ctx               465 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 			mem_input110->base.ctx,
ctx               476 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_GRPH_UPDATE);
ctx               607 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_PIPE_OUTSTANDING_REQUEST_LIMIT);
ctx               611 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx, mmUNP_PIPE_OUTSTANDING_REQUEST_LIMIT, value);
ctx               613 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_CONTROL);
ctx               617 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_CONTROL, value);
ctx               619 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_ARB_CONTROL);
ctx               622 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_ARB_CONTROL, value);
ctx               624 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_CONTROL_C);
ctx               628 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_CONTROL_C, value);
ctx               630 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_ARB_CONTROL_C);
ctx               633 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(mem_input110->base.ctx, mmUNP_DVMM_PTE_ARB_CONTROL_C, value);
ctx               654 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               665 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	wm_mask_cntl = dm_read_reg(ctx, wm_addr);
ctx               670 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_addr, wm_mask_cntl);
ctx               672 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	urgency_cntl = dm_read_reg(ctx, urgency_addr);
ctx               685 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, urgency_addr, urgency_cntl);
ctx               688 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	wm_mask_cntl = dm_read_reg(ctx, wm_addr);
ctx               693 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_addr, wm_mask_cntl);
ctx               695 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	urgency_cntl = dm_read_reg(ctx, urgency_addr);
ctx               707 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, urgency_addr, urgency_cntl);
ctx               711 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               716 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		ctx,
ctx               724 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               729 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		ctx,
ctx               737 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               748 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	wm_mask_cntl = dm_read_reg(ctx, wm_addr);
ctx               753 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_addr, wm_mask_cntl);
ctx               755 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	stutter_cntl = dm_read_reg(ctx, stutter_addr);
ctx               757 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	if (ctx->dc->debug.disable_stutter) {
ctx               779 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, stutter_addr, stutter_cntl);
ctx               782 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	wm_mask_cntl = dm_read_reg(ctx, wm_addr);
ctx               787 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_addr, wm_mask_cntl);
ctx               789 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	stutter_cntl = dm_read_reg(ctx, stutter_addr);
ctx               795 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, stutter_addr, stutter_cntl);
ctx               799 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               802 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	program_stutter_watermark(ctx,
ctx               809 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               812 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	program_stutter_watermark(ctx,
ctx               819 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               828 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, wm_mask_ctrl_addr);
ctx               835 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_mask_ctrl_addr, value);
ctx               837 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, nbp_pstate_ctrl_addr);
ctx               854 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, nbp_pstate_ctrl_addr, value);
ctx               857 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, nbp_pstate_ctrl_addr);
ctx               863 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, nbp_pstate_ctrl_addr, value);
ctx               866 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, wm_mask_ctrl_addr);
ctx               872 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, wm_mask_ctrl_addr, value);
ctx               874 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, nbp_pstate_ctrl_addr);
ctx               890 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, nbp_pstate_ctrl_addr, value);
ctx               893 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	value = dm_read_reg(ctx, nbp_pstate_ctrl_addr);
ctx               899 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dm_write_reg(ctx, nbp_pstate_ctrl_addr, value);
ctx               903 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               906 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	program_nbp_watermark(ctx,
ctx               913 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	const struct dc_context *ctx,
ctx               916 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	program_nbp_watermark(ctx,
ctx               931 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               936 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               940 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               953 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               958 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               962 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		mem_input->ctx,
ctx               978 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		value = dm_read_reg(mi->ctx, addr);
ctx               985 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		dm_write_reg(mi->ctx, addr, value);
ctx               988 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		value = dm_read_reg(mi->ctx, addr);
ctx               995 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		dm_write_reg(mi->ctx, addr, value);
ctx               999 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		dm_write_reg(mi->ctx, addr, value);
ctx              1003 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 		dm_write_reg(mi->ctx, addr, value);
ctx              1036 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	struct dc_context *ctx)
ctx              1039 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.c 	dce_mi->base.ctx = ctx;
ctx                33 drivers/gpu/drm/amd/display/dc/dce110/dce110_mem_input_v.h 	struct dc_context *ctx);
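dce110_mem_input_v.c differs from the wrapped-object style in that its watermark helpers take const struct dc_context * directly (lines 654/711/737/799): first select watermark set A or B in a mask register, then program the value. A compressed sketch of that select-then-program shape; the addresses and field masks here are placeholders, not the real UNP/DPG register layout:

/* Placeholder masks: the kernel uses named field macros for the
 * watermark-set select and the mark value. */
static void program_watermark_sketch(const struct dc_context *ctx,
				     uint32_t wm_mask_addr, uint32_t mark_addr,
				     uint32_t set, uint32_t marks)
{
	uint32_t v;

	v = dm_read_reg(ctx, wm_mask_addr);
	v = (v & ~0x3u) | set;		/* choose watermark set A or B */
	dm_write_reg(ctx, wm_mask_addr, v);

	v = dm_read_reg(ctx, mark_addr);
	v = (v & ~0xffffu) | marks;	/* program the mark for that set */
	dm_write_reg(ctx, mark_addr, v);
}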
ctx               113 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               114 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	uint32_t cntl_value = dm_read_reg(ctx, mmCOL_MAN_OUTPUT_CSC_CONTROL);
ctx               142 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               160 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               178 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               196 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               214 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               232 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               256 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               274 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               292 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               310 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               328 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               346 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 			dm_write_reg(ctx, addr, value);
ctx               355 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	dm_write_reg(ctx, mmCOL_MAN_OUTPUT_CSC_CONTROL, cntl_value);
ctx               364 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               366 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	uint32_t value = dm_read_reg(ctx, addr);
ctx               456 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	dm_write_reg(ctx, addr, value);
ctx               465 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	uint32_t value = dm_read_reg(xfm->ctx, mmDENORM_CLAMP_CONTROL);
ctx               503 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	dm_write_reg(xfm->ctx, mmDENORM_CLAMP_CONTROL, value);
ctx               535 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	struct dc_context *ctx = xfm->ctx;
ctx               555 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	value = dm_read_reg(ctx, mmCOL_MAN_INPUT_CSC_CONTROL);
ctx               566 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C11_C12_A, value);
ctx               573 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C13_C14_A, value);
ctx               580 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C21_C22_A, value);
ctx               587 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C23_C24_A, value);
ctx               594 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C31_C32_A, value);
ctx               601 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C33_C34_A, value);
ctx               609 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C11_C12_B, value);
ctx               616 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C13_C14_B, value);
ctx               623 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C21_C22_B, value);
ctx               630 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C23_C24_B, value);
ctx               637 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C31_C32_B, value);
ctx               644 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 		dm_write_reg(ctx, mmINPUT_CSC_C33_C34_B, value);
ctx               668 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_csc_v.c 	dm_write_reg(ctx, mmCOL_MAN_INPUT_CSC_CONTROL, value);
ctx                39 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	uint32_t value = dm_read_reg(xfm->ctx, mmDCFEV_MEM_PWR_CTRL);
ctx                70 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm->ctx, mmDCFEV_MEM_PWR_CTRL, value);
ctx                73 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		value = dm_read_reg(xfm->ctx, mmDCFEV_MEM_PWR_CTRL);
ctx                90 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	value = dm_read_reg(xfm_dce->base.ctx,
ctx                99 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm_dce->base.ctx,
ctx               113 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm_dce->base.ctx, mmGAMMA_CORR_CONTROL, 0);
ctx               151 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx, mmGAMMA_CORR_CNTLA_START_CNTL,
ctx               162 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               173 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               190 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               223 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 				xfm_dce->base.ctx,
ctx               255 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               287 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               319 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               351 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               383 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               415 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               447 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 		dm_write_reg(xfm_dce->base.ctx,
ctx               464 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm_dce->base.ctx,
ctx               467 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm_dce->base.ctx,
ctx               478 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr, rgb->red_reg);
ctx               479 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr, rgb->green_reg);
ctx               480 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr, rgb->blue_reg);
ctx               482 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr,
ctx               484 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr,
ctx               486 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 			dm_write_reg(xfm_dce->base.ctx, addr,
ctx               523 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	uint32_t value = dm_read_reg(xfm->ctx, mmDCFEV_MEM_PWR_CTRL);
ctx               549 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_regamma_v.c 	dm_write_reg(xfm->ctx, mmDCFEV_MEM_PWR_CTRL, value);
ctx                48 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_v.c 	struct dc_context *ctx)
ctx                52 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_v.c 	opp110->base.ctx = ctx;
ctx                33 drivers/gpu/drm/amd/display/dc/dce110/dce110_opp_v.h 	struct dc_context *ctx);
ctx                58 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		dc->ctx->logger
ctx               432 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c #define CTX  ctx
ctx               444 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               455 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               457 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	return dce_audio_create(ctx, inst,
ctx               462 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		struct dc_context *ctx,
ctx               472 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_timing_generator_construct(tg110, ctx, instance, offsets);
ctx               478 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx)
ctx               486 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
ctx               512 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx)
ctx               517 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		hws->ctx = ctx;
ctx               518 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		hws->regs = ASIC_REV_IS_STONEY(ctx->asic_id.hw_internal_rev) ?
ctx               556 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               567 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce_mem_input_construct(dce_mi, ctx, inst, &mi_regs[inst], &mi_shifts, &mi_masks);
ctx               579 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               588 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce_transform_construct(transform, ctx, inst,
ctx               594 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               603 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce_ipp_construct(ipp, ctx, inst,
ctx               634 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               644 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 			     ctx, inst, &opp_regs[inst], &opp_shift, &opp_mask);
ctx               649 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               658 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               684 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               693 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce100_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               699 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx,
ctx               711 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	if (dce110_clk_src_construct(clk_src, ctx, bios, id,
ctx               902 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 			dc->ctx,
ctx              1053 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc *dc = stream->ctx->dc;
ctx              1072 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		struct dc_bios *dcb = dc->ctx->dc_bios;
ctx              1167 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c static bool underlay_create(struct dc_context *ctx, struct resource_pool *pool)
ctx              1186 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_opp_v_construct(dce110_oppv, ctx);
ctx              1188 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_timing_generator_v_construct(dce110_tgv, ctx);
ctx              1189 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_mem_input_v_construct(dce110_miv, ctx);
ctx              1190 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	dce110_transform_v_construct(dce110_xfmv, ctx);
ctx              1199 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	ctx->dc->caps.max_slave_planes = 1;
ctx              1200 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	ctx->dc->caps.max_slave_planes = 1;
ctx              1211 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 			dc->ctx,
ctx              1235 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 			dc->ctx,
ctx              1247 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 			dc->ctx,
ctx              1277 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1280 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1282 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	pool->base.res_cap = dce110_resource_cap(&ctx->asic_id);
ctx              1301 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	bp = ctx->dc_bios;
ctx              1305 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 				dce110_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_EXTERNAL, NULL, true);
ctx              1308 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 				dce110_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0,
ctx              1311 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 				dce110_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1,
ctx              1333 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx              1343 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1355 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		init_data.ctx = dc->ctx;
ctx              1363 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 				ctx, i, &dce110_tg_offsets[i]);
ctx              1370 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.mis[i] = dce110_mem_input_create(ctx, i);
ctx              1378 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.ipps[i] = dce110_ipp_create(ctx, i);
ctx              1386 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.transforms[i] = dce110_transform_create(ctx, i);
ctx              1394 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.opps[i] = dce110_opp_create(ctx, i);
ctx              1404 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.engines[i] = dce110_aux_engine_create(ctx, i);
ctx              1411 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		pool->base.hw_i2cs[i] = dce110_i2c_hw_create(ctx, i);
ctx              1422 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 		dc->fbc_compressor = dce110_compressor_create(ctx);
ctx              1424 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	if (!underlay_create(ctx, &pool->base))
ctx              1441 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c 	bw_calcs_init(dc->bw_dceip, dc->bw_vbios, dc->ctx->asic_id);
ctx               100 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx               113 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	regval = dm_read_reg(tg->ctx, address);
ctx               116 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, address, regval);
ctx               140 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE), value);
ctx               144 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_LOCK), value);
ctx               157 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               175 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx               199 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx               222 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx               225 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx               262 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	regval = dm_read_reg(tg->ctx,
ctx               272 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx,
ctx               379 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	v_total_min = dm_read_reg(tg->ctx, addr);
ctx               382 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	v_total_max = dm_read_reg(tg->ctx, addr);
ctx               385 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	v_total_cntl = dm_read_reg(tg->ctx, addr);
ctx               461 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, v_total_min);
ctx               464 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, v_total_max);
ctx               467 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, v_total_cntl);
ctx               479 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	static_screen_cntl = dm_read_reg(tg->ctx, addr);
ctx               491 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, static_screen_cntl);
ctx               512 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               535 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_STATUS_POSITION));
ctx               547 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_NOM_VERT_POSITION));
ctx               575 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value  = dm_read_reg(tg->ctx,
ctx               609 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx               615 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               621 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               624 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               630 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               636 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               642 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               645 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               651 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               654 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               674 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               677 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(ctx, addr);
ctx               696 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx               707 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx               772 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx               799 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx               844 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx               884 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 			dm_write_reg(ctx, addr, value);
ctx               897 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx               927 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx              1053 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx              1057 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx              1087 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, addr, value);
ctx              1093 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, CRTC_REG(mmCRTC_TEST_PATTERN_CONTROL), value);
ctx              1094 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, CRTC_REG(mmCRTC_TEST_PATTERN_COLOR), value);
ctx              1095 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(ctx, CRTC_REG(mmCRTC_TEST_PATTERN_PARAMETERS),
ctx              1223 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, address);
ctx              1248 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmDCP_GSL_CONTROL), value);
ctx              1266 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmDCIO_GSL0_CNTL), value);
ctx              1272 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value_crtc_vtotal = dm_read_reg(tg->ctx,
ctx              1285 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_GSL_WINDOW), 0);
ctx              1293 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, address, value);
ctx              1298 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, address);
ctx              1309 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, address, value);
ctx              1351 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value_crtc_vtotal = dm_read_reg(tg->ctx,
ctx              1365 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, address, value);
ctx              1381 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, address, value);
ctx              1416 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx              1468 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              1475 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx              1478 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(ctx, addr);
ctx              1486 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx              1501 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		uint32_t pol_value = dm_read_reg(tg->ctx,
ctx              1515 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL));
ctx              1554 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value);
ctx              1558 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
ctx              1575 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
ctx              1599 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL));
ctx              1626 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value);
ctx              1632 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
ctx              1649 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
ctx              1651 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
ctx              1666 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
ctx              1678 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL), value);
ctx              1680 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
ctx              1697 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
ctx              1701 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE));
ctx              1708 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE), value);
ctx              1716 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
ctx              1728 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
ctx              1730 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
ctx              1742 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL), value);
ctx              1745 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL));
ctx              1762 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value);
ctx              1778 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx,
ctx              1780 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value1 = dm_read_reg(tg->ctx,
ctx              1827 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx              1835 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              1850 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx              1874 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx              1876 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx              1882 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx              1899 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx              1917 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              1920 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              1926 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx              1950 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(ctx, addr, value);
ctx              1971 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL));
ctx              1997 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_DOUBLE_BUFFER_CONTROL), value);
ctx              2007 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL), value);
ctx              2010 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 		dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL), 0);
ctx              2080 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERTICAL_INTERRUPT0_POSITION), val);
ctx              2093 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx              2114 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, cntl_addr, 0);
ctx              2129 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              2140 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              2151 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              2162 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx              2171 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	dm_write_reg(tg->ctx, cntl_addr, value);
ctx              2185 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx              2193 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx              2198 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	value = dm_read_reg(tg->ctx, addr);
ctx              2246 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	struct dc_context *ctx,
ctx              2257 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	tg110->base.ctx = ctx;
ctx              2258 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c 	tg110->base.bp = ctx->dc_bios;
ctx               122 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h 	struct dc_context *ctx,
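The dce110 timing-generator entries above repeat one idiom: read a register through the block's dc_context, adjust a field, write it back through the same context. A minimal sketch of that pattern, assuming a hypothetical EXAMPLE_MASK (not a driver symbol):

static void tg_rmw_example(struct timing_generator *tg, uint32_t addr)
{
	/* all register I/O is routed through the shared dc_context */
	uint32_t value = dm_read_reg(tg->ctx, addr);

	value |= EXAMPLE_MASK;	/* hypothetical field bit, for illustration */
	dm_write_reg(tg->ctx, addr, value);
}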
ctx                42 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	tg->ctx->logger
ctx                65 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx,
ctx                70 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, mmCRTCV_MASTER_UPDATE_MODE, value);
ctx                75 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx,
ctx                85 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(tg->ctx,
ctx                91 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx,
ctx               103 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               117 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               123 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               137 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               148 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(tg->ctx, addr);
ctx               161 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(tg->ctx, mmCRTCV_STATUS_POSITION);
ctx               173 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(tg->ctx, mmCRTCV_STATUS_POSITION);
ctx               254 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	struct dc_context *ctx = tg->ctx;
ctx               260 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               266 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               269 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               275 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               278 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               298 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               301 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               320 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               329 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               332 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               346 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               355 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               358 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               372 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               375 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	value = dm_read_reg(ctx, addr);
ctx               381 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               390 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               424 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               456 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               474 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               481 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	struct dc_context *ctx = tg->ctx;
ctx               504 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               506 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               512 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               527 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               545 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               548 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, addr, value);
ctx               554 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	struct dc_context *ctx = tg->ctx;
ctx               577 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(ctx, addr, value);
ctx               597 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	regval = dm_read_reg(tg->ctx, address);
ctx               600 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	dm_write_reg(tg->ctx, address, regval);
ctx               606 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               691 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	struct dc_context *ctx)
ctx               697 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	tg110->base.ctx = ctx;
ctx               698 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c 	tg110->base.bp = ctx->dc_bios;
ctx                31 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.h 	struct dc_context *ctx);
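Both timing-generator construct paths above end the same way: the caller's dc_context is cached on the base object, and the BIOS parser handle is pulled from it. A sketch of that wiring, mirroring the construct lines above:

static void tg_construct_example(struct dce110_timing_generator *tg110,
				 struct dc_context *ctx)
{
	tg110->base.ctx = ctx;		/* used by every later dm_*_reg call */
	tg110->base.bp = ctx->dc_bios;	/* BIOS parser also hangs off the ctx */
}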
ctx                36 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	xfm->ctx->logger
ctx                84 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               101 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 		dm_write_reg(ctx, addr, value);
ctx               115 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 		dm_write_reg(ctx, addr, value);
ctx               131 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 		dm_write_reg(ctx, addr, value);
ctx               145 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 		dm_write_reg(ctx, addr, value);
ctx               164 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               175 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, mmSCLV_TAP_CONTROL, value);
ctx               206 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, mmSCLV_MODE, value);
ctx               215 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, mmSCLV_CONTROL, value);
ctx               240 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	if (xfm_dce->base.ctx->dc->debug.visual_confirm != VISUAL_CONFIRM_DISABLE) {
ctx               266 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(xfm_dce->base.ctx,
ctx               270 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(xfm_dce->base.ctx,
ctx               280 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	value = dm_read_reg(xfm_dce->base.ctx, mmSCLV_UPDATE);
ctx               282 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(xfm_dce->base.ctx, mmSCLV_UPDATE, value);
ctx               291 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               304 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	power_ctl = dm_read_reg(ctx, mmDCFEV_MEM_PWR_CTRL);
ctx               307 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, mmDCFEV_MEM_PWR_CTRL, power_ctl_off);
ctx               312 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 				dm_read_reg(ctx, mmDCFEV_MEM_PWR_STATUS),
ctx               332 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 			dm_write_reg(ctx, mmSCLV_COEF_RAM_SELECT, select);
ctx               362 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 			dm_write_reg(ctx, mmSCLV_COEF_RAM_TAP_DATA, data);
ctx               367 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, mmDCFEV_MEM_PWR_CTRL, power_ctl);
ctx               396 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	struct dc_context *ctx = xfm_dce->base.ctx;
ctx               405 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               414 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               423 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               432 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               446 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               460 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               474 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               488 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(ctx, addr, value);
ctx               511 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	value = dm_read_reg(xfm_dce->base.ctx, mmLBV_MEMORY_CTRL);
ctx               519 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(xfm_dce->base.ctx, mmLBV_MEMORY_CTRL, value);
ctx               672 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	dm_write_reg(xfm->ctx, mmLBV_DATA_FORMAT, reg_data);
ctx               703 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	struct dc_context *ctx)
ctx               705 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.c 	xfm_dce->base.ctx = ctx;
ctx                35 drivers/gpu/drm/amd/display/dc/dce110/dce110_transform_v.h 	struct dc_context *ctx);
ctx                40 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		cp110->base.ctx->logger
ctx               302 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		value = dm_read_reg(cp110->base.ctx, addr);
ctx               325 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               337 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               340 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               344 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               347 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               349 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               355 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               359 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, mmFBC_IND_LUT0, value);
ctx               362 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, mmFBC_IND_LUT1, value);
ctx               394 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		value = dm_read_reg(compressor->ctx, addr);
ctx               400 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               409 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               411 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		dm_write_reg(compressor->ctx, addr, value);
ctx               425 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		reg_data = dm_read_reg(compressor->ctx, mmFBC_CNTL);
ctx               427 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		dm_write_reg(compressor->ctx, mmFBC_CNTL, reg_data);
ctx               449 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, mmFBC_STATUS);
ctx               456 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, mmFBC_MISC);
ctx               458 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		value = dm_read_reg(compressor->ctx, mmFBC_CNTL);
ctx               473 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	uint32_t value = dm_read_reg(compressor->ctx,
ctx               494 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 		compressor->ctx,
ctx               497 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx,
ctx               512 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx,
ctx               515 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx,
ctx               531 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, DCP_REG(mmGRPH_COMPRESS_PITCH), 0);
ctx               539 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, DCP_REG(mmGRPH_COMPRESS_PITCH), value);
ctx               554 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 				compressor->ctx,
ctx               562 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 			compressor->ctx,
ctx               568 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               574 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               578 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               584 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               588 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               594 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, mmGMCON_LPT_TARGET, value);
ctx               606 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx,
ctx               613 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx,
ctx               618 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               624 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               630 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value_control = dm_read_reg(compressor->ctx, addr);
ctx               636 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               643 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               647 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               653 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               670 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	lpt_control = dm_read_reg(compressor->ctx,
ctx               725 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx,
ctx               741 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	uint32_t value = dm_read_reg(compressor->ctx, addr);
ctx               748 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               775 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	value = dm_read_reg(compressor->ctx, addr);
ctx               787 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dm_write_reg(compressor->ctx, addr, value);
ctx               791 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	struct dc_context *ctx)
ctx               793 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	struct dc_bios *bp = ctx->dc_bios;
ctx               810 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	compressor->base.ctx = ctx;
ctx               813 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	compressor->base.memory_bus_width = ctx->asic_id.vram_width;
ctx               834 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c struct compressor *dce112_compressor_create(struct dc_context *ctx)
ctx               842 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.c 	dce112_compressor_construct(cp110, ctx);
ctx                43 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.h struct compressor *dce112_compressor_create(struct dc_context *ctx);
ctx                46 drivers/gpu/drm/amd/display/dc/dce112/dce112_compressor.h 	struct dc_context *ctx);
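dce112_compressor_create above pairs an allocation with dce112_compressor_construct and hands back the embedded base object. A hedged sketch of that create/construct split (error handling abbreviated):

struct compressor *compressor_create_example(struct dc_context *ctx)
{
	struct dce112_compressor *cp110 =
		kzalloc(sizeof(struct dce112_compressor), GFP_KERNEL);

	if (!cp110)
		return NULL;	/* caller checks for NULL */

	dce112_compressor_construct(cp110, ctx);	/* stores ctx on base */
	return &cp110->base;
}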
ctx                69 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c static void dce112_init_pte(struct dc_context *ctx)
ctx                77 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 	value = dm_read_reg(ctx, addr);
ctx               109 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 		dm_write_reg(ctx, addr, value);
ctx               121 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 	struct dc_context *ctx = dc->ctx;
ctx               123 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
ctx               141 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 		dm_write_reg(ctx,
ctx               147 drivers/gpu/drm/amd/display/dc/dce112/dce112_hw_sequencer.c 		dce112_init_pte(ctx);
ctx                62 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		dc->ctx->logger
ctx               409 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c #define CTX  ctx
ctx               421 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               432 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               434 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	return dce_audio_create(ctx, inst,
ctx               440 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		struct dc_context *ctx,
ctx               450 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce110_timing_generator_construct(tg110, ctx, instance, offsets);
ctx               456 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx)
ctx               464 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
ctx               486 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx)
ctx               491 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		hws->ctx = ctx;
ctx               525 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               536 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce112_mem_input_construct(dce_mi, ctx, inst, &mi_regs[inst], &mi_shifts, &mi_masks);
ctx               547 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               556 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce_transform_construct(transform, ctx, inst,
ctx               592 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               601 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce_ipp_construct(ipp, ctx, inst,
ctx               607 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               617 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 			     ctx, inst, &opp_regs[inst], &opp_shift, &opp_mask);
ctx               622 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               631 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               657 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               666 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dce112_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               672 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx,
ctx               684 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	if (dce112_clk_src_construct(clk_src, ctx, bios, id,
ctx               824 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 			dc->ctx,
ctx              1000 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 			dc->ctx,
ctx              1006 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				dc->ctx,
ctx              1029 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				dc->ctx,
ctx              1065 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 			dc->ctx,
ctx              1130 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	dm_pp_notify_wm_clock_changes(dc->ctx, &clk_ranges);
ctx              1149 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1151 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1153 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	pool->base.res_cap = dce112_resource_cap(&ctx->asic_id);
ctx              1174 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1179 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1184 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1189 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1194 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1199 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 				ctx, ctx->dc_bios,
ctx              1205 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		ctx, ctx->dc_bios,
ctx              1217 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx              1227 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1239 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		init_data.ctx = dc->ctx;
ctx              1248 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 					ctx,
ctx              1257 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		pool->base.mis[i] = dce112_mem_input_create(ctx, i);
ctx              1265 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		pool->base.ipps[i] = dce112_ipp_create(ctx, i);
ctx              1273 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		pool->base.transforms[i] = dce112_transform_create(ctx, i);
ctx              1282 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 			ctx,
ctx              1293 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		pool->base.engines[i] = dce112_aux_engine_create(ctx, i);
ctx              1300 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 		pool->base.hw_i2cs[i] = dce112_i2c_hw_create(ctx, i);
ctx              1322 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c 	bw_calcs_init(dc->bw_dceip, dc->bw_vbios, dc->ctx->asic_id);
ctx                41 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 	hws->ctx
ctx                83 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c static void dce120_init_pte(struct dc_context *ctx, uint8_t controller_id)
ctx               113 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 	value = dm_read_reg(ctx, addr);
ctx               145 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 		dm_write_reg(ctx, addr, value);
ctx               160 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 	struct dc_context *ctx = dc->ctx;
ctx               162 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
ctx               180 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 		dm_write_reg(ctx,
ctx               186 drivers/gpu/drm/amd/display/dc/dce120/dce120_hw_sequencer.c 		dce120_init_pte(ctx, controller_id);
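Both hw-sequencer groups gate their PTE programming on the same environment check before touching registers; FPGA emulation paths return early. A sketch of that guard, using only the calls visible above (the function name is illustrative):

static void pte_init_guard_example(struct dc *dc, uint8_t controller_id)
{
	struct dc_context *ctx = dc->ctx;

	if (IS_FPGA_MAXIMUS_DC(ctx->dce_environment))
		return;	/* no real registers to program on FPGA setups */

	dce120_init_pte(ctx, controller_id);	/* per-controller PTE setup */
}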
ctx               382 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               392 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			     ctx, inst, &opp_regs[inst], &opp_shift, &opp_mask);
ctx               396 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               405 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               431 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               440 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce112_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               485 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               497 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	if (dce112_clk_src_construct(clk_src, ctx, bios, id,
ctx               528 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		struct dc_context *ctx,
ctx               538 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce120_timing_generator_construct(tg110, ctx, instance, offsets);
ctx               617 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               620 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	uint32_t reg_val = dm_read_reg_soc15(ctx, mmCC_DC_MISC_STRAPS, 0);
ctx               629 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	reg_val = dm_read_reg_soc15(ctx, mmDC_PINSTRAPS, 0);
ctx               636 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               638 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	return dce_audio_create(ctx, inst,
ctx               673 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               682 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce_ipp_construct(ipp, ctx, inst,
ctx               689 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx)
ctx               697 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
ctx               733 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx)
ctx               738 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		hws->ctx = ctx;
ctx               747 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx)
ctx               752 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		hws->ctx = ctx;
ctx               794 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               805 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce120_mem_input_construct(dce_mi, ctx, inst, &mi_regs[inst], &mi_shifts, &mi_masks);
ctx               810 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx,
ctx               819 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dce_transform_construct(transform, ctx, inst,
ctx               856 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 				dc->ctx,
ctx               889 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dc->ctx,
ctx               970 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	dm_pp_notify_wm_clock_changes(dc->ctx, &clk_ranges);
ctx               973 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c static uint32_t read_pipe_fuses(struct dc_context *ctx)
ctx               975 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	uint32_t value = dm_read_reg_soc15(ctx, mmCC_DC_PIPE_DIS, 0);
ctx               988 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	struct dc_context *ctx = dc->ctx;
ctx               991 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	bool is_vg20 = ASICREV_IS_VEGA20_P(ctx->asic_id.hw_internal_rev);
ctx               994 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1017 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1021 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1025 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1029 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1033 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1037 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1043 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			dce120_clock_source_create(ctx, ctx->dc_bios,
ctx              1055 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx              1065 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1076 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	irq_init_data.ctx = dc->ctx;
ctx              1083 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pipe_fuses = read_pipe_fuses(ctx);
ctx              1097 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 					ctx,
ctx              1106 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pool->base.mis[j] = dce120_mem_input_create(ctx, i);
ctx              1115 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pool->base.ipps[j] = dce120_ipp_create(ctx, i);
ctx              1123 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pool->base.transforms[j] = dce120_transform_create(ctx, i);
ctx              1132 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 			ctx,
ctx              1145 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pool->base.engines[i] = dce120_aux_engine_create(ctx, i);
ctx              1152 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 		pool->base.hw_i2cs[i] = dce120_i2c_hw_create(ctx, i);
ctx              1183 drivers/gpu/drm/amd/display/dc/dce120/dce120_resource.c 	bw_calcs_init(dc->bw_dceip, dc->bw_vbios, dc->ctx->asic_id);
ctx                43 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		generic_reg_update_soc15(tg110->base.ctx, tg110->offsets.crtc, reg_name, n, __VA_ARGS__)
ctx                46 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		generic_reg_set_soc15(tg110->base.ctx, tg110->offsets.crtc, reg_name, n, __VA_ARGS__)
ctx                91 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 					tg->ctx,
ctx               174 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 				tg->ctx,
ctx               190 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 				tg->ctx,
ctx               201 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 				tg->ctx,
ctx               251 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 							dm_read_reg_soc15(tg->ctx,
ctx               261 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_GSL_WINDOW, tg110->offsets.crtc, 0);
ctx               313 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 									tg->ctx,
ctx               375 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               416 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	value = dm_read_reg_soc15(tg->ctx, mmD1VGA_CONTROL, offset);
ctx               424 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	dm_write_reg_soc15(tg->ctx, mmD1VGA_CONTROL, offset, value);
ctx               514 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               519 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               529 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		tg->ctx,
ctx               609 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               624 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               646 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               674 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 				tg->ctx,
ctx               696 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	dm_write_reg_soc15(tg->ctx,
ctx               715 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 				tg->ctx,
ctx               719 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		tg->ctx,
ctx               756 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			tg->ctx,
ctx               785 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_BLANK_CONTROL,
ctx               839 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	struct dc_context *ctx = tg->ctx;
ctx               941 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_PARAMETERS, tg110->offsets.crtc, 0);
ctx               980 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 			dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_COLOR, tg110->offsets.crtc, value);
ctx               993 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_COLOR, tg110->offsets.crtc, value);
ctx              1067 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_COLOR, tg110->offsets.crtc, 0);
ctx              1070 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_CONTROL, tg110->offsets.crtc, 0);
ctx              1082 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_CONTROL, tg110->offsets.crtc,  value);
ctx              1083 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_COLOR, tg110->offsets.crtc, value);
ctx              1084 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 		dm_write_reg_soc15(ctx, mmCRTC0_CRTC_TEST_PATTERN_PARAMETERS, tg110->offsets.crtc, value);
ctx              1123 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CONTROL,
ctx              1141 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC_CNTL,
ctx              1183 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC_CNTL,
ctx              1191 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC0_DATA_RG,
ctx              1196 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC0_DATA_B,
ctx              1242 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	struct dc_context *ctx,
ctx              1253 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	tg110->base.ctx = ctx;
ctx              1254 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c 	tg110->base.bp = ctx->dc_bios;
ctx                36 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.h 	struct dc_context *ctx,
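On DCE 12 the register helpers take an explicit instance offset, so the per-CRTC base lives in tg110->offsets rather than being folded into the address as in the dce110 entries above. A sketch of the soc15 read-modify-write (EXAMPLE_BIT is illustrative, not a driver symbol):

static void tg_rmw_soc15_example(struct timing_generator *tg)
{
	struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
	uint32_t value;

	/* soc15 helpers: (ctx, register, instance offset[, value]) */
	value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CONTROL,
				  tg110->offsets.crtc);
	value |= EXAMPLE_BIT;
	dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_CONTROL,
			   tg110->offsets.crtc, value);
}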
ctx               423 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c #define CTX  ctx
ctx               435 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               446 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               448 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	return dce_audio_create(ctx, inst,
ctx               453 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		struct dc_context *ctx,
ctx               463 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce80_timing_generator_construct(tg110, ctx, instance, offsets);
ctx               468 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               478 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 			     ctx, inst, &opp_regs[inst], &opp_shift, &opp_mask);
ctx               483 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               492 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               518 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               527 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               534 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx)
ctx               542 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce_i2c_sw_construct(dce_i2c_sw, ctx);
ctx               548 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx)
ctx               556 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce110_stream_encoder_construct(enc110, ctx, ctx->dc_bios, eng_id,
ctx               578 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx)
ctx               583 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		hws->ctx = ctx;
ctx               622 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               633 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce_mem_input_construct(dce_mi, ctx, inst, &mi_regs[inst], &mi_shifts, &mi_masks);
ctx               645 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               654 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce_transform_construct(transform, ctx, inst,
ctx               686 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx,
ctx               698 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	if (dce110_clk_src_construct(clk_src, ctx, bios, id,
ctx               716 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               725 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	dce_ipp_construct(ipp, ctx, inst,
ctx               879 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx = dc->ctx;
ctx               882 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx               903 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	bp = ctx->dc_bios;
ctx               907 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_EXTERNAL, NULL, true);
ctx               910 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], false);
ctx               912 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx               914 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx               919 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], true);
ctx               922 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx               924 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx               942 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx               952 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx               964 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		init_data.ctx = dc->ctx;
ctx               972 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				ctx, i, &dce80_tg_offsets[i]);
ctx               979 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.mis[i] = dce80_mem_input_create(ctx, i);
ctx               986 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.ipps[i] = dce80_ipp_create(ctx, i);
ctx               993 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.transforms[i] = dce80_transform_create(ctx, i);
ctx              1000 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.opps[i] = dce80_opp_create(ctx, i);
ctx              1009 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
ctx              1016 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
ctx              1023 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
ctx              1076 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1079 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1100 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	bp = ctx->dc_bios;
ctx              1104 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_EXTERNAL, NULL, true);
ctx              1107 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], false);
ctx              1109 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx              1111 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx              1116 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL0, &clk_src_regs[0], true);
ctx              1119 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[1], false);
ctx              1121 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[2], false);
ctx              1139 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx              1149 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1161 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		init_data.ctx = dc->ctx;
ctx              1169 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				ctx, i, &dce80_tg_offsets[i]);
ctx              1176 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.mis[i] = dce80_mem_input_create(ctx, i);
ctx              1183 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.ipps[i] = dce80_ipp_create(ctx, i);
ctx              1190 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.transforms[i] = dce80_transform_create(ctx, i);
ctx              1197 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.opps[i] = dce80_opp_create(ctx, i);
ctx              1206 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
ctx              1213 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
ctx              1220 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
ctx              1273 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1276 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1297 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	bp = ctx->dc_bios;
ctx              1301 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_EXTERNAL, NULL, true);
ctx              1304 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[0], false);
ctx              1306 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[1], false);
ctx              1311 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL1, &clk_src_regs[0], true);
ctx              1314 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				dce80_clock_source_create(ctx, bp, CLOCK_SOURCE_ID_PLL2, &clk_src_regs[1], false);
ctx              1332 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.dmcu = dce_dmcu_create(ctx,
ctx              1342 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1354 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		init_data.ctx = dc->ctx;
ctx              1362 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 				ctx, i, &dce80_tg_offsets[i]);
ctx              1369 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.mis[i] = dce80_mem_input_create(ctx, i);
ctx              1376 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.ipps[i] = dce80_ipp_create(ctx, i);
ctx              1383 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.transforms[i] = dce80_transform_create(ctx, i);
ctx              1390 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.opps[i] = dce80_opp_create(ctx, i);
ctx              1399 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
ctx              1406 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
ctx              1413 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c 		pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
ctx                92 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               105 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx               130 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	uint32_t value = dm_read_reg(tg->ctx, addr);
ctx               182 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	dm_write_reg(tg->ctx, addr, value);
ctx               226 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	struct dc_context *ctx,
ctx               237 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	tg110->base.ctx = ctx;
ctx               238 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c 	tg110->base.bp = ctx->dc_bios;
ctx                35 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.h 	struct dc_context *ctx,
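All three dce80 pool-construct variants above run the same per-pipe creation loops, threading the one ctx into each block's create; a NULL from any *_create aborts construction. A condensed sketch of one such loop (the goto label is a hypothetical cleanup path):

	for (i = 0; i < pool->base.pipe_count; i++) {
		pool->base.mis[i] = dce80_mem_input_create(ctx, i);
		if (pool->base.mis[i] == NULL) {
			BREAK_TO_DEBUGGER();
			goto res_create_fail;	/* hypothetical cleanup label */
		}
	}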
ctx                35 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.c 	ctx
ctx                42 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.c 		struct dc_context *ctx,
ctx                66 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.c 		struct dc_context *ctx,
ctx               327 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.c 	PERF_TRACE_CTX(output_tf->ctx);
ctx               514 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.c 	PERF_TRACE_CTX(output_tf->ctx);
ctx                93 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.h 		struct dc_context *ctx,
ctx                98 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_cm_common.h 		struct dc_context *ctx,
ctx                45 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 	dpp->base.ctx
ctx               152 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 		dpp->ctx->dc->debug.max_downscale_src_width != 0 &&
ctx               153 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 		scl_data->viewport.width > dpp->ctx->dc->debug.max_downscale_src_width)
ctx               189 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 	if (!dpp->ctx->dc->debug.always_scale) {
ctx               563 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 	struct dc_context *ctx,
ctx               569 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.c 	dpp->base.ctx = ctx;
ctx              1508 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp.h 	struct dc_context *ctx,
ctx                46 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	dpp->base.ctx
ctx               149 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 				dpp->base.ctx,
ctx               159 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 				dpp->base.ctx,
ctx               169 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 				dpp->base.ctx,
ctx               251 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 			dpp->base.ctx,
ctx               406 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx               435 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx               505 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 			dpp->base.ctx,
ctx               560 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx               588 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx                46 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_dscl.c 	dpp->base.ctx
ctx               489 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_dscl.c 	if (dpp->base.ctx->dc->debug.use_max_lb)
ctx               533 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_dscl.c 			dpp_base, scl_data, dpp_base->ctx->dc->debug.always_scale);
ctx               647 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_dscl.c 	if (dpp->base.ctx->dc->debug.visual_confirm != VISUAL_CONFIRM_DISABLE)
ctx               672 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_dscl.c 			dpp_base, scl_data, dpp_base->ctx->dc->debug.always_scale);
ctx                38 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dwb.c 	dwbc10->base.ctx
ctx               119 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dwb.c 		struct dc_context *ctx,
ctx               125 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dwb.c 	dwbc10->base.ctx = ctx;
ctx               263 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dwb.h 		struct dc_context *ctx,
ctx                34 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	hubbub1->base.ctx
ctx                36 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	hubbub1->base.ctx->logger
ctx               623 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	hubbub1_allow_self_refresh_control(hubbub, !hubbub->ctx->dc->debug.disable_stutter);
ctx               860 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	struct dc *dc = hubbub1->base.ctx->dc;
ctx               953 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	struct dc_context *ctx,
ctx               960 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	hubbub1->base.ctx = ctx;
ctx               969 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.c 	if (ctx->dce_version == DCN_VERSION_1_01)
ctx               337 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubbub.h 	struct dc_context *ctx,
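hubbub1_construct above caches the ctx on the base object and then branches on ctx->dce_version, since DCN 1.01 parts need different programming than DCN 1.0. A sketch of that version gate; the quirk helper is a hypothetical stand-in:

static void hubbub_construct_example(struct dcn10_hubbub *hubbub1,
				     struct dc_context *ctx)
{
	hubbub1->base.ctx = ctx;

	if (ctx->dce_version == DCN_VERSION_1_01)
		apply_dcn101_quirks(hubbub1);	/* hypothetical helper */
}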
ctx                35 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	hubp1->base.ctx
ctx              1255 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	struct dc_context *ctx,
ctx              1262 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c 	hubp1->base.ctx = ctx;
ctx               732 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.h 	struct dc_context *ctx,
ctx                59 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	hws->ctx
ctx                87 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx               100 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx               128 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx               244 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx               504 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (hws->ctx->dc->debug.disable_dpp_power_gate)
ctx               556 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (hws->ctx->dc->debug.disable_hubp_power_gate)
ctx               604 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(hws->ctx->logger);
ctx               608 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		hws->ctx->dc->hwss.dpp_pg_control(hws, plane_id, true);
ctx               609 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		hws->ctx->dc->hwss.hubp_pg_control(hws, plane_id, true);
ctx               669 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_bios *bp = dc->ctx->dc_bios;
ctx               822 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               828 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              1011 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1063 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1181 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_bios *dcb = dc->ctx->dc_bios;
ctx              1191 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              1216 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (dc->ctx->dc_bios->fw_info_valid) {
ctx              1218 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 				dc->ctx->dc_bios->fw_info.pll_info.crystal_frequency;
ctx              1220 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              1224 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 						dc->ctx->dc_bios->fw_info.pll_info.crystal_frequency,
ctx              1403 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		!dpp_base->ctx->dc->debug.always_use_regamma
ctx              1441 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c static void log_tf(struct dc_context *ctx,
ctx              1449 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(ctx->logger);
ctx              1495 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (stream != NULL && stream->ctx != NULL &&
ctx              1497 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		log_tf(stream->ctx,
ctx              1571 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              1600 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct dc_context *dc_ctx = dc->ctx;
ctx              2563 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              2694 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              2726 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              2852 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		if (!dc_set_generic_gpio_for_stereo(true, dc->ctx->gpio_service))
ctx              2853 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 			dc_set_generic_gpio_for_stereo(false, dc->ctx->gpio_service);
ctx              2855 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		dc_set_generic_gpio_for_stereo(false, dc->ctx->gpio_service);
ctx              2948 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 	struct hubbub *hubbub = hws->ctx->dc->res_pool->hubbub;
ctx              2961 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c 		.ref_clk_khz = pipe_ctx->stream->ctx->dc->res_pool->ref_clocks.dchub_ref_clock_inKhz,
ctx                73 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c 	struct dc_context *dc_ctx = dc->ctx;
ctx               111 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c 	struct dc_context *dc_ctx = dc->ctx;
ctx                40 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.c 	ippn10->base.ctx
ctx                64 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.c 	struct dc_context *ctx,
ctx                70 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.c 	ippn10->base.ctx = ctx;
ctx                82 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.c 	struct dc_context *ctx,
ctx                88 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.c 	ippn10->base.ctx = ctx;
ctx               193 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.h 	struct dc_context *ctx,
ctx               201 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_ipp.h 	struct dc_context *ctx,
ctx                41 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	enc10->base.ctx
ctx                43 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	enc10->base.ctx->logger
ctx               101 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	struct dc_bios *bp = enc10->base.ctx->dc_bios;
ctx               646 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	if (enc10->base.ctx->dc->debug.hdmi20_disable &&
ctx               675 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	const struct dc_vbios_funcs *bp_funcs = init_data->ctx->dc_bios->funcs;
ctx               679 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	enc10->base.ctx = init_data->ctx;
ctx               756 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	result = bp_funcs->get_encoder_cap_info(enc10->base.ctx->dc_bios,
ctx               773 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_link_encoder.c 	if (enc10->base.ctx->dc->debug.hdmi20_disable) {
ctx                33 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_mpc.c 	mpc10->base.ctx
ctx               469 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_mpc.c 	struct dc_context *ctx,
ctx               477 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_mpc.c 	mpc10->base.ctx = ctx;
ctx               129 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_mpc.h 	struct dc_context *ctx,
ctx                40 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_opp.c 	oppn10->base.ctx
ctx               408 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_opp.c 	struct dc_context *ctx,
ctx               415 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_opp.c 	oppn10->base.ctx = ctx;
ctx               157 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_opp.h 	struct dc_context *ctx,
ctx                35 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.c 	optc1->base.ctx
ctx               602 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.c 	if (optc->ctx->dce_environment != DCE_ENV_FPGA_MAXIMUS) {
ctx               586 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               595 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dpp1_construct(dpp, ctx, inst,
ctx               601 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               611 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn10_ipp_construct(ipp, ctx, inst,
ctx               618 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               628 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn10_opp_construct(opp, ctx, inst,
ctx               634 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               643 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               669 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               678 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn1_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx               683 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c static struct mpc *dcn10_mpc_create(struct dc_context *ctx)
ctx               691 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn10_mpc_construct(mpc10, ctx,
ctx               700 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c static struct hubbub *dcn10_hubbub_create(struct dc_context *ctx)
ctx               708 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	hubbub1_construct(&dcn10_hubbub->base, ctx,
ctx               717 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		struct dc_context *ctx,
ctx               727 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	tgn10->base.ctx = ctx;
ctx               771 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               783 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dce112_clk_src_construct(clk_src, ctx, bios, id,
ctx               795 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               798 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	generic_reg_get(ctx, mmDC_PINSTRAPS + BASE(mmDC_PINSTRAPS_BASE_IDX),
ctx               803 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx               805 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	return dce_audio_create(ctx, inst,
ctx               811 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx)
ctx               819 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn10_stream_encoder_construct(enc1, ctx, ctx->dc_bios, eng_id,
ctx               838 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx)
ctx               843 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		hws->ctx = ctx;
ctx               874 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c static struct pp_smu_funcs *dcn10_pp_smu_create(struct dc_context *ctx)
ctx               881 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dm_pp_get_funcs(ctx, pp_smu);
ctx               971 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx,
ctx               980 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dcn10_hubp_construct(hubp1, ctx, inst,
ctx              1263 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c static uint32_t read_pipe_fuses(struct dc_context *ctx)
ctx              1265 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	uint32_t value = dm_read_reg_soc15(ctx, mmCC_DC_PIPE_DIS, 0);
ctx              1278 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1279 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	uint32_t pipe_fuses = read_pipe_fuses(ctx);
ctx              1281 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1283 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (ctx->dce_version == DCN_VERSION_1_01)
ctx              1302 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dc->ctx->dce_version == DCN_VERSION_1_01)
ctx              1314 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV)
ctx              1324 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 			dcn10_clock_source_create(ctx, ctx->dc_bios,
ctx              1328 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 			dcn10_clock_source_create(ctx, ctx->dc_bios,
ctx              1332 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 			dcn10_clock_source_create(ctx, ctx->dc_bios,
ctx              1336 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dc->ctx->dce_version == DCN_VERSION_1_0) {
ctx              1338 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 				dcn10_clock_source_create(ctx, ctx->dc_bios,
ctx              1345 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dc->ctx->dce_version == DCN_VERSION_1_01)
ctx              1349 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 			dcn10_clock_source_create(ctx, ctx->dc_bios,
ctx              1362 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	pool->base.dmcu = dcn10_dmcu_create(ctx,
ctx              1372 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              1386 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (dc->ctx->dce_version == DCN_VERSION_1_01) {
ctx              1396 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
ctx              1403 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	dc->dcn_soc->number_of_channels = dc->ctx->asic_id.vram_width / ddr4_dram_width;
ctx              1413 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		if (ASICREV_IS_RV1_F0(dc->ctx->asic_id.hw_internal_rev)) {
ctx              1418 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	pool->base.pp_smu = dcn10_pp_smu_create(ctx);
ctx              1438 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		init_data.ctx = dc->ctx;
ctx              1454 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.hubps[j] = dcn10_hubp_create(ctx, i);
ctx              1462 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.ipps[j] = dcn10_ipp_create(ctx, i);
ctx              1470 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.dpps[j] = dcn10_dpp_create(ctx, i);
ctx              1478 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.opps[j] = dcn10_opp_create(ctx, i);
ctx              1487 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 				ctx, i);
ctx              1498 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.engines[i] = dcn10_aux_engine_create(ctx, i);
ctx              1505 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 		pool->base.hw_i2cs[i] = dcn10_i2c_hw_create(ctx, i);
ctx              1525 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	pool->base.mpc = dcn10_mpc_create(ctx);
ctx              1532 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 	pool->base.hubbub = dcn10_hubbub_create(ctx);
ctx              1540 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c 			(!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ?
ctx                34 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.c 		enc1->base.ctx->logger
ctx                54 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.c 	enc1->base.ctx
ctx              1596 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.c 	struct dc_context *ctx,
ctx              1604 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.c 	enc1->base.ctx = ctx;
ctx               513 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_stream_encoder.h 	struct dc_context *ctx,
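The dcn10 entries above cluster around two idioms: a per-file CTX / DC_LOGGER macro near the top of each .c file (the hits at lines 33-59), and a *_construct() function that stores the shared dc_context into the block's base (hubp1->base.ctx = ctx and friends). A minimal sketch of that pattern follows; my_block and my_block_construct are placeholder names, not identifiers from the tree:

	/* Hypothetical block mirroring the indexed idiom; not kernel code. */
	struct my_block {
		struct dc_context *ctx;	/* same member the inc/hw/*.h entries below declare */
		int inst;
	};

	#define CTX \
		blk->ctx		/* lets shared register macros resolve the context implicitly */

	static void my_block_construct(struct my_block *blk,
				       struct dc_context *ctx, int inst)
	{
		blk->ctx = ctx;		/* the recurring "base.ctx = ctx" assignment */
		blk->inst = inst;
	}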
ctx                43 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c 	dccg_dcn->base.ctx
ctx                45 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c 	dccg->ctx->logger
ctx               125 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c 	switch (dccg_dcn->base.ctx->dc->res_pool->pipe_count) {
ctx               157 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c 	struct dc_context *ctx,
ctx               171 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.c 	base->ctx = ctx;
ctx               109 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dccg.h 	struct dc_context *ctx,
ctx                45 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 	dpp->base.ctx
ctx               394 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 		dpp->ctx->dc->debug.max_downscale_src_width != 0 &&
ctx               395 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 		scl_data->viewport.width > dpp->ctx->dc->debug.max_downscale_src_width)
ctx               443 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 	if (!dpp->ctx->dc->debug.always_scale) {
ctx               496 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 	struct dc_context *ctx,
ctx               502 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.c 	dpp->base.ctx = ctx;
ctx               700 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp.h 	struct dc_context *ctx,
ctx                40 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp_cm.c 	dpp->base.ctx
ctx                57 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp_cm.c 	if (dpp_base->ctx->dc->debug.cm_in_bypass)
ctx               258 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx               286 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dpp_cm.c 	cm_helper_program_xfer_func(dpp->base.ctx, params, &gam_regs);
ctx                62 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.c 	dsc20->base.ctx
ctx                71 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.c 	dsc->ctx->logger
ctx                83 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.c 		struct dc_context *ctx,
ctx                89 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.c 	dsc->base.ctx = ctx;
ctx               699 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.c 	if (IS_FPGA_MAXIMUS_DC(dsc20->base.ctx->dce_environment)) {
ctx               567 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dsc.h 		struct dc_context *ctx,
ctx                37 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb.c 	dwbc20->base.ctx
ctx                40 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb.c 	dwbc20->base.ctx->logger
ctx               317 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb.c 		struct dc_context *ctx,
ctx               323 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb.c 	dwbc20->base.ctx = ctx;
ctx               423 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb.h 	struct dc_context *ctx,
ctx                40 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_dwb_scl.c 	dwbc20->base.ctx
ctx                35 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	hubbub1->base.ctx
ctx                45 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	hubbub1->base.ctx
ctx               219 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	struct dc *dc = hubbub->ctx->dc;
ctx               581 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	if (hubbub1->base.ctx->dc->clk_mgr->clks.prev_p_state_change_support == true &&
ctx               582 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 		hubbub1->base.ctx->dc->clk_mgr->clks.p_state_change_support == false)
ctx               591 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	hubbub1_allow_self_refresh_control(hubbub, !hubbub->ctx->dc->debug.disable_stutter);
ctx               607 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	struct dc_context *ctx,
ctx               612 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.c 	hubbub->base.ctx = ctx;
ctx                87 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubbub.h 	struct dc_context *ctx,
ctx                37 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	hubp2->base.ctx
ctx              1274 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	struct dc_context *ctx,
ctx              1281 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c 	hubp2->base.ctx = ctx;
ctx               234 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.h 		struct dc_context *ctx,
ctx                59 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	hws->ctx
ctx               256 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	if (hws->ctx->dc->debug.disable_dsc_power_gate)
ctx               333 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	if (hws->ctx->dc->debug.disable_dpp_power_gate)
ctx               407 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	if (hws->ctx->dc->debug.disable_hubp_power_gate)
ctx               512 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx               655 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	struct mpc *mpc = pipe_ctx->stream_res.opp->ctx->dc->res_pool->mpc;
ctx               912 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	DC_LOGGER_INIT(hws->ctx->logger);
ctx              1160 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1630 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1636 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	if (!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx              2154 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c 	if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
ctx                38 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	enc10->base.ctx
ctx                40 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	enc10->base.ctx->logger
ctx               252 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	if (!enc->ctx->dc->debug.avoid_vbios_exec_table) {
ctx               359 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	const struct dc_vbios_funcs *bp_funcs = init_data->ctx->dc_bios->funcs;
ctx               364 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	enc10->base.ctx = init_data->ctx;
ctx               441 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	result = bp_funcs->get_encoder_cap_info(enc10->base.ctx->dc_bios,
ctx               458 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_link_encoder.c 	if (enc10->base.ctx->dc->debug.hdmi20_disable) {
ctx                37 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mmhubbub.c 	mcif_wb20->base.ctx
ctx               309 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mmhubbub.c 		struct dc_context *ctx,
ctx               315 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mmhubbub.c 	mcif_wb20->base.ctx = ctx;
ctx               538 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mmhubbub.h 	struct dc_context *ctx,
ctx                37 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	mpc20->base.ctx
ctx               161 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 			mpc20->base.ctx,
ctx               203 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 			mpc20->base.ctx,
ctx               309 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	cm_helper_program_xfer_func(mpc20->base.ctx, params, &gam_regs);
ctx               336 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	cm_helper_program_xfer_func(mpc20->base.ctx, params, &gam_regs);
ctx               371 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	if (mpc->ctx->dc->debug.cm_in_bypass) {
ctx               376 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	if (mpc->ctx->dc->work_arounds.dedcn20_305_wa == false) {
ctx               398 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	if (mpc->ctx->dc->debug.cm_in_bypass) {
ctx               516 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	struct dc_context *ctx,
ctx               524 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.c 	mpc20->base.ctx = ctx;
ctx               248 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_mpc.h 	struct dc_context *ctx,
ctx                38 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_opp.c 	oppn20->base.ctx
ctx               340 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_opp.c 	struct dc_context *ctx,
ctx               346 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_opp.c 	oppn20->base.ctx = ctx;
ctx               134 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_opp.h 	struct dc_context *ctx,
ctx                34 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_optc.c 	optc1->base.ctx
ctx               330 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_optc.c 	if (optc->ctx->dce_environment != DCE_ENV_FPGA_MAXIMUS)
ctx               973 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx               982 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (dpp2_construct(dpp, ctx, inst,
ctx               992 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx              1002 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dcn20_ipp_construct(ipp, ctx, inst,
ctx              1009 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx              1019 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dcn20_opp_construct(opp, ctx, inst,
ctx              1025 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx              1034 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx              1060 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx              1069 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dcn2_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx              1074 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct mpc *dcn20_mpc_create(struct dc_context *ctx)
ctx              1082 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dcn20_mpc_construct(mpc20, ctx,
ctx              1091 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct hubbub *dcn20_hubbub_create(struct dc_context *ctx)
ctx              1100 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	hubbub2_construct(hubbub, ctx,
ctx              1108 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		vmid->ctx = ctx;
ctx              1119 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		struct dc_context *ctx,
ctx              1129 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	tgn10->base.ctx = ctx;
ctx              1173 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx              1185 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (dcn20_clk_src_construct(clk_src, ctx, bios, id,
ctx              1197 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx              1200 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	generic_reg_get(ctx, mmDC_PINSTRAPS + BASE(mmDC_PINSTRAPS_BASE_IDX),
ctx              1205 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx              1207 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	return dce_audio_create(ctx, inst,
ctx              1213 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx)
ctx              1221 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (ASICREV_IS_NAVI14_M(ctx->asic_id.hw_internal_rev)) {
ctx              1226 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dcn20_stream_encoder_construct(enc1, ctx, ctx->dc_bios, eng_id,
ctx              1246 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx)
ctx              1251 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		hws->ctx = ctx;
ctx              1282 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx              1292 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dsc2_construct(dsc, ctx, inst, &dsc_regs[inst], &dsc_shift, &dsc_mask);
ctx              1416 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx,
ctx              1425 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (hubp2_construct(hubp2, ctx, inst,
ctx              2849 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              3017 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c bool dcn20_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
ctx              3032 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		dcn20_dwbc_construct(dwbc20, ctx,
ctx              3042 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c bool dcn20_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
ctx              3058 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		dcn20_mmhubbub_construct(mcif_wb20, ctx,
ctx              3069 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pp_smu_funcs *dcn20_pp_smu_create(struct dc_context *ctx)
ctx              3076 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	dm_pp_get_funcs(ctx, pp_smu);
ctx              3078 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (pp_smu->ctx.ver != PP_SMU_VER_NV)
ctx              3278 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			get_asic_rev_soc_bb(dc->ctx->asic_id.hw_internal_rev);
ctx              3280 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			get_asic_rev_ip_params(dc->ctx->asic_id.hw_internal_rev);
ctx              3282 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              3437 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              3440 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			get_asic_rev_soc_bb(ctx->asic_id.hw_internal_rev);
ctx              3442 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			get_asic_rev_ip_params(ctx->asic_id.hw_internal_rev);
ctx              3444 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			get_dml_project_version(ctx->asic_id.hw_internal_rev);
ctx              3446 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              3449 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (ASICREV_IS_NAVI14_M(ctx->asic_id.hw_internal_rev)) {
ctx              3473 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV) {
ctx              3475 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	} else if (dc->ctx->dce_environment == DCE_ENV_FPGA_MAXIMUS) {
ctx              3494 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3498 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3502 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3506 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3510 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3514 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3520 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			dcn20_clock_source_create(ctx, ctx->dc_bios,
ctx              3532 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.dccg = dccg2_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
ctx              3539 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.dmcu = dcn20_dmcu_create(ctx,
ctx              3549 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.abm = dce_abm_create(ctx,
ctx              3559 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.pp_smu = dcn20_pp_smu_create(ctx);
ctx              3612 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	init_data.ctx = dc->ctx;
ctx              3619 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.hubps[i] = dcn20_hubp_create(ctx, i);
ctx              3627 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.ipps[i] = dcn20_ipp_create(ctx, i);
ctx              3635 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.dpps[i] = dcn20_dpp_create(ctx, i);
ctx              3644 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.engines[i] = dcn20_aux_engine_create(ctx, i);
ctx              3651 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.hw_i2cs[i] = dcn20_i2c_hw_create(ctx, i);
ctx              3662 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.opps[i] = dcn20_opp_create(ctx, i);
ctx              3673 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				ctx, i);
ctx              3683 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.mpc = dcn20_mpc_create(ctx);
ctx              3690 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	pool->base.hubbub = dcn20_hubbub_create(ctx);
ctx              3699 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		pool->base.dscs[i] = dcn20_dsc_create(ctx, i);
ctx              3708 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (!dcn20_dwbc_create(ctx, &pool->base)) {
ctx              3713 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (!dcn20_mmhubbub_create(ctx, &pool->base)) {
ctx              3720 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			(!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ?
ctx                63 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx);
ctx                66 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx);
ctx                75 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx,
ctx                79 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx, uint32_t inst);
ctx                83 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx, uint32_t inst);
ctx                86 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx, uint32_t inst);
ctx                89 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx,
ctx                95 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx, uint32_t inst);
ctx                98 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct pp_smu_funcs *dcn20_pp_smu_create(struct dc_context *ctx);
ctx               102 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 	struct dc_context *ctx,
ctx               105 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 		struct dc_context *ctx,
ctx               107 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct mpc *dcn20_mpc_create(struct dc_context *ctx);
ctx               108 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct hubbub *dcn20_hubbub_create(struct dc_context *ctx);
ctx               110 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h bool dcn20_dwbc_create(struct dc_context *ctx, struct resource_pool *pool);
ctx               111 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h bool dcn20_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool);
ctx                34 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_stream_encoder.c 		enc1->base.ctx->logger
ctx                46 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_stream_encoder.c 	enc1->base.ctx
ctx               600 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_stream_encoder.c 	struct dc_context *ctx,
ctx               608 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_stream_encoder.c 	enc1->base.ctx = ctx;
ctx                90 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_stream_encoder.h 	struct dc_context *ctx,
ctx                35 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_vmid.c 	vmid->ctx
ctx                82 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_vmid.h 	struct dc_context *ctx;
ctx                34 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	hubbub1->base.ctx->logger
ctx                36 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	hubbub1->base.ctx
ctx                46 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	hubbub1->base.ctx
ctx               509 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	hubbub1_allow_self_refresh_control(hubbub, !hubbub->ctx->dc->debug.disable_stutter);
ctx               591 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	struct dc_context *ctx,
ctx               596 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.c 	hubbub->base.ctx = ctx;
ctx               127 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubbub.h 	struct dc_context *ctx,
ctx                34 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c 	hubp21->base.ctx
ctx               228 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c 	struct dc_context *ctx,
ctx               235 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c 	hubp21->base.ctx = ctx;
ctx               115 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.h 	struct dc_context *ctx,
ctx               641 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx               651 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dcn20_ipp_construct(ipp, ctx, inst,
ctx               657 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx,
ctx               666 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (dpp2_construct(dpp, ctx, inst,
ctx               676 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx,
ctx               685 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dce110_aux_engine_construct(aux_engine, ctx, inst,
ctx               711 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx,
ctx               720 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dcn2_i2c_hw_construct(dce_i2c_hw, ctx, inst,
ctx              1073 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	DC_LOGGER_INIT(dc->ctx->logger);
ctx              1123 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		struct dc_context *ctx,
ctx              1135 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (dcn20_clk_src_construct(clk_src, ctx, bios, id,
ctx              1146 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx,
ctx              1155 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (hubp21_construct(hubp21, ctx, inst,
ctx              1164 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c static struct hubbub *dcn21_hubbub_create(struct dc_context *ctx)
ctx              1174 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	hubbub21_construct(hubbub, ctx,
ctx              1182 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		vmid->ctx = ctx;
ctx              1193 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx              1203 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dcn20_opp_construct(opp, ctx, inst,
ctx              1209 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		struct dc_context *ctx,
ctx              1219 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	tgn10->base.ctx = ctx;
ctx              1230 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c struct mpc *dcn21_mpc_create(struct dc_context *ctx)
ctx              1238 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dcn20_mpc_construct(mpc20, ctx,
ctx              1248 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx,
ctx              1251 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	generic_reg_get(ctx, mmDC_PINSTRAPS + BASE(mmDC_PINSTRAPS_BASE_IDX),
ctx              1259 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx, uint32_t inst)
ctx              1269 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dsc2_construct(dsc, ctx, inst, &dsc_regs[inst], &dsc_shift, &dsc_mask);
ctx              1335 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c struct pp_smu_funcs *dcn21_pp_smu_create(struct dc_context *ctx)
ctx              1339 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pp_smu->ctx.ver = PP_SMU_VER_RN;
ctx              1356 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		struct dc_context *ctx, unsigned int inst)
ctx              1358 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	return dce_audio_create(ctx, inst,
ctx              1368 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx)
ctx              1376 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	dcn20_stream_encoder_construct(enc1, ctx, ctx->dc_bios, eng_id,
ctx              1396 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx)
ctx              1401 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		hws->ctx = ctx;
ctx              1443 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	struct dc_context *ctx = dc->ctx;
ctx              1446 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	ctx->dc_bios->regs = &bios_regs;
ctx              1450 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment))
ctx              1473 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (dc->ctx->dce_environment == DCE_ENV_PRODUCTION_DRV)
ctx              1475 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	else if (dc->ctx->dce_environment == DCE_ENV_FPGA_MAXIMUS) {
ctx              1490 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 			dcn21_clock_source_create(ctx, ctx->dc_bios,
ctx              1494 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 			dcn21_clock_source_create(ctx, ctx->dc_bios,
ctx              1498 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 			dcn21_clock_source_create(ctx, ctx->dc_bios,
ctx              1506 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 			dcn21_clock_source_create(ctx, ctx->dc_bios,
ctx              1518 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pool->base.dccg = dccg2_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
ctx              1526 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pool->base.dmcub = dcn21_dmcub_create(ctx,
ctx              1537 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pool->base.pp_smu = dcn21_pp_smu_create(ctx);
ctx              1541 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	init_data.ctx = dc->ctx;
ctx              1548 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.hubps[i] = dcn21_hubp_create(ctx, i);
ctx              1556 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.ipps[i] = dcn21_ipp_create(ctx, i);
ctx              1564 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.dpps[i] = dcn21_dpp_create(ctx, i);
ctx              1574 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.engines[i] = dcn21_aux_engine_create(ctx, i);
ctx              1581 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.hw_i2cs[i] = dcn21_i2c_hw_create(ctx, i);
ctx              1592 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.opps[i] = dcn21_opp_create(ctx, i);
ctx              1603 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 				ctx, i);
ctx              1613 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pool->base.mpc = dcn21_mpc_create(ctx);
ctx              1620 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	pool->base.hubbub = dcn21_hubbub_create(ctx);
ctx              1629 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 		pool->base.dscs[i] = dcn21_dsc_create(ctx, i);
ctx              1638 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (!dcn20_dwbc_create(ctx, &pool->base)) {
ctx              1643 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	if (!dcn20_mmhubbub_create(ctx, &pool->base)) {
ctx              1650 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 			(!IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment) ?
ctx                39 drivers/gpu/drm/amd/display/dc/dm_helpers.h 	struct dc_context *ctx,
ctx                48 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx                55 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx                64 drivers/gpu/drm/amd/display/dc/dm_helpers.h 	struct dc_context *ctx,
ctx                71 drivers/gpu/drm/amd/display/dc/dm_helpers.h 	struct dc_context *ctx,
ctx                78 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx                84 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx                89 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx                94 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               100 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               110 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               117 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               123 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               132 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               137 drivers/gpu/drm/amd/display/dc/dm_helpers.h 		struct dc_context *ctx,
ctx               294 drivers/gpu/drm/amd/display/dc/dm_pp_smu.h 	struct pp_smu ctx;
ctx                44 drivers/gpu/drm/amd/display/dc/dm_services.h 	struct dc_context *ctx,
ctx                56 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx                62 drivers/gpu/drm/amd/display/dc/dm_services.h #define dm_read_reg(ctx, address)	\
ctx                63 drivers/gpu/drm/amd/display/dc/dm_services.h 		dm_read_reg_func(ctx, address, __func__)
ctx                67 drivers/gpu/drm/amd/display/dc/dm_services.h #define dm_write_reg(ctx, address, value)	\
ctx                68 drivers/gpu/drm/amd/display/dc/dm_services.h 	dm_write_reg_func(ctx, address, value, __func__)
ctx                71 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx                82 drivers/gpu/drm/amd/display/dc/dm_services.h 	cgs_write_register(ctx->cgs_device, address, value);
ctx                83 drivers/gpu/drm/amd/display/dc/dm_services.h 	trace_amdgpu_dc_wreg(&ctx->perf_trace->write_count, address, value);
ctx                87 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx                91 drivers/gpu/drm/amd/display/dc/dm_services.h 	return cgs_read_ind_register(ctx->cgs_device, addr_space, index);
ctx                95 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               100 drivers/gpu/drm/amd/display/dc/dm_services.h 	cgs_write_ind_register(ctx->cgs_device, addr_space, index, value);
ctx               134 drivers/gpu/drm/amd/display/dc/dm_services.h uint32_t generic_reg_set_ex(const struct dc_context *ctx,
ctx               138 drivers/gpu/drm/amd/display/dc/dm_services.h uint32_t generic_reg_update_ex(const struct dc_context *ctx,
ctx               149 drivers/gpu/drm/amd/display/dc/dm_services.h void generic_reg_wait(const struct dc_context *ctx,
ctx               159 drivers/gpu/drm/amd/display/dc/dm_services.h #define dm_write_reg_soc15(ctx, reg, inst_offset, value)	\
ctx               160 drivers/gpu/drm/amd/display/dc/dm_services.h 		dm_write_reg_func(ctx, reg + DCE_BASE.instance[0].segment[reg##_BASE_IDX] + inst_offset, value, __func__)
ctx               162 drivers/gpu/drm/amd/display/dc/dm_services.h #define dm_read_reg_soc15(ctx, reg, inst_offset)	\
ctx               163 drivers/gpu/drm/amd/display/dc/dm_services.h 		dm_read_reg_func(ctx, reg + DCE_BASE.instance[0].segment[reg##_BASE_IDX] + inst_offset, __func__)
ctx               165 drivers/gpu/drm/amd/display/dc/dm_services.h #define generic_reg_update_soc15(ctx, inst_offset, reg_name, n, ...)\
ctx               166 drivers/gpu/drm/amd/display/dc/dm_services.h 		generic_reg_update_ex(ctx, DCE_BASE.instance[0].segment[mm##reg_name##_BASE_IDX] +  mm##reg_name + inst_offset, \
ctx               169 drivers/gpu/drm/amd/display/dc/dm_services.h #define generic_reg_set_soc15(ctx, inst_offset, reg_name, n, ...)\
ctx               170 drivers/gpu/drm/amd/display/dc/dm_services.h 		generic_reg_set_ex(ctx, DCE_BASE.instance[0].segment[mm##reg_name##_BASE_IDX] + mm##reg_name + inst_offset, 0, \
ctx               199 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               204 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               209 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               214 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               217 drivers/gpu/drm/amd/display/dc/dm_services.h void dm_pp_get_funcs(struct dc_context *ctx,
ctx               232 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               236 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               240 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               244 drivers/gpu/drm/amd/display/dc/dm_services.h 	const struct dc_context *ctx,
ctx               281 drivers/gpu/drm/amd/display/dc/dm_services.h bool dm_write_persistent_data(struct dc_context *ctx,
ctx               315 drivers/gpu/drm/amd/display/dc/dm_services.h bool dm_read_persistent_data(struct dc_context *ctx,
ctx               324 drivers/gpu/drm/amd/display/dc/dm_services.h 	(struct dc_context *ctx, enum dm_acpi_display_type display,
ctx               327 drivers/gpu/drm/amd/display/dc/dm_services.h bool dm_dmcu_set_pipe(struct dc_context *ctx, unsigned int controller_id);
ctx               337 drivers/gpu/drm/amd/display/dc/dm_services.h static inline unsigned long long dm_get_timestamp(struct dc_context *ctx)
ctx               342 drivers/gpu/drm/amd/display/dc/dm_services.h unsigned long long dm_get_elapse_time_in_ns(struct dc_context *ctx,
ctx               361 drivers/gpu/drm/amd/display/dc/dm_services.h void dm_dtn_log_begin(struct dc_context *ctx,
ctx               363 drivers/gpu/drm/amd/display/dc/dm_services.h void dm_dtn_log_append_v(struct dc_context *ctx,
ctx               366 drivers/gpu/drm/amd/display/dc/dm_services.h void dm_dtn_log_end(struct dc_context *ctx,
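The dm_services.h hits document the register-access surface: dm_read_reg()/dm_write_reg() expand to dm_read_reg_func()/dm_write_reg_func() with __func__ appended for tracing, and the inline helpers route through the cgs indirect-register calls. A minimal read-modify-write sketch, assuming a placeholder offset MY_REG and bit 0 as the field of interest (neither is a real register definition):

	/* Illustrative only: MY_REG is a stand-in, not a real register offset. */
	static void example_rmw(const struct dc_context *ctx)
	{
		uint32_t value = dm_read_reg(ctx, MY_REG);	/* dm_read_reg_func(ctx, MY_REG, __func__) */

		value |= 0x1;					/* set a hypothetical enable bit */
		dm_write_reg(ctx, MY_REG, value);		/* traced via dm_write_reg_func() */
	}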
ctx               292 drivers/gpu/drm/amd/display/dc/gpio/gpio_base.c 		gpio->service->factory.funcs->init_ddc_data(&gpio->hw_container.ddc, service->ctx, id, en);
ctx               295 drivers/gpu/drm/amd/display/dc/gpio/gpio_base.c 		gpio->service->factory.funcs->init_ddc_data(&gpio->hw_container.ddc, service->ctx, id, en);
ctx               298 drivers/gpu/drm/amd/display/dc/gpio/gpio_base.c 		gpio->service->factory.funcs->init_generic(&gpio->hw_container.generic, service->ctx, id, en);
ctx               301 drivers/gpu/drm/amd/display/dc/gpio/gpio_base.c 		gpio->service->factory.funcs->init_hpd(&gpio->hw_container.hpd, service->ctx, id, en);
ctx                58 drivers/gpu/drm/amd/display/dc/gpio/gpio_service.c 	struct dc_context *ctx)
ctx                85 drivers/gpu/drm/amd/display/dc/gpio/gpio_service.c 		service->ctx = ctx;
ctx               508 drivers/gpu/drm/amd/display/dc/gpio/gpio_service.c 	ddc->ctx = service->ctx;
ctx                33 drivers/gpu/drm/amd/display/dc/gpio/gpio_service.h 	struct dc_context *ctx;
ctx                45 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.c 	ddc->base.base.ctx
ctx               227 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.c 	struct dc_context *ctx)
ctx               229 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.c 	dal_hw_gpio_construct(&ddc->base, id, en, ctx);
ctx               235 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.c 	struct dc_context *ctx,
ctx               250 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.c 	construct(*hw_ddc, id, en, ctx);
ctx                43 drivers/gpu/drm/amd/display/dc/gpio/hw_ddc.h 	struct dc_context *ctx,
ctx               118 drivers/gpu/drm/amd/display/dc/gpio/hw_factory.c 	struct dc_context *ctx,
ctx                41 drivers/gpu/drm/amd/display/dc/gpio/hw_factory.h 				struct dc_context *ctx,
ctx                46 drivers/gpu/drm/amd/display/dc/gpio/hw_factory.h 				struct dc_context *ctx,
ctx                51 drivers/gpu/drm/amd/display/dc/gpio/hw_factory.h 				struct dc_context *ctx,
ctx                43 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	generic->base.base.ctx
ctx                53 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	struct dc_context *ctx)
ctx                55 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	dal_hw_gpio_construct(&pin->base, id, en, ctx);
ctx               106 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	struct dc_context *ctx)
ctx               108 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	dal_hw_generic_construct(generic, id, en, ctx);
ctx               114 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	struct dc_context *ctx,
ctx               129 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.c 	construct(*hw_generic, id, en, ctx);
ctx                44 drivers/gpu/drm/amd/display/dc/gpio/hw_generic.h 	struct dc_context *ctx,
ctx                38 drivers/gpu/drm/amd/display/dc/gpio/hw_gpio.c 	gpio->base.ctx
ctx               183 drivers/gpu/drm/amd/display/dc/gpio/hw_gpio.c 	struct dc_context *ctx)
ctx               185 drivers/gpu/drm/amd/display/dc/gpio/hw_gpio.c 	pin->base.ctx = ctx;
ctx                45 drivers/gpu/drm/amd/display/dc/gpio/hw_gpio.h 	struct dc_context *ctx;
ctx               116 drivers/gpu/drm/amd/display/dc/gpio/hw_gpio.h 	struct dc_context *ctx);
ctx                43 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	hpd->base.base.ctx
ctx                53 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	struct dc_context *ctx)
ctx                55 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	dal_hw_gpio_construct(&pin->base, id, en, ctx);
ctx               136 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	struct dc_context *ctx)
ctx               138 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	dal_hw_hpd_construct(hpd, id, en, ctx);
ctx               144 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	struct dc_context *ctx,
ctx               159 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.c 	construct(*hw_hpd, id, en, ctx);
ctx                43 drivers/gpu/drm/amd/display/dc/gpio/hw_hpd.h 	struct dc_context *ctx,
ctx               137 drivers/gpu/drm/amd/display/dc/inc/clock_source.h 	struct dc_context *ctx;
ctx               157 drivers/gpu/drm/amd/display/dc/inc/clock_source.h 	struct dc_context *ctx;
ctx               177 drivers/gpu/drm/amd/display/dc/inc/clock_source.h 	struct dc_context *ctx;
ctx                79 drivers/gpu/drm/amd/display/dc/inc/compressor.h 	struct dc_context *ctx;
ctx                60 drivers/gpu/drm/amd/display/dc/inc/core_types.h 	struct dc_context *ctx; /* TODO: remove 'dal' when DC is complete. */
ctx                69 drivers/gpu/drm/amd/display/dc/inc/dc_link_ddc.h 	struct dc_context *ctx;
ctx               481 drivers/gpu/drm/amd/display/dc/inc/dce_calcs.h 	struct dc_context *ctx,
ctx                38 drivers/gpu/drm/amd/display/dc/inc/hw/abm.h 	struct dc_context *ctx;
ctx                58 drivers/gpu/drm/amd/display/dc/inc/hw/audio.h 	struct dc_context *ctx;
ctx                87 drivers/gpu/drm/amd/display/dc/inc/hw/aux_engine.h 	struct dc_context *ctx;
ctx               186 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h 	struct dc_context *ctx;
ctx               198 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h struct clk_mgr *dc_clk_mgr_create(struct dc_context *ctx, struct pp_smu_funcs *pp_smu, struct dccg *dccg);
ctx                73 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h 	clk_mgr->base.ctx
ctx                75 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h 	clk_mgr->ctx->logger
ctx                32 drivers/gpu/drm/amd/display/dc/inc/hw/dccg.h 	struct dc_context *ctx;
ctx               154 drivers/gpu/drm/amd/display/dc/inc/hw/dchubbub.h 	struct dc_context *ctx;
ctx                49 drivers/gpu/drm/amd/display/dc/inc/hw/dmcu.h 	struct dc_context *ctx;
ctx                34 drivers/gpu/drm/amd/display/dc/inc/hw/dpp.h 	struct dc_context *ctx;
ctx                86 drivers/gpu/drm/amd/display/dc/inc/hw/dsc.h 	struct dc_context *ctx;
ctx               111 drivers/gpu/drm/amd/display/dc/inc/hw/dwb.h 	struct dc_context *ctx;
ctx                55 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                59 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                63 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                67 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                71 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                75 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                79 drivers/gpu/drm/amd/display/dc/inc/hw/gpio.h 		struct dc_context *ctx,
ctx                57 drivers/gpu/drm/amd/display/dc/inc/hw/hubp.h 	struct dc_context *ctx;
ctx                38 drivers/gpu/drm/amd/display/dc/inc/hw/ipp.h 	struct dc_context *ctx;
ctx                50 drivers/gpu/drm/amd/display/dc/inc/hw/link_encoder.h 	struct dc_context *ctx;
ctx               108 drivers/gpu/drm/amd/display/dc/inc/hw/link_encoder.h 	struct dc_context *ctx;
ctx                69 drivers/gpu/drm/amd/display/dc/inc/hw/mcif_wb.h 	struct dc_context *ctx;
ctx                72 drivers/gpu/drm/amd/display/dc/inc/hw/mem_input.h 	struct dc_context *ctx;
ctx               126 drivers/gpu/drm/amd/display/dc/inc/hw/mpc.h 	struct dc_context *ctx;
ctx               205 drivers/gpu/drm/amd/display/dc/inc/hw/opp.h 	struct dc_context *ctx;
ctx               107 drivers/gpu/drm/amd/display/dc/inc/hw/stream_encoder.h 	struct dc_context *ctx;
ctx               127 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h 	struct dc_context *ctx;
ctx                39 drivers/gpu/drm/amd/display/dc/inc/hw/transform.h 	struct dc_context *ctx;
ctx                59 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h 	struct dc_context *ctx;
ctx               180 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h 					struct dc_context *ctx,
ctx               391 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get(const struct dc_context *ctx, uint32_t addr,
ctx               394 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get2(const struct dc_context *ctx, uint32_t addr,
ctx               398 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get3(const struct dc_context *ctx, uint32_t addr,
ctx               403 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get4(const struct dc_context *ctx, uint32_t addr,
ctx               409 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get5(const struct dc_context *ctx, uint32_t addr,
ctx               416 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get6(const struct dc_context *ctx, uint32_t addr,
ctx               424 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get7(const struct dc_context *ctx, uint32_t addr,
ctx               433 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_reg_get8(const struct dc_context *ctx, uint32_t addr,
ctx               474 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h void generic_write_indirect_reg(const struct dc_context *ctx,
ctx               478 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_read_indirect_reg(const struct dc_context *ctx,
ctx               482 drivers/gpu/drm/amd/display/dc/inc/reg_helper.h uint32_t generic_indirect_reg_update_ex(const struct dc_context *ctx,
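[Note: the generic_reg_get2() through generic_reg_get8() declarations above suggest helpers that perform one register read and then extract two to eight shifted, masked fields from the returned value. A hedged sketch of that shift-and-mask idiom; the mask/shift values and field names below are hypothetical, not the DC register map:]

	#include <stdint.h>
	#include <stdio.h>

	/* Extract one field from an already-read register value. */
	static uint32_t reg_get_field(uint32_t reg_value, uint32_t mask, int shift)
	{
		return (reg_value & mask) >> shift;
	}

	int main(void)
	{
		uint32_t reg = 0x00C30001;  /* pretend this came from one MMIO read */

		/* Two fields from a single read, as generic_reg_get2() would return. */
		printf("enable=%u\n", reg_get_field(reg, 0x00000001, 0));
		printf("mode=%u\n",   reg_get_field(reg, 0x00FF0000, 16));
		return 0;
	}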
ctx                65 drivers/gpu/drm/amd/display/dc/inc/resource.h 			struct dc_context *ctx, struct resource_straps *straps);
ctx                68 drivers/gpu/drm/amd/display/dc/inc/resource.h 			struct dc_context *ctx, unsigned int inst);
ctx                71 drivers/gpu/drm/amd/display/dc/inc/resource.h 			enum engine_id eng_id, struct dc_context *ctx);
ctx                74 drivers/gpu/drm/amd/display/dc/inc/resource.h 			struct dc_context *ctx);
ctx                42 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	irq_service->ctx->logger
ctx                48 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx                55 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx                61 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx               206 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	struct dc_context *dc_ctx = irq_service->ctx;
ctx               207 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 	struct dc *core_dc = irq_service->ctx->dc;
ctx               209 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c 			dc_interrupt_to_irq_source(irq_service->ctx->dc,
ctx                47 drivers/gpu/drm/amd/display/dc/irq/dce120/irq_service_dce120.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx                56 drivers/gpu/drm/amd/display/dc/irq/dce120/irq_service_dce120.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx                64 drivers/gpu/drm/amd/display/dc/irq/dce120/irq_service_dce120.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx                47 drivers/gpu/drm/amd/display/dc/irq/dce80/irq_service_dce80.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx                56 drivers/gpu/drm/amd/display/dc/irq/dce80/irq_service_dce80.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx                64 drivers/gpu/drm/amd/display/dc/irq/dce80/irq_service_dce80.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx               128 drivers/gpu/drm/amd/display/dc/irq/dcn10/irq_service_dcn10.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx               137 drivers/gpu/drm/amd/display/dc/irq/dcn10/irq_service_dcn10.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx               145 drivers/gpu/drm/amd/display/dc/irq/dcn10/irq_service_dcn10.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx               128 drivers/gpu/drm/amd/display/dc/irq/dcn20/irq_service_dcn20.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx               137 drivers/gpu/drm/amd/display/dc/irq/dcn20/irq_service_dcn20.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx               145 drivers/gpu/drm/amd/display/dc/irq/dcn20/irq_service_dcn20.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx               129 drivers/gpu/drm/amd/display/dc/irq/dcn21/irq_service_dcn21.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx               138 drivers/gpu/drm/amd/display/dc/irq/dcn21/irq_service_dcn21.c 	value = dm_read_reg(irq_service->ctx, info->enable_reg);
ctx               146 drivers/gpu/drm/amd/display/dc/irq/dcn21/irq_service_dcn21.c 	dm_write_reg(irq_service->ctx, info->enable_reg, value);
ctx                51 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 		irq_service->ctx
ctx                53 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	irq_service->ctx->logger
ctx                59 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	if (!init_data || !init_data->ctx) {
ctx                64 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	irq_service->ctx = init_data->ctx;
ctx                95 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx                99 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	dm_write_reg(irq_service->ctx, addr, value);
ctx               132 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	uint32_t value = dm_read_reg(irq_service->ctx, addr);
ctx               136 drivers/gpu/drm/amd/display/dc/irq/irq_service.c 	dm_write_reg(irq_service->ctx, addr, value);
ctx                67 drivers/gpu/drm/amd/display/dc/irq/irq_service.h 	struct dc_context *ctx;
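[Note: the repeated dm_read_reg()/dm_write_reg() pairs around info->enable_reg in the irq_service fragments above are a read-modify-write of one interrupt source's enable bit. A minimal model of that pattern over a fake register file; read_reg/write_reg and the mask are illustrative stand-ins for dm_read_reg/dm_write_reg, not the DM API:]

	#include <stdint.h>
	#include <stdio.h>

	static uint32_t regs[16];                            /* fake register file */

	static uint32_t read_reg(uint32_t addr)              { return regs[addr]; }
	static void     write_reg(uint32_t addr, uint32_t v) { regs[addr] = v; }

	static void irq_source_set_enabled(uint32_t enable_reg, uint32_t enable_mask,
					   int enable)
	{
		uint32_t value = read_reg(enable_reg);  /* read current enable bits */

		if (enable)
			value |= enable_mask;           /* set this source's bit */
		else
			value &= ~enable_mask;          /* or clear it */

		write_reg(enable_reg, value);           /* write the result back */
	}

	int main(void)
	{
		irq_source_set_enabled(3, 1u << 4, 1);
		printf("reg3=0x%08x\n", regs[3]);       /* prints 0x00000010 */
		return 0;
	}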
ctx               109 drivers/gpu/drm/amd/display/dc/virtual/virtual_link_encoder.c 	enc->ctx = init_data->ctx;
ctx               128 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.c 	struct dc_context *ctx,
ctx               137 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.c 	enc->ctx = ctx;
ctx               145 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.c 	struct dc_context *ctx, struct dc_bios *bp)
ctx               152 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.c 	if (virtual_stream_encoder_construct(enc, ctx, bp))
ctx                32 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.h 	struct dc_context *ctx, struct dc_bios *bp);
ctx                36 drivers/gpu/drm/amd/display/dc/virtual/virtual_stream_encoder.h 	struct dc_context *ctx,
ctx                34 drivers/gpu/drm/amd/display/include/bios_parser_interface.h 	struct dc_context *ctx;
ctx                47 drivers/gpu/drm/amd/display/include/gpio_service_interface.h 	struct dc_context *ctx);
ctx                30 drivers/gpu/drm/amd/display/include/irq_service_interface.h 	struct dc_context *ctx;
ctx               141 drivers/gpu/drm/amd/display/include/logger_interface.h 	unsigned long long perf_trc_start_stmp = dm_get_timestamp(dc->ctx)
ctx               145 drivers/gpu/drm/amd/display/include/logger_interface.h 		unsigned long long perf_trc_end_stmp = dm_get_timestamp(dc->ctx); \
ctx                34 drivers/gpu/drm/amd/display/include/vector.h 	struct dc_context *ctx;
ctx                39 drivers/gpu/drm/amd/display/include/vector.h 	struct dc_context *ctx,
ctx                44 drivers/gpu/drm/amd/display/include/vector.h 	struct dc_context *ctx,
ctx                51 drivers/gpu/drm/amd/display/include/vector.h 	struct dc_context *ctx,
ctx               129 drivers/gpu/drm/amd/display/modules/stats/stats.c 	if (dm_read_persistent_data(dc->ctx, NULL, NULL,
ctx               136 drivers/gpu/drm/amd/display/modules/stats/stats.c 		if (dm_read_persistent_data(dc->ctx, NULL, NULL,
ctx               212 drivers/gpu/drm/amd/display/modules/stats/stats.c 	logger = dc->ctx->logger;
ctx                32 drivers/gpu/drm/amd/include/atom-bits.h #define U8(ptr) get_u8(ctx->ctx->bios, (ptr))
ctx                33 drivers/gpu/drm/amd/include/atom-bits.h #define CU8(ptr) get_u8(ctx->bios, (ptr))
ctx                38 drivers/gpu/drm/amd/include/atom-bits.h #define U16(ptr) get_u16(ctx->ctx->bios, (ptr))
ctx                39 drivers/gpu/drm/amd/include/atom-bits.h #define CU16(ptr) get_u16(ctx->bios, (ptr))
ctx                44 drivers/gpu/drm/amd/include/atom-bits.h #define U32(ptr) get_u32(ctx->ctx->bios, (ptr))
ctx                45 drivers/gpu/drm/amd/include/atom-bits.h #define CU32(ptr) get_u32(ctx->bios, (ptr))
ctx                46 drivers/gpu/drm/amd/include/atom-bits.h #define CSTR(ptr) (((char *)(ctx->bios))+(ptr))
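[Note: the atom-bits.h macros above read fields out of a BIOS byte image; CU8/CU16/CU32 go through the current context's bios pointer, while U8/U16/U32 reach the parent interpreter context via ctx->ctx->bios. A hedged sketch of the little-endian accessors those macros rely on; the sample image is made up:]

	#include <stdint.h>
	#include <stdio.h>

	static uint8_t get_u8(const uint8_t *bios, int ptr)
	{
		return bios[ptr];
	}

	static uint16_t get_u16(const uint8_t *bios, int ptr)
	{
		/* little-endian: low byte first */
		return (uint16_t)(get_u8(bios, ptr) | (get_u8(bios, ptr + 1) << 8));
	}

	static uint32_t get_u32(const uint8_t *bios, int ptr)
	{
		return (uint32_t)get_u16(bios, ptr) |
		       ((uint32_t)get_u16(bios, ptr + 2) << 16);
	}

	int main(void)
	{
		const uint8_t bios[] = { 0x34, 0x12, 0x78, 0x56 };

		printf("u16=0x%04x u32=0x%08x\n",
		       get_u16(bios, 0), get_u32(bios, 0));  /* 0x1234, 0x56781234 */
		return 0;
	}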
ctx               255 drivers/gpu/drm/armada/armada_overlay.c 	struct drm_modeset_acquire_ctx *ctx)
ctx               269 drivers/gpu/drm/armada/armada_overlay.c 	state->acquire_ctx = ctx;
ctx               647 drivers/gpu/drm/ast/ast_mode.c 			      struct drm_modeset_acquire_ctx *ctx)
ctx               193 drivers/gpu/drm/bridge/sii9234.c static int sii9234_writeb(struct sii9234 *ctx, int id, int offset,
ctx               197 drivers/gpu/drm/bridge/sii9234.c 	struct i2c_client *client = ctx->client[id];
ctx               199 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->i2c_error)
ctx               200 drivers/gpu/drm/bridge/sii9234.c 		return ctx->i2c_error;
ctx               204 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "writeb: %4s[0x%02x] <- 0x%02x\n",
ctx               206 drivers/gpu/drm/bridge/sii9234.c 	ctx->i2c_error = ret;
ctx               211 drivers/gpu/drm/bridge/sii9234.c static int sii9234_writebm(struct sii9234 *ctx, int id, int offset,
ctx               215 drivers/gpu/drm/bridge/sii9234.c 	struct i2c_client *client = ctx->client[id];
ctx               217 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->i2c_error)
ctx               218 drivers/gpu/drm/bridge/sii9234.c 		return ctx->i2c_error;
ctx               222 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "writebm: %4s[0x%02x] <- 0x%02x\n",
ctx               224 drivers/gpu/drm/bridge/sii9234.c 		ctx->i2c_error = ret;
ctx               230 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "writebm: %4s[0x%02x] <- 0x%02x\n",
ctx               232 drivers/gpu/drm/bridge/sii9234.c 		ctx->i2c_error = ret;
ctx               240 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "writebm: %4s[0x%02x] <- 0x%02x\n",
ctx               242 drivers/gpu/drm/bridge/sii9234.c 		ctx->i2c_error = ret;
ctx               248 drivers/gpu/drm/bridge/sii9234.c static int sii9234_readb(struct sii9234 *ctx, int id, int offset)
ctx               251 drivers/gpu/drm/bridge/sii9234.c 	struct i2c_client *client = ctx->client[id];
ctx               253 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->i2c_error)
ctx               254 drivers/gpu/drm/bridge/sii9234.c 		return ctx->i2c_error;
ctx               258 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "readb: %4s[0x%02x]\n",
ctx               260 drivers/gpu/drm/bridge/sii9234.c 		ctx->i2c_error = ret;
ctx               266 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "readb: %4s[0x%02x]\n",
ctx               268 drivers/gpu/drm/bridge/sii9234.c 		ctx->i2c_error = ret;
ctx               274 drivers/gpu/drm/bridge/sii9234.c static int sii9234_clear_error(struct sii9234 *ctx)
ctx               276 drivers/gpu/drm/bridge/sii9234.c 	int ret = ctx->i2c_error;
ctx               278 drivers/gpu/drm/bridge/sii9234.c 	ctx->i2c_error = 0;
ctx               308 drivers/gpu/drm/bridge/sii9234.c static u8 sii9234_tmds_control(struct sii9234 *ctx, bool enable)
ctx               310 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_TMDS_CCTRL, enable ? ~0 : 0,
ctx               312 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_INT_CTRL_REG, enable ? ~0 : 0,
ctx               314 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
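[Note: the sii9234 accessors above use a sticky error: every writeb/readb bails out early once ctx->i2c_error is set, latches the first failure, and sii9234_clear_error() reports-and-resets it, so a whole batch of register writes (as in sii9234_tmds_control()) is checked exactly once. A user-space model of the idiom; the fake bus and all names are illustrative, not the driver's API:]

	struct dev_ctx { int error; };

	/* Fake bus write that always succeeds; a real one could fail. */
	static int bus_write(int reg, int val) { (void)reg; (void)val; return 0; }

	static void dev_writeb(struct dev_ctx *ctx, int reg, int val)
	{
		int ret;

		if (ctx->error)                 /* a previous call already failed */
			return;
		ret = bus_write(reg, val);
		if (ret < 0)
			ctx->error = ret;       /* latch only the first error */
	}

	static int dev_clear_error(struct dev_ctx *ctx)
	{
		int ret = ctx->error;

		ctx->error = 0;
		return ret;                     /* report and reset in one step */
	}

	int main(void)
	{
		struct dev_ctx ctx = { 0 };

		dev_writeb(&ctx, 0x07, 0xF2);   /* batch of writes... */
		dev_writeb(&ctx, 0x40, 0x03);
		return dev_clear_error(&ctx);   /* ...checked once at the end */
	}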
ctx               317 drivers/gpu/drm/bridge/sii9234.c static int sii9234_cbus_reset(struct sii9234 *ctx)
ctx               321 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_SRST, ~0, BIT_CBUS_RESET);
ctx               323 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_SRST, 0, BIT_CBUS_RESET);
ctx               330 drivers/gpu/drm/bridge/sii9234.c 		cbus_writeb(ctx, 0xE0 + i, 0xF2);
ctx               335 drivers/gpu/drm/bridge/sii9234.c 		cbus_writeb(ctx, 0xF0 + i, 0xF2);
ctx               338 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               342 drivers/gpu/drm/bridge/sii9234.c static int sii9234_cbus_init(struct sii9234 *ctx)
ctx               344 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x07, 0xF2);
ctx               345 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x40, 0x03);
ctx               346 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x42, 0x06);
ctx               347 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x36, 0x0C);
ctx               348 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x3D, 0xFD);
ctx               349 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x1C, 0x01);
ctx               350 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x1D, 0x0F);
ctx               351 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x44, 0x02);
ctx               353 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_DEV_STATE, 0x00);
ctx               354 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_MHL_VERSION,
ctx               356 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_CAT,
ctx               358 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_ADOPTER_ID_H, 0x01);
ctx               359 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_ADOPTER_ID_L, 0x41);
ctx               360 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_VID_LINK_MODE,
ctx               362 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_VIDEO_TYPE,
ctx               364 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_LOG_DEV_MAP,
ctx               366 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_BANDWIDTH, 0x0F);
ctx               367 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_FEATURE_FLAG,
ctx               370 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_DEVICE_ID_H, 0x0);
ctx               371 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_DEVICE_ID_L, 0x0);
ctx               372 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_SCRATCHPAD_SIZE,
ctx               374 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_INT_STAT_SIZE,
ctx               376 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_DEVCAP_OFFSET + MHL_DCAP_RESERVED, 0);
ctx               377 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, 0x31, 0x0C, 0x0C);
ctx               378 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x30, 0x01);
ctx               379 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, 0x3C, 0x30, 0x38);
ctx               380 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, 0x22, 0x0D, 0x0F);
ctx               381 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, 0x2E, 0x15, 0x15);
ctx               382 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_INTR1_ENABLE_REG, 0);
ctx               383 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_INTR2_ENABLE_REG, 0);
ctx               385 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               388 drivers/gpu/drm/bridge/sii9234.c static void force_usb_id_switch_open(struct sii9234 *ctx)
ctx               391 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL1_REG, 0, 0x01);
ctx               393 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL6_REG, ~0, USB_ID_OVR);
ctx               394 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL3_REG, ~0, 0x86);
ctx               396 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_INT_CTRL_REG, 0, 0x30);
ctx               399 drivers/gpu/drm/bridge/sii9234.c static void release_usb_id_switch_open(struct sii9234 *ctx)
ctx               403 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL6_REG, 0, USB_ID_OVR);
ctx               405 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL1_REG, ~0, 0x01);
ctx               408 drivers/gpu/drm/bridge/sii9234.c static int sii9234_power_init(struct sii9234 *ctx)
ctx               411 drivers/gpu/drm/bridge/sii9234.c 	tpi_writeb(ctx, TPI_DPD_REG, 0x3F);
ctx               413 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS_CLK_EN_REG, 0x01);
ctx               415 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS_CH_EN_REG, 0x15);
ctx               417 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, 0x08, 0x35);
ctx               418 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               421 drivers/gpu/drm/bridge/sii9234.c static int sii9234_hdmi_init(struct sii9234 *ctx)
ctx               423 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS0_CCTRL1_REG, 0xC1);
ctx               424 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_PLL_CALREFSEL_REG, 0x03);
ctx               425 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_PLL_VCOCAL_REG, 0x20);
ctx               426 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_EQ_DATA0_REG, 0x8A);
ctx               427 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_EQ_DATA1_REG, 0x6A);
ctx               428 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_EQ_DATA2_REG, 0xAA);
ctx               429 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_EQ_DATA3_REG, 0xCA);
ctx               430 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_EQ_DATA4_REG, 0xEA);
ctx               431 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS_ZONE_CTRL_REG, 0xA0);
ctx               432 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS_MODE_CTRL_REG, 0x00);
ctx               433 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_TMDS_CCTRL, 0x34);
ctx               434 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, 0x45, 0x44);
ctx               435 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, 0x31, 0x0A);
ctx               436 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, HDMI_RX_TMDS0_CCTRL1_REG, 0xC1);
ctx               438 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               441 drivers/gpu/drm/bridge/sii9234.c static int sii9234_mhl_tx_ctl_int(struct sii9234 *ctx)
ctx               443 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL1_REG, 0xD0);
ctx               444 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL2_REG, 0xFC);
ctx               445 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL4_REG, 0xEB);
ctx               446 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL7_REG, 0x0C);
ctx               448 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               451 drivers/gpu/drm/bridge/sii9234.c static int sii9234_reset(struct sii9234 *ctx)
ctx               455 drivers/gpu/drm/bridge/sii9234.c 	sii9234_clear_error(ctx);
ctx               457 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_power_init(ctx);
ctx               460 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_cbus_reset(ctx);
ctx               463 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_hdmi_init(ctx);
ctx               466 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_mhl_tx_ctl_int(ctx);
ctx               471 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, 0x2B, 0x01);
ctx               473 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL1_REG, 0x04, 0x06);
ctx               475 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL2_REG, (1 << 7) /* Reserved */
ctx               482 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL5_REG, 0x77);
ctx               483 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, CBUS_LINK_CONTROL_2_REG, ~0, MHL_INIT_TIMEOUT);
ctx               484 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL6_REG, 0xA0);
ctx               486 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL6_REG, BLOCK_RGND_INT |
ctx               489 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL8_REG, 0);
ctx               491 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL6_REG, ~0, USB_ID_OVR);
ctx               501 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL3_REG, ~0, 0x86);
ctx               506 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL4_REG, ~0, 0x8C);
ctx               508 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_INT_CTRL_REG, 0, 0x06);
ctx               513 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL6_REG, 0, USB_ID_OVR);
ctx               514 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL1_REG, 0x27);
ctx               516 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_clear_error(ctx);
ctx               519 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_cbus_init(ctx);
ctx               524 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, 0x05, 0x04);
ctx               526 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, 0x0D, 0x1C);
ctx               527 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_INTR4_ENABLE_REG,
ctx               530 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_INTR1_ENABLE_REG, 0x60);
ctx               533 drivers/gpu/drm/bridge/sii9234.c 	force_usb_id_switch_open(ctx);
ctx               534 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL4_REG, 0, 0xF0);
ctx               535 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL5_REG, 0, 0x03);
ctx               536 drivers/gpu/drm/bridge/sii9234.c 	release_usb_id_switch_open(ctx);
ctx               539 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_INT_CTRL_REG, 0, 1 << 5);
ctx               540 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_INT_CTRL_REG, ~0, 1 << 4);
ctx               542 drivers/gpu/drm/bridge/sii9234.c 	return sii9234_clear_error(ctx);
ctx               545 drivers/gpu/drm/bridge/sii9234.c static int sii9234_goto_d3(struct sii9234 *ctx)
ctx               549 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "sii9234: detection started d3\n");
ctx               551 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_reset(ctx);
ctx               555 drivers/gpu/drm/bridge/sii9234.c 	hdmi_writeb(ctx, 0x01, 0x03);
ctx               556 drivers/gpu/drm/bridge/sii9234.c 	tpi_writebm(ctx, TPI_DPD_REG, 0, 1);
ctx               558 drivers/gpu/drm/bridge/sii9234.c 	sii9234_clear_error(ctx);
ctx               560 drivers/gpu/drm/bridge/sii9234.c 	ctx->state = ST_D3;
ctx               564 drivers/gpu/drm/bridge/sii9234.c 	dev_err(ctx->dev, "%s failed\n", __func__);
ctx               568 drivers/gpu/drm/bridge/sii9234.c static int sii9234_hw_on(struct sii9234 *ctx)
ctx               570 drivers/gpu/drm/bridge/sii9234.c 	return regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               573 drivers/gpu/drm/bridge/sii9234.c static void sii9234_hw_off(struct sii9234 *ctx)
ctx               575 drivers/gpu/drm/bridge/sii9234.c 	gpiod_set_value(ctx->gpio_reset, 1);
ctx               577 drivers/gpu/drm/bridge/sii9234.c 	regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               580 drivers/gpu/drm/bridge/sii9234.c static void sii9234_hw_reset(struct sii9234 *ctx)
ctx               582 drivers/gpu/drm/bridge/sii9234.c 	gpiod_set_value(ctx->gpio_reset, 1);
ctx               584 drivers/gpu/drm/bridge/sii9234.c 	gpiod_set_value(ctx->gpio_reset, 0);
ctx               587 drivers/gpu/drm/bridge/sii9234.c static void sii9234_cable_in(struct sii9234 *ctx)
ctx               591 drivers/gpu/drm/bridge/sii9234.c 	mutex_lock(&ctx->lock);
ctx               592 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state != ST_OFF)
ctx               594 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_hw_on(ctx);
ctx               598 drivers/gpu/drm/bridge/sii9234.c 	sii9234_hw_reset(ctx);
ctx               599 drivers/gpu/drm/bridge/sii9234.c 	sii9234_goto_d3(ctx);
ctx               601 drivers/gpu/drm/bridge/sii9234.c 	enable_irq(to_i2c_client(ctx->dev)->irq);
ctx               604 drivers/gpu/drm/bridge/sii9234.c 	mutex_unlock(&ctx->lock);
ctx               607 drivers/gpu/drm/bridge/sii9234.c static void sii9234_cable_out(struct sii9234 *ctx)
ctx               609 drivers/gpu/drm/bridge/sii9234.c 	mutex_lock(&ctx->lock);
ctx               611 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state == ST_OFF)
ctx               614 drivers/gpu/drm/bridge/sii9234.c 	disable_irq(to_i2c_client(ctx->dev)->irq);
ctx               615 drivers/gpu/drm/bridge/sii9234.c 	tpi_writeb(ctx, TPI_DPD_REG, 0);
ctx               617 drivers/gpu/drm/bridge/sii9234.c 	sii9234_hw_off(ctx);
ctx               619 drivers/gpu/drm/bridge/sii9234.c 	ctx->state = ST_OFF;
ctx               622 drivers/gpu/drm/bridge/sii9234.c 	mutex_unlock(&ctx->lock);
ctx               625 drivers/gpu/drm/bridge/sii9234.c static enum sii9234_state sii9234_rgnd_ready_irq(struct sii9234 *ctx)
ctx               629 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state == ST_D3) {
ctx               632 drivers/gpu/drm/bridge/sii9234.c 		dev_dbg(ctx->dev, "RGND_READY_INT\n");
ctx               633 drivers/gpu/drm/bridge/sii9234.c 		sii9234_hw_reset(ctx);
ctx               635 drivers/gpu/drm/bridge/sii9234.c 		ret = sii9234_reset(ctx);
ctx               637 drivers/gpu/drm/bridge/sii9234.c 			dev_err(ctx->dev, "sii9234_reset() failed\n");
ctx               645 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state != ST_RGND_INIT)
ctx               648 drivers/gpu/drm/bridge/sii9234.c 	value = mhl_tx_readb(ctx, MHL_TX_STAT2_REG);
ctx               649 drivers/gpu/drm/bridge/sii9234.c 	if (sii9234_clear_error(ctx))
ctx               653 drivers/gpu/drm/bridge/sii9234.c 		dev_warn(ctx->dev, "RGND is not 1k\n");
ctx               656 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "RGND 1K!!\n");
ctx               657 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL4_REG, ~0, 0x8C);
ctx               658 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_DISC_CTRL5_REG, 0x77);
ctx               659 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL6_REG, ~0, 0x05);
ctx               660 drivers/gpu/drm/bridge/sii9234.c 	if (sii9234_clear_error(ctx))
ctx               667 drivers/gpu/drm/bridge/sii9234.c static enum sii9234_state sii9234_mhl_established(struct sii9234 *ctx)
ctx               669 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "mhl est interrupt\n");
ctx               672 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_MHLTX_CTL1_REG, 0x10);
ctx               674 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, 0x07, 0x32);
ctx               675 drivers/gpu/drm/bridge/sii9234.c 	cbus_writebm(ctx, 0x44, ~0, 1 << 1);
ctx               677 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writebm(ctx, MHL_TX_DISC_CTRL1_REG, ~0, 1);
ctx               678 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_INTR1_ENABLE_REG,
ctx               681 drivers/gpu/drm/bridge/sii9234.c 	if (sii9234_clear_error(ctx))
ctx               687 drivers/gpu/drm/bridge/sii9234.c static enum sii9234_state sii9234_hpd_change(struct sii9234 *ctx)
ctx               691 drivers/gpu/drm/bridge/sii9234.c 	value = cbus_readb(ctx, CBUS_MSC_REQ_ABORT_REASON_REG);
ctx               692 drivers/gpu/drm/bridge/sii9234.c 	if (sii9234_clear_error(ctx))
ctx               697 drivers/gpu/drm/bridge/sii9234.c 		sii9234_tmds_control(ctx, true);
ctx               700 drivers/gpu/drm/bridge/sii9234.c 		sii9234_tmds_control(ctx, false);
ctx               703 drivers/gpu/drm/bridge/sii9234.c 	return ctx->state;
ctx               706 drivers/gpu/drm/bridge/sii9234.c static enum sii9234_state sii9234_rsen_change(struct sii9234 *ctx)
ctx               711 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state != ST_RGND_1K) {
ctx               712 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "RSEN_HIGH without RGND_1K\n");
ctx               715 drivers/gpu/drm/bridge/sii9234.c 	value = mhl_tx_readb(ctx, MHL_TX_SYSSTAT_REG);
ctx               720 drivers/gpu/drm/bridge/sii9234.c 		dev_dbg(ctx->dev, "MHL cable connected.. RSEN High\n");
ctx               723 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "RSEN lost\n");
ctx               732 drivers/gpu/drm/bridge/sii9234.c 	value = mhl_tx_readb(ctx, MHL_TX_SYSSTAT_REG);
ctx               735 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "sys_stat: %x\n", value);
ctx               738 drivers/gpu/drm/bridge/sii9234.c 		dev_dbg(ctx->dev, "RSEN recovery\n");
ctx               741 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "RSEN Really LOW\n");
ctx               743 drivers/gpu/drm/bridge/sii9234.c 	sii9234_tmds_control(ctx, false);
ctx               744 drivers/gpu/drm/bridge/sii9234.c 	force_usb_id_switch_open(ctx);
ctx               745 drivers/gpu/drm/bridge/sii9234.c 	release_usb_id_switch_open(ctx);
ctx               752 drivers/gpu/drm/bridge/sii9234.c 	struct sii9234 *ctx = data;
ctx               757 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "%s\n", __func__);
ctx               759 drivers/gpu/drm/bridge/sii9234.c 	mutex_lock(&ctx->lock);
ctx               761 drivers/gpu/drm/bridge/sii9234.c 	intr1 = mhl_tx_readb(ctx, MHL_TX_INTR1_REG);
ctx               762 drivers/gpu/drm/bridge/sii9234.c 	intr4 = mhl_tx_readb(ctx, MHL_TX_INTR4_REG);
ctx               763 drivers/gpu/drm/bridge/sii9234.c 	intr1_en = mhl_tx_readb(ctx, MHL_TX_INTR1_ENABLE_REG);
ctx               764 drivers/gpu/drm/bridge/sii9234.c 	intr4_en = mhl_tx_readb(ctx, MHL_TX_INTR4_ENABLE_REG);
ctx               765 drivers/gpu/drm/bridge/sii9234.c 	cbus_intr1 = cbus_readb(ctx, CBUS_INT_STATUS_1_REG);
ctx               766 drivers/gpu/drm/bridge/sii9234.c 	cbus_intr2 = cbus_readb(ctx, CBUS_INT_STATUS_2_REG);
ctx               768 drivers/gpu/drm/bridge/sii9234.c 	if (sii9234_clear_error(ctx))
ctx               771 drivers/gpu/drm/bridge/sii9234.c 	dev_dbg(ctx->dev, "irq %02x/%02x %02x/%02x %02x/%02x\n",
ctx               775 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = sii9234_rgnd_ready_irq(ctx);
ctx               777 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = sii9234_rsen_change(ctx);
ctx               779 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = sii9234_mhl_established(ctx);
ctx               781 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = sii9234_hpd_change(ctx);
ctx               783 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = ST_FAILURE;
ctx               785 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = ST_FAILURE_DISCOVERY;
ctx               789 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_INTR1_REG, intr1);
ctx               790 drivers/gpu/drm/bridge/sii9234.c 	mhl_tx_writeb(ctx, MHL_TX_INTR4_REG, intr4);
ctx               791 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_MHL_STATUS_REG_0, 0xFF);
ctx               792 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_MHL_STATUS_REG_1, 0xFF);
ctx               793 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_INT_STATUS_1_REG, cbus_intr1);
ctx               794 drivers/gpu/drm/bridge/sii9234.c 	cbus_writeb(ctx, CBUS_INT_STATUS_2_REG, cbus_intr2);
ctx               796 drivers/gpu/drm/bridge/sii9234.c 	sii9234_clear_error(ctx);
ctx               798 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state == ST_FAILURE) {
ctx               799 drivers/gpu/drm/bridge/sii9234.c 		dev_dbg(ctx->dev, "try to reset after failure\n");
ctx               800 drivers/gpu/drm/bridge/sii9234.c 		sii9234_hw_reset(ctx);
ctx               801 drivers/gpu/drm/bridge/sii9234.c 		sii9234_goto_d3(ctx);
ctx               804 drivers/gpu/drm/bridge/sii9234.c 	if (ctx->state == ST_FAILURE_DISCOVERY) {
ctx               805 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "discovery failed, no power for MHL?\n");
ctx               806 drivers/gpu/drm/bridge/sii9234.c 		tpi_writebm(ctx, TPI_DPD_REG, 0, 1);
ctx               807 drivers/gpu/drm/bridge/sii9234.c 		ctx->state = ST_D3;
ctx               810 drivers/gpu/drm/bridge/sii9234.c 	mutex_unlock(&ctx->lock);
ctx               815 drivers/gpu/drm/bridge/sii9234.c static int sii9234_init_resources(struct sii9234 *ctx,
ctx               821 drivers/gpu/drm/bridge/sii9234.c 	if (!ctx->dev->of_node) {
ctx               822 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "not DT device\n");
ctx               826 drivers/gpu/drm/bridge/sii9234.c 	ctx->gpio_reset = devm_gpiod_get(ctx->dev, "reset", GPIOD_OUT_LOW);
ctx               827 drivers/gpu/drm/bridge/sii9234.c 	if (IS_ERR(ctx->gpio_reset)) {
ctx               828 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "failed to get reset gpio from DT\n");
ctx               829 drivers/gpu/drm/bridge/sii9234.c 		return PTR_ERR(ctx->gpio_reset);
ctx               832 drivers/gpu/drm/bridge/sii9234.c 	ctx->supplies[0].supply = "avcc12";
ctx               833 drivers/gpu/drm/bridge/sii9234.c 	ctx->supplies[1].supply = "avcc33";
ctx               834 drivers/gpu/drm/bridge/sii9234.c 	ctx->supplies[2].supply = "iovcc18";
ctx               835 drivers/gpu/drm/bridge/sii9234.c 	ctx->supplies[3].supply = "cvcc12";
ctx               836 drivers/gpu/drm/bridge/sii9234.c 	ret = devm_regulator_bulk_get(ctx->dev, 4, ctx->supplies);
ctx               838 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "regulator_bulk failed\n");
ctx               842 drivers/gpu/drm/bridge/sii9234.c 	ctx->client[I2C_MHL] = client;
ctx               844 drivers/gpu/drm/bridge/sii9234.c 	ctx->client[I2C_TPI] = i2c_new_dummy(adapter, I2C_TPI_ADDR);
ctx               845 drivers/gpu/drm/bridge/sii9234.c 	if (!ctx->client[I2C_TPI]) {
ctx               846 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "failed to create TPI client\n");
ctx               850 drivers/gpu/drm/bridge/sii9234.c 	ctx->client[I2C_HDMI] = i2c_new_dummy(adapter, I2C_HDMI_ADDR);
ctx               851 drivers/gpu/drm/bridge/sii9234.c 	if (!ctx->client[I2C_HDMI]) {
ctx               852 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "failed to create HDMI RX client\n");
ctx               856 drivers/gpu/drm/bridge/sii9234.c 	ctx->client[I2C_CBUS] = i2c_new_dummy(adapter, I2C_CBUS_ADDR);
ctx               857 drivers/gpu/drm/bridge/sii9234.c 	if (!ctx->client[I2C_CBUS]) {
ctx               858 drivers/gpu/drm/bridge/sii9234.c 		dev_err(ctx->dev, "failed to create CBUS client\n");
ctx               865 drivers/gpu/drm/bridge/sii9234.c 	i2c_unregister_device(ctx->client[I2C_HDMI]);
ctx               867 drivers/gpu/drm/bridge/sii9234.c 	i2c_unregister_device(ctx->client[I2C_TPI]);
ctx               872 drivers/gpu/drm/bridge/sii9234.c static void sii9234_deinit_resources(struct sii9234 *ctx)
ctx               874 drivers/gpu/drm/bridge/sii9234.c 	i2c_unregister_device(ctx->client[I2C_CBUS]);
ctx               875 drivers/gpu/drm/bridge/sii9234.c 	i2c_unregister_device(ctx->client[I2C_HDMI]);
ctx               876 drivers/gpu/drm/bridge/sii9234.c 	i2c_unregister_device(ctx->client[I2C_TPI]);
ctx               901 drivers/gpu/drm/bridge/sii9234.c 	struct sii9234 *ctx;
ctx               905 drivers/gpu/drm/bridge/sii9234.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               906 drivers/gpu/drm/bridge/sii9234.c 	if (!ctx)
ctx               909 drivers/gpu/drm/bridge/sii9234.c 	ctx->dev = dev;
ctx               910 drivers/gpu/drm/bridge/sii9234.c 	mutex_init(&ctx->lock);
ctx               926 drivers/gpu/drm/bridge/sii9234.c 					"sii9234", ctx);
ctx               932 drivers/gpu/drm/bridge/sii9234.c 	ret = sii9234_init_resources(ctx, client);
ctx               936 drivers/gpu/drm/bridge/sii9234.c 	i2c_set_clientdata(client, ctx);
ctx               938 drivers/gpu/drm/bridge/sii9234.c 	ctx->bridge.funcs = &sii9234_bridge_funcs;
ctx               939 drivers/gpu/drm/bridge/sii9234.c 	ctx->bridge.of_node = dev->of_node;
ctx               940 drivers/gpu/drm/bridge/sii9234.c 	drm_bridge_add(&ctx->bridge);
ctx               942 drivers/gpu/drm/bridge/sii9234.c 	sii9234_cable_in(ctx);
ctx               949 drivers/gpu/drm/bridge/sii9234.c 	struct sii9234 *ctx = i2c_get_clientdata(client);
ctx               951 drivers/gpu/drm/bridge/sii9234.c 	sii9234_cable_out(ctx);
ctx               952 drivers/gpu/drm/bridge/sii9234.c 	drm_bridge_remove(&ctx->bridge);
ctx               953 drivers/gpu/drm/bridge/sii9234.c 	sii9234_deinit_resources(ctx);
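[Note: the probe/remove fragments above round-trip the driver context through the client: probe allocates ctx and stores it with i2c_set_clientdata(), remove fetches it back with i2c_get_clientdata(). A toy model of that attach/detach, assuming a plain struct standing in for the i2c client (devm_kzalloc() would make the explicit free unnecessary):]

	#include <stdlib.h>

	struct fake_client { void *drvdata; };
	struct drv_ctx     { int state; };

	static void  set_clientdata(struct fake_client *c, void *d) { c->drvdata = d; }
	static void *get_clientdata(struct fake_client *c)          { return c->drvdata; }

	static int drv_probe(struct fake_client *client)
	{
		struct drv_ctx *ctx = calloc(1, sizeof(*ctx));

		if (!ctx)
			return -1;
		set_clientdata(client, ctx);    /* stash ctx for later callbacks */
		return 0;
	}

	static void drv_remove(struct fake_client *client)
	{
		struct drv_ctx *ctx = get_clientdata(client);

		free(ctx);                      /* devm would handle this itself */
	}

	int main(void)
	{
		struct fake_client client = { 0 };

		if (drv_probe(&client) == 0)
			drv_remove(&client);
		return 0;
	}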
ctx               105 drivers/gpu/drm/bridge/sil-sii8620.c typedef void (*sii8620_mt_msg_cb)(struct sii8620 *ctx,
ctx               108 drivers/gpu/drm/bridge/sil-sii8620.c typedef void (*sii8620_cb)(struct sii8620 *ctx, int ret);
ctx               130 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_fetch_edid(struct sii8620 *ctx);
ctx               131 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_upstream_edid(struct sii8620 *ctx);
ctx               132 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_enable_hpd(struct sii8620 *ctx);
ctx               133 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mhl_disconnected(struct sii8620 *ctx);
ctx               134 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_disconnect(struct sii8620 *ctx);
ctx               136 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_clear_error(struct sii8620 *ctx)
ctx               138 drivers/gpu/drm/bridge/sil-sii8620.c 	int ret = ctx->error;
ctx               140 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->error = 0;
ctx               144 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_read_buf(struct sii8620 *ctx, u16 addr, u8 *buf, int len)
ctx               146 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx               165 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->error)
ctx               174 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = ret < 0 ? ret : -EIO;
ctx               178 drivers/gpu/drm/bridge/sil-sii8620.c static u8 sii8620_readb(struct sii8620 *ctx, u16 addr)
ctx               182 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, addr, &ret, 1);
ctx               186 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_write_buf(struct sii8620 *ctx, u16 addr, const u8 *buf,
ctx               189 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx               199 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->error)
ctx               205 drivers/gpu/drm/bridge/sil-sii8620.c 			ctx->error = -ENOMEM;
ctx               222 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = ret ?: -EIO;
ctx               229 drivers/gpu/drm/bridge/sil-sii8620.c #define sii8620_write(ctx, addr, arr...) \
ctx               232 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, addr, d, ARRAY_SIZE(d)); \
ctx               235 drivers/gpu/drm/bridge/sil-sii8620.c static void __sii8620_write_seq(struct sii8620 *ctx, const u16 *seq, int len)
ctx               240 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, seq[i], seq[i + 1]);
ctx               243 drivers/gpu/drm/bridge/sil-sii8620.c #define sii8620_write_seq(ctx, seq...) \
ctx               246 drivers/gpu/drm/bridge/sil-sii8620.c 	__sii8620_write_seq(ctx, d, ARRAY_SIZE(d)); \
ctx               249 drivers/gpu/drm/bridge/sil-sii8620.c #define sii8620_write_seq_static(ctx, seq...) \
ctx               252 drivers/gpu/drm/bridge/sil-sii8620.c 	__sii8620_write_seq(ctx, d, ARRAY_SIZE(d)); \
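[Note: sii8620_write_seq() and sii8620_write_seq_static() above wrap their arguments in a u16 array of (address, value) pairs, then hand the array to __sii8620_write_seq(), which walks it two entries at a time. A portable sketch of the same idiom, assuming standard __VA_ARGS__ instead of the kernel's GNU-style named 'seq...'; write_reg and the sample addresses are made up:]

	#include <stdint.h>
	#include <stdio.h>

	static void write_reg(uint16_t addr, uint16_t val)
	{
		printf("reg[0x%04x] <- 0x%02x\n", addr, val);
	}

	static void write_seq_buf(const uint16_t *seq, int len)
	{
		for (int i = 0; i < len; i += 2)        /* entries come in pairs */
			write_reg(seq[i], seq[i + 1]);  /* (address, value) */
	}

	#define write_seq(...) \
	do { \
		const uint16_t d[] = { __VA_ARGS__ }; \
		write_seq_buf(d, (int)(sizeof(d) / sizeof(d[0]))); \
	} while (0)

	int main(void)
	{
		write_seq(0x0010, 0x01,   /* hypothetical enable register */
			  0x0011, 0xA0);  /* hypothetical mode register   */
		return 0;
	}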
ctx               255 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_setbits(struct sii8620 *ctx, u16 addr, u8 mask, u8 val)
ctx               257 drivers/gpu/drm/bridge/sil-sii8620.c 	val = (val & mask) | (sii8620_readb(ctx, addr) & ~mask);
ctx               258 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, addr, val);
ctx               261 drivers/gpu/drm/bridge/sil-sii8620.c static inline bool sii8620_is_mhl3(struct sii8620 *ctx)
ctx               263 drivers/gpu/drm/bridge/sil-sii8620.c 	return ctx->mode >= CM_MHL3;
ctx               266 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_cleanup(struct sii8620 *ctx)
ctx               270 drivers/gpu/drm/bridge/sil-sii8620.c 	list_for_each_entry_safe(msg, n, &ctx->mt_queue, node) {
ctx               274 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->mt_state = MT_STATE_READY;
ctx               277 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_work(struct sii8620 *ctx)
ctx               281 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->error)
ctx               283 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mt_state == MT_STATE_BUSY || list_empty(&ctx->mt_queue))
ctx               286 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mt_state == MT_STATE_DONE) {
ctx               287 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mt_state = MT_STATE_READY;
ctx               288 drivers/gpu/drm/bridge/sil-sii8620.c 		msg = list_first_entry(&ctx->mt_queue, struct sii8620_mt_msg,
ctx               292 drivers/gpu/drm/bridge/sil-sii8620.c 			msg->recv(ctx, msg);
ctx               294 drivers/gpu/drm/bridge/sil-sii8620.c 			msg->continuation(ctx, msg->ret);
ctx               298 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mt_state != MT_STATE_READY || list_empty(&ctx->mt_queue))
ctx               301 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->mt_state = MT_STATE_BUSY;
ctx               302 drivers/gpu/drm/bridge/sil-sii8620.c 	msg = list_first_entry(&ctx->mt_queue, struct sii8620_mt_msg, node);
ctx               304 drivers/gpu/drm/bridge/sil-sii8620.c 		msg->send(ctx, msg);
ctx               307 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_enable_gen2_write_burst(struct sii8620 *ctx)
ctx               311 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->gen2_write_burst)
ctx               314 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mode >= CM_MHL1)
ctx               317 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx               321 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->gen2_write_burst = 1;
ctx               324 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_disable_gen2_write_burst(struct sii8620 *ctx)
ctx               326 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx->gen2_write_burst)
ctx               329 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               333 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->gen2_write_burst = 0;
ctx               336 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_start_gen2_write_burst(struct sii8620 *ctx)
ctx               338 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               347 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_enable_gen2_write_burst(ctx);
ctx               350 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_msc_cmd_send(struct sii8620 *ctx,
ctx               356 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_enable_gen2_write_burst(ctx);
ctx               358 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_disable_gen2_write_burst(ctx);
ctx               363 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_MSC_CMD_OR_OFFSET, msg->reg + 1, 2);
ctx               364 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_MSC_COMMAND_START,
ctx               368 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_MSC_CMD_OR_OFFSET, msg->reg, 3);
ctx               369 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_MSC_COMMAND_START,
ctx               374 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_MSC_CMD_OR_OFFSET, msg->reg[1]);
ctx               375 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_MSC_COMMAND_START,
ctx               379 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "%s: command %#x not supported\n", __func__,
ctx               384 drivers/gpu/drm/bridge/sil-sii8620.c static struct sii8620_mt_msg *sii8620_mt_msg_new(struct sii8620 *ctx)
ctx               389 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -ENOMEM;
ctx               391 drivers/gpu/drm/bridge/sil-sii8620.c 		list_add_tail(&msg->node, &ctx->mt_queue);
ctx               396 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_set_cont(struct sii8620 *ctx, sii8620_cb cont)
ctx               400 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->error)
ctx               403 drivers/gpu/drm/bridge/sil-sii8620.c 	if (list_empty(&ctx->mt_queue)) {
ctx               404 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -EINVAL;
ctx               407 drivers/gpu/drm/bridge/sil-sii8620.c 	msg = list_last_entry(&ctx->mt_queue, struct sii8620_mt_msg, node);
ctx               411 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_msc_cmd(struct sii8620 *ctx, u8 cmd, u8 arg1, u8 arg2)
ctx               413 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620_mt_msg *msg = sii8620_mt_msg_new(ctx);
ctx               424 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_write_stat(struct sii8620 *ctx, u8 reg, u8 val)
ctx               426 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_cmd(ctx, MHL_WRITE_STAT, reg, val);
ctx               429 drivers/gpu/drm/bridge/sil-sii8620.c static inline void sii8620_mt_set_int(struct sii8620 *ctx, u8 irq, u8 mask)
ctx               431 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_cmd(ctx, MHL_SET_INT, irq, mask);
ctx               434 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_msc_msg(struct sii8620 *ctx, u8 cmd, u8 data)
ctx               436 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_cmd(ctx, MHL_MSC_MSG, cmd, data);
ctx               439 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_rap(struct sii8620 *ctx, u8 code)
ctx               441 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_msg(ctx, MHL_MSC_MSG_RAP, code);
ctx               444 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_rcpk(struct sii8620 *ctx, u8 code)
ctx               446 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_msg(ctx, MHL_MSC_MSG_RCPK, code);
ctx               449 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_rcpe(struct sii8620 *ctx, u8 code)
ctx               451 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_msc_msg(ctx, MHL_MSC_MSG_RCPE, code);
ctx               454 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_read_devcap_send(struct sii8620 *ctx,
ctx               464 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx               480 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_identify_sink(struct sii8620 *ctx)
ctx               489 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx               491 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx->sink_detected || !ctx->devcap_read)
ctx               494 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_fetch_edid(ctx);
ctx               495 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx->edid) {
ctx               496 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "Cannot fetch EDID\n");
ctx               497 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mhl_disconnected(ctx);
ctx               500 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_upstream_edid(ctx);
ctx               502 drivers/gpu/drm/bridge/sil-sii8620.c 	if (drm_detect_hdmi_monitor(ctx->edid))
ctx               503 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->sink_type = SINK_HDMI;
ctx               505 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->sink_type = SINK_DVI;
ctx               507 drivers/gpu/drm/bridge/sil-sii8620.c 	drm_edid_get_monitor_name(ctx->edid, sink_name, ARRAY_SIZE(sink_name));
ctx               510 drivers/gpu/drm/bridge/sil-sii8620.c 		 sink_str[ctx->sink_type], sink_name);
ctx               513 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mr_devcap(struct sii8620 *ctx)
ctx               516 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx               518 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_EDID_FIFO_RD_DATA, dcap, MHL_DCAP_SIZE);
ctx               519 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->error < 0)
ctx               527 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_update_array(ctx->devcap, dcap, MHL_DCAP_SIZE);
ctx               528 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->devcap_read = true;
ctx               529 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_identify_sink(ctx);
ctx               532 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mr_xdevcap(struct sii8620 *ctx)
ctx               534 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_EDID_FIFO_RD_DATA, ctx->xdevcap,
ctx               538 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_read_devcap_recv(struct sii8620 *ctx,
ctx               548 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx               556 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mr_xdevcap(ctx);
ctx               558 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mr_devcap(ctx);
ctx               561 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_read_devcap(struct sii8620 *ctx, bool xdevcap)
ctx               563 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620_mt_msg *msg = sii8620_mt_msg_new(ctx);
ctx               573 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_read_devcap_reg_recv(struct sii8620 *ctx,
ctx               579 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->xdevcap[reg] = msg->ret;
ctx               581 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->devcap[reg] = msg->ret;
ctx               584 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mt_read_devcap_reg(struct sii8620 *ctx, u8 reg)
ctx               586 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620_mt_msg *msg = sii8620_mt_msg_new(ctx);
ctx               597 drivers/gpu/drm/bridge/sil-sii8620.c static inline void sii8620_mt_read_xdevcap_reg(struct sii8620 *ctx, u8 reg)
ctx               599 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_read_devcap_reg(ctx, reg | 0x80);
ctx               602 drivers/gpu/drm/bridge/sil-sii8620.c static void *sii8620_burst_get_tx_buf(struct sii8620 *ctx, int len)
ctx               604 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *buf = &ctx->burst.tx_buf[ctx->burst.tx_count];
ctx               607 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->burst.tx_count + size > ARRAY_SIZE(ctx->burst.tx_buf)) {
ctx               608 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "TX-BLK buffer exhausted\n");
ctx               609 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -EINVAL;
ctx               613 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->burst.tx_count += size;
ctx               619 drivers/gpu/drm/bridge/sil-sii8620.c static u8 *sii8620_burst_get_rx_buf(struct sii8620 *ctx, int len)
ctx               621 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *buf = &ctx->burst.rx_buf[ctx->burst.rx_count];
ctx               624 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->burst.tx_count + size > ARRAY_SIZE(ctx->burst.tx_buf)) {
ctx               625 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "RX-BLK buffer exhausted\n");
ctx               626 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -EINVAL;
ctx               630 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->burst.rx_count += size;
ctx               636 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_burst_send(struct sii8620 *ctx)
ctx               638 drivers/gpu/drm/bridge/sil-sii8620.c 	int tx_left = ctx->burst.tx_count;
ctx               639 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *d = ctx->burst.tx_buf;
ctx               644 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->burst.r_count + len > ctx->burst.r_size)
ctx               646 drivers/gpu/drm/bridge/sil-sii8620.c 		d[0] = min(ctx->burst.rx_ack, 255);
ctx               647 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack -= d[0];
ctx               648 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_EMSC_XMIT_WRITE_PORT, d, len);
ctx               649 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.r_count += len;
ctx               654 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->burst.tx_count = tx_left;
ctx               656 drivers/gpu/drm/bridge/sil-sii8620.c 	while (ctx->burst.rx_ack > 0) {
ctx               657 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 b[2] = { min(ctx->burst.rx_ack, 255), 0 };
ctx               659 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->burst.r_count + 2 > ctx->burst.r_size)
ctx               661 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack -= b[0];
ctx               662 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_EMSC_XMIT_WRITE_PORT, b, 2);
ctx               663 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.r_count += 2;
ctx               667 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_burst_receive(struct sii8620 *ctx)
ctx               672 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_EMSCRFIFOBCNTL, buf, 2);
ctx               677 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_read_buf(ctx, REG_EMSC_RCV_READ_PORT, buf, len);
ctx               679 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack += len - 1;
ctx               680 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.r_count -= buf[1];
ctx               681 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->burst.r_count < 0)
ctx               682 drivers/gpu/drm/bridge/sil-sii8620.c 			ctx->burst.r_count = 0;
ctx               688 drivers/gpu/drm/bridge/sil-sii8620.c 		d = sii8620_burst_get_rx_buf(ctx, len);
ctx               691 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_read_buf(ctx, REG_EMSC_RCV_READ_PORT, d, len);
ctx               693 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack += len;
ctx               697 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_burst_tx_rbuf_info(struct sii8620 *ctx, int size)
ctx               700 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_burst_get_tx_buf(ctx, sizeof(*d));
ctx               726 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_burst_tx_bits_per_pixel_fmt(struct sii8620 *ctx, u8 fmt)
ctx               731 drivers/gpu/drm/bridge/sil-sii8620.c 	d = sii8620_burst_get_tx_buf(ctx, size);
ctx               742 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_burst_rx_all(struct sii8620 *ctx)
ctx               744 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 *d = ctx->burst.rx_buf;
ctx               745 drivers/gpu/drm/bridge/sil-sii8620.c 	int count = ctx->burst.rx_count;
ctx               753 drivers/gpu/drm/bridge/sil-sii8620.c 			ctx->burst.r_size = get_unaligned_le16(&d[2]);
ctx               761 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->burst.rx_count = 0;
ctx               764 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_fetch_edid(struct sii8620 *ctx)
ctx               772 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_readb(ctx, REG_CBUS_STATUS);
ctx               773 drivers/gpu/drm/bridge/sil-sii8620.c 	lm_ddc = sii8620_readb(ctx, REG_LM_DDC);
ctx               774 drivers/gpu/drm/bridge/sil-sii8620.c 	ddc_cmd = sii8620_readb(ctx, REG_DDC_CMD);
ctx               776 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx               785 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 ddc_stat = sii8620_readb(ctx, REG_DDC_STATUS);
ctx               789 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_DDC_STATUS,
ctx               793 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_DDC_ADDR, 0x50 << 1);
ctx               797 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -ENOMEM;
ctx               803 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_readb(ctx, REG_DDC_STATUS);
ctx               804 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq(ctx,
ctx               809 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq(ctx,
ctx               820 drivers/gpu/drm/bridge/sil-sii8620.c 			cbus = sii8620_readb(ctx, REG_CBUS_STATUS);
ctx               827 drivers/gpu/drm/bridge/sil-sii8620.c 				if (sii8620_readb(ctx, REG_DDC_DOUT_CNT)
ctx               831 drivers/gpu/drm/bridge/sil-sii8620.c 				int3 = sii8620_readb(ctx, REG_INTR3);
ctx               834 drivers/gpu/drm/bridge/sil-sii8620.c 				ctx->error = -ETIMEDOUT;
ctx               835 drivers/gpu/drm/bridge/sil-sii8620.c 				dev_err(ctx->dev, "timeout during EDID read\n");
ctx               843 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_read_buf(ctx, REG_DDC_DATA, edid + fetched, FETCH_SIZE);
ctx               854 drivers/gpu/drm/bridge/sil-sii8620.c 					ctx->error = -ENOMEM;
ctx               862 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx               868 drivers/gpu/drm/bridge/sil-sii8620.c 	kfree(ctx->edid);
ctx               869 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->edid = (struct edid *)edid;
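
sii8620_fetch_edid above reads the EDID through the DDC FIFO in FETCH_SIZE chunks and reallocates once the base block announces extension blocks (byte 126). A compilable sketch of just that chunking/realloc logic follows, assuming 128-byte EDID blocks; read_ddc() is a faked stand-in for the hardware read, and fetch_edid is a hypothetical name.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define EDID_LENGTH 128	/* one EDID block */
#define FETCH_SIZE   16	/* FIFO chunk size, as in the driver */

/* Stand-in for the DDC FIFO read; real code talks to hardware. */
static void read_ddc(unsigned char *dst, int len)
{
	memset(dst, 0, len);
}

/* Read the base block in chunks; once byte 126 announces extension
 * blocks, grow the buffer and keep reading. */
static unsigned char *fetch_edid(int *out_len)
{
	int edid_len = EDID_LENGTH, fetched = 0;
	unsigned char *edid = malloc(edid_len);

	if (!edid)
		return NULL;
	while (fetched < edid_len) {
		read_ddc(edid + fetched, FETCH_SIZE);
		fetched += FETCH_SIZE;
		if (fetched == EDID_LENGTH && edid[126] > 0) {
			int new_len = edid_len + edid[126] * EDID_LENGTH;
			unsigned char *new_edid = realloc(edid, new_len);

			if (!new_edid) {
				free(edid);
				return NULL;
			}
			edid = new_edid;
			edid_len = new_len;
		}
	}
	*out_len = edid_len;
	return edid;
}

int main(void)
{
	int len;
	unsigned char *edid = fetch_edid(&len);

	printf("fetched %d bytes\n", edid ? len : -1);
	free(edid);
	return 0;
}
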
ctx               872 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_upstream_edid(struct sii8620 *ctx)
ctx               874 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_DPD, BIT_DPD_PDNRX12 | BIT_DPD_PDIDCK_N
ctx               877 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               884 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_RX_HDMI_CLR_BUFFER,
ctx               887 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               893 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_EDID_FIFO_WR_DATA, (u8 *)ctx->edid,
ctx               894 drivers/gpu/drm/bridge/sil-sii8620.c 			  (ctx->edid->extensions + 1) * EDID_LENGTH);
ctx               896 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               905 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_xtal_set_rate(struct sii8620 *ctx)
ctx               918 drivers/gpu/drm/bridge/sil-sii8620.c 	unsigned long rate = clk_get_rate(ctx->clk_xtal) / 1000;
ctx               926 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "xtal clock rate(%lukHz) not supported, setting MHL for %ukHz.\n",
ctx               929 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_DIV_CTL_MAIN, rates[i].div);
ctx               930 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_HDCP2X_TP1, rates[i].tp1);
ctx               933 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_hw_on(struct sii8620 *ctx)
ctx               937 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               942 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = clk_prepare_enable(ctx->clk_xtal);
ctx               947 drivers/gpu/drm/bridge/sil-sii8620.c 	gpiod_set_value(ctx->gpio_reset, 0);
ctx               953 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_hw_off(struct sii8620 *ctx)
ctx               955 drivers/gpu/drm/bridge/sil-sii8620.c 	clk_disable_unprepare(ctx->clk_xtal);
ctx               956 drivers/gpu/drm/bridge/sil-sii8620.c 	gpiod_set_value(ctx->gpio_reset, 1);
ctx               957 drivers/gpu/drm/bridge/sil-sii8620.c 	return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
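
sii8620_hw_on/_hw_off above order the power sequence: regulators first, then the xtal clock, then reset release, reversed on the way down. A hedged kernel-style sketch of the same ordering, using only calls visible above plus usleep_range; struct demo_hw and its fields are hypothetical, and the unwind on clock failure is an addition of this sketch, not necessarily the driver's behavior.

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/gpio/consumer.h>
#include <linux/kernel.h>
#include <linux/regulator/consumer.h>

struct demo_hw {
	struct regulator_bulk_data supplies[2];
	struct clk *clk_xtal;
	struct gpio_desc *gpio_reset;	/* active high */
};

/* Power up: rails first, then clock, then release reset. */
static int demo_hw_on(struct demo_hw *hw)
{
	int ret;

	ret = regulator_bulk_enable(ARRAY_SIZE(hw->supplies), hw->supplies);
	if (ret)
		return ret;
	ret = clk_prepare_enable(hw->clk_xtal);
	if (ret) {
		regulator_bulk_disable(ARRAY_SIZE(hw->supplies), hw->supplies);
		return ret;
	}
	usleep_range(10000, 20000);	/* let the clock settle */
	gpiod_set_value(hw->gpio_reset, 0);
	return 0;
}

/* Power down in strict reverse order. */
static int demo_hw_off(struct demo_hw *hw)
{
	clk_disable_unprepare(hw->clk_xtal);
	gpiod_set_value(hw->gpio_reset, 1);
	return regulator_bulk_disable(ARRAY_SIZE(hw->supplies), hw->supplies);
}
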
ctx               960 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_cbus_reset(struct sii8620 *ctx)
ctx               962 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_PWD_SRST, BIT_PWD_SRST_CBUS_RST
ctx               965 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_PWD_SRST, BIT_PWD_SRST_CBUS_RST_SW_EN);
ctx               968 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_auto_zone(struct sii8620 *ctx)
ctx               970 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mode != CM_MHL1) {
ctx               971 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx               978 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx               986 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_stop_video(struct sii8620 *ctx)
ctx               990 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx               997 drivers/gpu/drm/bridge/sil-sii8620.c 	switch (ctx->sink_type) {
ctx              1010 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_TPI_SC, val);
ctx              1013 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_format(struct sii8620 *ctx)
ctx              1017 drivers/gpu/drm/bridge/sil-sii8620.c 	if (sii8620_is_mhl3(ctx)) {
ctx              1018 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_setbits(ctx, REG_M3_P0CTRL,
ctx              1020 drivers/gpu/drm/bridge/sil-sii8620.c 				ctx->use_packed_pixel ? ~0 : 0);
ctx              1022 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->use_packed_pixel) {
ctx              1023 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write_seq_static(ctx,
ctx              1029 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write_seq_static(ctx,
ctx              1037 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->use_packed_pixel)
ctx              1042 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq(ctx,
ctx              1095 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_infoframes(struct sii8620 *ctx,
ctx              1105 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->use_packed_pixel)
ctx              1111 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_buf(ctx, REG_TPI_AVI_CHSUM, buf + 3, ret - 3);
ctx              1113 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!sii8620_is_mhl3(ctx) || !ctx->use_packed_pixel) {
ctx              1114 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_TPI_SC,
ctx              1116 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_PKT_FILTER_0,
ctx              1124 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_PKT_FILTER_0,
ctx              1133 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_TPI_INFO_FSEL, BIT_TPI_INFO_FSEL_EN
ctx              1138 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_TPI_INFO_B0, buf, ret);
ctx              1141 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_start_video(struct sii8620 *ctx)
ctx              1144 drivers/gpu/drm/bridge/sil-sii8620.c 		&ctx->bridge.encoder->crtc->state->adjusted_mode;
ctx              1146 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!sii8620_is_mhl3(ctx))
ctx              1147 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_stop_video(ctx);
ctx              1149 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->sink_type == SINK_DVI && !sii8620_is_mhl3(ctx)) {
ctx              1150 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_RX_HDMI_CTRL2,
ctx              1152 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_TPI_SC, 0);
ctx              1156 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1161 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_format(ctx);
ctx              1163 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!sii8620_is_mhl3(ctx)) {
ctx              1166 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->use_packed_pixel)
ctx              1171 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mt_write_stat(ctx, MHL_DST_REG(LINK_MODE), link_mode);
ctx              1172 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_set_auto_zone(ctx);
ctx              1188 drivers/gpu/drm/bridge/sil-sii8620.c 		int clk = mode->clock * (ctx->use_packed_pixel ? 2 : 3);
ctx              1198 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_burst_tx_bits_per_pixel_fmt(ctx, ctx->use_packed_pixel);
ctx              1199 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_burst_send(ctx);
ctx              1200 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq(ctx,
ctx              1203 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_setbits(ctx, REG_M3_P0CTRL,
ctx              1206 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_setbits(ctx, REG_M3_POSTM, MSK_M3_POSTM_RRP_DECODE,
ctx              1208 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1213 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mt_write_stat(ctx, MHL_XDS_REG(AVLINK_MODE_CONTROL),
ctx              1217 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_infoframes(ctx, mode);
ctx              1220 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_disable_hpd(struct sii8620 *ctx)
ctx              1222 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_EDID_CTRL, BIT_EDID_CTRL_EDID_PRIME_VALID, 0);
ctx              1223 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1229 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_enable_hpd(struct sii8620 *ctx)
ctx              1231 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_TMDS_CSTAT_P3,
ctx              1234 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1240 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mhl_discover(struct sii8620 *ctx)
ctx              1242 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1281 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_peer_specific_init(struct sii8620 *ctx)
ctx              1283 drivers/gpu/drm/bridge/sil-sii8620.c 	if (sii8620_is_mhl3(ctx))
ctx              1284 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1290 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1304 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_dev_cap(struct sii8620 *ctx)
ctx              1333 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_MHL_DEVCAP_0, devcap, ARRAY_SIZE(devcap));
ctx              1334 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_MHL_EXTDEVCAP_0, xdcap, ARRAY_SIZE(xdcap));
ctx              1337 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mhl_init(struct sii8620 *ctx)
ctx              1339 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1345 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_peer_specific_init(ctx);
ctx              1347 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disable_hpd(ctx);
ctx              1349 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1360 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_LM_DDC, BIT_LM_DDC_SW_TPI_EN_DISABLED, 0);
ctx              1361 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1366 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_dev_cap(ctx);
ctx              1367 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1375 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_start_gen2_write_burst(ctx);
ctx              1376 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1391 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disable_gen2_write_burst(ctx);
ctx              1393 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_write_stat(ctx, MHL_DST_REG(VERSION), SII8620_MHL_VERSION);
ctx              1394 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_write_stat(ctx, MHL_DST_REG(CONNECTED_RDY),
ctx              1397 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_set_int(ctx, MHL_INT_REG(RCHANGE), MHL_INT_RC_DCAP_CHG);
ctx              1400 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_emsc_enable(struct sii8620 *ctx)
ctx              1404 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_GENCTL, BIT_GENCTL_EMSC_EN
ctx              1407 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_GENCTL, BIT_GENCTL_CLR_EMSC_RFIFO
ctx              1409 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_setbits(ctx, REG_COMMECNT, BIT_COMMECNT_I2C_TO_EMSC_EN, ~0);
ctx              1410 drivers/gpu/drm/bridge/sil-sii8620.c 	reg = sii8620_readb(ctx, REG_EMSCINTR);
ctx              1411 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_EMSCINTR, reg);
ctx              1412 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_EMSCINTRMASK, BIT_EMSCINTR_SPI_DVLD);
ctx              1415 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_wait_for_fsm_state(struct sii8620 *ctx, u8 state)
ctx              1420 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 s = sii8620_readb(ctx, REG_COC_STAT_0);
ctx              1431 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_set_mode(struct sii8620 *ctx, enum sii8620_mode mode)
ctx              1435 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->mode == mode)
ctx              1440 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1447 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mode = mode;
ctx              1450 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_M3_CTRL, VAL_M3_CTRL_MHL3_VALUE);
ctx              1451 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mode = mode;
ctx              1454 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_emsc_enable(ctx);
ctx              1455 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1475 drivers/gpu/drm/bridge/sil-sii8620.c 		ret = sii8620_wait_for_fsm_state(ctx, 0x03);
ctx              1476 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1481 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write(ctx, REG_CBUS3_CNVT, 0x85);
ctx              1483 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_disconnect(ctx);
ctx              1486 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mode = mode;
ctx              1489 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "%s mode %d not supported\n", __func__, mode);
ctx              1493 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_auto_zone(ctx);
ctx              1498 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1509 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_hpd_unplugged(struct sii8620 *ctx)
ctx              1511 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disable_hpd(ctx);
ctx              1512 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->sink_type = SINK_NONE;
ctx              1513 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->sink_detected = false;
ctx              1514 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->feature_complete = false;
ctx              1515 drivers/gpu/drm/bridge/sil-sii8620.c 	kfree(ctx->edid);
ctx              1516 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->edid = NULL;
ctx              1519 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_disconnect(struct sii8620 *ctx)
ctx              1521 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disable_gen2_write_burst(ctx);
ctx              1522 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_stop_video(ctx);
ctx              1524 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_cbus_reset(ctx);
ctx              1525 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_mode(ctx, CM_DISCONNECTED);
ctx              1526 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1546 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_hpd_unplugged(ctx);
ctx              1547 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1590 drivers/gpu/drm/bridge/sil-sii8620.c 	memset(ctx->stat, 0, sizeof(ctx->stat));
ctx              1591 drivers/gpu/drm/bridge/sil-sii8620.c 	memset(ctx->xstat, 0, sizeof(ctx->xstat));
ctx              1592 drivers/gpu/drm/bridge/sil-sii8620.c 	memset(ctx->devcap, 0, sizeof(ctx->devcap));
ctx              1593 drivers/gpu/drm/bridge/sil-sii8620.c 	memset(ctx->xdevcap, 0, sizeof(ctx->xdevcap));
ctx              1594 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->devcap_read = false;
ctx              1595 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->cbus_status = 0;
ctx              1596 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_cleanup(ctx);
ctx              1599 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_mhl_disconnected(struct sii8620 *ctx)
ctx              1601 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              1606 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disconnect(ctx);
ctx              1609 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_disc(struct sii8620 *ctx)
ctx              1611 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_CBUS_DISC_INTR0);
ctx              1614 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mhl_disconnected(ctx);
ctx              1617 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 stat2 = sii8620_readb(ctx, REG_DISC_STAT2);
ctx              1620 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mhl_discover(ctx);
ctx              1622 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write_seq_static(ctx,
ctx              1634 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mhl_init(ctx);
ctx              1636 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_CBUS_DISC_INTR0, stat);
ctx              1639 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_read_burst(struct sii8620 *ctx)
ctx              1643 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_MDT_RCV_READ_PORT, buf, ARRAY_SIZE(buf));
ctx              1644 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_MDT_RCV_CTRL, BIT_MDT_RCV_CTRL_MDT_RCV_EN |
ctx              1647 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_readb(ctx, REG_MDT_RFIFO_STAT);
ctx              1650 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_g2wb(struct sii8620 *ctx)
ctx              1652 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_MDT_INT_0);
ctx              1655 drivers/gpu/drm/bridge/sil-sii8620.c 		if (sii8620_is_mhl3(ctx))
ctx              1656 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_set_int(ctx, MHL_INT_REG(RCHANGE),
ctx              1660 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_read_burst(ctx);
ctx              1663 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_MDT_XMIT_CTRL, 0);
ctx              1665 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_MDT_INT_0, stat);
ctx              1668 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_status_dcap_ready(struct sii8620 *ctx)
ctx              1672 drivers/gpu/drm/bridge/sil-sii8620.c 	mode = ctx->stat[MHL_DST_VERSION] >= 0x30 ? CM_MHL3 : CM_MHL1;
ctx              1673 drivers/gpu/drm/bridge/sil-sii8620.c 	if (mode > ctx->mode)
ctx              1674 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_set_mode(ctx, mode);
ctx              1675 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_peer_specific_init(ctx);
ctx              1676 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_INTR9_MASK, BIT_INTR9_DEVCAP_DONE
ctx              1680 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_status_changed_path(struct sii8620 *ctx)
ctx              1684 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->use_packed_pixel)
ctx              1689 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->stat[MHL_DST_LINK_MODE] & MHL_DST_LM_PATH_ENABLED)
ctx              1692 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_write_stat(ctx, MHL_DST_REG(LINK_MODE),
ctx              1696 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_msc_mr_write_stat(struct sii8620 *ctx)
ctx              1700 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_MHL_STAT_0, st, MHL_DST_SIZE);
ctx              1701 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_MHL_EXTSTAT_0, xst, MHL_XDS_SIZE);
ctx              1703 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_update_array(ctx->stat, st, MHL_DST_SIZE);
ctx              1704 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_update_array(ctx->xstat, xst, MHL_XDS_SIZE);
ctx              1706 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->stat[MHL_DST_CONNECTED_RDY] & st[MHL_DST_CONNECTED_RDY] &
ctx              1708 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_status_dcap_ready(ctx);
ctx              1710 drivers/gpu/drm/bridge/sil-sii8620.c 		if (!sii8620_is_mhl3(ctx))
ctx              1711 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_read_devcap(ctx, false);
ctx              1715 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_status_changed_path(ctx);
ctx              1718 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_ecbus_up(struct sii8620 *ctx, int ret)
ctx              1723 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_set_mode(ctx, CM_ECBUS_S);
ctx              1726 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_got_ecbus_speed(struct sii8620 *ctx, int ret)
ctx              1731 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_write_stat(ctx, MHL_XDS_REG(CURR_ECBUS_MODE),
ctx              1733 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_rap(ctx, MHL_RAP_CBUS_MODE_UP);
ctx              1734 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_set_cont(ctx, sii8620_ecbus_up);
ctx              1745 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_send_features(struct sii8620 *ctx)
ctx              1749 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_MDT_XMIT_CTRL, BIT_MDT_XMIT_CTRL_EN
ctx              1753 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_MDT_XMIT_WRITE_PORT, buf, ARRAY_SIZE(buf));
ctx              1756 drivers/gpu/drm/bridge/sil-sii8620.c static bool sii8620_rcp_consume(struct sii8620 *ctx, u8 scancode)
ctx              1762 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx->rc_dev) {
ctx              1763 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_dbg(ctx->dev, "RCP input device not initialized\n");
ctx              1768 drivers/gpu/drm/bridge/sil-sii8620.c 		rc_keydown(ctx->rc_dev, RC_PROTO_CEC, scancode, 0);
ctx              1770 drivers/gpu/drm/bridge/sil-sii8620.c 		rc_keyup(ctx->rc_dev);
ctx              1775 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_msc_mr_set_int(struct sii8620 *ctx)
ctx              1779 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_MHL_INT_0, ints, MHL_INT_SIZE);
ctx              1780 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_buf(ctx, REG_MHL_INT_0, ints, MHL_INT_SIZE);
ctx              1783 drivers/gpu/drm/bridge/sil-sii8620.c 		switch (ctx->mode) {
ctx              1785 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_read_xdevcap_reg(ctx, MHL_XDC_ECBUS_SPEEDS);
ctx              1786 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_set_cont(ctx, sii8620_got_ecbus_speed);
ctx              1789 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_read_devcap(ctx, true);
ctx              1796 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_send_features(ctx);
ctx              1798 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->feature_complete = true;
ctx              1799 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->edid)
ctx              1800 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_enable_hpd(ctx);
ctx              1804 drivers/gpu/drm/bridge/sil-sii8620.c static struct sii8620_mt_msg *sii8620_msc_msg_first(struct sii8620 *ctx)
ctx              1806 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx              1808 drivers/gpu/drm/bridge/sil-sii8620.c 	if (list_empty(&ctx->mt_queue)) {
ctx              1813 drivers/gpu/drm/bridge/sil-sii8620.c 	return list_first_entry(&ctx->mt_queue, struct sii8620_mt_msg, node);
ctx              1816 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_msc_mt_done(struct sii8620 *ctx)
ctx              1818 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620_mt_msg *msg = sii8620_msc_msg_first(ctx);
ctx              1823 drivers/gpu/drm/bridge/sil-sii8620.c 	msg->ret = sii8620_readb(ctx, REG_MSC_MT_RCVD_DATA0);
ctx              1824 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->mt_state = MT_STATE_DONE;
ctx              1827 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_msc_mr_msc_msg(struct sii8620 *ctx)
ctx              1832 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_MSC_MR_MSC_MSG_RCVD_1ST_DATA, buf, 2);
ctx              1836 drivers/gpu/drm/bridge/sil-sii8620.c 		msg = sii8620_msc_msg_first(ctx);
ctx              1840 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mt_state = MT_STATE_DONE;
ctx              1843 drivers/gpu/drm/bridge/sil-sii8620.c 		if (!sii8620_rcp_consume(ctx, buf[1]))
ctx              1844 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_rcpe(ctx,
ctx              1846 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mt_rcpk(ctx, buf[1]);
ctx              1849 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "%s message type %d,%d not supported",
ctx              1854 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_msc(struct sii8620 *ctx)
ctx              1856 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_CBUS_INT_0);
ctx              1859 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_CBUS_INT_0, stat & ~BIT_CBUS_HPD_CHG);
ctx              1862 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 cbus_stat = sii8620_readb(ctx, REG_CBUS_STATUS);
ctx              1864 drivers/gpu/drm/bridge/sil-sii8620.c 		if ((cbus_stat ^ ctx->cbus_status) & BIT_CBUS_STATUS_CBUS_HPD) {
ctx              1865 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write(ctx, REG_CBUS_INT_0, BIT_CBUS_HPD_CHG);
ctx              1870 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->cbus_status = cbus_stat;
ctx              1874 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_msc_mr_write_stat(ctx);
ctx              1877 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->cbus_status & BIT_CBUS_STATUS_CBUS_HPD) {
ctx              1878 drivers/gpu/drm/bridge/sil-sii8620.c 			ctx->sink_detected = true;
ctx              1879 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_identify_sink(ctx);
ctx              1881 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_hpd_unplugged(ctx);
ctx              1886 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_msc_mr_set_int(ctx);
ctx              1889 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_msc_mt_done(ctx);
ctx              1892 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_msc_mr_msc_msg(ctx);
ctx              1895 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_coc(struct sii8620 *ctx)
ctx              1897 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_COC_INTR);
ctx              1900 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 cstat = sii8620_readb(ctx, REG_COC_STAT_0);
ctx              1904 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_write_seq_static(ctx,
ctx              1912 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_COC_INTR, stat);
ctx              1915 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_merr(struct sii8620 *ctx)
ctx              1917 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_CBUS_INT_1);
ctx              1919 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_CBUS_INT_1, stat);
ctx              1922 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_edid(struct sii8620 *ctx)
ctx              1924 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_INTR9);
ctx              1926 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_INTR9, stat);
ctx              1929 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mt_state = MT_STATE_DONE;
ctx              1932 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_scdt(struct sii8620 *ctx)
ctx              1934 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_INTR5);
ctx              1937 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 cstat = sii8620_readb(ctx, REG_TMDS_CSTAT_P3);
ctx              1940 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_start_video(ctx);
ctx              1943 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_INTR5, stat);
ctx              1946 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_got_xdevcap(struct sii8620 *ctx, int ret)
ctx              1951 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_read_devcap(ctx, false);
ctx              1954 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_tdm(struct sii8620 *ctx)
ctx              1956 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_TRXINTH);
ctx              1957 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 tdm = sii8620_readb(ctx, REG_TRXSTA2);
ctx              1960 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->mode = CM_ECBUS_S;
ctx              1961 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.rx_ack = 0;
ctx              1962 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->burst.r_size = SII8620_BURST_BUF_LEN;
ctx              1963 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_burst_tx_rbuf_info(ctx, SII8620_BURST_BUF_LEN);
ctx              1964 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mt_read_devcap(ctx, true);
ctx              1965 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mt_set_cont(ctx, sii8620_got_xdevcap);
ctx              1967 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write_seq_static(ctx,
ctx              1973 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_TRXINTH, stat);
ctx              1976 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_block(struct sii8620 *ctx)
ctx              1978 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_EMSCINTR);
ctx              1981 drivers/gpu/drm/bridge/sil-sii8620.c 		u8 bstat = sii8620_readb(ctx, REG_SPIBURSTSTAT);
ctx              1984 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_burst_receive(ctx);
ctx              1987 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_EMSCINTR, stat);
ctx              1990 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_irq_ddc(struct sii8620 *ctx)
ctx              1992 drivers/gpu/drm/bridge/sil-sii8620.c 	u8 stat = sii8620_readb(ctx, REG_INTR3);
ctx              1995 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_write(ctx, REG_INTR3_MASK, 0);
ctx              1996 drivers/gpu/drm/bridge/sil-sii8620.c 		if (sii8620_is_mhl3(ctx) && !ctx->feature_complete)
ctx              1997 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_mt_set_int(ctx, MHL_INT_REG(RCHANGE),
ctx              2000 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_enable_hpd(ctx);
ctx              2002 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_INTR3, stat);
ctx              2015 drivers/gpu/drm/bridge/sil-sii8620.c 		void (*handler)(struct sii8620 *ctx);
ctx              2028 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = data;
ctx              2032 drivers/gpu/drm/bridge/sil-sii8620.c 	mutex_lock(&ctx->lock);
ctx              2034 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_FAST_INTR_STAT, stats, ARRAY_SIZE(stats));
ctx              2037 drivers/gpu/drm/bridge/sil-sii8620.c 			irq_vec[i].handler(ctx);
ctx              2039 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_burst_rx_all(ctx);
ctx              2040 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_mt_work(ctx);
ctx              2041 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_burst_send(ctx);
ctx              2043 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = sii8620_clear_error(ctx);
ctx              2045 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "Error during IRQ handling, %d.\n", ret);
ctx              2046 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_mhl_disconnected(ctx);
ctx              2048 drivers/gpu/drm/bridge/sil-sii8620.c 	mutex_unlock(&ctx->lock);
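
The threaded IRQ handler above reads the fast-interrupt status bytes once and dispatches through an irq_vec table of bit/handler pairs. A standalone sketch of that table-driven dispatch; all demo_* names and the bit values are invented for illustration.

#include <stdio.h>

struct demo_ctx { int dummy; };

static void handle_disc(struct demo_ctx *c) { puts("disc"); }
static void handle_msc(struct demo_ctx *c)  { puts("msc"); }

/* Table-driven dispatch: one entry per status byte/bit pair. */
static const struct {
	int stat_index;		/* which status byte */
	unsigned char bit;	/* bit within that byte */
	void (*handler)(struct demo_ctx *ctx);
} irq_vec[] = {
	{ 0, 0x01, handle_disc },
	{ 0, 0x08, handle_msc },
};

static void demo_irq(struct demo_ctx *c, const unsigned char *stats)
{
	for (unsigned int i = 0; i < sizeof(irq_vec) / sizeof(irq_vec[0]); i++)
		if (stats[irq_vec[i].stat_index] & irq_vec[i].bit)
			irq_vec[i].handler(c);
}

int main(void)
{
	struct demo_ctx c = { 0 };
	unsigned char stats[1] = { 0x09 };	/* both bits set */

	demo_irq(&c, stats);
	return 0;
}

Adding a new interrupt source then means adding one table entry rather than another branch in the handler, which is the design choice the driver makes.
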
ctx              2053 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_cable_in(struct sii8620 *ctx)
ctx              2055 drivers/gpu/drm/bridge/sil-sii8620.c 	struct device *dev = ctx->dev;
ctx              2059 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = sii8620_hw_on(ctx);
ctx              2065 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_read_buf(ctx, REG_VND_IDL, ver, ARRAY_SIZE(ver));
ctx              2066 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = sii8620_clear_error(ctx);
ctx              2075 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write(ctx, REG_DPD,
ctx              2078 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_xtal_set_rate(ctx);
ctx              2079 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_disconnect(ctx);
ctx              2081 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_write_seq_static(ctx,
ctx              2088 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = sii8620_clear_error(ctx);
ctx              2094 drivers/gpu/drm/bridge/sil-sii8620.c 	enable_irq(to_i2c_client(ctx->dev)->irq);
ctx              2097 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_init_rcp_input_dev(struct sii8620 *ctx)
ctx              2104 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "Failed to allocate RC device\n");
ctx              2105 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = -ENOMEM;
ctx              2119 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "Failed to register RC device\n");
ctx              2120 drivers/gpu/drm/bridge/sil-sii8620.c 		ctx->error = ret;
ctx              2121 drivers/gpu/drm/bridge/sil-sii8620.c 		rc_free_device(ctx->rc_dev);
ctx              2124 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->rc_dev = rc_dev;
ctx              2127 drivers/gpu/drm/bridge/sil-sii8620.c static void sii8620_cable_out(struct sii8620 *ctx)
ctx              2129 drivers/gpu/drm/bridge/sil-sii8620.c 	disable_irq(to_i2c_client(ctx->dev)->irq);
ctx              2130 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_hw_off(ctx);
ctx              2135 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx =
ctx              2137 drivers/gpu/drm/bridge/sil-sii8620.c 	int state = extcon_get_state(ctx->extcon, EXTCON_DISP_MHL);
ctx              2139 drivers/gpu/drm/bridge/sil-sii8620.c 	if (state == ctx->cable_state)
ctx              2142 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->cable_state = state;
ctx              2145 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_cable_in(ctx);
ctx              2147 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_cable_out(ctx);
ctx              2153 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx =
ctx              2156 drivers/gpu/drm/bridge/sil-sii8620.c 	schedule_work(&ctx->extcon_wq);
ctx              2161 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_extcon_init(struct sii8620 *ctx)
ctx              2168 drivers/gpu/drm/bridge/sil-sii8620.c 	musb = of_graph_get_remote_node(ctx->dev->of_node, 1, -1);
ctx              2173 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_info(ctx->dev, "no extcon found, switching to 'always on' mode\n");
ctx              2182 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "Invalid or missing extcon\n");
ctx              2186 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->extcon = edev;
ctx              2187 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->extcon_nb.notifier_call = sii8620_extcon_notifier;
ctx              2188 drivers/gpu/drm/bridge/sil-sii8620.c 	INIT_WORK(&ctx->extcon_wq, sii8620_extcon_work);
ctx              2189 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = extcon_register_notifier(edev, EXTCON_DISP_MHL, &ctx->extcon_nb);
ctx              2191 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "failed to register notifier for MHL\n");
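
sii8620_extcon_init above wires a notifier to a workqueue: the notifier callback may run in atomic context, so it only schedules work, and the work function does the sleeping cable handling. A kernel-style sketch of that split with hypothetical demo_* names; extcon_register_notifier, schedule_work, and INIT_WORK are the real APIs used above.

#include <linux/extcon.h>
#include <linux/kernel.h>
#include <linux/notifier.h>
#include <linux/workqueue.h>

struct demo_ctx {
	struct extcon_dev *extcon;
	struct notifier_block nb;
	struct work_struct wq;
};

/* Notifier may run in atomic context: just punt to process context. */
static int demo_notifier(struct notifier_block *nb, unsigned long event,
			 void *ptr)
{
	struct demo_ctx *ctx = container_of(nb, struct demo_ctx, nb);

	schedule_work(&ctx->wq);
	return NOTIFY_DONE;
}

static void demo_work(struct work_struct *work)
{
	struct demo_ctx *ctx = container_of(work, struct demo_ctx, wq);

	/* Safe to sleep here: query cable state, power up or down, etc. */
	extcon_get_state(ctx->extcon, EXTCON_DISP_MHL);
}

static int demo_extcon_init(struct demo_ctx *ctx)
{
	ctx->nb.notifier_call = demo_notifier;
	INIT_WORK(&ctx->wq, demo_work);
	return extcon_register_notifier(ctx->extcon, EXTCON_DISP_MHL,
					&ctx->nb);
}
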
ctx              2205 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = bridge_to_sii8620(bridge);
ctx              2207 drivers/gpu/drm/bridge/sil-sii8620.c 	sii8620_init_rcp_input_dev(ctx);
ctx              2209 drivers/gpu/drm/bridge/sil-sii8620.c 	return sii8620_clear_error(ctx);
ctx              2214 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = bridge_to_sii8620(bridge);
ctx              2216 drivers/gpu/drm/bridge/sil-sii8620.c 	rc_unregister_device(ctx->rc_dev);
ctx              2219 drivers/gpu/drm/bridge/sil-sii8620.c static int sii8620_is_packing_required(struct sii8620 *ctx,
ctx              2224 drivers/gpu/drm/bridge/sil-sii8620.c 	if (sii8620_is_mhl3(ctx)) {
ctx              2243 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = bridge_to_sii8620(bridge);
ctx              2244 drivers/gpu/drm/bridge/sil-sii8620.c 	int pack_required = sii8620_is_packing_required(ctx, mode);
ctx              2245 drivers/gpu/drm/bridge/sil-sii8620.c 	bool can_pack = ctx->devcap[MHL_DCAP_VID_LINK_MODE] &
ctx              2262 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = bridge_to_sii8620(bridge);
ctx              2264 drivers/gpu/drm/bridge/sil-sii8620.c 	mutex_lock(&ctx->lock);
ctx              2266 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->use_packed_pixel = sii8620_is_packing_required(ctx, adjusted_mode);
ctx              2268 drivers/gpu/drm/bridge/sil-sii8620.c 	mutex_unlock(&ctx->lock);
ctx              2284 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx;
ctx              2287 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              2288 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx)
ctx              2291 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->dev = dev;
ctx              2292 drivers/gpu/drm/bridge/sil-sii8620.c 	mutex_init(&ctx->lock);
ctx              2293 drivers/gpu/drm/bridge/sil-sii8620.c 	INIT_LIST_HEAD(&ctx->mt_queue);
ctx              2295 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->clk_xtal = devm_clk_get(dev, "xtal");
ctx              2296 drivers/gpu/drm/bridge/sil-sii8620.c 	if (IS_ERR(ctx->clk_xtal)) {
ctx              2298 drivers/gpu/drm/bridge/sil-sii8620.c 		return PTR_ERR(ctx->clk_xtal);
ctx              2309 drivers/gpu/drm/bridge/sil-sii8620.c 					"sii8620", ctx);
ctx              2315 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->gpio_reset = devm_gpiod_get(dev, "reset", GPIOD_OUT_HIGH);
ctx              2316 drivers/gpu/drm/bridge/sil-sii8620.c 	if (IS_ERR(ctx->gpio_reset)) {
ctx              2318 drivers/gpu/drm/bridge/sil-sii8620.c 		return PTR_ERR(ctx->gpio_reset);
ctx              2321 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->supplies[0].supply = "cvcc10";
ctx              2322 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->supplies[1].supply = "iovcc18";
ctx              2323 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = devm_regulator_bulk_get(dev, 2, ctx->supplies);
ctx              2327 drivers/gpu/drm/bridge/sil-sii8620.c 	ret = sii8620_extcon_init(ctx);
ctx              2329 drivers/gpu/drm/bridge/sil-sii8620.c 		dev_err(ctx->dev, "failed to initialize EXTCON\n");
ctx              2333 drivers/gpu/drm/bridge/sil-sii8620.c 	i2c_set_clientdata(client, ctx);
ctx              2335 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->bridge.funcs = &sii8620_bridge_funcs;
ctx              2336 drivers/gpu/drm/bridge/sil-sii8620.c 	ctx->bridge.of_node = dev->of_node;
ctx              2337 drivers/gpu/drm/bridge/sil-sii8620.c 	drm_bridge_add(&ctx->bridge);
ctx              2339 drivers/gpu/drm/bridge/sil-sii8620.c 	if (!ctx->extcon)
ctx              2340 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_cable_in(ctx);
ctx              2347 drivers/gpu/drm/bridge/sil-sii8620.c 	struct sii8620 *ctx = i2c_get_clientdata(client);
ctx              2349 drivers/gpu/drm/bridge/sil-sii8620.c 	if (ctx->extcon) {
ctx              2350 drivers/gpu/drm/bridge/sil-sii8620.c 		extcon_unregister_notifier(ctx->extcon, EXTCON_DISP_MHL,
ctx              2351 drivers/gpu/drm/bridge/sil-sii8620.c 					   &ctx->extcon_nb);
ctx              2352 drivers/gpu/drm/bridge/sil-sii8620.c 		flush_work(&ctx->extcon_wq);
ctx              2353 drivers/gpu/drm/bridge/sil-sii8620.c 		if (ctx->cable_state > 0)
ctx              2354 drivers/gpu/drm/bridge/sil-sii8620.c 			sii8620_cable_out(ctx);
ctx              2356 drivers/gpu/drm/bridge/sil-sii8620.c 		sii8620_cable_out(ctx);
ctx              2358 drivers/gpu/drm/bridge/sil-sii8620.c 	drm_bridge_remove(&ctx->bridge);
ctx               162 drivers/gpu/drm/bridge/tc358764.c static int tc358764_clear_error(struct tc358764 *ctx)
ctx               164 drivers/gpu/drm/bridge/tc358764.c 	int ret = ctx->error;
ctx               166 drivers/gpu/drm/bridge/tc358764.c 	ctx->error = 0;
ctx               170 drivers/gpu/drm/bridge/tc358764.c static void tc358764_read(struct tc358764 *ctx, u16 addr, u32 *val)
ctx               172 drivers/gpu/drm/bridge/tc358764.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               175 drivers/gpu/drm/bridge/tc358764.c 	if (ctx->error)
ctx               183 drivers/gpu/drm/bridge/tc358764.c 	dev_dbg(ctx->dev, "read: %d, addr: %d\n", addr, *val);
ctx               186 drivers/gpu/drm/bridge/tc358764.c static void tc358764_write(struct tc358764 *ctx, u16 addr, u32 val)
ctx               188 drivers/gpu/drm/bridge/tc358764.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               192 drivers/gpu/drm/bridge/tc358764.c 	if (ctx->error)
ctx               204 drivers/gpu/drm/bridge/tc358764.c 		ctx->error = ret;
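
tc358764 (like sii8620) latches the first bus failure into ctx->error, turns subsequent writes into no-ops, and reads the result back once via tc358764_clear_error, so long register sequences need a single check at the end. A standalone sketch of that deferred-error pattern; demo_* names and the faked bus_write are hypothetical.

#include <stdio.h>

struct demo_dev {
	int error;	/* first failure in a write sequence, 0 if none */
};

static int bus_write(unsigned short addr, unsigned int val)
{
	return addr == 0xdead ? -5 : 0;	/* fake a bus error (-EIO) */
}

/* Skip the bus once an error is latched; remember only the first one. */
static void demo_write(struct demo_dev *d, unsigned short addr,
		       unsigned int val)
{
	if (d->error)
		return;
	d->error = bus_write(addr, val);
}

/* Return and reset the latched error, as tc358764_clear_error does. */
static int demo_clear_error(struct demo_dev *d)
{
	int ret = d->error;

	d->error = 0;
	return ret;
}

int main(void)
{
	struct demo_dev d = { 0 };

	demo_write(&d, 0x0100, 1);
	demo_write(&d, 0xdead, 2);	/* fails and latches the error */
	demo_write(&d, 0x0104, 3);	/* silently skipped */
	printf("sequence result: %d\n", demo_clear_error(&d));
	return 0;
}
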
ctx               218 drivers/gpu/drm/bridge/tc358764.c static int tc358764_init(struct tc358764 *ctx)
ctx               222 drivers/gpu/drm/bridge/tc358764.c 	tc358764_read(ctx, SYS_ID, &v);
ctx               223 drivers/gpu/drm/bridge/tc358764.c 	if (ctx->error)
ctx               224 drivers/gpu/drm/bridge/tc358764.c 		return tc358764_clear_error(ctx);
ctx               225 drivers/gpu/drm/bridge/tc358764.c 	dev_info(ctx->dev, "ID: %#x\n", v);
ctx               228 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_TX_RX_TA, TTA_GET | TTA_SURE);
ctx               229 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_LPTXTIMECNT, LPX_PERIOD);
ctx               230 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_D0S_CLRSIPOCOUNT, 5);
ctx               231 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_D1S_CLRSIPOCOUNT, 5);
ctx               232 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_D2S_CLRSIPOCOUNT, 5);
ctx               233 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_D3S_CLRSIPOCOUNT, 5);
ctx               236 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_LANEENABLE, LANEENABLE_L3EN | LANEENABLE_L2EN |
ctx               238 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, DSI_LANEENABLE, LANEENABLE_L3EN | LANEENABLE_L2EN |
ctx               242 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, PPI_STARTPPI, PPI_START_FUNCTION);
ctx               243 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, DSI_STARTDSI, DSI_RX_START);
ctx               246 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, VP_CTRL, VP_CTRL_VSDELAY(15) | VP_CTRL_RGB888(1) |
ctx               250 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_PHY0, LV_PHY0_RST(1) |
ctx               252 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_PHY0, LV_PHY0_PRBS_ON(4) | LV_PHY0_IS(2) |
ctx               256 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, SYS_RST, SYS_RST_LCD);
ctx               259 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX0003, LV_MX(LVI_R0, LVI_R1, LVI_R2, LVI_R3));
ctx               260 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX0407, LV_MX(LVI_R4, LVI_R7, LVI_R5, LVI_G0));
ctx               261 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX0811, LV_MX(LVI_G1, LVI_G2, LVI_G6, LVI_G7));
ctx               262 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX1215, LV_MX(LVI_G3, LVI_G4, LVI_G5, LVI_B0));
ctx               263 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX1619, LV_MX(LVI_B6, LVI_B7, LVI_B1, LVI_B2));
ctx               264 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX2023, LV_MX(LVI_B3, LVI_B4, LVI_B5, LVI_L0));
ctx               265 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_MX2427, LV_MX(LVI_HS, LVI_VS, LVI_DE, LVI_R6));
ctx               266 drivers/gpu/drm/bridge/tc358764.c 	tc358764_write(ctx, LV_CFG, LV_CFG_CLKPOL2 | LV_CFG_CLKPOL1 |
ctx               269 drivers/gpu/drm/bridge/tc358764.c 	return tc358764_clear_error(ctx);
ctx               272 drivers/gpu/drm/bridge/tc358764.c static void tc358764_reset(struct tc358764 *ctx)
ctx               274 drivers/gpu/drm/bridge/tc358764.c 	gpiod_set_value(ctx->gpio_reset, 1);
ctx               276 drivers/gpu/drm/bridge/tc358764.c 	gpiod_set_value(ctx->gpio_reset, 0);
ctx               282 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = connector_to_tc358764(connector);
ctx               284 drivers/gpu/drm/bridge/tc358764.c 	return drm_panel_get_modes(ctx->panel);
ctx               302 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               306 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error disabling panel (%d)\n", ret);
ctx               311 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               314 drivers/gpu/drm/bridge/tc358764.c 	ret = drm_panel_unprepare(ctx->panel);
ctx               316 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error unpreparing panel (%d)\n", ret);
ctx               317 drivers/gpu/drm/bridge/tc358764.c 	tc358764_reset(ctx);
ctx               319 drivers/gpu/drm/bridge/tc358764.c 	ret = regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               321 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error disabling regulators (%d)\n", ret);
ctx               326 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               329 drivers/gpu/drm/bridge/tc358764.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               331 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error enabling regulators (%d)\n", ret);
ctx               333 drivers/gpu/drm/bridge/tc358764.c 	tc358764_reset(ctx);
ctx               334 drivers/gpu/drm/bridge/tc358764.c 	ret = tc358764_init(ctx);
ctx               336 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error initializing bridge (%d)\n", ret);
ctx               337 drivers/gpu/drm/bridge/tc358764.c 	ret = drm_panel_prepare(ctx->panel);
ctx               339 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error preparing panel (%d)\n", ret);
ctx               344 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               345 drivers/gpu/drm/bridge/tc358764.c 	int ret = drm_panel_enable(ctx->panel);
ctx               348 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "error enabling panel (%d)\n", ret);
ctx               353 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               357 drivers/gpu/drm/bridge/tc358764.c 	ctx->connector.polled = DRM_CONNECTOR_POLL_HPD;
ctx               358 drivers/gpu/drm/bridge/tc358764.c 	ret = drm_connector_init(drm, &ctx->connector,
ctx               366 drivers/gpu/drm/bridge/tc358764.c 	drm_connector_helper_add(&ctx->connector,
ctx               368 drivers/gpu/drm/bridge/tc358764.c 	drm_connector_attach_encoder(&ctx->connector, bridge->encoder);
ctx               369 drivers/gpu/drm/bridge/tc358764.c 	drm_panel_attach(ctx->panel, &ctx->connector);
ctx               370 drivers/gpu/drm/bridge/tc358764.c 	ctx->connector.funcs->reset(&ctx->connector);
ctx               371 drivers/gpu/drm/bridge/tc358764.c 	drm_fb_helper_add_one_connector(drm->fb_helper, &ctx->connector);
ctx               372 drivers/gpu/drm/bridge/tc358764.c 	drm_connector_register(&ctx->connector);
ctx               379 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = bridge_to_tc358764(bridge);
ctx               382 drivers/gpu/drm/bridge/tc358764.c 	drm_connector_unregister(&ctx->connector);
ctx               383 drivers/gpu/drm/bridge/tc358764.c 	drm_fb_helper_remove_one_connector(drm->fb_helper, &ctx->connector);
ctx               384 drivers/gpu/drm/bridge/tc358764.c 	drm_panel_detach(ctx->panel);
ctx               385 drivers/gpu/drm/bridge/tc358764.c 	ctx->panel = NULL;
ctx               386 drivers/gpu/drm/bridge/tc358764.c 	drm_connector_put(&ctx->connector);
ctx               398 drivers/gpu/drm/bridge/tc358764.c static int tc358764_parse_dt(struct tc358764 *ctx)
ctx               400 drivers/gpu/drm/bridge/tc358764.c 	struct device *dev = ctx->dev;
ctx               403 drivers/gpu/drm/bridge/tc358764.c 	ctx->gpio_reset = devm_gpiod_get(dev, "reset", GPIOD_OUT_LOW);
ctx               404 drivers/gpu/drm/bridge/tc358764.c 	if (IS_ERR(ctx->gpio_reset)) {
ctx               406 drivers/gpu/drm/bridge/tc358764.c 		return PTR_ERR(ctx->gpio_reset);
ctx               409 drivers/gpu/drm/bridge/tc358764.c 	ret = drm_of_find_panel_or_bridge(ctx->dev->of_node, 1, 0, &ctx->panel,
ctx               417 drivers/gpu/drm/bridge/tc358764.c static int tc358764_configure_regulators(struct tc358764 *ctx)
ctx               421 drivers/gpu/drm/bridge/tc358764.c 	for (i = 0; i < ARRAY_SIZE(ctx->supplies); ++i)
ctx               422 drivers/gpu/drm/bridge/tc358764.c 		ctx->supplies[i].supply = tc358764_supplies[i];
ctx               424 drivers/gpu/drm/bridge/tc358764.c 	ret = devm_regulator_bulk_get(ctx->dev, ARRAY_SIZE(ctx->supplies),
ctx               425 drivers/gpu/drm/bridge/tc358764.c 				      ctx->supplies);
ctx               427 drivers/gpu/drm/bridge/tc358764.c 		dev_err(ctx->dev, "failed to get regulators: %d\n", ret);
ctx               435 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx;
ctx               438 drivers/gpu/drm/bridge/tc358764.c 	ctx = devm_kzalloc(dev, sizeof(struct tc358764), GFP_KERNEL);
ctx               439 drivers/gpu/drm/bridge/tc358764.c 	if (!ctx)
ctx               442 drivers/gpu/drm/bridge/tc358764.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               444 drivers/gpu/drm/bridge/tc358764.c 	ctx->dev = dev;
ctx               451 drivers/gpu/drm/bridge/tc358764.c 	ret = tc358764_parse_dt(ctx);
ctx               455 drivers/gpu/drm/bridge/tc358764.c 	ret = tc358764_configure_regulators(ctx);
ctx               459 drivers/gpu/drm/bridge/tc358764.c 	ctx->bridge.funcs = &tc358764_bridge_funcs;
ctx               460 drivers/gpu/drm/bridge/tc358764.c 	ctx->bridge.of_node = dev->of_node;
ctx               462 drivers/gpu/drm/bridge/tc358764.c 	drm_bridge_add(&ctx->bridge);
ctx               466 drivers/gpu/drm/bridge/tc358764.c 		drm_bridge_remove(&ctx->bridge);
ctx               475 drivers/gpu/drm/bridge/tc358764.c 	struct tc358764 *ctx = mipi_dsi_get_drvdata(dsi);
ctx               478 drivers/gpu/drm/bridge/tc358764.c 	drm_bridge_remove(&ctx->bridge);
ctx              2835 drivers/gpu/drm/drm_atomic_helper.c 				   struct drm_modeset_acquire_ctx *ctx)
ctx              2845 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              2886 drivers/gpu/drm/drm_atomic_helper.c 				    struct drm_modeset_acquire_ctx *ctx)
ctx              2896 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              2934 drivers/gpu/drm/drm_atomic_helper.c 				 struct drm_modeset_acquire_ctx *ctx)
ctx              2944 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              2985 drivers/gpu/drm/drm_atomic_helper.c 				  struct drm_modeset_acquire_ctx *ctx)
ctx              3000 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              3058 drivers/gpu/drm/drm_atomic_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx              3061 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, ret);
ctx              3063 drivers/gpu/drm/drm_atomic_helper.c 	ret = drm_atomic_helper_disable_all(dev, &ctx);
ctx              3067 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_END(ctx, ret);
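
The helpers above thread a drm_modeset_acquire_ctx through every operation so lock acquisition can back off and retry on -EDEADLK; the DRM_MODESET_LOCK_ALL_BEGIN/END pair hides the retry loop. A kernel-style sketch of the macro usage as it appears above; demo_locked_op and demo_do_work are hypothetical stand-ins.

#include <drm/drm_device.h>
#include <drm/drm_modeset_lock.h>

static int demo_do_work(struct drm_device *dev,
			struct drm_modeset_acquire_ctx *ctx)
{
	return 0;	/* real code would build and commit atomic state */
}

/* Take every modeset lock, do work, and let the macros handle
 * -EDEADLK backoff transparently. */
static int demo_locked_op(struct drm_device *dev)
{
	struct drm_modeset_acquire_ctx ctx;
	int ret;

	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, ret);

	ret = demo_do_work(dev, &ctx);	/* -EDEADLK here retries the loop */

	DRM_MODESET_LOCK_ALL_END(ctx, ret);

	return ret;
}
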
ctx              3097 drivers/gpu/drm/drm_atomic_helper.c 				  struct drm_modeset_acquire_ctx *ctx)
ctx              3110 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              3186 drivers/gpu/drm/drm_atomic_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx              3193 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, err);
ctx              3195 drivers/gpu/drm/drm_atomic_helper.c 	state = drm_atomic_helper_duplicate_state(dev, &ctx);
ctx              3199 drivers/gpu/drm/drm_atomic_helper.c 	err = drm_atomic_helper_disable_all(dev, &ctx);
ctx              3207 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_END(ctx, err);
ctx              3231 drivers/gpu/drm/drm_atomic_helper.c 					      struct drm_modeset_acquire_ctx *ctx)
ctx              3241 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              3279 drivers/gpu/drm/drm_atomic_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx              3284 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, err);
ctx              3286 drivers/gpu/drm/drm_atomic_helper.c 	err = drm_atomic_helper_commit_duplicated_state(state, &ctx);
ctx              3288 drivers/gpu/drm/drm_atomic_helper.c 	DRM_MODESET_LOCK_ALL_END(ctx, err);
ctx              3354 drivers/gpu/drm/drm_atomic_helper.c 				struct drm_modeset_acquire_ctx *ctx)
ctx              3364 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              3398 drivers/gpu/drm/drm_atomic_helper.c 				       struct drm_modeset_acquire_ctx *ctx)
ctx              3409 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              3446 drivers/gpu/drm/drm_atomic_helper.c 				       struct drm_modeset_acquire_ctx *ctx)
ctx              3477 drivers/gpu/drm/drm_atomic_helper.c 	state->acquire_ctx = ctx;
ctx              1286 drivers/gpu/drm/drm_atomic_uapi.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1320 drivers/gpu/drm/drm_atomic_uapi.c 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
ctx              1321 drivers/gpu/drm/drm_atomic_uapi.c 	state->acquire_ctx = &ctx;
ctx              1419 drivers/gpu/drm/drm_atomic_uapi.c 		ret = drm_modeset_backoff(&ctx);
ctx              1426 drivers/gpu/drm/drm_atomic_uapi.c 	drm_modeset_drop_locks(&ctx);
ctx              1427 drivers/gpu/drm/drm_atomic_uapi.c 	drm_modeset_acquire_fini(&ctx);
ctx               519 drivers/gpu/drm/drm_client_modeset.c 	struct drm_modeset_acquire_ctx ctx;
ctx               528 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               530 drivers/gpu/drm/drm_client_modeset.c 	while (drm_modeset_lock_all_ctx(dev, &ctx) != 0)
ctx               531 drivers/gpu/drm/drm_client_modeset.c 		drm_modeset_backoff(&ctx);
ctx               671 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_drop_locks(&ctx);
ctx               672 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_acquire_fini(&ctx);
ctx               903 drivers/gpu/drm/drm_client_modeset.c 	struct drm_modeset_acquire_ctx ctx;
ctx               907 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               915 drivers/gpu/drm/drm_client_modeset.c 	state->acquire_ctx = &ctx;
ctx               973 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_drop_locks(&ctx);
ctx               974 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_acquire_fini(&ctx);
ctx               980 drivers/gpu/drm/drm_client_modeset.c 	drm_modeset_backoff(&ctx);
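
drm_client_modeset.c uses the non-interruptible variant: with flags 0, effectively the only failure drm_modeset_lock_all_ctx() can return is -EDEADLK, so the caller may loop on backoff until every lock is held instead of inspecting error codes. The full lifetime of that context, restated:

	drm_modeset_acquire_init(&ctx, 0);	/* non-interruptible */

	while (drm_modeset_lock_all_ctx(dev, &ctx) != 0)
		drm_modeset_backoff(&ctx);	/* -EDEADLK is the expected failure */

	/* ... modeset work under all locks ... */

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
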
ctx               245 drivers/gpu/drm/drm_color_mgmt.c 	struct drm_modeset_acquire_ctx ctx;
ctx               262 drivers/gpu/drm/drm_color_mgmt.c 	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, 0, ret);
ctx               284 drivers/gpu/drm/drm_color_mgmt.c 				     crtc->gamma_size, &ctx);
ctx               287 drivers/gpu/drm/drm_color_mgmt.c 	DRM_MODESET_LOCK_ALL_END(ctx, ret);
ctx               334 drivers/gpu/drm/drm_context.c 	struct drm_ctx ctx;
ctx               342 drivers/gpu/drm/drm_context.c 		memset(&ctx, 0, sizeof(ctx));
ctx               344 drivers/gpu/drm/drm_context.c 			ctx.handle = i;
ctx               345 drivers/gpu/drm/drm_context.c 			if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx)))
ctx               369 drivers/gpu/drm/drm_context.c 	struct drm_ctx *ctx = data;
ctx               388 drivers/gpu/drm/drm_context.c 	ctx->handle = tmp_handle;
ctx               397 drivers/gpu/drm/drm_context.c 	ctx_entry->handle = ctx->handle;
ctx               419 drivers/gpu/drm/drm_context.c 	struct drm_ctx *ctx = data;
ctx               426 drivers/gpu/drm/drm_context.c 	ctx->flags = 0;
ctx               445 drivers/gpu/drm/drm_context.c 	struct drm_ctx *ctx = data;
ctx               451 drivers/gpu/drm/drm_context.c 	DRM_DEBUG("%d\n", ctx->handle);
ctx               452 drivers/gpu/drm/drm_context.c 	return drm_context_switch(dev, dev->last_context, ctx->handle);
ctx               469 drivers/gpu/drm/drm_context.c 	struct drm_ctx *ctx = data;
ctx               475 drivers/gpu/drm/drm_context.c 	DRM_DEBUG("%d\n", ctx->handle);
ctx               476 drivers/gpu/drm/drm_context.c 	drm_context_switch_complete(dev, file_priv, ctx->handle);
ctx               495 drivers/gpu/drm/drm_context.c 	struct drm_ctx *ctx = data;
ctx               501 drivers/gpu/drm/drm_context.c 	DRM_DEBUG("%d\n", ctx->handle);
ctx               502 drivers/gpu/drm/drm_context.c 	if (ctx->handle != DRM_KERNEL_CONTEXT) {
ctx               504 drivers/gpu/drm/drm_context.c 			dev->driver->context_dtor(dev, ctx->handle);
ctx               505 drivers/gpu/drm/drm_context.c 		drm_legacy_ctxbitmap_free(dev, ctx->handle);
ctx               513 drivers/gpu/drm/drm_context.c 			if (pos->handle == ctx->handle) {
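
The legacy drm_context.c ioctls above manage small integer handles and copy struct drm_ctx records straight to userspace. Note the memset before the copy-out loop: the struct may contain padding, and zeroing the stack copy first avoids leaking kernel memory. The shape of that loop (res is from the surrounding ioctl, n is a hypothetical bound):

	struct drm_ctx ctx;
	int i;

	memset(&ctx, 0, sizeof(ctx));	/* no uninitialized padding to user */
	for (i = 0; i < n; i++) {
		ctx.handle = i;
		if (copy_to_user(&res->contexts[i], &ctx, sizeof(ctx)))
			return -EFAULT;
	}
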
ctx               419 drivers/gpu/drm/drm_crtc.c 					  struct drm_modeset_acquire_ctx *ctx)
ctx               441 drivers/gpu/drm/drm_crtc.c 	ret = crtc->funcs->set_config(set, ctx);
ctx               537 drivers/gpu/drm/drm_crtc.c 	struct drm_modeset_acquire_ctx ctx;
ctx               565 drivers/gpu/drm/drm_crtc.c 	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx,
ctx               709 drivers/gpu/drm/drm_crtc.c 		ret = crtc->funcs->set_config(&set, &ctx);
ctx               711 drivers/gpu/drm/drm_crtc.c 		ret = __drm_mode_set_config_internal(&set, &ctx);
ctx               731 drivers/gpu/drm/drm_crtc.c 	DRM_MODESET_LOCK_ALL_END(ctx, ret);
ctx               501 drivers/gpu/drm/drm_crtc_helper.c 			       struct drm_modeset_acquire_ctx *ctx)
ctx               165 drivers/gpu/drm/drm_damage_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx               176 drivers/gpu/drm/drm_damage_helper.c 	drm_modeset_acquire_init(&ctx,
ctx               184 drivers/gpu/drm/drm_damage_helper.c 	state->acquire_ctx = &ctx;
ctx               239 drivers/gpu/drm/drm_damage_helper.c 		ret = drm_modeset_backoff(&ctx);
ctx               249 drivers/gpu/drm/drm_damage_helper.c 	drm_modeset_drop_locks(&ctx);
ctx               250 drivers/gpu/drm/drm_damage_helper.c 	drm_modeset_acquire_fini(&ctx);
ctx              1036 drivers/gpu/drm/drm_fb_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1045 drivers/gpu/drm/drm_fb_helper.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              1053 drivers/gpu/drm/drm_fb_helper.c 	state->acquire_ctx = &ctx;
ctx              1103 drivers/gpu/drm/drm_fb_helper.c 	drm_modeset_drop_locks(&ctx);
ctx              1104 drivers/gpu/drm/drm_fb_helper.c 	drm_modeset_acquire_fini(&ctx);
ctx              1110 drivers/gpu/drm/drm_fb_helper.c 	drm_modeset_backoff(&ctx);
ctx               834 drivers/gpu/drm/drm_framebuffer.c 	struct drm_modeset_acquire_ctx ctx;
ctx               845 drivers/gpu/drm/drm_framebuffer.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               852 drivers/gpu/drm/drm_framebuffer.c 	state->acquire_ctx = &ctx;
ctx               856 drivers/gpu/drm/drm_framebuffer.c 	ret = drm_modeset_lock_all_ctx(dev, &ctx);
ctx               909 drivers/gpu/drm/drm_framebuffer.c 		drm_modeset_backoff(&ctx);
ctx               916 drivers/gpu/drm/drm_framebuffer.c 	drm_modeset_drop_locks(&ctx);
ctx               917 drivers/gpu/drm/drm_framebuffer.c 	drm_modeset_acquire_fini(&ctx);
ctx               213 drivers/gpu/drm/drm_gem_vram_helper.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               228 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
ctx               255 drivers/gpu/drm/drm_gem_vram_helper.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               271 drivers/gpu/drm/drm_gem_vram_helper.c 	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
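
drm_gem_vram_helper.c uses TTM's operation context rather than a modeset one. The positional initializer { false, false } fills the first two members; spelled out with designated initializers, that is:

	struct ttm_operation_ctx ctx = {
		.interruptible = false,	/* the wait may not be aborted by signals */
		.no_wait_gpu = false,	/* waiting on the GPU is allowed */
	};

	/* validate the buffer object against its requested placement */
	ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
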
ctx               469 drivers/gpu/drm/drm_mode_object.c 	struct drm_modeset_acquire_ctx ctx;
ctx               476 drivers/gpu/drm/drm_mode_object.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               477 drivers/gpu/drm/drm_mode_object.c 	state->acquire_ctx = &ctx;
ctx               498 drivers/gpu/drm/drm_mode_object.c 		drm_modeset_backoff(&ctx);
ctx               504 drivers/gpu/drm/drm_mode_object.c 	drm_modeset_drop_locks(&ctx);
ctx               505 drivers/gpu/drm/drm_mode_object.c 	drm_modeset_acquire_fini(&ctx);
ctx                98 drivers/gpu/drm/drm_modeset_lock.c 	struct drm_modeset_acquire_ctx *ctx;
ctx               101 drivers/gpu/drm/drm_modeset_lock.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL | __GFP_NOFAIL);
ctx               102 drivers/gpu/drm/drm_modeset_lock.c 	if (WARN_ON(!ctx))
ctx               107 drivers/gpu/drm/drm_modeset_lock.c 	drm_modeset_acquire_init(ctx, 0);
ctx               110 drivers/gpu/drm/drm_modeset_lock.c 	ret = drm_modeset_lock_all_ctx(dev, ctx);
ctx               113 drivers/gpu/drm/drm_modeset_lock.c 			drm_modeset_backoff(ctx);
ctx               117 drivers/gpu/drm/drm_modeset_lock.c 		drm_modeset_acquire_fini(ctx);
ctx               118 drivers/gpu/drm/drm_modeset_lock.c 		kfree(ctx);
ctx               121 drivers/gpu/drm/drm_modeset_lock.c 	ww_acquire_done(&ctx->ww_ctx);
ctx               129 drivers/gpu/drm/drm_modeset_lock.c 	config->acquire_ctx = ctx;
ctx               152 drivers/gpu/drm/drm_modeset_lock.c 	struct drm_modeset_acquire_ctx *ctx = config->acquire_ctx;
ctx               154 drivers/gpu/drm/drm_modeset_lock.c 	if (WARN_ON(!ctx))
ctx               158 drivers/gpu/drm/drm_modeset_lock.c 	drm_modeset_drop_locks(ctx);
ctx               159 drivers/gpu/drm/drm_modeset_lock.c 	drm_modeset_acquire_fini(ctx);
ctx               161 drivers/gpu/drm/drm_modeset_lock.c 	kfree(ctx);
ctx               198 drivers/gpu/drm/drm_modeset_lock.c void drm_modeset_acquire_init(struct drm_modeset_acquire_ctx *ctx,
ctx               201 drivers/gpu/drm/drm_modeset_lock.c 	memset(ctx, 0, sizeof(*ctx));
ctx               202 drivers/gpu/drm/drm_modeset_lock.c 	ww_acquire_init(&ctx->ww_ctx, &crtc_ww_class);
ctx               203 drivers/gpu/drm/drm_modeset_lock.c 	INIT_LIST_HEAD(&ctx->locked);
ctx               206 drivers/gpu/drm/drm_modeset_lock.c 		ctx->interruptible = true;
ctx               214 drivers/gpu/drm/drm_modeset_lock.c void drm_modeset_acquire_fini(struct drm_modeset_acquire_ctx *ctx)
ctx               216 drivers/gpu/drm/drm_modeset_lock.c 	ww_acquire_fini(&ctx->ww_ctx);
ctx               226 drivers/gpu/drm/drm_modeset_lock.c void drm_modeset_drop_locks(struct drm_modeset_acquire_ctx *ctx)
ctx               228 drivers/gpu/drm/drm_modeset_lock.c 	WARN_ON(ctx->contended);
ctx               229 drivers/gpu/drm/drm_modeset_lock.c 	while (!list_empty(&ctx->locked)) {
ctx               232 drivers/gpu/drm/drm_modeset_lock.c 		lock = list_first_entry(&ctx->locked,
ctx               241 drivers/gpu/drm/drm_modeset_lock.c 		struct drm_modeset_acquire_ctx *ctx,
ctx               246 drivers/gpu/drm/drm_modeset_lock.c 	WARN_ON(ctx->contended);
ctx               248 drivers/gpu/drm/drm_modeset_lock.c 	if (ctx->trylock_only) {
ctx               249 drivers/gpu/drm/drm_modeset_lock.c 		lockdep_assert_held(&ctx->ww_ctx);
ctx               256 drivers/gpu/drm/drm_modeset_lock.c 		ret = ww_mutex_lock_slow_interruptible(&lock->mutex, &ctx->ww_ctx);
ctx               258 drivers/gpu/drm/drm_modeset_lock.c 		ret = ww_mutex_lock_interruptible(&lock->mutex, &ctx->ww_ctx);
ctx               260 drivers/gpu/drm/drm_modeset_lock.c 		ww_mutex_lock_slow(&lock->mutex, &ctx->ww_ctx);
ctx               263 drivers/gpu/drm/drm_modeset_lock.c 		ret = ww_mutex_lock(&lock->mutex, &ctx->ww_ctx);
ctx               267 drivers/gpu/drm/drm_modeset_lock.c 		list_add(&lock->head, &ctx->locked);
ctx               276 drivers/gpu/drm/drm_modeset_lock.c 		ctx->contended = lock;
ctx               294 drivers/gpu/drm/drm_modeset_lock.c int drm_modeset_backoff(struct drm_modeset_acquire_ctx *ctx)
ctx               296 drivers/gpu/drm/drm_modeset_lock.c 	struct drm_modeset_lock *contended = ctx->contended;
ctx               298 drivers/gpu/drm/drm_modeset_lock.c 	ctx->contended = NULL;
ctx               303 drivers/gpu/drm/drm_modeset_lock.c 	drm_modeset_drop_locks(ctx);
ctx               305 drivers/gpu/drm/drm_modeset_lock.c 	return modeset_lock(contended, ctx, ctx->interruptible, true);
ctx               339 drivers/gpu/drm/drm_modeset_lock.c 		struct drm_modeset_acquire_ctx *ctx)
ctx               341 drivers/gpu/drm/drm_modeset_lock.c 	if (ctx)
ctx               342 drivers/gpu/drm/drm_modeset_lock.c 		return modeset_lock(lock, ctx, ctx->interruptible, false);
ctx               396 drivers/gpu/drm/drm_modeset_lock.c 			     struct drm_modeset_acquire_ctx *ctx)
ctx               403 drivers/gpu/drm/drm_modeset_lock.c 	ret = drm_modeset_lock(&dev->mode_config.connection_mutex, ctx);
ctx               408 drivers/gpu/drm/drm_modeset_lock.c 		ret = drm_modeset_lock(&crtc->mutex, ctx);
ctx               414 drivers/gpu/drm/drm_modeset_lock.c 		ret = drm_modeset_lock(&plane->mutex, ctx);
ctx               420 drivers/gpu/drm/drm_modeset_lock.c 		ret = drm_modeset_lock(&privobj->lock, ctx);
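
The drm_modeset_lock.c lines above are the core of the scheme: every drm_modeset_lock wraps a ww_mutex in crtc_ww_class, and the acquire context records both what is held and, on contention, which lock to re-take. Stripped of the trylock and interruptible variants in the listing, modeset_lock() reduces to roughly:

	ret = ww_mutex_lock(&lock->mutex, &ctx->ww_ctx);
	if (!ret) {
		list_add(&lock->head, &ctx->locked);	/* dropped in bulk later */
	} else if (ret == -EDEADLK) {
		ctx->contended = lock;	/* drm_modeset_backoff() drops everything
					 * in ctx->locked, then re-takes this one
					 * with the ww_mutex slow path */
	}
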
ctx               681 drivers/gpu/drm/drm_plane.c 			       struct drm_modeset_acquire_ctx *ctx)
ctx               690 drivers/gpu/drm/drm_plane.c 		ret = plane->funcs->disable_plane(plane, ctx);
ctx               709 drivers/gpu/drm/drm_plane.c 					 src_x, src_y, src_w, src_h, ctx);
ctx               733 drivers/gpu/drm/drm_plane.c 			     struct drm_modeset_acquire_ctx *ctx)
ctx               741 drivers/gpu/drm/drm_plane.c 		return plane->funcs->disable_plane(plane, ctx);
ctx               758 drivers/gpu/drm/drm_plane.c 					  src_x, src_y, src_w, src_h, ctx);
ctx               770 drivers/gpu/drm/drm_plane.c 	struct drm_modeset_acquire_ctx ctx;
ctx               773 drivers/gpu/drm/drm_plane.c 	DRM_MODESET_LOCK_ALL_BEGIN(plane->dev, ctx,
ctx               779 drivers/gpu/drm/drm_plane.c 					src_x, src_y, src_w, src_h, &ctx);
ctx               783 drivers/gpu/drm/drm_plane.c 					  src_x, src_y, src_w, src_h, &ctx);
ctx               785 drivers/gpu/drm/drm_plane.c 	DRM_MODESET_LOCK_ALL_END(ctx, ret);
ctx               845 drivers/gpu/drm/drm_plane.c 				     struct drm_modeset_acquire_ctx *ctx)
ctx               911 drivers/gpu/drm/drm_plane.c 					0, 0, src_w, src_h, ctx);
ctx               915 drivers/gpu/drm/drm_plane.c 					  0, 0, src_w, src_h, ctx);
ctx               934 drivers/gpu/drm/drm_plane.c 	struct drm_modeset_acquire_ctx ctx;
ctx               949 drivers/gpu/drm/drm_plane.c 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
ctx               951 drivers/gpu/drm/drm_plane.c 	ret = drm_modeset_lock(&crtc->mutex, &ctx);
ctx               959 drivers/gpu/drm/drm_plane.c 		ret = drm_modeset_lock(&crtc->cursor->mutex, &ctx);
ctx               968 drivers/gpu/drm/drm_plane.c 		ret = drm_mode_cursor_universal(crtc, req, file_priv, &ctx);
ctx               996 drivers/gpu/drm/drm_plane.c 		ret = drm_modeset_backoff(&ctx);
ctx              1001 drivers/gpu/drm/drm_plane.c 	drm_modeset_drop_locks(&ctx);
ctx              1002 drivers/gpu/drm/drm_plane.c 	drm_modeset_acquire_fini(&ctx);
ctx              1043 drivers/gpu/drm/drm_plane.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1112 drivers/gpu/drm/drm_plane.c 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
ctx              1114 drivers/gpu/drm/drm_plane.c 	ret = drm_modeset_lock(&crtc->mutex, &ctx);
ctx              1117 drivers/gpu/drm/drm_plane.c 	ret = drm_modeset_lock(&plane->mutex, &ctx);
ctx              1187 drivers/gpu/drm/drm_plane.c 						    &ctx);
ctx              1190 drivers/gpu/drm/drm_plane.c 					     &ctx);
ctx              1211 drivers/gpu/drm/drm_plane.c 		ret = drm_modeset_backoff(&ctx);
ctx              1216 drivers/gpu/drm/drm_plane.c 	drm_modeset_drop_locks(&ctx);
ctx              1217 drivers/gpu/drm/drm_plane.c 	drm_modeset_acquire_fini(&ctx);
ctx               155 drivers/gpu/drm/drm_plane_helper.c 				     struct drm_modeset_acquire_ctx *ctx)
ctx               195 drivers/gpu/drm/drm_plane_helper.c 		return plane->funcs->disable_plane(plane, ctx);
ctx               217 drivers/gpu/drm/drm_plane_helper.c 	ret = crtc->funcs->set_config(&set, ctx);
ctx               224 drivers/gpu/drm/drm_plane_helper.c 				      struct drm_modeset_acquire_ctx *ctx)
ctx               265 drivers/gpu/drm/drm_probe_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx               268 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               271 drivers/gpu/drm/drm_probe_helper.c 	ret = drm_modeset_lock(&connector->dev->mode_config.connection_mutex, &ctx);
ctx               274 drivers/gpu/drm/drm_probe_helper.c 			ret = funcs->detect_ctx(connector, &ctx, force);
ctx               282 drivers/gpu/drm/drm_probe_helper.c 		drm_modeset_backoff(&ctx);
ctx               289 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_drop_locks(&ctx);
ctx               290 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_acquire_fini(&ctx);
ctx               307 drivers/gpu/drm/drm_probe_helper.c 			struct drm_modeset_acquire_ctx *ctx,
ctx               314 drivers/gpu/drm/drm_probe_helper.c 	if (!ctx)
ctx               317 drivers/gpu/drm/drm_probe_helper.c 	ret = drm_modeset_lock(&dev->mode_config.connection_mutex, ctx);
ctx               322 drivers/gpu/drm/drm_probe_helper.c 		return funcs->detect_ctx(connector, ctx, force);
ctx               399 drivers/gpu/drm/drm_probe_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx               403 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               409 drivers/gpu/drm/drm_probe_helper.c 	ret = drm_modeset_lock(&dev->mode_config.connection_mutex, &ctx);
ctx               411 drivers/gpu/drm/drm_probe_helper.c 		drm_modeset_backoff(&ctx);
ctx               431 drivers/gpu/drm/drm_probe_helper.c 		ret = drm_helper_probe_detect(connector, &ctx, true);
ctx               434 drivers/gpu/drm/drm_probe_helper.c 			drm_modeset_backoff(&ctx);
ctx               527 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_drop_locks(&ctx);
ctx               528 drivers/gpu/drm/drm_probe_helper.c 	drm_modeset_acquire_fini(&ctx);
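
drm_probe_helper.c threads the acquire context into connector probing: drivers implementing ->detect_ctx() receive it so they can take further modeset locks (for instance a CRTC lock for load detection) and simply return -EDEADLK, which the helper turns into a backoff-and-retry. A hypothetical driver hook of that shape:

	static int my_detect_ctx(struct drm_connector *connector,
				 struct drm_modeset_acquire_ctx *ctx,
				 bool force)
	{
		int ret;

		ret = drm_modeset_lock(&my_crtc->mutex, ctx);	/* my_crtc: assumed */
		if (ret)
			return ret;	/* -EDEADLK propagates to the helper */

		return connector_status_connected;
	}
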
ctx                74 drivers/gpu/drm/drm_self_refresh_helper.c 	struct drm_modeset_acquire_ctx ctx;
ctx                81 drivers/gpu/drm/drm_self_refresh_helper.c 	drm_modeset_acquire_init(&ctx, 0);
ctx                90 drivers/gpu/drm/drm_self_refresh_helper.c 	state->acquire_ctx = &ctx;
ctx               120 drivers/gpu/drm/drm_self_refresh_helper.c 		ret = drm_modeset_backoff(&ctx);
ctx               128 drivers/gpu/drm/drm_self_refresh_helper.c 	drm_modeset_drop_locks(&ctx);
ctx               129 drivers/gpu/drm/drm_self_refresh_helper.c 	drm_modeset_acquire_fini(&ctx);
ctx                52 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	struct etnaviv_file_private *ctx;
ctx                55 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                56 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	if (!ctx)
ctx                59 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	ctx->mmu = etnaviv_iommu_context_init(priv->mmu_global,
ctx                61 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	if (!ctx->mmu) {
ctx                72 drivers/gpu/drm/etnaviv/etnaviv_drv.c 			drm_sched_entity_init(&ctx->sched_entity[i],
ctx                77 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	file->driver_priv = ctx;
ctx                82 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	kfree(ctx);
ctx                89 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	struct etnaviv_file_private *ctx = file->driver_priv;
ctx                96 drivers/gpu/drm/etnaviv/etnaviv_drv.c 			drm_sched_entity_destroy(&ctx->sched_entity[i]);
ctx                99 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	etnaviv_iommu_context_put(ctx->mmu);
ctx               101 drivers/gpu/drm/etnaviv/etnaviv_drv.c 	kfree(ctx);
ctx                95 drivers/gpu/drm/etnaviv/etnaviv_gem.h 	struct etnaviv_file_private *ctx;
ctx               429 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	struct etnaviv_file_private *ctx = file->driver_priv;
ctx               536 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	submit->ctx = file->driver_priv;
ctx               537 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	etnaviv_iommu_context_get(submit->ctx->mmu);
ctx               538 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	submit->mmu_context = submit->ctx->mmu;
ctx               584 drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c 	ret = etnaviv_sched_push_job(&ctx->sched_entity[args->pipe], submit);
ctx               318 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	struct etnaviv_iommu_context *ctx;
ctx               322 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		ctx = etnaviv_iommuv1_context_alloc(global);
ctx               324 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		ctx = etnaviv_iommuv2_context_alloc(global);
ctx               326 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	if (!ctx)
ctx               329 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	ret = etnaviv_cmdbuf_suballoc_map(suballoc, ctx, &ctx->cmdbuf_mapping,
ctx               335 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	    ctx->cmdbuf_mapping.iova > 0x80000000) {
ctx               341 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	return ctx;
ctx               344 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	etnaviv_cmdbuf_suballoc_unmap(ctx, &ctx->cmdbuf_mapping);
ctx               346 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	global->ops->free(ctx);
ctx                95 drivers/gpu/drm/etnaviv/etnaviv_mmu.h int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *ctx,
ctx                99 drivers/gpu/drm/etnaviv/etnaviv_mmu.h void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *ctx,
ctx               102 drivers/gpu/drm/etnaviv/etnaviv_mmu.h size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *ctx);
ctx               103 drivers/gpu/drm/etnaviv/etnaviv_mmu.h void etnaviv_iommu_dump(struct etnaviv_iommu_context *ctx, void *buf);
ctx               108 drivers/gpu/drm/etnaviv/etnaviv_mmu.h static inline void etnaviv_iommu_context_get(struct etnaviv_iommu_context *ctx)
ctx               110 drivers/gpu/drm/etnaviv/etnaviv_mmu.h 	kref_get(&ctx->refcount);
ctx               112 drivers/gpu/drm/etnaviv/etnaviv_mmu.h void etnaviv_iommu_context_put(struct etnaviv_iommu_context *ctx);
ctx               114 drivers/gpu/drm/etnaviv/etnaviv_mmu.h 			   struct etnaviv_iommu_context *ctx);
ctx               156 drivers/gpu/drm/etnaviv/etnaviv_sched.c 				 submit->ctx);
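
In etnaviv, ctx is a different animal: a per-open-file driver context allocated in ->open(), stored in file->driver_priv, and torn down in ->postclose(), with the MMU context refcounted by a plain kref (the _get helper in the header above is just kref_get). The skeleton of that open path, with the etnaviv-specific setup elided and the private type hypothetical:

	static int my_open(struct drm_device *dev, struct drm_file *file)
	{
		struct my_file_private *ctx;	/* hypothetical private type */

		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;

		/* per-file resources (MMU context, sched entities, ...) go here */

		file->driver_priv = ctx;
		return 0;
	}
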
ctx                95 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static inline void decon_set_bits(struct decon_context *ctx, u32 reg, u32 mask,
ctx                98 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	val = (val & mask) | (readl(ctx->addr + reg) & ~mask);
ctx                99 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + reg);
ctx               104 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               113 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDINTCON0);
ctx               115 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	enable_irq(ctx->irq);
ctx               116 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & I80_HW_TRG))
ctx               117 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		enable_irq(ctx->te_irq);
ctx               124 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               126 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & I80_HW_TRG))
ctx               127 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		disable_irq_nosync(ctx->te_irq);
ctx               128 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	disable_irq_nosync(ctx->irq);
ctx               130 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(0, ctx->addr + DECON_VIDINTCON0);
ctx               134 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static u32 decon_get_frame_count(struct decon_context *ctx, bool end)
ctx               142 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	frm = readl(ctx->addr + DECON_CRFMID);
ctx               144 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		status = readl(ctx->addr + DECON_VIDCON1);
ctx               146 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		frm = readl(ctx->addr + DECON_CRFMID);
ctx               157 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		if (!(ctx->crtc->i80_mode))
ctx               175 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_setup_trigger(struct decon_context *ctx)
ctx               177 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!ctx->crtc->i80_mode && !(ctx->out_type & I80_HW_TRG))
ctx               180 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & I80_HW_TRG)) {
ctx               183 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		       ctx->addr + DECON_TRIGCON);
ctx               188 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	       | TRIGCON_HWTRIGEN, ctx->addr + DECON_TRIGCON);
ctx               190 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (regmap_update_bits(ctx->sysreg, DSD_CFG_MUX,
ctx               192 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		DRM_DEV_ERROR(ctx->dev, "Cannot update sysreg.\n");
ctx               197 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               202 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (ctx->out_type & IFTYPE_HDMI) {
ctx               211 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_setup_trigger(ctx);
ctx               223 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDOUTCON0);
ctx               231 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDTCON2);
ctx               240 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDTCON00);
ctx               244 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDTCON01);
ctx               250 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDTCON10);
ctx               254 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDTCON11);
ctx               258 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_VIDCON0, VIDCON0_ENVID | VIDCON0_ENVID_F, ~0);
ctx               260 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_UPDATE, STANDALONE_UPDATE_F, ~0);
ctx               263 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_win_set_bldeq(struct decon_context *ctx, unsigned int win,
ctx               286 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_BLENDERQx(win), mask, val);
ctx               289 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_win_set_bldmod(struct decon_context *ctx, unsigned int win,
ctx               306 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_WINCONx(win), WINCONx_BLEND_MODE_MASK, val);
ctx               312 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_set_bits(ctx, DECON_VIDOSDxC(win),
ctx               314 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_set_bits(ctx, DECON_BLENDCON, BLEND_NEW, BLEND_NEW);
ctx               318 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_win_set_pixfmt(struct decon_context *ctx, unsigned int win,
ctx               321 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct exynos_drm_plane plane = ctx->planes[win];
ctx               333 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	val = readl(ctx->addr + DECON_WINCONx(win));
ctx               360 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "cpp = %u\n", fb->format->cpp[0]);
ctx               374 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_WINCONx(win), ~WINCONx_BLEND_MODE_MASK, val);
ctx               377 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_win_set_bldmod(ctx, win, alpha, pixel_alpha);
ctx               378 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_win_set_bldeq(ctx, win, alpha, pixel_alpha);
ctx               382 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_shadow_protect(struct decon_context *ctx, bool protect)
ctx               384 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_SHADOWCON, SHADOWCON_PROTECT_MASK,
ctx               390 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               392 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_shadow_protect(ctx, true);
ctx               404 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               415 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDOSDxA(win));
ctx               419 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDOSDxB(win));
ctx               422 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDOSDxA(win));
ctx               426 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDOSDxB(win));
ctx               431 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDOSDxC(win));
ctx               435 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDOSDxD(win));
ctx               437 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(dma_addr, ctx->addr + DECON_VIDW0xADD0B0(win));
ctx               440 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDW0xADD1B0(win));
ctx               442 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & IFTYPE_HDMI))
ctx               448 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(val, ctx->addr + DECON_VIDW0xADD2(win));
ctx               450 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_win_set_pixfmt(ctx, win, fb);
ctx               453 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_WINCONx(win), WINCONx_ENWIN_F, ~0);
ctx               459 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               462 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_WINCONx(win), WINCONx_ENWIN_F, 0);
ctx               467 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               470 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_lock_irqsave(&ctx->vblank_lock, flags);
ctx               472 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_shadow_protect(ctx, false);
ctx               474 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_UPDATE, STANDALONE_UPDATE_F, ~0);
ctx               476 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->frame_id = decon_get_frame_count(ctx, true);
ctx               480 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_unlock_irqrestore(&ctx->vblank_lock, flags);
ctx               483 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_swreset(struct decon_context *ctx)
ctx               489 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(0, ctx->addr + DECON_VIDCON0);
ctx               490 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	readl_poll_timeout(ctx->addr + DECON_VIDCON0, val,
ctx               493 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(VIDCON0_SWRESET, ctx->addr + DECON_VIDCON0);
ctx               494 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ret = readl_poll_timeout(ctx->addr + DECON_VIDCON0, val,
ctx               499 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_lock_irqsave(&ctx->vblank_lock, flags);
ctx               500 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->frame_id = 0;
ctx               501 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_unlock_irqrestore(&ctx->vblank_lock, flags);
ctx               503 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & IFTYPE_HDMI))
ctx               506 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(VIDCON0_CLKVALUP | VIDCON0_VLCKFREE, ctx->addr + DECON_VIDCON0);
ctx               507 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_CMU,
ctx               509 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	writel(VIDCON1_VCLK_RUN_VDEN_DISABLE, ctx->addr + DECON_VIDCON1);
ctx               511 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	       ctx->addr + DECON_CRCCTRL);
ctx               516 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               518 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	pm_runtime_get_sync(ctx->dev);
ctx               522 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_swreset(ctx);
ctx               524 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_commit(ctx->crtc);
ctx               529 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               532 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!(ctx->out_type & I80_HW_TRG))
ctx               533 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		synchronize_irq(ctx->te_irq);
ctx               534 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	synchronize_irq(ctx->irq);
ctx               541 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	for (i = ctx->first_win; i < WINDOWS_NR; i++)
ctx               542 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_disable_plane(crtc, &ctx->planes[i]);
ctx               544 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_swreset(ctx);
ctx               548 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	pm_runtime_put_sync(ctx->dev);
ctx               553 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_id;
ctx               555 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_TRIGCON, TRIGCON_SWTRIGCMD, ~0);
ctx               562 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               566 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ret = clk_prepare_enable(ctx->clks[i]);
ctx               571 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_shadow_protect(ctx, true);
ctx               573 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_set_bits(ctx, DECON_WINCONx(win), WINCONx_ENWIN_F, 0);
ctx               574 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_shadow_protect(ctx, false);
ctx               576 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_set_bits(ctx, DECON_UPDATE, STANDALONE_UPDATE_F, ~0);
ctx               583 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		clk_disable_unprepare(ctx->clks[i]);
ctx               589 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               591 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->irq = crtc->i80_mode ? ctx->irq_lcd_sys : ctx->irq_vsync;
ctx               593 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (ctx->irq)
ctx               596 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	dev_info(ctx->dev, "Sink requires %s mode, but appropriate interrupt is not provided.\n",
ctx               616 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               623 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->drm_dev = drm_dev;
ctx               625 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	for (win = ctx->first_win; win < WINDOWS_NR; win++) {
ctx               626 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->configs[win].pixel_formats = decon_formats;
ctx               627 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->configs[win].num_pixel_formats = ARRAY_SIZE(decon_formats);
ctx               628 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->configs[win].zpos = win - ctx->first_win;
ctx               629 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->configs[win].type = decon_win_types[win];
ctx               630 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->configs[win].capabilities = capabilities[win];
ctx               632 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ret = exynos_plane_init(drm_dev, &ctx->planes[win], win,
ctx               633 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 					&ctx->configs[win]);
ctx               638 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	exynos_plane = &ctx->planes[PRIMARY_WIN];
ctx               639 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	out_type = (ctx->out_type & IFTYPE_HDMI) ? EXYNOS_DISPLAY_TYPE_HDMI
ctx               641 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
ctx               642 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 			out_type, &decon_crtc_ops, ctx);
ctx               643 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (IS_ERR(ctx->crtc))
ctx               644 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		return PTR_ERR(ctx->crtc);
ctx               646 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_clear_channels(ctx->crtc);
ctx               648 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	return exynos_drm_register_dma(drm_dev, dev, &ctx->dma_priv);
ctx               653 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               655 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	decon_disable(ctx->crtc);
ctx               658 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	exynos_drm_unregister_dma(ctx->drm_dev, ctx->dev, &ctx->dma_priv);
ctx               666 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static void decon_handle_vblank(struct decon_context *ctx)
ctx               670 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_lock(&ctx->vblank_lock);
ctx               672 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	frm = decon_get_frame_count(ctx, true);
ctx               674 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (frm != ctx->frame_id) {
ctx               676 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		if ((s32)(frm - ctx->frame_id) > 0)
ctx               677 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 			drm_crtc_handle_vblank(&ctx->crtc->base);
ctx               678 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->frame_id = frm;
ctx               681 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_unlock(&ctx->vblank_lock);
ctx               686 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_id;
ctx               689 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	val = readl(ctx->addr + DECON_VIDINTCON1);
ctx               693 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		writel(val, ctx->addr + DECON_VIDINTCON1);
ctx               694 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		if (ctx->out_type & IFTYPE_HDMI) {
ctx               695 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 			val = readl(ctx->addr + DECON_VIDOUTCON0);
ctx               701 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		decon_handle_vblank(ctx);
ctx               710 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               714 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		clk_disable_unprepare(ctx->clks[i]);
ctx               721 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               725 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ret = clk_prepare_enable(ctx->clks[i]);
ctx               734 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		clk_disable_unprepare(ctx->clks[i]);
ctx               760 drivers/gpu/drm/exynos/exynos5433_drm_decon.c static int decon_conf_irq(struct decon_context *ctx, const char *name,
ctx               763 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct platform_device *pdev = to_platform_device(ctx->dev);
ctx               774 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 			dev_err(ctx->dev, "IRQ %s get failed, %d\n", name, irq);
ctx               779 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ret = devm_request_irq(ctx->dev, irq, handler, flags, "drm_decon", ctx);
ctx               781 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		dev_err(ctx->dev, "IRQ %s request failed\n", name);
ctx               791 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	struct decon_context *ctx;
ctx               796 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               797 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (!ctx)
ctx               800 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->dev = dev;
ctx               801 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->out_type = (unsigned long)of_device_get_match_data(dev);
ctx               802 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	spin_lock_init(&ctx->vblank_lock);
ctx               804 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (ctx->out_type & IFTYPE_HDMI)
ctx               805 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->first_win = 1;
ctx               810 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		clk = devm_clk_get(ctx->dev, decon_clks_name[i]);
ctx               814 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->clks[i] = clk;
ctx               818 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->addr = devm_ioremap_resource(dev, res);
ctx               819 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (IS_ERR(ctx->addr)) {
ctx               821 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		return PTR_ERR(ctx->addr);
ctx               824 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ret = decon_conf_irq(ctx, "vsync", decon_irq_handler, 0);
ctx               827 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->irq_vsync = ret;
ctx               829 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ret = decon_conf_irq(ctx, "lcd_sys", decon_irq_handler, 0);
ctx               832 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ctx->irq_lcd_sys = ret;
ctx               834 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	ret = decon_conf_irq(ctx, "te", decon_te_irq_handler,
ctx               839 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->te_irq = ret;
ctx               840 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->out_type &= ~I80_HW_TRG;
ctx               843 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	if (ctx->out_type & I80_HW_TRG) {
ctx               844 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		ctx->sysreg = syscon_regmap_lookup_by_phandle(dev->of_node,
ctx               846 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 		if (IS_ERR(ctx->sysreg)) {
ctx               848 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 			return PTR_ERR(ctx->sysreg);
ctx               852 drivers/gpu/drm/exynos/exynos5433_drm_decon.c 	platform_set_drvdata(pdev, ctx);
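
Throughout the exynos5433 DECON driver, ctx is the device context and register access funnels through decon_set_bits(), a masked read-modify-write: bits inside mask take their value from val, everything else is preserved. Its logic, restated standalone:

	/* replace only the bits in 'mask' with the corresponding bits of 'val' */
	static void rmw_bits(void __iomem *addr, u32 mask, u32 val)
	{
		u32 cur = readl(addr);

		writel((val & mask) | (cur & ~mask), addr);
	}
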
ctx                86 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx                88 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx                91 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	atomic_set(&ctx->wait_vsync_event, 1);
ctx                97 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!wait_event_timeout(ctx->wait_vsync_queue,
ctx                98 drivers/gpu/drm/exynos/exynos7_drm_decon.c 				!atomic_read(&ctx->wait_vsync_event),
ctx               100 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "vblank wait timed out.\n");
ctx               105 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               110 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		u32 val = readl(ctx->regs + WINCON(win));
ctx               114 drivers/gpu/drm/exynos/exynos7_drm_decon.c 			writel(val, ctx->regs + WINCON(win));
ctx               121 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_wait_for_vblank(ctx->crtc);
ctx               124 drivers/gpu/drm/exynos/exynos7_drm_decon.c static int decon_ctx_initialize(struct decon_context *ctx,
ctx               127 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->drm_dev = drm_dev;
ctx               129 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_clear_channels(ctx->crtc);
ctx               131 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	return exynos_drm_register_dma(drm_dev, ctx->dev, &ctx->dma_priv);
ctx               134 drivers/gpu/drm/exynos/exynos7_drm_decon.c static void decon_ctx_remove(struct decon_context *ctx)
ctx               137 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	exynos_drm_unregister_dma(ctx->drm_dev, ctx->dev, &ctx->dma_priv);
ctx               140 drivers/gpu/drm/exynos/exynos7_drm_decon.c static u32 decon_calc_clkdiv(struct decon_context *ctx,
ctx               147 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clkdiv = DIV_ROUND_UP(clk_get_rate(ctx->vclk), ideal_clk);
ctx               154 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               158 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               165 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->i80_if) {
ctx               173 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDTCON0);
ctx               176 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDTCON1);
ctx               185 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDTCON2);
ctx               188 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDTCON3);
ctx               194 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDTCON4);
ctx               196 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(mode->vdisplay - 1, ctx->regs + LINECNT_OP_THRESHOLD);
ctx               203 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDCON0);
ctx               205 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clkdiv = decon_calc_clkdiv(ctx, mode);
ctx               208 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VCLKCON1);
ctx               209 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VCLKCON2);
ctx               212 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + DECON_UPDATE);
ctx               214 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + DECON_UPDATE);
ctx               219 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               222 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               225 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!test_and_set_bit(0, &ctx->irq_flags)) {
ctx               226 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		val = readl(ctx->regs + VIDINTCON0);
ctx               230 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		if (!ctx->i80_if) {
ctx               236 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDINTCON0);
ctx               244 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               247 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               250 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (test_and_clear_bit(0, &ctx->irq_flags)) {
ctx               251 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		val = readl(ctx->regs + VIDINTCON0);
ctx               254 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		if (!ctx->i80_if)
ctx               257 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(val, ctx->regs + VIDINTCON0);
ctx               261 drivers/gpu/drm/exynos/exynos7_drm_decon.c static void decon_win_set_pixfmt(struct decon_context *ctx, unsigned int win,
ctx               267 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + WINCON(win));
ctx               314 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "cpp = %d\n", fb->format->cpp[0]);
ctx               330 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + WINCON(win));
ctx               333 drivers/gpu/drm/exynos/exynos7_drm_decon.c static void decon_win_set_colkey(struct decon_context *ctx, unsigned int win)
ctx               342 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(keycon0, ctx->regs + WKEYCON0_BASE(win));
ctx               343 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(keycon1, ctx->regs + WKEYCON1_BASE(win));
ctx               352 drivers/gpu/drm/exynos/exynos7_drm_decon.c static void decon_shadow_protect_win(struct decon_context *ctx,
ctx               359 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + SHADOWCON);
ctx               364 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + SHADOWCON);
ctx               369 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               372 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               376 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_shadow_protect_win(ctx, i, true);
ctx               384 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               394 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               409 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDW_BUF_START(win));
ctx               414 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(fb->width + padding, ctx->regs + VIDW_WHOLE_X(win));
ctx               415 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(fb->height, ctx->regs + VIDW_WHOLE_Y(win));
ctx               418 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(state->src.x, ctx->regs + VIDW_OFFSET_X(win));
ctx               419 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(state->src.y, ctx->regs + VIDW_OFFSET_Y(win));
ctx               421 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "start addr = 0x%lx\n",
ctx               423 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "ovl_width = %d, ovl_height = %d\n",
ctx               428 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDOSD_A(win));
ctx               439 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDOSD_B(win));
ctx               441 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "osd pos: tx = %d, ty = %d, bx = %d, by = %d\n",
ctx               449 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(alpha, ctx->regs + VIDOSD_C(win));
ctx               455 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(alpha, ctx->regs + VIDOSD_D(win));
ctx               457 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_win_set_pixfmt(ctx, win, fb);
ctx               461 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_win_set_colkey(ctx, win);
ctx               464 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + WINCON(win));
ctx               467 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + WINCON(win));
ctx               470 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_shadow_protect_win(ctx, win, false);
ctx               472 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + DECON_UPDATE);
ctx               474 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + DECON_UPDATE);
ctx               480 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               484 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               488 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_shadow_protect_win(ctx, win, true);
ctx               491 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + WINCON(win));
ctx               493 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + WINCON(win));
ctx               495 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + DECON_UPDATE);
ctx               497 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + DECON_UPDATE);
ctx               502 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               505 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               509 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_shadow_protect_win(ctx, i, false);
ctx               513 drivers/gpu/drm/exynos/exynos7_drm_decon.c static void decon_init(struct decon_context *ctx)
ctx               517 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(VIDCON0_SWRESET, ctx->regs + VIDCON0);
ctx               520 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->i80_if)
ctx               522 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(val, ctx->regs + VIDOUTCON0);
ctx               524 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	writel(VCLKCON0_CLKVALUP | VCLKCON0_VCLKFREE, ctx->regs + VCLKCON0);
ctx               526 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->i80_if)
ctx               527 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(VIDCON1_VCLK_HOLD, ctx->regs + VIDCON1(0));
ctx               532 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               534 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->suspended)
ctx               537 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	pm_runtime_get_sync(ctx->dev);
ctx               539 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_init(ctx);
ctx               542 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (test_and_clear_bit(0, &ctx->irq_flags))
ctx               543 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_enable_vblank(ctx->crtc);
ctx               545 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_commit(ctx->crtc);
ctx               547 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->suspended = false;
ctx               552 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = crtc->ctx;
ctx               555 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->suspended)
ctx               564 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_disable_plane(crtc, &ctx->planes[i]);
ctx               566 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	pm_runtime_put_sync(ctx->dev);
ctx               568 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->suspended = true;
ctx               585 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = (struct decon_context *)dev_id;
ctx               588 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	val = readl(ctx->regs + VIDINTCON1);
ctx               590 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clear_bit = ctx->i80_if ? VIDINTCON1_INT_I80 : VIDINTCON1_INT_FRAME;
ctx               592 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		writel(clear_bit, ctx->regs + VIDINTCON1);
ctx               595 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->drm_dev)
ctx               598 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->i80_if) {
ctx               599 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		drm_crtc_handle_vblank(&ctx->crtc->base);
ctx               602 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		if (atomic_read(&ctx->wait_vsync_event)) {
ctx               603 drivers/gpu/drm/exynos/exynos7_drm_decon.c 			atomic_set(&ctx->wait_vsync_event, 0);
ctx               604 drivers/gpu/drm/exynos/exynos7_drm_decon.c 			wake_up(&ctx->wait_vsync_queue);
ctx               613 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               619 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ret = decon_ctx_initialize(ctx, drm_dev);
ctx               626 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ctx->configs[i].pixel_formats = decon_formats;
ctx               627 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ctx->configs[i].num_pixel_formats = ARRAY_SIZE(decon_formats);
ctx               628 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ctx->configs[i].zpos = i;
ctx               629 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ctx->configs[i].type = decon_win_types[i];
ctx               631 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
ctx               632 drivers/gpu/drm/exynos/exynos7_drm_decon.c 					&ctx->configs[i]);
ctx               637 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	exynos_plane = &ctx->planes[DEFAULT_WIN];
ctx               638 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
ctx               639 drivers/gpu/drm/exynos/exynos7_drm_decon.c 			EXYNOS_DISPLAY_TYPE_LCD, &decon_crtc_ops, ctx);
ctx               640 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->crtc)) {
ctx               641 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		decon_ctx_remove(ctx);
ctx               642 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		return PTR_ERR(ctx->crtc);
ctx               645 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->encoder)
ctx               646 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		exynos_dpi_bind(drm_dev, ctx->encoder);
ctx               655 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               657 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_disable(ctx->crtc);
ctx               659 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (ctx->encoder)
ctx               660 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		exynos_dpi_remove(ctx->encoder);
ctx               662 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	decon_ctx_remove(ctx);
ctx               673 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx;
ctx               681 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               682 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx)
ctx               685 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->dev = dev;
ctx               686 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->suspended = true;
ctx               690 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ctx->i80_if = true;
ctx               693 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->regs = of_iomap(dev->of_node, 0);
ctx               694 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (!ctx->regs)
ctx               697 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->pclk = devm_clk_get(dev, "pclk_decon0");
ctx               698 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->pclk)) {
ctx               700 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = PTR_ERR(ctx->pclk);
ctx               704 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->aclk = devm_clk_get(dev, "aclk_decon0");
ctx               705 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->aclk)) {
ctx               707 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = PTR_ERR(ctx->aclk);
ctx               711 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->eclk = devm_clk_get(dev, "decon0_eclk");
ctx               712 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->eclk)) {
ctx               714 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = PTR_ERR(ctx->eclk);
ctx               718 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->vclk = devm_clk_get(dev, "decon0_vclk");
ctx               719 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->vclk)) {
ctx               721 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = PTR_ERR(ctx->vclk);
ctx               726 drivers/gpu/drm/exynos/exynos7_drm_decon.c 					   ctx->i80_if ? "lcd_sys" : "vsync");
ctx               734 drivers/gpu/drm/exynos/exynos7_drm_decon.c 							0, "drm_decon", ctx);
ctx               740 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	init_waitqueue_head(&ctx->wait_vsync_queue);
ctx               741 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	atomic_set(&ctx->wait_vsync_event, 0);
ctx               743 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	platform_set_drvdata(pdev, ctx);
ctx               745 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ctx->encoder = exynos_dpi_probe(dev);
ctx               746 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	if (IS_ERR(ctx->encoder)) {
ctx               747 drivers/gpu/drm/exynos/exynos7_drm_decon.c 		ret = PTR_ERR(ctx->encoder);
ctx               763 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	iounmap(ctx->regs);
ctx               770 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(&pdev->dev);
ctx               774 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	iounmap(ctx->regs);
ctx               784 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               786 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clk_disable_unprepare(ctx->vclk);
ctx               787 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clk_disable_unprepare(ctx->eclk);
ctx               788 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clk_disable_unprepare(ctx->aclk);
ctx               789 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	clk_disable_unprepare(ctx->pclk);
ctx               796 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	struct decon_context *ctx = dev_get_drvdata(dev);
ctx               799 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ret = clk_prepare_enable(ctx->pclk);
ctx               806 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ret = clk_prepare_enable(ctx->aclk);
ctx               813 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ret = clk_prepare_enable(ctx->eclk);
ctx               820 drivers/gpu/drm/exynos/exynos7_drm_decon.c 	ret = clk_prepare_enable(ctx->vclk);
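The runtime-PM pair above brackets the four DECON clocks, with suspend disabling them in the reverse of the order resume enabled them. A minimal sketch of an enable-with-unwind variant of that resume sequence; the unwind labels are illustrative, not necessarily the driver's own:

    #include <linux/clk.h>

    static int demo_clocks_on(struct clk *pclk, struct clk *aclk,
                              struct clk *eclk, struct clk *vclk)
    {
        int ret;

        ret = clk_prepare_enable(pclk);
        if (ret)
            return ret;
        ret = clk_prepare_enable(aclk);
        if (ret)
            goto err_pclk;
        ret = clk_prepare_enable(eclk);
        if (ret)
            goto err_aclk;
        ret = clk_prepare_enable(vclk);
        if (ret)
            goto err_eclk;
        return 0;

    err_eclk:
        clk_disable_unprepare(eclk);
    err_aclk:
        clk_disable_unprepare(aclk);
    err_pclk:
        clk_disable_unprepare(pclk);
        return ret;
    }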
ctx               176 drivers/gpu/drm/exynos/exynos_drm_crtc.c 					void *ctx)
ctx               188 drivers/gpu/drm/exynos/exynos_drm_crtc.c 	exynos_crtc->ctx = ctx;
ctx                44 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = connector_to_dpi(connector);
ctx                46 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel && !ctx->panel->connector)
ctx                47 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_attach(ctx->panel, &ctx->connector);
ctx                69 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = connector_to_dpi(connector);
ctx                72 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->vm) {
ctx                77 drivers/gpu/drm/exynos/exynos_drm_dpi.c 			DRM_DEV_ERROR(ctx->dev,
ctx                81 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_display_mode_from_videomode(ctx->vm, mode);
ctx                87 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel)
ctx                88 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		return ctx->panel->funcs->get_modes(ctx->panel);
ctx                99 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = encoder_to_dpi(encoder);
ctx               100 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct drm_connector *connector = &ctx->connector;
ctx               109 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		DRM_DEV_ERROR(ctx->dev,
ctx               128 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = encoder_to_dpi(encoder);
ctx               130 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel) {
ctx               131 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_prepare(ctx->panel);
ctx               132 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_enable(ctx->panel);
ctx               138 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = encoder_to_dpi(encoder);
ctx               140 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel) {
ctx               141 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_disable(ctx->panel);
ctx               142 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_unprepare(ctx->panel);
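Panel power sequencing in the enable/disable hooks above is deliberately symmetric: prepare then enable on the way up, disable then unprepare on the way down. A minimal sketch of that bracket (return values ignored for brevity; real callers may want to check them):

    #include <drm/drm_panel.h>

    static void demo_panel_on(struct drm_panel *panel)
    {
        if (panel) {
            drm_panel_prepare(panel);   /* power rails come up first */
            drm_panel_enable(panel);    /* then backlight / display on */
        }
    }

    static void demo_panel_off(struct drm_panel *panel)
    {
        if (panel) {
            drm_panel_disable(panel);   /* undo enable */
            drm_panel_unprepare(panel); /* undo prepare, last */
        }
    }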
ctx               164 drivers/gpu/drm/exynos/exynos_drm_dpi.c static int exynos_dpi_parse_dt(struct exynos_dpi *ctx)
ctx               166 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct device *dev = ctx->dev;
ctx               170 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	ctx->panel_node = of_graph_get_remote_node(dn, FIMD_PORT_RGB, 0);
ctx               179 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		vm = devm_kzalloc(dev, sizeof(*ctx->vm), GFP_KERNEL);
ctx               189 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		ctx->vm = vm;
ctx               194 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (!ctx->panel_node)
ctx               226 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx;
ctx               229 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               230 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (!ctx)
ctx               233 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	ctx->dev = dev;
ctx               235 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	ret = exynos_dpi_parse_dt(ctx);
ctx               237 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		devm_kfree(dev, ctx);
ctx               241 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel_node) {
ctx               242 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		ctx->panel = of_drm_find_panel(ctx->panel_node);
ctx               243 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		if (IS_ERR(ctx->panel))
ctx               244 drivers/gpu/drm/exynos/exynos_drm_dpi.c 			return ERR_CAST(ctx->panel);
ctx               247 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	return &ctx->encoder;
ctx               252 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	struct exynos_dpi *ctx = encoder_to_dpi(encoder);
ctx               254 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	exynos_dpi_disable(&ctx->encoder);
ctx               256 drivers/gpu/drm/exynos/exynos_drm_dpi.c 	if (ctx->panel)
ctx               257 drivers/gpu/drm/exynos/exynos_drm_dpi.c 		drm_panel_detach(ctx->panel);
ctx               173 drivers/gpu/drm/exynos/exynos_drm_drv.h 	void				*ctx;
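The void *ctx member above is the glue between the shared exynos CRTC object and each hardware backend: the backend passes its context into exynos_drm_crtc_create() and casts it back inside its ops, as the decon and fimd fragments in this listing do. A minimal sketch of that opaque round trip with illustrative types:

    struct demo_hw_ctx { int id; };     /* backend-private state */

    struct demo_crtc {
        void *ctx;                      /* owned by the backend driver */
    };

    static int demo_crtc_op(struct demo_crtc *crtc)
    {
        struct demo_hw_ctx *hw = crtc->ctx;  /* recover the real type */

        return hw->id;
    }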
ctx               115 drivers/gpu/drm/exynos/exynos_drm_fimc.c static u32 fimc_read(struct fimc_context *ctx, u32 reg)
ctx               117 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	return readl(ctx->regs + reg);
ctx               120 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_write(struct fimc_context *ctx, u32 val, u32 reg)
ctx               122 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	writel(val, ctx->regs + reg);
ctx               125 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_set_bits(struct fimc_context *ctx, u32 reg, u32 bits)
ctx               127 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	void __iomem *r = ctx->regs + reg;
ctx               132 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_clear_bits(struct fimc_context *ctx, u32 reg, u32 bits)
ctx               134 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	void __iomem *r = ctx->regs + reg;
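Every FIMC register access funnels through the four accessors above, so read-modify-write updates stay one-liners at the call sites. A minimal self-contained sketch of the same pattern, with illustrative names:

    #include <linux/io.h>
    #include <linux/types.h>

    struct demo_hw { void __iomem *regs; };

    static u32 demo_read(struct demo_hw *hw, u32 reg)
    {
        return readl(hw->regs + reg);
    }

    static void demo_write(struct demo_hw *hw, u32 val, u32 reg)
    {
        writel(val, hw->regs + reg);
    }

    /* read-modify-write helpers: only the named bits change */
    static void demo_set_bits(struct demo_hw *hw, u32 reg, u32 bits)
    {
        demo_write(hw, demo_read(hw, reg) | bits, reg);
    }

    static void demo_clear_bits(struct demo_hw *hw, u32 reg, u32 bits)
    {
        demo_write(hw, demo_read(hw, reg) & ~bits, reg);
    }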
ctx               139 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_sw_reset(struct fimc_context *ctx)
ctx               144 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISTATUS);
ctx               146 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_clear_bits(ctx, EXYNOS_MSCTRL, EXYNOS_MSCTRL_ENVID);
ctx               148 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_bits(ctx, EXYNOS_CISRCFMT, EXYNOS_CISRCFMT_ITU601_8BIT);
ctx               151 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CIIMGCPT,
ctx               155 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_bits(ctx, EXYNOS_CIGCTRL, EXYNOS_CIGCTRL_SWRST);
ctx               158 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CIGCTRL, EXYNOS_CIGCTRL_SWRST);
ctx               161 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, 0x0, EXYNOS_CIFCNTSEQ);
ctx               164 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_set_type_ctrl(struct fimc_context *ctx)
ctx               168 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIGCTRL);
ctx               181 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIGCTRL);
ctx               184 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_handle_jpeg(struct fimc_context *ctx, bool enable)
ctx               188 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "enable[%d]\n", enable);
ctx               190 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIGCTRL);
ctx               196 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIGCTRL);
ctx               199 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_mask_irq(struct fimc_context *ctx, bool enable)
ctx               203 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "enable[%d]\n", enable);
ctx               205 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIGCTRL);
ctx               211 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIGCTRL);
ctx               214 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_clear_irq(struct fimc_context *ctx)
ctx               216 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_bits(ctx, EXYNOS_CIGCTRL, EXYNOS_CIGCTRL_IRQ_CLR);
ctx               219 drivers/gpu/drm/exynos/exynos_drm_fimc.c static bool fimc_check_ovf(struct fimc_context *ctx)
ctx               223 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	status = fimc_read(ctx, EXYNOS_CISTATUS);
ctx               227 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "flag[0x%x]\n", flag);
ctx               230 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_set_bits(ctx, EXYNOS_CIWDOFST,
ctx               234 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		DRM_DEV_ERROR(ctx->dev,
ctx               236 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			      ctx->id, status);
ctx               243 drivers/gpu/drm/exynos/exynos_drm_fimc.c static bool fimc_check_frame_end(struct fimc_context *ctx)
ctx               247 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISTATUS);
ctx               249 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "cfg[0x%x]\n", cfg);
ctx               255 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CISTATUS);
ctx               260 drivers/gpu/drm/exynos/exynos_drm_fimc.c static int fimc_get_buf_id(struct fimc_context *ctx)
ctx               265 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISTATUS2);
ctx               271 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "present[%d]before[%d]\n",
ctx               276 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		DRM_DEV_ERROR(ctx->dev, "failed to get frame count.\n");
ctx               281 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "buf_id[%d]\n", buf_id);
ctx               286 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_handle_lastend(struct fimc_context *ctx, bool enable)
ctx               290 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "enable[%d]\n", enable);
ctx               292 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIOCTRL);
ctx               298 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIOCTRL);
ctx               301 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_src_set_fmt_order(struct fimc_context *ctx, u32 fmt)
ctx               305 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               308 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISCCTRL);
ctx               314 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               319 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               327 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_MSCTRL);
ctx               363 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_MSCTRL);
ctx               366 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_src_set_fmt(struct fimc_context *ctx, u32 fmt, bool tiled)
ctx               370 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               372 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_MSCTRL);
ctx               403 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_MSCTRL);
ctx               405 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIDMAPARAM);
ctx               413 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIDMAPARAM);
ctx               415 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_src_set_fmt_order(ctx, fmt);
ctx               418 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_src_set_transf(struct fimc_context *ctx, unsigned int rotation)
ctx               423 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "rotation[%x]\n", rotation);
ctx               425 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg1 = fimc_read(ctx, EXYNOS_MSCTRL);
ctx               429 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg2 = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               465 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg1, EXYNOS_MSCTRL);
ctx               466 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg2, EXYNOS_CITRGFMT);
ctx               469 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_set_window(struct fimc_context *ctx,
ctx               481 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "x[%d]y[%d]w[%d]h[%d]hsize[%d]vsize[%d]\n",
ctx               484 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "h1[%d]h2[%d]v1[%d]v2[%d]\n", h1, h2, v1,
ctx               491 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIWDOFST);
ctx               497 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIWDOFST);
ctx               501 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIWDOFST2);
ctx               504 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_src_set_size(struct fimc_context *ctx,
ctx               510 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "hsize[%d]vsize[%d]\n", real_width,
ctx               517 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_ORGISIZE);
ctx               519 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "x[%d]y[%d]w[%d]h[%d]\n", buf->rect.x,
ctx               523 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIREAL_ISIZE);
ctx               528 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIREAL_ISIZE);
ctx               537 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CISRCFMT);
ctx               542 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIIYOFF);
ctx               545 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIICBOFF);
ctx               548 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIICROFF);
ctx               550 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_window(ctx, buf);
ctx               553 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_src_set_addr(struct fimc_context *ctx,
ctx               556 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[0], EXYNOS_CIIYSA(0));
ctx               557 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[1], EXYNOS_CIICBSA(0));
ctx               558 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[2], EXYNOS_CIICRSA(0));
ctx               561 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_fmt_order(struct fimc_context *ctx, u32 fmt)
ctx               565 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               568 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISCCTRL);
ctx               574 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               578 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               583 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               591 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIOCTRL);
ctx               629 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIOCTRL);
ctx               632 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_fmt(struct fimc_context *ctx, u32 fmt, bool tiled)
ctx               636 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               638 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIEXTEN);
ctx               642 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CIEXTEN);
ctx               645 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CIEXTEN);
ctx               647 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		cfg = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               675 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, cfg, EXYNOS_CITRGFMT);
ctx               678 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIDMAPARAM);
ctx               686 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIDMAPARAM);
ctx               688 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_fmt_order(ctx, fmt);
ctx               691 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_transf(struct fimc_context *ctx, unsigned int rotation)
ctx               696 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "rotation[0x%x]\n", rotation);
ctx               698 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               735 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CITRGFMT);
ctx               738 drivers/gpu/drm/exynos/exynos_drm_fimc.c static int fimc_set_prescaler(struct fimc_context *ctx, struct fimc_scaler *sc,
ctx               748 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg_ext = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               768 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		dev_err(ctx->dev, "failed to get ratio horizontal.\n");
ctx               774 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		dev_err(ctx->dev, "failed to get ratio vertical.\n");
ctx               780 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "pre_dst_width[%d]pre_dst_height[%d]\n",
ctx               782 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "hfactor[%d]vfactor[%d]\n", hfactor,
ctx               789 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "hratio[%d]vratio[%d]up_h[%d]up_v[%d]\n",
ctx               793 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "shfactor[%d]\n", shfactor);
ctx               798 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CISCPRERATIO);
ctx               802 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CISCPREDST);
ctx               807 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_set_scaler(struct fimc_context *ctx, struct fimc_scaler *sc)
ctx               811 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "range[%d]bypass[%d]up_h[%d]up_v[%d]\n",
ctx               813 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "hratio[%d]vratio[%d]\n",
ctx               816 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CISCCTRL);
ctx               836 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CISCCTRL);
ctx               838 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg_ext = fimc_read(ctx, EXYNOS_CIEXTEN);
ctx               843 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg_ext, EXYNOS_CIEXTEN);
ctx               846 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_size(struct fimc_context *ctx,
ctx               852 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "hsize[%d]vsize[%d]\n", real_width,
ctx               859 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_ORGOSIZE);
ctx               861 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "x[%d]y[%d]w[%d]h[%d]\n", buf->rect.x,
ctx               866 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIGCTRL);
ctx               874 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIGCTRL);
ctx               876 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg_ext = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               879 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CITRGFMT);
ctx               888 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CITRGFMT);
ctx               892 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CITAREA);
ctx               897 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIOYOFF);
ctx               900 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIOCBOFF);
ctx               903 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIOCROFF);
ctx               906 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_buf_seq(struct fimc_context *ctx, u32 buf_id,
ctx               913 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "buf_id[%d]enqueue[%d]\n", buf_id, enqueue);
ctx               915 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               917 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_CIFCNTSEQ);
ctx               924 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_CIFCNTSEQ);
ctx               929 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_mask_irq(ctx, true);
ctx               931 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_mask_irq(ctx, false);
ctx               933 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	spin_unlock_irqrestore(&ctx->lock, flags);
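The buffer-sequence update above is a read-modify-write on a register that the interrupt handler also manipulates, hence the spin_lock_irqsave() bracket. A minimal sketch of that IRQ-safe update with an assumed register offset; the lock must be initialized with spin_lock_init() at probe, as the driver does later in this listing:

    #include <linux/bits.h>
    #include <linux/io.h>
    #include <linux/spinlock.h>
    #include <linux/types.h>

    #define DEMO_BUFSEQ 0x1fc           /* hypothetical register offset */

    struct demo_hw {
        void __iomem *regs;
        spinlock_t lock;
    };

    static void demo_buf_seq(struct demo_hw *hw, u32 buf_id, bool enqueue)
    {
        unsigned long flags;
        u32 cfg;

        spin_lock_irqsave(&hw->lock, flags);   /* also blocks local IRQs */
        cfg = readl(hw->regs + DEMO_BUFSEQ);
        if (enqueue)
            cfg |= BIT(buf_id);
        else
            cfg &= ~BIT(buf_id);
        writel(cfg, hw->regs + DEMO_BUFSEQ);
        spin_unlock_irqrestore(&hw->lock, flags);
    }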
ctx               936 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_dst_set_addr(struct fimc_context *ctx,
ctx               939 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[0], EXYNOS_CIOYSA(0));
ctx               940 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[1], EXYNOS_CIOCBSA(0));
ctx               941 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, buf->dma_addr[2], EXYNOS_CIOCRSA(0));
ctx               943 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_buf_seq(ctx, 0, true);
ctx               946 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_stop(struct fimc_context *ctx);
ctx               950 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = dev_id;
ctx               953 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fimc id[%d]\n", ctx->id);
ctx               955 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_irq(ctx);
ctx               956 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (fimc_check_ovf(ctx))
ctx               959 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (!fimc_check_frame_end(ctx))
ctx               962 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	buf_id = fimc_get_buf_id(ctx);
ctx               966 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "buf_id[%d]\n", buf_id);
ctx               968 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (ctx->task) {
ctx               969 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		struct exynos_drm_ipp_task *task = ctx->task;
ctx               971 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		ctx->task = NULL;
ctx               972 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		pm_runtime_mark_last_busy(ctx->dev);
ctx               973 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		pm_runtime_put_autosuspend(ctx->dev);
ctx               977 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_buf_seq(ctx, buf_id, false);
ctx               978 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_stop(ctx);
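When a frame completes, the handler above detaches the pending task before signalling it and drops its runtime-PM reference with the autosuspend variants, so the device idles only after a grace period. A minimal sketch of that completion step; demo_task and demo_task_done() are hypothetical stand-ins for the IPP task plumbing:

    #include <linux/device.h>
    #include <linux/pm_runtime.h>

    struct demo_task;
    void demo_task_done(struct demo_task *task, int ret);  /* assumed hook */

    static void demo_job_done(struct device *dev, struct demo_task **slot)
    {
        struct demo_task *task = *slot;

        if (!task)
            return;
        *slot = NULL;                      /* detach before signalling */
        pm_runtime_mark_last_busy(dev);    /* restart the idle timer */
        pm_runtime_put_autosuspend(dev);   /* suspend after the delay */
        demo_task_done(task, 0);
    }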
ctx               983 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_clear_addr(struct fimc_context *ctx)
ctx               988 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIIYSA(i));
ctx               989 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIICBSA(i));
ctx               990 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIICRSA(i));
ctx               994 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIOYSA(i));
ctx               995 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIOCBSA(i));
ctx               996 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		fimc_write(ctx, 0, EXYNOS_CIOCRSA(i));
ctx              1000 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_reset(struct fimc_context *ctx)
ctx              1003 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_sw_reset(ctx);
ctx              1006 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	memset(&ctx->sc, 0x0, sizeof(ctx->sc));
ctx              1008 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_addr(ctx);
ctx              1011 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_start(struct fimc_context *ctx)
ctx              1015 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_mask_irq(ctx, true);
ctx              1018 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_handle_jpeg(ctx, false);
ctx              1019 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_scaler(ctx, &ctx->sc);
ctx              1021 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_type_ctrl(ctx);
ctx              1022 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_handle_lastend(ctx, false);
ctx              1025 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg0 = fimc_read(ctx, EXYNOS_MSCTRL);
ctx              1028 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg0, EXYNOS_MSCTRL);
ctx              1031 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, 0x0, EXYNOS_CISTATUS);
ctx              1033 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg0 = fimc_read(ctx, EXYNOS_CIIMGCPT);
ctx              1038 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg1 = fimc_read(ctx, EXYNOS_CISCCTRL);
ctx              1043 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg1, EXYNOS_CISCCTRL);
ctx              1047 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg0, EXYNOS_CIIMGCPT);
ctx              1050 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CIGCTRL, EXYNOS_CIGCTRL_IRQ_END_DISABLE);
ctx              1052 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CIOCTRL, EXYNOS_CIOCTRL_WEAVE_MASK);
ctx              1054 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_bits(ctx, EXYNOS_MSCTRL, EXYNOS_MSCTRL_ENVID);
ctx              1057 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_stop(struct fimc_context *ctx)
ctx              1062 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg = fimc_read(ctx, EXYNOS_MSCTRL);
ctx              1065 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, cfg, EXYNOS_MSCTRL);
ctx              1067 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_mask_irq(ctx, false);
ctx              1070 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_write(ctx, 0x0, EXYNOS_CIFCNTSEQ);
ctx              1073 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CISCCTRL, EXYNOS_CISCCTRL_SCALERSTART);
ctx              1076 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_clear_bits(ctx, EXYNOS_CIIMGCPT,
ctx              1080 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_bits(ctx, EXYNOS_CIGCTRL, EXYNOS_CIGCTRL_IRQ_END_DISABLE);
ctx              1086 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx =
ctx              1089 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	pm_runtime_get_sync(ctx->dev);
ctx              1090 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->task = task;
ctx              1092 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_src_set_fmt(ctx, task->src.buf.fourcc, task->src.buf.modifier);
ctx              1093 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_src_set_size(ctx, &task->src);
ctx              1094 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_src_set_transf(ctx, DRM_MODE_ROTATE_0);
ctx              1095 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_src_set_addr(ctx, &task->src);
ctx              1096 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_fmt(ctx, task->dst.buf.fourcc, task->dst.buf.modifier);
ctx              1097 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_transf(ctx, task->transform.rotation);
ctx              1098 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_size(ctx, &task->dst);
ctx              1099 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_dst_set_addr(ctx, &task->dst);
ctx              1100 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect);
ctx              1101 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_start(ctx);
ctx              1109 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx =
ctx              1112 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_reset(ctx);
ctx              1114 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (ctx->task) {
ctx              1115 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		struct exynos_drm_ipp_task *task = ctx->task;
ctx              1117 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		ctx->task = NULL;
ctx              1118 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		pm_runtime_mark_last_busy(ctx->dev);
ctx              1119 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		pm_runtime_put_autosuspend(ctx->dev);
ctx              1131 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = dev_get_drvdata(dev);
ctx              1133 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct exynos_drm_ipp *ipp = &ctx->ipp;
ctx              1135 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->drm_dev = drm_dev;
ctx              1137 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	exynos_drm_register_dma(drm_dev, dev, &ctx->dma_priv);
ctx              1142 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			ctx->formats, ctx->num_formats, "fimc");
ctx              1152 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = dev_get_drvdata(dev);
ctx              1154 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct exynos_drm_ipp *ipp = &ctx->ipp;
ctx              1157 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	exynos_drm_unregister_dma(drm_dev, dev, &ctx->dma_priv);
ctx              1165 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_put_clocks(struct fimc_context *ctx)
ctx              1170 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		if (IS_ERR(ctx->clocks[i]))
ctx              1172 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		clk_put(ctx->clocks[i]);
ctx              1173 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		ctx->clocks[i] = ERR_PTR(-EINVAL);
ctx              1177 drivers/gpu/drm/exynos/exynos_drm_fimc.c static int fimc_setup_clocks(struct fimc_context *ctx)
ctx              1179 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct device *fimc_dev = ctx->dev;
ctx              1184 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		ctx->clocks[i] = ERR_PTR(-EINVAL);
ctx              1192 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		ctx->clocks[i] = clk_get(dev, fimc_clock_names[i]);
ctx              1193 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		if (IS_ERR(ctx->clocks[i])) {
ctx              1194 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			ret = PTR_ERR(ctx->clocks[i]);
ctx              1201 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ret = clk_prepare_enable(ctx->clocks[FIMC_CLK_LCLK]);
ctx              1205 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_put_clocks(ctx);
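The setup/put pair above keeps every slot of the clock array in a defined state: unacquired entries hold an ERR_PTR(-EINVAL) sentinel, so a single teardown helper can safely release whatever subset was obtained. A minimal sketch of that idiom with hypothetical clock names:

    #include <linux/clk.h>
    #include <linux/device.h>
    #include <linux/err.h>

    #define DEMO_NUM_CLKS 3
    static const char *const demo_clk_names[DEMO_NUM_CLKS] = {
        "sclk_demo", "demo", "demo_mux",     /* hypothetical names */
    };

    static void demo_put_clocks(struct clk *clocks[DEMO_NUM_CLKS])
    {
        int i;

        for (i = 0; i < DEMO_NUM_CLKS; i++) {
            if (!IS_ERR(clocks[i]))
                clk_put(clocks[i]);
            clocks[i] = ERR_PTR(-EINVAL);    /* back to the sentinel */
        }
    }

    static int demo_get_clocks(struct device *dev,
                               struct clk *clocks[DEMO_NUM_CLKS])
    {
        int i;

        for (i = 0; i < DEMO_NUM_CLKS; i++)
            clocks[i] = ERR_PTR(-EINVAL);    /* nothing acquired yet */

        for (i = 0; i < DEMO_NUM_CLKS; i++) {
            clocks[i] = clk_get(dev, demo_clk_names[i]);
            if (IS_ERR(clocks[i])) {
                int ret = PTR_ERR(clocks[i]);

                demo_put_clocks(clocks);     /* releases only real clocks */
                return ret;
            }
        }
        return 0;
    }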
ctx              1265 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx;
ctx              1273 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              1274 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (!ctx)
ctx              1277 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->dev = dev;
ctx              1278 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->id = of_alias_get_id(dev->of_node, "fimc");
ctx              1288 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (ctx->id < 3) {
ctx              1304 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (ctx->id < 3) {
ctx              1320 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->formats = formats;
ctx              1321 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->num_formats = num_formats;
ctx              1324 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->regs_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
ctx              1325 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ctx->regs = devm_ioremap_resource(dev, ctx->regs_res);
ctx              1326 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (IS_ERR(ctx->regs))
ctx              1327 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		return PTR_ERR(ctx->regs);
ctx              1337 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		0, dev_name(dev), ctx);
ctx              1343 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	ret = fimc_setup_clocks(ctx);
ctx              1347 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	spin_lock_init(&ctx->lock);
ctx              1348 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	platform_set_drvdata(pdev, ctx);
ctx              1365 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_put_clocks(ctx);
ctx              1373 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = get_fimc_context(dev);
ctx              1379 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_put_clocks(ctx);
ctx              1387 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = get_fimc_context(dev);
ctx              1389 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(dev, "id[%d]\n", ctx->id);
ctx              1390 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	clk_disable_unprepare(ctx->clocks[FIMC_CLK_GATE]);
ctx              1396 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_context *ctx = get_fimc_context(dev);
ctx              1398 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	DRM_DEV_DEBUG_KMS(dev, "id[%d]\n", ctx->id);
ctx              1399 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	return clk_prepare_enable(ctx->clocks[FIMC_CLK_GATE]);
ctx               237 drivers/gpu/drm/exynos/exynos_drm_fimd.c static inline void fimd_set_bits(struct fimd_context *ctx, u32 reg, u32 mask,
ctx               240 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	val = (val & mask) | (readl(ctx->regs + reg) & ~mask);
ctx               241 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + reg);
ctx               246 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               249 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               252 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!test_and_set_bit(0, &ctx->irq_flags)) {
ctx               253 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		val = readl(ctx->regs + VIDINTCON0);
ctx               257 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (ctx->i80_if) {
ctx               270 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(val, ctx->regs + VIDINTCON0);
ctx               278 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               281 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               284 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (test_and_clear_bit(0, &ctx->irq_flags)) {
ctx               285 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		val = readl(ctx->regs + VIDINTCON0);
ctx               289 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (ctx->i80_if) {
ctx               296 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(val, ctx->regs + VIDINTCON0);
ctx               302 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               304 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               307 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	atomic_set(&ctx->wait_vsync_event, 1);
ctx               313 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!wait_event_timeout(ctx->wait_vsync_queue,
ctx               314 drivers/gpu/drm/exynos/exynos_drm_fimd.c 				!atomic_read(&ctx->wait_vsync_event),
ctx               316 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "vblank wait timed out.\n");
ctx               319 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_enable_video_output(struct fimd_context *ctx, unsigned int win,
ctx               322 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	u32 val = readl(ctx->regs + WINCON(win));
ctx               329 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + WINCON(win));
ctx               332 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_enable_shadow_channel_path(struct fimd_context *ctx,
ctx               336 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	u32 val = readl(ctx->regs + SHADOWCON);
ctx               343 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + SHADOWCON);
ctx               348 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               352 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	pm_runtime_get_sync(ctx->dev);
ctx               354 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_prepare_enable(ctx->bus_clk);
ctx               355 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_prepare_enable(ctx->lcd_clk);
ctx               359 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		u32 val = readl(ctx->regs + WINCON(win));
ctx               362 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			fimd_enable_video_output(ctx, win, false);
ctx               364 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			if (ctx->driver_data->has_shadowcon)
ctx               365 drivers/gpu/drm/exynos/exynos_drm_fimd.c 				fimd_enable_shadow_channel_path(ctx, win,
ctx               374 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->suspended = false;
ctx               376 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_enable_vblank(ctx->crtc);
ctx               377 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_wait_for_vblank(ctx->crtc);
ctx               378 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_disable_vblank(ctx->crtc);
ctx               380 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->suspended = true;
ctx               383 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_disable_unprepare(ctx->lcd_clk);
ctx               384 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_disable_unprepare(ctx->bus_clk);
ctx               386 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	pm_runtime_put(ctx->dev);
ctx               394 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               399 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_ERROR(ctx->dev, "Mode has zero clock value.\n");
ctx               405 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->i80_if) {
ctx               413 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	lcd_rate = clk_get_rate(ctx->lcd_clk);
ctx               415 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_ERROR(ctx->dev,
ctx               424 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_ERROR(ctx->dev, "requested pixel clock(%lu) too low\n",
ctx               429 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->clkdiv = (clkdiv < 0x100) ? clkdiv : 0xff;
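The divider math above picks the smallest divisor that does not overshoot the requested pixel clock: rounding the quotient up guarantees the resulting rate is at or below the mode clock, and the clamp keeps the value inside the 8-bit register field. A minimal sketch with assumed example rates:

    #include <linux/kernel.h>
    #include <linux/types.h>

    /*
     * Worked example (assumed rates): lcd_rate = 800 MHz, mode clock =
     * 71 MHz. DIV_ROUND_UP(800000000, 71000000) = 12, so the panel is
     * driven at 800/12 ~= 66.7 MHz, never faster than requested.
     */
    static u32 demo_clkdiv(unsigned long lcd_rate, unsigned long pixel_clock)
    {
        unsigned long clkdiv = DIV_ROUND_UP(lcd_rate, pixel_clock);

        return (clkdiv < 0x100) ? clkdiv : 0xff;  /* 8-bit divider field */
    }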
ctx               434 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_setup_trigger(struct fimd_context *ctx)
ctx               436 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	void __iomem *timing_base = ctx->regs + ctx->driver_data->timing_base;
ctx               437 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	u32 trg_type = ctx->driver_data->trg_type;
ctx               443 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (ctx->driver_data->has_hw_trigger)
ctx               445 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (ctx->driver_data->has_trigger_per_te)
ctx               456 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               458 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	const struct fimd_driver_data *driver_data = ctx->driver_data;
ctx               459 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	void *timing_base = ctx->regs + driver_data->timing_base;
ctx               462 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               469 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->i80_if) {
ctx               470 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		val = ctx->i80ifcon | I80IFEN_ENABLE;
ctx               477 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (driver_data->has_vtsel && ctx->sysreg &&
ctx               478 drivers/gpu/drm/exynos/exynos_drm_fimd.c 				regmap_update_bits(ctx->sysreg,
ctx               482 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			DRM_DEV_ERROR(ctx->dev,
ctx               491 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		vidcon1 = ctx->vidcon1;
ctx               496 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(vidcon1, ctx->regs + driver_data->timing_base + VIDCON1);
ctx               506 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(val, ctx->regs + driver_data->timing_base + VIDTCON0);
ctx               516 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(val, ctx->regs + driver_data->timing_base + VIDTCON1);
ctx               520 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(ctx->vidout_con, timing_base + VIDOUT_CON);
ctx               523 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->sysreg && regmap_update_bits(ctx->sysreg,
ctx               527 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_ERROR(ctx->dev,
ctx               535 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (driver_data->has_mic_bypass && ctx->sysreg &&
ctx               536 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	    regmap_update_bits(ctx->sysreg,
ctx               540 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_ERROR(ctx->dev,
ctx               550 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + driver_data->timing_base + VIDTCON2);
ctx               552 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_setup_trigger(ctx);
ctx               558 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	val = ctx->vidcon0;
ctx               561 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_clksel)
ctx               564 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->clkdiv > 1)
ctx               565 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		val |= VIDCON0_CLKVAL_F(ctx->clkdiv - 1) | VIDCON0_CLKDIR;
ctx               567 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDCON0);
ctx               570 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_win_set_bldeq(struct fimd_context *ctx, unsigned int win,
ctx               593 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_set_bits(ctx, BLENDEQx(win), mask, val);
ctx               596 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_win_set_bldmod(struct fimd_context *ctx, unsigned int win,
ctx               614 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_set_bits(ctx, WINCON(win), WINCONx_BLEND_MODE_MASK, val);
ctx               623 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDOSD_C(win));
ctx               627 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDWnALPHA0(win));
ctx               631 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDWnALPHA1(win));
ctx               633 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_set_bits(ctx, BLENDCON, BLENDCON_NEW_MASK,
ctx               637 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_win_set_pixfmt(struct fimd_context *ctx, unsigned int win,
ctx               640 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct exynos_drm_plane plane = ctx->planes[win];
ctx               657 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_limited_fmt && !win) {
ctx               703 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_set_bits(ctx, WINCON(win), ~WINCONx_BLEND_MODE_MASK, val);
ctx               707 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_win_set_bldmod(ctx, win, alpha, pixel_alpha);
ctx               708 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_win_set_bldeq(ctx, win, alpha, pixel_alpha);
ctx               712 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_win_set_colkey(struct fimd_context *ctx, unsigned int win)
ctx               721 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(keycon0, ctx->regs + WKEYCON0_BASE(win));
ctx               722 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(keycon1, ctx->regs + WKEYCON1_BASE(win));
ctx               731 drivers/gpu/drm/exynos/exynos_drm_fimd.c static void fimd_shadow_protect_win(struct fimd_context *ctx,
ctx               746 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_shadowcon) {
ctx               754 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	val = readl(ctx->regs + reg);
ctx               759 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + reg);
ctx               764 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               767 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               771 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_shadow_protect_win(ctx, i, true);
ctx               776 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               779 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               783 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_shadow_protect_win(ctx, i, false);
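The two hooks above form the atomic bracket: every window's shadow registers are frozen before the plane updates are written and released afterwards, so a partially programmed frame is never latched. A minimal sketch of the sequence, with demo_shadow_protect_win() standing in for the hardware-specific protect helper:

    #include <linux/types.h>

    struct demo_ctx;
    void demo_shadow_protect_win(struct demo_ctx *ctx, int win, bool protect);

    static void demo_atomic_update(struct demo_ctx *ctx, int nr_windows)
    {
        int i;

        for (i = 0; i < nr_windows; i++)
            demo_shadow_protect_win(ctx, i, true);   /* freeze shadow regs */

        /* ... program window position, size and buffers here ... */

        for (i = 0; i < nr_windows; i++)
            demo_shadow_protect_win(ctx, i, false);  /* latch at next vsync */
    }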
ctx               793 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               802 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               811 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDWx_BUF_START(win, 0));
ctx               816 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDWx_BUF_END(win, 0));
ctx               818 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               821 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "ovl_width = %d, ovl_height = %d\n",
ctx               831 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDWx_BUF_SIZE(win, 0));
ctx               838 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDOSD_A(win));
ctx               850 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + VIDOSD_B(win));
ctx               852 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               862 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(val, ctx->regs + offset);
ctx               864 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "osd size = 0x%x\n",
ctx               868 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_win_set_pixfmt(ctx, win, fb, state->src.w);
ctx               872 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_win_set_colkey(ctx, win);
ctx               874 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_enable_video_output(ctx, win, true);
ctx               876 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_shadowcon)
ctx               877 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_enable_shadow_channel_path(ctx, win, true);
ctx               879 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->i80_if)
ctx               880 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		atomic_set(&ctx->win_updated, 1);
ctx               886 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               889 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               892 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_enable_video_output(ctx, win, false);
ctx               894 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_shadowcon)
ctx               895 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_enable_shadow_channel_path(ctx, win, false);
ctx               900 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               902 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!ctx->suspended)
ctx               905 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->suspended = false;
ctx               907 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	pm_runtime_get_sync(ctx->dev);
ctx               910 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (test_and_clear_bit(0, &ctx->irq_flags))
ctx               911 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_enable_vblank(ctx->crtc);
ctx               913 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_commit(ctx->crtc);
ctx               918 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               921 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->suspended)
ctx               930 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_disable_plane(crtc, &ctx->planes[i]);
ctx               936 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(0, ctx->regs + VIDCON0);
ctx               938 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	pm_runtime_put_sync(ctx->dev);
ctx               939 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->suspended = true;
ctx               944 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = dev_get_drvdata(dev);
ctx               945 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	const struct fimd_driver_data *driver_data = ctx->driver_data;
ctx               946 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	void *timing_base = ctx->regs + driver_data->timing_base;
ctx               953 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (atomic_read(&ctx->triggering))
ctx               957 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	atomic_set(&ctx->triggering, 1);
ctx               967 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!test_bit(0, &ctx->irq_flags))
ctx               968 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		atomic_set(&ctx->triggering, 0);
ctx               973 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = crtc->ctx;
ctx               974 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	u32 trg_type = ctx->driver_data->trg_type;
ctx               977 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!ctx->drm_dev)
ctx               987 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (atomic_add_unless(&ctx->win_updated, -1, 0))
ctx               988 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_trigger(ctx->dev);
ctx               992 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (atomic_read(&ctx->wait_vsync_event)) {
ctx               993 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		atomic_set(&ctx->wait_vsync_event, 0);
ctx               994 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		wake_up(&ctx->wait_vsync_queue);
ctx               997 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (test_bit(0, &ctx->irq_flags))
ctx               998 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		drm_crtc_handle_vblank(&ctx->crtc->base);
ctx              1003 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = container_of(clk, struct fimd_context,
ctx              1006 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	writel(val, ctx->regs + DP_MIE_CLKCON);
ctx              1024 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = (struct fimd_context *)dev_id;
ctx              1027 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	val = readl(ctx->regs + VIDINTCON1);
ctx              1029 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clear_bit = ctx->i80_if ? VIDINTCON1_INT_I80 : VIDINTCON1_INT_FRAME;
ctx              1031 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		writel(clear_bit, ctx->regs + VIDINTCON1);
ctx              1034 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!ctx->drm_dev)
ctx              1037 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!ctx->i80_if)
ctx              1038 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		drm_crtc_handle_vblank(&ctx->crtc->base);
ctx              1040 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->i80_if) {
ctx              1042 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		atomic_set(&ctx->triggering, 0);
ctx              1045 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (atomic_read(&ctx->wait_vsync_event)) {
ctx              1046 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			atomic_set(&ctx->wait_vsync_event, 0);
ctx              1047 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			wake_up(&ctx->wait_vsync_queue);
ctx              1057 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = dev_get_drvdata(dev);
ctx              1063 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->drm_dev = drm_dev;
ctx              1066 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->configs[i].pixel_formats = fimd_formats;
ctx              1067 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->configs[i].num_pixel_formats = ARRAY_SIZE(fimd_formats);
ctx              1068 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->configs[i].zpos = i;
ctx              1069 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->configs[i].type = fimd_win_types[i];
ctx              1070 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->configs[i].capabilities = capabilities[i];
ctx              1071 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
ctx              1072 drivers/gpu/drm/exynos/exynos_drm_fimd.c 					&ctx->configs[i]);
ctx              1077 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	exynos_plane = &ctx->planes[DEFAULT_WIN];
ctx              1078 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
ctx              1079 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			EXYNOS_DISPLAY_TYPE_LCD, &fimd_crtc_ops, ctx);
ctx              1080 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->crtc))
ctx              1081 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		return PTR_ERR(ctx->crtc);
ctx              1083 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->driver_data->has_dp_clk) {
ctx              1084 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->dp_clk.enable = fimd_dp_clock_enable;
ctx              1085 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->crtc->pipe_clk = &ctx->dp_clk;
ctx              1088 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->encoder)
ctx              1089 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		exynos_dpi_bind(drm_dev, ctx->encoder);
ctx              1092 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		fimd_clear_channels(ctx->crtc);
ctx              1094 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	return exynos_drm_register_dma(drm_dev, dev, &ctx->dma_priv);
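fimd_bind()/fimd_unbind() are callbacks of the kernel's component framework, which the DRM master invokes once all sub-devices are available. A skeletal sketch of how such a pair is registered (all my_* names are placeholders, not from the driver):

#include <linux/component.h>
#include <linux/device.h>

static int my_bind(struct device *dev, struct device *master, void *data)
{
	/* data is the struct drm_device * supplied by the master;
	 * create planes, the CRTC, and register DMA here. */
	return 0;
}

static void my_unbind(struct device *dev, struct device *master, void *data)
{
	/* tear down in reverse order of my_bind() */
}

static const struct component_ops my_component_ops = {
	.bind	= my_bind,
	.unbind	= my_unbind,
};

/* from probe(): return component_add(dev, &my_component_ops); */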
ctx              1100 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = dev_get_drvdata(dev);
ctx              1102 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	fimd_disable(ctx->crtc);
ctx              1104 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	exynos_drm_unregister_dma(ctx->drm_dev, ctx->dev, &ctx->dma_priv);
ctx              1106 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (ctx->encoder)
ctx              1107 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		exynos_dpi_remove(ctx->encoder);
ctx              1118 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx;
ctx              1126 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              1127 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (!ctx)
ctx              1130 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->dev = dev;
ctx              1131 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->suspended = true;
ctx              1132 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->driver_data = of_device_get_match_data(dev);
ctx              1135 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->vidcon1 |= VIDCON1_INV_VDEN;
ctx              1137 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->vidcon1 |= VIDCON1_INV_VCLK;
ctx              1143 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->i80_if = true;
ctx              1145 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		if (ctx->driver_data->has_vidoutcon)
ctx              1146 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			ctx->vidout_con |= VIDOUT_CON_F_I80_LDI0;
ctx              1148 drivers/gpu/drm/exynos/exynos_drm_fimd.c 			ctx->vidcon0 |= VIDCON0_VIDOUT_I80_LDI0;
ctx              1153 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->vidcon0 |= VIDCON0_DSI_EN;
ctx              1157 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->i80ifcon = LCD_CS_SETUP(val);
ctx              1160 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->i80ifcon |= LCD_WR_SETUP(val);
ctx              1163 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->i80ifcon |= LCD_WR_ACTIVE(val);
ctx              1166 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->i80ifcon |= LCD_WR_HOLD(val);
ctx              1170 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->sysreg = syscon_regmap_lookup_by_phandle(dev->of_node,
ctx              1172 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->sysreg)) {
ctx              1174 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		ctx->sysreg = NULL;
ctx              1177 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->bus_clk = devm_clk_get(dev, "fimd");
ctx              1178 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->bus_clk)) {
ctx              1180 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		return PTR_ERR(ctx->bus_clk);
ctx              1183 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->lcd_clk = devm_clk_get(dev, "sclk_fimd");
ctx              1184 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->lcd_clk)) {
ctx              1186 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		return PTR_ERR(ctx->lcd_clk);
ctx              1191 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->regs = devm_ioremap_resource(dev, res);
ctx              1192 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->regs))
ctx              1193 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		return PTR_ERR(ctx->regs);
ctx              1196 drivers/gpu/drm/exynos/exynos_drm_fimd.c 					   ctx->i80_if ? "lcd_sys" : "vsync");
ctx              1203 drivers/gpu/drm/exynos/exynos_drm_fimd.c 							0, "drm_fimd", ctx);
ctx              1209 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	init_waitqueue_head(&ctx->wait_vsync_queue);
ctx              1210 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	atomic_set(&ctx->wait_vsync_event, 0);
ctx              1212 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	platform_set_drvdata(pdev, ctx);
ctx              1214 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ctx->encoder = exynos_dpi_probe(dev);
ctx              1215 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	if (IS_ERR(ctx->encoder))
ctx              1216 drivers/gpu/drm/exynos/exynos_drm_fimd.c 		return PTR_ERR(ctx->encoder);
ctx              1244 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = dev_get_drvdata(dev);
ctx              1246 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_disable_unprepare(ctx->lcd_clk);
ctx              1247 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	clk_disable_unprepare(ctx->bus_clk);
ctx              1254 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	struct fimd_context *ctx = dev_get_drvdata(dev);
ctx              1257 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ret = clk_prepare_enable(ctx->bus_clk);
ctx              1265 drivers/gpu/drm/exynos/exynos_drm_fimd.c 	ret = clk_prepare_enable(ctx->lcd_clk);
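The runtime-PM pair above keeps the two clocks strictly ordered: the bus clock comes up first and goes down last. A hedged sketch of that pairing with explicit unwinding on failure (the struct and clock names are illustrative):

#include <linux/clk.h>
#include <linux/device.h>

struct my_ctx {
	struct clk *bus_clk;
	struct clk *lcd_clk;
};

static int my_runtime_resume(struct device *dev)
{
	struct my_ctx *c = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(c->bus_clk);
	if (ret)
		return ret;

	ret = clk_prepare_enable(c->lcd_clk);
	if (ret) {
		clk_disable_unprepare(c->bus_clk);	/* undo the first enable */
		return ret;
	}
	return 0;
}

static int my_runtime_suspend(struct device *dev)
{
	struct my_ctx *c = dev_get_drvdata(dev);

	clk_disable_unprepare(c->lcd_clk);	/* reverse order of resume */
	clk_disable_unprepare(c->bus_clk);
	return 0;
}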
ctx                64 drivers/gpu/drm/exynos/exynos_drm_gsc.c #define gsc_read(offset)		readl(ctx->regs + (offset))
ctx                65 drivers/gpu/drm/exynos/exynos_drm_gsc.c #define gsc_write(cfg, offset)	writel(cfg, ctx->regs + (offset))
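Both macros quietly rely on a variable literally named ctx being in scope at every call site. The format setters in this file then open-code read-modify-write sequences on top of them; a helper capturing that pattern could look like the sketch below (gsc_write_mask is an assumed name, not in the file; struct gsc_context is the file's own context type):

#include <linux/io.h>

/* read-modify-write: change only the bits selected by mask */
static inline void gsc_write_mask(struct gsc_context *ctx, u32 offset,
				  u32 val, u32 mask)
{
	u32 cfg = readl(ctx->regs + offset);

	cfg &= ~mask;
	cfg |= val & mask;
	writel(cfg, ctx->regs + offset);
}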
ctx               379 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_sw_reset(struct gsc_context *ctx)
ctx               397 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to reset gsc h/w.\n");
ctx               419 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_handle_irq(struct gsc_context *ctx, bool enable,
ctx               424 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "enable[%d]overflow[%d]level[%d]\n",
ctx               449 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_src_set_fmt(struct gsc_context *ctx, u32 fmt, bool tiled)
ctx               453 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               521 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_src_set_transf(struct gsc_context *ctx, unsigned int rotation)
ctx               561 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->rotation = (cfg & GSC_IN_ROT_90) ? 1 : 0;
ctx               564 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_src_set_size(struct gsc_context *ctx,
ctx               567 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
ctx               607 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_src_set_buf_seq(struct gsc_context *ctx, u32 buf_id,
ctx               625 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_src_set_addr(struct gsc_context *ctx, u32 buf_id,
ctx               633 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_buf_seq(ctx, buf_id, true);
ctx               636 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_dst_set_fmt(struct gsc_context *ctx, u32 fmt, bool tiled)
ctx               640 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "fmt[0x%x]\n", fmt);
ctx               708 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_get_ratio_shift(struct gsc_context *ctx, u32 src, u32 dst,
ctx               711 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "src[%d]dst[%d]\n", src, dst);
ctx               714 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to make ratio and shift.\n");
ctx               743 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_set_prescaler(struct gsc_context *ctx, struct gsc_scaler *sc,
ctx               754 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (ctx->rotation) {
ctx               762 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_get_ratio_shift(ctx, src_w, dst_w, &sc->pre_hratio);
ctx               764 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to get ratio horizontal.\n");
ctx               768 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_get_ratio_shift(ctx, src_h, dst_h, &sc->pre_vratio);
ctx               770 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to get ratio vertical.\n");
ctx               774 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "pre_hratio[%d]pre_vratio[%d]\n",
ctx               780 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "main_hratio[%ld]main_vratio[%ld]\n",
ctx               786 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "pre_shfactor[%d]\n", sc->pre_shfactor);
ctx               796 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_set_h_coef(struct gsc_context *ctx, unsigned long main_hratio)
ctx               822 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_set_v_coef(struct gsc_context *ctx, unsigned long main_vratio)
ctx               848 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_set_scaler(struct gsc_context *ctx, struct gsc_scaler *sc)
ctx               852 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "main_hratio[%ld]main_vratio[%ld]\n",
ctx               855 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_h_coef(ctx, sc->main_hratio);
ctx               859 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_v_coef(ctx, sc->main_vratio);
ctx               864 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_dst_set_size(struct gsc_context *ctx,
ctx               867 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
ctx               876 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (ctx->rotation)
ctx               908 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_dst_get_buf_seq(struct gsc_context *ctx)
ctx               919 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "buf_num[%d]\n", buf_num);
ctx               924 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_dst_set_buf_seq(struct gsc_context *ctx, u32 buf_id,
ctx               942 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (enqueue && gsc_dst_get_buf_seq(ctx) >= GSC_BUF_START)
ctx               943 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		gsc_handle_irq(ctx, true, false, true);
ctx               946 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (!enqueue && gsc_dst_get_buf_seq(ctx) <= GSC_BUF_STOP)
ctx               947 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		gsc_handle_irq(ctx, false, false, true);
ctx               950 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_dst_set_addr(struct gsc_context *ctx,
ctx               958 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_dst_set_buf_seq(ctx, buf_id, true);
ctx               961 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_get_src_buf_index(struct gsc_context *ctx)
ctx               966 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "gsc id[%d]\n", ctx->id);
ctx               978 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "cfg[0x%x]curr_index[%d]buf_id[%d]\n", cfg,
ctx               982 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to get in buffer index.\n");
ctx               986 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_buf_seq(ctx, buf_id, false);
ctx               991 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_get_dst_buf_index(struct gsc_context *ctx)
ctx               996 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "gsc id[%d]\n", ctx->id);
ctx              1009 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_ERROR(ctx->dev, "failed to get out buffer index.\n");
ctx              1013 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_dst_set_buf_seq(ctx, buf_id, false);
ctx              1015 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "cfg[0x%x]curr_index[%d]buf_id[%d]\n", cfg,
ctx              1023 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = dev_id;
ctx              1027 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "gsc id[%d]\n", ctx->id);
ctx              1031 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		dev_err(ctx->dev, "occurred overflow at %d, status 0x%x.\n",
ctx              1032 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			ctx->id, status);
ctx              1039 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		dev_dbg(ctx->dev, "occurred frame done at %d, status 0x%x.\n",
ctx              1040 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			ctx->id, status);
ctx              1042 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		src_buf_id = gsc_get_src_buf_index(ctx);
ctx              1043 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		dst_buf_id = gsc_get_dst_buf_index(ctx);
ctx              1045 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "buf_id_src[%d]buf_id_dst[%d]\n",
ctx              1052 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (ctx->task) {
ctx              1053 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		struct exynos_drm_ipp_task *task = ctx->task;
ctx              1055 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		ctx->task = NULL;
ctx              1056 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		pm_runtime_mark_last_busy(ctx->dev);
ctx              1057 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		pm_runtime_put_autosuspend(ctx->dev);
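On completion the handler hands the device back to runtime PM through autosuspend rather than an immediate put, so back-to-back jobs do not bounce the power domain. A sketch of the two halves of that idiom (the 2000 ms delay is an assumption):

#include <linux/pm_runtime.h>

/* probe side: opt in to autosuspend with a grace period */
static void my_enable_autosuspend(struct device *dev)
{
	pm_runtime_use_autosuspend(dev);
	pm_runtime_set_autosuspend_delay(dev, 2000);	/* ms */
	pm_runtime_enable(dev);
}

/* completion side: refresh the idle timestamp, then drop the reference;
 * the device powers down only after the delay expires with no new work */
static void my_job_done(struct device *dev)
{
	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);
}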
ctx              1064 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_reset(struct gsc_context *ctx)
ctx              1066 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
ctx              1070 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_sw_reset(ctx);
ctx              1072 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		dev_err(ctx->dev, "failed to reset hardware.\n");
ctx              1077 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	memset(&ctx->sc, 0x0, sizeof(ctx->sc));
ctx              1083 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_start(struct gsc_context *ctx)
ctx              1087 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_handle_irq(ctx, true, false, true);
ctx              1107 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_scaler(ctx, &ctx->sc);
ctx              1117 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = container_of(ipp, struct gsc_context, ipp);
ctx              1120 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	pm_runtime_get_sync(ctx->dev);
ctx              1121 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->task = task;
ctx              1123 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_reset(ctx);
ctx              1125 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		pm_runtime_put_autosuspend(ctx->dev);
ctx              1126 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		ctx->task = NULL;
ctx              1130 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_fmt(ctx, task->src.buf.fourcc, task->src.buf.modifier);
ctx              1131 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_transf(ctx, task->transform.rotation);
ctx              1132 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_size(ctx, &task->src);
ctx              1133 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_src_set_addr(ctx, 0, &task->src);
ctx              1134 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_dst_set_fmt(ctx, task->dst.buf.fourcc, task->dst.buf.modifier);
ctx              1135 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_dst_set_size(ctx, &task->dst);
ctx              1136 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_dst_set_addr(ctx, 0, &task->dst);
ctx              1137 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect);
ctx              1138 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_start(ctx);
ctx              1146 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx =
ctx              1149 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_reset(ctx);
ctx              1150 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (ctx->task) {
ctx              1151 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		struct exynos_drm_ipp_task *task = ctx->task;
ctx              1153 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		ctx->task = NULL;
ctx              1154 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		pm_runtime_mark_last_busy(ctx->dev);
ctx              1155 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		pm_runtime_put_autosuspend(ctx->dev);
ctx              1167 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = dev_get_drvdata(dev);
ctx              1169 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct exynos_drm_ipp *ipp = &ctx->ipp;
ctx              1171 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->drm_dev = drm_dev;
ctx              1173 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	exynos_drm_register_dma(drm_dev, dev, &ctx->dma_priv);
ctx              1178 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			ctx->formats, ctx->num_formats, "gsc");
ctx              1188 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = dev_get_drvdata(dev);
ctx              1190 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct exynos_drm_ipp *ipp = &ctx->ipp;
ctx              1193 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	exynos_drm_unregister_dma(drm_dev, dev, &ctx->dma_priv);
ctx              1218 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx;
ctx              1222 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              1223 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (!ctx)
ctx              1227 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->dev = dev;
ctx              1228 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->num_clocks = driver_data->num_clocks;
ctx              1229 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->clk_names = driver_data->clk_names;
ctx              1256 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->formats = formats;
ctx              1257 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->num_formats = num_formats;
ctx              1260 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	for (i = 0; i < ctx->num_clocks; i++) {
ctx              1261 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		ctx->clocks[i] = devm_clk_get(dev, ctx->clk_names[i]);
ctx              1262 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		if (IS_ERR(ctx->clocks[i])) {
ctx              1264 drivers/gpu/drm/exynos/exynos_drm_gsc.c 				ctx->clk_names[i]);
ctx              1265 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			return PTR_ERR(ctx->clocks[i]);
ctx              1270 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->regs_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
ctx              1271 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->regs = devm_ioremap_resource(dev, ctx->regs_res);
ctx              1272 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	if (IS_ERR(ctx->regs))
ctx              1273 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		return PTR_ERR(ctx->regs);
ctx              1282 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->irq = res->start;
ctx              1283 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = devm_request_irq(dev, ctx->irq, gsc_irq_handler, 0,
ctx              1284 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			       dev_name(dev), ctx);
ctx              1291 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ctx->id = pdev->id;
ctx              1293 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	platform_set_drvdata(pdev, ctx);
ctx              1326 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = get_gsc_context(dev);
ctx              1329 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(dev, "id[%d]\n", ctx->id);
ctx              1331 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	for (i = ctx->num_clocks - 1; i >= 0; i--)
ctx              1332 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		clk_disable_unprepare(ctx->clocks[i]);
ctx              1339 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_context *ctx = get_gsc_context(dev);
ctx              1342 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(dev, "id[%d]\n", ctx->id);
ctx              1344 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	for (i = 0; i < ctx->num_clocks; i++) {
ctx              1345 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		ret = clk_prepare_enable(ctx->clocks[i]);
ctx              1348 drivers/gpu/drm/exynos/exynos_drm_gsc.c 				clk_disable_unprepare(ctx->clocks[i]);
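The resume path walks the clock array forward and, on a failure, unwinds every clock it already enabled; suspend walks the array backward. A compact sketch of that pattern (names assumed); the kernel's clk_bulk_prepare_enable()/clk_bulk_disable_unprepare() helpers package the same loop when a driver can use struct clk_bulk_data:

#include <linux/clk.h>

static int my_enable_clocks(struct clk **clocks, int num)
{
	int i, ret;

	for (i = 0; i < num; i++) {
		ret = clk_prepare_enable(clocks[i]);
		if (ret) {
			while (--i >= 0)	/* unwind what succeeded */
				clk_disable_unprepare(clocks[i]);
			return ret;
		}
	}
	return 0;
}

static void my_disable_clocks(struct clk **clocks, int num)
{
	int i;

	for (i = num - 1; i >= 0; i--)	/* strict reverse order */
		clk_disable_unprepare(clocks[i]);
}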
ctx                94 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = crtc->ctx;
ctx                96 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->suspended)
ctx                99 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mod_timer(&ctx->timer,
ctx               113 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = crtc->ctx;
ctx               116 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->suspended)
ctx               120 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "dma_addr = %pad\n", &addr);
ctx               125 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = crtc->ctx;
ctx               127 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_lock(&ctx->lock);
ctx               129 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->suspended = false;
ctx               131 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_unlock(&ctx->lock);
ctx               138 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = crtc->ctx;
ctx               142 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_lock(&ctx->lock);
ctx               144 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->suspended = true;
ctx               146 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_unlock(&ctx->lock);
ctx               160 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = from_timer(ctx, t, timer);
ctx               162 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (drm_crtc_handle_vblank(&ctx->crtc->base))
ctx               163 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		mod_timer(&ctx->timer,
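VIDI has no real display hardware, so it fakes vblank with a self-rearming timer. A sketch of the rearm pattern, assuming a ~60 Hz interval (the 16 ms value and my_* names are illustrative):

#include <linux/jiffies.h>
#include <linux/timer.h>

struct my_ctx {
	struct timer_list timer;
};

static bool my_handle_vblank(struct my_ctx *c);	/* stands in for drm_crtc_handle_vblank() */

static void my_fake_vblank(struct timer_list *t)
{
	struct my_ctx *c = from_timer(c, t, timer);

	/* rearm only while someone still consumes vblank events */
	if (my_handle_vblank(c))
		mod_timer(&c->timer, jiffies + msecs_to_jiffies(16));
}

/* in probe(): timer_setup(&c->timer, my_fake_vblank, 0); */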
ctx               170 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = dev_get_drvdata(dev);
ctx               173 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_lock(&ctx->lock);
ctx               175 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	rc = sprintf(buf, "%d\n", ctx->connected);
ctx               177 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_unlock(&ctx->lock);
ctx               186 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = dev_get_drvdata(dev);
ctx               189 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ret = kstrtoint(buf, 0, &ctx->connected);
ctx               193 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->connected > 1)
ctx               197 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (!ctx->raw_edid)
ctx               198 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		ctx->raw_edid = (struct edid *)fake_edid_info;
ctx               201 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->raw_edid != (struct edid *)fake_edid_info) {
ctx               208 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	drm_helper_hpd_irq_event(ctx->drm_dev);
ctx               219 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = dev_get_drvdata(drm_dev->dev);
ctx               223 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               229 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               234 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->connected == vidi->connection) {
ctx               235 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               245 drivers/gpu/drm/exynos/exynos_drm_vidi.c 			DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               249 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		ctx->raw_edid = drm_edid_duplicate(raw_edid);
ctx               250 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		if (!ctx->raw_edid) {
ctx               251 drivers/gpu/drm/exynos/exynos_drm_vidi.c 			DRM_DEV_DEBUG_KMS(ctx->dev,
ctx               260 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		if (ctx->raw_edid && ctx->raw_edid !=
ctx               262 drivers/gpu/drm/exynos/exynos_drm_vidi.c 			kfree(ctx->raw_edid);
ctx               263 drivers/gpu/drm/exynos/exynos_drm_vidi.c 			ctx->raw_edid = NULL;
ctx               267 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->connected = vidi->connection;
ctx               268 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	drm_helper_hpd_irq_event(ctx->drm_dev);
ctx               276 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = ctx_from_connector(connector);
ctx               282 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	return ctx->connected ? connector_status_connected :
ctx               301 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = ctx_from_connector(connector);
ctx               309 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (!ctx->raw_edid) {
ctx               310 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "raw_edid is null.\n");
ctx               314 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	edid_len = (1 + ctx->raw_edid->extensions) * EDID_LENGTH;
ctx               315 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	edid = kmemdup(ctx->raw_edid, edid_len, GFP_KERNEL);
ctx               317 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_DEBUG_KMS(ctx->dev, "failed to allocate edid\n");
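get_modes() duplicates the stored EDID because the DRM core consumes and frees the copy it is handed; the length must cover the base block plus any extension blocks. A sketch of just that computation (my_dup_edid is an assumed name):

#include <drm/drm_edid.h>
#include <linux/slab.h>
#include <linux/string.h>

static struct edid *my_dup_edid(const struct edid *raw)
{
	/* EDID_LENGTH is 128 bytes: one base block plus one per extension */
	size_t len = (1 + raw->extensions) * EDID_LENGTH;

	return kmemdup(raw, len, GFP_KERNEL);
}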
ctx               332 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = encoder_to_vidi(encoder);
ctx               333 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct drm_connector *connector = &ctx->connector;
ctx               338 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ret = drm_connector_init(ctx->drm_dev, connector,
ctx               341 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		DRM_DEV_ERROR(ctx->dev,
ctx               378 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = dev_get_drvdata(dev);
ctx               380 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct drm_encoder *encoder = &ctx->encoder;
ctx               386 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->drm_dev = drm_dev;
ctx               395 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
ctx               401 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	exynos_plane = &ctx->planes[DEFAULT_WIN];
ctx               402 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
ctx               403 drivers/gpu/drm/exynos/exynos_drm_vidi.c 			EXYNOS_DISPLAY_TYPE_VIDI, &vidi_crtc_ops, ctx);
ctx               404 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (IS_ERR(ctx->crtc)) {
ctx               406 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		return PTR_ERR(ctx->crtc);
ctx               432 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = dev_get_drvdata(dev);
ctx               434 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	del_timer_sync(&ctx->timer);
ctx               444 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx;
ctx               448 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               449 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (!ctx)
ctx               452 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	ctx->dev = dev;
ctx               454 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	timer_setup(&ctx->timer, vidi_fake_vblank_timer, 0);
ctx               456 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	mutex_init(&ctx->lock);
ctx               458 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	platform_set_drvdata(pdev, ctx);
ctx               481 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	struct vidi_context *ctx = platform_get_drvdata(pdev);
ctx               483 drivers/gpu/drm/exynos/exynos_drm_vidi.c 	if (ctx->raw_edid != (struct edid *)fake_edid_info) {
ctx               484 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		kfree(ctx->raw_edid);
ctx               485 drivers/gpu/drm/exynos/exynos_drm_vidi.c 		ctx->raw_edid = NULL;
ctx               182 drivers/gpu/drm/exynos/exynos_mixer.c static inline u32 vp_reg_read(struct mixer_context *ctx, u32 reg_id)
ctx               184 drivers/gpu/drm/exynos/exynos_mixer.c 	return readl(ctx->vp_regs + reg_id);
ctx               187 drivers/gpu/drm/exynos/exynos_mixer.c static inline void vp_reg_write(struct mixer_context *ctx, u32 reg_id,
ctx               190 drivers/gpu/drm/exynos/exynos_mixer.c 	writel(val, ctx->vp_regs + reg_id);
ctx               193 drivers/gpu/drm/exynos/exynos_mixer.c static inline void vp_reg_writemask(struct mixer_context *ctx, u32 reg_id,
ctx               196 drivers/gpu/drm/exynos/exynos_mixer.c 	u32 old = vp_reg_read(ctx, reg_id);
ctx               199 drivers/gpu/drm/exynos/exynos_mixer.c 	writel(val, ctx->vp_regs + reg_id);
ctx               202 drivers/gpu/drm/exynos/exynos_mixer.c static inline u32 mixer_reg_read(struct mixer_context *ctx, u32 reg_id)
ctx               204 drivers/gpu/drm/exynos/exynos_mixer.c 	return readl(ctx->mixer_regs + reg_id);
ctx               207 drivers/gpu/drm/exynos/exynos_mixer.c static inline void mixer_reg_write(struct mixer_context *ctx, u32 reg_id,
ctx               210 drivers/gpu/drm/exynos/exynos_mixer.c 	writel(val, ctx->mixer_regs + reg_id);
ctx               213 drivers/gpu/drm/exynos/exynos_mixer.c static inline void mixer_reg_writemask(struct mixer_context *ctx,
ctx               216 drivers/gpu/drm/exynos/exynos_mixer.c 	u32 old = mixer_reg_read(ctx, reg_id);
ctx               219 drivers/gpu/drm/exynos/exynos_mixer.c 	writel(val, ctx->mixer_regs + reg_id);
ctx               222 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_regs_dump(struct mixer_context *ctx)
ctx               226 drivers/gpu/drm/exynos/exynos_mixer.c 	DRM_DEV_DEBUG_KMS(ctx->dev, #reg_id " = %08x\n", \
ctx               227 drivers/gpu/drm/exynos/exynos_mixer.c 			 (u32)readl(ctx->mixer_regs + reg_id)); \
ctx               254 drivers/gpu/drm/exynos/exynos_mixer.c static void vp_regs_dump(struct mixer_context *ctx)
ctx               258 drivers/gpu/drm/exynos/exynos_mixer.c 	DRM_DEV_DEBUG_KMS(ctx->dev, #reg_id " = %08x\n", \
ctx               259 drivers/gpu/drm/exynos/exynos_mixer.c 			 (u32)readl(ctx->vp_regs + reg_id)); \
ctx               289 drivers/gpu/drm/exynos/exynos_mixer.c static inline void vp_filter_set(struct mixer_context *ctx,
ctx               297 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, reg_id, val);
ctx               301 drivers/gpu/drm/exynos/exynos_mixer.c static void vp_default_filter(struct mixer_context *ctx)
ctx               303 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_filter_set(ctx, VP_POLY8_Y0_LL,
ctx               305 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_filter_set(ctx, VP_POLY4_Y0_LL,
ctx               307 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_filter_set(ctx, VP_POLY4_C0_LL,
ctx               311 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_cfg_gfx_blend(struct mixer_context *ctx, unsigned int win,
ctx               335 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
ctx               339 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_cfg_vp_blend(struct mixer_context *ctx, unsigned int alpha)
ctx               348 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_VIDEO_CFG, val);
ctx               351 drivers/gpu/drm/exynos/exynos_mixer.c static bool mixer_is_synced(struct mixer_context *ctx)
ctx               355 drivers/gpu/drm/exynos/exynos_mixer.c 	if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
ctx               356 drivers/gpu/drm/exynos/exynos_mixer.c 	    ctx->mxr_ver == MXR_VER_128_0_0_184)
ctx               357 drivers/gpu/drm/exynos/exynos_mixer.c 		return !(mixer_reg_read(ctx, MXR_CFG) &
ctx               360 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags) &&
ctx               361 drivers/gpu/drm/exynos/exynos_mixer.c 	    vp_reg_read(ctx, VP_SHADOW_UPDATE))
ctx               364 drivers/gpu/drm/exynos/exynos_mixer.c 	base = mixer_reg_read(ctx, MXR_CFG);
ctx               365 drivers/gpu/drm/exynos/exynos_mixer.c 	shadow = mixer_reg_read(ctx, MXR_CFG_S);
ctx               369 drivers/gpu/drm/exynos/exynos_mixer.c 	base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(0));
ctx               370 drivers/gpu/drm/exynos/exynos_mixer.c 	shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(0));
ctx               374 drivers/gpu/drm/exynos/exynos_mixer.c 	base = mixer_reg_read(ctx, MXR_GRAPHIC_BASE(1));
ctx               375 drivers/gpu/drm/exynos/exynos_mixer.c 	shadow = mixer_reg_read(ctx, MXR_GRAPHIC_BASE_S(1));
ctx               382 drivers/gpu/drm/exynos/exynos_mixer.c static int mixer_wait_for_sync(struct mixer_context *ctx)
ctx               386 drivers/gpu/drm/exynos/exynos_mixer.c 	while (!mixer_is_synced(ctx)) {
ctx               394 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_disable_sync(struct mixer_context *ctx)
ctx               396 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_SYNC_ENABLE);
ctx               399 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_enable_sync(struct mixer_context *ctx)
ctx               401 drivers/gpu/drm/exynos/exynos_mixer.c 	if (ctx->mxr_ver == MXR_VER_16_0_33_0 ||
ctx               402 drivers/gpu/drm/exynos/exynos_mixer.c 	    ctx->mxr_ver == MXR_VER_128_0_0_184)
ctx               403 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_CFG, ~0, MXR_CFG_LAYER_UPDATE);
ctx               404 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SYNC_ENABLE);
ctx               405 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
ctx               406 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_SHADOW_UPDATE, VP_SHADOW_UPDATE_ENABLE);
ctx               409 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_cfg_scan(struct mixer_context *ctx, int width, int height)
ctx               414 drivers/gpu/drm/exynos/exynos_mixer.c 	val = test_bit(MXR_BIT_INTERLACE, &ctx->flags) ?
ctx               417 drivers/gpu/drm/exynos/exynos_mixer.c 	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
ctx               418 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_write(ctx, MXR_RESOLUTION,
ctx               421 drivers/gpu/drm/exynos/exynos_mixer.c 		val |= ctx->scan_value;
ctx               423 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_SCAN_MASK);
ctx               426 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_cfg_rgb_fmt(struct mixer_context *ctx, struct drm_display_mode *mode)
ctx               437 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_write(ctx, MXR_CM_COEFF_Y,
ctx               440 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_write(ctx, MXR_CM_COEFF_CB,
ctx               442 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_write(ctx, MXR_CM_COEFF_CR,
ctx               451 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_RGB_FMT_MASK);
ctx               454 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_cfg_layer(struct mixer_context *ctx, unsigned int win,
ctx               461 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP0_ENABLE);
ctx               462 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
ctx               467 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_CFG, val, MXR_CFG_GRP1_ENABLE);
ctx               468 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_LAYER_CFG,
ctx               474 drivers/gpu/drm/exynos/exynos_mixer.c 		if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
ctx               475 drivers/gpu/drm/exynos/exynos_mixer.c 			vp_reg_writemask(ctx, VP_ENABLE, val, VP_ENABLE_ON);
ctx               476 drivers/gpu/drm/exynos/exynos_mixer.c 			mixer_reg_writemask(ctx, MXR_CFG, val,
ctx               478 drivers/gpu/drm/exynos/exynos_mixer.c 			mixer_reg_writemask(ctx, MXR_LAYER_CFG,
ctx               486 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_run(struct mixer_context *ctx)
ctx               488 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_REG_RUN);
ctx               491 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_stop(struct mixer_context *ctx)
ctx               495 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, 0, MXR_STATUS_REG_RUN);
ctx               497 drivers/gpu/drm/exynos/exynos_mixer.c 	while (!(mixer_reg_read(ctx, MXR_STATUS) & MXR_STATUS_REG_IDLE) &&
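mixer_stop() polls MXR_STATUS for the idle bit with a bounded retry. The kernel has a ready-made helper for bounded MMIO polling; a sketch using it (the 10 us sample interval and 100 ms budget are assumptions):

#include <linux/iopoll.h>

/* sleep-poll the status register until idle_bit appears, sampling every
 * 10 us and giving up after 100 ms; returns 0 or -ETIMEDOUT */
static int my_wait_idle(void __iomem *status_reg, u32 idle_bit)
{
	u32 val;

	return readl_poll_timeout(status_reg, val, val & idle_bit,
				  10, 100000);
}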
ctx               502 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_commit(struct mixer_context *ctx)
ctx               504 drivers/gpu/drm/exynos/exynos_mixer.c 	struct drm_display_mode *mode = &ctx->crtc->base.state->adjusted_mode;
ctx               506 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_scan(ctx, mode->hdisplay, mode->vdisplay);
ctx               507 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_rgb_fmt(ctx, mode);
ctx               508 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_run(ctx);
ctx               511 drivers/gpu/drm/exynos/exynos_mixer.c static void vp_video_buffer(struct mixer_context *ctx,
ctx               529 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
ctx               542 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_lock_irqsave(&ctx->reg_slock, flags);
ctx               545 drivers/gpu/drm/exynos/exynos_mixer.c 	val = (test_bit(MXR_BIT_INTERLACE, &ctx->flags) ? ~0 : 0);
ctx               546 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_LINE_SKIP);
ctx               551 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_writemask(ctx, VP_MODE, val, VP_MODE_FMT_MASK);
ctx               554 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_IMG_SIZE_Y, VP_IMG_HSIZE(fb->pitches[0]) |
ctx               557 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_IMG_SIZE_C, VP_IMG_HSIZE(fb->pitches[1]) |
ctx               560 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_SRC_WIDTH, state->src.w);
ctx               561 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_SRC_H_POSITION,
ctx               563 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_DST_WIDTH, state->crtc.w);
ctx               564 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_DST_H_POSITION, state->crtc.x);
ctx               566 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)) {
ctx               567 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h / 2);
ctx               568 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y / 2);
ctx               569 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h / 2);
ctx               570 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y / 2);
ctx               572 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_SRC_HEIGHT, state->src.h);
ctx               573 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_SRC_V_POSITION, state->src.y);
ctx               574 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_DST_HEIGHT, state->crtc.h);
ctx               575 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_reg_write(ctx, VP_DST_V_POSITION, state->crtc.y);
ctx               578 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_H_RATIO, state->h_ratio);
ctx               579 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_V_RATIO, state->v_ratio);
ctx               581 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_ENDIAN_MODE, VP_ENDIAN_MODE_LITTLE);
ctx               584 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_TOP_Y_PTR, luma_addr[0]);
ctx               585 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_BOT_Y_PTR, luma_addr[1]);
ctx               586 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_TOP_C_PTR, chroma_addr[0]);
ctx               587 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_BOT_C_PTR, chroma_addr[1]);
ctx               589 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_layer(ctx, plane->index, priority, true);
ctx               590 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_vp_blend(ctx, state->base.alpha);
ctx               592 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_unlock_irqrestore(&ctx->reg_slock, flags);
ctx               594 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_regs_dump(ctx);
ctx               595 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_regs_dump(ctx);
ctx               598 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_graph_buffer(struct mixer_context *ctx,
ctx               653 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_lock_irqsave(&ctx->reg_slock, flags);
ctx               656 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_GRAPHIC_CFG(win),
ctx               660 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_SPAN(win),
ctx               667 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_WH(win), val);
ctx               672 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_DXY(win), val);
ctx               675 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_BASE(win), dma_addr);
ctx               677 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_layer(ctx, win, priority, true);
ctx               678 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_cfg_gfx_blend(ctx, win, pixel_alpha, state->base.alpha);
ctx               680 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_unlock_irqrestore(&ctx->reg_slock, flags);
ctx               682 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_regs_dump(ctx);
ctx               685 drivers/gpu/drm/exynos/exynos_mixer.c static void vp_win_reset(struct mixer_context *ctx)
ctx               689 drivers/gpu/drm/exynos/exynos_mixer.c 	vp_reg_write(ctx, VP_SRESET, VP_SRESET_PROCESSING);
ctx               692 drivers/gpu/drm/exynos/exynos_mixer.c 		if (~vp_reg_read(ctx, VP_SRESET) & VP_SRESET_PROCESSING)
ctx               699 drivers/gpu/drm/exynos/exynos_mixer.c static void mixer_win_reset(struct mixer_context *ctx)
ctx               703 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_lock_irqsave(&ctx->reg_slock, flags);
ctx               705 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_DST_HDMI, MXR_CFG_DST_MASK);
ctx               708 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, MXR_CFG_OUT_RGB888, MXR_CFG_OUT_MASK);
ctx               711 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, MXR_STATUS_16_BURST,
ctx               715 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_LAYER_CFG, 0);
ctx               718 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_BG_COLOR0, MXR_YCBCR_VAL(0, 128, 128));
ctx               719 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_BG_COLOR1, MXR_YCBCR_VAL(0, 128, 128));
ctx               720 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_BG_COLOR2, MXR_YCBCR_VAL(0, 128, 128));
ctx               722 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
ctx               724 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_win_reset(ctx);
ctx               725 drivers/gpu/drm/exynos/exynos_mixer.c 		vp_default_filter(ctx);
ctx               729 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP0_ENABLE);
ctx               730 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_GRP1_ENABLE);
ctx               731 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags))
ctx               732 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_CFG, 0, MXR_CFG_VP_ENABLE);
ctx               735 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(0), 0);
ctx               736 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_GRAPHIC_SXY(1), 0);
ctx               738 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_unlock_irqrestore(&ctx->reg_slock, flags);
ctx               743 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = arg;
ctx               746 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_lock(&ctx->reg_slock);
ctx               749 drivers/gpu/drm/exynos/exynos_mixer.c 	val = mixer_reg_read(ctx, MXR_INT_STATUS);
ctx               758 drivers/gpu/drm/exynos/exynos_mixer.c 		if (test_bit(MXR_BIT_INTERLACE, &ctx->flags)
ctx               759 drivers/gpu/drm/exynos/exynos_mixer.c 		    && !mixer_is_synced(ctx))
ctx               762 drivers/gpu/drm/exynos/exynos_mixer.c 		drm_crtc_handle_vblank(&ctx->crtc->base);
ctx               767 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_write(ctx, MXR_INT_STATUS, val);
ctx               769 drivers/gpu/drm/exynos/exynos_mixer.c 	spin_unlock(&ctx->reg_slock);
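The handler takes the register spinlock with plain spin_lock() (interrupts are already disabled in hard-irq context), reads the latched status, services vsync, and writes the status back as a write-1-to-clear acknowledge. A self-contained skeleton of that shape (register offsets and names assumed):

#include <linux/bits.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/spinlock.h>

#define MY_INT_STATUS		0x0
#define MY_INT_STATUS_VSYNC	BIT(0)

struct my_ctx {
	spinlock_t reg_slock;
	void __iomem *regs;
};

static irqreturn_t my_irq_handler(int irq, void *arg)
{
	struct my_ctx *c = arg;
	u32 val;

	spin_lock(&c->reg_slock);

	val = readl(c->regs + MY_INT_STATUS);
	if (val & MY_INT_STATUS_VSYNC) {
		/* forward the vblank event to DRM here */
	}

	writel(val, c->regs + MY_INT_STATUS);	/* W1C acknowledge */

	spin_unlock(&c->reg_slock);
	return IRQ_HANDLED;
}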
ctx               910 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *mixer_ctx = crtc->ctx;
ctx               925 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *mixer_ctx = crtc->ctx;
ctx               939 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = crtc->ctx;
ctx               941 drivers/gpu/drm/exynos/exynos_mixer.c 	if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
ctx               944 drivers/gpu/drm/exynos/exynos_mixer.c 	if (mixer_wait_for_sync(ctx))
ctx               945 drivers/gpu/drm/exynos/exynos_mixer.c 		dev_err(ctx->dev, "timeout waiting for VSYNC\n");
ctx               946 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_disable_sync(ctx);
ctx               952 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *mixer_ctx = crtc->ctx;
ctx               968 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *mixer_ctx = crtc->ctx;
ctx               983 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *mixer_ctx = crtc->ctx;
ctx               994 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = crtc->ctx;
ctx               996 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_POWERED, &ctx->flags))
ctx               999 drivers/gpu/drm/exynos/exynos_mixer.c 	pm_runtime_get_sync(ctx->dev);
ctx              1003 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_disable_sync(ctx);
ctx              1005 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_reg_writemask(ctx, MXR_STATUS, ~0, MXR_STATUS_SOFT_RESET);
ctx              1007 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VSYNC, &ctx->flags)) {
ctx              1008 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_INT_STATUS, ~0,
ctx              1010 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_reg_writemask(ctx, MXR_INT_EN, ~0, MXR_INT_EN_VSYNC);
ctx              1012 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_win_reset(ctx);
ctx              1014 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_commit(ctx);
ctx              1016 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_enable_sync(ctx);
ctx              1018 drivers/gpu/drm/exynos/exynos_mixer.c 	set_bit(MXR_BIT_POWERED, &ctx->flags);
ctx              1023 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = crtc->ctx;
ctx              1026 drivers/gpu/drm/exynos/exynos_mixer.c 	if (!test_bit(MXR_BIT_POWERED, &ctx->flags))
ctx              1029 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_stop(ctx);
ctx              1030 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_regs_dump(ctx);
ctx              1033 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_disable_plane(crtc, &ctx->planes[i]);
ctx              1037 drivers/gpu/drm/exynos/exynos_mixer.c 	pm_runtime_put(ctx->dev);
ctx              1039 drivers/gpu/drm/exynos/exynos_mixer.c 	clear_bit(MXR_BIT_POWERED, &ctx->flags);
ctx              1045 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = crtc->ctx;
ctx              1048 drivers/gpu/drm/exynos/exynos_mixer.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "xres=%d, yres=%d, refresh=%d, intl=%d\n",
ctx              1052 drivers/gpu/drm/exynos/exynos_mixer.c 	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
ctx              1072 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = crtc->ctx;
ctx              1086 drivers/gpu/drm/exynos/exynos_mixer.c 		__set_bit(MXR_BIT_INTERLACE, &ctx->flags);
ctx              1088 drivers/gpu/drm/exynos/exynos_mixer.c 		__clear_bit(MXR_BIT_INTERLACE, &ctx->flags);
ctx              1090 drivers/gpu/drm/exynos/exynos_mixer.c 	if (ctx->mxr_ver == MXR_VER_128_0_0_184)
ctx              1095 drivers/gpu/drm/exynos/exynos_mixer.c 			ctx->scan_value = modes[i].scan_val;
ctx              1172 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = dev_get_drvdata(dev);
ctx              1178 drivers/gpu/drm/exynos/exynos_mixer.c 	ret = mixer_initialize(ctx, drm_dev);
ctx              1184 drivers/gpu/drm/exynos/exynos_mixer.c 						     &ctx->flags))
ctx              1187 drivers/gpu/drm/exynos/exynos_mixer.c 		ret = exynos_plane_init(drm_dev, &ctx->planes[i], i,
ctx              1193 drivers/gpu/drm/exynos/exynos_mixer.c 	exynos_plane = &ctx->planes[DEFAULT_WIN];
ctx              1194 drivers/gpu/drm/exynos/exynos_mixer.c 	ctx->crtc = exynos_drm_crtc_create(drm_dev, &exynos_plane->base,
ctx              1195 drivers/gpu/drm/exynos/exynos_mixer.c 			EXYNOS_DISPLAY_TYPE_HDMI, &mixer_crtc_ops, ctx);
ctx              1196 drivers/gpu/drm/exynos/exynos_mixer.c 	if (IS_ERR(ctx->crtc)) {
ctx              1197 drivers/gpu/drm/exynos/exynos_mixer.c 		mixer_ctx_remove(ctx);
ctx              1198 drivers/gpu/drm/exynos/exynos_mixer.c 		ret = PTR_ERR(ctx->crtc);
ctx              1205 drivers/gpu/drm/exynos/exynos_mixer.c 	devm_kfree(dev, ctx);
ctx              1211 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = dev_get_drvdata(dev);
ctx              1213 drivers/gpu/drm/exynos/exynos_mixer.c 	mixer_ctx_remove(ctx);
ctx              1225 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx;
ctx              1228 drivers/gpu/drm/exynos/exynos_mixer.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              1229 drivers/gpu/drm/exynos/exynos_mixer.c 	if (!ctx) {
ctx              1236 drivers/gpu/drm/exynos/exynos_mixer.c 	ctx->pdev = pdev;
ctx              1237 drivers/gpu/drm/exynos/exynos_mixer.c 	ctx->dev = dev;
ctx              1238 drivers/gpu/drm/exynos/exynos_mixer.c 	ctx->mxr_ver = drv->version;
ctx              1241 drivers/gpu/drm/exynos/exynos_mixer.c 		__set_bit(MXR_BIT_VP_ENABLED, &ctx->flags);
ctx              1243 drivers/gpu/drm/exynos/exynos_mixer.c 		__set_bit(MXR_BIT_HAS_SCLK, &ctx->flags);
ctx              1245 drivers/gpu/drm/exynos/exynos_mixer.c 	platform_set_drvdata(pdev, ctx);
ctx              1265 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = dev_get_drvdata(dev);
ctx              1267 drivers/gpu/drm/exynos/exynos_mixer.c 	clk_disable_unprepare(ctx->hdmi);
ctx              1268 drivers/gpu/drm/exynos/exynos_mixer.c 	clk_disable_unprepare(ctx->mixer);
ctx              1269 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
ctx              1270 drivers/gpu/drm/exynos/exynos_mixer.c 		clk_disable_unprepare(ctx->vp);
ctx              1271 drivers/gpu/drm/exynos/exynos_mixer.c 		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags))
ctx              1272 drivers/gpu/drm/exynos/exynos_mixer.c 			clk_disable_unprepare(ctx->sclk_mixer);
ctx              1280 drivers/gpu/drm/exynos/exynos_mixer.c 	struct mixer_context *ctx = dev_get_drvdata(dev);
ctx              1283 drivers/gpu/drm/exynos/exynos_mixer.c 	ret = clk_prepare_enable(ctx->mixer);
ctx              1285 drivers/gpu/drm/exynos/exynos_mixer.c 		DRM_DEV_ERROR(ctx->dev,
ctx              1290 drivers/gpu/drm/exynos/exynos_mixer.c 	ret = clk_prepare_enable(ctx->hdmi);
ctx              1297 drivers/gpu/drm/exynos/exynos_mixer.c 	if (test_bit(MXR_BIT_VP_ENABLED, &ctx->flags)) {
ctx              1298 drivers/gpu/drm/exynos/exynos_mixer.c 		ret = clk_prepare_enable(ctx->vp);
ctx              1305 drivers/gpu/drm/exynos/exynos_mixer.c 		if (test_bit(MXR_BIT_HAS_SCLK, &ctx->flags)) {
ctx              1306 drivers/gpu/drm/exynos/exynos_mixer.c 			ret = clk_prepare_enable(ctx->sclk_mixer);
ctx               176 drivers/gpu/drm/gma500/gma_display.c 		       struct drm_modeset_acquire_ctx *ctx)
ctx               505 drivers/gpu/drm/gma500/gma_display.c 			struct drm_modeset_acquire_ctx *ctx)
ctx               512 drivers/gpu/drm/gma500/gma_display.c 		return drm_crtc_helper_set_config(set, ctx);
ctx               515 drivers/gpu/drm/gma500/gma_display.c 	ret = drm_crtc_helper_set_config(set, ctx);
ctx                68 drivers/gpu/drm/gma500/gma_display.h 			      struct drm_modeset_acquire_ctx *ctx);
ctx                75 drivers/gpu/drm/gma500/gma_display.h 			       struct drm_modeset_acquire_ctx *ctx);
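These prototypes thread a struct drm_modeset_acquire_ctx through the set-config paths. Callers conventionally drive the context with the deadlock-backoff loop sketched below (my_locked_op is illustrative; the acquire/lock/backoff calls are the real DRM API):

#include <drm/drm_crtc.h>
#include <drm/drm_modeset_lock.h>

static int my_locked_op(struct drm_crtc *crtc)
{
	struct drm_modeset_acquire_ctx ctx;
	int ret;

	drm_modeset_acquire_init(&ctx, 0);
retry:
	ret = drm_modeset_lock(&crtc->mutex, &ctx);
	if (ret == -EDEADLK) {
		drm_modeset_backoff(&ctx);	/* drop all, reacquire in order */
		goto retry;
	}
	if (!ret) {
		/* mutate CRTC state under the lock */
	}
	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	return ret;
}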
ctx                86 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx;
ctx                97 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx ctx;
ctx               538 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx = dsi->ctx;
ctx               542 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	void __iomem *base = ctx->base;
ctx               573 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx = dsi->ctx;
ctx               574 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	void __iomem *base = ctx->base;
ctx               582 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	clk_disable_unprepare(ctx->pclk);
ctx               590 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx = dsi->ctx;
ctx               596 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	ret = clk_prepare_enable(ctx->pclk);
ctx               822 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx = dsi->ctx;
ctx               835 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
ctx               836 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	if (IS_ERR(ctx->pclk)) {
ctx               838 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 		return PTR_ERR(ctx->pclk);
ctx               842 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	ctx->base = devm_ioremap_resource(&pdev->dev, res);
ctx               843 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	if (IS_ERR(ctx->base)) {
ctx               845 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 		return PTR_ERR(ctx->base);
ctx               855 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	struct dsi_hw_ctx *ctx;
ctx               864 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	ctx = &data->ctx;
ctx               865 drivers/gpu/drm/hisilicon/kirin/dw_drm_dsi.c 	dsi->ctx = ctx;
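The probe path above gathers the clock and the register window into a hardware-context struct using managed (devm_*) helpers, so nothing needs explicit release on error or removal. A condensed sketch of that resource parse (struct and function names assumed):

#include <linux/clk.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/platform_device.h>

struct my_hw_ctx {
	void __iomem *base;
	struct clk *pclk;
};

static int my_parse_resources(struct platform_device *pdev,
			      struct my_hw_ctx *ctx)
{
	struct resource *res;

	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
	if (IS_ERR(ctx->pclk))
		return PTR_ERR(ctx->pclk);	/* managed: no explicit put */

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ctx->base))
		return PTR_ERR(ctx->base);

	return 0;
}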
ctx               116 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_init(struct ade_hw_ctx *ctx)
ctx               118 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               145 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               148 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 		clk_round_rate(ctx->ade_pix_clk, mode->clock * 1000) / 1000;
ctx               153 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_set_pix_clk(struct ade_hw_ctx *ctx,
ctx               164 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ret = clk_set_rate(ctx->ade_pix_clk, clk_Hz);
ctx               167 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	adj_mode->clock = clk_get_rate(ctx->ade_pix_clk) / 1000;
ctx               170 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_ldi_set_mode(struct ade_hw_ctx *ctx,
ctx               174 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               214 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_set_pix_clk(ctx, mode, adj_mode);
ctx               219 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static int ade_power_up(struct ade_hw_ctx *ctx)
ctx               223 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ret = clk_prepare_enable(ctx->media_noc_clk);
ctx               229 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ret = reset_control_deassert(ctx->reset);
ctx               235 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ret = clk_prepare_enable(ctx->ade_core_clk);
ctx               241 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_init(ctx);
ctx               242 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->power_on = true;
ctx               246 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_power_down(struct ade_hw_ctx *ctx)
ctx               248 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               254 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	clk_disable_unprepare(ctx->ade_core_clk);
ctx               255 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	reset_control_assert(ctx->reset);
ctx               256 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	clk_disable_unprepare(ctx->media_noc_clk);
ctx               257 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->power_on = false;
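
ade_power_up()/ade_power_down() show why the ctx bundles clocks and a reset line: power-up enables the bus clock, deasserts reset, then enables the core clock, and power-down reverses that order exactly. A hedged sketch with a hypothetical my_pm_ctx, adding the error unwinding such a sequence needs:

#include <linux/clk.h>
#include <linux/reset.h>

struct my_pm_ctx {
        struct clk *noc_clk, *core_clk;
        struct reset_control *reset;
        bool power_on;
};

static int my_power_up(struct my_pm_ctx *ctx)
{
        int ret;

        ret = clk_prepare_enable(ctx->noc_clk);
        if (ret)
                return ret;

        ret = reset_control_deassert(ctx->reset);
        if (ret)
                goto err_noc;

        ret = clk_prepare_enable(ctx->core_clk);
        if (ret)
                goto err_reset;

        ctx->power_on = true;
        return 0;

err_reset:
        reset_control_assert(ctx->reset);
err_noc:
        clk_disable_unprepare(ctx->noc_clk);
        return ret;
}

static void my_power_down(struct my_pm_ctx *ctx)
{
        /* strict reverse of my_power_up() */
        clk_disable_unprepare(ctx->core_clk);
        reset_control_assert(ctx->reset);
        clk_disable_unprepare(ctx->noc_clk);
        ctx->power_on = false;
}
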
ctx               260 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_set_medianoc_qos(struct ade_hw_ctx *ctx)
ctx               262 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct regmap *map = ctx->noc_regmap;
ctx               278 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               279 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               281 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx->power_on)
ctx               282 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 		(void)ade_power_up(ctx);
ctx               293 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               294 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               296 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx->power_on) {
ctx               307 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = data;
ctx               308 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct drm_crtc *crtc = ctx->crtc;
ctx               309 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               325 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c static void ade_display_enable(struct ade_hw_ctx *ctx)
ctx               327 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               442 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               448 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx->power_on) {
ctx               449 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 		ret = ade_power_up(ctx);
ctx               454 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_set_medianoc_qos(ctx);
ctx               455 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_display_enable(ctx);
ctx               456 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_dump_regs(ctx->base);
ctx               465 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               471 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_power_down(ctx);
ctx               478 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               482 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx->power_on)
ctx               483 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 		(void)ade_power_up(ctx);
ctx               484 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_ldi_set_mode(ctx, mode, adj_mode);
ctx               491 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               495 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx->power_on)
ctx               496 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 		(void)ade_power_up(ctx);
ctx               497 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ade_ldi_set_mode(ctx, mode, adj_mode);
ctx               505 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
ctx               507 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               715 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kplane->hw_ctx;
ctx               716 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               744 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = kplane->hw_ctx;
ctx               745 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	void __iomem *base = ctx->base;
ctx               846 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	struct ade_hw_ctx *ctx = NULL;
ctx               849 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               850 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (!ctx) {
ctx               856 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->base = devm_ioremap_resource(dev, res);
ctx               857 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->base)) {
ctx               862 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->reset = devm_reset_control_get(dev, NULL);
ctx               863 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->reset))
ctx               866 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->noc_regmap =
ctx               868 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->noc_regmap)) {
ctx               873 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->irq = platform_get_irq(pdev, 0);
ctx               874 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (ctx->irq < 0) {
ctx               879 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->ade_core_clk = devm_clk_get(dev, "clk_ade_core");
ctx               880 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->ade_core_clk)) {
ctx               885 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->media_noc_clk = devm_clk_get(dev, "clk_codec_jpeg");
ctx               886 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->media_noc_clk)) {
ctx               891 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->ade_pix_clk = devm_clk_get(dev, "clk_ade_pix");
ctx               892 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	if (IS_ERR(ctx->ade_pix_clk)) {
ctx               898 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ret = devm_request_irq(dev, ctx->irq, ade_irq_handler,
ctx               899 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 			       IRQF_SHARED, dev->driver->name, ctx);
ctx               903 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	ctx->crtc = crtc;
ctx               905 drivers/gpu/drm/hisilicon/kirin/kirin_drm_ade.c 	return ctx;
ctx               110 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 	void *ctx;
ctx               120 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 	ctx = driver_data->alloc_hw_ctx(pdev, &kirin_priv->crtc.base);
ctx               121 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 	if (IS_ERR(ctx)) {
ctx               125 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 	kirin_priv->hw_ctx = ctx;
ctx               142 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 		kirin_priv->planes[ch].hw_ctx = ctx;
ctx               151 drivers/gpu/drm/hisilicon/kirin/kirin_drm_drv.c 	kirin_priv->crtc.hw_ctx = ctx;
ctx               803 drivers/gpu/drm/i915/display/intel_audio.c 	struct drm_modeset_acquire_ctx ctx;
ctx               807 drivers/gpu/drm/i915/display/intel_audio.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               812 drivers/gpu/drm/i915/display/intel_audio.c 	state->acquire_ctx = &ctx;
ctx               825 drivers/gpu/drm/i915/display/intel_audio.c 			       &ctx);
ctx               831 drivers/gpu/drm/i915/display/intel_audio.c 		drm_modeset_backoff(&ctx);
ctx               839 drivers/gpu/drm/i915/display/intel_audio.c 	drm_modeset_drop_locks(&ctx);
ctx               840 drivers/gpu/drm/i915/display/intel_audio.c 	drm_modeset_acquire_fini(&ctx);
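
The intel_audio entries trace the canonical drm_modeset_acquire_ctx life cycle, which recurs below in intel_ddi, intel_dp, intel_pipe_crc, intel_psr and intel_sprite: acquire_init, take locks, back off and retry on -EDEADLK, then drop_locks and acquire_fini. A minimal sketch of that retry loop (the drm_modeset_* calls are the real API; the locked work itself is elided):

#include <drm/drm_device.h>
#include <drm/drm_modeset_lock.h>

static void lock_all_and_work(struct drm_device *dev)
{
        struct drm_modeset_acquire_ctx ctx;
        int ret;

        drm_modeset_acquire_init(&ctx, 0);
retry:
        ret = drm_modeset_lock_all_ctx(dev, &ctx);
        if (ret == -EDEADLK) {
                /* ww-mutex deadlock: drop our locks, wait, try again */
                drm_modeset_backoff(&ctx);
                goto retry;
        }
        if (ret == 0) {
                /* ... all modeset locks held: do the work here ... */
        }
        drm_modeset_drop_locks(&ctx);
        drm_modeset_acquire_fini(&ctx);
}
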
ctx               786 drivers/gpu/drm/i915/display/intel_crt.c 		 struct drm_modeset_acquire_ctx *ctx,
ctx               847 drivers/gpu/drm/i915/display/intel_crt.c 	ret = intel_get_load_detect_pipe(connector, NULL, &tmp, ctx);
ctx               858 drivers/gpu/drm/i915/display/intel_crt.c 		intel_release_load_detect_pipe(connector, &tmp, ctx);
ctx              4046 drivers/gpu/drm/i915/display/intel_ddi.c 			struct drm_modeset_acquire_ctx *ctx)
ctx              4056 drivers/gpu/drm/i915/display/intel_ddi.c 	state->acquire_ctx = ctx;
ctx              4074 drivers/gpu/drm/i915/display/intel_ddi.c 				 struct drm_modeset_acquire_ctx *ctx)
ctx              4091 drivers/gpu/drm/i915/display/intel_ddi.c 			       ctx);
ctx              4101 drivers/gpu/drm/i915/display/intel_ddi.c 	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
ctx              4141 drivers/gpu/drm/i915/display/intel_ddi.c 	return modeset_pipe(&crtc->base, ctx);
ctx              4150 drivers/gpu/drm/i915/display/intel_ddi.c 	struct drm_modeset_acquire_ctx ctx;
ctx              4156 drivers/gpu/drm/i915/display/intel_ddi.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              4160 drivers/gpu/drm/i915/display/intel_ddi.c 			ret = intel_hdmi_reset_link(encoder, &ctx);
ctx              4162 drivers/gpu/drm/i915/display/intel_ddi.c 			ret = intel_dp_retrain_link(encoder, &ctx);
ctx              4165 drivers/gpu/drm/i915/display/intel_ddi.c 			drm_modeset_backoff(&ctx);
ctx              4172 drivers/gpu/drm/i915/display/intel_ddi.c 	drm_modeset_drop_locks(&ctx);
ctx              4173 drivers/gpu/drm/i915/display/intel_ddi.c 	drm_modeset_acquire_fini(&ctx);
ctx               146 drivers/gpu/drm/i915/display/intel_display.c 					 struct drm_modeset_acquire_ctx *ctx);
ctx              4223 drivers/gpu/drm/i915/display/intel_display.c 		       struct drm_modeset_acquire_ctx *ctx)
ctx              4229 drivers/gpu/drm/i915/display/intel_display.c 	intel_modeset_setup_hw_state(dev, ctx);
ctx              4253 drivers/gpu/drm/i915/display/intel_display.c 	ret = drm_atomic_helper_commit_duplicated_state(state, ctx);
ctx              4268 drivers/gpu/drm/i915/display/intel_display.c 	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
ctx              4292 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_init(ctx, 0);
ctx              4294 drivers/gpu/drm/i915/display/intel_display.c 		ret = drm_modeset_lock_all_ctx(dev, ctx);
ctx              4298 drivers/gpu/drm/i915/display/intel_display.c 		drm_modeset_backoff(ctx);
ctx              4304 drivers/gpu/drm/i915/display/intel_display.c 	state = drm_atomic_helper_duplicate_state(dev, ctx);
ctx              4311 drivers/gpu/drm/i915/display/intel_display.c 	ret = drm_atomic_helper_disable_all(dev, ctx);
ctx              4319 drivers/gpu/drm/i915/display/intel_display.c 	state->acquire_ctx = ctx;
ctx              4325 drivers/gpu/drm/i915/display/intel_display.c 	struct drm_modeset_acquire_ctx *ctx = &dev_priv->reset_ctx;
ctx              4340 drivers/gpu/drm/i915/display/intel_display.c 		ret = __intel_display_resume(dev, state, ctx);
ctx              4357 drivers/gpu/drm/i915/display/intel_display.c 		ret = __intel_display_resume(dev, state, ctx);
ctx              4366 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_drop_locks(ctx);
ctx              4367 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_fini(ctx);
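
intel_display.c's reset path pairs the acquire ctx with the atomic helpers: duplicate the current display state before the GPU reset, disable everything, and commit the duplicated state back afterwards. A condensed sketch of that save/restore idea, using the real helper names but keeping the saved state in a file-scope variable and trimming error handling (the driver itself stores it in its private struct and retries on failure):

#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>

static struct drm_atomic_state *saved;	/* sketch only; lives in dev_priv in practice */

static void my_reset_prepare(struct drm_device *dev,
                             struct drm_modeset_acquire_ctx *ctx)
{
        /* snapshot the current display state, then turn everything off */
        saved = drm_atomic_helper_duplicate_state(dev, ctx);
        if (!IS_ERR(saved))
                drm_atomic_helper_disable_all(dev, ctx);
}

static void my_reset_finish(struct drm_device *dev,
                            struct drm_modeset_acquire_ctx *ctx)
{
        if (IS_ERR_OR_NULL(saved))
                return;

        saved->acquire_ctx = ctx;
        /* error handling condensed; the real path retries on -EDEADLK */
        drm_atomic_helper_commit_duplicated_state(saved, ctx);
        drm_atomic_state_put(saved);
        saved = NULL;
}
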
ctx              7033 drivers/gpu/drm/i915/display/intel_display.c 					struct drm_modeset_acquire_ctx *ctx)
ctx              7065 drivers/gpu/drm/i915/display/intel_display.c 	state->acquire_ctx = ctx;
ctx              11089 drivers/gpu/drm/i915/display/intel_display.c 			       struct drm_modeset_acquire_ctx *ctx)
ctx              11127 drivers/gpu/drm/i915/display/intel_display.c 		ret = drm_modeset_lock(&crtc->mutex, ctx);
ctx              11141 drivers/gpu/drm/i915/display/intel_display.c 		ret = drm_modeset_lock(&possible_crtc->mutex, ctx);
ctx              11173 drivers/gpu/drm/i915/display/intel_display.c 	state->acquire_ctx = ctx;
ctx              11174 drivers/gpu/drm/i915/display/intel_display.c 	restore_state->acquire_ctx = ctx;
ctx              11246 drivers/gpu/drm/i915/display/intel_display.c 				    struct drm_modeset_acquire_ctx *ctx)
ctx              11261 drivers/gpu/drm/i915/display/intel_display.c 	ret = drm_atomic_helper_commit_duplicated_state(state, ctx);
ctx              14692 drivers/gpu/drm/i915/display/intel_display.c 			   struct drm_modeset_acquire_ctx *ctx)
ctx              14810 drivers/gpu/drm/i915/display/intel_display.c 					      src_x, src_y, src_w, src_h, ctx);
ctx              15988 drivers/gpu/drm/i915/display/intel_display.c 	struct drm_modeset_acquire_ctx ctx;
ctx              16000 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              16002 drivers/gpu/drm/i915/display/intel_display.c 	ret = drm_modeset_lock_all_ctx(dev, &ctx);
ctx              16004 drivers/gpu/drm/i915/display/intel_display.c 		drm_modeset_backoff(&ctx);
ctx              16010 drivers/gpu/drm/i915/display/intel_display.c 	state = drm_atomic_helper_duplicate_state(dev, &ctx);
ctx              16052 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_drop_locks(&ctx);
ctx              16053 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_fini(&ctx);
ctx              16075 drivers/gpu/drm/i915/display/intel_display.c 	struct drm_modeset_acquire_ctx ctx;
ctx              16084 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              16087 drivers/gpu/drm/i915/display/intel_display.c 	state->acquire_ctx = &ctx;
ctx              16116 drivers/gpu/drm/i915/display/intel_display.c 		drm_modeset_backoff(&ctx);
ctx              16122 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_drop_locks(&ctx);
ctx              16123 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_fini(&ctx);
ctx              16435 drivers/gpu/drm/i915/display/intel_display.c 				struct drm_modeset_acquire_ctx *ctx)
ctx              16476 drivers/gpu/drm/i915/display/intel_display.c 		intel_crtc_disable_noatomic(&crtc->base, ctx);
ctx              16945 drivers/gpu/drm/i915/display/intel_display.c 			     struct drm_modeset_acquire_ctx *ctx)
ctx              16996 drivers/gpu/drm/i915/display/intel_display.c 		intel_sanitize_crtc(crtc, ctx);
ctx              17045 drivers/gpu/drm/i915/display/intel_display.c 	struct drm_modeset_acquire_ctx ctx;
ctx              17050 drivers/gpu/drm/i915/display/intel_display.c 		state->acquire_ctx = &ctx;
ctx              17052 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              17055 drivers/gpu/drm/i915/display/intel_display.c 		ret = drm_modeset_lock_all_ctx(dev, &ctx);
ctx              17059 drivers/gpu/drm/i915/display/intel_display.c 		drm_modeset_backoff(&ctx);
ctx              17063 drivers/gpu/drm/i915/display/intel_display.c 		ret = __intel_display_resume(dev, state, &ctx);
ctx              17066 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_drop_locks(&ctx);
ctx              17067 drivers/gpu/drm/i915/display/intel_display.c 	drm_modeset_acquire_fini(&ctx);
ctx               469 drivers/gpu/drm/i915/display/intel_display.h 			       struct drm_modeset_acquire_ctx *ctx);
ctx               472 drivers/gpu/drm/i915/display/intel_display.h 				    struct drm_modeset_acquire_ctx *ctx);
ctx              4794 drivers/gpu/drm/i915/display/intel_dp.c 			  struct drm_modeset_acquire_ctx *ctx)
ctx              4810 drivers/gpu/drm/i915/display/intel_dp.c 			       ctx);
ctx              4820 drivers/gpu/drm/i915/display/intel_dp.c 	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
ctx              4875 drivers/gpu/drm/i915/display/intel_dp.c 	struct drm_modeset_acquire_ctx ctx;
ctx              4881 drivers/gpu/drm/i915/display/intel_dp.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              4884 drivers/gpu/drm/i915/display/intel_dp.c 		ret = intel_dp_retrain_link(encoder, &ctx);
ctx              4887 drivers/gpu/drm/i915/display/intel_dp.c 			drm_modeset_backoff(&ctx);
ctx              4894 drivers/gpu/drm/i915/display/intel_dp.c 	drm_modeset_drop_locks(&ctx);
ctx              4895 drivers/gpu/drm/i915/display/intel_dp.c 	drm_modeset_acquire_fini(&ctx);
ctx              5347 drivers/gpu/drm/i915/display/intel_dp.c 		struct drm_modeset_acquire_ctx *ctx,
ctx              5419 drivers/gpu/drm/i915/display/intel_dp.c 		ret = intel_dp_retrain_link(encoder, ctx);
ctx                51 drivers/gpu/drm/i915/display/intel_dp.h 			  struct drm_modeset_acquire_ctx *ctx);
ctx              1122 drivers/gpu/drm/i915/display/intel_dpll_mgr.c static void skl_wrpll_context_init(struct skl_wrpll_context *ctx)
ctx              1124 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	memset(ctx, 0, sizeof(*ctx));
ctx              1126 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	ctx->min_deviation = U64_MAX;
ctx              1133 drivers/gpu/drm/i915/display/intel_dpll_mgr.c static void skl_wrpll_try_divider(struct skl_wrpll_context *ctx,
ctx              1146 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		    deviation < ctx->min_deviation) {
ctx              1147 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 			ctx->min_deviation = deviation;
ctx              1148 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 			ctx->central_freq = central_freq;
ctx              1149 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 			ctx->dco_freq = dco_freq;
ctx              1150 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 			ctx->p = divider;
ctx              1154 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		   deviation < ctx->min_deviation) {
ctx              1155 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		ctx->min_deviation = deviation;
ctx              1156 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		ctx->central_freq = central_freq;
ctx              1157 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		ctx->dco_freq = dco_freq;
ctx              1158 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		ctx->p = divider;
ctx              1309 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	struct skl_wrpll_context ctx;
ctx              1313 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	skl_wrpll_context_init(&ctx);
ctx              1321 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 				skl_wrpll_try_divider(&ctx,
ctx              1330 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 				if (ctx.min_deviation == 0)
ctx              1340 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 		if (d == 0 && ctx.p)
ctx              1344 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	if (!ctx.p) {
ctx              1354 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	skl_wrpll_get_multipliers(ctx.p, &p0, &p1, &p2);
ctx              1355 drivers/gpu/drm/i915/display/intel_dpll_mgr.c 	skl_wrpll_params_populate(wrpll_params, afe_clock, ctx.central_freq,
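
skl_wrpll_context is a plain best-candidate accumulator for an exhaustive divider search: min_deviation starts at U64_MAX, skl_wrpll_try_divider() records any candidate that beats it, and the caller can stop early once ctx.min_deviation reaches zero. The pattern in isolation, with hypothetical names:

#include <linux/kernel.h>
#include <linux/string.h>

struct best_ctx {
        u64 min_deviation;	/* smallest deviation seen so far */
        unsigned int best_p;	/* divider that achieved it */
};

static void best_ctx_init(struct best_ctx *ctx)
{
        memset(ctx, 0, sizeof(*ctx));
        ctx->min_deviation = U64_MAX;	/* any real candidate wins */
}

static void best_ctx_try(struct best_ctx *ctx, u64 deviation, unsigned int p)
{
        if (deviation < ctx->min_deviation) {
                ctx->min_deviation = deviation;
                ctx->best_p = p;
        }
}
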
ctx               292 drivers/gpu/drm/i915/display/intel_pipe_crc.c 	struct drm_modeset_acquire_ctx ctx;
ctx               295 drivers/gpu/drm/i915/display/intel_pipe_crc.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               303 drivers/gpu/drm/i915/display/intel_pipe_crc.c 	state->acquire_ctx = &ctx;
ctx               325 drivers/gpu/drm/i915/display/intel_pipe_crc.c 		drm_modeset_backoff(&ctx);
ctx               332 drivers/gpu/drm/i915/display/intel_pipe_crc.c 	drm_modeset_drop_locks(&ctx);
ctx               333 drivers/gpu/drm/i915/display/intel_pipe_crc.c 	drm_modeset_acquire_fini(&ctx);
ctx              1003 drivers/gpu/drm/i915/display/intel_psr.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1012 drivers/gpu/drm/i915/display/intel_psr.c 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
ctx              1013 drivers/gpu/drm/i915/display/intel_psr.c 	state->acquire_ctx = &ctx;
ctx              1040 drivers/gpu/drm/i915/display/intel_psr.c 		err = drm_modeset_backoff(&ctx);
ctx              1045 drivers/gpu/drm/i915/display/intel_psr.c 	drm_modeset_drop_locks(&ctx);
ctx              1046 drivers/gpu/drm/i915/display/intel_psr.c 	drm_modeset_acquire_fini(&ctx);
ctx              1902 drivers/gpu/drm/i915/display/intel_sprite.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1933 drivers/gpu/drm/i915/display/intel_sprite.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              1940 drivers/gpu/drm/i915/display/intel_sprite.c 	state->acquire_ctx = &ctx;
ctx              1971 drivers/gpu/drm/i915/display/intel_sprite.c 		drm_modeset_backoff(&ctx);
ctx              1976 drivers/gpu/drm/i915/display/intel_sprite.c 	drm_modeset_drop_locks(&ctx);
ctx              1977 drivers/gpu/drm/i915/display/intel_sprite.c 	drm_modeset_acquire_fini(&ctx);
ctx              1690 drivers/gpu/drm/i915/display/intel_tv.c 		struct drm_modeset_acquire_ctx *ctx,
ctx              1705 drivers/gpu/drm/i915/display/intel_tv.c 		ret = intel_get_load_detect_pipe(connector, NULL, &tmp, ctx);
ctx              1711 drivers/gpu/drm/i915/display/intel_tv.c 			intel_release_load_detect_pipe(connector, &tmp, ctx);
ctx                97 drivers/gpu/drm/i915/gem/i915_gem_context.c static void lut_close(struct i915_gem_context *ctx)
ctx               102 drivers/gpu/drm/i915/gem/i915_gem_context.c 	lockdep_assert_held(&ctx->mutex);
ctx               105 drivers/gpu/drm/i915/gem/i915_gem_context.c 	radix_tree_for_each_slot(slot, &ctx->handles_vma, &iter, 0) {
ctx               116 drivers/gpu/drm/i915/gem/i915_gem_context.c 			if (lut->ctx != ctx)
ctx               130 drivers/gpu/drm/i915/gem/i915_gem_context.c 			radix_tree_iter_delete(&ctx->handles_vma, &iter, slot);
ctx               143 drivers/gpu/drm/i915/gem/i915_gem_context.c lookup_user_engine(struct i915_gem_context *ctx,
ctx               150 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!!(flags & LOOKUP_USER_INDEX) != i915_gem_context_user_engines(ctx))
ctx               153 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!i915_gem_context_user_engines(ctx)) {
ctx               156 drivers/gpu/drm/i915/gem/i915_gem_context.c 		engine = intel_engine_lookup_user(ctx->i915,
ctx               167 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return i915_gem_context_get_engine(ctx, idx);
ctx               194 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx, *cn;
ctx               200 drivers/gpu/drm/i915/gem/i915_gem_context.c 	list_for_each_entry_safe(ctx, cn,
ctx               202 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (atomic_read(&ctx->hw_id_pin_count)) {
ctx               203 drivers/gpu/drm/i915/gem/i915_gem_context.c 			list_move_tail(&ctx->hw_id_link, &pinned);
ctx               207 drivers/gpu/drm/i915/gem/i915_gem_context.c 		GEM_BUG_ON(!ctx->hw_id); /* perma-pinned kernel context */
ctx               208 drivers/gpu/drm/i915/gem/i915_gem_context.c 		list_del_init(&ctx->hw_id_link);
ctx               209 drivers/gpu/drm/i915/gem/i915_gem_context.c 		id = ctx->hw_id;
ctx               246 drivers/gpu/drm/i915/gem/i915_gem_context.c static void release_hw_id(struct i915_gem_context *ctx)
ctx               248 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx               250 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (list_empty(&ctx->hw_id_link))
ctx               254 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!list_empty(&ctx->hw_id_link)) {
ctx               255 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ida_simple_remove(&i915->contexts.hw_ida, ctx->hw_id);
ctx               256 drivers/gpu/drm/i915/gem/i915_gem_context.c 		list_del_init(&ctx->hw_id_link);
ctx               282 drivers/gpu/drm/i915/gem/i915_gem_context.c static struct i915_gem_engines *default_engines(struct i915_gem_context *ctx)
ctx               284 drivers/gpu/drm/i915/gem/i915_gem_context.c 	const struct intel_gt *gt = &ctx->i915->gt;
ctx               297 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ce = intel_context_create(ctx, engine);
ctx               310 drivers/gpu/drm/i915/gem/i915_gem_context.c static void i915_gem_context_free(struct i915_gem_context *ctx)
ctx               312 drivers/gpu/drm/i915/gem/i915_gem_context.c 	lockdep_assert_held(&ctx->i915->drm.struct_mutex);
ctx               313 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(!i915_gem_context_is_closed(ctx));
ctx               315 drivers/gpu/drm/i915/gem/i915_gem_context.c 	release_hw_id(ctx);
ctx               316 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (ctx->vm)
ctx               317 drivers/gpu/drm/i915/gem/i915_gem_context.c 		i915_vm_put(ctx->vm);
ctx               319 drivers/gpu/drm/i915/gem/i915_gem_context.c 	free_engines(rcu_access_pointer(ctx->engines));
ctx               320 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_destroy(&ctx->engines_mutex);
ctx               322 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kfree(ctx->jump_whitelist);
ctx               324 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (ctx->timeline)
ctx               325 drivers/gpu/drm/i915/gem/i915_gem_context.c 		intel_timeline_put(ctx->timeline);
ctx               327 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kfree(ctx->name);
ctx               328 drivers/gpu/drm/i915/gem/i915_gem_context.c 	put_pid(ctx->pid);
ctx               330 drivers/gpu/drm/i915/gem/i915_gem_context.c 	list_del(&ctx->link);
ctx               331 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_destroy(&ctx->mutex);
ctx               333 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kfree_rcu(ctx, rcu);
ctx               339 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx, *cn;
ctx               343 drivers/gpu/drm/i915/gem/i915_gem_context.c 	llist_for_each_entry_safe(ctx, cn, freed, free_link)
ctx               344 drivers/gpu/drm/i915/gem/i915_gem_context.c 		i915_gem_context_free(ctx);
ctx               349 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               358 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = container_of(freed, typeof(*ctx), free_link);
ctx               359 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_free(ctx);
ctx               374 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx = container_of(ref, typeof(*ctx), ref);
ctx               375 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx               377 drivers/gpu/drm/i915/gem/i915_gem_context.c 	trace_i915_context_free(ctx);
ctx               378 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (llist_add(&ctx->free_link, &i915->contexts.free_list))
ctx               382 drivers/gpu/drm/i915/gem/i915_gem_context.c static void context_close(struct i915_gem_context *ctx)
ctx               384 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_lock(&ctx->mutex);
ctx               386 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_set_closed(ctx);
ctx               387 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->file_priv = ERR_PTR(-EBADF);
ctx               393 drivers/gpu/drm/i915/gem/i915_gem_context.c 	release_hw_id(ctx);
ctx               400 drivers/gpu/drm/i915/gem/i915_gem_context.c 	lut_close(ctx);
ctx               402 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->mutex);
ctx               403 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_put(ctx);
ctx               409 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               414 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               415 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx)
ctx               418 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kref_init(&ctx->ref);
ctx               419 drivers/gpu/drm/i915/gem/i915_gem_context.c 	list_add_tail(&ctx->link, &i915->contexts.list);
ctx               420 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->i915 = i915;
ctx               421 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->sched.priority = I915_USER_PRIORITY(I915_PRIORITY_NORMAL);
ctx               422 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_init(&ctx->mutex);
ctx               424 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_init(&ctx->engines_mutex);
ctx               425 drivers/gpu/drm/i915/gem/i915_gem_context.c 	e = default_engines(ctx);
ctx               430 drivers/gpu/drm/i915/gem/i915_gem_context.c 	RCU_INIT_POINTER(ctx->engines, e);
ctx               432 drivers/gpu/drm/i915/gem/i915_gem_context.c 	INIT_RADIX_TREE(&ctx->handles_vma, GFP_KERNEL);
ctx               433 drivers/gpu/drm/i915/gem/i915_gem_context.c 	INIT_LIST_HEAD(&ctx->hw_id_link);
ctx               438 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->remap_slice = ALL_L3_SLICES(i915);
ctx               440 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_set_bannable(ctx);
ctx               441 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_set_recoverable(ctx);
ctx               443 drivers/gpu/drm/i915/gem/i915_gem_context.c 	for (i = 0; i < ARRAY_SIZE(ctx->hang_timestamp); i++)
ctx               444 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ctx->hang_timestamp[i] = jiffies - CONTEXT_FAST_HANG_JIFFIES;
ctx               446 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->jump_whitelist = NULL;
ctx               447 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->jump_whitelist_cmds = 0;
ctx               449 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return ctx;
ctx               452 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kfree(ctx);
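
__create_context() is the usual kref recipe: kzalloc, kref_init() to a count of one, init the locks and lists, and kfree on the error path; context_close()/i915_gem_context_put() later drop references until the release callback frees the object. A stripped-down create/put sketch around a hypothetical struct my_ctx (the kref_* and container_of calls are the real API):

#include <linux/err.h>
#include <linux/kref.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct my_ctx {
        struct kref ref;
        struct mutex mutex;
};

static struct my_ctx *my_ctx_create(void)
{
        struct my_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

        if (!ctx)
                return ERR_PTR(-ENOMEM);

        kref_init(&ctx->ref);	/* refcount starts at 1 */
        mutex_init(&ctx->mutex);
        return ctx;
}

static void my_ctx_release(struct kref *ref)
{
        struct my_ctx *ctx = container_of(ref, struct my_ctx, ref);

        mutex_destroy(&ctx->mutex);
        kfree(ctx);
}

static inline void my_ctx_put(struct my_ctx *ctx)
{
        kref_put(&ctx->ref, my_ctx_release);
}
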
ctx               457 drivers/gpu/drm/i915/gem/i915_gem_context.c context_apply_all(struct i915_gem_context *ctx,
ctx               464 drivers/gpu/drm/i915/gem/i915_gem_context.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it)
ctx               466 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_unlock_engines(ctx);
ctx               476 drivers/gpu/drm/i915/gem/i915_gem_context.c __set_ppgtt(struct i915_gem_context *ctx, struct i915_address_space *vm)
ctx               478 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_address_space *old = ctx->vm;
ctx               482 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->vm = i915_vm_get(vm);
ctx               483 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_apply_all(ctx, __apply_ppgtt, vm);
ctx               488 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __assign_ppgtt(struct i915_gem_context *ctx,
ctx               491 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (vm == ctx->vm)
ctx               494 drivers/gpu/drm/i915/gem/i915_gem_context.c 	vm = __set_ppgtt(ctx, vm);
ctx               515 drivers/gpu/drm/i915/gem/i915_gem_context.c static void __assign_timeline(struct i915_gem_context *ctx,
ctx               518 drivers/gpu/drm/i915/gem/i915_gem_context.c 	__set_timeline(&ctx->timeline, timeline);
ctx               519 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_apply_all(ctx, __apply_timeline, timeline);
ctx               525 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               536 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = __create_context(dev_priv);
ctx               537 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ctx))
ctx               538 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return ctx;
ctx               547 drivers/gpu/drm/i915/gem/i915_gem_context.c 			context_close(ctx);
ctx               551 drivers/gpu/drm/i915/gem/i915_gem_context.c 		__assign_ppgtt(ctx, &ppgtt->vm);
ctx               560 drivers/gpu/drm/i915/gem/i915_gem_context.c 			context_close(ctx);
ctx               564 drivers/gpu/drm/i915/gem/i915_gem_context.c 		__assign_timeline(ctx, timeline);
ctx               568 drivers/gpu/drm/i915/gem/i915_gem_context.c 	trace_i915_context_create(ctx);
ctx               570 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return ctx;
ctx               576 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               579 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_context_get(fetch_and_zero(ctxp));
ctx               580 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(!i915_gem_context_is_kernel(ctx));
ctx               582 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_close(ctx);
ctx               583 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_free(ctx);
ctx               589 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               592 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_create_context(i915, 0);
ctx               593 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ctx))
ctx               594 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return ctx;
ctx               596 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = i915_gem_context_pin_hw_id(ctx);
ctx               598 drivers/gpu/drm/i915/gem/i915_gem_context.c 		destroy_kernel_context(&ctx);
ctx               602 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_clear_bannable(ctx);
ctx               603 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->sched.priority = I915_USER_PRIORITY(prio);
ctx               605 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(!i915_gem_context_is_kernel(ctx));
ctx               607 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return ctx;
ctx               627 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               635 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_context_create_kernel(dev_priv, I915_PRIORITY_MIN);
ctx               636 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ctx)) {
ctx               638 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return PTR_ERR(ctx);
ctx               647 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(ctx->hw_id);
ctx               648 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(!atomic_read(&ctx->hw_id_pin_count));
ctx               649 drivers/gpu/drm/i915/gem/i915_gem_context.c 	dev_priv->kernel_context = ctx;
ctx               680 drivers/gpu/drm/i915/gem/i915_gem_context.c static int gem_context_register(struct i915_gem_context *ctx,
ctx               685 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->file_priv = fpriv;
ctx               686 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (ctx->vm)
ctx               687 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ctx->vm->file = fpriv;
ctx               689 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->pid = get_task_pid(current, PIDTYPE_PID);
ctx               690 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx->name = kasprintf(GFP_KERNEL, "%s[%d]",
ctx               691 drivers/gpu/drm/i915/gem/i915_gem_context.c 			      current->comm, pid_nr(ctx->pid));
ctx               692 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx->name) {
ctx               699 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = idr_alloc(&fpriv->context_idr, ctx, 0, 0, GFP_KERNEL);
ctx               704 drivers/gpu/drm/i915/gem/i915_gem_context.c 	kfree(fetch_and_zero(&ctx->name));
ctx               706 drivers/gpu/drm/i915/gem/i915_gem_context.c 	put_pid(fetch_and_zero(&ctx->pid));
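
gem_context_register() turns the context into a userspace handle with idr_alloc(), unwinding the name and pid if that fails. The handle-table idiom in isolation, with hypothetical names and locking omitted (the real code serializes idr access with a mutex):

#include <linux/idr.h>

struct my_ctx;			/* opaque here; see the kref sketch above */

static DEFINE_IDR(ctx_idr);

/* allocate a userspace handle; returns id >= 0 or a negative errno */
static int ctx_register(struct my_ctx *ctx)
{
        /* end == 0 means "no upper bound on the id" */
        return idr_alloc(&ctx_idr, ctx, 0, 0, GFP_KERNEL);
}

static struct my_ctx *ctx_lookup(int handle)
{
        return idr_find(&ctx_idr, handle);
}
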
ctx               715 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               725 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_create_context(i915, 0);
ctx               727 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ctx)) {
ctx               728 drivers/gpu/drm/i915/gem/i915_gem_context.c 		err = PTR_ERR(ctx);
ctx               732 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = gem_context_register(ctx, file_priv);
ctx               736 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(i915_gem_context_is_kernel(ctx));
ctx               742 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_close(ctx);
ctx               866 drivers/gpu/drm/i915/gem/i915_gem_context.c static int context_barrier_task(struct i915_gem_context *ctx,
ctx               873 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx               893 drivers/gpu/drm/i915/gem/i915_gem_context.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ctx               924 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_unlock_engines(ctx);
ctx               935 drivers/gpu/drm/i915/gem/i915_gem_context.c 		     struct i915_gem_context *ctx,
ctx               941 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx->vm)
ctx               945 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = mutex_lock_interruptible(&ctx->i915->drm.struct_mutex);
ctx               949 drivers/gpu/drm/i915/gem/i915_gem_context.c 	vm = i915_vm_get(ctx->vm);
ctx               950 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->i915->drm.struct_mutex);
ctx              1044 drivers/gpu/drm/i915/gem/i915_gem_context.c 		     struct i915_gem_context *ctx,
ctx              1053 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx->vm)
ctx              1070 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = mutex_lock_interruptible(&ctx->i915->drm.struct_mutex);
ctx              1074 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (vm == ctx->vm)
ctx              1078 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_lock(&ctx->mutex);
ctx              1079 drivers/gpu/drm/i915/gem/i915_gem_context.c 	lut_close(ctx);
ctx              1080 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->mutex);
ctx              1082 drivers/gpu/drm/i915/gem/i915_gem_context.c 	old = __set_ppgtt(ctx, vm);
ctx              1089 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = context_barrier_task(ctx, ALL_ENGINES,
ctx              1095 drivers/gpu/drm/i915/gem/i915_gem_context.c 		i915_vm_put(__set_ppgtt(ctx, old));
ctx              1100 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->i915->drm.struct_mutex);
ctx              1302 drivers/gpu/drm/i915/gem/i915_gem_context.c static int set_sseu(struct i915_gem_context *ctx,
ctx              1305 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1332 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = lookup_user_engine(ctx, lookup, &user_sseu.engine);
ctx              1358 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              1375 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!HAS_EXECLISTS(set->ctx->i915))
ctx              1378 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (USES_GUC_SUBMISSION(set->ctx->i915))
ctx              1424 drivers/gpu/drm/i915/gem/i915_gem_context.c 		siblings[n] = intel_engine_lookup_user(set->ctx->i915,
ctx              1435 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = intel_execlists_create_virtual(set->ctx, siblings, n);
ctx              1495 drivers/gpu/drm/i915/gem/i915_gem_context.c 	master = intel_engine_lookup_user(set->ctx->i915,
ctx              1512 drivers/gpu/drm/i915/gem/i915_gem_context.c 		bond = intel_engine_lookup_user(set->ctx->i915,
ctx              1543 drivers/gpu/drm/i915/gem/i915_gem_context.c set_engines(struct i915_gem_context *ctx,
ctx              1548 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct set_engines set = { .ctx = ctx };
ctx              1554 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (!i915_gem_context_user_engines(ctx))
ctx              1557 drivers/gpu/drm/i915/gem/i915_gem_context.c 		set.engines = default_engines(ctx);
ctx              1600 drivers/gpu/drm/i915/gem/i915_gem_context.c 		engine = intel_engine_lookup_user(ctx->i915,
ctx              1610 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ce = intel_context_create(ctx, engine);
ctx              1632 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_lock(&ctx->engines_mutex);
ctx              1634 drivers/gpu/drm/i915/gem/i915_gem_context.c 		i915_gem_context_set_user_engines(ctx);
ctx              1636 drivers/gpu/drm/i915/gem/i915_gem_context.c 		i915_gem_context_clear_user_engines(ctx);
ctx              1637 drivers/gpu/drm/i915/gem/i915_gem_context.c 	rcu_swap_protected(ctx->engines, set.engines, 1);
ctx              1638 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->engines_mutex);
ctx              1668 drivers/gpu/drm/i915/gem/i915_gem_context.c get_engines(struct i915_gem_context *ctx,
ctx              1676 drivers/gpu/drm/i915/gem/i915_gem_context.c 	err = mutex_lock_interruptible(&ctx->engines_mutex);
ctx              1681 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (i915_gem_context_user_engines(ctx))
ctx              1682 drivers/gpu/drm/i915/gem/i915_gem_context.c 		e = __copy_engines(i915_gem_context_engines(ctx));
ctx              1683 drivers/gpu/drm/i915/gem/i915_gem_context.c 	mutex_unlock(&ctx->engines_mutex);
ctx              1747 drivers/gpu/drm/i915/gem/i915_gem_context.c 			struct i915_gem_context *ctx,
ctx              1757 drivers/gpu/drm/i915/gem/i915_gem_context.c 			set_bit(UCONTEXT_NO_ZEROMAP, &ctx->user_flags);
ctx              1759 drivers/gpu/drm/i915/gem/i915_gem_context.c 			clear_bit(UCONTEXT_NO_ZEROMAP, &ctx->user_flags);
ctx              1766 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_set_no_error_capture(ctx);
ctx              1768 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_clear_no_error_capture(ctx);
ctx              1777 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_set_bannable(ctx);
ctx              1779 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_clear_bannable(ctx);
ctx              1786 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_set_recoverable(ctx);
ctx              1788 drivers/gpu/drm/i915/gem/i915_gem_context.c 			i915_gem_context_clear_recoverable(ctx);
ctx              1797 drivers/gpu/drm/i915/gem/i915_gem_context.c 			else if (!(ctx->i915->caps.scheduler & I915_SCHEDULER_CAP_PRIORITY))
ctx              1806 drivers/gpu/drm/i915/gem/i915_gem_context.c 				ctx->sched.priority =
ctx              1812 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = set_sseu(ctx, args);
ctx              1816 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = set_ppgtt(fpriv, ctx, args);
ctx              1820 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = set_engines(ctx, args);
ctx              1833 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              1848 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return ctx_setparam(arg->fpriv, arg->ctx, &local.param);
ctx              2030 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *dst = arg->ctx;
ctx              2105 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ext_data.ctx = i915_gem_create_context(i915, args->flags);
ctx              2107 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (IS_ERR(ext_data.ctx))
ctx              2108 drivers/gpu/drm/i915/gem/i915_gem_context.c 		return PTR_ERR(ext_data.ctx);
ctx              2119 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = gem_context_register(ext_data.ctx, ext_data.fpriv);
ctx              2129 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_close(ext_data.ctx);
ctx              2138 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              2149 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = idr_remove(&file_priv->context_idr, args->ctx_id);
ctx              2151 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx)
ctx              2154 drivers/gpu/drm/i915/gem/i915_gem_context.c 	context_close(ctx);
ctx              2158 drivers/gpu/drm/i915/gem/i915_gem_context.c static int get_sseu(struct i915_gem_context *ctx,
ctx              2185 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ce = lookup_user_engine(ctx, lookup, &user_sseu.engine);
ctx              2218 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              2221 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_context_lookup(file_priv, args->ctx_id);
ctx              2222 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx)
ctx              2228 drivers/gpu/drm/i915/gem/i915_gem_context.c 		args->value = test_bit(UCONTEXT_NO_ZEROMAP, &ctx->user_flags);
ctx              2233 drivers/gpu/drm/i915/gem/i915_gem_context.c 		if (ctx->vm)
ctx              2234 drivers/gpu/drm/i915/gem/i915_gem_context.c 			args->value = ctx->vm->total;
ctx              2243 drivers/gpu/drm/i915/gem/i915_gem_context.c 		args->value = i915_gem_context_no_error_capture(ctx);
ctx              2248 drivers/gpu/drm/i915/gem/i915_gem_context.c 		args->value = i915_gem_context_is_bannable(ctx);
ctx              2253 drivers/gpu/drm/i915/gem/i915_gem_context.c 		args->value = i915_gem_context_is_recoverable(ctx);
ctx              2258 drivers/gpu/drm/i915/gem/i915_gem_context.c 		args->value = ctx->sched.priority >> I915_USER_PRIORITY_SHIFT;
ctx              2262 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = get_sseu(ctx, args);
ctx              2266 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = get_ppgtt(file_priv, ctx, args);
ctx              2270 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ret = get_engines(ctx, args);
ctx              2279 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_put(ctx);
ctx              2288 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              2291 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = i915_gem_context_lookup(file_priv, args->ctx_id);
ctx              2292 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx)
ctx              2295 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ret = ctx_setparam(file_priv, ctx, args);
ctx              2297 drivers/gpu/drm/i915/gem/i915_gem_context.c 	i915_gem_context_put(ctx);
ctx              2306 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              2314 drivers/gpu/drm/i915/gem/i915_gem_context.c 	ctx = __i915_gem_context_lookup_rcu(file->driver_priv, args->ctx_id);
ctx              2315 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (!ctx)
ctx              2330 drivers/gpu/drm/i915/gem/i915_gem_context.c 	args->batch_active = atomic_read(&ctx->guilty_count);
ctx              2331 drivers/gpu/drm/i915/gem/i915_gem_context.c 	args->batch_pending = atomic_read(&ctx->active_count);
ctx              2339 drivers/gpu/drm/i915/gem/i915_gem_context.c int __i915_gem_context_pin_hw_id(struct i915_gem_context *ctx)
ctx              2341 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              2346 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(i915_gem_context_is_closed(ctx));
ctx              2348 drivers/gpu/drm/i915/gem/i915_gem_context.c 	if (list_empty(&ctx->hw_id_link)) {
ctx              2349 drivers/gpu/drm/i915/gem/i915_gem_context.c 		GEM_BUG_ON(atomic_read(&ctx->hw_id_pin_count));
ctx              2351 drivers/gpu/drm/i915/gem/i915_gem_context.c 		err = assign_hw_id(i915, &ctx->hw_id);
ctx              2355 drivers/gpu/drm/i915/gem/i915_gem_context.c 		list_add_tail(&ctx->hw_id_link, &i915->contexts.hw_id_list);
ctx              2358 drivers/gpu/drm/i915/gem/i915_gem_context.c 	GEM_BUG_ON(atomic_read(&ctx->hw_id_pin_count) == ~0u);
ctx              2359 drivers/gpu/drm/i915/gem/i915_gem_context.c 	atomic_inc(&ctx->hw_id_pin_count);
ctx              2371 drivers/gpu/drm/i915/gem/i915_gem_context.c 	struct intel_context *ctx;
ctx              2377 drivers/gpu/drm/i915/gem/i915_gem_context.c 		ctx = e->engines[it->idx++];
ctx              2378 drivers/gpu/drm/i915/gem/i915_gem_context.c 	} while (!ctx);
ctx              2380 drivers/gpu/drm/i915/gem/i915_gem_context.c 	return ctx;
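
The do/while at the end of the i915_gem_context.c entries is the gem-engines iterator skipping NULL slots in a fixed array. Its core, reduced to a hypothetical structure:

#include <linux/types.h>

struct my_engines {
        unsigned int num_engines;
        void *engines[8];	/* sparse: unused slots stay NULL */
};

struct my_engines_iter {
        unsigned int idx;
        const struct my_engines *e;
};

/* return the next populated slot, or NULL once the array is exhausted */
static void *my_engines_iter_next(struct my_engines_iter *it)
{
        void *engine;

        do {
                if (it->idx >= it->e->num_engines)
                        return NULL;

                engine = it->e->engines[it->idx++];
        } while (!engine);

        return engine;
}
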
ctx                21 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_is_closed(const struct i915_gem_context *ctx)
ctx                23 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(CONTEXT_CLOSED, &ctx->flags);
ctx                26 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_closed(struct i915_gem_context *ctx)
ctx                28 drivers/gpu/drm/i915/gem/i915_gem_context.h 	GEM_BUG_ON(i915_gem_context_is_closed(ctx));
ctx                29 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(CONTEXT_CLOSED, &ctx->flags);
ctx                32 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_no_error_capture(const struct i915_gem_context *ctx)
ctx                34 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(UCONTEXT_NO_ERROR_CAPTURE, &ctx->user_flags);
ctx                37 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_no_error_capture(struct i915_gem_context *ctx)
ctx                39 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(UCONTEXT_NO_ERROR_CAPTURE, &ctx->user_flags);
ctx                42 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_clear_no_error_capture(struct i915_gem_context *ctx)
ctx                44 drivers/gpu/drm/i915/gem/i915_gem_context.h 	clear_bit(UCONTEXT_NO_ERROR_CAPTURE, &ctx->user_flags);
ctx                47 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_is_bannable(const struct i915_gem_context *ctx)
ctx                49 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(UCONTEXT_BANNABLE, &ctx->user_flags);
ctx                52 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_bannable(struct i915_gem_context *ctx)
ctx                54 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(UCONTEXT_BANNABLE, &ctx->user_flags);
ctx                57 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_clear_bannable(struct i915_gem_context *ctx)
ctx                59 drivers/gpu/drm/i915/gem/i915_gem_context.h 	clear_bit(UCONTEXT_BANNABLE, &ctx->user_flags);
ctx                62 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_is_recoverable(const struct i915_gem_context *ctx)
ctx                64 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(UCONTEXT_RECOVERABLE, &ctx->user_flags);
ctx                67 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_recoverable(struct i915_gem_context *ctx)
ctx                69 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(UCONTEXT_RECOVERABLE, &ctx->user_flags);
ctx                72 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_clear_recoverable(struct i915_gem_context *ctx)
ctx                74 drivers/gpu/drm/i915/gem/i915_gem_context.h 	clear_bit(UCONTEXT_RECOVERABLE, &ctx->user_flags);
ctx                77 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_is_banned(const struct i915_gem_context *ctx)
ctx                79 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(CONTEXT_BANNED, &ctx->flags);
ctx                82 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_banned(struct i915_gem_context *ctx)
ctx                84 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(CONTEXT_BANNED, &ctx->flags);
ctx                87 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_force_single_submission(const struct i915_gem_context *ctx)
ctx                89 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(CONTEXT_FORCE_SINGLE_SUBMISSION, &ctx->flags);
ctx                92 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_set_force_single_submission(struct i915_gem_context *ctx)
ctx                94 drivers/gpu/drm/i915/gem/i915_gem_context.h 	__set_bit(CONTEXT_FORCE_SINGLE_SUBMISSION, &ctx->flags);
ctx                98 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_user_engines(const struct i915_gem_context *ctx)
ctx               100 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return test_bit(CONTEXT_USER_ENGINES, &ctx->flags);
ctx               104 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_set_user_engines(struct i915_gem_context *ctx)
ctx               106 drivers/gpu/drm/i915/gem/i915_gem_context.h 	set_bit(CONTEXT_USER_ENGINES, &ctx->flags);
ctx               110 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_clear_user_engines(struct i915_gem_context *ctx)
ctx               112 drivers/gpu/drm/i915/gem/i915_gem_context.h 	clear_bit(CONTEXT_USER_ENGINES, &ctx->flags);
ctx               115 drivers/gpu/drm/i915/gem/i915_gem_context.h int __i915_gem_context_pin_hw_id(struct i915_gem_context *ctx);
ctx               116 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline int i915_gem_context_pin_hw_id(struct i915_gem_context *ctx)
ctx               118 drivers/gpu/drm/i915/gem/i915_gem_context.h 	if (atomic_inc_not_zero(&ctx->hw_id_pin_count))
ctx               121 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return __i915_gem_context_pin_hw_id(ctx);
ctx               124 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_unpin_hw_id(struct i915_gem_context *ctx)
ctx               126 drivers/gpu/drm/i915/gem/i915_gem_context.h 	GEM_BUG_ON(atomic_read(&ctx->hw_id_pin_count) == 0u);
ctx               127 drivers/gpu/drm/i915/gem/i915_gem_context.h 	atomic_dec(&ctx->hw_id_pin_count);
ctx               130 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline bool i915_gem_context_is_kernel(struct i915_gem_context *ctx)
ctx               132 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return !ctx->file_priv;
ctx               165 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_get(struct i915_gem_context *ctx)
ctx               167 drivers/gpu/drm/i915/gem/i915_gem_context.h 	kref_get(&ctx->ref);
ctx               168 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return ctx;
ctx               171 drivers/gpu/drm/i915/gem/i915_gem_context.h static inline void i915_gem_context_put(struct i915_gem_context *ctx)
ctx               173 drivers/gpu/drm/i915/gem/i915_gem_context.h 	kref_put(&ctx->ref, i915_gem_context_release);
ctx               177 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_engines(struct i915_gem_context *ctx)
ctx               179 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return rcu_dereference_protected(ctx->engines,
ctx               180 drivers/gpu/drm/i915/gem/i915_gem_context.h 					 lockdep_is_held(&ctx->engines_mutex));
ctx               184 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_lock_engines(struct i915_gem_context *ctx)
ctx               185 drivers/gpu/drm/i915/gem/i915_gem_context.h 	__acquires(&ctx->engines_mutex)
ctx               187 drivers/gpu/drm/i915/gem/i915_gem_context.h 	mutex_lock(&ctx->engines_mutex);
ctx               188 drivers/gpu/drm/i915/gem/i915_gem_context.h 	return i915_gem_context_engines(ctx);
ctx               192 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_unlock_engines(struct i915_gem_context *ctx)
ctx               193 drivers/gpu/drm/i915/gem/i915_gem_context.h 	__releases(&ctx->engines_mutex)
ctx               195 drivers/gpu/drm/i915/gem/i915_gem_context.h 	mutex_unlock(&ctx->engines_mutex);
ctx               199 drivers/gpu/drm/i915/gem/i915_gem_context.h i915_gem_context_get_engine(struct i915_gem_context *ctx, unsigned int idx)
ctx               204 drivers/gpu/drm/i915/gem/i915_gem_context.h 		struct i915_gem_engines *e = rcu_dereference(ctx->engines);
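
The i915_gem_context.h entries boil down to two idioms: per-context booleans wrapped in test_bit()/set_bit()/clear_bit() accessors over a flags word, and a pin helper whose fast path is atomic_inc_not_zero() so the allocating slow path runs only for the very first pin. Both in miniature, over an assumed struct with flags and pin_count fields:

#include <linux/atomic.h>
#include <linux/bitops.h>

#define MY_CTX_CLOSED	0

struct my_flags_ctx {
        unsigned long flags;	/* bit MY_CTX_CLOSED lives here */
        atomic_t pin_count;	/* 0 means "id not assigned yet" */
};

static inline bool my_ctx_is_closed(const struct my_flags_ctx *ctx)
{
        return test_bit(MY_CTX_CLOSED, &ctx->flags);
}

static inline void my_ctx_set_closed(struct my_flags_ctx *ctx)
{
        set_bit(MY_CTX_CLOSED, &ctx->flags);
}

int __my_ctx_pin(struct my_flags_ctx *ctx);	/* slow path: assigns the id */

static inline int my_ctx_pin(struct my_flags_ctx *ctx)
{
        /* fast path: already pinned, just bump the count */
        if (atomic_inc_not_zero(&ctx->pin_count))
                return 0;

        return __my_ctx_pin(ctx);
}
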
ctx               724 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct i915_gem_context *ctx;
ctx               726 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	ctx = i915_gem_context_lookup(eb->file->driver_priv, eb->args->rsvd1);
ctx               727 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (unlikely(!ctx))
ctx               730 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	eb->gem_context = ctx;
ctx               731 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (ctx->vm)
ctx               735 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (test_bit(UCONTEXT_NO_ZEROMAP, &ctx->user_flags))
ctx               799 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		lut->ctx = eb->gem_context;
ctx               101 drivers/gpu/drm/i915/gem/i915_gem_object.c 		struct i915_gem_context *ctx = lut->ctx;
ctx               103 drivers/gpu/drm/i915/gem/i915_gem_object.c 		if (ctx->file_priv != fpriv)
ctx               106 drivers/gpu/drm/i915/gem/i915_gem_object.c 		i915_gem_context_get(ctx);
ctx               112 drivers/gpu/drm/i915/gem/i915_gem_object.c 		struct i915_gem_context *ctx = lut->ctx;
ctx               120 drivers/gpu/drm/i915/gem/i915_gem_object.c 		mutex_lock(&ctx->mutex);
ctx               121 drivers/gpu/drm/i915/gem/i915_gem_object.c 		vma = radix_tree_delete(&ctx->handles_vma, lut->handle);
ctx               129 drivers/gpu/drm/i915/gem/i915_gem_object.c 		mutex_unlock(&ctx->mutex);
ctx               131 drivers/gpu/drm/i915/gem/i915_gem_object.c 		i915_gem_context_put(lut->ctx);
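
The close path in i915_gem_object.c above is careful about ordering: it takes a context reference before acquiring ctx->mutex to unlink the handle from ctx->handles_vma, so the final i915_gem_context_put() (which may free the context) can only run once the mutex has been dropped. A simplified model of that get-lock-unlock-put shape (pthread/C11 stand-ins, not i915 API):

	#include <pthread.h>
	#include <stdatomic.h>
	#include <stdlib.h>

	struct ctx {
		atomic_int ref;
		pthread_mutex_t lock;
		/* handle tree elided */
	};

	static void ctx_put(struct ctx *c)
	{
		if (atomic_fetch_sub(&c->ref, 1) == 1) { /* last reference */
			pthread_mutex_destroy(&c->lock);
			free(c);
		}
	}

	static void close_handle(struct ctx *c)
	{
		atomic_fetch_add(&c->ref, 1);	/* i915_gem_context_get() */
		pthread_mutex_lock(&c->lock);	/* ctx->mutex */
		/* radix_tree_delete(&ctx->handles_vma, handle) goes here */
		pthread_mutex_unlock(&c->lock);
		ctx_put(c);	/* may be the final put, after the unlock */
	}

	int main(void)
	{
		struct ctx *c = calloc(1, sizeof(*c));

		if (!c)
			return 1;
		atomic_store(&c->ref, 1);
		pthread_mutex_init(&c->lock, NULL);
		close_handle(c);
		ctx_put(c);	/* drop the creation reference; frees c */
		return 0;
	}
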
ctx                26 drivers/gpu/drm/i915/gem/i915_gem_object_types.h 	struct i915_gem_context *ctx;
ctx               883 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		     struct i915_gem_context *ctx,
ctx               896 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	return igt_gpu_fill_dw(vma, ctx, engine, dw * sizeof(u32),
ctx               932 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int __igt_write_huge(struct i915_gem_context *ctx,
ctx               938 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
ctx               967 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	err = gpu_write(vma, ctx, engine, dword, val);
ctx               987 drivers/gpu/drm/i915/gem/selftests/huge_pages.c static int igt_write_huge(struct i915_gem_context *ctx,
ctx               991 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
ctx              1060 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = __igt_write_huge(ctx, engine, obj, size, offset_low,
ctx              1065 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = __igt_write_huge(ctx, engine, obj, size, offset_high,
ctx              1084 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1085 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1159 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			err = igt_write_huge(ctx, obj);
ctx              1185 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1186 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1221 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = igt_write_huge(ctx, obj);
ctx              1250 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1251 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1290 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = igt_write_huge(ctx, obj);
ctx              1314 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1315 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *dev_priv = ctx->i915;
ctx              1317 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_address_space *vm = ctx->vm;
ctx              1426 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = gpu_write(vma, ctx, engine, n++, 0xdeadbeaf);
ctx              1448 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1449 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1451 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
ctx              1506 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx = arg;
ctx              1507 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1508 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
ctx              1555 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = gpu_write(vma, ctx, engine, n++, 0xdeadbeaf);
ctx              1663 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct i915_gem_context *ctx;
ctx              1682 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	ctx = live_context(i915, file);
ctx              1683 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (IS_ERR(ctx)) {
ctx              1684 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		err = PTR_ERR(ctx);
ctx              1688 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (ctx->vm)
ctx              1689 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		ctx->vm->scrub_64K = true;
ctx              1691 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	err = i915_subtests(tests, ctx);
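
Many of the selftest entries above choose an address space with expressions like ctx->vm ?: &i915->ggtt.vm. The binary form of ?: with the middle operand omitted is a GNU C extension: a ?: b evaluates a once and yields it when non-zero, otherwise b, which is why it is the kernel's idiom for "use the per-context VM, else fall back to the global GTT". A tiny standalone illustration (compile with gcc, which enables GNU extensions by default):

	#include <stdio.h>

	struct vm { const char *name; };

	static struct vm ggtt = { "ggtt" };

	/* GNU extension: `a ?: b` is `a ? a : b`, evaluating `a` once. */
	static struct vm *pick_vm(struct vm *ctx_vm)
	{
		return ctx_vm ?: &ggtt;	/* fall back to the global GTT */
	}

	int main(void)
	{
		struct vm ppgtt = { "ppgtt" };

		printf("%s\n", pick_vm(&ppgtt)->name);	/* ppgtt */
		printf("%s\n", pick_vm(NULL)->name);	/* ggtt  */
		return 0;
	}
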
ctx                33 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_gem_context **ctx;
ctx                57 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ctx = kcalloc(nctx, sizeof(*ctx), GFP_KERNEL);
ctx                58 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (!ctx) {
ctx                64 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ctx[n] = live_context(i915, file);
ctx                65 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		if (IS_ERR(ctx[n])) {
ctx                66 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			err = PTR_ERR(ctx[n]);
ctx                78 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			rq = igt_request_alloc(ctx[n], engine);
ctx               106 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				rq = igt_request_alloc(ctx[n % nctx], engine);
ctx               170 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		    struct i915_gem_context *ctx,
ctx               174 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
ctx               204 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			      ctx,
ctx               308 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c create_test_object(struct i915_gem_context *ctx,
ctx               313 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_address_space *vm = ctx->vm ?: &ctx->i915->ggtt.vm;
ctx               318 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_retire_requests(ctx->i915);
ctx               323 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	obj = huge_gem_object(ctx->i915, DW_PER_PAGE * PAGE_SIZE, size);
ctx               395 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			struct i915_gem_context *ctx;
ctx               397 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			ctx = live_context(i915, file);
ctx               398 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			if (IS_ERR(ctx)) {
ctx               399 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				err = PTR_ERR(ctx);
ctx               404 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				obj = create_test_object(ctx, file, &objects);
ctx               411 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			err = gpu_fill(obj, ctx, engine, dw);
ctx               415 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       engine->name, ctx->hw_id,
ctx               416 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       yesno(!!ctx->vm), err);
ctx               511 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			struct i915_gem_context *ctx;
ctx               513 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			ctx = kernel_context(i915);
ctx               514 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			if (IS_ERR(ctx)) {
ctx               515 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				err = PTR_ERR(ctx);
ctx               519 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			__assign_ppgtt(ctx, parent->vm);
ctx               525 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 					kernel_context_close(ctx);
ctx               530 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			err = gpu_fill(obj, ctx, engine, dw);
ctx               534 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       engine->name, ctx->hw_id,
ctx               535 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       yesno(!!ctx->vm), err);
ctx               536 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				kernel_context_close(ctx);
ctx               548 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			kernel_context_close(ctx);
ctx               913 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx               950 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ctx = live_context(i915, file);
ctx               951 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (IS_ERR(ctx)) {
ctx               952 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		ret = PTR_ERR(ctx);
ctx               955 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_gem_context_clear_bannable(ctx); /* to reset and beyond! */
ctx               963 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ce = i915_gem_context_get_engine(ctx, RCS0);
ctx              1045 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              1070 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ctx = live_context(i915, file);
ctx              1071 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (IS_ERR(ctx)) {
ctx              1072 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		err = PTR_ERR(ctx);
ctx              1076 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	vm = ctx->vm ?: &i915->ggtt.alias->vm;
ctx              1093 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				obj = create_test_object(ctx, file, &objects);
ctx              1103 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 			err = gpu_fill(obj, ctx, engine, dw);
ctx              1107 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       engine->name, ctx->hw_id,
ctx              1108 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 				       yesno(!!ctx->vm), err);
ctx              1149 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static int check_scratch(struct i915_gem_context *ctx, u64 offset)
ctx              1152 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		__drm_mm_interval_first(&ctx->vm->mm,
ctx              1164 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static int write_to_scratch(struct i915_gem_context *ctx,
ctx              1168 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1200 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	vma = i915_vma_instance(obj, ctx->vm, NULL);
ctx              1210 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = check_scratch(ctx, offset);
ctx              1214 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	rq = igt_request_alloc(ctx, engine);
ctx              1251 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c static int read_from_scratch(struct i915_gem_context *ctx,
ctx              1255 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1299 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	vma = i915_vma_instance(obj, ctx->vm, NULL);
ctx              1309 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = check_scratch(ctx, offset);
ctx              1313 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	rq = igt_request_alloc(ctx, engine);
ctx              1499 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_gem_context *ctx;
ctx              1511 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	ctx = mock_context(i915, "mock");
ctx              1512 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (!ctx) {
ctx              1518 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = context_barrier_task(ctx, 0,
ctx              1531 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = context_barrier_task(ctx, ALL_ENGINES,
ctx              1546 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	rq = igt_request_alloc(ctx, i915->engine[RCS0]);
ctx              1555 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = context_barrier_task(ctx, ALL_ENGINES,
ctx              1570 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = context_barrier_task(ctx, ALL_ENGINES,
ctx              1587 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	mock_context_close(ctx);
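
Two failure conventions are mixed in the selftests above: mock_context() returns NULL on allocation failure, while live_context() and kernel_context() return an ERR_PTR that callers must test with IS_ERR() and decode with PTR_ERR(). The kernel encodes small negative errnos in the top 4095 pointer values (see include/linux/err.h); a userspace model:

	#include <errno.h>
	#include <stdbool.h>
	#include <stdio.h>

	#define MAX_ERRNO 4095	/* the top 4095 addresses encode errnos */

	static inline void *ERR_PTR(long error) { return (void *)error; }
	static inline long PTR_ERR(const void *ptr) { return (long)ptr; }
	static inline bool IS_ERR(const void *ptr)
	{
		return (unsigned long)ptr >= (unsigned long)-MAX_ERRNO;
	}

	static int some_object;	/* stand-in for a real allocation */

	static void *live_context_model(bool fail)
	{
		return fail ? ERR_PTR(-ENOMEM) : (void *)&some_object;
	}

	int main(void)
	{
		void *ctx = live_context_model(true);

		if (IS_ERR(ctx)) {
			printf("err = %ld\n", PTR_ERR(ctx)); /* err = -12 */
			return 1;
		}
		return 0;
	}
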
ctx                18 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c igt_request_alloc(struct i915_gem_context *ctx, struct intel_engine_cs *engine)
ctx                28 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ctx               105 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 		    struct i915_gem_context *ctx,
ctx               111 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	struct i915_address_space *vm = ctx->vm ?: &engine->gt->ggtt->vm;
ctx               125 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	rq = igt_request_alloc(ctx, engine);
ctx                18 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.h igt_request_alloc(struct i915_gem_context *ctx, struct intel_engine_cs *engine);
ctx                27 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.h 		    struct i915_gem_context *ctx,
ctx                14 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	struct i915_gem_context *ctx;
ctx                18 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                19 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	if (!ctx)
ctx                22 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	kref_init(&ctx->ref);
ctx                23 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	INIT_LIST_HEAD(&ctx->link);
ctx                24 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	ctx->i915 = i915;
ctx                26 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	mutex_init(&ctx->engines_mutex);
ctx                27 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	e = default_engines(ctx);
ctx                30 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	RCU_INIT_POINTER(ctx->engines, e);
ctx                32 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	INIT_RADIX_TREE(&ctx->handles_vma, GFP_KERNEL);
ctx                33 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	INIT_LIST_HEAD(&ctx->hw_id_link);
ctx                34 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	mutex_init(&ctx->mutex);
ctx                36 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	ret = i915_gem_context_pin_hw_id(ctx);
ctx                43 drivers/gpu/drm/i915/gem/selftests/mock_context.c 		ctx->name = kstrdup(name, GFP_KERNEL);
ctx                44 drivers/gpu/drm/i915/gem/selftests/mock_context.c 		if (!ctx->name)
ctx                51 drivers/gpu/drm/i915/gem/selftests/mock_context.c 		__set_ppgtt(ctx, &ppgtt->vm);
ctx                55 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	return ctx;
ctx                58 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	free_engines(rcu_access_pointer(ctx->engines));
ctx                60 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	kfree(ctx);
ctx                64 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	i915_gem_context_set_closed(ctx);
ctx                65 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	i915_gem_context_put(ctx);
ctx                69 drivers/gpu/drm/i915/gem/selftests/mock_context.c void mock_context_close(struct i915_gem_context *ctx)
ctx                71 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	context_close(ctx);
ctx                82 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	struct i915_gem_context *ctx;
ctx                87 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	ctx = i915_gem_create_context(i915, 0);
ctx                88 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	if (IS_ERR(ctx))
ctx                89 drivers/gpu/drm/i915/gem/selftests/mock_context.c 		return ctx;
ctx                91 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	err = gem_context_register(ctx, file->driver_priv);
ctx                95 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	return ctx;
ctx                98 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	context_close(ctx);
ctx               108 drivers/gpu/drm/i915/gem/selftests/mock_context.c void kernel_context_close(struct i915_gem_context *ctx)
ctx               110 drivers/gpu/drm/i915/gem/selftests/mock_context.c 	context_close(ctx);
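
mock_context() above is a textbook kernel constructor: each initialization step that can fail jumps to a goto label that unwinds exactly the steps already completed (the engines are freed before the half-built context itself is released). A compact standalone sketch of the reverse-order-unwind idiom (struct layout and names are illustrative):

	#include <stdlib.h>
	#include <string.h>

	struct engines { int n; };
	struct context {
		char *name;
		struct engines *engines;
	};

	struct context *mock_context(const char *name)
	{
		struct context *ctx = calloc(1, sizeof(*ctx));

		if (!ctx)
			return NULL;

		ctx->engines = calloc(1, sizeof(*ctx->engines));
		if (!ctx->engines)
			goto err_free;		/* nothing else to unwind yet */

		ctx->name = strdup(name);
		if (!ctx->name)
			goto err_engines;	/* unwind in reverse order */

		return ctx;

	err_engines:
		free(ctx->engines);
	err_free:
		free(ctx);
		return NULL;
	}

	int main(void)
	{
		struct context *ctx = mock_context("mock");

		if (ctx) {
			free(ctx->name);
			free(ctx->engines);
			free(ctx);
		}
		return 0;
	}
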
ctx                16 drivers/gpu/drm/i915/gem/selftests/mock_context.h void mock_context_close(struct i915_gem_context *ctx);
ctx                22 drivers/gpu/drm/i915/gem/selftests/mock_context.h void kernel_context_close(struct i915_gem_context *ctx);
ctx                33 drivers/gpu/drm/i915/gt/intel_context.c intel_context_create(struct i915_gem_context *ctx,
ctx                42 drivers/gpu/drm/i915/gt/intel_context.c 	intel_context_init(ce, ctx, engine);
ctx               219 drivers/gpu/drm/i915/gt/intel_context.c 		   struct i915_gem_context *ctx,
ctx               226 drivers/gpu/drm/i915/gt/intel_context.c 	ce->gem_context = ctx;
ctx               227 drivers/gpu/drm/i915/gt/intel_context.c 	ce->vm = i915_vm_get(ctx->vm ?: &engine->gt->ggtt->vm);
ctx               228 drivers/gpu/drm/i915/gt/intel_context.c 	if (ctx->timeline)
ctx               229 drivers/gpu/drm/i915/gt/intel_context.c 		ce->timeline = intel_timeline_get(ctx->timeline);
ctx               241 drivers/gpu/drm/i915/gt/intel_context.c 	i915_active_init(ctx->i915, &ce->active,
ctx                18 drivers/gpu/drm/i915/gt/intel_context.h 			struct i915_gem_context *ctx,
ctx                23 drivers/gpu/drm/i915/gt/intel_context.h intel_context_create(struct i915_gem_context *ctx,
ctx               432 drivers/gpu/drm/i915/gt/intel_lrc.c 	struct i915_gem_context *ctx = ce->gem_context;
ctx               455 drivers/gpu/drm/i915/gt/intel_lrc.c 		GEM_BUG_ON(ctx->hw_id >= BIT(GEN11_SW_CTX_ID_WIDTH));
ctx               456 drivers/gpu/drm/i915/gt/intel_lrc.c 		desc |= (u64)ctx->hw_id << GEN11_SW_CTX_ID_SHIFT;
ctx               467 drivers/gpu/drm/i915/gt/intel_lrc.c 		GEM_BUG_ON(ctx->hw_id >= BIT(GEN8_CTX_ID_WIDTH));
ctx               468 drivers/gpu/drm/i915/gt/intel_lrc.c 		desc |= (u64)ctx->hw_id << GEN8_CTX_ID_SHIFT;	/* bits 32-52 */
ctx              3707 drivers/gpu/drm/i915/gt/intel_lrc.c intel_execlists_create_virtual(struct i915_gem_context *ctx,
ctx              3719 drivers/gpu/drm/i915/gt/intel_lrc.c 		return intel_context_create(ctx, siblings[0]);
ctx              3725 drivers/gpu/drm/i915/gt/intel_lrc.c 	ve->base.i915 = ctx->i915;
ctx              3768 drivers/gpu/drm/i915/gt/intel_lrc.c 	intel_context_init(&ve->context, ctx, &ve->base);
ctx              3849 drivers/gpu/drm/i915/gt/intel_lrc.c intel_execlists_clone_virtual(struct i915_gem_context *ctx,
ctx              3855 drivers/gpu/drm/i915/gt/intel_lrc.c 	dst = intel_execlists_create_virtual(ctx,
ctx               122 drivers/gpu/drm/i915/gt/intel_lrc.h intel_execlists_create_virtual(struct i915_gem_context *ctx,
ctx               127 drivers/gpu/drm/i915/gt/intel_lrc.h intel_execlists_clone_virtual(struct i915_gem_context *ctx,
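
intel_lrc.c above packs the software context id into the hardware context descriptor, first asserting that the id fits the field (GEN8 uses bits 32-52 per the comment quoted above; GEN11 a narrower field at a different shift). A self-contained sketch of that pack-with-width-check pattern, with assert() standing in for GEM_BUG_ON():

	#include <assert.h>
	#include <stdint.h>
	#include <stdio.h>

	#define GEN8_CTX_ID_SHIFT 32
	#define GEN8_CTX_ID_WIDTH 21	/* bits 32-52 inclusive */

	static uint64_t pack_desc(uint64_t desc, uint32_t hw_id)
	{
		/* GEM_BUG_ON(ctx->hw_id >= BIT(GEN8_CTX_ID_WIDTH)) */
		assert(hw_id < (UINT32_C(1) << GEN8_CTX_ID_WIDTH));
		return desc | ((uint64_t)hw_id << GEN8_CTX_ID_SHIFT);
	}

	int main(void)
	{
		uint64_t desc = pack_desc(0, 42);

		printf("desc = 0x%016llx\n", (unsigned long long)desc);
		return 0;
	}
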
ctx                55 drivers/gpu/drm/i915/gt/intel_reset.c 			       const struct i915_gem_context *ctx)
ctx                60 drivers/gpu/drm/i915/gt/intel_reset.c 	if (i915_gem_context_is_banned(ctx))
ctx                73 drivers/gpu/drm/i915/gt/intel_reset.c 				 ctx->name, score,
ctx                78 drivers/gpu/drm/i915/gt/intel_reset.c static bool context_mark_guilty(struct i915_gem_context *ctx)
ctx                84 drivers/gpu/drm/i915/gt/intel_reset.c 	atomic_inc(&ctx->guilty_count);
ctx                87 drivers/gpu/drm/i915/gt/intel_reset.c 	if (!i915_gem_context_is_bannable(ctx))
ctx                91 drivers/gpu/drm/i915/gt/intel_reset.c 	prev_hang = ctx->hang_timestamp[0];
ctx                92 drivers/gpu/drm/i915/gt/intel_reset.c 	for (i = 0; i < ARRAY_SIZE(ctx->hang_timestamp) - 1; i++)
ctx                93 drivers/gpu/drm/i915/gt/intel_reset.c 		ctx->hang_timestamp[i] = ctx->hang_timestamp[i + 1];
ctx                94 drivers/gpu/drm/i915/gt/intel_reset.c 	ctx->hang_timestamp[i] = jiffies;
ctx                97 drivers/gpu/drm/i915/gt/intel_reset.c 	banned = !i915_gem_context_is_recoverable(ctx);
ctx               102 drivers/gpu/drm/i915/gt/intel_reset.c 				 ctx->name, atomic_read(&ctx->guilty_count));
ctx               103 drivers/gpu/drm/i915/gt/intel_reset.c 		i915_gem_context_set_banned(ctx);
ctx               106 drivers/gpu/drm/i915/gt/intel_reset.c 	if (!IS_ERR_OR_NULL(ctx->file_priv))
ctx               107 drivers/gpu/drm/i915/gt/intel_reset.c 		client_mark_guilty(ctx->file_priv, ctx);
ctx               112 drivers/gpu/drm/i915/gt/intel_reset.c static void context_mark_innocent(struct i915_gem_context *ctx)
ctx               114 drivers/gpu/drm/i915/gt/intel_reset.c 	atomic_inc(&ctx->active_count);
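
context_mark_guilty() above keeps a short sliding window of hang timestamps per context: the array is shifted left by one, jiffies lands in the last slot, and the age of the evicted oldest entry feeds the ban decision, so only contexts that hang repeatedly in quick succession get banned. A userspace model of the timing half of that decision (time() stands in for jiffies; window size and period are illustrative):

	#include <stdbool.h>
	#include <stdio.h>
	#include <time.h>

	#define HANG_WINDOW 2	/* two remembered hangs, as above */
	#define BAN_PERIOD 60	/* illustrative: repeat hang within 60s */

	static time_t hang_timestamp[HANG_WINDOW];

	static bool mark_guilty(void)
	{
		time_t prev = hang_timestamp[0];
		int i;

		/* Shift the window left and record the newest hang. */
		for (i = 0; i < HANG_WINDOW - 1; i++)
			hang_timestamp[i] = hang_timestamp[i + 1];
		hang_timestamp[i] = time(NULL);

		/* Ban only if the evicted oldest hang is still recent. */
		return prev && time(NULL) - prev < BAN_PERIOD;
	}

	int main(void)
	{
		printf("banned: %d\n", mark_guilty()); /* 0: window empty */
		printf("banned: %d\n", mark_guilty()); /* 0: window filling */
		printf("banned: %d\n", mark_guilty()); /* 1: hangs too close */
		return 0;
	}
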
ctx              1725 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	struct i915_gem_context *ctx = rq->gem_context;
ctx              1728 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	if (!ctx->remap_slice)
ctx              1732 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 		if (!(ctx->remap_slice & BIT(i)))
ctx              1740 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	ctx->remap_slice = 0;
ctx                48 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_gem_context *ctx;
ctx                61 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	h->ctx = kernel_context(gt->i915);
ctx                62 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	if (IS_ERR(h->ctx))
ctx                63 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		return PTR_ERR(h->ctx);
ctx                65 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	GEM_BUG_ON(i915_gem_context_is_bannable(h->ctx));
ctx               104 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	kernel_context_close(h->ctx);
ctx               134 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_address_space *vm = h->ctx->vm ?: &engine->gt->ggtt->vm;
ctx               175 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	rq = igt_request_alloc(h->ctx, engine);
ctx               286 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	kernel_context_close(h->ctx);
ctx               373 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_gem_context *ctx;
ctx               387 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	ctx = live_context(gt->i915, file);
ctx               389 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	if (IS_ERR(ctx)) {
ctx               390 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		err = PTR_ERR(ctx);
ctx               394 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	i915_gem_context_clear_bannable(ctx);
ctx               406 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 				rq = igt_request_alloc(ctx, engine);
ctx               454 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_gem_context *ctx;
ctx               469 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	ctx = live_context(gt->i915, file);
ctx               471 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	if (IS_ERR(ctx)) {
ctx               472 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		err = PTR_ERR(ctx);
ctx               476 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	i915_gem_context_clear_bannable(ctx);
ctx               501 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 				rq = igt_request_alloc(ctx, engine);
ctx               718 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_gem_context *ctx[ARRAY_SIZE(rq)];
ctx               727 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	for (count = 0; count < ARRAY_SIZE(ctx); count++) {
ctx               729 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		ctx[count] = live_context(engine->i915, file);
ctx               731 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		if (IS_ERR(ctx[count])) {
ctx               732 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			err = PTR_ERR(ctx[count]);
ctx               734 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 				i915_gem_context_put(ctx[count]);
ctx               745 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		new = igt_request_alloc(ctx[idx], engine);
ctx               753 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			ctx[idx]->sched.priority =
ctx               805 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			h.ctx->sched.priority = 1024;
ctx              1342 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	struct i915_gem_context *ctx;
ctx              1351 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	ctx = live_context(gt->i915, file);
ctx              1353 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	if (IS_ERR(ctx)) {
ctx              1354 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		err = PTR_ERR(ctx);
ctx              1359 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	if (ctx->vm) /* aliasing == global gtt locking, covered above */
ctx              1360 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		err = __igt_reset_evict_vma(gt, ctx->vm,
ctx                26 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx;
ctx                41 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ctx = kernel_context(i915);
ctx                42 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (!ctx)
ctx                45 drivers/gpu/drm/i915/gt/selftest_lrc.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ctx                72 drivers/gpu/drm/i915/gt/selftest_lrc.c 	i915_gem_context_unlock_engines(ctx);
ctx                73 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(ctx);
ctx               122 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx;
ctx               126 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ctx = kernel_context(engine->i915);
ctx               127 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (!ctx)
ctx               130 drivers/gpu/drm/i915/gt/selftest_lrc.c 	rq = igt_request_alloc(ctx, engine);
ctx               140 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(ctx);
ctx               486 drivers/gpu/drm/i915/gt/selftest_lrc.c 		       struct i915_gem_context *ctx,
ctx               493 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ctx               723 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx;
ctx               729 drivers/gpu/drm/i915/gt/selftest_lrc.c 	c->ctx = kernel_context(i915);
ctx               730 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (!c->ctx)
ctx               739 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(c->ctx);
ctx               746 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(c->ctx);
ctx               773 drivers/gpu/drm/i915/gt/selftest_lrc.c 	b.ctx->sched.priority = I915_USER_PRIORITY(I915_PRIORITY_MAX);
ctx               784 drivers/gpu/drm/i915/gt/selftest_lrc.c 					      a.ctx, engine,
ctx               801 drivers/gpu/drm/i915/gt/selftest_lrc.c 					      b.ctx, engine,
ctx               903 drivers/gpu/drm/i915/gt/selftest_lrc.c 					      a.ctx, engine,
ctx               920 drivers/gpu/drm/i915/gt/selftest_lrc.c 						      b.ctx, engine,
ctx              1074 drivers/gpu/drm/i915/gt/selftest_lrc.c 							       client[i].ctx, engine,
ctx              1183 drivers/gpu/drm/i915/gt/selftest_lrc.c 					    lo.ctx, engine,
ctx              1209 drivers/gpu/drm/i915/gt/selftest_lrc.c 						    hi.ctx, engine,
ctx              1218 drivers/gpu/drm/i915/gt/selftest_lrc.c 						    lo.ctx, engine,
ctx              1225 drivers/gpu/drm/i915/gt/selftest_lrc.c 				rq = igt_request_alloc(lo.ctx, engine);
ctx              1231 drivers/gpu/drm/i915/gt/selftest_lrc.c 			rq = igt_request_alloc(hi.ctx, engine);
ctx              1250 drivers/gpu/drm/i915/gt/selftest_lrc.c 			rq = igt_request_alloc(lo.ctx, engine);
ctx              1435 drivers/gpu/drm/i915/gt/selftest_lrc.c 			struct i915_gem_context *ctx, int prio,
ctx              1443 drivers/gpu/drm/i915/gt/selftest_lrc.c 		vma = i915_vma_instance(batch, ctx->vm, NULL);
ctx              1452 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ctx->sched.priority = prio;
ctx              1454 drivers/gpu/drm/i915/gt/selftest_lrc.c 	rq = igt_request_alloc(ctx, smoke->engine);
ctx              1489 drivers/gpu/drm/i915/gt/selftest_lrc.c 		struct i915_gem_context *ctx = smoke_context(smoke);
ctx              1494 drivers/gpu/drm/i915/gt/selftest_lrc.c 				   ctx, count % I915_PRIORITY_MAX,
ctx              1568 drivers/gpu/drm/i915/gt/selftest_lrc.c 			struct i915_gem_context *ctx = smoke_context(smoke);
ctx              1572 drivers/gpu/drm/i915/gt/selftest_lrc.c 					   ctx, random_priority(&smoke->prng),
ctx              1680 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx[16];
ctx              1687 drivers/gpu/drm/i915/gt/selftest_lrc.c 	GEM_BUG_ON(!nctx || nctx > ARRAY_SIZE(ctx));
ctx              1690 drivers/gpu/drm/i915/gt/selftest_lrc.c 		ctx[n] = kernel_context(i915);
ctx              1691 drivers/gpu/drm/i915/gt/selftest_lrc.c 		if (!ctx[n]) {
ctx              1697 drivers/gpu/drm/i915/gt/selftest_lrc.c 		ve[n] = intel_execlists_create_virtual(ctx[n],
ctx              1700 drivers/gpu/drm/i915/gt/selftest_lrc.c 			kernel_context_close(ctx[n]);
ctx              1709 drivers/gpu/drm/i915/gt/selftest_lrc.c 			kernel_context_close(ctx[n]);
ctx              1790 drivers/gpu/drm/i915/gt/selftest_lrc.c 		kernel_context_close(ctx[nc]);
ctx              1854 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx;
ctx              1865 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ctx = kernel_context(i915);
ctx              1866 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (!ctx)
ctx              1869 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ve = intel_execlists_create_virtual(ctx, siblings, nsibling);
ctx              1940 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(ctx);
ctx              1988 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct i915_gem_context *ctx;
ctx              1996 drivers/gpu/drm/i915/gt/selftest_lrc.c 	ctx = kernel_context(i915);
ctx              1997 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (!ctx)
ctx              2010 drivers/gpu/drm/i915/gt/selftest_lrc.c 		rq[0] = igt_request_alloc(ctx, master);
ctx              2030 drivers/gpu/drm/i915/gt/selftest_lrc.c 			ve = intel_execlists_create_virtual(ctx,
ctx              2110 drivers/gpu/drm/i915/gt/selftest_lrc.c 	kernel_context_close(ctx);
ctx               167 drivers/gpu/drm/i915/gt/selftest_timeline.c 		      u64 ctx,
ctx               173 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (__intel_timeline_sync_is_later(tl, ctx, p->seqno) != p->expected) {
ctx               175 drivers/gpu/drm/i915/gt/selftest_timeline.c 		       name, p->name, ctx, p->seqno, yesno(p->expected));
ctx               180 drivers/gpu/drm/i915/gt/selftest_timeline.c 		ret = __intel_timeline_sync_set(tl, ctx, p->seqno);
ctx               216 drivers/gpu/drm/i915/gt/selftest_timeline.c 				u64 ctx = BIT_ULL(order) + offset;
ctx               218 drivers/gpu/drm/i915/gt/selftest_timeline.c 				ret = __igt_sync(&tl, ctx, p, "1");
ctx               229 drivers/gpu/drm/i915/gt/selftest_timeline.c 			u64 ctx = BIT_ULL(order) + offset;
ctx               232 drivers/gpu/drm/i915/gt/selftest_timeline.c 				ret = __igt_sync(&tl, ctx, p, "2");
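
The selftest_timeline.c entries exercise __intel_timeline_sync_is_later() and __intel_timeline_sync_set(), which remember the most recent 32-bit seqno seen per 64-bit context id. The "is later" test has to survive seqno wraparound, which the kernel handles with a signed-difference compare; a sketch of that compare plus a toy one-slot cache (the real store is the i915 syncmap, a compressed radix tree):

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdio.h>

	/* Wrap-safe "a is at or after b", modulo 2^32. */
	static bool seqno_passed(uint32_t a, uint32_t b)
	{
		return (int32_t)(a - b) >= 0;
	}

	/* Toy one-slot "syncmap": last seqno per context id. */
	static uint64_t cached_ctx;
	static uint32_t cached_seqno;
	static bool cached_valid;

	static bool sync_is_later(uint64_t ctx, uint32_t seqno)
	{
		return cached_valid && cached_ctx == ctx &&
		       seqno_passed(cached_seqno, seqno);
	}

	static void sync_set(uint64_t ctx, uint32_t seqno)
	{
		cached_ctx = ctx;
		cached_seqno = seqno;
		cached_valid = true;
	}

	int main(void)
	{
		sync_set(1, 0xfffffff0u);
		printf("%d\n", sync_is_later(1, 0xffffffe0u)); /* 1 */
		printf("%d\n", sync_is_later(1, 0x00000010u)); /* 0: wrapped */
		return 0;
	}
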
ctx                74 drivers/gpu/drm/i915/gt/selftest_workarounds.c read_nonprivs(struct i915_gem_context *ctx, struct intel_engine_cs *engine)
ctx               109 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	rq = igt_request_alloc(ctx, engine);
ctx               124 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (INTEL_GEN(ctx->i915) >= 8)
ctx               179 drivers/gpu/drm/i915/gt/selftest_workarounds.c static int check_whitelist(struct i915_gem_context *ctx,
ctx               188 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	results = read_nonprivs(ctx, engine);
ctx               194 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	intel_wedge_on_timeout(&wedge, &ctx->i915->gt, HZ / 5) /* safety net! */
ctx               197 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (intel_gt_is_wedged(&ctx->i915->gt))
ctx               243 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_gem_context *ctx;
ctx               249 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = kernel_context(engine->i915);
ctx               250 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(ctx))
ctx               251 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		return PTR_ERR(ctx);
ctx               253 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	GEM_BUG_ON(i915_gem_context_is_bannable(ctx));
ctx               255 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ce = i915_gem_context_get_engine(ctx, engine->legacy_idx);
ctx               263 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	kernel_context_close(ctx);
ctx               290 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_gem_context *ctx, *tmp;
ctx               298 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = kernel_context(i915);
ctx               299 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(ctx))
ctx               300 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		return PTR_ERR(ctx);
ctx               306 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	err = check_whitelist(ctx, engine);
ctx               326 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	err = check_whitelist(ctx, engine);
ctx               338 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	kernel_context_close(ctx);
ctx               339 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = tmp;
ctx               341 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	err = check_whitelist(ctx, engine);
ctx               351 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	kernel_context_close(ctx);
ctx               355 drivers/gpu/drm/i915/gt/selftest_workarounds.c static struct i915_vma *create_batch(struct i915_gem_context *ctx)
ctx               361 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	obj = i915_gem_object_create_internal(ctx->i915, 16 * PAGE_SIZE);
ctx               365 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	vma = i915_vma_instance(obj, ctx->vm, NULL);
ctx               437 drivers/gpu/drm/i915/gt/selftest_workarounds.c static int check_dirty_whitelist(struct i915_gem_context *ctx,
ctx               471 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	scratch = create_scratch(ctx->vm, 2 * ARRAY_SIZE(values) + 1);
ctx               475 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	batch = create_batch(ctx);
ctx               497 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		if (INTEL_GEN(ctx->i915) >= 8)
ctx               556 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		rq = igt_request_alloc(ctx, engine);
ctx               582 drivers/gpu/drm/i915/gt/selftest_workarounds.c 			intel_gt_set_wedged(&ctx->i915->gt);
ctx               671 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (igt_flush_test(ctx->i915, I915_WAIT_LOCKED))
ctx               684 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_gem_context *ctx;
ctx               705 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = live_context(i915, file);
ctx               706 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(ctx)) {
ctx               707 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = PTR_ERR(ctx);
ctx               715 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = check_dirty_whitelist(ctx, engine);
ctx               763 drivers/gpu/drm/i915/gt/selftest_workarounds.c static int read_whitelisted_registers(struct i915_gem_context *ctx,
ctx               771 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	rq = igt_request_alloc(ctx, engine);
ctx               776 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (INTEL_GEN(ctx->i915) >= 8)
ctx               808 drivers/gpu/drm/i915/gt/selftest_workarounds.c static int scrub_whitelisted_registers(struct i915_gem_context *ctx,
ctx               816 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	batch = create_batch(ctx);
ctx               841 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	rq = igt_request_alloc(ctx, engine);
ctx               978 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		struct i915_gem_context *ctx;
ctx              1020 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		client[i].ctx = c;
ctx              1028 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = read_whitelisted_registers(client[0].ctx, engine,
ctx              1034 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = scrub_whitelisted_registers(client[0].ctx, engine);
ctx              1039 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = read_whitelisted_registers(client[1].ctx, engine,
ctx              1053 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = read_whitelisted_registers(client[0].ctx, engine,
ctx              1069 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		if (!client[i].ctx)
ctx              1074 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		kernel_context_close(client[i].ctx);
ctx              1084 drivers/gpu/drm/i915/gt/selftest_workarounds.c verify_wa_lists(struct i915_gem_context *ctx, struct wa_lists *lists,
ctx              1087 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct drm_i915_private *i915 = ctx->i915;
ctx              1094 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	for_each_gem_engine(ce, i915_gem_context_engines(ctx), it) {
ctx              1113 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_gem_context *ctx;
ctx              1121 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = kernel_context(i915);
ctx              1122 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(ctx))
ctx              1123 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		return PTR_ERR(ctx);
ctx              1125 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_gem_context_lock_engines(ctx);
ctx              1134 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ok = verify_wa_lists(ctx, &lists, "before reset");
ctx              1140 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ok = verify_wa_lists(ctx, &lists, "after reset");
ctx              1143 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_gem_context_unlock_engines(ctx);
ctx              1144 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	kernel_context_close(ctx);
ctx              1157 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_gem_context *ctx;
ctx              1168 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	ctx = kernel_context(i915);
ctx              1169 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(ctx))
ctx              1170 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		return PTR_ERR(ctx);
ctx              1177 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ctx              1183 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		ok = verify_wa_lists(ctx, &lists, "before reset");
ctx              1191 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		ok = verify_wa_lists(ctx, &lists, "after idle reset");
ctx              1222 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		ok = verify_wa_lists(ctx, &lists, "after busy reset");
ctx              1229 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_gem_context_unlock_engines(ctx);
ctx              1233 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	kernel_context_close(ctx);
ctx                45 drivers/gpu/drm/i915/gvt/execlist.c #define valid_context(ctx) ((ctx)->valid)
ctx                78 drivers/gpu/drm/i915/gvt/execlist.c 		&execlist->running_slot->ctx[0] : NULL;
ctx               188 drivers/gpu/drm/i915/gvt/execlist.c 		struct execlist_ctx_descriptor_format *ctx)
ctx               193 drivers/gpu/drm/i915/gvt/execlist.c 	struct execlist_ctx_descriptor_format *ctx0 = &running->ctx[0];
ctx               194 drivers/gpu/drm/i915/gvt/execlist.c 	struct execlist_ctx_descriptor_format *ctx1 = &running->ctx[1];
ctx               199 drivers/gpu/drm/i915/gvt/execlist.c 	gvt_dbg_el("schedule out context id %x\n", ctx->context_id);
ctx               201 drivers/gpu/drm/i915/gvt/execlist.c 	if (WARN_ON(!same_context(ctx, execlist->running_context))) {
ctx               204 drivers/gpu/drm/i915/gvt/execlist.c 				ctx->context_id,
ctx               210 drivers/gpu/drm/i915/gvt/execlist.c 	if (valid_context(ctx1) && same_context(ctx0, ctx)) {
ctx               218 drivers/gpu/drm/i915/gvt/execlist.c 		status.context_id = ctx->context_id;
ctx               229 drivers/gpu/drm/i915/gvt/execlist.c 	} else if ((!valid_context(ctx1) && same_context(ctx0, ctx))
ctx               230 drivers/gpu/drm/i915/gvt/execlist.c 			|| (valid_context(ctx1) && same_context(ctx1, ctx))) {
ctx               238 drivers/gpu/drm/i915/gvt/execlist.c 		status.context_id = ctx->context_id;
ctx               281 drivers/gpu/drm/i915/gvt/execlist.c 		struct execlist_ctx_descriptor_format ctx[2])
ctx               299 drivers/gpu/drm/i915/gvt/execlist.c 	memset(slot->ctx, 0, sizeof(slot->ctx));
ctx               301 drivers/gpu/drm/i915/gvt/execlist.c 	slot->ctx[0] = ctx[0];
ctx               302 drivers/gpu/drm/i915/gvt/execlist.c 	slot->ctx[1] = ctx[1];
ctx               305 drivers/gpu/drm/i915/gvt/execlist.c 			slot->index, ctx[0].context_id,
ctx               306 drivers/gpu/drm/i915/gvt/execlist.c 			ctx[1].context_id);
ctx               317 drivers/gpu/drm/i915/gvt/execlist.c 		execlist->running_context = &slot->ctx[0];
ctx               332 drivers/gpu/drm/i915/gvt/execlist.c 	ctx0 = &running->ctx[0];
ctx               333 drivers/gpu/drm/i915/gvt/execlist.c 	ctx1 = &running->ctx[1];
ctx               346 drivers/gpu/drm/i915/gvt/execlist.c 	if ((valid_context(ctx1) && same_context(ctx1, &slot->ctx[0]) &&
ctx               350 drivers/gpu/drm/i915/gvt/execlist.c 			 same_context(ctx0, &slot->ctx[0]))) { /* condition b */
ctx               359 drivers/gpu/drm/i915/gvt/execlist.c 		status.context_id = ctx[0].context_id;
ctx               381 drivers/gpu/drm/i915/gvt/execlist.c 	struct execlist_ctx_descriptor_format ctx[2];
ctx               388 drivers/gpu/drm/i915/gvt/execlist.c 	ctx[0] = *get_desc_from_elsp_dwords(&workload->elsp_dwords, 0);
ctx               389 drivers/gpu/drm/i915/gvt/execlist.c 	ctx[1] = *get_desc_from_elsp_dwords(&workload->elsp_dwords, 1);
ctx               391 drivers/gpu/drm/i915/gvt/execlist.c 	ret = emulate_execlist_schedule_in(&s->execlist[ring_id], ctx);
ctx               162 drivers/gpu/drm/i915/gvt/execlist.h 	struct execlist_ctx_descriptor_format ctx[2];
ctx               365 drivers/gpu/drm/i915/gvt/scheduler.c 					  struct i915_gem_context *ctx)
ctx               368 drivers/gpu/drm/i915/gvt/scheduler.c 	struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(ctx->vm);
ctx              1239 drivers/gpu/drm/i915/gvt/scheduler.c 	struct i915_gem_context *ctx;
ctx              1245 drivers/gpu/drm/i915/gvt/scheduler.c 	ctx = i915_gem_context_create_kernel(i915, I915_PRIORITY_MAX);
ctx              1246 drivers/gpu/drm/i915/gvt/scheduler.c 	if (IS_ERR(ctx)) {
ctx              1247 drivers/gpu/drm/i915/gvt/scheduler.c 		ret = PTR_ERR(ctx);
ctx              1251 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_gem_context_set_force_single_submission(ctx);
ctx              1253 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_context_ppgtt_root_save(s, i915_vm_to_ppgtt(ctx->vm));
ctx              1261 drivers/gpu/drm/i915/gvt/scheduler.c 		ce = intel_context_create(ctx, engine);
ctx              1298 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_gem_context_put(ctx);
ctx              1303 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_context_ppgtt_root_restore(s, i915_vm_to_ppgtt(ctx->vm));
ctx              1311 drivers/gpu/drm/i915/gvt/scheduler.c 	i915_gem_context_put(ctx);
ctx              1309 drivers/gpu/drm/i915/i915_cmd_parser.c static int check_bbstart(const struct i915_gem_context *ctx,
ctx              1319 drivers/gpu/drm/i915/i915_cmd_parser.c 	if (CMDPARSER_USES_GGTT(ctx->i915)) {
ctx              1355 drivers/gpu/drm/i915/i915_cmd_parser.c 	if (ctx->jump_whitelist_cmds <= target_cmd_index) {
ctx              1358 drivers/gpu/drm/i915/i915_cmd_parser.c 	} else if (!test_bit(target_cmd_index, ctx->jump_whitelist)) {
ctx              1367 drivers/gpu/drm/i915/i915_cmd_parser.c static void init_whitelist(struct i915_gem_context *ctx, u32 batch_len)
ctx              1374 drivers/gpu/drm/i915/i915_cmd_parser.c 	if (CMDPARSER_USES_GGTT(ctx->i915))
ctx              1377 drivers/gpu/drm/i915/i915_cmd_parser.c 	if (batch_cmds <= ctx->jump_whitelist_cmds) {
ctx              1378 drivers/gpu/drm/i915/i915_cmd_parser.c 		bitmap_zero(ctx->jump_whitelist, batch_cmds);
ctx              1385 drivers/gpu/drm/i915/i915_cmd_parser.c 		kfree(ctx->jump_whitelist);
ctx              1386 drivers/gpu/drm/i915/i915_cmd_parser.c 		ctx->jump_whitelist = next_whitelist;
ctx              1387 drivers/gpu/drm/i915/i915_cmd_parser.c 		ctx->jump_whitelist_cmds =
ctx              1398 drivers/gpu/drm/i915/i915_cmd_parser.c 	bitmap_zero(ctx->jump_whitelist, ctx->jump_whitelist_cmds);
ctx              1423 drivers/gpu/drm/i915/i915_cmd_parser.c int intel_engine_cmd_parser(struct i915_gem_context *ctx,
ctx              1446 drivers/gpu/drm/i915/i915_cmd_parser.c 	init_whitelist(ctx, batch_len);
ctx              1488 drivers/gpu/drm/i915/i915_cmd_parser.c 			ret = check_bbstart(ctx, cmd, offset, length,
ctx              1497 drivers/gpu/drm/i915/i915_cmd_parser.c 		if (ctx->jump_whitelist_cmds > offset)
ctx              1498 drivers/gpu/drm/i915/i915_cmd_parser.c 			set_bit(offset, ctx->jump_whitelist);
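
init_whitelist() above maintains a bitmap with one bit per command slot in the batch: if the existing allocation is already large enough it is simply cleared and reused, otherwise it is replaced and the new capacity recorded; check_bbstart() then rejects any batch-buffer-start whose target bit was never set while scanning. A simplified model of the grow-or-reuse bitmap (32-bit words for brevity where the kernel uses unsigned long; allocation-failure handling is elided):

	#include <stdbool.h>
	#include <stdint.h>
	#include <stdlib.h>
	#include <string.h>

	static uint32_t *jump_whitelist;
	static size_t jump_whitelist_cmds;	/* capacity, in bits */

	#define WORDS(bits) (((bits) + 31) / 32)

	static void init_whitelist(size_t batch_cmds)
	{
		if (jump_whitelist && batch_cmds <= jump_whitelist_cmds) {
			/* big enough already: clear and reuse */
			memset(jump_whitelist, 0,
			       WORDS(batch_cmds) * sizeof(uint32_t));
			return;
		}

		free(jump_whitelist);
		jump_whitelist = calloc(WORDS(batch_cmds), sizeof(uint32_t));
		jump_whitelist_cmds = jump_whitelist ? batch_cmds : 0;
	}

	static void set_cmd_bit(size_t n)
	{
		jump_whitelist[n / 32] |= UINT32_C(1) << (n % 32);
	}

	/* check_bbstart() essence: a jump is valid only if its target is
	 * a command offset the parser already walked and whitelisted. */
	static bool jump_ok(size_t target)
	{
		return target < jump_whitelist_cmds &&
		       (jump_whitelist[target / 32] >> (target % 32)) & 1;
	}

	int main(void)
	{
		init_whitelist(64);
		set_cmd_bit(3);
		return jump_ok(3) && !jump_ok(4) ? 0 : 1;
	}
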
ctx               312 drivers/gpu/drm/i915/i915_debugfs.c 	struct i915_gem_context *ctx;
ctx               314 drivers/gpu/drm/i915/i915_debugfs.c 	list_for_each_entry(ctx, &i915->contexts.list, link) {
ctx               319 drivers/gpu/drm/i915/i915_debugfs.c 				    i915_gem_context_lock_engines(ctx), it) {
ctx               329 drivers/gpu/drm/i915/i915_debugfs.c 		i915_gem_context_unlock_engines(ctx);
ctx               331 drivers/gpu/drm/i915/i915_debugfs.c 		if (!IS_ERR_OR_NULL(ctx->file_priv)) {
ctx               332 drivers/gpu/drm/i915/i915_debugfs.c 			struct file_stats stats = { .vm = ctx->vm, };
ctx               333 drivers/gpu/drm/i915/i915_debugfs.c 			struct drm_file *file = ctx->file_priv->file;
ctx               342 drivers/gpu/drm/i915/i915_debugfs.c 			task = pid_task(ctx->pid ?: file->pid, PIDTYPE_PID);
ctx              1569 drivers/gpu/drm/i915/i915_debugfs.c 	struct i915_gem_context *ctx;
ctx              1576 drivers/gpu/drm/i915/i915_debugfs.c 	list_for_each_entry(ctx, &dev_priv->contexts.list, link) {
ctx              1581 drivers/gpu/drm/i915/i915_debugfs.c 		if (!list_empty(&ctx->hw_id_link))
ctx              1582 drivers/gpu/drm/i915/i915_debugfs.c 			seq_printf(m, "%x [pin %u]", ctx->hw_id,
ctx              1583 drivers/gpu/drm/i915/i915_debugfs.c 				   atomic_read(&ctx->hw_id_pin_count));
ctx              1584 drivers/gpu/drm/i915/i915_debugfs.c 		if (ctx->pid) {
ctx              1587 drivers/gpu/drm/i915/i915_debugfs.c 			task = get_pid_task(ctx->pid, PIDTYPE_PID);
ctx              1593 drivers/gpu/drm/i915/i915_debugfs.c 		} else if (IS_ERR(ctx->file_priv)) {
ctx              1599 drivers/gpu/drm/i915/i915_debugfs.c 		seq_putc(m, ctx->remap_slice ? 'R' : 'r');
ctx              1603 drivers/gpu/drm/i915/i915_debugfs.c 				    i915_gem_context_lock_engines(ctx), it) {
ctx              1614 drivers/gpu/drm/i915/i915_debugfs.c 		i915_gem_context_unlock_engines(ctx);
ctx              4494 drivers/gpu/drm/i915/i915_debugfs.c 	struct drm_modeset_acquire_ctx ctx;
ctx              4499 drivers/gpu/drm/i915/i915_debugfs.c 	drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
ctx              4504 drivers/gpu/drm/i915/i915_debugfs.c 				       &ctx);
ctx              4506 drivers/gpu/drm/i915/i915_debugfs.c 			if (ret == -EDEADLK && !drm_modeset_backoff(&ctx)) {
ctx              4517 drivers/gpu/drm/i915/i915_debugfs.c 		ret = drm_modeset_lock(&crtc->mutex, &ctx);
ctx              4519 drivers/gpu/drm/i915/i915_debugfs.c 			ret = drm_modeset_backoff(&ctx);
ctx              4541 drivers/gpu/drm/i915/i915_debugfs.c 	drm_modeset_drop_locks(&ctx);
ctx              4542 drivers/gpu/drm/i915/i915_debugfs.c 	drm_modeset_acquire_fini(&ctx);
ctx              1110 drivers/gpu/drm/i915/i915_drv.h 	struct i915_gem_context *ctx;
ctx              2368 drivers/gpu/drm/i915/i915_drv.h 	struct i915_gem_context *ctx;
ctx              2371 drivers/gpu/drm/i915/i915_drv.h 	ctx = __i915_gem_context_lookup_rcu(file_priv, id);
ctx              2372 drivers/gpu/drm/i915/i915_drv.h 	if (ctx && !kref_get_unless_zero(&ctx->ref))
ctx              2373 drivers/gpu/drm/i915/i915_drv.h 		ctx = NULL;
ctx              2376 drivers/gpu/drm/i915/i915_drv.h 	return ctx;
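
i915_gem_context_lookup() above is the standard RCU lookup shape: find the object under rcu_read_lock() without its lock held, then kref_get_unless_zero(); if the refcount has already dropped to zero the context is mid-teardown and the lookup must pretend not to have found it. A userspace model with C11 atomics (the id-to-pointer table is stubbed out; the conditional increment is the same loop as the pin fast path sketched earlier):

	#include <stdatomic.h>
	#include <stdbool.h>
	#include <stddef.h>

	struct context {
		atomic_int ref;
	};

	/* kref_get_unless_zero(): take a reference only if one exists. */
	static bool get_unless_zero(atomic_int *ref)
	{
		int old = atomic_load(ref);

		while (old != 0)
			if (atomic_compare_exchange_weak(ref, &old, old + 1))
				return true;
		return false;
	}

	static struct context table_entry = { 1 }; /* one live context, id 0 */

	static struct context *lookup_table(unsigned int id)
	{
		return id == 0 ? &table_entry : NULL; /* idr stand-in */
	}

	static struct context *context_lookup(unsigned int id)
	{
		struct context *ctx;

		/* rcu_read_lock() here in the kernel */
		ctx = lookup_table(id);
		if (ctx && !get_unless_zero(&ctx->ref))
			ctx = NULL;	/* found, but being torn down */
		/* rcu_read_unlock() */

		return ctx;
	}

	int main(void)
	{
		return context_lookup(0) ? 0 : 1;
	}
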
ctx               471 drivers/gpu/drm/i915/i915_gpu_error.c 				const struct drm_i915_error_context *ctx)
ctx               474 drivers/gpu/drm/i915/i915_gpu_error.c 		   header, ctx->comm, ctx->pid, ctx->hw_id,
ctx               475 drivers/gpu/drm/i915/i915_gpu_error.c 		   ctx->sched_attr.priority, ctx->guilty, ctx->active);
ctx               771 drivers/gpu/drm/i915/i915_gpu_error.c 		print_error_obj(m, ee->engine, "HW context", ee->ctx);
ctx               939 drivers/gpu/drm/i915/i915_gpu_error.c 		i915_error_object_free(ee->ctx);
ctx              1172 drivers/gpu/drm/i915/i915_gpu_error.c 	const struct i915_gem_context *ctx = request->gem_context;
ctx              1184 drivers/gpu/drm/i915/i915_gpu_error.c 	erq->pid = ctx->pid ? pid_nr(ctx->pid) : 0;
ctx              1252 drivers/gpu/drm/i915/i915_gpu_error.c 	const struct i915_gem_context *ctx = rq->gem_context;
ctx              1254 drivers/gpu/drm/i915/i915_gpu_error.c 	if (ctx->pid) {
ctx              1258 drivers/gpu/drm/i915/i915_gpu_error.c 		task = pid_task(ctx->pid, PIDTYPE_PID);
ctx              1266 drivers/gpu/drm/i915/i915_gpu_error.c 	e->hw_id = ctx->hw_id;
ctx              1267 drivers/gpu/drm/i915/i915_gpu_error.c 	e->sched_attr = ctx->sched;
ctx              1268 drivers/gpu/drm/i915/i915_gpu_error.c 	e->guilty = atomic_read(&ctx->guilty_count);
ctx              1269 drivers/gpu/drm/i915/i915_gpu_error.c 	e->active = atomic_read(&ctx->active_count);
ctx              1271 drivers/gpu/drm/i915/i915_gpu_error.c 	return i915_gem_context_no_error_capture(ctx);
ctx              1408 drivers/gpu/drm/i915/i915_gpu_error.c 				      &ee->ctx);
ctx               134 drivers/gpu/drm/i915/i915_gpu_error.h 		} *ringbuffer, *batchbuffer, *wa_batchbuffer, *ctx, *hws_page;
ctx               792 drivers/gpu/drm/i915/i915_perf.c 		if (!dev_priv->perf.exclusive_stream->ctx ||
ctx               801 drivers/gpu/drm/i915/i915_perf.c 			if (dev_priv->perf.exclusive_stream->ctx &&
ctx              1208 drivers/gpu/drm/i915/i915_perf.c 	struct i915_gem_context *ctx = stream->ctx;
ctx              1216 drivers/gpu/drm/i915/i915_perf.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ctx              1230 drivers/gpu/drm/i915/i915_perf.c 	i915_gem_context_unlock_engines(ctx);
ctx              1382 drivers/gpu/drm/i915/i915_perf.c 	if (stream->ctx)
ctx              1803 drivers/gpu/drm/i915/i915_perf.c static int gen8_configure_context(struct i915_gem_context *ctx,
ctx              1810 drivers/gpu/drm/i915/i915_perf.c 	for_each_gem_engine(ce, i915_gem_context_lock_engines(ctx), it) {
ctx              1820 drivers/gpu/drm/i915/i915_perf.c 		flex->value = intel_sseu_make_rpcs(ctx->i915, &ce->sseu);
ctx              1830 drivers/gpu/drm/i915/i915_perf.c 	i915_gem_context_unlock_engines(ctx);
ctx              1888 drivers/gpu/drm/i915/i915_perf.c 	struct i915_gem_context *ctx;
ctx              1912 drivers/gpu/drm/i915/i915_perf.c 	list_for_each_entry(ctx, &i915->contexts.list, link) {
ctx              1915 drivers/gpu/drm/i915/i915_perf.c 		if (ctx == i915->kernel_context)
ctx              1918 drivers/gpu/drm/i915/i915_perf.c 		err = gen8_configure_context(ctx, regs, ARRAY_SIZE(regs));
ctx              2024 drivers/gpu/drm/i915/i915_perf.c 	struct i915_gem_context *ctx = stream->ctx;
ctx              2047 drivers/gpu/drm/i915/i915_perf.c 		   (ctx ? GEN7_OACONTROL_PER_CTX_ENABLE : 0) |
ctx              2224 drivers/gpu/drm/i915/i915_perf.c 	if (stream->ctx) {
ctx              2295 drivers/gpu/drm/i915/i915_perf.c 	if (stream->ctx)
ctx              2634 drivers/gpu/drm/i915/i915_perf.c 	if (stream->ctx)
ctx              2635 drivers/gpu/drm/i915/i915_perf.c 		i915_gem_context_put(stream->ctx);
ctx              2767 drivers/gpu/drm/i915/i915_perf.c 	stream->ctx = specific_ctx;
ctx               202 drivers/gpu/drm/i915/i915_sysfs.c 	struct i915_gem_context *ctx;
ctx               231 drivers/gpu/drm/i915/i915_sysfs.c 	list_for_each_entry(ctx, &dev_priv->contexts.list, link)
ctx               232 drivers/gpu/drm/i915/i915_sysfs.c 		ctx->remap_slice |= (1<<slice);
ctx               669 drivers/gpu/drm/i915/i915_trace.h 			     __field(u64, ctx)
ctx               681 drivers/gpu/drm/i915/i915_trace.h 			   __entry->ctx = rq->fence.context;
ctx               688 drivers/gpu/drm/i915/i915_trace.h 		      __entry->hw_id, __entry->ctx, __entry->seqno,
ctx               699 drivers/gpu/drm/i915/i915_trace.h 			     __field(u64, ctx)
ctx               710 drivers/gpu/drm/i915/i915_trace.h 			   __entry->ctx = rq->fence.context;
ctx               716 drivers/gpu/drm/i915/i915_trace.h 		      __entry->hw_id, __entry->ctx, __entry->seqno)
ctx               742 drivers/gpu/drm/i915/i915_trace.h 			     __field(u64, ctx)
ctx               755 drivers/gpu/drm/i915/i915_trace.h 			   __entry->ctx = rq->fence.context;
ctx               763 drivers/gpu/drm/i915/i915_trace.h 		      __entry->hw_id, __entry->ctx, __entry->seqno,
ctx               774 drivers/gpu/drm/i915/i915_trace.h 			     __field(u64, ctx)
ctx               786 drivers/gpu/drm/i915/i915_trace.h 			   __entry->ctx = rq->fence.context;
ctx               793 drivers/gpu/drm/i915/i915_trace.h 			      __entry->hw_id, __entry->ctx, __entry->seqno,
ctx               833 drivers/gpu/drm/i915/i915_trace.h 			     __field(u64, ctx)
ctx               851 drivers/gpu/drm/i915/i915_trace.h 			   __entry->ctx = rq->fence.context;
ctx               858 drivers/gpu/drm/i915/i915_trace.h 		      __entry->hw_id, __entry->ctx, __entry->seqno,
ctx               955 drivers/gpu/drm/i915/i915_trace.h 	TP_PROTO(struct i915_gem_context *ctx),
ctx               956 drivers/gpu/drm/i915/i915_trace.h 	TP_ARGS(ctx),
ctx               960 drivers/gpu/drm/i915/i915_trace.h 			__field(struct i915_gem_context *, ctx)
ctx               966 drivers/gpu/drm/i915/i915_trace.h 			__entry->dev = ctx->i915->drm.primary->index;
ctx               967 drivers/gpu/drm/i915/i915_trace.h 			__entry->ctx = ctx;
ctx               968 drivers/gpu/drm/i915/i915_trace.h 			__entry->hw_id = ctx->hw_id;
ctx               969 drivers/gpu/drm/i915/i915_trace.h 			__entry->vm = ctx->vm;
ctx               973 drivers/gpu/drm/i915/i915_trace.h 		  __entry->dev, __entry->ctx, __entry->vm, __entry->hw_id)
ctx               977 drivers/gpu/drm/i915/i915_trace.h 	TP_PROTO(struct i915_gem_context *ctx),
ctx               978 drivers/gpu/drm/i915/i915_trace.h 	TP_ARGS(ctx)
ctx               982 drivers/gpu/drm/i915/i915_trace.h 	TP_PROTO(struct i915_gem_context *ctx),
ctx               983 drivers/gpu/drm/i915/i915_trace.h 	TP_ARGS(ctx)
ctx                19 drivers/gpu/drm/i915/selftests/i915_gem.c 			     struct i915_gem_context *ctx)
ctx                27 drivers/gpu/drm/i915/selftests/i915_gem.c 		rq = igt_request_alloc(ctx, engine);
ctx               134 drivers/gpu/drm/i915/selftests/i915_gem.c 	struct i915_gem_context *ctx;
ctx               144 drivers/gpu/drm/i915/selftests/i915_gem.c 	ctx = live_context(i915, file);
ctx               145 drivers/gpu/drm/i915/selftests/i915_gem.c 	if (!IS_ERR(ctx))
ctx               146 drivers/gpu/drm/i915/selftests/i915_gem.c 		err = switch_to_context(i915, ctx);
ctx               163 drivers/gpu/drm/i915/selftests/i915_gem.c 	err = switch_to_context(i915, ctx);
ctx               173 drivers/gpu/drm/i915/selftests/i915_gem.c 	struct i915_gem_context *ctx;
ctx               183 drivers/gpu/drm/i915/selftests/i915_gem.c 	ctx = live_context(i915, file);
ctx               184 drivers/gpu/drm/i915/selftests/i915_gem.c 	if (!IS_ERR(ctx))
ctx               185 drivers/gpu/drm/i915/selftests/i915_gem.c 		err = switch_to_context(i915, ctx);
ctx               202 drivers/gpu/drm/i915/selftests/i915_gem.c 	err = switch_to_context(i915, ctx);
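The i915_gem.c selftests above rely on the kernel's ERR_PTR convention: live_context() returns either a valid context or an encoded errno, and the switch only happens on success. A condensed sketch, with live_context() and switch_to_context() standing in for the selftest helpers listed above:

	#include <linux/err.h>

	static int check_context_switch(struct drm_i915_private *i915,
					struct drm_file *file)
	{
		struct i915_gem_context *ctx;

		ctx = live_context(i915, file);	/* ERR_PTR() on failure */
		if (IS_ERR(ctx))
			return PTR_ERR(ctx);

		return switch_to_context(i915, ctx);
	}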
ctx               467 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 			struct i915_gem_context *ctx;
ctx               469 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 			ctx = live_context(i915, file);
ctx               470 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 			if (IS_ERR(ctx))
ctx               475 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 			rq = igt_request_alloc(ctx, engine);
ctx               482 drivers/gpu/drm/i915/selftests/i915_gem_evict.c 					       ctx->hw_id, engine->name,
ctx              1251 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	struct i915_gem_context *ctx;
ctx              1255 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	ctx = mock_context(i915, "mock");
ctx              1256 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	if (!ctx)
ctx              1259 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	err = func(i915, ctx->vm, 0, min(ctx->vm->total, limit), end_time);
ctx              1261 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	mock_context_close(ctx);
ctx               196 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_gem_context *ctx[2];
ctx               201 drivers/gpu/drm/i915/selftests/i915_request.c 	ctx[0] = mock_context(i915, "A");
ctx               202 drivers/gpu/drm/i915/selftests/i915_request.c 	ce = i915_gem_context_get_engine(ctx[0], RCS0);
ctx               214 drivers/gpu/drm/i915/selftests/i915_request.c 	ctx[1] = mock_context(i915, "B");
ctx               215 drivers/gpu/drm/i915/selftests/i915_request.c 	ce = i915_gem_context_get_engine(ctx[1], RCS0);
ctx               253 drivers/gpu/drm/i915/selftests/i915_request.c 	mock_context_close(ctx[1]);
ctx               256 drivers/gpu/drm/i915/selftests/i915_request.c 	mock_context_close(ctx[0]);
ctx               335 drivers/gpu/drm/i915/selftests/i915_request.c 			struct i915_gem_context *ctx =
ctx               342 drivers/gpu/drm/i915/selftests/i915_request.c 			ce = i915_gem_context_get_engine(ctx, t->engine->legacy_idx);
ctx               763 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_gem_context *ctx = i915->kernel_context;
ctx               764 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_address_space *vm = ctx->vm ?: &i915->ggtt.vm;
ctx              1066 drivers/gpu/drm/i915/selftests/i915_request.c max_batches(struct i915_gem_context *ctx, struct intel_engine_cs *engine)
ctx              1080 drivers/gpu/drm/i915/selftests/i915_request.c 	if (HAS_EXECLISTS(ctx->i915))
ctx              1083 drivers/gpu/drm/i915/selftests/i915_request.c 	rq = igt_request_alloc(ctx, engine);
ctx               574 drivers/gpu/drm/i915/selftests/i915_syncmap.c 		I915_RND_STATE(ctx);
ctx               582 drivers/gpu/drm/i915/selftests/i915_syncmap.c 			u64 context = i915_prandom_u64_state(&ctx);
ctx                37 drivers/gpu/drm/i915/selftests/i915_vma.c 		       struct i915_gem_context *ctx)
ctx                41 drivers/gpu/drm/i915/selftests/i915_vma.c 	if (vma->vm != ctx->vm) {
ctx               110 drivers/gpu/drm/i915/selftests/i915_vma.c 	struct i915_gem_context *ctx;
ctx               115 drivers/gpu/drm/i915/selftests/i915_vma.c 			list_for_each_entry(ctx, contexts, link) {
ctx               116 drivers/gpu/drm/i915/selftests/i915_vma.c 				struct i915_address_space *vm = ctx->vm;
ctx               124 drivers/gpu/drm/i915/selftests/i915_vma.c 				if (!assert_vma(vma, obj, ctx)) {
ctx               150 drivers/gpu/drm/i915/selftests/i915_vma.c 	struct i915_gem_context *ctx, *cn;
ctx               175 drivers/gpu/drm/i915/selftests/i915_vma.c 				ctx = mock_context(i915, "mock");
ctx               176 drivers/gpu/drm/i915/selftests/i915_vma.c 				if (!ctx)
ctx               179 drivers/gpu/drm/i915/selftests/i915_vma.c 				list_move(&ctx->link, &contexts);
ctx               192 drivers/gpu/drm/i915/selftests/i915_vma.c 		list_for_each_entry_safe(ctx, cn, &contexts, link) {
ctx               193 drivers/gpu/drm/i915/selftests/i915_vma.c 			list_del_init(&ctx->link);
ctx               194 drivers/gpu/drm/i915/selftests/i915_vma.c 			mock_context_close(ctx);
ctx               204 drivers/gpu/drm/i915/selftests/i915_vma.c 	list_for_each_entry_safe(ctx, cn, &contexts, link) {
ctx               205 drivers/gpu/drm/i915/selftests/i915_vma.c 		list_del_init(&ctx->link);
ctx               206 drivers/gpu/drm/i915/selftests/i915_vma.c 		mock_context_close(ctx);
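The i915_vma.c selftest above accumulates mock contexts on a local list and tears them down in two places with the same idiom; the _safe iterator is what makes unlinking and freeing inside the loop legal. A condensed sketch (mock_ctx is a hypothetical stand-in type):

	#include <linux/list.h>

	static void close_all_contexts(struct list_head *contexts)
	{
		struct mock_ctx *ctx, *cn;

		list_for_each_entry_safe(ctx, cn, contexts, link) {
			list_del_init(&ctx->link);	/* unlink first */
			mock_context_close(ctx);	/* then free */
		}
	}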
ctx                11 drivers/gpu/drm/lima/lima_ctx.c 	struct lima_ctx *ctx;
ctx                14 drivers/gpu/drm/lima/lima_ctx.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                15 drivers/gpu/drm/lima/lima_ctx.c 	if (!ctx)
ctx                17 drivers/gpu/drm/lima/lima_ctx.c 	ctx->dev = dev;
ctx                18 drivers/gpu/drm/lima/lima_ctx.c 	kref_init(&ctx->refcnt);
ctx                21 drivers/gpu/drm/lima/lima_ctx.c 		err = lima_sched_context_init(dev->pipe + i, ctx->context + i, &ctx->guilty);
ctx                26 drivers/gpu/drm/lima/lima_ctx.c 	err = xa_alloc(&mgr->handles, id, ctx, xa_limit_32b, GFP_KERNEL);
ctx                34 drivers/gpu/drm/lima/lima_ctx.c 		lima_sched_context_fini(dev->pipe + i, ctx->context + i);
ctx                35 drivers/gpu/drm/lima/lima_ctx.c 	kfree(ctx);
ctx                41 drivers/gpu/drm/lima/lima_ctx.c 	struct lima_ctx *ctx = container_of(ref, struct lima_ctx, refcnt);
ctx                45 drivers/gpu/drm/lima/lima_ctx.c 		lima_sched_context_fini(ctx->dev->pipe + i, ctx->context + i);
ctx                46 drivers/gpu/drm/lima/lima_ctx.c 	kfree(ctx);
ctx                51 drivers/gpu/drm/lima/lima_ctx.c 	struct lima_ctx *ctx;
ctx                55 drivers/gpu/drm/lima/lima_ctx.c 	ctx = xa_erase(&mgr->handles, id);
ctx                56 drivers/gpu/drm/lima/lima_ctx.c 	if (ctx)
ctx                57 drivers/gpu/drm/lima/lima_ctx.c 		kref_put(&ctx->refcnt, lima_ctx_do_release);
ctx                66 drivers/gpu/drm/lima/lima_ctx.c 	struct lima_ctx *ctx;
ctx                69 drivers/gpu/drm/lima/lima_ctx.c 	ctx = xa_load(&mgr->handles, id);
ctx                70 drivers/gpu/drm/lima/lima_ctx.c 	if (ctx)
ctx                71 drivers/gpu/drm/lima/lima_ctx.c 		kref_get(&ctx->refcnt);
ctx                73 drivers/gpu/drm/lima/lima_ctx.c 	return ctx;
ctx                76 drivers/gpu/drm/lima/lima_ctx.c void lima_ctx_put(struct lima_ctx *ctx)
ctx                78 drivers/gpu/drm/lima/lima_ctx.c 	kref_put(&ctx->refcnt, lima_ctx_do_release);
ctx                89 drivers/gpu/drm/lima/lima_ctx.c 	struct lima_ctx *ctx;
ctx                92 drivers/gpu/drm/lima/lima_ctx.c 	xa_for_each(&mgr->handles, id, ctx) {
ctx                93 drivers/gpu/drm/lima/lima_ctx.c 		kref_put(&ctx->refcnt, lima_ctx_do_release);
ctx                26 drivers/gpu/drm/lima/lima_ctx.h void lima_ctx_put(struct lima_ctx *ctx);
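lima_ctx.c above combines a kref for lifetime with an xarray for handle allocation: creation installs the table's reference via xa_alloc(), lookup takes an extra reference before returning, and both the per-handle free path and the final manager teardown drop references through the same release function. A minimal sketch of that pairing, assuming a hypothetical demo_ctx type (the locking lima performs around xa_load()/kref_get() is elided):

	#include <linux/kref.h>
	#include <linux/slab.h>
	#include <linux/xarray.h>

	struct demo_ctx {
		struct kref refcnt;
	};

	static void demo_ctx_release(struct kref *ref)
	{
		kfree(container_of(ref, struct demo_ctx, refcnt));
	}

	/* Create: the refcount starts at 1, owned by the handle table. */
	static int demo_ctx_create(struct xarray *handles, u32 *id)
	{
		struct demo_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		int err;

		if (!ctx)
			return -ENOMEM;

		kref_init(&ctx->refcnt);
		err = xa_alloc(handles, id, ctx, xa_limit_32b, GFP_KERNEL);
		if (err)
			kfree(ctx);
		return err;
	}

	/* Free: erase the handle, then drop the table's reference. */
	static int demo_ctx_free(struct xarray *handles, u32 id)
	{
		struct demo_ctx *ctx = xa_erase(handles, id);

		if (!ctx)
			return -EINVAL;

		kref_put(&ctx->refcnt, demo_ctx_release);
		return 0;
	}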
ctx                96 drivers/gpu/drm/lima/lima_drv.c 	struct lima_ctx *ctx;
ctx               137 drivers/gpu/drm/lima/lima_drv.c 	ctx = lima_ctx_get(&priv->ctx_mgr, args->ctx);
ctx               138 drivers/gpu/drm/lima/lima_drv.c 	if (!ctx) {
ctx               148 drivers/gpu/drm/lima/lima_drv.c 	submit.ctx = ctx;
ctx               156 drivers/gpu/drm/lima/lima_drv.c 	lima_ctx_put(ctx);
ctx                25 drivers/gpu/drm/lima/lima_drv.h 	struct lima_ctx *ctx;
ctx               152 drivers/gpu/drm/lima/lima_gem.c 			     struct ww_acquire_ctx *ctx)
ctx               156 drivers/gpu/drm/lima/lima_gem.c 	ww_acquire_init(ctx, &reservation_ww_class);
ctx               165 drivers/gpu/drm/lima/lima_gem.c 		ret = ww_mutex_lock_interruptible(&bos[i]->gem.resv->lock, ctx);
ctx               172 drivers/gpu/drm/lima/lima_gem.c 	ww_acquire_done(ctx);
ctx               185 drivers/gpu/drm/lima/lima_gem.c 			&bos[contended]->gem.resv->lock, ctx);
ctx               191 drivers/gpu/drm/lima/lima_gem.c 	ww_acquire_fini(ctx);
ctx               197 drivers/gpu/drm/lima/lima_gem.c 				struct ww_acquire_ctx *ctx)
ctx               203 drivers/gpu/drm/lima/lima_gem.c 	ww_acquire_fini(ctx);
ctx               234 drivers/gpu/drm/lima/lima_gem.c 	struct ww_acquire_ctx ctx;
ctx               271 drivers/gpu/drm/lima/lima_gem.c 	err = lima_gem_lock_bos(bos, submit->nr_bos, &ctx);
ctx               276 drivers/gpu/drm/lima/lima_gem.c 		submit->task, submit->ctx->context + submit->pipe,
ctx               295 drivers/gpu/drm/lima/lima_gem.c 		submit->ctx->context + submit->pipe, submit->task);
ctx               304 drivers/gpu/drm/lima/lima_gem.c 	lima_gem_unlock_bos(bos, submit->nr_bos, &ctx);
ctx               321 drivers/gpu/drm/lima/lima_gem.c 	lima_gem_unlock_bos(bos, submit->nr_bos, &ctx);
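lima_gem.c above is a textbook wound/wait acquire loop: lock every reservation in turn, and on -EDEADLK drop everything, take the contended lock in slow-path mode, and retry with it pre-held. A sketch following the same flow over a plain array of ww_mutexes (standing in for the bos[i]->gem.resv->lock reservations in the real code):

	#include <linux/ww_mutex.h>

	static int lock_all(struct ww_mutex **locks, int n,
			    struct ww_acquire_ctx *ctx)
	{
		int i, ret = 0, contended, slow_locked = -1;

		ww_acquire_init(ctx, &reservation_ww_class);
	retry:
		for (i = 0; i < n; i++) {
			if (i == slow_locked) {
				slow_locked = -1;	/* already held */
				continue;
			}

			ret = ww_mutex_lock_interruptible(locks[i], ctx);
			if (ret < 0) {
				contended = i;
				goto err;
			}
		}

		ww_acquire_done(ctx);	/* no new locks after this point */
		return 0;

	err:
		for (i--; i >= 0; i--)
			ww_mutex_unlock(locks[i]);

		if (slow_locked >= 0)
			ww_mutex_unlock(locks[slow_locked]);

		if (ret == -EDEADLK) {
			/* lost the seqno race: take the contended lock in
			 * slow-path mode, then retry from the top */
			ret = ww_mutex_lock_slow_interruptible(
					locks[contended], ctx);
			if (!ret) {
				slow_locked = contended;
				goto retry;
			}
		}

		ww_acquire_fini(ctx);
		return ret;
	}

On success the caller later unlocks every mutex and closes the acquire context with ww_acquire_fini(), which is what lima_gem_unlock_bos() above does.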
ctx                45 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx                54 drivers/gpu/drm/mga/mga_state.c 		DMA_BLOCK(MGA_DWGCTL, ctx->dwgctl,
ctx                56 drivers/gpu/drm/mga/mga_state.c 			  MGA_DWGCTL, ctx->dwgctl,
ctx                69 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx                74 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
ctx                75 drivers/gpu/drm/mga/mga_state.c 		  MGA_MACCESS, ctx->maccess,
ctx                76 drivers/gpu/drm/mga/mga_state.c 		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
ctx                78 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
ctx                79 drivers/gpu/drm/mga/mga_state.c 		  MGA_FOGCOL, ctx->fogcolor,
ctx                80 drivers/gpu/drm/mga/mga_state.c 		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
ctx                82 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_FCOL, ctx->fcol,
ctx                92 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx                97 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_DSTORG, ctx->dstorg,
ctx                98 drivers/gpu/drm/mga/mga_state.c 		  MGA_MACCESS, ctx->maccess,
ctx                99 drivers/gpu/drm/mga/mga_state.c 		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
ctx               101 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_ALPHACTRL, ctx->alphactrl,
ctx               102 drivers/gpu/drm/mga/mga_state.c 		  MGA_FOGCOL, ctx->fogcolor,
ctx               103 drivers/gpu/drm/mga/mga_state.c 		  MGA_WFLAG, ctx->wflag, MGA_ZORG, dev_priv->depth_offset);
ctx               105 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_WFLAG1, ctx->wflag,
ctx               106 drivers/gpu/drm/mga/mga_state.c 		  MGA_TDUALSTAGE0, ctx->tdualstage0,
ctx               107 drivers/gpu/drm/mga/mga_state.c 		  MGA_TDUALSTAGE1, ctx->tdualstage1, MGA_FCOL, ctx->fcol);
ctx               109 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_STENCIL, ctx->stencil,
ctx               110 drivers/gpu/drm/mga/mga_state.c 		  MGA_STENCILCTL, ctx->stencilctl,
ctx               384 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx               386 drivers/gpu/drm/mga/mga_state.c 	if (ctx->dstorg != dev_priv->front_offset &&
ctx               387 drivers/gpu/drm/mga/mga_state.c 	    ctx->dstorg != dev_priv->back_offset) {
ctx               389 drivers/gpu/drm/mga/mga_state.c 			  ctx->dstorg, dev_priv->front_offset,
ctx               391 drivers/gpu/drm/mga/mga_state.c 		ctx->dstorg = 0;
ctx               484 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx               561 drivers/gpu/drm/mga/mga_state.c 		  MGA_PLNWT, ctx->plnwt, MGA_DWGCTL, ctx->dwgctl);
ctx               572 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx               612 drivers/gpu/drm/mga/mga_state.c 		  MGA_PLNWT, ctx->plnwt,
ctx               613 drivers/gpu/drm/mga/mga_state.c 		  MGA_SRCORG, dev_priv->front_offset, MGA_DWGCTL, ctx->dwgctl);
ctx               723 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &dev_priv->sarea_priv->context_state;
ctx               749 drivers/gpu/drm/mga/mga_state.c 	DMA_BLOCK(MGA_PLNWT, ctx->plnwt,
ctx               770 drivers/gpu/drm/mga/mga_state.c 	drm_mga_context_regs_t *ctx = &sarea_priv->context_state;
ctx               817 drivers/gpu/drm/mga/mga_state.c 		  MGA_PLNWT, ctx->plnwt,
ctx               818 drivers/gpu/drm/mga/mga_state.c 		  MGA_PITCH, dev_priv->front_pitch, MGA_DWGCTL, ctx->dwgctl);
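The mga_state.c entries around line 386 above show the legacy driver's defensive handling of SAREA context state: dstorg arrives from userspace, so it is checked against the known front/back buffer offsets and zeroed before anything is emitted to the DMA stream. A condensed sketch of that validation (type and field names follow the listing; error reporting is elided):

	static int check_context_dstorg(drm_mga_private_t *dev_priv,
					drm_mga_context_regs_t *ctx)
	{
		if (ctx->dstorg != dev_priv->front_offset &&
		    ctx->dstorg != dev_priv->back_offset) {
			ctx->dstorg = 0;	/* neutralise bogus offset */
			return -EINVAL;
		}

		return 0;
	}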
ctx              1385 drivers/gpu/drm/mgag200/mgag200_mode.c 			      struct drm_modeset_acquire_ctx *ctx)
ctx                47 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 	struct msm_file_private *ctx)
ctx                60 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 			if (priv->lastctx == ctx)
ctx               107 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 	struct msm_file_private *ctx)
ctx               117 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 		a5xx_submit_in_rb(gpu, submit, ctx);
ctx               151 drivers/gpu/drm/msm/adreno/a5xx_gpu.c 			if (priv->lastctx == ctx)
ctx                83 drivers/gpu/drm/msm/adreno/a6xx_gpu.c 	struct msm_file_private *ctx)
ctx               116 drivers/gpu/drm/msm/adreno/a6xx_gpu.c 			if (priv->lastctx == ctx)
ctx              1009 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 	int ctx, index = 0;
ctx              1011 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 	for (ctx = 0; ctx < A6XX_NUM_CONTEXTS; ctx++) {
ctx              1014 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c 		drm_printf(p, "    - context: %d\n", ctx);
ctx               415 drivers/gpu/drm/msm/adreno/adreno_gpu.c 		struct msm_file_private *ctx)
ctx               429 drivers/gpu/drm/msm/adreno/adreno_gpu.c 			if (priv->lastctx == ctx)
ctx               227 drivers/gpu/drm/msm/adreno/adreno_gpu.h 		struct msm_file_private *ctx);
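Across the a5xx, a6xx and common adreno submit paths above, each ring submission compares the submitting file context against priv->lastctx and only re-emits per-context GPU state on a mismatch. A minimal sketch of that check (demo_submit is a hypothetical name; the state emission itself is hardware specific and elided):

	static void demo_submit(struct msm_gpu *gpu,
				struct msm_gem_submit *submit,
				struct msm_file_private *ctx)
	{
		struct msm_drm_private *priv = gpu->dev->dev_private;

		if (priv->lastctx != ctx) {
			/* context switch: emit per-context state here */
		}

		/* ... queue the command stream for submit ... */
	}

msm_gpu_submit() in the msm_gpu.c entries further down updates priv->lastctx after calling into the backend, which is what makes this comparison valid on the next submission.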
ctx                67 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline u32 dpu_hw_ctl_get_flush_register(struct dpu_hw_ctl *ctx)
ctx                69 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx                74 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline void dpu_hw_ctl_trigger_start(struct dpu_hw_ctl *ctx)
ctx                76 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	trace_dpu_hw_ctl_trigger_start(ctx->pending_flush_mask,
ctx                77 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 				       dpu_hw_ctl_get_flush_register(ctx));
ctx                78 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	DPU_REG_WRITE(&ctx->hw, CTL_START, 0x1);
ctx                81 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline void dpu_hw_ctl_trigger_pending(struct dpu_hw_ctl *ctx)
ctx                83 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	trace_dpu_hw_ctl_trigger_prepare(ctx->pending_flush_mask,
ctx                84 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 					 dpu_hw_ctl_get_flush_register(ctx));
ctx                85 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	DPU_REG_WRITE(&ctx->hw, CTL_PREPARE, 0x1);
ctx                88 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline void dpu_hw_ctl_clear_pending_flush(struct dpu_hw_ctl *ctx)
ctx                90 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	trace_dpu_hw_ctl_clear_pending_flush(ctx->pending_flush_mask,
ctx                91 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 				     dpu_hw_ctl_get_flush_register(ctx));
ctx                92 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	ctx->pending_flush_mask = 0x0;
ctx                95 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline void dpu_hw_ctl_update_pending_flush(struct dpu_hw_ctl *ctx,
ctx                99 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 					      ctx->pending_flush_mask);
ctx               100 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	ctx->pending_flush_mask |= flushbits;
ctx               103 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static u32 dpu_hw_ctl_get_pending_flush(struct dpu_hw_ctl *ctx)
ctx               105 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	return ctx->pending_flush_mask;
ctx               108 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static inline void dpu_hw_ctl_trigger_flush(struct dpu_hw_ctl *ctx)
ctx               110 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	trace_dpu_hw_ctl_trigger_pending_flush(ctx->pending_flush_mask,
ctx               111 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 				     dpu_hw_ctl_get_flush_register(ctx));
ctx               112 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
ctx               115 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static uint32_t dpu_hw_ctl_get_bitmask_sspp(struct dpu_hw_ctl *ctx,
ctx               170 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static uint32_t dpu_hw_ctl_get_bitmask_mixer(struct dpu_hw_ctl *ctx,
ctx               203 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static int dpu_hw_ctl_get_bitmask_intf(struct dpu_hw_ctl *ctx,
ctx               225 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static u32 dpu_hw_ctl_poll_reset_status(struct dpu_hw_ctl *ctx, u32 timeout_us)
ctx               227 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               247 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static int dpu_hw_ctl_reset_control(struct dpu_hw_ctl *ctx)
ctx               249 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               251 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	pr_debug("issuing hw ctl reset for ctl:%d\n", ctx->idx);
ctx               253 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US))
ctx               259 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static int dpu_hw_ctl_wait_reset_status(struct dpu_hw_ctl *ctx)
ctx               261 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               269 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	pr_debug("hw ctl reset is set for ctl:%d\n", ctx->idx);
ctx               270 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	if (dpu_hw_ctl_poll_reset_status(ctx, DPU_REG_RESET_TIMEOUT_US)) {
ctx               271 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 		pr_err("hw recovery is not complete for ctl:%d\n", ctx->idx);
ctx               278 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static void dpu_hw_ctl_clear_all_blendstages(struct dpu_hw_ctl *ctx)
ctx               280 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               283 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	for (i = 0; i < ctx->mixer_count; i++) {
ctx               291 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static void dpu_hw_ctl_setup_blendstage(struct dpu_hw_ctl *ctx,
ctx               294 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               301 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	stages = _mixer_stages(ctx->mixer_hw_caps, ctx->mixer_count, lm);
ctx               306 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 		&ctx->mixer_hw_caps->features))
ctx               425 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c static void dpu_hw_ctl_intf_cfg(struct dpu_hw_ctl *ctx,
ctx               428 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               506 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c void dpu_hw_ctl_destroy(struct dpu_hw_ctl *ctx)
ctx               508 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	if (ctx)
ctx               509 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 		dpu_hw_blk_destroy(&ctx->base);
ctx               510 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.c 	kfree(ctx);
ctx                60 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*trigger_start)(struct dpu_hw_ctl *ctx);
ctx                68 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*trigger_pending)(struct dpu_hw_ctl *ctx);
ctx                75 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*clear_pending_flush)(struct dpu_hw_ctl *ctx);
ctx                82 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	u32 (*get_pending_flush)(struct dpu_hw_ctl *ctx);
ctx                90 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*update_pending_flush)(struct dpu_hw_ctl *ctx,
ctx                97 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*trigger_flush)(struct dpu_hw_ctl *ctx);
ctx               104 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	u32 (*get_flush_register)(struct dpu_hw_ctl *ctx);
ctx               111 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*setup_intf_cfg)(struct dpu_hw_ctl *ctx,
ctx               125 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	int (*wait_reset_status)(struct dpu_hw_ctl *ctx);
ctx               127 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	uint32_t (*get_bitmask_sspp)(struct dpu_hw_ctl *ctx,
ctx               130 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	uint32_t (*get_bitmask_mixer)(struct dpu_hw_ctl *ctx,
ctx               133 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	int (*get_bitmask_intf)(struct dpu_hw_ctl *ctx,
ctx               141 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*clear_all_blendstages)(struct dpu_hw_ctl *ctx);
ctx               149 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h 	void (*setup_blendstage)(struct dpu_hw_ctl *ctx,
ctx               204 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_ctl.h void dpu_hw_ctl_destroy(struct dpu_hw_ctl *ctx);
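The dpu_hw_ctl flow above is accumulate-then-commit: clients OR their block bits into pending_flush_mask with no register traffic, and a single write of the whole mask to CTL_FLUSH makes the frame's changes take effect together. A condensed sketch of the three ops involved:

	static void ctl_update_pending_flush(struct dpu_hw_ctl *ctx,
					     u32 flushbits)
	{
		ctx->pending_flush_mask |= flushbits;	/* software only */
	}

	static void ctl_trigger_flush(struct dpu_hw_ctl *ctx)
	{
		DPU_REG_WRITE(&ctx->hw, CTL_FLUSH, ctx->pending_flush_mask);
	}

	static void ctl_clear_pending_flush(struct dpu_hw_ctl *ctx)
	{
		ctx->pending_flush_mask = 0x0;	/* start the next frame */
	}

The get_bitmask_* helpers in the same file map a pipe, mixer or interface id to its bit in this mask, so callers never hard-code flush bits.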
ctx                81 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_intf.c static void dpu_hw_intf_setup_timing_engine(struct dpu_hw_intf *ctx,
ctx                85 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_intf.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               108 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_intf.c 	if (ctx->cap->type == INTF_EDP || ctx->cap->type == INTF_DP) {
ctx               146 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_intf.c 	if (ctx->cap->type == INTF_HDMI) {
ctx                54 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static inline int _stage_offset(struct dpu_hw_mixer *ctx, enum dpu_stage stage)
ctx                56 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	const struct dpu_lm_sub_blks *sblk = ctx->cap->sblk;
ctx                63 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static void dpu_hw_lm_setup_out(struct dpu_hw_mixer *ctx,
ctx                66 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx                83 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static void dpu_hw_lm_setup_border_color(struct dpu_hw_mixer *ctx,
ctx                87 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx                99 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static void dpu_hw_lm_setup_blend_config_sdm845(struct dpu_hw_mixer *ctx,
ctx               102 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               109 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	stage_off = _stage_offset(ctx, stage);
ctx               118 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static void dpu_hw_lm_setup_blend_config(struct dpu_hw_mixer *ctx,
ctx               121 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx               127 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	stage_off = _stage_offset(ctx, stage);
ctx               136 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c static void dpu_hw_lm_setup_color3(struct dpu_hw_mixer *ctx,
ctx               139 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.c 	struct dpu_hw_blk_reg_map *c = &ctx->hw;
ctx                35 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.h 	void (*setup_mixer_out)(struct dpu_hw_mixer *ctx,
ctx                42 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.h 	void (*setup_blend_config)(struct dpu_hw_mixer *ctx, uint32_t stage,
ctx                48 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.h 	void (*setup_alpha_out)(struct dpu_hw_mixer *ctx, uint32_t mixer_op);
ctx                53 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_lm.h 	void (*setup_border_color)(struct dpu_hw_mixer *ctx,
ctx               135 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static int _sspp_subblk_offset(struct dpu_hw_pipe *ctx,
ctx               140 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	const struct dpu_sspp_sub_blks *sblk = ctx->cap->sblk;
ctx               142 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (!ctx)
ctx               165 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_multirect(struct dpu_hw_pipe *ctx,
ctx               172 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               183 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		mode_mask = DPU_REG_READ(&ctx->hw, SSPP_MULTIRECT_OPMODE + idx);
ctx               191 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_MULTIRECT_OPMODE + idx, mode_mask);
ctx               194 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void _sspp_setup_opmode(struct dpu_hw_pipe *ctx,
ctx               200 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (!test_bit(DPU_SSPP_SCALER_QSEED2, &ctx->cap->features) ||
ctx               201 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		_sspp_subblk_offset(ctx, DPU_SSPP_SCALER_QSEED2, &idx) ||
ctx               202 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		!test_bit(DPU_SSPP_CSC, &ctx->cap->features))
ctx               205 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	opmode = DPU_REG_READ(&ctx->hw, SSPP_VIG_OP_MODE + idx);
ctx               212 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_VIG_OP_MODE + idx, opmode);
ctx               215 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void _sspp_setup_csc10_opmode(struct dpu_hw_pipe *ctx,
ctx               221 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_CSC_10BIT, &idx))
ctx               224 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	opmode = DPU_REG_READ(&ctx->hw, SSPP_VIG_CSC_10_OP_MODE + idx);
ctx               230 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_VIG_CSC_10_OP_MODE + idx, opmode);
ctx               236 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_format(struct dpu_hw_pipe *ctx,
ctx               247 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx) || !fmt)
ctx               260 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	c = &ctx->hw;
ctx               304 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 			ctx->mdp->highest_bank_bit << 18);
ctx               305 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		if (IS_UBWC_20_SUPPORTED(ctx->catalog->caps->ubwc_version)) {
ctx               308 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 					fast_clear | (ctx->mdp->ubwc_swizzle) |
ctx               309 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 					(ctx->mdp->highest_bank_bit << 4));
ctx               323 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (test_bit(DPU_SSPP_CSC, &ctx->cap->features))
ctx               324 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		_sspp_setup_opmode(ctx, VIG_OP_CSC_EN | VIG_OP_CSC_SRC_DATAFMT,
ctx               326 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	else if (test_bit(DPU_SSPP_CSC_10BIT, &ctx->cap->features))
ctx               327 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		_sspp_setup_csc10_opmode(ctx,
ctx               339 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_pe_config(struct dpu_hw_pipe *ctx,
ctx               349 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx) || !pe_ext)
ctx               352 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	c = &ctx->hw;
ctx               397 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void _dpu_hw_sspp_setup_scaler3(struct dpu_hw_pipe *ctx,
ctx               406 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SCALER_QSEED3, &idx) || !sspp
ctx               407 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		|| !scaler3_cfg || !ctx || !ctx->cap || !ctx->cap->sblk)
ctx               410 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	dpu_hw_setup_scaler3(&ctx->hw, scaler3_cfg, idx,
ctx               411 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 			ctx->cap->sblk->scaler_blk.version,
ctx               415 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static u32 _dpu_hw_sspp_get_scaler3_ver(struct dpu_hw_pipe *ctx)
ctx               419 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (!ctx || _sspp_subblk_offset(ctx, DPU_SSPP_SCALER_QSEED3, &idx))
ctx               422 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	return dpu_hw_get_scaler3_ver(&ctx->hw, idx);
ctx               428 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_rects(struct dpu_hw_pipe *ctx,
ctx               437 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx) || !cfg)
ctx               440 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	c = &ctx->hw;
ctx               497 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_sourceaddress(struct dpu_hw_pipe *ctx,
ctx               504 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               509 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 			DPU_REG_WRITE(&ctx->hw, SSPP_SRC0_ADDR + idx + i * 0x4,
ctx               512 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC0_ADDR + idx,
ctx               514 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC2_ADDR + idx,
ctx               517 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC1_ADDR + idx,
ctx               519 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC3_ADDR + idx,
ctx               524 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_csc(struct dpu_hw_pipe *ctx,
ctx               530 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_CSC, &idx) || !data)
ctx               533 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (test_bit(DPU_SSPP_CSC_10BIT, &ctx->cap->features)) {
ctx               538 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	dpu_hw_csc_setup(&ctx->hw, idx, data, csc10);
ctx               541 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_solidfill(struct dpu_hw_pipe *ctx, u32 color, enum
ctx               546 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               550 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC_CONSTANT_COLOR + idx, color);
ctx               552 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_SRC_CONSTANT_COLOR_REC1 + idx,
ctx               556 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_danger_safe_lut(struct dpu_hw_pipe *ctx,
ctx               561 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               564 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_DANGER_LUT + idx, cfg->danger_lut);
ctx               565 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_SAFE_LUT + idx, cfg->safe_lut);
ctx               568 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_creq_lut(struct dpu_hw_pipe *ctx,
ctx               573 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               576 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (ctx->cap && test_bit(DPU_SSPP_QOS_8LVL, &ctx->cap->features)) {
ctx               577 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_CREQ_LUT_0 + idx, cfg->creq_lut);
ctx               578 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_CREQ_LUT_1 + idx,
ctx               581 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		DPU_REG_WRITE(&ctx->hw, SSPP_CREQ_LUT + idx, cfg->creq_lut);
ctx               585 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_qos_ctrl(struct dpu_hw_pipe *ctx,
ctx               591 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               607 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_QOS_CTRL + idx, qos_ctrl);
ctx               610 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c static void dpu_hw_sspp_setup_cdp(struct dpu_hw_pipe *ctx,
ctx               616 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (!ctx || !cfg)
ctx               619 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
ctx               631 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	DPU_REG_WRITE(&ctx->hw, SSPP_CDP_CNTL, cdp_cntl);
ctx               726 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c void dpu_hw_sspp_destroy(struct dpu_hw_pipe *ctx)
ctx               728 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	if (ctx)
ctx               729 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 		dpu_hw_blk_destroy(&ctx->base);
ctx               730 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.c 	kfree(ctx);
ctx               232 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_format)(struct dpu_hw_pipe *ctx,
ctx               242 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_rects)(struct dpu_hw_pipe *ctx,
ctx               251 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_pe)(struct dpu_hw_pipe *ctx,
ctx               260 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_sourceaddress)(struct dpu_hw_pipe *ctx,
ctx               269 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_csc)(struct dpu_hw_pipe *ctx, struct dpu_csc_cfg *data);
ctx               278 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_solidfill)(struct dpu_hw_pipe *ctx, u32 color,
ctx               288 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_multirect)(struct dpu_hw_pipe *ctx,
ctx               297 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_sharpening)(struct dpu_hw_pipe *ctx,
ctx               306 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_danger_safe_lut)(struct dpu_hw_pipe *ctx,
ctx               315 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_creq_lut)(struct dpu_hw_pipe *ctx,
ctx               324 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_qos_ctrl)(struct dpu_hw_pipe *ctx,
ctx               332 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_histogram)(struct dpu_hw_pipe *ctx,
ctx               342 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_scaler)(struct dpu_hw_pipe *ctx,
ctx               351 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	u32 (*get_scaler_ver)(struct dpu_hw_pipe *ctx);
ctx               358 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h 	void (*setup_cdp)(struct dpu_hw_pipe *ctx,
ctx               403 drivers/gpu/drm/msm/disp/dpu1/dpu_hw_sspp.h void dpu_hw_sspp_destroy(struct dpu_hw_pipe *ctx);
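Nearly every dpu_hw_sspp op above opens with the same guard: _sspp_subblk_offset() resolves the sub-block's register offset from the catalog and fails when the pipe lacks that feature, so the op degrades to a silent no-op on hardware without it. A condensed sketch (sspp_setup_example is a hypothetical op name):

	static void sspp_setup_example(struct dpu_hw_pipe *ctx, u32 color)
	{
		u32 idx;

		if (_sspp_subblk_offset(ctx, DPU_SSPP_SRC, &idx))
			return;		/* sub-block absent on this pipe */

		DPU_REG_WRITE(&ctx->hw, SSPP_SRC_CONSTANT_COLOR + idx, color);
	}

Keeping the capability check inside each op lets the plane code call the function table unconditionally instead of sprinkling feature tests through every caller.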
ctx               591 drivers/gpu/drm/msm/msm_drv.c 	struct msm_file_private *ctx;
ctx               593 drivers/gpu/drm/msm/msm_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               594 drivers/gpu/drm/msm/msm_drv.c 	if (!ctx)
ctx               597 drivers/gpu/drm/msm/msm_drv.c 	msm_submitqueue_init(dev, ctx);
ctx               599 drivers/gpu/drm/msm/msm_drv.c 	ctx->aspace = priv->gpu ? priv->gpu->aspace : NULL;
ctx               600 drivers/gpu/drm/msm/msm_drv.c 	file->driver_priv = ctx;
ctx               615 drivers/gpu/drm/msm/msm_drv.c static void context_close(struct msm_file_private *ctx)
ctx               617 drivers/gpu/drm/msm/msm_drv.c 	msm_submitqueue_close(ctx);
ctx               618 drivers/gpu/drm/msm/msm_drv.c 	kfree(ctx);
ctx               624 drivers/gpu/drm/msm/msm_drv.c 	struct msm_file_private *ctx = file->driver_priv;
ctx               627 drivers/gpu/drm/msm/msm_drv.c 	if (ctx == priv->lastctx)
ctx               631 drivers/gpu/drm/msm/msm_drv.c 	context_close(ctx);
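The msm_drv.c entries above show the per-file context lifecycle: open allocates the context, initialises its submitqueues, and parks it in file->driver_priv; close clears priv->lastctx when it still points at this context, so the submit-path shortcut can never compare against freed memory. A condensed sketch of the pair (demo_* names are hypothetical; the locking msm takes around the lastctx clear is elided):

	static int demo_open(struct drm_device *dev, struct drm_file *file)
	{
		struct msm_file_private *ctx;

		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;

		msm_submitqueue_init(dev, ctx);
		file->driver_priv = ctx;
		return 0;
	}

	static void demo_postclose(struct drm_device *dev,
				   struct drm_file *file)
	{
		struct msm_drm_private *priv = dev->dev_private;
		struct msm_file_private *ctx = file->driver_priv;

		if (ctx == priv->lastctx)
			priv->lastctx = NULL;	/* drop the stale shortcut */

		msm_submitqueue_close(ctx);
		kfree(ctx);
	}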
ctx               414 drivers/gpu/drm/msm/msm_drv.h int msm_submitqueue_init(struct drm_device *drm, struct msm_file_private *ctx);
ctx               415 drivers/gpu/drm/msm/msm_drv.h struct msm_gpu_submitqueue *msm_submitqueue_get(struct msm_file_private *ctx,
ctx               417 drivers/gpu/drm/msm/msm_drv.h int msm_submitqueue_create(struct drm_device *drm, struct msm_file_private *ctx,
ctx               419 drivers/gpu/drm/msm/msm_drv.h int msm_submitqueue_query(struct drm_device *drm, struct msm_file_private *ctx,
ctx               421 drivers/gpu/drm/msm/msm_drv.h int msm_submitqueue_remove(struct msm_file_private *ctx, u32 id);
ctx               422 drivers/gpu/drm/msm/msm_drv.h void msm_submitqueue_close(struct msm_file_private *ctx);
ctx               403 drivers/gpu/drm/msm/msm_gem_submit.c 	struct msm_file_private *ctx = file->driver_priv;
ctx               431 drivers/gpu/drm/msm/msm_gem_submit.c 	queue = msm_submitqueue_get(ctx, args->queueid);
ctx               475 drivers/gpu/drm/msm/msm_gem_submit.c 	submit = submit_create(dev, gpu, ctx->aspace, queue, args->nr_bos,
ctx               580 drivers/gpu/drm/msm/msm_gem_submit.c 	msm_gpu_submit(gpu, submit, ctx);
ctx               730 drivers/gpu/drm/msm/msm_gpu.c 		struct msm_file_private *ctx)
ctx               770 drivers/gpu/drm/msm/msm_gpu.c 	gpu->funcs->submit(gpu, submit, ctx);
ctx               771 drivers/gpu/drm/msm/msm_gpu.c 	priv->lastctx = ctx;
ctx                49 drivers/gpu/drm/msm/msm_gpu.h 			struct msm_file_private *ctx);
ctx               276 drivers/gpu/drm/msm/msm_gpu.h 		struct msm_file_private *ctx);
ctx                18 drivers/gpu/drm/msm/msm_submitqueue.c struct msm_gpu_submitqueue *msm_submitqueue_get(struct msm_file_private *ctx,
ctx                23 drivers/gpu/drm/msm/msm_submitqueue.c 	if (!ctx)
ctx                26 drivers/gpu/drm/msm/msm_submitqueue.c 	read_lock(&ctx->queuelock);
ctx                28 drivers/gpu/drm/msm/msm_submitqueue.c 	list_for_each_entry(entry, &ctx->submitqueues, node) {
ctx                31 drivers/gpu/drm/msm/msm_submitqueue.c 			read_unlock(&ctx->queuelock);
ctx                37 drivers/gpu/drm/msm/msm_submitqueue.c 	read_unlock(&ctx->queuelock);
ctx                41 drivers/gpu/drm/msm/msm_submitqueue.c void msm_submitqueue_close(struct msm_file_private *ctx)
ctx                45 drivers/gpu/drm/msm/msm_submitqueue.c 	if (!ctx)
ctx                52 drivers/gpu/drm/msm/msm_submitqueue.c 	list_for_each_entry_safe(entry, tmp, &ctx->submitqueues, node)
ctx                56 drivers/gpu/drm/msm/msm_submitqueue.c int msm_submitqueue_create(struct drm_device *drm, struct msm_file_private *ctx,
ctx                62 drivers/gpu/drm/msm/msm_submitqueue.c 	if (!ctx)
ctx                80 drivers/gpu/drm/msm/msm_submitqueue.c 	write_lock(&ctx->queuelock);
ctx                82 drivers/gpu/drm/msm/msm_submitqueue.c 	queue->id = ctx->queueid++;
ctx                87 drivers/gpu/drm/msm/msm_submitqueue.c 	list_add_tail(&queue->node, &ctx->submitqueues);
ctx                89 drivers/gpu/drm/msm/msm_submitqueue.c 	write_unlock(&ctx->queuelock);
ctx                94 drivers/gpu/drm/msm/msm_submitqueue.c int msm_submitqueue_init(struct drm_device *drm, struct msm_file_private *ctx)
ctx                99 drivers/gpu/drm/msm/msm_submitqueue.c 	if (!ctx)
ctx               109 drivers/gpu/drm/msm/msm_submitqueue.c 	INIT_LIST_HEAD(&ctx->submitqueues);
ctx               111 drivers/gpu/drm/msm/msm_submitqueue.c 	rwlock_init(&ctx->queuelock);
ctx               113 drivers/gpu/drm/msm/msm_submitqueue.c 	return msm_submitqueue_create(drm, ctx, default_prio, 0, NULL);
ctx               136 drivers/gpu/drm/msm/msm_submitqueue.c int msm_submitqueue_query(struct drm_device *drm, struct msm_file_private *ctx,
ctx               145 drivers/gpu/drm/msm/msm_submitqueue.c 	queue = msm_submitqueue_get(ctx, args->id);
ctx               157 drivers/gpu/drm/msm/msm_submitqueue.c int msm_submitqueue_remove(struct msm_file_private *ctx, u32 id)
ctx               161 drivers/gpu/drm/msm/msm_submitqueue.c 	if (!ctx)
ctx               171 drivers/gpu/drm/msm/msm_submitqueue.c 	write_lock(&ctx->queuelock);
ctx               173 drivers/gpu/drm/msm/msm_submitqueue.c 	list_for_each_entry(entry, &ctx->submitqueues, node) {
ctx               176 drivers/gpu/drm/msm/msm_submitqueue.c 			write_unlock(&ctx->queuelock);
ctx               183 drivers/gpu/drm/msm/msm_submitqueue.c 	write_unlock(&ctx->queuelock);
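msm_submitqueue.c above protects the per-file queue list with a rwlock: lookups walk it under read_lock() and take a kref before dropping the lock, while create and remove mutate it under write_lock(). A condensed sketch of the lookup side, which is the subtle half: the reference must be pinned before the lock is released, or the queue could be freed underneath the caller (the kref field name follows the msm struct):

	#include <linux/kref.h>
	#include <linux/list.h>
	#include <linux/spinlock.h>

	static struct msm_gpu_submitqueue *
	demo_queue_get(struct msm_file_private *ctx, u32 id)
	{
		struct msm_gpu_submitqueue *entry;

		if (!ctx)
			return NULL;

		read_lock(&ctx->queuelock);

		list_for_each_entry(entry, &ctx->submitqueues, node) {
			if (entry->id == id) {
				kref_get(&entry->ref);	/* pin under lock */
				read_unlock(&ctx->queuelock);
				return entry;
			}
		}

		read_unlock(&ctx->queuelock);
		return NULL;
	}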
ctx               797 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		  struct drm_modeset_acquire_ctx *ctx)
ctx              1141 drivers/gpu/drm/nouveau/dispnv04/crtc.c 		    struct drm_modeset_acquire_ctx *ctx)
ctx               117 drivers/gpu/drm/nouveau/dispnv04/overlay.c 		  struct drm_modeset_acquire_ctx *ctx)
ctx               191 drivers/gpu/drm/nouveau/dispnv04/overlay.c 		   struct drm_modeset_acquire_ctx *ctx)
ctx               366 drivers/gpu/drm/nouveau/dispnv04/overlay.c 		  struct drm_modeset_acquire_ctx *ctx)
ctx               440 drivers/gpu/drm/nouveau/dispnv04/overlay.c 		   struct drm_modeset_acquire_ctx *ctx)
ctx               581 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_operation_ctx ctx = { interruptible, no_wait_gpu };
ctx               584 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_validate(&nvbo->bo, &nvbo->placement, &ctx);
ctx              1234 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_operation_ctx ctx = { intr, no_wait_gpu };
ctx              1249 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &tmp_reg, &ctx);
ctx              1253 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_tt_bind(bo->ttm, &tmp_reg, &ctx);
ctx              1261 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_move_ttm(bo, &ctx, new_reg);
ctx              1271 drivers/gpu/drm/nouveau/nouveau_bo.c 	struct ttm_operation_ctx ctx = { intr, no_wait_gpu };
ctx              1286 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &tmp_reg, &ctx);
ctx              1290 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_move_ttm(bo, &ctx, &tmp_reg);
ctx              1364 drivers/gpu/drm/nouveau/nouveau_bo.c 		struct ttm_operation_ctx *ctx,
ctx              1373 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx              1398 drivers/gpu/drm/nouveau/nouveau_bo.c 						    ctx->interruptible,
ctx              1399 drivers/gpu/drm/nouveau/nouveau_bo.c 						    ctx->no_wait_gpu, new_reg);
ctx              1402 drivers/gpu/drm/nouveau/nouveau_bo.c 						    ctx->interruptible,
ctx              1403 drivers/gpu/drm/nouveau/nouveau_bo.c 						    ctx->no_wait_gpu, new_reg);
ctx              1406 drivers/gpu/drm/nouveau/nouveau_bo.c 						   ctx->interruptible,
ctx              1407 drivers/gpu/drm/nouveau/nouveau_bo.c 						   ctx->no_wait_gpu, new_reg);
ctx              1413 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx              1415 drivers/gpu/drm/nouveau/nouveau_bo.c 		ret = ttm_bo_move_memcpy(bo, ctx, new_reg);
ctx              1584 drivers/gpu/drm/nouveau/nouveau_bo.c nouveau_ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ctx              1609 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ttm_agp_tt_populate(ttm, ctx);
ctx              1615 drivers/gpu/drm/nouveau/nouveau_bo.c 		return ttm_dma_populate((void *)ttm, dev, ctx);
ctx              1619 drivers/gpu/drm/nouveau/nouveau_bo.c 	r = ttm_pool_populate(ttm, ctx);
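The nouveau_bo.c entries above show TTM's ttm_operation_ctx in action: the old (interruptible, no_wait_gpu) flag pair is packed into a small on-stack struct and threaded through the validate, move and populate paths as one argument. A minimal sketch of the calling convention (demo_validate is a hypothetical wrapper):

	static int demo_validate(struct ttm_buffer_object *bo,
				 struct ttm_placement *placement,
				 bool intr, bool no_wait_gpu)
	{
		struct ttm_operation_ctx ctx = { intr, no_wait_gpu };

		return ttm_bo_validate(bo, placement, &ctx);
	}

Passing a struct instead of loose booleans lets TTM grow new per-operation fields without touching every caller's signature.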
ctx                36 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c nv40_fifo_dma_engine(struct nvkm_engine *engine, u32 *reg, u32 *ctx)
ctx                44 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 		*ctx = 0x38;
ctx                50 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 		*ctx = 0x54;
ctx                67 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	u32 reg, ctx;
ctx                70 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	if (!nv40_fifo_dma_engine(engine, &reg, &ctx))
ctx                80 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	nvkm_wo32(imem->ramfc, chan->ramfc + ctx, 0x00000000);
ctx                97 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	u32 inst, reg, ctx;
ctx               100 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	if (!nv40_fifo_dma_engine(engine, &reg, &ctx))
ctx               111 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	nvkm_wo32(imem->ramfc, chan->ramfc + ctx, inst);
ctx               134 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	u32 reg, ctx;
ctx               136 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c 	if (!nv40_fifo_dma_engine(engine, &reg, &ctx))
ctx              1447 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	struct nvkm_vma *ctx = NULL;
ctx              1501 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	ret = nvkm_vmm_get(vmm, 0, nvkm_memory_size(data), &ctx);
ctx              1505 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	ret = nvkm_memory_map(data, 0, vmm, ctx, NULL, 0);
ctx              1512 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	nvkm_wo32(inst, 0x0210, lower_32_bits(ctx->addr + CB_RESERVED) | 4);
ctx              1513 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	nvkm_wo32(inst, 0x0214, upper_32_bits(ctx->addr + CB_RESERVED));
ctx              1520 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	info.addr = ctx->addr;
ctx              1573 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxgf100.c 	nvkm_vmm_put(vmm, &ctx);
ctx               159 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_gr_construct_general(struct nvkm_grctx *ctx)
ctx               161 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_device *device = ctx->device;
ctx               164 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x4000a4, 1);
ctx               165 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x4000a4, 0x00000008);
ctx               166 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400144, 58);
ctx               167 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400144, 0x00000001);
ctx               168 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400314, 1);
ctx               169 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400314, 0x00000000);
ctx               170 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400400, 10);
ctx               171 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400480, 10);
ctx               172 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400500, 19);
ctx               173 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400514, 0x00040000);
ctx               174 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400524, 0x55555555);
ctx               175 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400528, 0x55555555);
ctx               176 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x40052c, 0x55555555);
ctx               177 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400530, 0x55555555);
ctx               178 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400560, 6);
ctx               179 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400568, 0x0000ffff);
ctx               180 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x40056c, 0x0000ffff);
ctx               181 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x40057c, 5);
ctx               182 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400710, 3);
ctx               183 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400710, 0x20010001);
ctx               184 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400714, 0x0f73ef00);
ctx               185 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400724, 1);
ctx               186 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400724, 0x02008821);
ctx               187 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400770, 3);
ctx               189 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400814, 4);
ctx               190 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400828, 5);
ctx               191 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400840, 5);
ctx               192 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400850, 0x00000040);
ctx               193 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400858, 4);
ctx               194 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400858, 0x00000040);
ctx               195 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x40085c, 0x00000040);
ctx               196 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400864, 0x80000000);
ctx               197 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x40086c, 9);
ctx               198 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x40086c, 0x80000000);
ctx               199 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400870, 0x80000000);
ctx               200 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400874, 0x80000000);
ctx               201 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400878, 0x80000000);
ctx               202 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400888, 0x00000040);
ctx               203 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x40088c, 0x80000000);
ctx               204 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4009c0, 8);
ctx               205 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x4009cc, 0x80000000);
ctx               206 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x4009dc, 0x80000000);
ctx               208 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400840, 20);
ctx               209 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		if (nv44_gr_class(ctx->device)) {
ctx               211 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 				gr_def(ctx, 0x400860 + (i * 4), 0x00000001);
ctx               213 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400880, 0x00000040);
ctx               214 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400884, 0x00000040);
ctx               215 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400888, 0x00000040);
ctx               216 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x400894, 11);
ctx               217 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x400894, 0x00000040);
ctx               218 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		if (!nv44_gr_class(ctx->device)) {
ctx               220 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 				gr_def(ctx, 0x4008a0 + (i * 4), 0x80000000);
ctx               222 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4008e0, 2);
ctx               223 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4008f8, 2);
ctx               226 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x4009f8, 1);
ctx               228 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400a00, 73);
ctx               229 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400b0c, 0x0b0b0b0c);
ctx               230 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401000, 4);
ctx               231 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x405004, 1);
ctx               236 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x403448, 1);
ctx               237 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x403448, 0x00001010);
ctx               240 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x403440, 1);
ctx               243 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x403440, 0x00000010);
ctx               248 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x403440, 0x00003010);
ctx               257 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x403440, 0x00001010);
ctx               265 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_gr_construct_state3d(struct nvkm_grctx *ctx)
ctx               267 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_device *device = ctx->device;
ctx               271 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401880, 51);
ctx               272 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401940, 0x00000100);
ctx               276 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401880, 32);
ctx               278 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x401880 + (i * 4), 0x00000111);
ctx               280 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x401900, 16);
ctx               281 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401940, 3);
ctx               283 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x40194c, 18);
ctx               284 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401954, 0x00000111);
ctx               285 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401958, 0x00080060);
ctx               286 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401974, 0x00000080);
ctx               287 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401978, 0xffff0000);
ctx               288 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x40197c, 0x00000001);
ctx               289 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401990, 0x46400000);
ctx               291 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4019a0, 2);
ctx               292 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4019ac, 5);
ctx               294 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4019a0, 1);
ctx               295 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4019b4, 3);
ctx               297 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x4019bc, 0xffff0000);
ctx               303 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4019c0, 18);
ctx               305 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x4019c0 + (i * 4), 0x88888888);
ctx               308 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401a08, 8);
ctx               309 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401a10, 0x0fff0000);
ctx               310 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401a14, 0x0fff0000);
ctx               311 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401a1c, 0x00011100);
ctx               312 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401a2c, 4);
ctx               313 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401a44, 26);
ctx               315 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401a44 + (i * 4), 0x07ff0000);
ctx               316 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401a8c, 0x4b7fffff);
ctx               318 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401ab8, 3);
ctx               320 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401ab8, 1);
ctx               321 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401ac0, 1);
ctx               323 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401ad0, 8);
ctx               324 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401ad0, 0x30201000);
ctx               325 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401ad4, 0x70605040);
ctx               326 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401ad8, 0xb8a89888);
ctx               327 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401adc, 0xf8e8d8c8);
ctx               328 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401b10, device->chipset == 0x40 ? 2 : 1);
ctx               329 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b10, 0x40100000);
ctx               330 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401b18, device->chipset == 0x40 ? 6 : 5);
ctx               331 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b28, device->chipset == 0x40 ?
ctx               333 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401b30, 25);
ctx               334 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b34, 0x0000ffff);
ctx               335 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b68, 0x435185d6);
ctx               336 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b6c, 0x2155b699);
ctx               337 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b70, 0xfedcba98);
ctx               338 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b74, 0x00000098);
ctx               339 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b84, 0xffffffff);
ctx               340 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b88, 0x00ff7000);
ctx               341 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b8c, 0x0000ffff);
ctx               344 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x401b94, 1);
ctx               345 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401b98, 8);
ctx               346 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401b9c, 0x00ff0000);
ctx               347 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401bc0, 9);
ctx               348 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x401be0, 0x00ffff00);
ctx               349 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x401c00, 192);
ctx               351 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401c40 + (i * 4), 0x00018488);
ctx               352 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401c80 + (i * 4), 0x00028202);
ctx               353 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401d00 + (i * 4), 0x0000aae4);
ctx               354 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401d40 + (i * 4), 0x01012000);
ctx               355 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401d80 + (i * 4), 0x00080008);
ctx               356 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401e00 + (i * 4), 0x00100008);
ctx               359 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401e90 + (i * 4), 0x0001bc80);
ctx               360 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401ea0 + (i * 4), 0x00000202);
ctx               361 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401ec0 + (i * 4), 0x00000008);
ctx               362 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x401ee0 + (i * 4), 0x00080008);
ctx               364 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400f5c, 3);
ctx               365 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x400f5c, 0x00000002);
ctx               366 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x400f84, 1);
ctx               370 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_gr_construct_state3d_2(struct nvkm_grctx *ctx)
ctx               372 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_device *device = ctx->device;
ctx               375 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402000, 1);
ctx               376 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402404, device->chipset == 0x40 ? 1 : 2);
ctx               379 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402404, 0x00000001);
ctx               384 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402404, 0x00000020);
ctx               389 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402404, 0x00000421);
ctx               392 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402404, 0x00000021);
ctx               395 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402408, 0x030c30c3);
ctx               403 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402440, 1);
ctx               404 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402440, 0x00011001);
ctx               409 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402480, device->chipset == 0x40 ? 8 : 9);
ctx               410 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x402488, 0x3e020200);
ctx               411 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x40248c, 0x00ffffff);
ctx               414 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402490, 0x60103f00);
ctx               417 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402490, 0x40103f00);
ctx               423 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402490, 0x20103f00);
ctx               426 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402490, 0x0c103f00);
ctx               429 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x40249c, device->chipset <= 0x43 ?
ctx               431 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402500, 31);
ctx               432 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x402530, 0x00008100);
ctx               434 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x40257c, 6);
ctx               435 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402594, 16);
ctx               436 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402800, 17);
ctx               437 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x402800, 0x00000001);
ctx               442 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402864, 1);
ctx               443 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402864, 0x00001001);
ctx               444 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402870, 3);
ctx               445 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402878, 0x00000003);
ctx               447 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402900, 1);
ctx               448 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402940, 1);
ctx               449 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402980, 1);
ctx               450 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x4029c0, 1);
ctx               451 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402a00, 1);
ctx               452 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402a40, 1);
ctx               453 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402a80, 1);
ctx               454 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402ac0, 1);
ctx               458 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402844, 1);
ctx               459 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402844, 0x00000001);
ctx               460 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402850, 1);
ctx               463 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402844, 1);
ctx               464 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402844, 0x00001001);
ctx               465 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402850, 2);
ctx               466 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402854, 0x00000003);
ctx               470 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x402c00, 4);
ctx               471 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x402c00, device->chipset == 0x40 ?
ctx               477 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402c20, 40);
ctx               479 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x402c40 + (i * 4), 0xffffffff);
ctx               480 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x4030b8, 13);
ctx               481 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x4030dc, 0x00000005);
ctx               482 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x4030e8, 0x0000ffff);
ctx               485 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402c10, 4);
ctx               487 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402c20, 36);
ctx               490 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402c20, 24);
ctx               493 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402c20, 16);
ctx               495 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			cp_ctx(ctx, 0x402c20, 8);
ctx               496 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x402cb0, device->chipset == 0x40 ? 12 : 13);
ctx               497 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x402cd4, 0x00000005);
ctx               499 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 			gr_def(ctx, 0x402ce0, 0x0000ffff);
ctx               503 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x403400, device->chipset == 0x40 ? 4 : 3);
ctx               504 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x403410, device->chipset == 0x40 ? 4 : 3);
ctx               505 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x403420, nv40_gr_vs_count(ctx->device));
ctx               506 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	for (i = 0; i < nv40_gr_vs_count(ctx->device); i++)
ctx               507 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x403420 + (i * 4), 0x00005555);
ctx               510 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x403600, 1);
ctx               511 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x403600, 0x00000001);
ctx               513 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x403800, 1);
ctx               515 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x403c18, 1);
ctx               516 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	gr_def(ctx, 0x403c18, 0x00000001);
ctx               522 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x405018, 1);
ctx               523 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x405018, 0x08e00001);
ctx               524 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x405c24, 1);
ctx               525 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		gr_def(ctx, 0x405c24, 0x000e3000);
ctx               529 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 		cp_ctx(ctx, 0x405800, 11);
ctx               530 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_ctx(ctx, 0x407000, 1);
ctx               534 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_gr_construct_state3d_3(struct nvkm_grctx *ctx)
ctx               536 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	int len = nv44_gr_class(ctx->device) ? 0x0084 : 0x0684;
ctx               538 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x300000);
ctx               539 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_lsr (ctx, len - 4);
ctx               540 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, SWAP_DIRECTION, SAVE, cp_swap_state3d_3_is_save);
ctx               541 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_lsr (ctx, len);
ctx               542 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_swap_state3d_3_is_save);
ctx               543 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x800001);
ctx               545 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	ctx->ctxvals_pos += len;
ctx               549 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_gr_construct_shader(struct nvkm_grctx *ctx)
ctx               551 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_device *device = ctx->device;
ctx               552 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_gpuobj *obj = ctx->data;
ctx               556 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	vs_nr    = nv40_gr_vs_count(ctx->device);
ctx               574 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_lsr(ctx, vs_len * vs_nr + 0x300/4);
ctx               575 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out(ctx, nv44_gr_class(device) ? 0x800029 : 0x800041);
ctx               577 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	offset = ctx->ctxvals_pos;
ctx               578 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	ctx->ctxvals_pos += (0x0300/4 + (vs_nr * vs_len));
ctx               580 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	if (ctx->mode != NVKM_GRCTX_VALS)
ctx               596 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c nv40_grctx_generate(struct nvkm_grctx *ctx)
ctx               599 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save);
ctx               600 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save);
ctx               602 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_check_load);
ctx               603 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load);
ctx               604 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, USER_LOAD, PENDING, cp_setup_load);
ctx               605 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, ALWAYS, TRUE, cp_exit);
ctx               608 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_setup_auto_load);
ctx               609 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_wait(ctx, STATUS, IDLE);
ctx               610 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, CP_NEXT_TO_SWAP);
ctx               611 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_setup_load);
ctx               612 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_wait(ctx, STATUS, IDLE);
ctx               613 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_set (ctx, SWAP_DIRECTION, LOAD);
ctx               614 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x00910880); /* ?? */
ctx               615 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x00901ffe); /* ?? */
ctx               616 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x01940000); /* ?? */
ctx               617 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_lsr (ctx, 0x20);
ctx               618 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x0060000b); /* ?? */
ctx               619 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_wait(ctx, UNK57, CLEAR);
ctx               620 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, 0x0060000c); /* ?? */
ctx               621 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, ALWAYS, TRUE, cp_swap_state);
ctx               624 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_setup_save);
ctx               625 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_set (ctx, SWAP_DIRECTION, SAVE);
ctx               628 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_swap_state);
ctx               629 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_pos (ctx, 0x00020/4);
ctx               630 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_gr_construct_general(ctx);
ctx               631 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_wait(ctx, STATUS, IDLE);
ctx               634 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, UNK54, CLEAR, cp_prepare_exit);
ctx               635 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_gr_construct_state3d(ctx);
ctx               636 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_wait(ctx, STATUS, IDLE);
ctx               639 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_gr_construct_state3d_2(ctx);
ctx               642 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_gr_construct_state3d_3(ctx);
ctx               645 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_pos (ctx, ctx->ctxvals_pos);
ctx               646 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_gr_construct_shader(ctx);
ctx               649 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_prepare_exit);
ctx               650 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, SWAP_DIRECTION, SAVE, cp_check_load);
ctx               651 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_bra (ctx, USER_SAVE, PENDING, cp_exit);
ctx               652 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, CP_NEXT_TO_CURRENT);
ctx               654 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_name(ctx, cp_exit);
ctx               655 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_set (ctx, USER_SAVE, NOT_PENDING);
ctx               656 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_set (ctx, USER_LOAD, NOT_PENDING);
ctx               657 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	cp_out (ctx, CP_END);
ctx               674 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	struct nvkm_grctx ctx = {
ctx               684 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	nv40_grctx_generate(&ctx);
ctx               687 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	for (i = 0; i < ctx.ctxprog_len; i++)
ctx               689 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.c 	*size = ctx.ctxvals_pos * 4;
ctx                25 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h cp_out(struct nvkm_grctx *ctx, u32 inst)
ctx                27 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	u32 *ctxprog = ctx->ucode;
ctx                29 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	if (ctx->mode != NVKM_GRCTX_PROG)
ctx                32 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	BUG_ON(ctx->ctxprog_len == ctx->ctxprog_max);
ctx                33 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctxprog[ctx->ctxprog_len++] = inst;
ctx                37 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h cp_lsr(struct nvkm_grctx *ctx, u32 val)
ctx                39 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_LOAD_SR | val);
ctx                43 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h cp_ctx(struct nvkm_grctx *ctx, u32 reg, u32 length)
ctx                45 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxprog_reg = (reg - 0x00400000) >> 2;
ctx                47 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxvals_base = ctx->ctxvals_pos;
ctx                48 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxvals_pos = ctx->ctxvals_base + length;
ctx                51 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 		cp_lsr(ctx, length);
ctx                55 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_CTX | (length << CP_CTX_COUNT_SHIFT) | ctx->ctxprog_reg);
ctx                59 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h cp_name(struct nvkm_grctx *ctx, int name)
ctx                61 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	u32 *ctxprog = ctx->ucode;
ctx                64 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	if (ctx->mode != NVKM_GRCTX_PROG)
ctx                67 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxprog_label[name] = ctx->ctxprog_len;
ctx                68 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	for (i = 0; i < ctx->ctxprog_len; i++) {
ctx                74 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 			     (ctx->ctxprog_len << CP_BRA_IP_SHIFT);
ctx                79 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h _cp_bra(struct nvkm_grctx *ctx, u32 mod, int flag, int state, int name)
ctx                84 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 		ip = ctx->ctxprog_label[name] << CP_BRA_IP_SHIFT;
ctx                89 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_BRA | (mod << 18) | ip | flag |
ctx                97 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h _cp_wait(struct nvkm_grctx *ctx, int flag, int state)
ctx                99 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_WAIT | flag | (state ? CP_WAIT_SET : 0));
ctx               104 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h _cp_set(struct nvkm_grctx *ctx, int flag, int state)
ctx               106 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_SET | flag | (state ? CP_SET_1 : 0));
ctx               111 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h cp_pos(struct nvkm_grctx *ctx, int offset)
ctx               113 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxvals_pos = offset;
ctx               114 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	ctx->ctxvals_base = ctx->ctxvals_pos;
ctx               116 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_lsr(ctx, ctx->ctxvals_pos);
ctx               117 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	cp_out(ctx, CP_SET_CONTEXT_POINTER);
ctx               121 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h gr_def(struct nvkm_grctx *ctx, u32 reg, u32 val)
ctx               123 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	if (ctx->mode != NVKM_GRCTX_VALS)
ctx               127 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	reg = (reg - ctx->ctxprog_reg) + ctx->ctxvals_base;
ctx               129 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv40.h 	nvkm_wo32(ctx->data, reg * 4, val);
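
The ctxnv40.h helpers cross-referenced above implement a two-pass builder: in NVKM_GRCTX_PROG mode cp_out()/cp_ctx() append ctxprog microcode words, while in NVKM_GRCTX_VALS mode gr_def() writes default register values at the image offsets the same cp_ctx() calls laid out. Below is a minimal standalone model of that contract, assembled from the fragments above; the types and the CP_CTX encoding constant are simplified stand-ins, not the kernel's.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

enum mode { MODE_PROG, MODE_VALS };

struct grctx {
	enum mode mode;
	uint32_t ucode[64];		/* ctxprog under construction */
	int ctxprog_len;
	uint32_t ctxprog_reg;		/* base of the current CP_CTX window */
	int ctxvals_base, ctxvals_pos;	/* cursor into the context image */
	uint32_t image[64];		/* stands in for ctx->data */
};

/* PROG pass only: append one microcode word, as cp_out() does. */
static void cp_out(struct grctx *c, uint32_t inst)
{
	if (c->mode == MODE_PROG)
		c->ucode[c->ctxprog_len++] = inst;
}

/* Reserve `len` image words for the MMIO window at `reg`; the real
 * helper also spills oversized counts through CP_LOAD_SR, omitted here. */
static void cp_ctx(struct grctx *c, uint32_t reg, uint32_t len)
{
	c->ctxprog_reg = (reg - 0x00400000) >> 2;
	c->ctxvals_base = c->ctxvals_pos;
	c->ctxvals_pos += len;
	cp_out(c, 0x00200000 | (len << 16) | c->ctxprog_reg); /* fake CP_CTX */
}

/* VALS pass only: seed a register inside the current window, as gr_def()
 * does via nvkm_wo32(). */
static void gr_def(struct grctx *c, uint32_t reg, uint32_t val)
{
	if (c->mode != MODE_VALS)
		return;
	reg = (reg - 0x00400000) >> 2;
	c->image[reg - c->ctxprog_reg + c->ctxvals_base] = val;
}

/* Mirrors the 0x401ad0 window shown above for ctxnv40.c. */
static void build(struct grctx *c)
{
	cp_ctx(c, 0x401ad0, 8);
	gr_def(c, 0x401ad0, 0x30201000);
	gr_def(c, 0x401ad4, 0x70605040);
}

int main(void)
{
	struct grctx prog = { .mode = MODE_PROG };
	struct grctx vals = { .mode = MODE_VALS };

	build(&prog);	/* pass 1: one CP_CTX-like word emitted */
	build(&vals);	/* pass 2: image[0..1] seeded with defaults */
	printf("%d word(s), image[0]=%08" PRIx32 "\n",
	       prog.ctxprog_len, vals.image[0]);
	return 0;
}

Running both passes over the same builder keeps ctxprog windows and image offsets in lockstep, which is why every cp_ctx()/gr_def() pair in ctxnv40.c above reads as "reserve window, then seed defaults".
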
ctx               170 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_mmio(struct nvkm_grctx *ctx);
ctx               171 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_xfer1(struct nvkm_grctx *ctx);
ctx               172 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_xfer2(struct nvkm_grctx *ctx);
ctx               177 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_grctx_generate(struct nvkm_grctx *ctx)
ctx               179 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, STATE, RUNNING);
ctx               180 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, XFER_SWITCH, ENABLE);
ctx               182 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save);
ctx               183 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save);
ctx               185 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_check_load);
ctx               186 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load);
ctx               187 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, USER_LOAD, PENDING, cp_setup_load);
ctx               188 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, ALWAYS, TRUE, cp_prepare_exit);
ctx               191 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_setup_auto_load);
ctx               192 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_DISABLE1);
ctx               193 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_DISABLE2);
ctx               194 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_ENABLE);
ctx               195 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_NEXT_TO_SWAP);
ctx               196 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK01, SET);
ctx               197 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_setup_load);
ctx               198 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_NEWCTX);
ctx               199 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, NEWCTX, BUSY);
ctx               200 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK1D, CLEAR);
ctx               201 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, SWAP_DIRECTION, LOAD);
ctx               202 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, UNK0B, SET, cp_prepare_exit);
ctx               203 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, ALWAYS, TRUE, cp_swap_state);
ctx               206 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_setup_save);
ctx               207 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK1D, SET);
ctx               208 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, STATUS, BUSY);
ctx               209 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, INTR, PENDING);
ctx               210 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, STATUS, BUSY, cp_setup_save);
ctx               211 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK01, SET);
ctx               212 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, SWAP_DIRECTION, SAVE);
ctx               215 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_swap_state);
ctx               216 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK03, SET);
ctx               217 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_pos (ctx, 0x00004/4);
ctx               218 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx (ctx, 0x400828, 1); /* needed. otherwise, flickering happens. */
ctx               219 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_pos (ctx, 0x00100/4);
ctx               220 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	nv50_gr_construct_mmio(ctx);
ctx               221 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	nv50_gr_construct_xfer1(ctx);
ctx               222 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	nv50_gr_construct_xfer2(ctx);
ctx               224 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, SWAP_DIRECTION, SAVE, cp_check_load);
ctx               226 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK20, SET);
ctx               227 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, SWAP_DIRECTION, SAVE); /* no idea why this is needed, but fixes at least one lockup. */
ctx               228 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, ctx->ctxvals_base);
ctx               229 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SET_XFER_POINTER);
ctx               230 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, 4);
ctx               231 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SEEK_1);
ctx               232 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_XFER_1);
ctx               233 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, XFER, BUSY);
ctx               236 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_prepare_exit);
ctx               237 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK01, CLEAR);
ctx               238 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK03, CLEAR);
ctx               239 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, UNK1D, CLEAR);
ctx               241 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_bra (ctx, USER_SAVE, PENDING, cp_exit);
ctx               242 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_NEXT_TO_CURRENT);
ctx               244 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_name(ctx, cp_exit);
ctx               245 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, USER_SAVE, NOT_PENDING);
ctx               246 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, USER_LOAD, NOT_PENDING);
ctx               247 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, XFER_SWITCH, DISABLE);
ctx               248 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_set (ctx, STATE, STOPPED);
ctx               249 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_END);
ctx               250 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos += 0x400; /* padding... no idea why you need it */
ctx               269 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_grctx ctx = {
ctx               278 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	nv50_grctx_generate(&ctx);
ctx               281 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	for (i = 0; i < ctx.ctxprog_len; i++)
ctx               283 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	*size = ctx.ctxvals_pos * 4;
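
The PROG-mode driver fragments above have a VALS-mode counterpart in the same file; it presumably escapes this cross-reference because its compound-literal argument never names a bare `ctx` identifier. A hedged sketch of that second pass, assuming it mirrors its nv40 sibling rather than quoting the file verbatim:

void
nv50_grctx_fill(struct nvkm_device *device, struct nvkm_gpuobj *mem)
{
	/* Same generator, second pass: with mode = NVKM_GRCTX_VALS the
	 * gr_def()/dd_emit() helpers write defaults into `mem` instead
	 * of emitting ctxprog words. */
	nv50_grctx_generate(&(struct nvkm_grctx) {
			     .device = device,
			     .mode = NVKM_GRCTX_VALS,
			     .data = mem,
			     });
}
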
ctx               294 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_mmio_ddata(struct nvkm_grctx *ctx);
ctx               297 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_mmio(struct nvkm_grctx *ctx)
ctx               299 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx               305 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x400808, 7);
ctx               306 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x400814, 0x00000030);
ctx               307 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x400834, 0x32);
ctx               309 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400834, 0xff400040);
ctx               310 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400838, 0xfff00080);
ctx               311 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x40083c, 0xfff70090);
ctx               312 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400840, 0xffe806a8);
ctx               314 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x400844, 0x00000002);
ctx               316 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400894, 0x00001000);
ctx               317 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x4008e8, 0x00000003);
ctx               318 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x4008ec, 0x00001000);
ctx               320 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400908, 0xb);
ctx               322 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400908, 0xc);
ctx               324 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400908, 0xe);
ctx               327 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400b00, 0x1);
ctx               329 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400b10, 0x1);
ctx               330 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400b10, 0x0001629d);
ctx               331 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x400b20, 0x1);
ctx               332 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x400b20, 0x0001629d);
ctx               335 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	nv50_gr_construct_mmio_ddata(ctx);
ctx               338 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x400c08, 0x2);
ctx               339 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x400c08, 0x0000fe0c);
ctx               343 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x401008, 0x4);
ctx               344 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401014, 0x00001000);
ctx               346 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x401008, 0x5);
ctx               347 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401018, 0x00001000);
ctx               349 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x401008, 0x5);
ctx               350 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401018, 0x00004000);
ctx               354 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x401400, 0x8);
ctx               355 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x401424, 0x3);
ctx               357 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x40142c, 0x0001fd87);
ctx               359 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x40142c, 0x00000187);
ctx               360 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x401540, 0x5);
ctx               361 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x401550, 0x00001018);
ctx               364 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x401814, 0x1);
ctx               365 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x401814, 0x000000ff);
ctx               367 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x40181c, 0xe);
ctx               368 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401850, 0x00000004);
ctx               370 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x40181c, 0xf);
ctx               371 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401854, 0x00000004);
ctx               373 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x40181c, 0x13);
ctx               374 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401864, 0x00000004);
ctx               378 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x401c00, 0x1);
ctx               381 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401c00, 0x0001005f);
ctx               386 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401c00, 0x044d00df);
ctx               394 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401c00, 0x042500df);
ctx               400 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x401c00, 0x142500df);
ctx               407 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x402400, 0x1);
ctx               409 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x402408, 0x1);
ctx               411 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x402408, 0x2);
ctx               412 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x402408, 0x00000600);
ctx               415 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x402800, 0x1);
ctx               417 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x402800, 0x00000006);
ctx               420 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x402c08, 0x6);
ctx               422 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x402c14, 0x01000000);
ctx               423 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x402c18, 0x000000ff);
ctx               425 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x402ca0, 0x1);
ctx               427 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x402ca0, 0x2);
ctx               429 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x402ca0, 0x00000400);
ctx               431 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x402ca0, 0x00000800);
ctx               433 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x402ca0, 0x00000400);
ctx               434 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x402cac, 0x4);
ctx               437 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x403004, 0x1);
ctx               438 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x403004, 0x00000001);
ctx               442 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x403404, 0x1);
ctx               443 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x403404, 0x00000001);
ctx               447 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x405000, 0x1);
ctx               450 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x405000, 0x00300080);
ctx               460 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x405000, 0x000e0080);
ctx               467 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x405000, 0x00000080);
ctx               470 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x405014, 0x1);
ctx               471 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x405014, 0x00000004);
ctx               472 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x40501c, 0x1);
ctx               473 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x405024, 0x1);
ctx               474 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x40502c, 0x1);
ctx               478 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x4063e0, 0x1);
ctx               482 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x406814, 0x2b);
ctx               483 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x406818, 0x00000f80);
ctx               484 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x406860, 0x007f0080);
ctx               485 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x40689c, 0x007f0080);
ctx               487 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x406814, 0x4);
ctx               489 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x406818, 0x00000f80);
ctx               491 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x406818, 0x00001f80);
ctx               493 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x40681c, 0x00000030);
ctx               494 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x406830, 0x3);
ctx               500 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, 0x407000 + (i<<8), 3);
ctx               502 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407000 + (i<<8), 0x1b74f820);
ctx               504 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407000 + (i<<8), 0x3b74f821);
ctx               506 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407000 + (i<<8), 0x7b74f821);
ctx               507 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x407004 + (i<<8), 0x89058001);
ctx               510 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407010 + (i<<8), 1);
ctx               512 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407010 + (i<<8), 2);
ctx               513 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407010 + (i<<8), 0x00001000);
ctx               514 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407014 + (i<<8), 0x0000001f);
ctx               516 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407010 + (i<<8), 3);
ctx               517 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407010 + (i<<8), 0x00001000);
ctx               519 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, 0x407014 + (i<<8), 0x000000ff);
ctx               521 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, 0x407014 + (i<<8), 0x000001ff);
ctx               524 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, 0x407080 + (i<<8), 4);
ctx               526 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407080 + (i<<8), 0x027c10fa);
ctx               528 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407080 + (i<<8), 0x827c10fa);
ctx               530 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407084 + (i<<8), 0x000000c0);
ctx               532 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407084 + (i<<8), 0x400000c0);
ctx               533 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x407088 + (i<<8), 0xb7892080);
ctx               536 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407094 + (i<<8), 1);
ctx               538 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407094 + (i<<8), 3);
ctx               540 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, 0x407094 + (i<<8), 4);
ctx               541 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x4070a0 + (i<<8), 1);
ctx               546 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_ctx(ctx, 0x407c00, 0x3);
ctx               548 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x407c00, 0x00010040);
ctx               550 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x407c00, 0x00390040);
ctx               552 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x407c00, 0x003d0040);
ctx               553 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	gr_def(ctx, 0x407c08, 0x00000022);
ctx               555 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x407c10, 0x3);
ctx               556 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x407c20, 0x1);
ctx               557 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x407c2c, 0x1);
ctx               561 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x407d00, 0x9);
ctx               563 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x407d00, 0x15);
ctx               566 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x407d08, 0x00380040);
ctx               569 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x407d08, 0x00010040);
ctx               571 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, 0x407d08, 0x00390040);
ctx               574 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407d08, 0x003d0040);
ctx               576 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, 0x407d08, 0x003c0040);
ctx               578 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		gr_def(ctx, 0x407d0c, 0x00000022);
ctx               592 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset + 0x00, 1);
ctx               593 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x00, 0x0000ff0a);
ctx               594 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset + 0x08, 1);
ctx               603 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset, 0x20);
ctx               604 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x00, 0x01800000);
ctx               605 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x04, 0x00160000);
ctx               606 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x08, 0x01800000);
ctx               607 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x18, 0x0003ffff);
ctx               610 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x00080000);
ctx               613 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x00880000);
ctx               616 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x018c0000);
ctx               621 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x118c0000);
ctx               624 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x10880000);
ctx               628 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x310c0000);
ctx               635 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x1c, 0x300c0000);
ctx               638 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x40, 0x00010401);
ctx               640 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x48, 0x00000040);
ctx               642 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x48, 0x00000078);
ctx               643 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x50, 0x000000bf);
ctx               644 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x58, 0x00001210);
ctx               646 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x5c, 0x00000080);
ctx               648 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x5c, 0x08000080);
ctx               650 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, offset + 0x68, 0x0000003e);
ctx               654 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x300, 0x4);
ctx               656 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x300, 0x5);
ctx               658 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x304, 0x00007070);
ctx               660 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x304, 0x00027070);
ctx               662 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x304, 0x01127070);
ctx               664 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x304, 0x05127070);
ctx               667 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x318, 1);
ctx               669 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x320, 1);
ctx               671 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x318, 0x0003ffff);
ctx               673 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x318, 0x03ffffff);
ctx               675 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x320, 0x07ffffff);
ctx               678 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x324, 5);
ctx               680 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x328, 4);
ctx               683 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x340, 9);
ctx               686 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x33c, 0xb);
ctx               689 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x33c, 0xd);
ctx               692 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x0, 0x00120407);
ctx               693 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x4, 0x05091507);
ctx               695 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x8, 0x05100202);
ctx               697 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x8, 0x05010202);
ctx               698 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0xc, 0x00030201);
ctx               700 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x36c, 1);
ctx               702 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, base + 0x400, 2);
ctx               703 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, base + 0x404, 0x00000040);
ctx               704 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, base + 0x40c, 2);
ctx               705 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, base + 0x40c, 0x0d0c0b0a);
ctx               706 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, base + 0x410, 0x00141210);
ctx               712 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset, 6);
ctx               713 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x0, 0x000001f0);
ctx               714 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x4, 0x00000001);
ctx               715 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x8, 0x00000003);
ctx               717 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0xc, 0x00008000);
ctx               718 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x14, 0x00039e00);
ctx               719 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset + 0x1c, 2);
ctx               721 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x1c, 0x00000040);
ctx               723 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x1c, 0x00000100);
ctx               724 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset + 0x20, 0x00003800);
ctx               727 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, base + 0x54c, 2);
ctx               729 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, base + 0x54c, 0x003fe006);
ctx               731 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 					gr_def(ctx, base + 0x54c, 0x003fe007);
ctx               732 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, base + 0x550, 0x003fe000);
ctx               739 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset, 1);
ctx               740 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			gr_def(ctx, offset, 0x00404040);
ctx               746 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset, 2);
ctx               748 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset, 0x0077f005);
ctx               750 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset, 0x6cf7f007);
ctx               752 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset, 0x6cfff007);
ctx               754 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset, 0x0cfff007);
ctx               756 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset, 0x0cf7f007);
ctx               758 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x4, 0x00007fff);
ctx               760 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x4, 0x003f7fff);
ctx               762 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x4, 0x02bf7fff);
ctx               763 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			cp_ctx(ctx, offset + 0x2c, 1);
ctx               765 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset + 0x50, 9);
ctx               766 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x54, 0x000003ff);
ctx               767 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x58, 0x00000003);
ctx               768 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x5c, 0x00000003);
ctx               769 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x60, 0x000001ff);
ctx               770 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x64, 0x0000001f);
ctx               771 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x68, 0x0000000f);
ctx               772 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				gr_def(ctx, offset + 0x6c, 0x0000000f);
ctx               774 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset + 0x50, 1);
ctx               775 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset + 0x70, 1);
ctx               777 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset + 0x50, 1);
ctx               778 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				cp_ctx(ctx, offset + 0x60, 5);
ctx               785 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c dd_emit(struct nvkm_grctx *ctx, int num, u32 val) {
ctx               787 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	if (val && ctx->mode == NVKM_GRCTX_VALS) {
ctx               789 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nvkm_wo32(ctx->data, 4 * (ctx->ctxvals_pos + i), val);
ctx               791 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos += num;
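
dd_emit() above is the data-side analogue of gr_def(): it always advances the ctxvals cursor so the PROG and VALS passes agree on layout, but it only stores non-zero values, and only in VALS mode, since the context image is assumed to start zeroed. A minimal standalone model of that contract, again with simplified stand-in types:

#include <stdint.h>

enum mode { MODE_PROG, MODE_VALS };

struct grctx {
	enum mode mode;
	int ctxvals_pos;
	uint32_t image[4096];	/* stands in for nvkm_wo32(ctx->data, ...) */
};

/* Write `val` into `num` consecutive image words at the cursor, then
 * advance the cursor unconditionally; zero values skip the store. */
static void dd_emit(struct grctx *c, int num, uint32_t val)
{
	int i;

	if (val && c->mode == MODE_VALS)
		for (i = 0; i < num; i++)
			c->image[c->ctxvals_pos + i] = val;
	c->ctxvals_pos += num;
}

int main(void)
{
	struct grctx c = { .mode = MODE_VALS };

	dd_emit(&c, 1, 0);	/* UNK0F90: cursor moves, no store */
	dd_emit(&c, 1, 2);	/* SRC_TILE_MODE_Y default */
	return c.image[1] == 2 ? 0 : 1;
}

The unconditional cursor advance is the important invariant: the long dd_emit() runs in nv50_gr_construct_mmio_ddata() above enumerate the ddata layout itself, so both passes must step through it identically.
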
ctx               795 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_mmio_ddata(struct nvkm_grctx *ctx)
ctx               797 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx               799 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	base = ctx->ctxvals_pos;
ctx               802 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);	/* 00000001 UNK0F90 */
ctx               803 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);	/* 00000001 UNK135C */
ctx               806 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);	/* 00000007 SRC_TILE_MODE_Z */
ctx               807 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);	/* 00000007 SRC_TILE_MODE_Y */
ctx               808 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);	/* 00000001 SRC_LINEAR #1 */
ctx               809 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);	/* 000000ff SRC_ADDRESS_HIGH */
ctx               810 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);	/* 00000001 SRC_SRGB */
ctx               812 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000003 eng2d UNK0258 */
ctx               813 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);	/* 00000fff SRC_DEPTH */
ctx               814 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x100);	/* 0000ffff SRC_HEIGHT */
ctx               817 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f TEXTURES_LOG2 */
ctx               818 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f SAMPLERS_LOG2 */
ctx               819 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff CB_DEF_ADDRESS_HIGH */
ctx               820 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff CB_DEF_ADDRESS_LOW */
ctx               821 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff SHARED_SIZE */
ctx               822 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* ffffffff REG_MODE */
ctx               823 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 0000ffff BLOCK_ALLOC_THREADS */
ctx               824 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 LANES32 */
ctx               825 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff UNK370 */
ctx               826 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff USER_PARAM_UNK */
ctx               827 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff USER_PARAM_COUNT */
ctx               828 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000000ff UNK384 bits 8-15 */
ctx               829 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x3fffff);	/* 003fffff TIC_LIMIT */
ctx               830 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x1fff);	/* 000fffff TSC_LIMIT */
ctx               831 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000ffff CB_ADDR_INDEX */
ctx               832 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000007ff BLOCKDIM_X */
ctx               833 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000007ff BLOCKDIM_XMY */
ctx               834 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 BLOCKDIM_XMY_OVERFLOW */
ctx               835 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 0003ffff BLOCKDIM_XMYMZ */
ctx               836 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000007ff BLOCKDIM_Y */
ctx               837 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 0000007f BLOCKDIM_Z */
ctx               838 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff CP_REG_ALLOC_TEMP */
ctx               839 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 BLOCKDIM_DIRTY */
ctx               841 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000003 UNK03E8 */
ctx               842 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 0000007f BLOCK_ALLOC_HALFWARPS */
ctx               843 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000007 LOCAL_WARPS_NO_CLAMP */
ctx               844 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 7);		/* 00000007 LOCAL_WARPS_LOG_ALLOC */
ctx               845 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000007 STACK_WARPS_NO_CLAMP */
ctx               846 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 7);		/* 00000007 STACK_WARPS_LOG_ALLOC */
ctx               847 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00001fff BLOCK_ALLOC_REGSLOTS_PACKED */
ctx               848 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00001fff BLOCK_ALLOC_REGSLOTS_STRIDED */
ctx               849 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000007ff BLOCK_ALLOC_THREADS */
ctx               853 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 4, 0);		/* 0000ffff clip X, Y, W, H */
ctx               855 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff chroma COLOR_FORMAT */
ctx               857 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff pattern COLOR_FORMAT */
ctx               858 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff pattern SHAPE */
ctx               859 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff pattern PATTERN_SELECT */
ctx               861 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xa);		/* ffffffff surf2d SRC_FORMAT */
ctx               862 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff surf2d DMA_SRC */
ctx               863 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 000000ff surf2d SRC_ADDRESS_HIGH */
ctx               864 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff surf2d SRC_ADDRESS_LOW */
ctx               865 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x40);		/* 0000ffff surf2d SRC_PITCH */
ctx               866 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000000f surf2d SRC_TILE_MODE_Z */
ctx               867 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 2);		/* 0000000f surf2d SRC_TILE_MODE_Y */
ctx               868 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x100);		/* ffffffff surf2d SRC_HEIGHT */
ctx               869 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* 00000001 surf2d SRC_LINEAR */
ctx               870 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x100);		/* ffffffff surf2d SRC_WIDTH */
ctx               872 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_B_X */
ctx               873 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_B_Y */
ctx               874 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_C_X */
ctx               875 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_C_Y */
ctx               876 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_D_X */
ctx               877 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect CLIP_D_Y */
ctx               878 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff gdirect COLOR_FORMAT */
ctx               879 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff gdirect OPERATION */
ctx               880 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect POINT_X */
ctx               881 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff gdirect POINT_Y */
ctx               883 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff blit SRC_Y */
ctx               884 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff blit OPERATION */
ctx               886 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff ifc OPERATION */
ctx               888 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff iifc INDEX_FORMAT */
ctx               889 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff iifc LUT_OFFSET */
ctx               890 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 4);		/* ffffffff iifc COLOR_FORMAT */
ctx               891 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff iifc OPERATION */
ctx               895 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff m2mf LINE_COUNT */
ctx               896 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff m2mf LINE_LENGTH_IN */
ctx               897 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 2, 0);		/* ffffffff m2mf OFFSET_IN, OFFSET_OUT */
ctx               898 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff m2mf TILING_DEPTH_OUT */
ctx               899 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x100);		/* ffffffff m2mf TILING_HEIGHT_OUT */
ctx               900 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff m2mf TILING_POSITION_OUT_Z */
ctx               901 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 m2mf LINEAR_OUT */
ctx               902 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 2, 0);		/* 0000ffff m2mf TILING_POSITION_OUT_X, Y */
ctx               903 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x100);		/* ffffffff m2mf TILING_PITCH_OUT */
ctx               904 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff m2mf TILING_DEPTH_IN */
ctx               905 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x100);		/* ffffffff m2mf TILING_HEIGHT_IN */
ctx               906 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff m2mf TILING_POSITION_IN_Z */
ctx               907 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 m2mf LINEAR_IN */
ctx               908 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 2, 0);		/* 0000ffff m2mf TILING_POSITION_IN_X, Y */
ctx               909 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x100);		/* ffffffff m2mf TILING_PITCH_IN */
ctx               913 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff line COLOR_FORMAT */
ctx               914 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff line OPERATION */
ctx               916 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* ffffffff triangle COLOR_FORMAT */
ctx               917 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff triangle OPERATION */
ctx               919 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000000f sifm TILE_MODE_Z */
ctx               920 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 2);		/* 0000000f sifm TILE_MODE_Y */
ctx               921 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 000000ff sifm FORMAT_FILTER */
ctx               922 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* 000000ff sifm FORMAT_ORIGIN */
ctx               923 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff sifm SRC_PITCH */
ctx               924 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);		/* 00000001 sifm SRC_LINEAR */
ctx               925 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 000000ff sifm SRC_OFFSET_HIGH */
ctx               926 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff sifm SRC_OFFSET */
ctx               927 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff sifm SRC_HEIGHT */
ctx               928 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* 0000ffff sifm SRC_WIDTH */
ctx               929 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 3);		/* ffffffff sifm COLOR_FORMAT */
ctx               930 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff sifm OPERATION */
ctx               932 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);		/* ffffffff sifc OPERATION */
ctx               936 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f GP_TEXTURES_LOG2 */
ctx               937 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f GP_SAMPLERS_LOG2 */
ctx               938 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff */
ctx               939 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff */
ctx               940 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff UNK12B0_0 */
ctx               941 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x70);		/* 000000ff UNK12B0_1 */
ctx               942 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x80);		/* 000000ff UNK12B0_3 */
ctx               943 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 000000ff UNK12B0_2 */
ctx               944 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f FP_TEXTURES_LOG2 */
ctx               945 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f FP_SAMPLERS_LOG2 */
ctx               947 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* ffffffff */
ctx               948 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 0000007f MULTISAMPLE_SAMPLES_LOG2 */
ctx               950 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 0000000f MULTISAMPLE_SAMPLES_LOG2 */
ctx               952 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xc);		/* 000000ff SEMANTIC_COLOR.BFC0_ID */
ctx               954 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 SEMANTIC_COLOR.CLMP_EN */
ctx               955 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 8);		/* 000000ff SEMANTIC_COLOR.COLR_NR */
ctx               956 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x14);		/* 000000ff SEMANTIC_COLOR.FFC0_ID */
ctx               958 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff SEMANTIC_LAYER */
ctx               959 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 */
ctx               961 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 SEMANTIC_PTSZ.ENABLE */
ctx               962 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x29);	/* 000000ff SEMANTIC_PTSZ.PTSZ_ID */
ctx               963 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x27);	/* 000000ff SEMANTIC_PRIM */
ctx               964 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x26);	/* 000000ff SEMANTIC_LAYER */
ctx               965 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 8);	/* 0000000f SEMANTIC_CLIP.CLIP_HIGH */
ctx               966 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 4);	/* 000000ff SEMANTIC_CLIP.CLIP_LO */
ctx               967 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x27);	/* 000000ff UNK0FD4 */
ctx               968 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 UNK1900 */
ctx               970 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000007 RT_CONTROL_MAP0 */
ctx               971 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000007 RT_CONTROL_MAP1 */
ctx               972 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* 00000007 RT_CONTROL_MAP2 */
ctx               973 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 3);		/* 00000007 RT_CONTROL_MAP3 */
ctx               974 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 00000007 RT_CONTROL_MAP4 */
ctx               975 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 5);		/* 00000007 RT_CONTROL_MAP5 */
ctx               976 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 6);		/* 00000007 RT_CONTROL_MAP6 */
ctx               977 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 7);		/* 00000007 RT_CONTROL_MAP7 */
ctx               978 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 0000000f RT_CONTROL_COUNT */
ctx               979 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 8, 0);		/* 00000001 RT_HORIZ_UNK */
ctx               980 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 8, 0);		/* ffffffff RT_ADDRESS_LOW */
ctx               981 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xcf);		/* 000000ff RT_FORMAT */
ctx               982 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 7, 0);		/* 000000ff RT_FORMAT */
ctx               984 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 3, 0);	/* 1, 1, 1 */
ctx               986 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 2, 0);	/* 1, 1 */
ctx               987 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff GP_ENABLE */
ctx               988 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x80);		/* 0000ffff GP_VERTEX_OUTPUT_COUNT */
ctx               989 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff GP_REG_ALLOC_RESULT */
ctx               990 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx               992 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 3);	/* 00000003 */
ctx               993 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 UNK1418. Alone. */
ctx               996 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 3);	/* 00000003 UNK15AC */
ctx               997 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff RASTERIZE_ENABLE */
ctx               998 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 FP_CONTROL.EXPORTS_Z */
ctx              1000 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 FP_CONTROL.MULTIPLE_RESULTS */
ctx              1001 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x12);		/* 000000ff FP_INTERPOLANT_CTRL.COUNT */
ctx              1002 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x10);		/* 000000ff FP_INTERPOLANT_CTRL.COUNT_NONFLAT */
ctx              1003 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xc);		/* 000000ff FP_INTERPOLANT_CTRL.OFFSET */
ctx              1004 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 FP_INTERPOLANT_CTRL.UMASK.W */
ctx              1005 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 FP_INTERPOLANT_CTRL.UMASK.X */
ctx              1006 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 FP_INTERPOLANT_CTRL.UMASK.Y */
ctx              1007 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 FP_INTERPOLANT_CTRL.UMASK.Z */
ctx              1008 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff FP_RESULT_COUNT */
ctx              1009 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* ffffffff REG_MODE */
ctx              1010 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 000000ff FP_REG_ALLOC_TEMP */
ctx              1012 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* ffffffff */
ctx              1013 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 GP_BUILTIN_RESULT_EN.LAYER_IDX */
ctx              1014 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff STRMOUT_ENABLE */
ctx              1015 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x3fffff);	/* 003fffff TIC_LIMIT */
ctx              1016 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x1fff);	/* 000fffff TSC_LIMIT */
ctx              1017 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              1019 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 8, 0);	/* 00000001 */
ctx              1021 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000007 VTX_ATTR_DEFINE.COMP */
ctx              1022 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000007 VTX_ATTR_DEFINE.SIZE */
ctx              1023 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 2);	/* 00000007 VTX_ATTR_DEFINE.TYPE */
ctx              1024 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff VTX_ATTR_DEFINE.ATTR */
ctx              1026 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1027 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x14);		/* 0000001f ZETA_FORMAT */
ctx              1028 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              1029 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f VP_TEXTURES_LOG2 */
ctx              1030 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000000f VP_SAMPLERS_LOG2 */
ctx              1032 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 */
ctx              1033 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* 00000003 POLYGON_MODE_BACK */
ctx              1035 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000003 VTX_ATTR_DEFINE.SIZE - 1 */
ctx              1036 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 0000ffff CB_ADDR_INDEX */
ctx              1038 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000003 */
ctx              1039 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 CULL_FACE_ENABLE */
ctx              1040 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000003 CULL_FACE */
ctx              1041 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 FRONT_FACE */
ctx              1042 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* 00000003 POLYGON_MODE_FRONT */
ctx              1043 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x1000);	/* 00007fff UNK141C */
ctx              1045 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xe00);		/* 7fff */
ctx              1046 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x1000);	/* 7fff */
ctx              1047 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x1e00);	/* 7fff */
ctx              1049 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 BEGIN_END_ACTIVE */
ctx              1050 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 POLYGON_MODE_??? */
ctx              1051 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000000ff GP_REG_ALLOC_TEMP / 4 rounded up */
ctx              1052 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000000ff FP_REG_ALLOC_TEMP... without /4? */
ctx              1053 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 000000ff VP_REG_ALLOC_TEMP / 4 rounded up */
ctx              1054 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 */
ctx              1055 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 */
ctx              1056 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 VTX_ATTR_MASK_UNK0 nonempty */
ctx              1057 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 VTX_ATTR_MASK_UNK1 nonempty */
ctx              1058 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0x200);		/* 0003ffff GP_VERTEX_OUTPUT_COUNT*GP_REG_ALLOC_RESULT */
ctx              1060 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x200);
ctx              1061 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 */
ctx              1063 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000001 */
ctx              1064 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x70);	/* 000000ff */
ctx              1065 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x80);	/* 000000ff */
ctx              1066 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff */
ctx              1067 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 */
ctx              1068 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000001 */
ctx              1069 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x70);	/* 000000ff */
ctx              1070 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0x80);	/* 000000ff */
ctx              1071 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff */
ctx              1073 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000001 */
ctx              1074 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xf0);	/* 000000ff */
ctx              1075 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xff);	/* 000000ff */
ctx              1076 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff */
ctx              1077 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 00000001 */
ctx              1078 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 1);	/* 00000001 */
ctx              1079 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xf0);	/* 000000ff */
ctx              1080 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0xff);	/* 000000ff */
ctx              1081 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 0);	/* 000000ff */
ctx              1082 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		dd_emit(ctx, 1, 9);	/* 0000003f UNK114C.COMP,SIZE */
ctx              1086 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 eng2d COLOR_KEY_ENABLE */
ctx              1087 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000007 eng2d COLOR_KEY_FORMAT */
ctx              1088 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff eng2d DST_DEPTH */
ctx              1089 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xcf);		/* 000000ff eng2d DST_FORMAT */
ctx              1090 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff eng2d DST_LAYER */
ctx              1091 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 eng2d DST_LINEAR */
ctx              1092 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000007 eng2d PATTERN_COLOR_FORMAT */
ctx              1093 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000007 eng2d OPERATION */
ctx              1094 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000003 eng2d PATTERN_SELECT */
ctx              1095 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xcf);		/* 000000ff eng2d SIFC_FORMAT */
ctx              1096 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 eng2d SIFC_BITMAP_ENABLE */
ctx              1097 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 2);		/* 00000003 eng2d SIFC_BITMAP_UNK808 */
ctx              1098 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff eng2d BLIT_DU_DX_FRACT */
ctx              1099 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff eng2d BLIT_DU_DX_INT */
ctx              1100 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* ffffffff eng2d BLIT_DV_DY_FRACT */
ctx              1101 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* ffffffff eng2d BLIT_DV_DY_INT */
ctx              1102 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0);		/* 00000001 eng2d BLIT_CONTROL_FILTER */
ctx              1103 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xcf);		/* 000000ff eng2d DRAW_COLOR_FORMAT */
ctx              1104 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 0xcf);		/* 000000ff eng2d SRC_FORMAT */
ctx              1105 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	dd_emit(ctx, 1, 1);		/* 00000001 eng2d SRC_LINEAR #2 */
ctx              1107 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	num = ctx->ctxvals_pos - base;
ctx              1108 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos = base;
ctx              1110 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x404800, num);
ctx              1112 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		cp_ctx(ctx, 0x405400, num);
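The tail of nv50_gr_construct_mmio_ddata() above shows the idiom this file leans on: emit a run of default values, measure how many came out, rewind, and bind that count to an MMIO transfer range. A minimal sketch of the pattern, assuming only the dd_emit()/cp_ctx() helpers visible in this listing; emit_defaults() is a hypothetical stand-in for the long dd_emit() run:

	static void
	emit_and_map(struct nvkm_grctx *ctx, u32 reg)
	{
		int base = ctx->ctxvals_pos;	/* start of this value block */
		int num;

		emit_defaults(ctx);		/* hypothetical: the dd_emit() run above */

		num = ctx->ctxvals_pos - base;	/* values the run produced */
		ctx->ctxvals_pos = base;	/* rewind; the replay walks the same span */
		cp_ctx(ctx, reg, num);		/* map 'num' words starting at MMIO 'reg' */
	}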
ctx              1158 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c xf_emit(struct nvkm_grctx *ctx, int num, u32 val) {
ctx              1160 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	if (val && ctx->mode == NVKM_GRCTX_VALS) {
ctx              1162 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nvkm_wo32(ctx->data, 4 * (ctx->ctxvals_pos + (i << 3)), val);
ctx              1164 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos += num << 3;
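Note the stride difference between the two emit helpers: dd_emit() near the top of this listing advances ctxvals_pos by one word per value, while xf_emit() above spaces values eight words apart. An illustrative comparison, following directly from the two function bodies shown:

	dd_emit(ctx, 2, val);	/* writes words pos+0, pos+1;  ctxvals_pos += 2  */
	xf_emit(ctx, 2, val);	/* writes words pos+0, pos+8;  ctxvals_pos += 16 */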
ctx              1169 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_dispatch(struct nvkm_grctx *ctx);
ctx              1170 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_m2mf(struct nvkm_grctx *ctx);
ctx              1171 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_ccache(struct nvkm_grctx *ctx);
ctx              1172 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_unk10xx(struct nvkm_grctx *ctx);
ctx              1173 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_unk14xx(struct nvkm_grctx *ctx);
ctx              1174 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_zcull(struct nvkm_grctx *ctx);
ctx              1175 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_clipid(struct nvkm_grctx *ctx);
ctx              1176 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_unk24xx(struct nvkm_grctx *ctx);
ctx              1177 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_vfetch(struct nvkm_grctx *ctx);
ctx              1178 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_eng2d(struct nvkm_grctx *ctx);
ctx              1179 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_csched(struct nvkm_grctx *ctx);
ctx              1180 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_unk1cxx(struct nvkm_grctx *ctx);
ctx              1181 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_strmout(struct nvkm_grctx *ctx);
ctx              1182 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_unk34xx(struct nvkm_grctx *ctx);
ctx              1183 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_ropm1(struct nvkm_grctx *ctx);
ctx              1184 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_ropm2(struct nvkm_grctx *ctx);
ctx              1185 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_gene_ropc(struct nvkm_grctx *ctx);
ctx              1186 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c static void nv50_gr_construct_xfer_tp(struct nvkm_grctx *ctx);
ctx              1189 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer1(struct nvkm_grctx *ctx)
ctx              1191 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1197 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	offset = (ctx->ctxvals_pos+0x3f)&~0x3f;
ctx              1198 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_base = offset;
ctx              1202 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset;
ctx              1203 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_dispatch(ctx);
ctx              1204 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_m2mf(ctx);
ctx              1205 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk24xx(ctx);
ctx              1206 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_clipid(ctx);
ctx              1207 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_zcull(ctx);
ctx              1208 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1209 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1212 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 0x1;
ctx              1213 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_vfetch(ctx);
ctx              1214 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_eng2d(ctx);
ctx              1215 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_csched(ctx);
ctx              1216 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ropm1(ctx);
ctx              1217 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ropm2(ctx);
ctx              1218 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1219 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1222 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 0x2;
ctx              1223 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ccache(ctx);
ctx              1224 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk1cxx(ctx);
ctx              1225 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_strmout(ctx);
ctx              1226 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk14xx(ctx);
ctx              1227 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk10xx(ctx);
ctx              1228 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk34xx(ctx);
ctx              1229 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1230 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1233 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 3;
ctx              1236 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_gene_ropc(ctx);
ctx              1237 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1238 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1242 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			ctx->ctxvals_pos = offset + 4 + i;
ctx              1244 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1246 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1247 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1248 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				size = (ctx->ctxvals_pos-offset)/8;
ctx              1252 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset;
ctx              1253 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_dispatch(ctx);
ctx              1254 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_m2mf(ctx);
ctx              1255 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk34xx(ctx);
ctx              1256 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_csched(ctx);
ctx              1257 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk1cxx(ctx);
ctx              1258 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_strmout(ctx);
ctx              1259 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1260 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1263 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 1;
ctx              1264 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk10xx(ctx);
ctx              1265 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1266 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1269 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 2;
ctx              1271 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_gene_unk14xx(ctx);
ctx              1272 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_unk24xx(ctx);
ctx              1273 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1274 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1277 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 3;
ctx              1278 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_vfetch(ctx);
ctx              1279 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1280 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1283 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 4;
ctx              1284 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ccache(ctx);
ctx              1285 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1286 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1289 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 5;
ctx              1290 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ropm2(ctx);
ctx              1291 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_ropm1(ctx);
ctx              1295 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_gene_ropc(ctx);
ctx              1296 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1297 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1300 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 6;
ctx              1301 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_zcull(ctx);
ctx              1302 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_clipid(ctx);
ctx              1303 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_gene_eng2d(ctx);
ctx              1305 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_tp(ctx);
ctx              1307 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_tp(ctx);
ctx              1309 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_tp(ctx);
ctx              1311 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_tp(ctx);
ctx              1312 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1313 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1316 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 7;
ctx              1319 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1321 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1323 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1325 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1327 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1329 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_tp(ctx);
ctx              1331 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_gene_unk14xx(ctx);
ctx              1333 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              1334 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              1337 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos = offset + size * 8;
ctx              1338 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos = (ctx->ctxvals_pos+0x3f)&~0x3f;
ctx              1339 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, offset);
ctx              1340 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SET_XFER_POINTER);
ctx              1341 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, size);
ctx              1342 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SEEK_1);
ctx              1343 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_XFER_1);
ctx              1344 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, XFER, BUSY);
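nv50_gr_construct_xfer1() sizes the transfer area by taking, across all eight strands, the maximum of (ctxvals_pos - offset)/8, then rounds the end of the area up to a 0x40-word boundary before programming the copy. Condensed from the code above purely as an illustration of the bookkeeping, not a new routine:

	for (strand = 0; strand < 8; strand++) {
		ctx->ctxvals_pos = offset + strand;	/* strands interleave word by word */
		/* ... construct this strand's genes ... */
		if ((ctx->ctxvals_pos - offset) / 8 > size)
			size = (ctx->ctxvals_pos - offset) / 8;	/* largest strand, in 8-word units */
	}
	ctx->ctxvals_pos = offset + size * 8;			/* end of the xfer1 area */
	ctx->ctxvals_pos = (ctx->ctxvals_pos + 0x3f) & ~0x3f;	/* align up to 0x40 words */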
ctx              1352 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_dispatch(struct nvkm_grctx *ctx)
ctx              1355 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1358 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 5, 0);
ctx              1360 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 6, 0);
ctx              1362 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);
ctx              1366 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8*3, 0);
ctx              1368 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x100*3, 0);
ctx              1370 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);
ctx              1373 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);
ctx              1376 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 9, 0);
ctx              1378 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 9, 0);
ctx              1380 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 9, 0);
ctx              1382 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 9, 0);
ctx              1385 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);
ctx              1387 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);
ctx              1389 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 6*2, 0);
ctx              1390 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);
ctx              1392 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);
ctx              1394 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 6*2, 0);
ctx              1395 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);
ctx              1398 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1c, 0);
ctx              1400 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1e, 0);
ctx              1402 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x22, 0);
ctx              1404 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x15, 0);
ctx              1408 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_m2mf(struct nvkm_grctx *ctx)
ctx              1411 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1416 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* DMA_NOTIFY instance >> 4 */
ctx              1417 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* DMA_BUFFER_IN instance >> 4 */
ctx              1418 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* DMA_BUFFER_OUT instance >> 4 */
ctx              1419 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* OFFSET_IN */
ctx              1420 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* OFFSET_OUT */
ctx              1421 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* PITCH_IN */
ctx              1422 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* PITCH_OUT */
ctx              1423 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* LINE_LENGTH */
ctx              1424 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* LINE_COUNT */
ctx              1425 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x21);		/* FORMAT: bits 0-4 INPUT_INC, bits 5-9 OUTPUT_INC */
ctx              1426 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 1);		/* LINEAR_IN */
ctx              1427 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x2);		/* TILING_MODE_IN: bits 0-2 y tiling, bits 3-5 z tiling */
ctx              1428 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x100);	/* TILING_PITCH_IN */
ctx              1429 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x100);	/* TILING_HEIGHT_IN */
ctx              1430 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 1);		/* TILING_DEPTH_IN */
ctx              1431 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* TILING_POSITION_IN_Z */
ctx              1432 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* TILING_POSITION_IN */
ctx              1433 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 1);		/* LINEAR_OUT */
ctx              1434 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x2);		/* TILING_MODE_OUT: bits 0-2 y tiling, bits 3-5 z tiling */
ctx              1435 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x100);	/* TILING_PITCH_OUT */
ctx              1436 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0x100);	/* TILING_HEIGHT_OUT */
ctx              1437 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 1);		/* TILING_DEPTH_OUT */
ctx              1438 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* TILING_POSITION_OUT_Z */
ctx              1439 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* TILING_POSITION_OUT */
ctx              1440 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* OFFSET_IN_HIGH */
ctx              1441 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit (ctx, 1, 0);		/* OFFSET_OUT_HIGH */
ctx              1444 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x40, 0);	/* 20 * ffffffff, 3ffff */
ctx              1446 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x100, 0);	/* 80 * ffffffff, 3ffff */
ctx              1447 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* 1f/7f, 0, 1f/7f, 0 [1f for smallm2mf, 7f otherwise] */
ctx              1450 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x400, 0);	/* ffffffff */
ctx              1452 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x800, 0);	/* ffffffff */
ctx              1453 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ff/1ff, 0, 0, 0 [ff for smallm2mf, 1ff otherwise] */
ctx              1455 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x40, 0);		/* 20 * bits ffffffff, 3ffff */
ctx              1456 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x6, 0);		/* 1f, 0, 1f, 0, 1f, 0 */
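A small decode check for the m2mf FORMAT default emitted above: with INPUT_INC in bits 0-4 and OUTPUT_INC in bits 5-9, as the comment on that line states, 0x21 = (1 << 5) | 1, i.e. both increments default to one unit.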
ctx              1460 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_ccache(struct nvkm_grctx *ctx)
ctx              1462 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1463 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* RO */
ctx              1464 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x800, 0);		/* ffffffff */
ctx              1469 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x2b, 0);
ctx              1472 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x29, 0);
ctx              1477 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x27, 0);
ctx              1486 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x25, 0);
ctx              1491 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x100, 0);		/* ffffffff CB_DEF */
ctx              1492 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000007f CB_ADDR_BUFFER */
ctx              1493 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0 */
ctx              1494 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x30, 0);		/* ff SET_PROGRAM_CB */
ctx              1495 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 3f last SET_PROGRAM_CB */
ctx              1496 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* RO */
ctx              1497 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x100, 0);		/* ffffffff */
ctx              1498 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 1f, 0, 0, ... */
ctx              1499 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* ffffffff */
ctx              1500 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff */
ctx              1501 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 3 */
ctx              1502 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              1503 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_CODE_CB */
ctx              1504 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_TIC */
ctx              1505 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_TSC */
ctx              1506 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINKED_TSC */
ctx              1507 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff TIC_ADDRESS_HIGH */
ctx              1508 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff TIC_ADDRESS_LOW */
ctx              1509 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x3fffff);	/* 003fffff TIC_LIMIT */
ctx              1510 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff TSC_ADDRESS_HIGH */
ctx              1511 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff TSC_ADDRESS_LOW */
ctx              1512 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1fff);	/* 000fffff TSC_LIMIT */
ctx              1513 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff VP_ADDRESS_HIGH */
ctx              1514 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff VP_ADDRESS_LOW */
ctx              1515 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00ffffff VP_START_ID */
ctx              1516 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff CB_DEF_ADDRESS_HIGH */
ctx              1517 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff CB_DEF_ADDRESS_LOW */
ctx              1518 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1519 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff GP_ADDRESS_HIGH */
ctx              1520 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff GP_ADDRESS_LOW */
ctx              1521 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00ffffff GP_START_ID */
ctx              1522 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff FP_ADDRESS_HIGH */
ctx              1523 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff FP_ADDRESS_LOW */
ctx              1524 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00ffffff FP_START_ID */
ctx              1528 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_unk10xx(struct nvkm_grctx *ctx)
ctx              1530 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1533 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1534 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1535 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1536 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 0000ffff GP_VERTEX_OUTPUT_COUNT */
ctx              1537 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_REG_ALLOC_RESULT */
ctx              1538 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80c14);	/* 01ffffff SEMANTIC_COLOR */
ctx              1539 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              1541 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x3ff);
ctx              1543 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x7ff);	/* 000007ff */
ctx              1544 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 111/113 */
ctx              1545 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              1553 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0xa0, 0);	/* ffffffff */
ctx              1559 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x120, 0);
ctx              1563 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x100, 0);	/* ffffffff */
ctx              1568 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x400, 0);	/* ffffffff */
ctx              1571 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* 3f, 0, 0, 0 */
ctx              1572 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff */
ctx              1574 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1575 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1576 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1577 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 0000ffff GP_VERTEX_OUTPUT_COUNT */
ctx              1578 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_REG_ALLOC_TEMP */
ctx              1579 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 RASTERIZE_ENABLE */
ctx              1580 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1900 */
ctx              1581 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x27);		/* 000000ff UNK0FD4 */
ctx              1582 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              1583 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x26);		/* 000000ff SEMANTIC_LAYER */
ctx              1584 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              1588 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_unk34xx(struct nvkm_grctx *ctx)
ctx              1590 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1592 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VIEWPORT_CLIP_RECTS_EN */
ctx              1593 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 VIEWPORT_CLIP_MODE */
ctx              1594 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0x04000000);	/* 07ffffff VIEWPORT_CLIP_HORIZ*8, VIEWPORT_CLIP_VERT*8 */
ctx              1595 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POLYGON_STIPPLE_ENABLE */
ctx              1596 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x20, 0);		/* ffffffff POLYGON_STIPPLE */
ctx              1597 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 00007fff WINDOW_OFFSET_XY */
ctx              1598 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              1599 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x04e3bfdf);	/* ffffffff UNK0D64 */
ctx              1600 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x04e3bfdf);	/* ffffffff UNK0DF4 */
ctx              1601 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 WINDOW_ORIGIN */
ctx              1602 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              1603 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1fe21);	/* 0001ffff tesla UNK0FAC */
ctx              1605 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x0fac6881);
ctx              1607 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);
ctx              1608 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);
ctx              1613 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_unk14xx(struct nvkm_grctx *ctx)
ctx              1615 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1618 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 5, 0);		/* ffffffff */
ctx              1619 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x80c14);	/* 01ffffff SEMANTIC_COLOR */
ctx              1620 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              1621 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 000003ff */
ctx              1622 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x804);		/* 00000fff SEMANTIC_CLIP */
ctx              1623 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              1624 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 4);		/* 7f, ff */
ctx              1625 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1627 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1628 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1629 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1630 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 GP_ENABLE */
ctx              1631 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);			/* 7f/ff VIEW_VOLUME_CLIP_CTRL */
ctx              1632 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 000000ff VP_CLIP_DISTANCE_ENABLE */
ctx              1634 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 3ff */
ctx              1635 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 000000ff tesla UNK1940 */
ctx              1636 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK0D7C */
ctx              1637 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x804);			/* 00000fff SEMANTIC_CLIP */
ctx              1638 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 VIEWPORT_TRANSFORM_EN */
ctx              1639 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);			/* 0000001f POLYGON_MODE */
ctx              1641 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x7f);		/* 000000ff tesla UNK0FFC */
ctx              1642 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1643 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 SHADE_MODEL */
ctx              1644 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80c14);		/* 01ffffff SEMANTIC_COLOR */
ctx              1645 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1900 */
ctx              1646 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);		/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1647 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1648 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1649 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 GP_ENABLE */
ctx              1650 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);			/* 7f/ff VIEW_VOLUME_CLIP_CTRL */
ctx              1651 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK0D7C */
ctx              1652 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK0F8C */
ctx              1653 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1654 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 VIEWPORT_TRANSFORM_EN */
ctx              1655 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);		/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1656 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);			/* ffffffff NOPERSPECTIVE_BITMAP */
ctx              1657 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1900 */
ctx              1658 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0000000f */
ctx              1660 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x3ff);		/* 000003ff tesla UNK0D68 */
ctx              1662 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x7ff);		/* 000007ff tesla UNK0D68 */
ctx              1663 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80c14);		/* 01ffffff SEMANTIC_COLOR */
ctx              1664 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              1665 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x30, 0);			/* ffffffff VIEWPORT_SCALE: X0, Y0, Z0, X1, Y1, ... */
ctx              1666 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);			/* f, 0, 0 */
ctx              1667 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);			/* ffffffff last VIEWPORT_SCALE? */
ctx              1668 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1669 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 VIEWPORT_TRANSFORM_EN */
ctx              1670 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1900 */
ctx              1671 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1924 */
ctx              1672 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);			/* 000000ff VIEW_VOLUME_CLIP_CTRL */
ctx              1673 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 */
ctx              1674 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x30, 0);			/* ffffffff VIEWPORT_TRANSLATE */
ctx              1675 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);			/* f, 0, 0 */
ctx              1676 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);			/* ffffffff */
ctx              1677 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1678 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0x88);			/* 000001ff tesla UNK19D8 */
ctx              1679 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1924 */
ctx              1680 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1681 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 0000000f CULL_MODE */
ctx              1682 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);			/* 07ffffff SCREEN_SCISSOR */
ctx              1683 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);			/* 00007fff WINDOW_OFFSET_XY */
ctx              1684 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000003 WINDOW_ORIGIN */
ctx              1685 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);			/* 00000001 SCISSOR_ENABLE */
ctx              1686 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              1687 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x26);			/* 000000ff SEMANTIC_LAYER */
ctx              1688 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1900 */
ctx              1689 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0000000f */
ctx              1690 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x3f800000);		/* ffffffff LINE_WIDTH */
ctx              1691 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 LINE_STIPPLE_ENABLE */
ctx              1692 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 LINE_SMOOTH_ENABLE */
ctx              1693 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              1695 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              1696 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);			/* 0000001f POLYGON_MODE */
ctx              1697 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);			/* 000000ff VIEW_VOLUME_CLIP_CTRL */
ctx              1699 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              1700 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              1701 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 000003ff */
ctx              1703 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x20, 0);			/* 10xbits ffffffff, 3fffff. SCISSOR_* */
ctx              1704 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* f */
ctx              1705 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0? */
ctx              1706 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff */
ctx              1707 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 003fffff */
ctx              1708 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A30 */
ctx              1709 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x52);			/* 000001ff SEMANTIC_PTSZ */
ctx              1710 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              1711 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x26);			/* 000000ff SEMANTIC_LAYER */
ctx              1712 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1900 */
ctx              1713 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1714 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1715 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 GP_ENABLE */
ctx              1716 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);			/* 0000001f POLYGON_MODE */
ctx              1717 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 LINE_SMOOTH_ENABLE */
ctx              1718 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 LINE_STIPPLE_ENABLE */
ctx              1719 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x00ffff00);		/* 00ffffff LINE_STIPPLE_PATTERN */
ctx              1720 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 0000000f */
ctx              1724 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_zcull(struct nvkm_grctx *ctx)
ctx              1726 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1729 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x3f);		/* 0000003f UNK1590 */
ctx              1730 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 ALPHA_TEST_ENABLE */
ctx              1731 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              1732 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              1733 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_BACK_FUNC_FUNC */
ctx              1734 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_MASK */
ctx              1735 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_REF */
ctx              1736 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_MASK */
ctx              1737 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_BACK_OP_FAIL, ZFAIL, ZPASS */
ctx              1738 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 tesla UNK143C */
ctx              1739 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0x04000000);	/* 07ffffff tesla UNK0D6C */
ctx              1740 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              1741 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 CLIPID_ENABLE */
ctx              1742 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff DEPTH_BOUNDS */
ctx              1743 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              1744 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 DEPTH_TEST_FUNC */
ctx              1745 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              1746 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              1747 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000000f CULL_MODE */
ctx              1748 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff */
ctx              1749 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK0FB0 */
ctx              1750 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POLYGON_STIPPLE_ENABLE */
ctx              1751 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              1752 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              1753 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              1754 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff CLEAR_STENCIL */
ctx              1755 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_FRONT_FUNC_FUNC */
ctx              1756 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_MASK */
ctx              1757 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_REF */
ctx              1758 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              1759 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_FRONT_OP_FAIL, ZFAIL, ZPASS */
ctx              1760 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              1761 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              1762 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff CLEAR_DEPTH */
ctx              1763 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              1765 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 tesla UNK1108 */
ctx              1766 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              1767 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              1768 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              1769 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1001);	/* 00001fff ZETA_ARRAY_MODE */
ctx              1771 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0xffff);	/* 0000ffff MSAA_MASK */
ctx              1772 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* 00000001 SCISSOR_ENABLE */
ctx              1773 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* ffffffff DEPTH_RANGE_NEAR */
ctx              1774 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0x3f800000);	/* ffffffff DEPTH_RANGE_FAR */
ctx              1775 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 7f/ff/3ff VIEW_VOLUME_CLIP_CTRL */
ctx              1776 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VIEWPORT_CLIP_RECTS_EN */
ctx              1777 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 3);		/* 00000003 FP_CTRL_UNK196C */
ctx              1778 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK1968 */
ctx              1780 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 0fffffff tesla UNK1104 */
ctx              1781 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK151C */
ctx              1785 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_clipid(struct nvkm_grctx *ctx)
ctx              1789 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 UNK0FB4 */
ctx              1791 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* 07ffffff CLIPID_REGION_HORIZ */
ctx              1792 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* 07ffffff CLIPID_REGION_VERT */
ctx              1793 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 07ffffff SCREEN_SCISSOR */
ctx              1794 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0x04000000);	/* 07ffffff UNK1508 */
ctx              1795 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 CLIPID_ENABLE */
ctx              1796 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 00003fff CLIPID_WIDTH */
ctx              1797 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff CLIPID_ID */
ctx              1798 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff CLIPID_ADDRESS_HIGH */
ctx              1799 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff CLIPID_ADDRESS_LOW */
ctx              1800 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 00003fff CLIPID_HEIGHT */
ctx              1801 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_CLIPID */
ctx              1805 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_unk24xx(struct nvkm_grctx *ctx)
ctx              1807 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1811 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x33, 0);
ctx              1813 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);
ctx              1815 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1816 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1817 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1820 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* RO */
ctx              1821 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xe10, 0); /* 190 * 9: 8*ffffffff, 7ff */
ctx              1822 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 1ff */
ctx              1823 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 0? */
ctx              1824 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 9, 0);	/* ffffffff, 7ff */
ctx              1826 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* RO */
ctx              1827 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xe10, 0); /* 190 * 9: 8*ffffffff, 7ff */
ctx              1828 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 1ff */
ctx              1829 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 0? */
ctx              1830 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 9, 0);	/* ffffffff, 7ff */
ctx              1832 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xc, 0);	/* RO */
ctx              1834 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xe10, 0); /* 190 * 9: 8*ffffffff, 7ff */
ctx              1835 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 1ff */
ctx              1836 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 0? */
ctx              1839 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xc, 0);	/* RO */
ctx              1841 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xe10, 0); /* 190 * 9: 8*ffffffff, 7ff */
ctx              1842 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 1ff */
ctx              1843 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 0? */
ctx              1846 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1847 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              1848 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              1849 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1851 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);	/* 00000003 tesla UNK1100 */
ctx              1853 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1854 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1855 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f VP_GP_BUILTIN_ATTR_EN */
ctx              1856 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80c14);	/* 01ffffff SEMANTIC_COLOR */
ctx              1857 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 */
ctx              1860 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 4);	/* 000000ff */
ctx              1861 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80c14);	/* 01ffffff SEMANTIC_COLOR */
ctx              1862 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              1863 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POINT_SPRITE_ENABLE */
ctx              1864 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1865 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x27);		/* 000000ff SEMANTIC_PRIM_ID */
ctx              1866 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1867 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f */
ctx              1868 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 */
ctx              1871 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x40, 0);		/* ffffffff */
ctx              1872 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);		/* 3, 0, 0.... */
ctx              1873 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);		/* ffffffff */
ctx              1876 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POINT_SPRITE_CTRL */
ctx              1877 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 */
ctx              1878 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              1879 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff NOPERSPECTIVE_BITMAP */
ctx              1880 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* 00ffffff POINT_COORD_REPLACE_MAP */
ctx              1881 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 WINDOW_ORIGIN */
ctx              1882 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              1884 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000003ff */
ctx              1888 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_vfetch(struct nvkm_grctx *ctx)
ctx              1890 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              1897 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff tesla UNK13A4 */
ctx              1898 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 00000fff tesla UNK1318 */
ctx              1900 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff VERTEX_BUFFER_FIRST */
ctx              1901 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PRIMITIVE_RESTART_ENABLE */
ctx              1902 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK0DE8 */
ctx              1903 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff PRIMITIVE_RESTART_INDEX */
ctx              1904 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
ctx              1905 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */
ctx              1906 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt/8, 0);	/* ffffffff VTX_ATTR_MASK_UNK0DD0 */
ctx              1907 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f VP_GP_BUILTIN_ATTR_EN */
ctx              1908 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x20);		/* 0000ffff tesla UNK129C */
ctx              1909 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff turing UNK370??? */
ctx              1910 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff turing USER_PARAM_COUNT */
ctx              1911 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              1914 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xb, 0);	/* RO */
ctx              1916 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x9, 0);	/* RO */
ctx              1918 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x8, 0);	/* RO */
ctx              1920 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 EDGE_FLAG */
ctx              1921 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PROVOKING_VERTEX_LAST */
ctx              1922 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1923 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);		/* 0000001f POLYGON_MODE */
ctx              1925 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0xc, 0);		/* RO */
ctx              1927 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7f/ff */
ctx              1928 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_REG_ALLOC_RESULT */
ctx              1929 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_RESULT_MAP_SIZE */
ctx              1930 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f VP_GP_BUILTIN_ATTR_EN */
ctx              1931 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000001ff UNK1A28 */
ctx              1932 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 8);		/* 000001ff UNK0DF0 */
ctx              1933 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              1935 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x3ff);	/* 3ff tesla UNK0D68 */
ctx              1937 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x7ff);	/* 7ff tesla UNK0D68 */
ctx              1939 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x1e00);	/* 7fff */
ctx              1941 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0xc, 0);		/* RO or close */
ctx              1943 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
ctx              1944 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */
ctx              1945 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f VP_GP_BUILTIN_ATTR_EN */
ctx              1947 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ffffffff */
ctx              1949 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff */
ctx              1950 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK0FD8 */
ctx              1953 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);	/* 0? */
ctx              1954 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* weird... */
ctx              1955 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* RO */
ctx              1957 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 0? */
ctx              1958 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* weird... */
ctx              1959 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* RO */
ctx              1962 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff VB_ELEMENT_BASE */
ctx              1963 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff UNK1438 */
ctx              1964 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* 1 tesla UNK1000 */
ctx              1966 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff tesla UNK1118? */
ctx              1968 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* ffffffff VERTEX_ARRAY_UNK90C */
ctx              1969 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* f/1f */
ctx              1971 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* ffffffff VERTEX_ARRAY_UNK90C */
ctx              1972 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* f/1f */
ctx              1974 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* RO */
ctx              1975 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* RO */
ctx              1977 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK111C? */
ctx              1978 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* RO */
ctx              1980 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff UNK15F4_ADDRESS_HIGH */
ctx              1981 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff UNK15F4_ADDRESS_LOW */
ctx              1982 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff UNK0F84_ADDRESS_HIGH */
ctx              1983 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff UNK0F84_ADDRESS_LOW */
ctx              1985 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* 00003fff VERTEX_ARRAY_ATTRIB_OFFSET */
ctx              1986 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              1988 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* 00000fff VERTEX_ARRAY_STRIDE */
ctx              1989 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              1991 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* ffffffff VERTEX_ARRAY_LOW */
ctx              1992 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              1994 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* 000000ff VERTEX_ARRAY_HIGH */
ctx              1995 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              1997 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* ffffffff VERTEX_LIMIT_LOW */
ctx              1998 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              2000 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt, 0);		/* 000000ff VERTEX_LIMIT_HIGH */
ctx              2001 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* f/1f */
ctx              2004 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, acnt, 0);		/* f */
ctx              2005 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);		/* f/1f */
ctx              2009 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* RO */
ctx              2011 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 5, 0);	/* RO */
ctx              2013 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff DMA_VTXBUF */
ctx              2016 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x41, 0);	/* RO */
ctx              2018 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x11, 0);	/* RO */
ctx              2020 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x50, 0);	/* RO */
ctx              2022 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x58, 0);	/* RO */
ctx              2024 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
ctx              2025 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */
ctx              2026 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 1 UNK0DEC */
ctx              2028 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt*4, 0);	/* ffffffff VTX_ATTR */
ctx              2029 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* f/1f, 0, 0, 0 */
ctx              2032 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1d, 0);	/* RO */
ctx              2034 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x16, 0);	/* RO */
ctx              2036 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
ctx              2037 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */
ctx              2040 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* RO */
ctx              2042 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xc, 0);	/* RO */
ctx              2044 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 7, 0);	/* RO */
ctx              2046 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0xa, 0);		/* RO */
ctx              2054 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x20, 0);	/* ffffffff */
ctx              2055 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x200, 0);	/* ffffffff */
ctx              2056 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* 7f/ff, 0, 0, 0 */
ctx              2057 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff */
ctx              2060 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 113/111 */
ctx              2061 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
ctx              2062 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */
ctx              2063 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, acnt/8, 0);	/* ffffffff VTX_ATTR_MASK_UNK0DD0 */
ctx              2064 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f VP_GP_BUILTIN_ATTR_EN */
ctx              2065 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2068 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 7, 0);	/* weird... */
ctx              2070 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 5, 0);	/* weird... */
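The vfetch gene above sizes many of its runs from a vertex-attribute count, acnt. A hedged sketch of the pattern, assuming acnt is initialised the way the emitted counts suggest (0x10 base, doubled on NVA3F-class chipsets; IS_NVA3F() is the chipset macro used elsewhere in ctxnv50.c):

	int acnt = 0x10;

	if (IS_NVA3F(device->chipset))
		acnt = 0x20;		/* assumption: doubled attribute count */

	/* Bitmask state emits acnt/8 words per method... */
	xf_emit(ctx, 1, 0xf);		/* ffffffff VP_ATTR_EN */
	xf_emit(ctx, (acnt/8)-1, 0);	/* ffffffff VP_ATTR_EN */

	/* ...while per-attribute state (VERTEX_ARRAY_*, VERTEX_LIMIT_*)
	 * emits acnt words per method. */
	xf_emit(ctx, acnt, 0);		/* 00000fff VERTEX_ARRAY_STRIDE */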
ctx              2074 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_eng2d(struct nvkm_grctx *ctx)
ctx              2076 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2079 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 0001ffff CLIP_X, CLIP_Y */
ctx              2080 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 0000ffff CLIP_W, CLIP_H */
ctx              2081 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 CLIP_ENABLE */
ctx              2085 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 0000ffff IFC_CLIP_X, Y */
ctx              2086 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 1);	/* 0000ffff IFC_CLIP_W, H */
ctx              2087 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 IFC_CLIP_ENABLE */
ctx              2089 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2090 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* 0001ffff DST_WIDTH */
ctx              2091 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* 0001ffff DST_HEIGHT */
ctx              2092 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f[NV50]/7f[NV84+] DST_FORMAT */
ctx              2093 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff DRAW_POINT_X */
ctx              2094 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 8);		/* 0000000f DRAW_UNK58C */
ctx              2095 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff SIFC_DST_X_FRACT */
ctx              2096 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff SIFC_DST_X_INT */
ctx              2097 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff SIFC_DST_Y_FRACT */
ctx              2098 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff SIFC_DST_Y_INT */
ctx              2099 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff SIFC_DX_DU_FRACT */
ctx              2100 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff SIFC_DX_DU_INT */
ctx              2101 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff SIFC_DY_DV_FRACT */
ctx              2102 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff SIFC_DY_DV_INT */
ctx              2103 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000ffff SIFC_WIDTH */
ctx              2104 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000ffff SIFC_HEIGHT */
ctx              2105 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xcf);		/* 000000ff SIFC_FORMAT */
ctx              2106 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 SIFC_BITMAP_UNK808 */
ctx              2107 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 SIFC_BITMAP_LINE_PACK_MODE */
ctx              2108 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SIFC_BITMAP_LSB_FIRST */
ctx              2109 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SIFC_BITMAP_ENABLE */
ctx              2110 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff BLIT_DST_X */
ctx              2111 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff BLIT_DST_Y */
ctx              2112 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_DU_DX_FRACT */
ctx              2113 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff BLIT_DU_DX_INT */
ctx              2114 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_DV_DY_FRACT */
ctx              2115 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff BLIT_DV_DY_INT */
ctx              2116 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000ffff BLIT_DST_W */
ctx              2117 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000ffff BLIT_DST_H */
ctx              2118 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_SRC_X_FRACT */
ctx              2119 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff BLIT_SRC_X_INT */
ctx              2120 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_SRC_Y_FRACT */
ctx              2121 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK888 */
ctx              2122 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000003f UNK884 */
ctx              2123 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 UNK880 */
ctx              2124 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f tesla UNK0FB8 */
ctx              2125 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x15);		/* 000000ff tesla UNK128C */
ctx              2126 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 00000007, ffff0ff3 */
ctx              2127 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK260 */
ctx              2128 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4444480);	/* 1fffffff UNK870 */
ctx              2130 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);
ctx              2132 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x27, 0);
ctx              2136 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_csched(struct nvkm_grctx *ctx)
ctx              2138 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2141 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 00007fff WINDOW_OFFSET_XY... what is it doing here??? */
ctx              2142 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1924 */
ctx              2143 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 WINDOW_ORIGIN */
ctx              2144 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              2145 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000003ff */
ctx              2147 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff turing UNK364 */
ctx              2148 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f turing UNK36C */
ctx              2149 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff USER_PARAM_COUNT */
ctx              2150 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* 00ffffff turing UNK384 */
ctx              2151 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f turing UNK2A0 */
ctx              2152 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff GRIDID */
ctx              2153 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10001);	/* ffffffff GRIDDIM_XY */
ctx              2154 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              2155 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10001);	/* ffffffff BLOCKDIM_XY */
ctx              2156 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000ffff BLOCKDIM_Z */
ctx              2157 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10001);	/* 00ffffff BLOCK_ALLOC */
ctx              2158 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 LANES32 */
ctx              2159 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff FP_REG_ALLOC_TEMP */
ctx              2160 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 REG_MODE */
ctx              2162 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x40, 0);		/* ffffffff USER_PARAM */
ctx              2166 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2167 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x80, 0);	/* fff */
ctx              2168 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2169 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10*2, 0);	/* ffffffff, 1f */
ctx              2172 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2173 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x60, 0);	/* fff */
ctx              2174 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2175 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xc*2, 0);	/* ffffffff, 1f */
ctx              2179 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2180 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x40, 0);	/* fff */
ctx              2181 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2182 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8*2, 0);	/* ffffffff, 1f */
ctx              2186 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* f, 0, 0, 0 */
ctx              2187 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);	/* fff */
ctx              2188 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2189 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2*2, 0);	/* ffffffff, 1f */
ctx              2192 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2193 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xf0, 0);	/* fff */
ctx              2194 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2195 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1e*2, 0);	/* ffffffff, 1f */
ctx              2198 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2199 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x60, 0);	/* fff */
ctx              2200 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2201 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xc*2, 0);	/* ffffffff, 1f */
ctx              2205 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 7, 0, 0, 0, ... */
ctx              2206 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x30, 0);	/* fff */
ctx              2207 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2208 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 6*2, 0);	/* ffffffff, 1f */
ctx              2211 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x12, 0);
ctx              2215 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* f, 0, 0, 0 */
ctx              2216 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);	/* fff */
ctx              2217 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* ff, fff */
ctx              2218 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2*2, 0);	/* ffffffff, 1f */
ctx              2221 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f */
ctx              2222 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000000 */
ctx              2223 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              2224 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000001f */
ctx              2225 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff */
ctx              2226 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 turing UNK35C */
ctx              2227 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              2228 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff */
ctx              2229 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 turing UNK35C */
ctx              2230 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              2231 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff */
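The csched gene is the clearest example of the per-chipset branching visible throughout this file: each branch emits the same kinds of runs, only with counts scaled to the unit population of that chipset. A sketch of the shape, with the case labels inferred from the emitted counts and therefore an assumption:

	switch (device->chipset) {
	case 0x50:
	case 0x92:
		xf_emit(ctx, 8, 0);		/* 7, 0, 0, 0, ... */
		xf_emit(ctx, 0x80, 0);		/* fff */
		xf_emit(ctx, 2, 0);		/* ff, fff */
		xf_emit(ctx, 0x10*2, 0);	/* ffffffff, 1f */
		break;
	/* intermediate cases shrink or grow the second and fourth runs
	 * in step (0x60/0xc*2, 0x40/8*2, ...) */
	default:
		xf_emit(ctx, 4, 0);		/* f, 0, 0, 0 */
		xf_emit(ctx, 0x10, 0);		/* fff */
		xf_emit(ctx, 2, 0);		/* ff, fff */
		xf_emit(ctx, 2*2, 0);		/* ffffffff, 1f */
		break;
	}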
ctx              2235 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_unk1cxx(struct nvkm_grctx *ctx)
ctx              2237 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2238 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 00007fff WINDOW_OFFSET_XY */
ctx              2239 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x3f800000);	/* ffffffff LINE_WIDTH */
ctx              2240 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_SMOOTH_ENABLE */
ctx              2241 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1658 */
ctx              2242 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POLYGON_SMOOTH_ENABLE */
ctx              2243 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000001 POLYGON_OFFSET_*_ENABLE */
ctx              2244 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000000f CULL_MODE */
ctx              2245 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);		/* 0000001f POLYGON_MODE */
ctx              2246 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2247 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POINT_SPRITE_ENABLE */
ctx              2248 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK165C */
ctx              2249 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* 00000001 SCISSOR_ENABLE */
ctx              2250 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2251 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_STIPPLE_ENABLE */
ctx              2252 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x00ffff00);	/* 00ffffff LINE_STIPPLE_PATTERN */
ctx              2253 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff POLYGON_OFFSET_UNITS */
ctx              2254 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff POLYGON_OFFSET_FACTOR */
ctx              2255 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK1668 */
ctx              2256 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 07ffffff SCREEN_SCISSOR */
ctx              2257 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1900 */
ctx              2258 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2259 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2260 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2261 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 0000007f RT_FORMAT */
ctx              2262 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000007f RT_FORMAT */
ctx              2263 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 00000001 RT_HORIZ_LINEAR */
ctx              2264 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              2265 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 ALPHA_TEST_ENABLE */
ctx              2266 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 ALPHA_TEST_FUNC */
ctx              2268 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);	/* 00000003 UNK16B4 */
ctx              2270 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 00000001 UNK16B4 */
ctx              2271 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 MULTISAMPLE_CTRL */
ctx              2272 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK0F90 */
ctx              2273 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 tesla UNK143C */
ctx              2274 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0x04000000);	/* 07ffffff tesla UNK0D6C */
ctx              2275 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              2276 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2277 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              2278 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 5);		/* 0000000f UNK1408 */
ctx              2279 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x52);		/* 000001ff SEMANTIC_PTSZ */
ctx              2280 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff POINT_SIZE */
ctx              2281 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              2282 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 tesla UNK0FB4 */
ctx              2284 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 3ff */
ctx              2285 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 00000001 tesla UNK1110 */
ctx              2288 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 tesla UNK1928 */
ctx              2289 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* ffffffff DEPTH_RANGE_NEAR */
ctx              2290 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0x3f800000);	/* ffffffff DEPTH_RANGE_FAR */
ctx              2291 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 000000ff VIEW_VOLUME_CLIP_CTRL */
ctx              2292 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x20, 0);		/* 07ffffff VIEWPORT_HORIZ, then VIEWPORT_VERT. (W&0x3fff)<<13 | (X&0x1fff). */
ctx              2293 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK187C */
ctx              2294 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 WINDOW_ORIGIN */
ctx              2295 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2296 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2297 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2298 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_MASK */
ctx              2299 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x8100c12);	/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              2300 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 5);		/* 0000000f tesla UNK1220 */
ctx              2301 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2302 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff tesla UNK1A20 */
ctx              2303 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2304 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              2305 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0xffff);	/* 0000ffff MSAA_MASK */
ctx              2307 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);	/* 00000003 tesla UNK1100 */
ctx              2309 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1c, 0);	/* RO */
ctx              2311 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x9, 0);
ctx              2312 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK1534 */
ctx              2313 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_SMOOTH_ENABLE */
ctx              2314 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_STIPPLE_ENABLE */
ctx              2315 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x00ffff00);	/* 00ffffff LINE_STIPPLE_PATTERN */
ctx              2316 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);		/* 0000001f POLYGON_MODE */
ctx              2317 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 WINDOW_ORIGIN */
ctx              2319 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);	/* 00000003 tesla UNK1100 */
ctx              2320 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 3ff */
ctx              2325 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x25, 0);
ctx              2327 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x3b, 0);
ctx              2331 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_strmout(struct nvkm_grctx *ctx)
ctx              2333 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2334 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x102);		/* 0000ffff STRMOUT_BUFFER_CTRL */
ctx              2335 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff STRMOUT_PRIMITIVE_COUNT */
ctx              2336 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 4);		/* 000000ff STRMOUT_NUM_ATTRIBS */
ctx              2338 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff UNK1A8C */
ctx              2339 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff UNK1780 */
ctx              2341 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              2342 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 0000007f VP_RESULT_MAP_SIZE */
ctx              2343 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2345 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x3ff);	/* 000003ff tesla UNK0D68 */
ctx              2347 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x7ff);	/* 000007ff tesla UNK0D68 */
ctx              2348 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2350 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x102);		/* 0000ffff STRMOUT_BUFFER_CTRL */
ctx              2351 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff STRMOUT_PRIMITIVE_COUNT */
ctx              2352 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* 000000ff STRMOUT_ADDRESS_HIGH */
ctx              2353 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff STRMOUT_ADDRESS_LOW */
ctx              2354 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 4);		/* 000000ff STRMOUT_NUM_ATTRIBS */
ctx              2356 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff UNK1A8C */
ctx              2357 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);	/* ffffffff UNK1780 */
ctx              2359 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_STRMOUT */
ctx              2360 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_QUERY */
ctx              2361 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff QUERY_ADDRESS_HIGH */
ctx              2362 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff QUERY_ADDRESS_LOW QUERY_COUNTER */
ctx              2363 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff */
ctx              2364 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2366 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x20, 0);		/* ffffffff STRMOUT_MAP */
ctx              2367 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f */
ctx              2368 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000000? */
ctx              2369 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff */
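
[editorial note] Every construct_* generator quoted in this index is built from one primitive: xf_emit(ctx, count, value) reserves `count` context slots and, on a value-filling pass, initializes them to `value`; the trailing comments record the writable bit mask and, where known, which MMIO method the slot shadows. Because this index only lists lines containing "ctx", the helper's body never appears above. The following is a minimal sketch of what such a two-pass emitter plausibly looks like; the type names, enum values, and one-word granularity are illustrative assumptions, not the driver's actual definitions.

	/*
	 * Hedged sketch of the xf_emit() pattern, NOT the driver's real code:
	 * a sizing pass only advances the cursor; a value pass also stores
	 * `num` copies of `val` into the context image.  All names here are
	 * assumptions made for illustration.
	 */
	#include <stdint.h>

	enum grctx_mode { GRCTX_SIZE, GRCTX_VALS };

	struct grctx {
		enum grctx_mode mode;	/* which pass is running */
		uint32_t *image;	/* context image, valid on the value pass */
		unsigned pos;		/* cursor, in 32-bit words */
	};

	void xf_emit(struct grctx *ctx, unsigned num, uint32_t val)
	{
		if (ctx->mode == GRCTX_VALS)	/* value pass: fill the slots */
			for (unsigned i = 0; i < num; i++)
				ctx->image[ctx->pos + i] = val;
		ctx->pos += num;		/* both passes: reserve the space */
	}
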
ctx              2373 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_ropm1(struct nvkm_grctx *ctx)
ctx              2375 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2376 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4e3bfdf);	/* ffffffff UNK0D64 */
ctx              2377 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4e3bfdf);	/* ffffffff UNK0DF4 */
ctx              2378 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2379 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000003ff */
ctx              2381 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x11);	/* 000000ff tesla UNK1968 */
ctx              2382 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2386 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_ropm2(struct nvkm_grctx *ctx)
ctx              2388 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2390 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_QUERY */
ctx              2391 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2392 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff */
ctx              2393 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff QUERY_ADDRESS_HIGH */
ctx              2394 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff QUERY_ADDRESS_LOW, COUNTER */
ctx              2395 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              2396 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7 */
ctx              2398 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_QUERY */
ctx              2399 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff QUERY_ADDRESS_HIGH */
ctx              2400 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff QUERY_ADDRESS_LOW, COUNTER */
ctx              2401 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4e3bfdf);	/* ffffffff UNK0D64 */
ctx              2402 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4e3bfdf);	/* ffffffff UNK0DF4 */
ctx              2403 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 eng2d UNK260 */
ctx              2404 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2405 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2407 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x11);	/* 000000ff tesla UNK1968 */
ctx              2408 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2412 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_gene_ropc(struct nvkm_grctx *ctx)
ctx              2414 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2423 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* f/7 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2424 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2425 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_BACK_FUNC_FUNC */
ctx              2426 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_MASK */
ctx              2427 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_MASK */
ctx              2428 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_BACK_OP_FAIL, ZFAIL, ZPASS */
ctx              2429 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 tesla UNK143C */
ctx              2430 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2431 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic2);	/* 001fffff tesla UNK0F78 */
ctx              2432 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2433 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 DEPTH_TEST_FUNC */
ctx              2434 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2435 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2437 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2438 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_FRONT_FUNC_FUNC */
ctx              2439 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_MASK */
ctx              2440 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              2441 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_FRONT_OP_FAIL, ZFAIL, ZPASS */
ctx              2442 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2444 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x15);	/* 000000ff */
ctx              2445 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2446 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2447 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 3ff/ff VIEW_VOLUME_CLIP_CTRL */
ctx              2448 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff CLEAR_DEPTH */
ctx              2449 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2450 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2451 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2453 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);	/* ff, ffffffff, ffffffff */
ctx              2454 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 4);	/* 7 */
ctx              2455 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x400);	/* fffffff */
ctx              2456 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x300);	/* ffff */
ctx              2457 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x1001);	/* 1fff */
ctx              2460 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				xf_emit(ctx, 1, 0);	/* 0000000f UNK15C8 */
ctx              2462 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				xf_emit(ctx, 1, 0x15);	/* ff */
ctx              2465 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2466 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2467 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_BACK_FUNC_FUNC */
ctx              2468 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_MASK */
ctx              2469 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2470 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 tesla UNK143C */
ctx              2471 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2472 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 DEPTH_TEST_FUNC */
ctx              2473 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2474 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2475 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_FRONT_FUNC_FUNC */
ctx              2476 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_MASK */
ctx              2477 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2478 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2479 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2480 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 7f/ff VIEW_VOLUME_CLIP_CTRL */
ctx              2481 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2482 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2483 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2484 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2485 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1900 */
ctx              2486 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_BACK_FUNC_FUNC */
ctx              2487 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_MASK */
ctx              2488 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_REF */
ctx              2489 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff DEPTH_BOUNDS */
ctx              2490 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2491 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 DEPTH_TEST_FUNC */
ctx              2492 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2493 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2494 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f */
ctx              2495 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK0FB0 */
ctx              2496 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_FRONT_FUNC_FUNC */
ctx              2497 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_MASK */
ctx              2498 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_REF */
ctx              2499 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2500 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2501 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 7f/ff VIEW_VOLUME_CLIP_CTRL */
ctx              2502 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0);		/* ffffffff DEPTH_RANGE_NEAR */
ctx              2503 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x10, 0x3f800000);	/* ffffffff DEPTH_RANGE_FAR */
ctx              2504 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2505 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2506 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_BACK_FUNC_FUNC */
ctx              2507 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_MASK */
ctx              2508 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_FUNC_REF */
ctx              2509 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_MASK */
ctx              2510 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_BACK_OP_FAIL, ZFAIL, ZPASS */
ctx              2511 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff DEPTH_BOUNDS */
ctx              2512 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2513 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 DEPTH_TEST_FUNC */
ctx              2514 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2515 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2516 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff CLEAR_STENCIL */
ctx              2517 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 STENCIL_FRONT_FUNC_FUNC */
ctx              2518 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_MASK */
ctx              2519 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_FUNC_REF */
ctx              2520 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              2521 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_FRONT_OP_FAIL, ZFAIL, ZPASS */
ctx              2522 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2523 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2524 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10);		/* 7f/ff VIEW_VOLUME_CLIP_CTRL */
ctx              2525 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2526 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x3f);		/* 0000003f UNK1590 */
ctx              2527 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2528 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2529 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffff0ff3, ffff */
ctx              2530 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK0FB0 */
ctx              2531 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              2532 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2533 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2534 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2535 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff CLEAR_DEPTH */
ctx              2536 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK19CC */
ctx              2538 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);
ctx              2539 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x1001);
ctx              2540 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xb, 0);
ctx              2542 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000007 */
ctx              2543 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 tesla UNK1534 */
ctx              2544 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2545 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 0);	/* 00000001 BLEND_ENABLE */
ctx              2546 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffff0ff3 */
ctx              2548 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2549 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2550 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2551 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2552 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f */
ctx              2553 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LOGIC_OP_ENABLE */
ctx              2555 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 0000000f LOGIC_OP */
ctx              2556 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000000ff */
ctx              2558 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 OPERATION */
ctx              2559 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2560 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 UNK0F90 */
ctx              2561 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 1);		/* 00000007 BLEND_EQUATION_RGB, ALPHA */
ctx              2562 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK133C */
ctx              2563 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_RGB */
ctx              2564 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_RGB */
ctx              2565 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_ALPHA */
ctx              2566 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_ALPHA */
ctx              2567 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 */
ctx              2568 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic2);	/* 001fffff tesla UNK0F78 */
ctx              2569 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2570 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2572 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 tesla UNK12E4 */
ctx              2573 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_RGB */
ctx              2574 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_ALPHA */
ctx              2575 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000001 IBLEND_UNK00 */
ctx              2576 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_FUNC_SRC_RGB */
ctx              2577 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_FUNC_DST_RGB */
ctx              2578 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_FUNC_SRC_ALPHA */
ctx              2579 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_FUNC_DST_ALPHA */
ctx              2580 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 tesla UNK1140 */
ctx              2581 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 00000001 */
ctx              2582 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2583 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 0000000f */
ctx              2584 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              2585 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff */
ctx              2586 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 00000001 */
ctx              2587 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2588 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 */
ctx              2589 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000003ff */
ctx              2591 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 00000001 */
ctx              2592 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000007 */
ctx              2593 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              2594 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff */
ctx              2595 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 00000001 */
ctx              2597 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000007 MULTISAMPLE_SAMPLES_LOG2 */
ctx              2598 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 tesla UNK1430 */
ctx              2599 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff tesla UNK1A3C */
ctx              2601 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff CLEAR_COLOR */
ctx              2602 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff BLEND_COLOR A R G B */
ctx              2603 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000fff eng2d UNK2B0 */
ctx              2605 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 00000001 */
ctx              2606 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000003ff */
ctx              2607 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 00000001 BLEND_ENABLE */
ctx              2608 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK133C */
ctx              2609 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_RGB */
ctx              2610 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_RGB */
ctx              2611 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_RGB */
ctx              2612 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_ALPHA */
ctx              2613 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_ALPHA */
ctx              2614 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_ALPHA */
ctx              2615 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK19C0 */
ctx              2616 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LOGIC_OP_ENABLE */
ctx              2617 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f LOGIC_OP */
ctx              2619 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 UNK12E4? NVA3+ only? */
ctx              2621 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000001 IBLEND_UNK00 */
ctx              2622 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_RGB */
ctx              2623 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_FUNC_SRC_RGB */
ctx              2624 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_FUNC_DST_RGB */
ctx              2625 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_ALPHA */
ctx              2626 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_FUNC_SRC_ALPHA */
ctx              2627 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_FUNC_DST_ALPHA */
ctx              2628 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 tesla UNK15C4 */
ctx              2629 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 */
ctx              2630 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 tesla UNK1140 */
ctx              2632 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2633 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2634 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 PATTERN_COLOR_FORMAT */
ctx              2635 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff PATTERN_MONO_COLOR */
ctx              2636 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PATTERN_MONO_FORMAT */
ctx              2637 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff PATTERN_MONO_BITMAP */
ctx              2638 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 PATTERN_SELECT */
ctx              2639 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff ROP */
ctx              2640 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff BETA1 */
ctx              2641 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff BETA4 */
ctx              2642 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 OPERATION */
ctx              2643 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x50, 0);		/* 10x ffffff, ffffff, ffffff, ffffff, 3 PATTERN */
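
[editorial note] The jumps in the quoted source line numbers (e.g. 2455 to 2460, 2536 to 2538) are mostly `if (device->chipset ...)` tests, which contain no "ctx" and so are invisible to this index; each extra-indented xf_emit() above sits under such a guard. A hedged sketch of that gating pattern follows, reusing the xf_emit() sketch given earlier; the chipset number and emitted values below are made up for the example.

	#include <stdint.h>

	struct grctx;				/* from the sketch above */
	void xf_emit(struct grctx *ctx, unsigned num, uint32_t val);

	/*
	 * Illustrative generator: common words are emitted unconditionally,
	 * extras only for newer chipsets -- mirroring how the indented
	 * xf_emit() calls in the listing hide under chipset tests.
	 */
	void gene_example(struct grctx *ctx, int chipset)
	{
		xf_emit(ctx, 1, 0x102);		/* always present */
		if (chipset >= 0xa0)		/* hypothetical NVA0+ branch */
			xf_emit(ctx, 4, 0);	/* wider state block */
		else
			xf_emit(ctx, 2, 0);
		xf_emit(ctx, 1, 0);		/* common tail */
	}

Since the branches emit different word counts, the context-image layout and total size depend on the chipset, which is why both the sizing pass and the value pass must take exactly the same branches.
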
ctx              2647 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_unk84xx(struct nvkm_grctx *ctx)
ctx              2649 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2666 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2667 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff[NVA0+] VP_REG_ALLOC_RESULT */
ctx              2668 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2669 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2670 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 111/113[NVA0+] */
ctx              2672 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1f, 0);	/* ffffffff */
ctx              2674 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x0f, 0);	/* ffffffff */
ctx              2676 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x10, 0);	/* fffffff VP_RESULT_MAP_1 up */
ctx              2677 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* f/1f[NVA3], fffffff/ffffffff[NVA0+] */
ctx              2678 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_REG_ALLOC_RESULT */
ctx              2679 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_RESULT_MAP_SIZE */
ctx              2681 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x03020100);	/* ffffffff */
ctx              2683 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x00608080);	/* fffffff VP_RESULT_MAP_0 */
ctx              2684 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2685 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2686 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 111/113, 7f/ff */
ctx              2687 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_RESULT_MAP_SIZE */
ctx              2688 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2689 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2690 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_REG_ALLOC_RESULT */
ctx              2691 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              2692 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 0000ffff GP_VERTEX_OUTPUT_COUNT */
ctx              2694 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, magic3);	/* 00007fff tesla UNK141C */
ctx              2695 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_RESULT_MAP_SIZE */
ctx              2696 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2697 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 111/113 */
ctx              2698 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x1f, 0);		/* ffffffff GP_RESULT_MAP_1 up */
ctx              2699 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000001f */
ctx              2700 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff */
ctx              2701 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2702 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_REG_ALLOC_RESULT */
ctx              2703 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x80);		/* 0000ffff GP_VERTEX_OUTPUT_COUNT */
ctx              2704 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              2705 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x03020100);	/* ffffffff GP_RESULT_MAP_0 */
ctx              2706 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 3);		/* 00000003 GP_OUTPUT_PRIMITIVE_TYPE */
ctx              2708 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, magic3);	/* 7fff tesla UNK141C */
ctx              2709 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_RESULT_MAP_SIZE */
ctx              2710 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PROVOKING_VERTEX_LAST */
ctx              2711 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2712 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 111/113 */
ctx              2713 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2714 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              2715 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 3);		/* 00000003 GP_OUTPUT_PRIMITIVE_TYPE */
ctx              2716 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PROVOKING_VERTEX_LAST */
ctx              2717 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2718 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK13A0 */
ctx              2719 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 7f/ff VP_REG_ALLOC_RESULT */
ctx              2720 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2721 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              2722 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 111/113 */
ctx              2724 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x1020, 0);	/* 4 x (0x400 x 0xffffffff, ff, 0, 0, 0, 4 x ffffffff) */
ctx              2726 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0xa20, 0);	/* 4 x (0x280 x 0xffffffff, ff, 0, 0, 0, 4 x ffffffff) */
ctx              2728 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x210, 0);	/* ffffffff */
ctx              2730 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x410, 0);	/* ffffffff */
ctx              2731 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              2732 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 000000ff GP_RESULT_MAP_SIZE */
ctx              2733 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 3);		/* 00000003 GP_OUTPUT_PRIMITIVE_TYPE */
ctx              2734 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 PROVOKING_VERTEX_LAST */
ctx              2735 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
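
[editorial note] Generators like the ones above are only useful when run twice with identical control flow: once to learn how large the context image must be, then again to fill in these defaults. A minimal sketch of such a two-pass driver follows; grctx_generate() and construct_example() are hypothetical names for illustration, and the real nvkm code wires this up differently.

	#include <stdint.h>
	#include <stdlib.h>

	/* Definitions matching the earlier sketch. */
	enum grctx_mode { GRCTX_SIZE, GRCTX_VALS };
	struct grctx { enum grctx_mode mode; uint32_t *image; unsigned pos; };
	void xf_emit(struct grctx *ctx, unsigned num, uint32_t val);

	void construct_example(struct grctx *ctx);	/* any generator body */

	/* Hypothetical two-pass driver: measure, allocate, fill. */
	uint32_t *grctx_generate(unsigned *words)
	{
		struct grctx ctx = { .mode = GRCTX_SIZE };

		construct_example(&ctx);	/* pass 1: count words only */
		ctx.image = calloc(ctx.pos, sizeof(*ctx.image));
		if (!ctx.image)
			return NULL;

		*words = ctx.pos;
		ctx.mode = GRCTX_VALS;
		ctx.pos = 0;
		construct_example(&ctx);	/* pass 2: emit the defaults */
		return ctx.image;
	}
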
ctx              2739 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_tprop(struct nvkm_grctx *ctx)
ctx              2741 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              2753 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 ALPHA_TEST_FUNC */
ctx              2754 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff ALPHA_TEST_REF */
ctx              2755 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 ALPHA_TEST_ENABLE */
ctx              2757 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000000f UNK16A0 */
ctx              2758 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2759 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2760 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_BACK_MASK */
ctx              2761 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_BACK_OP_FAIL, ZFAIL, ZPASS */
ctx              2762 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0);		/* ffffffff BLEND_COLOR */
ctx              2763 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK19C0 */
ctx              2764 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK0FDC */
ctx              2765 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2766 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2767 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2768 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2769 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LOGIC_OP_ENABLE */
ctx              2770 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff[NV50]/3ff[NV84+] */
ctx              2771 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              2772 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0xffff);	/* 0000ffff MSAA_MASK */
ctx              2773 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              2774 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);		/* 00000007 STENCIL_FRONT_OP_FAIL, ZFAIL, ZPASS */
ctx              2775 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2776 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_BACK_ENABLE */
ctx              2777 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 00007fff WINDOW_OFFSET_XY */
ctx              2778 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK19CC */
ctx              2779 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7 */
ctx              2780 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              2781 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2782 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2783 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff COLOR_KEY */
ctx              2784 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 COLOR_KEY_ENABLE */
ctx              2785 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 COLOR_KEY_FORMAT */
ctx              2786 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffffffff SIFC_BITMAP_COLOR */
ctx              2787 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 SIFC_BITMAP_WRITE_BIT0_ENABLE */
ctx              2788 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 ALPHA_TEST_FUNC */
ctx              2789 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 ALPHA_TEST_ENABLE */
ctx              2791 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);	/* 00000003 tesla UNK16B4 */
ctx              2792 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              2793 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 tesla UNK1298 */
ctx              2795 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 00000001 tesla UNK16B4 */
ctx              2796 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              2798 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 MULTISAMPLE_CTRL */
ctx              2800 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2801 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 00000001 BLEND_ENABLE */
ctx              2802 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_ALPHA */
ctx              2803 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_ALPHA */
ctx              2804 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_ALPHA */
ctx              2805 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_RGB */
ctx              2806 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_RGB */
ctx              2807 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_RGB */
ctx              2809 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 UNK12E4 */
ctx              2810 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_RGB */
ctx              2811 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000007 IBLEND_EQUATION_ALPHA */
ctx              2812 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 00000001 IBLEND_UNK00 */
ctx              2813 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_SRC_RGB */
ctx              2814 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_DST_RGB */
ctx              2815 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);	/* 0000001f IBLEND_SRC_ALPHA */
ctx              2816 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);	/* 0000001f IBLEND_DST_ALPHA */
ctx              2817 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 UNK1140 */
ctx              2819 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK133C */
ctx              2820 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2821 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2822 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2823 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2824 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LOGIC_OP_ENABLE */
ctx              2825 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2826 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              2827 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 UNK0F90 */
ctx              2828 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 FRAMEBUFFER_SRGB */
ctx              2829 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7 */
ctx              2830 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2831 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2832 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 OPERATION */
ctx              2833 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xcf);		/* 000000ff SIFC_FORMAT */
ctx              2834 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xcf);		/* 000000ff DRAW_COLOR_FORMAT */
ctx              2835 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xcf);		/* 000000ff SRC_FORMAT */
ctx              2837 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2838 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2839 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f[NVA3] MULTISAMPLE_SAMPLES_LOG2 */
ctx              2840 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 00000001 BLEND_ENABLE */
ctx              2841 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_ALPHA */
ctx              2842 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_ALPHA */
ctx              2843 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_ALPHA */
ctx              2844 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0000001f BLEND_FUNC_DST_RGB */
ctx              2845 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000007 BLEND_EQUATION_RGB */
ctx              2846 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 0000001f BLEND_FUNC_SRC_RGB */
ctx              2847 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK133C */
ctx              2848 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2849 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 1);		/* 00000001 UNK19E0 */
ctx              2850 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2851 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2852 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2853 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2854 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2855 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic2);	/* 001fffff tesla UNK0F78 */
ctx              2856 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2857 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2858 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2859 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2861 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2863 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ff */
ctx              2865 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);	/* 1, 7, 3ff */
ctx              2866 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              2867 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 UNK0F90 */
ctx              2868 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2869 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2870 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              2871 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2872 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2873 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2874 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2875 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2876 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2877 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2878 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2879 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2880 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2881 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2882 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2883 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2884 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2885 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_DU_DX_FRACT */
ctx              2886 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff BLIT_DU_DX_INT */
ctx              2887 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000fffff BLIT_DV_DY_FRACT */
ctx              2888 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 0001ffff BLIT_DV_DY_INT */
ctx              2889 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2890 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic1);	/* 3ff/7ff tesla UNK0D68 */
ctx              2891 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2892 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2893 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2894 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2895 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2896 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2898 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2899 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 0000ffff DMA_COLOR */
ctx              2900 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_GLOBAL */
ctx              2901 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_LOCAL */
ctx              2902 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_STACK */
ctx              2903 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2904 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_DST */
ctx              2905 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7 */
ctx              2906 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2907 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2908 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 000000ff RT_ADDRESS_HIGH */
ctx              2909 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* ffffffff RT_LAYER_STRIDE */
ctx              2910 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* ffffffff RT_ADDRESS_LOW */
ctx              2911 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 8);		/* 0000007f RT_TILE_MODE */
ctx              2912 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2913 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2914 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2915 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0x400);		/* 0fffffff RT_HORIZ */
ctx              2916 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0x300);		/* 0000ffff RT_VERT */
ctx              2917 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00001fff RT_ARRAY_MODE */
ctx              2918 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2919 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2920 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x20);		/* 00000fff DST_TILE_MODE */
ctx              2921 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2922 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* 0001ffff DST_HEIGHT */
ctx              2923 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000007ff DST_LAYER */
ctx              2924 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 DST_LINEAR */
ctx              2925 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff DST_ADDRESS_LOW */
ctx              2926 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff DST_ADDRESS_HIGH */
ctx              2927 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x40);		/* 0007ffff DST_PITCH */
ctx              2928 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* 0001ffff DST_WIDTH */
ctx              2929 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff */
ctx              2930 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 3);		/* 00000003 tesla UNK15AC */
ctx              2931 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2932 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              2933 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 UNK0F90 */
ctx              2934 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2936 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2937 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic2);	/* 001fffff tesla UNK0F78 */
ctx              2938 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2939 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1534 */
ctx              2940 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2941 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);		/* 00000003 tesla UNK143C */
ctx              2942 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2943 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_ZETA */
ctx              2944 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2945 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2946 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2947 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* ffff, ff/3ff */
ctx              2948 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0001ffff GP_BUILTIN_RESULT_EN */
ctx              2949 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2950 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff STENCIL_FRONT_MASK */
ctx              2951 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2952 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2953 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff ZETA_LAYER_STRIDE */
ctx              2954 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 000000ff ZETA_ADDRESS_HIGH */
ctx              2955 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff ZETA_ADDRESS_LOW */
ctx              2956 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 ZETA_TILE_MODE */
ctx              2957 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              2958 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              2959 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x400);		/* 0fffffff ZETA_HORIZ */
ctx              2960 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x300);		/* 0000ffff ZETA_VERT */
ctx              2961 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1001);	/* 00001fff ZETA_ARRAY_MODE */
ctx              2962 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              2963 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2965 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 */
ctx              2966 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2967 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f RT_FORMAT */
ctx              2968 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 3f/7f RT_FORMAT */
ctx              2969 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x0fac6881);	/* 0fffffff RT_CONTROL */
ctx              2970 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf);		/* 0000000f COLOR_MASK */
ctx              2971 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);		/* 0000000f COLOR_MASK */
ctx              2972 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2973 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);		/* 00000001 BLEND_ENABLE */
ctx              2974 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 UNK0F90 */
ctx              2975 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 FRAMEBUFFER_SRGB */
ctx              2976 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7 */
ctx              2977 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LOGIC_OP_ENABLE */
ctx              2979 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 UNK1140 */
ctx              2980 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              2982 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              2983 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK1534 */
ctx              2984 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              2986 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x0fac6881);	/* fffffff */
ctx              2987 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, magic2);	/* 001fffff tesla UNK0F78 */
ctx              2988 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_BOUNDS_EN */
ctx              2989 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              2990 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE_ENABLE */
ctx              2991 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);		/* 3f/7f DST_FORMAT */
ctx              2992 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK0FB0 */
ctx              2993 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              2994 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);		/* 00000007 FP_CONTROL */
ctx              2995 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 STENCIL_FRONT_ENABLE */
ctx              2996 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK15B4 */
ctx              2997 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK19CC */
ctx              2998 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000007 */
ctx              2999 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 SAMPLECNT_ENABLE */
ctx              3000 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000000f ZETA_FORMAT */
ctx              3001 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 ZETA_ENABLE */
ctx              3003 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              3004 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 0000000f tesla UNK15C8 */
ctx              3006 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A3C */
ctx              3008 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 3, 0);		/* 7/f, 1, ffff0ff3 */
ctx              3009 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0xfac6881);	/* fffffff */
ctx              3010 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 4, 0);		/* 1, 1, 1, 3ff */
ctx              3011 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 4);		/* 7 */
ctx              3012 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 1 */
ctx              3013 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 1);		/* 1 */
ctx              3014 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);		/* 7, f */
ctx              3015 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);		/* 1 */
ctx              3016 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 7/f */
ctx              3018 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x9, 0);	/* 1 */
ctx              3020 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0x8, 0);	/* 1 */
ctx              3021 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              3022 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 1 */
ctx              3023 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x11);		/* 7f */
ctx              3024 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 7, 0);		/* 7f */
ctx              3025 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0xfac6881);	/* fffffff */
ctx              3026 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0xf);		/* f */
ctx              3027 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 7, 0);		/* f */
ctx              3028 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x11);		/* 7f */
ctx              3029 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);		/* 1 */
ctx              3030 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 5, 0);		/* 1, 7, 3ff, 3, 7 */
ctx              3032 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 1, 0);	/* 00000001 UNK1140 */
ctx              3033 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
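
Every ctxnv50.c entry in this run funnels through xf_emit(), which either sizes or fills the NV50 graph context image one register default at a time. A plausible reading of the call pattern, written as a minimal self-contained sketch; the zero-skip, the gen_vals flag, and the 8-words-per-lane stride are assumptions about the helper, which is defined elsewhere in ctxnv50.c:

#include <stdint.h>
#include <stdio.h>

struct grctx_sketch {
	int      gen_vals;     /* 0: only size the image, 1: also fill it */
	uint32_t pos;          /* current word offset into the image      */
	uint32_t image[4096];  /* stand-in for the real context buffer    */
};

static void xf_emit_sketch(struct grctx_sketch *ctx, int num, uint32_t val)
{
	/* Zero defaults need no store (the image starts zeroed), so only
	 * the position advances; non-zero defaults are written once per
	 * register, spaced one 8-word strand lane apart. */
	if (val && ctx->gen_vals)
		for (int i = 0; i < num; i++)
			ctx->image[ctx->pos + ((uint32_t)i << 3)] = val;
	ctx->pos += (uint32_t)num << 3;
}

int main(void)
{
	static struct grctx_sketch ctx = { .gen_vals = 1 };
	xf_emit_sketch(&ctx, 8, 1);          /* e.g. the IBLEND_UNK00 run */
	xf_emit_sketch(&ctx, 1, 0x0fac6881); /* e.g. the RT_CONTROL word  */
	printf("context image sized to %u words\n", ctx.pos);
	return 0;
}

Read this way, the annotations to the right of each call (/* 0000001f BLEND_FUNC_DST_ALPHA */ and the like) appear to pair a register's valid-bit mask with its name, while the emitted constant is its initial value.
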
ctx              3039 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_tex(struct nvkm_grctx *ctx)
ctx              3041 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              3042 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 1 LINKED_TSC. yes, 2. */
ctx              3044 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 3 */
ctx              3045 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 1ffff BLIT_DU_DX_INT */
ctx              3046 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* fffff BLIT_DU_DX_FRACT */
ctx              3047 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 1ffff BLIT_DV_DY_INT */
ctx              3048 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* fffff BLIT_DV_DY_FRACT */
ctx              3050 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 3 BLIT_CONTROL */
ctx              3052 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 2, 0);	/* 3ff, 1 */
ctx              3053 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x2a712488);	/* ffffffff SRC_TIC_0 */
ctx              3054 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff SRC_TIC_1 */
ctx              3055 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x4085c000);	/* ffffffff SRC_TIC_2 */
ctx              3056 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x40);		/* ffffffff SRC_TIC_3 */
ctx              3057 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x100);		/* ffffffff SRC_TIC_4 */
ctx              3058 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10100);	/* ffffffff SRC_TIC_5 */
ctx              3059 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x02800000);	/* ffffffff SRC_TIC_6 */
ctx              3060 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff SRC_TIC_7 */
ctx              3062 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 turing UNK358 */
ctx              3063 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff tesla UNK1A34? */
ctx              3064 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 turing UNK37C tesla UNK1690 */
ctx              3065 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 BLIT_CONTROL */
ctx              3066 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000001 turing UNK32C tesla UNK0F94 */
ctx              3068 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* ffffffff tesla UNK1A34? */
ctx              3069 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              3070 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000003ff */
ctx              3071 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              3072 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000003ff */
ctx              3073 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 tesla UNK1664 / turing UNK03E8 */
ctx              3074 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 00000003 */
ctx              3075 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);	/* 000003ff */
ctx              3077 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x6, 0);
ctx              3079 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A34 */
ctx              3080 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_TEXTURE */
ctx              3081 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 0000ffff DMA_SRC */
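
The tail of nv50_gr_construct_xfer_tex() above emits eight fixed SRC_TIC_0..7 words, i.e. one default texture image control entry. A table-driven restatement of those eight calls; emit_word() is a hypothetical stand-in for xf_emit(ctx, 1, val):

#include <stdint.h>
#include <stddef.h>

static const uint32_t src_tic_defaults[8] = {
	0x2a712488, /* SRC_TIC_0 */
	0x00000000, /* SRC_TIC_1 */
	0x4085c000, /* SRC_TIC_2 */
	0x00000040, /* SRC_TIC_3 */
	0x00000100, /* SRC_TIC_4 */
	0x00010100, /* SRC_TIC_5 */
	0x02800000, /* SRC_TIC_6 */
	0x00000000, /* SRC_TIC_7 */
};

static void emit_src_tic(void (*emit_word)(uint32_t val))
{
	/* One TIC entry is eight 32-bit words; this loop mirrors the
	 * eight consecutive xf_emit() calls above. */
	for (size_t i = 0; i < 8; i++)
		emit_word(src_tic_defaults[i]);
}
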
ctx              3085 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_unk8cxx(struct nvkm_grctx *ctx)
ctx              3087 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              3088 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK1534 */
ctx              3089 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              3090 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 2, 0);		/* 7, ffff0ff3 */
ctx              3091 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              3092 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE */
ctx              3093 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x04e3bfdf);	/* ffffffff UNK0D64 */
ctx              3094 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x04e3bfdf);	/* ffffffff UNK0DF4 */
ctx              3095 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK15B4 */
ctx              3096 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_STIPPLE_ENABLE */
ctx              3097 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x00ffff00);	/* 00ffffff LINE_STIPPLE_PATTERN */
ctx              3098 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK0F98 */
ctx              3100 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);	/* 0000001f tesla UNK169C */
ctx              3101 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK1668 */
ctx              3102 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_STIPPLE_ENABLE */
ctx              3103 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x00ffff00);	/* 00ffffff LINE_STIPPLE_PATTERN */
ctx              3104 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POLYGON_SMOOTH_ENABLE */
ctx              3105 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 UNK1534 */
ctx              3106 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              3107 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 tesla UNK1658 */
ctx              3108 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 LINE_SMOOTH_ENABLE */
ctx              3109 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* ffff0ff3 */
ctx              3110 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_TEST_ENABLE */
ctx              3111 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 DEPTH_WRITE */
ctx              3112 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 UNK15B4 */
ctx              3113 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 POINT_SPRITE_ENABLE */
ctx              3114 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);		/* 00000001 tesla UNK165C */
ctx              3115 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x30201000);	/* ffffffff tesla UNK1670 */
ctx              3116 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x70605040);	/* ffffffff tesla UNK1670 */
ctx              3117 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xb8a89888);	/* ffffffff tesla UNK1670 */
ctx              3118 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xf8e8d8c8);	/* ffffffff tesla UNK1670 */
ctx              3119 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);		/* 00000001 VERTEX_TWO_SIDE_ENABLE */
ctx              3120 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1a);		/* 0000001f POLYGON_MODE */
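
The four tesla UNK1670 words near the end of nv50_gr_construct_xfer_unk8cxx() pack sixteen byte-wide entries into a ramp: 0x00,0x10,..,0x70, then 0x88,0x98,..,0xf8 (same 0x10 stride, upper half offset by 8). A sketch that regenerates the packed words to make the pattern explicit; reading them as a lookup ramp is an assumption:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	for (int w = 0; w < 4; w++) {
		uint32_t word = 0;
		for (int b = 0; b < 4; b++) {
			int i = w * 4 + b;                     /* entry 0..15 */
			uint32_t v = i * 0x10 + (i >= 8 ? 8 : 0);
			word |= v << (8 * b);                  /* LE pack     */
		}
		printf("0x%08x\n", word);  /* 30201000 70605040 b8a89888 f8e8d8c8 */
	}
	return 0;
}
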
ctx              3124 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_tp(struct nvkm_grctx *ctx)
ctx              3126 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              3128 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_unk84xx(ctx);
ctx              3129 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_tprop(ctx);
ctx              3130 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_tex(ctx);
ctx              3131 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_unk8cxx(ctx);
ctx              3133 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_tex(ctx);
ctx              3134 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_tprop(ctx);
ctx              3135 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_unk8cxx(ctx);
ctx              3136 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		nv50_gr_construct_xfer_unk84xx(ctx);
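
nv50_gr_construct_xfer_tp() above runs the same four sub-constructors in one of two orders depending on the GPU. A sketch of that dispatch with the sub-constructors passed as callbacks; the chipset < 0xa0 cutoff is an assumption inferred from the driver's usual NV50-versus-NVA0 split, not shown in the lines above:

typedef void (*xfer_fn)(void *grctx);

static void construct_xfer_tp_sketch(void *grctx, int chipset,
				     xfer_fn unk84xx, xfer_fn tprop,
				     xfer_fn tex, xfer_fn unk8cxx)
{
	/* Earlier parts take the unk84xx block first; later parts emit
	 * the texture block first and unk84xx last (assumed cutoff). */
	if (chipset < 0xa0) {
		unk84xx(grctx); tprop(grctx); tex(grctx); unk8cxx(grctx);
	} else {
		tex(grctx); tprop(grctx); unk8cxx(grctx); unk84xx(grctx);
	}
}
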
ctx              3141 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer_mpc(struct nvkm_grctx *ctx)
ctx              3143 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              3168 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* ff */
ctx              3169 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x80);		/* ffffffff tesla UNK1404 */
ctx              3170 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x80007004);	/* ffffffff tesla UNK12B0 */
ctx              3171 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x04000400);	/* ffffffff */
ctx              3173 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 1, 0xc0);	/* 00007fff tesla UNK152C */
ctx              3174 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x1000);	/* 0000ffff tesla UNK0D60 */
ctx              3175 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* ff/3ff */
ctx              3176 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* ffffffff tesla UNK1A30 */
ctx              3178 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 1, 0xe00);		/* 7fff */
ctx              3179 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 1, 0x1e00);	/* 7fff */
ctx              3181 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);		/* 000000ff VP_REG_ALLOC_TEMP */
ctx              3182 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 LINKED_TSC */
ctx              3183 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              3185 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 2, 0x1000);	/* 7fff tesla UNK141C */
ctx              3186 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);		/* 000000ff GP_REG_ALLOC_TEMP */
ctx              3187 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 GP_ENABLE */
ctx              3188 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 4);		/* 000000ff FP_REG_ALLOC_TEMP */
ctx              3189 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 2);		/* 00000003 REG_MODE */
ctx              3191 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0xb, 0);	/* RO */
ctx              3193 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0xc, 0);	/* RO */
ctx              3195 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			xf_emit(ctx, 0xa, 0);	/* RO */
ctx              3197 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x08100c12);		/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              3198 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ff/3ff */
ctx              3200 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x1fe21);	/* 0003ffff tesla UNK0FAC */
ctx              3202 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 3, 0);			/* 7fff, 0, 0 */
ctx              3203 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 tesla UNK1534 */
ctx              3204 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 7/f MULTISAMPLE_SAMPLES_LOG2 */
ctx              3205 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 4, 0xffff);		/* 0000ffff MSAA_MASK */
ctx              3206 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 LANES32 */
ctx              3207 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10001);		/* 00ffffff BLOCK_ALLOC */
ctx              3208 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x10001);		/* ffffffff BLOCKDIM_XY */
ctx              3209 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 0000ffff BLOCKDIM_Z */
ctx              3210 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff SHARED_SIZE */
ctx              3211 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x1fe21);		/* 1ffff/3ffff[NVA0+] tesla UNK0FAC */
ctx              3212 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff tesla UNK1A34 */
ctx              3214 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 1);		/* 0000001f tesla UNK169C */
ctx              3215 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ff/3ff */
ctx              3216 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 1 LINKED_TSC */
ctx              3217 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ff FP_ADDRESS_HIGH */
ctx              3218 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* ffffffff FP_ADDRESS_LOW */
ctx              3219 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x08100c12);		/* 1fffffff FP_INTERPOLANT_CTRL */
ctx              3220 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 00000007 FP_CONTROL */
ctx              3221 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 000000ff FRAG_COLOR_CLAMP_EN */
ctx              3222 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);			/* 00000003 REG_MODE */
ctx              3223 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);			/* 0000007f RT_FORMAT */
ctx              3224 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 7, 0);			/* 0000007f RT_FORMAT */
ctx              3225 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000007 */
ctx              3226 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0xfac6881);		/* 0fffffff RT_CONTROL */
ctx              3227 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000003 MULTISAMPLE_CTRL */
ctx              3229 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 3);		/* 00000003 tesla UNK16B4 */
ctx              3230 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 ALPHA_TEST_ENABLE */
ctx              3231 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000007 ALPHA_TEST_FUNC */
ctx              3232 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 FRAMEBUFFER_SRGB */
ctx              3233 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* ffffffff tesla UNK1400 */
ctx              3234 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 8, 0);			/* 00000001 BLEND_ENABLE */
ctx              3235 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000001 LOGIC_OP_ENABLE */
ctx              3236 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);			/* 0000001f BLEND_FUNC_SRC_RGB */
ctx              3237 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 0000001f BLEND_FUNC_DST_RGB */
ctx              3238 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000007 BLEND_EQUATION_RGB */
ctx              3239 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 2);			/* 0000001f BLEND_FUNC_SRC_ALPHA */
ctx              3240 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 0000001f BLEND_FUNC_DST_ALPHA */
ctx              3241 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000007 BLEND_EQUATION_ALPHA */
ctx              3242 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 00000001 UNK133C */
ctx              3244 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 UNK12E4 */
ctx              3245 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);		/* 0000001f IBLEND_FUNC_SRC_RGB */
ctx              3246 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 0000001f IBLEND_FUNC_DST_RGB */
ctx              3247 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 00000007 IBLEND_EQUATION_RGB */
ctx              3248 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 2);		/* 0000001f IBLEND_FUNC_SRC_ALPHA */
ctx              3249 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 0000001f IBLEND_FUNC_DST_ALPHA */
ctx              3250 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 00000007 IBLEND_EQUATION_ALPHA */
ctx              3251 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 8, 1);		/* 00000001 IBLEND_UNK00 */
ctx              3252 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000003 tesla UNK1928 */
ctx              3253 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0);		/* 00000001 UNK1140 */
ctx              3255 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 00000003 tesla UNK0F90 */
ctx              3256 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 4);			/* 000000ff FP_RESULT_COUNT */
ctx              3259 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x3a0, 0);
ctx              3261 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x3a2, 0);
ctx              3263 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x39f, 0);
ctx              3265 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 0x3a3, 0);
ctx              3266 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0x11);			/* 3f/7f DST_FORMAT */
ctx              3267 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 0);			/* 7 OPERATION */
ctx              3268 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 1, 1);			/* 1 DST_LINEAR */
ctx              3269 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	xf_emit(ctx, 0x2d, 0);
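
The end of nv50_gr_construct_xfer_mpc() zero-fills a large, chipset-dependent run (0x3a0, 0x3a2, 0x39f or 0x3a3 words) before a short fixed epilogue. A sketch of that shape; the mapping from chipset to pad count is not visible above, so the variant index here is purely illustrative:

#include <stdint.h>

static void mpc_tail_sketch(void (*emit)(int num, uint32_t val), int variant)
{
	static const int pad_words[4] = { 0x3a0, 0x3a2, 0x39f, 0x3a3 };

	emit(pad_words[variant & 3], 0);  /* big zero-filled scratch area */
	emit(1, 0x11);                    /* DST_FORMAT default           */
	emit(1, 0);                       /* OPERATION                    */
	emit(1, 1);                       /* DST_LINEAR                   */
	emit(0x2d, 0);                    /* trailing zero run            */
}
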
ctx              3273 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c nv50_gr_construct_xfer2(struct nvkm_grctx *ctx)
ctx              3275 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	struct nvkm_device *device = ctx->device;
ctx              3281 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	offset = (ctx->ctxvals_pos+0x3f)&~0x3f;
ctx              3285 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			ctx->ctxvals_pos = offset + i;
ctx              3289 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				xf_emit(ctx, 1, 0x08100c12); /* FP_INTERPOLANT_CTRL */
ctx              3291 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				nv50_gr_construct_xfer_mpc(ctx);
ctx              3292 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              3293 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 				size = (ctx->ctxvals_pos-offset)/8;
ctx              3297 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset;
ctx              3300 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		xf_emit(ctx, 1, 0x08100c12); /* FP_INTERPOLANT_CTRL */
ctx              3302 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3304 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3305 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              3306 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              3309 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 1;
ctx              3311 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3313 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3314 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              3315 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              3318 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 2;
ctx              3320 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3322 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3324 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3325 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              3326 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              3329 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		ctx->ctxvals_pos = offset + 3;
ctx              3331 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3333 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3335 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			nv50_gr_construct_xfer_mpc(ctx);
ctx              3336 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 		if ((ctx->ctxvals_pos-offset)/8 > size)
ctx              3337 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 			size = (ctx->ctxvals_pos-offset)/8;
ctx              3339 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos = offset + size * 8;
ctx              3340 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	ctx->ctxvals_pos = (ctx->ctxvals_pos+0x3f)&~0x3f;
ctx              3341 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, offset);
ctx              3342 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SET_XFER_POINTER);
ctx              3343 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_lsr (ctx, size);
ctx              3344 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_SEEK_2);
ctx              3345 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_out (ctx, CP_XFER_2);
ctx              3346 drivers/gpu/drm/nouveau/nvkm/engine/gr/ctxnv50.c 	cp_wait(ctx, XFER, BUSY);
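
nv50_gr_construct_xfer2() lays its strands out interleaved: the base offset is aligned to 64 words, strand i starts at offset + i, xf_emit() advances in 8-word steps, and size ends up as the longest strand measured in 8-word groups. A sketch of just that bookkeeping, with stand-in helper names but the same arithmetic as the lines above; the driver then programs the result through CP_SET_XFER_POINTER / CP_SEEK_2 / CP_XFER_2 and waits for the XFER unit to go idle:

#include <stdint.h>

struct xfer2_sketch {
	uint32_t pos;                     /* ctxvals_pos equivalent */
};

static uint32_t xfer2_layout_sketch(struct xfer2_sketch *ctx,
				    void (*strand)(struct xfer2_sketch *, int),
				    int nstrands)
{
	uint32_t offset = (ctx->pos + 0x3f) & ~0x3fu;   /* 64-word align */
	uint32_t size = 0;

	for (int i = 0; i < nstrands; i++) {
		ctx->pos = offset + i;  /* strand i owns lane i of each group */
		strand(ctx, i);
		if ((ctx->pos - offset) / 8 > size)
			size = (ctx->pos - offset) / 8;
	}

	/* Reserve the longest strand's length for all of them, re-align,
	 * and return the size that CP_SEEK_2 / CP_XFER_2 will be given. */
	ctx->pos = offset + size * 8;
	ctx->pos = (ctx->pos + 0x3f) & ~0x3fu;
	return size;
}
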
ctx               183 drivers/gpu/drm/nouveau/nvkm/falcon/v1.c nvkm_falcon_v1_bind_context(struct nvkm_falcon *falcon, struct nvkm_memory *ctx)
ctx               190 drivers/gpu/drm/nouveau/nvkm/falcon/v1.c 	if (ctx == NULL) {
ctx               220 drivers/gpu/drm/nouveau/nvkm/falcon/v1.c 	switch (nvkm_memory_target(ctx)) {
ctx               232 drivers/gpu/drm/nouveau/nvkm/falcon/v1.c 			 ((nvkm_memory_addr(ctx) >> 12) & 0xfffffff) |
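
nvkm_falcon_v1_bind_context() above packs the context memory's 4 KiB-aligned address into a 28-bit page field and selects a target based on nvkm_memory_target(), with a NULL ctx meaning "leave unbound". A sketch of that packing; only the (addr >> 12) & 0xfffffff field is taken from the line above, while the target and valid bit positions are assumptions:

#include <stdint.h>
#include <stdbool.h>

enum mem_target_sketch { TARGET_VRAM, TARGET_HOST };  /* illustrative */

static uint32_t bind_ctx_word_sketch(uint64_t ctx_addr,
				     enum mem_target_sketch target,
				     bool valid)
{
	uint32_t word = 0;

	if (!valid)
		return 0;                    /* NULL ctx: nothing bound    */

	word |= (uint32_t)((ctx_addr >> 12) & 0xfffffff); /* page number  */
	word |= (target == TARGET_VRAM ? 0 : 1u) << 28;   /* assumed bit  */
	word |= 1u << 30;                                 /* assumed valid */
	return word;
}
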
ctx                39 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	struct context *ctx = info;
ctx                44 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	    desc.dig_conn == ctx->desc.dig_conn)
ctx                53 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	struct context *ctx = info;
ctx                56 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	mxms_output_device(mxm, data, &ctx->desc);
ctx                59 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	if ((ctx->outp[0] & 0x0000000f) != ctx->desc.outp_type)
ctx                68 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		u8 link = mxm_sor_map(bios, ctx->desc.dig_conn);
ctx                69 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		if ((ctx->outp[0] & 0x0f000000) != (link & 0x0f) << 24)
ctx                74 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		if ((link & ((ctx->outp[1] & 0x00000030) >> 4)) != link)
ctx                84 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	if (ctx->desc.outp_type == 6 && ctx->desc.conn_type == 6 &&
ctx                85 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	    mxms_foreach(mxm, 0x01, mxm_match_tmds_partner, ctx)) {
ctx                98 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	struct context ctx = { .outp = (u32 *)(bios->data + pdcb) };
ctx               105 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	if (mxms_foreach(mxm, 0x01, mxm_match_dcb, &ctx)) {
ctx               107 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 			   idx, ctx.outp[0], ctx.outp[1]);
ctx               108 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[0] |= 0x0000000f;
ctx               116 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	i2cidx = mxm_ddc_map(bios, ctx.desc.ddc_port);
ctx               117 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	if ((ctx.outp[0] & 0x0000000f) != DCB_OUTPUT_DP)
ctx               123 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[0] &= ~0x000000f0;
ctx               124 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[0] |= i2cidx;
ctx               128 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	switch (ctx.desc.outp_type) {
ctx               133 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		link = mxm_sor_map(bios, ctx.desc.dig_conn) & 0x30;
ctx               134 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[1] &= ~0x00000030;
ctx               135 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[1] |= link;
ctx               147 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	conn += nvbios_connEe(bios, (ctx.outp[0] & 0x0000f000) >> 12, &ver, &len);
ctx               149 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 	switch (ctx.desc.conn_type) {
ctx               151 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[1] |= 0x00000004; /* use_power_scripts */
ctx               161 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[1] |= 0x00010000;
ctx               164 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c 		ctx.outp[1] |= 0x00000004; /* use_power_scripts? */
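
The mxm/nv50.c code threads a small struct context through mxms_foreach() as the opaque info pointer; each match callback casts it back and compares one MXM output descriptor against the DCB entry words held in ctx->outp. A reduced sketch of that pattern, with field names following the lines above and a simplified descriptor layout:

#include <stdint.h>
#include <stdbool.h>

struct mxm_desc_sketch {
	uint8_t outp_type;
	uint8_t conn_type;
	uint8_t dig_conn;
	uint8_t ddc_port;
};

struct mxm_match_ctx {
	uint32_t *outp;              /* points at the 2-word DCB entry */
	struct mxm_desc_sketch desc; /* descriptor under test          */
};

static bool mxm_match_dcb_sketch(void *info)
{
	struct mxm_match_ctx *ctx = info;

	/* The DCB output type lives in the low nibble of outp[0]; bail
	 * out unless it matches the MXM descriptor's output type. */
	if ((ctx->outp[0] & 0x0000000f) != ctx->desc.outp_type)
		return false;
	return true;
}
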
ctx               177 drivers/gpu/drm/omapdrm/dss/dispc.c 	u32		ctx[DISPC_SZ_REGS / sizeof(u32)];
ctx               427 drivers/gpu/drm/omapdrm/dss/dispc.c 	dispc->ctx[DISPC_##reg / sizeof(u32)] = dispc_read_reg(dispc, DISPC_##reg)
ctx               429 drivers/gpu/drm/omapdrm/dss/dispc.c 	dispc_write_reg(dispc, DISPC_##reg, dispc->ctx[DISPC_##reg / sizeof(u32)])
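
The two dispc.c macros above implement register context save/restore: each register's byte offset, divided by sizeof(u32), indexes a shadow array of DISPC_SZ_REGS / sizeof(u32) words. A self-contained sketch with the same shape; the register name and the read/write stubs are illustrative:

#include <stdint.h>

#define DISPC_SZ_REGS  0x1000
#define DISPC_CONTROL  0x0040   /* hypothetical register offset */

struct dispc_sketch {
	uint32_t ctx[DISPC_SZ_REGS / sizeof(uint32_t)];
	uint32_t regs[DISPC_SZ_REGS / sizeof(uint32_t)]; /* fake mmio */
};

static uint32_t dispc_read_reg(struct dispc_sketch *d, uint32_t off)
{
	return d->regs[off / sizeof(uint32_t)];
}

static void dispc_write_reg(struct dispc_sketch *d, uint32_t off, uint32_t v)
{
	d->regs[off / sizeof(uint32_t)] = v;
}

/* Save Register / Restore Register, same shape as the driver's pair:
 * token-paste the register name, index the shadow array by offset/4. */
#define SR(d, reg) \
	((d)->ctx[DISPC_##reg / sizeof(uint32_t)] = \
	 dispc_read_reg(d, DISPC_##reg))
#define RR(d, reg) \
	dispc_write_reg(d, DISPC_##reg, \
			(d)->ctx[DISPC_##reg / sizeof(uint32_t)])

static void dispc_save_sketch(struct dispc_sketch *d)    { SR(d, CONTROL); }
static void dispc_restore_sketch(struct dispc_sketch *d) { RR(d, CONTROL); }

The driver's actual save/restore paths presumably expand to one such SR() or RR() line per register worth preserving across a context loss.
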
ctx               152 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               159 drivers/gpu/drm/omapdrm/dss/dpi.c 	if (ctx->pck_min >= 100000000) {
ctx               167 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx               168 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx               169 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->dispc_cinfo.lck = lck;
ctx               170 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->dispc_cinfo.pck = pck;
ctx               179 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               181 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.mX[ctx->clkout_idx] = m_dispc;
ctx               182 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.clkout[ctx->clkout_idx] = dispc;
ctx               184 drivers/gpu/drm/omapdrm/dss/dpi.c 	return dispc_div_calc(ctx->dpi->dss->dispc, dispc,
ctx               185 drivers/gpu/drm/omapdrm/dss/dpi.c 			      ctx->pck_min, ctx->pck_max,
ctx               186 drivers/gpu/drm/omapdrm/dss/dpi.c 			      dpi_calc_dispc_cb, ctx);
ctx               194 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               196 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.n = n;
ctx               197 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.m = m;
ctx               198 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.fint = fint;
ctx               199 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->pll_cinfo.clkdco = clkdco;
ctx               201 drivers/gpu/drm/omapdrm/dss/dpi.c 	return dss_pll_hsdiv_calc_a(ctx->dpi->pll, clkdco,
ctx               202 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->pck_min, dss_get_max_fck_rate(ctx->dpi->dss),
ctx               203 drivers/gpu/drm/omapdrm/dss/dpi.c 		dpi_calc_hsdiv_cb, ctx);
ctx               208 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               210 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->fck = fck;
ctx               212 drivers/gpu/drm/omapdrm/dss/dpi.c 	return dispc_div_calc(ctx->dpi->dss->dispc, fck,
ctx               213 drivers/gpu/drm/omapdrm/dss/dpi.c 			      ctx->pck_min, ctx->pck_max,
ctx               214 drivers/gpu/drm/omapdrm/dss/dpi.c 			      dpi_calc_dispc_cb, ctx);
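
dpi.c searches its clock tree with nested callbacks: the top level (dss_pll_calc_a() or dss_div_calc()) enumerates candidates, each dpi_calc_*_cb() records its candidate into the shared dpi_clk_calc_ctx and recurses one level down, and a true return from the innermost dpi_calc_dispc_cb() unwinds the whole search with the ctx left holding a consistent configuration. A generic sketch of that pattern; names and sweep bounds here are illustrative stand-ins for the dss_pll_calc_a, dss_pll_hsdiv_calc_a and dispc_div_calc helpers:

#include <stdbool.h>

struct clk_calc_sketch {
	unsigned long fck, pck;           /* chosen clocks      */
	unsigned long pck_min, pck_max;   /* acceptance window  */
};

/* Innermost level: accept the first pixel clock inside the window. */
static bool pck_cb(unsigned long pck, void *data)
{
	struct clk_calc_sketch *ctx = data;

	if (pck < ctx->pck_min || pck > ctx->pck_max)
		return false;             /* reject, keep enumerating  */
	ctx->pck = pck;
	return true;                      /* accept, unwind the search */
}

/* Middle level: record the candidate fck, then sweep pck dividers. */
static bool fck_cb(unsigned long fck, void *data)
{
	struct clk_calc_sketch *ctx = data;

	ctx->fck = fck;
	for (unsigned int div = 1; div <= 255; div++)
		if (pck_cb(fck / div, data))
			return true;
	return false;
}

/* Top level: sweep the PLL multiplier, stopping on the first hit. */
static bool clk_search_sketch(struct clk_calc_sketch *ctx,
			      unsigned long clkin)
{
	for (unsigned int n = 1; n <= 32; n++)
		if (fck_cb(clkin * n, ctx))
			return true;
	return false;
}
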
ctx               218 drivers/gpu/drm/omapdrm/dss/dpi.c 		struct dpi_clk_calc_ctx *ctx)
ctx               222 drivers/gpu/drm/omapdrm/dss/dpi.c 	memset(ctx, 0, sizeof(*ctx));
ctx               223 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->dpi = dpi;
ctx               224 drivers/gpu/drm/omapdrm/dss/dpi.c 	ctx->clkout_idx = dss_pll_get_clkout_idx_for_src(dpi->clk_src);
ctx               231 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->pck_min = pck - 1000;
ctx               232 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->pck_max = pck + 1000;
ctx               237 drivers/gpu/drm/omapdrm/dss/dpi.c 		return dss_pll_calc_a(ctx->dpi->pll, clkin,
ctx               239 drivers/gpu/drm/omapdrm/dss/dpi.c 				dpi_calc_pll_cb, ctx);
ctx               241 drivers/gpu/drm/omapdrm/dss/dpi.c 		dss_pll_calc_b(dpi->pll, clkin, pck, &ctx->pll_cinfo);
ctx               243 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->dispc_cinfo.lck_div = 1;
ctx               244 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->dispc_cinfo.pck_div = 1;
ctx               245 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->dispc_cinfo.lck = ctx->pll_cinfo.clkout[0];
ctx               246 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->dispc_cinfo.pck = ctx->dispc_cinfo.lck;
ctx               253 drivers/gpu/drm/omapdrm/dss/dpi.c 			     struct dpi_clk_calc_ctx *ctx)
ctx               267 drivers/gpu/drm/omapdrm/dss/dpi.c 		memset(ctx, 0, sizeof(*ctx));
ctx               268 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->dpi = dpi;
ctx               270 drivers/gpu/drm/omapdrm/dss/dpi.c 			ctx->pck_min = max(pck - 1000 * i * i * i, 0lu);
ctx               272 drivers/gpu/drm/omapdrm/dss/dpi.c 			ctx->pck_min = 0;
ctx               273 drivers/gpu/drm/omapdrm/dss/dpi.c 		ctx->pck_max = pck + 1000 * i * i * i;
ctx               275 drivers/gpu/drm/omapdrm/dss/dpi.c 		ok = dss_div_calc(dpi->dss, pck, ctx->pck_min,
ctx               276 drivers/gpu/drm/omapdrm/dss/dpi.c 				  dpi_calc_dss_cb, ctx);
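
dpi_dss_clk_calc() above retries with a cubically widening pixel-clock window: attempt i tolerates roughly plus or minus 1000 * i^3 Hz around the requested rate (clamped at zero below), so the first pass demands a near-exact match and later passes trade accuracy for a realizable divider. A sketch of that retry loop; find_dividers() stands in for the dss_div_calc() call:

#include <stdbool.h>

static bool clk_calc_retry_sketch(unsigned long pck_req, int max_tries,
				  bool (*find_dividers)(unsigned long min,
							unsigned long max))
{
	for (int i = 0; i < max_tries; i++) {
		unsigned long slack = 1000ul * i * i * i;
		unsigned long min = pck_req > slack ? pck_req - slack : 0;
		unsigned long max = pck_req + slack;

		if (find_dividers(min, max))
			return true;   /* dividers found within tolerance */
	}
	return false;
}
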
ctx               290 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               294 drivers/gpu/drm/omapdrm/dss/dpi.c 	ok = dpi_pll_clk_calc(dpi, pck_req, &ctx);
ctx               298 drivers/gpu/drm/omapdrm/dss/dpi.c 	r = dss_pll_set_config(dpi->pll, &ctx.pll_cinfo);
ctx               304 drivers/gpu/drm/omapdrm/dss/dpi.c 	dpi->mgr_config.clock_info = ctx.dispc_cinfo;
ctx               306 drivers/gpu/drm/omapdrm/dss/dpi.c 	*fck = ctx.pll_cinfo.clkout[ctx.clkout_idx];
ctx               307 drivers/gpu/drm/omapdrm/dss/dpi.c 	*lck_div = ctx.dispc_cinfo.lck_div;
ctx               308 drivers/gpu/drm/omapdrm/dss/dpi.c 	*pck_div = ctx.dispc_cinfo.pck_div;
ctx               316 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               320 drivers/gpu/drm/omapdrm/dss/dpi.c 	ok = dpi_dss_clk_calc(dpi, pck_req, &ctx);
ctx               324 drivers/gpu/drm/omapdrm/dss/dpi.c 	r = dss_set_fck_rate(dpi->dss, ctx.fck);
ctx               328 drivers/gpu/drm/omapdrm/dss/dpi.c 	dpi->mgr_config.clock_info = ctx.dispc_cinfo;
ctx               330 drivers/gpu/drm/omapdrm/dss/dpi.c 	*fck = ctx.fck;
ctx               331 drivers/gpu/drm/omapdrm/dss/dpi.c 	*lck_div = ctx.dispc_cinfo.lck_div;
ctx               332 drivers/gpu/drm/omapdrm/dss/dpi.c 	*pck_div = ctx.dispc_cinfo.pck_div;
ctx               470 drivers/gpu/drm/omapdrm/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               480 drivers/gpu/drm/omapdrm/dss/dpi.c 		ok = dpi_pll_clk_calc(dpi, mode->clock * 1000, &ctx);
ctx               484 drivers/gpu/drm/omapdrm/dss/dpi.c 		fck = ctx.pll_cinfo.clkout[ctx.clkout_idx];
ctx               486 drivers/gpu/drm/omapdrm/dss/dpi.c 		ok = dpi_dss_clk_calc(dpi, mode->clock * 1000, &ctx);
ctx               490 drivers/gpu/drm/omapdrm/dss/dpi.c 		fck = ctx.fck;
ctx               493 drivers/gpu/drm/omapdrm/dss/dpi.c 	lck_div = ctx.dispc_cinfo.lck_div;
ctx               494 drivers/gpu/drm/omapdrm/dss/dpi.c 	pck_div = ctx.dispc_cinfo.pck_div;
ctx              4300 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4301 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct videomode *vm = &ctx->vm;
ctx              4303 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx              4304 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx              4305 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.lck = lck;
ctx              4306 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.pck = pck;
ctx              4308 drivers/gpu/drm/omapdrm/dss/dsi.c 	*vm = *ctx->config->vm;
ctx              4310 drivers/gpu/drm/omapdrm/dss/dsi.c 	vm->hactive = ctx->config->vm->hactive;
ctx              4311 drivers/gpu/drm/omapdrm/dss/dsi.c 	vm->vactive = ctx->config->vm->vactive;
ctx              4321 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4323 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc;
ctx              4324 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc;
ctx              4326 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dispc_div_calc(ctx->dsi->dss->dispc, dispc,
ctx              4327 drivers/gpu/drm/omapdrm/dss/dsi.c 			      ctx->req_pck_min, ctx->req_pck_max,
ctx              4328 drivers/gpu/drm/omapdrm/dss/dsi.c 			      dsi_cm_calc_dispc_cb, ctx);
ctx              4334 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4335 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_data *dsi = ctx->dsi;
ctx              4337 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.n = n;
ctx              4338 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.m = m;
ctx              4339 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.fint = fint;
ctx              4340 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.clkdco = clkdco;
ctx              4342 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dss_pll_hsdiv_calc_a(ctx->pll, clkdco, ctx->req_pck_min,
ctx              4344 drivers/gpu/drm/omapdrm/dss/dsi.c 			dsi_cm_calc_hsdiv_cb, ctx);
ctx              4349 drivers/gpu/drm/omapdrm/dss/dsi.c 		struct dsi_clk_calc_ctx *ctx)
ctx              4370 drivers/gpu/drm/omapdrm/dss/dsi.c 	memset(ctx, 0, sizeof(*ctx));
ctx              4371 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi = dsi;
ctx              4372 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->pll = &dsi->pll;
ctx              4373 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->config = cfg;
ctx              4374 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_min = pck;
ctx              4375 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_nom = pck;
ctx              4376 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_max = pck * 3 / 2;
ctx              4381 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dss_pll_calc_a(ctx->pll, clkin,
ctx              4383 drivers/gpu/drm/omapdrm/dss/dsi.c 			dsi_cm_calc_pll_cb, ctx);
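In dsi.c the same idea nests several levels deep: the PLL callback records n/m/fint/clkdco and launches the HSDIV search, whose callback launches the pixel-clock divider search, all sharing one dsi_clk_calc_ctx through the void * data argument. A standalone two-level sketch of that chaining, with hypothetical names and toy search loops:

#include <stdbool.h>
#include <stdio.h>

struct calc_ctx {
	unsigned long clkdco, dispc, pck;
};

static bool pck_cb(unsigned long pck, void *data)
{
	struct calc_ctx *ctx = data;

	ctx->pck = pck;
	return true;				/* innermost level: accept */
}

static bool for_each_pck(unsigned long dispc,
			 bool (*cb)(unsigned long, void *), void *data)
{
	for (int div = 1; div <= 8; div++)
		if (cb(dispc / div, data))
			return true;
	return false;
}

static bool hsdiv_cb(unsigned long dispc, void *data)
{
	struct calc_ctx *ctx = data;

	ctx->dispc = dispc;			/* record, then descend a level */
	return for_each_pck(dispc, pck_cb, data);
}

static bool for_each_hsdiv(unsigned long clkdco,
			   bool (*cb)(unsigned long, void *), void *data)
{
	for (int m = 1; m <= 16; m++)
		if (cb(clkdco / m, data))
			return true;
	return false;
}

int main(void)
{
	struct calc_ctx ctx = { .clkdco = 1000000000UL };

	if (for_each_hsdiv(ctx.clkdco, hsdiv_cb, &ctx))
		printf("dispc=%lu pck=%lu\n", ctx.dispc, ctx.pck);
	return 0;
}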
ctx              4386 drivers/gpu/drm/omapdrm/dss/dsi.c static bool dsi_vm_calc_blanking(struct dsi_clk_calc_ctx *ctx)
ctx              4388 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_data *dsi = ctx->dsi;
ctx              4389 drivers/gpu/drm/omapdrm/dss/dsi.c 	const struct omap_dss_dsi_config *cfg = ctx->config;
ctx              4392 drivers/gpu/drm/omapdrm/dss/dsi.c 	unsigned long hsclk = ctx->dsi_cinfo.clkdco / 4;
ctx              4409 drivers/gpu/drm/omapdrm/dss/dsi.c 	req_pck_min = ctx->req_pck_min;
ctx              4410 drivers/gpu/drm/omapdrm/dss/dsi.c 	req_pck_max = ctx->req_pck_max;
ctx              4411 drivers/gpu/drm/omapdrm/dss/dsi.c 	req_pck_nom = ctx->req_pck_nom;
ctx              4413 drivers/gpu/drm/omapdrm/dss/dsi.c 	dispc_pck = ctx->dispc_cinfo.pck;
ctx              4478 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi_vm = &ctx->dsi_vm;
ctx              4542 drivers/gpu/drm/omapdrm/dss/dsi.c 	dispc_vm = &ctx->vm;
ctx              4588 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4590 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx              4591 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx              4592 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.lck = lck;
ctx              4593 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dispc_cinfo.pck = pck;
ctx              4595 drivers/gpu/drm/omapdrm/dss/dsi.c 	if (!dsi_vm_calc_blanking(ctx))
ctx              4599 drivers/gpu/drm/omapdrm/dss/dsi.c 	print_dispc_vm("dispc", &ctx->vm);
ctx              4600 drivers/gpu/drm/omapdrm/dss/dsi.c 	print_dsi_vm("dsi  ", &ctx->dsi_vm);
ctx              4601 drivers/gpu/drm/omapdrm/dss/dsi.c 	print_dispc_vm("req  ", ctx->config->vm);
ctx              4602 drivers/gpu/drm/omapdrm/dss/dsi.c 	print_dsi_dispc_vm("act  ", &ctx->dsi_vm);
ctx              4611 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4614 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc;
ctx              4615 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc;
ctx              4622 drivers/gpu/drm/omapdrm/dss/dsi.c 	if (ctx->config->trans_mode == OMAP_DSS_DSI_BURST_MODE)
ctx              4623 drivers/gpu/drm/omapdrm/dss/dsi.c 		pck_max = ctx->req_pck_max + 10000000;
ctx              4625 drivers/gpu/drm/omapdrm/dss/dsi.c 		pck_max = ctx->req_pck_max;
ctx              4627 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dispc_div_calc(ctx->dsi->dss->dispc, dispc,
ctx              4628 drivers/gpu/drm/omapdrm/dss/dsi.c 			      ctx->req_pck_min, pck_max,
ctx              4629 drivers/gpu/drm/omapdrm/dss/dsi.c 			      dsi_vm_calc_dispc_cb, ctx);
ctx              4635 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4636 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_data *dsi = ctx->dsi;
ctx              4638 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.n = n;
ctx              4639 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.m = m;
ctx              4640 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.fint = fint;
ctx              4641 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi_cinfo.clkdco = clkdco;
ctx              4643 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dss_pll_hsdiv_calc_a(ctx->pll, clkdco, ctx->req_pck_min,
ctx              4645 drivers/gpu/drm/omapdrm/dss/dsi.c 			dsi_vm_calc_hsdiv_cb, ctx);
ctx              4650 drivers/gpu/drm/omapdrm/dss/dsi.c 		struct dsi_clk_calc_ctx *ctx)
ctx              4662 drivers/gpu/drm/omapdrm/dss/dsi.c 	memset(ctx, 0, sizeof(*ctx));
ctx              4663 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->dsi = dsi;
ctx              4664 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->pll = &dsi->pll;
ctx              4665 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->config = cfg;
ctx              4668 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_min = vm->pixelclock - 1000;
ctx              4669 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_nom = vm->pixelclock;
ctx              4670 drivers/gpu/drm/omapdrm/dss/dsi.c 	ctx->req_pck_max = vm->pixelclock + 1000;
ctx              4672 drivers/gpu/drm/omapdrm/dss/dsi.c 	byteclk_min = div64_u64((u64)ctx->req_pck_min * bitspp, ndl * 8);
ctx              4679 drivers/gpu/drm/omapdrm/dss/dsi.c 		byteclk_max = div64_u64((u64)ctx->req_pck_max * bitspp,
ctx              4685 drivers/gpu/drm/omapdrm/dss/dsi.c 	return dss_pll_calc_a(ctx->pll, clkin,
ctx              4687 drivers/gpu/drm/omapdrm/dss/dsi.c 			dsi_vm_calc_pll_cb, ctx);
ctx              4694 drivers/gpu/drm/omapdrm/dss/dsi.c 	struct dsi_clk_calc_ctx ctx;
ctx              4704 drivers/gpu/drm/omapdrm/dss/dsi.c 		ok = dsi_vm_calc(dsi, config, &ctx);
ctx              4706 drivers/gpu/drm/omapdrm/dss/dsi.c 		ok = dsi_cm_calc(dsi, config, &ctx);
ctx              4714 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi_pll_calc_dsi_fck(dsi, &ctx.dsi_cinfo);
ctx              4716 drivers/gpu/drm/omapdrm/dss/dsi.c 	r = dsi_lp_clock_calc(ctx.dsi_cinfo.clkout[HSDIV_DSI],
ctx              4723 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi->user_dsi_cinfo = ctx.dsi_cinfo;
ctx              4724 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi->user_dispc_cinfo = ctx.dispc_cinfo;
ctx              4726 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi->vm = ctx.vm;
ctx              4751 drivers/gpu/drm/omapdrm/dss/dsi.c 	dsi->vm_timings = ctx.dsi_vm;
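dsi_set_config() computes everything into a stack ctx first and only then copies the results into the long-lived dsi fields (user_dsi_cinfo, user_dispc_cinfo, vm, vm_timings), so a failed calculation leaves the device state untouched. A standalone sketch of that compute-then-commit shape, with a toy calculator and hypothetical names:

#include <stdbool.h>
#include <stdio.h>

struct cinfo { unsigned long fck; int lck_div, pck_div; };

/* toy calculator: succeeds only for nonzero requests */
static bool calc(unsigned long pck_req, struct cinfo *out)
{
	if (!pck_req)
		return false;
	out->fck = pck_req * 4;
	out->lck_div = 2;
	out->pck_div = 2;
	return true;
}

struct dsi_state { struct cinfo user_cinfo; };

static int set_config(struct dsi_state *st, unsigned long pck_req)
{
	struct cinfo tmp = { 0 };

	if (!calc(pck_req, &tmp))
		return -1;		/* st untouched on failure */
	st->user_cinfo = tmp;		/* one struct assignment commits */
	return 0;
}

int main(void)
{
	struct dsi_state st = { 0 };

	if (!set_config(&st, 27000000UL))
		printf("fck=%lu\n", st.user_cinfo.fck);
	return 0;
}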
ctx               106 drivers/gpu/drm/omapdrm/dss/dss.c 	dss->ctx[(DSS_##reg).idx / sizeof(u32)] = dss_read_reg(dss, DSS_##reg)
ctx               108 drivers/gpu/drm/omapdrm/dss/dss.c 	dss_write_reg(dss, DSS_##reg, dss->ctx[(DSS_##reg).idx / sizeof(u32)])
ctx               245 drivers/gpu/drm/omapdrm/dss/dss.h 	u32		ctx[DSS_SZ_REGS / sizeof(u32)];
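The dss.c SR()/RR() macros shadow each register in dss->ctx[], indexing by the register's byte offset divided by sizeof(u32), so a single array spans the whole register file across a context loss. A standalone sketch of the same save/restore idiom; SZ_REGS and the offset used are made up, and the real macros token-paste DSS_##reg:

#include <stdint.h>
#include <stdio.h>

#define SZ_REGS 0x40

static uint32_t hw[SZ_REGS / 4];		/* stand-in for the mapped block */
static uint32_t shadow[SZ_REGS / sizeof(uint32_t)];

#define SR(off)	(shadow[(off) / sizeof(uint32_t)] = hw[(off) / 4])
#define RR(off)	(hw[(off) / 4] = shadow[(off) / sizeof(uint32_t)])

int main(void)
{
	hw[0x10 / 4] = 0xdeadbeef;
	SR(0x10);			/* save before context loss */
	hw[0x10 / 4] = 0;		/* "power off" clears the register */
	RR(0x10);			/* restore on resume */
	printf("0x%08x\n", (unsigned)hw[0x10 / 4]);
	return 0;
}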
ctx                48 drivers/gpu/drm/omapdrm/dss/sdi.c 	struct sdi_clk_calc_ctx *ctx = data;
ctx                50 drivers/gpu/drm/omapdrm/dss/sdi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx                51 drivers/gpu/drm/omapdrm/dss/sdi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx                52 drivers/gpu/drm/omapdrm/dss/sdi.c 	ctx->dispc_cinfo.lck = lck;
ctx                53 drivers/gpu/drm/omapdrm/dss/sdi.c 	ctx->dispc_cinfo.pck = pck;
ctx                60 drivers/gpu/drm/omapdrm/dss/sdi.c 	struct sdi_clk_calc_ctx *ctx = data;
ctx                62 drivers/gpu/drm/omapdrm/dss/sdi.c 	ctx->fck = fck;
ctx                64 drivers/gpu/drm/omapdrm/dss/sdi.c 	return dispc_div_calc(ctx->sdi->dss->dispc, fck,
ctx                65 drivers/gpu/drm/omapdrm/dss/sdi.c 			      ctx->pck_min, ctx->pck_max,
ctx                66 drivers/gpu/drm/omapdrm/dss/sdi.c 			      dpi_calc_dispc_cb, ctx);
ctx                74 drivers/gpu/drm/omapdrm/dss/sdi.c 	struct sdi_clk_calc_ctx ctx;
ctx                86 drivers/gpu/drm/omapdrm/dss/sdi.c 		memset(&ctx, 0, sizeof(ctx));
ctx                88 drivers/gpu/drm/omapdrm/dss/sdi.c 		ctx.sdi = sdi;
ctx                91 drivers/gpu/drm/omapdrm/dss/sdi.c 			ctx.pck_min = max(pclk - 1000 * i * i * i, 0lu);
ctx                93 drivers/gpu/drm/omapdrm/dss/sdi.c 			ctx.pck_min = 0;
ctx                94 drivers/gpu/drm/omapdrm/dss/sdi.c 		ctx.pck_max = pclk + 1000 * i * i * i;
ctx                96 drivers/gpu/drm/omapdrm/dss/sdi.c 		ok = dss_div_calc(sdi->dss, pclk, ctx.pck_min,
ctx                97 drivers/gpu/drm/omapdrm/dss/sdi.c 				  dpi_calc_dss_cb, &ctx);
ctx                99 drivers/gpu/drm/omapdrm/dss/sdi.c 			*fck = ctx.fck;
ctx               100 drivers/gpu/drm/omapdrm/dss/sdi.c 			*dispc_cinfo = ctx.dispc_cinfo;
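sdi.c retries its clock search with a cubically widening pixel-clock window (1000 * i * i * i) until dss_div_calc() finds a workable rate. A standalone sketch of that widening retry; the reachable-rate predicate is a toy stand-in:

#include <stdbool.h>
#include <stdio.h>

static bool try_window(unsigned long pck, unsigned long min, unsigned long max)
{
	/* toy predicate: pretend only multiples of 7 kHz are reachable */
	unsigned long reachable = (pck / 7000 + 1) * 7000;
	return reachable >= min && reachable <= max;
}

int main(void)
{
	unsigned long pclk = 65000000UL;

	for (int i = 1; i < 10; i++) {
		unsigned long slack = 1000UL * i * i * i;
		unsigned long min = pclk > slack ? pclk - slack : 0;

		if (try_window(pclk, min, pclk + slack)) {
			printf("fits with +/- %lu Hz at pass %d\n", slack, i);
			return 0;
		}
	}
	return 1;
}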
ctx                52 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = panel_to_feiyang(panel);
ctx                53 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct mipi_dsi_device *dsi = ctx->dsi;
ctx                57 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ret = regulator_enable(ctx->dvdd);
ctx                64 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ret = regulator_enable(ctx->avdd);
ctx                71 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	gpiod_set_value(ctx->reset, 0);
ctx                79 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	gpiod_set_value(ctx->reset, 1);
ctx                99 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = panel_to_feiyang(panel);
ctx               104 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	mipi_dsi_dcs_set_display_on(ctx->dsi);
ctx               105 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	backlight_enable(ctx->backlight);
ctx               112 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = panel_to_feiyang(panel);
ctx               114 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	backlight_disable(ctx->backlight);
ctx               115 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	return mipi_dsi_dcs_set_display_off(ctx->dsi);
ctx               120 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = panel_to_feiyang(panel);
ctx               123 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ret = mipi_dsi_dcs_set_display_off(ctx->dsi);
ctx               128 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ret = mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
ctx               136 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	gpiod_set_value(ctx->reset, 0);
ctx               138 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	regulator_disable(ctx->avdd);
ctx               143 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	regulator_disable(ctx->dvdd);
ctx               168 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = panel_to_feiyang(panel);
ctx               173 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 		DRM_DEV_ERROR(&ctx->dsi->dev, "failed to add mode %ux%u@%u\n",
ctx               197 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx;
ctx               200 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx = devm_kzalloc(&dsi->dev, sizeof(*ctx), GFP_KERNEL);
ctx               201 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	if (!ctx)
ctx               204 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               205 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->dsi = dsi;
ctx               207 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	drm_panel_init(&ctx->panel);
ctx               208 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->panel.dev = &dsi->dev;
ctx               209 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->panel.funcs = &feiyang_funcs;
ctx               211 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->dvdd = devm_regulator_get(&dsi->dev, "dvdd");
ctx               212 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	if (IS_ERR(ctx->dvdd)) {
ctx               214 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 		return PTR_ERR(ctx->dvdd);
ctx               217 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->avdd = devm_regulator_get(&dsi->dev, "avdd");
ctx               218 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	if (IS_ERR(ctx->avdd)) {
ctx               220 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 		return PTR_ERR(ctx->avdd);
ctx               223 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->reset = devm_gpiod_get(&dsi->dev, "reset", GPIOD_OUT_LOW);
ctx               224 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	if (IS_ERR(ctx->reset)) {
ctx               226 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 		return PTR_ERR(ctx->reset);
ctx               229 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ctx->backlight = devm_of_find_backlight(&dsi->dev);
ctx               230 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	if (IS_ERR(ctx->backlight))
ctx               231 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 		return PTR_ERR(ctx->backlight);
ctx               233 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	ret = drm_panel_add(&ctx->panel);
ctx               246 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	struct feiyang *ctx = mipi_dsi_get_drvdata(dsi);
ctx               249 drivers/gpu/drm/panel/panel-feiyang-fy07024di26a30d.c 	drm_panel_remove(&ctx->panel);
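The feiyang prepare path enables dvdd, then avdd, then runs the panel init, and its error paths unwind in reverse order; probe allocates everything with devm_* so resource teardown is automatic. A standalone sketch of the goto-based unwinding with hypothetical helpers:

#include <stdio.h>

static int enable_supply(const char *name) { printf("enable %s\n", name); return 0; }
static void disable_supply(const char *name) { printf("disable %s\n", name); }
static int init_sequence(void) { return -1; }	/* pretend the DSI init fails */

static int panel_prepare(void)
{
	int ret;

	ret = enable_supply("dvdd");
	if (ret)
		return ret;
	ret = enable_supply("avdd");
	if (ret)
		goto err_dvdd;
	ret = init_sequence();
	if (ret)
		goto err_avdd;
	return 0;

err_avdd:
	disable_supply("avdd");	/* unwind in reverse order of power-up */
err_dvdd:
	disable_supply("dvdd");
	return ret;
}

int main(void) { return panel_prepare() ? 1 : 0; }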
ctx               273 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c static int ili9881c_switch_page(struct ili9881c *ctx, u8 page)
ctx               278 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = mipi_dsi_dcs_write_buffer(ctx->dsi, buf, sizeof(buf));
ctx               285 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c static int ili9881c_send_cmd_data(struct ili9881c *ctx, u8 cmd, u8 data)
ctx               290 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = mipi_dsi_dcs_write_buffer(ctx->dsi, buf, sizeof(buf));
ctx               299 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = panel_to_ili9881c(panel);
ctx               304 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = regulator_enable(ctx->power);
ctx               310 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	gpiod_set_value(ctx->reset, 1);
ctx               313 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	gpiod_set_value(ctx->reset, 0);
ctx               320 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 			ret = ili9881c_switch_page(ctx, instr->arg.page);
ctx               322 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 			ret = ili9881c_send_cmd_data(ctx, instr->arg.cmd.cmd,
ctx               329 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = ili9881c_switch_page(ctx, 0);
ctx               333 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = mipi_dsi_dcs_set_tear_on(ctx->dsi, MIPI_DSI_DCS_TEAR_MODE_VBLANK);
ctx               337 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = mipi_dsi_dcs_exit_sleep_mode(ctx->dsi);
ctx               346 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = panel_to_ili9881c(panel);
ctx               350 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	mipi_dsi_dcs_set_display_on(ctx->dsi);
ctx               351 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	backlight_enable(ctx->backlight);
ctx               358 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = panel_to_ili9881c(panel);
ctx               360 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	backlight_disable(ctx->backlight);
ctx               361 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	return mipi_dsi_dcs_set_display_off(ctx->dsi);
ctx               366 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = panel_to_ili9881c(panel);
ctx               368 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
ctx               369 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	regulator_disable(ctx->power);
ctx               370 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	gpiod_set_value(ctx->reset, 1);
ctx               393 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = panel_to_ili9881c(panel);
ctx               398 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		dev_err(&ctx->dsi->dev, "failed to add mode %ux%u@%u\n",
ctx               427 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx;
ctx               430 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx = devm_kzalloc(&dsi->dev, sizeof(*ctx), GFP_KERNEL);
ctx               431 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	if (!ctx)
ctx               433 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               434 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx->dsi = dsi;
ctx               436 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	drm_panel_init(&ctx->panel);
ctx               437 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx->panel.dev = &dsi->dev;
ctx               438 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx->panel.funcs = &ili9881c_funcs;
ctx               440 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx->power = devm_regulator_get(&dsi->dev, "power");
ctx               441 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	if (IS_ERR(ctx->power)) {
ctx               443 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		return PTR_ERR(ctx->power);
ctx               446 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ctx->reset = devm_gpiod_get(&dsi->dev, "reset", GPIOD_OUT_LOW);
ctx               447 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	if (IS_ERR(ctx->reset)) {
ctx               449 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		return PTR_ERR(ctx->reset);
ctx               454 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		ctx->backlight = of_find_backlight_by_node(np);
ctx               457 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		if (!ctx->backlight)
ctx               461 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	ret = drm_panel_add(&ctx->panel);
ctx               474 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	struct ili9881c *ctx = mipi_dsi_get_drvdata(dsi);
ctx               477 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	drm_panel_remove(&ctx->panel);
ctx               479 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 	if (ctx->backlight)
ctx               480 drivers/gpu/drm/panel/panel-ilitek-ili9881c.c 		put_device(&ctx->backlight->dev);
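ili9881c replays a table of init instructions, each entry either a page switch or a command/data pair, dispatched by a switch onto the two helpers ili9881c_switch_page() and ili9881c_send_cmd_data() seen above. A standalone sketch of that table-driven dispatch; the entries below are invented:

#include <stdio.h>
#include <stddef.h>

enum op { OP_SWITCH_PAGE, OP_CMD };

struct instr {
	enum op op;
	union {
		unsigned char page;
		struct { unsigned char cmd, data; } cmd;
	} arg;
};

static const struct instr init[] = {
	{ OP_SWITCH_PAGE, { .page = 3 } },
	{ OP_CMD, { .cmd = { 0x01, 0x00 } } },
	{ OP_SWITCH_PAGE, { .page = 0 } },
};

int main(void)
{
	for (size_t i = 0; i < sizeof(init) / sizeof(init[0]); i++) {
		switch (init[i].op) {
		case OP_SWITCH_PAGE:
			printf("page %u\n", (unsigned)init[i].arg.page);
			break;
		case OP_CMD:
			printf("cmd %02x <- %02x\n",
			       (unsigned)init[i].arg.cmd.cmd,
			       (unsigned)init[i].arg.cmd.data);
			break;
		}
	}
	return 0;
}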
ctx                40 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_spi_write_u16(struct lg4573 *ctx, u16 data)
ctx                48 drivers/gpu/drm/panel/panel-lg-lg4573.c 	dev_dbg(ctx->panel.dev, "writing data: %x\n", data);
ctx                53 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return spi_sync(ctx->spi, &msg);
ctx                56 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_spi_write_u16_array(struct lg4573 *ctx, const u16 *buffer,
ctx                63 drivers/gpu/drm/panel/panel-lg-lg4573.c 		ret = lg4573_spi_write_u16(ctx, buffer[i]);
ctx                71 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_spi_write_dcs(struct lg4573 *ctx, u8 dcs)
ctx                73 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_u16(ctx, (0x70 << 8 | dcs));
ctx                76 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_display_on(struct lg4573 *ctx)
ctx                80 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ret = lg4573_spi_write_dcs(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
ctx                86 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_dcs(ctx, MIPI_DCS_SET_DISPLAY_ON);
ctx                89 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_display_off(struct lg4573 *ctx)
ctx                93 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ret = lg4573_spi_write_dcs(ctx, MIPI_DCS_SET_DISPLAY_OFF);
ctx                99 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_dcs(ctx, MIPI_DCS_ENTER_SLEEP_MODE);
ctx               102 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_display_mode_settings(struct lg4573 *ctx)
ctx               115 drivers/gpu/drm/panel/panel-lg-lg4573.c 	dev_dbg(ctx->panel.dev, "transfer display mode settings\n");
ctx               116 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_u16_array(ctx, display_mode_settings,
ctx               120 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_power_settings(struct lg4573 *ctx)
ctx               131 drivers/gpu/drm/panel/panel-lg-lg4573.c 	dev_dbg(ctx->panel.dev, "transfer power settings\n");
ctx               132 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_u16_array(ctx, power_settings,
ctx               136 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_gamma_settings(struct lg4573 *ctx)
ctx               156 drivers/gpu/drm/panel/panel-lg-lg4573.c 	dev_dbg(ctx->panel.dev, "transfer gamma settings\n");
ctx               157 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_spi_write_u16_array(ctx, gamma_settings,
ctx               161 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_init(struct lg4573 *ctx)
ctx               165 drivers/gpu/drm/panel/panel-lg-lg4573.c 	dev_dbg(ctx->panel.dev, "initializing LCD\n");
ctx               167 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ret = lg4573_display_mode_settings(ctx);
ctx               171 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ret = lg4573_power_settings(ctx);
ctx               175 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_gamma_settings(ctx);
ctx               178 drivers/gpu/drm/panel/panel-lg-lg4573.c static int lg4573_power_on(struct lg4573 *ctx)
ctx               180 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_display_on(ctx);
ctx               185 drivers/gpu/drm/panel/panel-lg-lg4573.c 	struct lg4573 *ctx = panel_to_lg4573(panel);
ctx               187 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_display_off(ctx);
ctx               192 drivers/gpu/drm/panel/panel-lg-lg4573.c 	struct lg4573 *ctx = panel_to_lg4573(panel);
ctx               194 drivers/gpu/drm/panel/panel-lg-lg4573.c 	lg4573_init(ctx);
ctx               196 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return lg4573_power_on(ctx);
ctx               244 drivers/gpu/drm/panel/panel-lg-lg4573.c 	struct lg4573 *ctx;
ctx               247 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ctx = devm_kzalloc(&spi->dev, sizeof(*ctx), GFP_KERNEL);
ctx               248 drivers/gpu/drm/panel/panel-lg-lg4573.c 	if (!ctx)
ctx               251 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ctx->spi = spi;
ctx               253 drivers/gpu/drm/panel/panel-lg-lg4573.c 	spi_set_drvdata(spi, ctx);
ctx               262 drivers/gpu/drm/panel/panel-lg-lg4573.c 	drm_panel_init(&ctx->panel);
ctx               263 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ctx->panel.dev = &spi->dev;
ctx               264 drivers/gpu/drm/panel/panel-lg-lg4573.c 	ctx->panel.funcs = &lg4573_drm_funcs;
ctx               266 drivers/gpu/drm/panel/panel-lg-lg4573.c 	return drm_panel_add(&ctx->panel);
ctx               271 drivers/gpu/drm/panel/panel-lg-lg4573.c 	struct lg4573 *ctx = spi_get_drvdata(spi);
ctx               273 drivers/gpu/drm/panel/panel-lg-lg4573.c 	lg4573_display_off(ctx);
ctx               274 drivers/gpu/drm/panel/panel-lg-lg4573.c 	drm_panel_remove(&ctx->panel);
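lg4573 talks to the controller over SPI in 16-bit frames whose high byte flags the word type, so a DCS command byte goes out as 0x70 << 8 | dcs, and the settings tables carry their own per-word prefixes. A standalone sketch of that framing; the DCS opcodes shown are the standard MIPI values:

#include <stdint.h>
#include <stdio.h>

static uint16_t frame_dcs(uint8_t dcs)
{
	return (uint16_t)(0x70 << 8 | dcs);	/* high byte marks "command" */
}

int main(void)
{
	printf("exit sleep -> %04x\n", (unsigned)frame_dcs(0x11)); /* MIPI_DCS_EXIT_SLEEP_MODE */
	printf("display on -> %04x\n", (unsigned)frame_dcs(0x29)); /* MIPI_DCS_SET_DISPLAY_ON */
	return 0;
}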
ctx                95 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c static void otm8009a_dcs_write_buf(struct otm8009a *ctx, const void *data,
ctx                98 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               104 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c static void otm8009a_dcs_write_buf_hs(struct otm8009a *ctx, const void *data,
ctx               107 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               112 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	otm8009a_dcs_write_buf(ctx, data, len);
ctx               118 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c #define dcs_write_seq(ctx, seq...)			\
ctx               121 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	otm8009a_dcs_write_buf(ctx, d, ARRAY_SIZE(d));	\
ctx               124 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c #define dcs_write_cmd_at(ctx, cmd, seq...)		\
ctx               126 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_seq(ctx, MCS_ADRSFT, (cmd) & 0xFF);	\
ctx               127 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_seq(ctx, (cmd) >> 8, seq);		\
ctx               130 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c static int otm8009a_init_sequence(struct otm8009a *ctx)
ctx               132 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               136 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_CMD2_ENA1, 0x80, 0x09, 0x01);
ctx               139 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_CMD2_ENA2, 0x80, 0x09);
ctx               141 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_SD_PCH_CTRL, 0x30);
ctx               144 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_NO_DOC1, 0x40);
ctx               147 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL4 + 1, 0xA9);
ctx               148 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL2 + 1, 0x34);
ctx               149 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_P_DRV_M, 0x50);
ctx               150 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_VCOMDC, 0x4E);
ctx               151 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_OSC_ADJ, 0x66); /* 65Hz */
ctx               152 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL2 + 2, 0x01);
ctx               153 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL2 + 5, 0x34);
ctx               154 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL2 + 4, 0x33);
ctx               155 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GVDDSET, 0x79, 0x79);
ctx               156 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_SD_CTRL + 1, 0x1B);
ctx               157 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL1 + 2, 0x83);
ctx               158 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_SD_PCH_CTRL + 1, 0x83);
ctx               159 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_RGB_VID_SET, 0x0E);
ctx               160 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANSET, 0x00, 0x01);
ctx               162 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GOAVST, 0x85, 0x01, 0x00, 0x84, 0x01, 0x00);
ctx               163 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GOACLKA1, 0x18, 0x04, 0x03, 0x39, 0x00, 0x00,
ctx               165 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GOACLKA3, 0x18, 0x02, 0x03, 0x3B, 0x00, 0x00,
ctx               167 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GOAECLK, 0x01, 0x01, 0x20, 0x20, 0x00, 0x00,
ctx               170 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_NO_DOC2, 0x00);
ctx               172 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
ctx               173 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
ctx               175 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
ctx               177 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
ctx               178 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET5, 0, 4, 4, 4, 4, 4, 0, 0, 0, 0,
ctx               180 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET6, 0, 0, 0, 0, 0, 0, 4, 4, 4, 4,
ctx               182 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
ctx               183 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANCTRLSET8, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
ctx               186 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANU2D1, 0x00, 0x26, 0x09, 0x0B, 0x01, 0x25,
ctx               188 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANU2D2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               190 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PANU2D3, 0x25, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               192 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PAND2U1, 0x00, 0x25, 0x0C, 0x0A, 0x02, 0x26,
ctx               194 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PAND2U2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               196 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PAND2U3, 0x26, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               199 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_PWR_CTRL1 + 1, 0x66);
ctx               201 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_NO_DOC3, 0x06);
ctx               203 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GMCT2_2P, 0x00, 0x09, 0x0F, 0x0E, 0x07, 0x10,
ctx               206 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_GMCT2_2N, 0x00, 0x09, 0x0F, 0x0E, 0x07, 0x10,
ctx               211 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_cmd_at(ctx, MCS_CMD2_ENA1, 0xFF, 0xFF, 0xFF);
ctx               225 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_seq(ctx, MIPI_DCS_SET_ADDRESS_MODE, 0x00);
ctx               243 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_seq(ctx, MIPI_DCS_WRITE_POWER_SAVE, 0x00);
ctx               254 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	dcs_write_seq(ctx, MIPI_DCS_WRITE_MEMORY_START);
ctx               264 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = panel_to_otm8009a(panel);
ctx               265 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               268 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (!ctx->enabled)
ctx               271 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	backlight_disable(ctx->bl_dev);
ctx               283 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->enabled = false;
ctx               290 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = panel_to_otm8009a(panel);
ctx               292 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (!ctx->prepared)
ctx               295 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (ctx->reset_gpio) {
ctx               296 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 1);
ctx               300 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	regulator_disable(ctx->supply);
ctx               302 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->prepared = false;
ctx               309 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = panel_to_otm8009a(panel);
ctx               312 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (ctx->prepared)
ctx               315 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ret = regulator_enable(ctx->supply);
ctx               321 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (ctx->reset_gpio) {
ctx               322 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 0);
ctx               323 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 1);
ctx               325 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 0);
ctx               329 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ret = otm8009a_init_sequence(ctx);
ctx               333 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->prepared = true;
ctx               340 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = panel_to_otm8009a(panel);
ctx               342 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (ctx->enabled)
ctx               345 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	backlight_enable(ctx->bl_dev);
ctx               347 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->enabled = true;
ctx               389 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = bl_get_data(bd);
ctx               392 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (!ctx->prepared) {
ctx               404 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		otm8009a_dcs_write_buf_hs(ctx, data, ARRAY_SIZE(data));
ctx               416 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	otm8009a_dcs_write_buf_hs(ctx, data, ARRAY_SIZE(data));
ctx               428 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx;
ctx               431 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               432 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (!ctx)
ctx               435 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->reset_gpio = devm_gpiod_get_optional(dev, "reset", GPIOD_OUT_LOW);
ctx               436 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               438 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		return PTR_ERR(ctx->reset_gpio);
ctx               441 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->supply = devm_regulator_get(dev, "power");
ctx               442 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (IS_ERR(ctx->supply)) {
ctx               443 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		ret = PTR_ERR(ctx->supply);
ctx               449 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               451 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->dev = dev;
ctx               458 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	drm_panel_init(&ctx->panel);
ctx               459 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->panel.dev = dev;
ctx               460 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->panel.funcs = &otm8009a_drm_funcs;
ctx               462 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->bl_dev = devm_backlight_device_register(dev, dev_name(dev),
ctx               463 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 						     dsi->host->dev, ctx,
ctx               466 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	if (IS_ERR(ctx->bl_dev)) {
ctx               467 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		ret = PTR_ERR(ctx->bl_dev);
ctx               472 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->bl_dev->props.max_brightness = OTM8009A_BACKLIGHT_MAX;
ctx               473 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->bl_dev->props.brightness = OTM8009A_BACKLIGHT_DEFAULT;
ctx               474 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->bl_dev->props.power = FB_BLANK_POWERDOWN;
ctx               475 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	ctx->bl_dev->props.type = BACKLIGHT_RAW;
ctx               477 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	drm_panel_add(&ctx->panel);
ctx               482 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		drm_panel_remove(&ctx->panel);
ctx               483 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 		backlight_device_unregister(ctx->bl_dev);
ctx               492 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	struct otm8009a *ctx = mipi_dsi_get_drvdata(dsi);
ctx               495 drivers/gpu/drm/panel/panel-orisetech-otm8009a.c 	drm_panel_remove(&ctx->panel);
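The otm8009a dcs_write_seq() macro materializes its arguments as a local byte array and hands the write helper a pointer plus ARRAY_SIZE, keeping every call site a one-liner. A standalone sketch of the same variadic-macro trick; the named-variadic seq... form is a gcc/clang extension, as in the listing:

#include <stdio.h>
#include <stddef.h>

#define ARRAY_SIZE(a)	(sizeof(a) / sizeof((a)[0]))

static void write_buf(const unsigned char *d, size_t len)
{
	for (size_t i = 0; i < len; i++)
		printf("%02x ", d[i]);
	printf("\n");
}

#define write_seq(seq...)					\
	do {							\
		static const unsigned char d[] = { seq };	\
		write_buf(d, ARRAY_SIZE(d));			\
	} while (0)

int main(void)
{
	write_seq(0x36, 0x00);	/* e.g. MIPI_DCS_SET_ADDRESS_MODE, 0 */
	return 0;
}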
ctx               107 drivers/gpu/drm/panel/panel-raydium-rm68200.c static void rm68200_dcs_write_buf(struct rm68200 *ctx, const void *data,
ctx               110 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               119 drivers/gpu/drm/panel/panel-raydium-rm68200.c static void rm68200_dcs_write_cmd(struct rm68200 *ctx, u8 cmd, u8 value)
ctx               121 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               129 drivers/gpu/drm/panel/panel-raydium-rm68200.c #define dcs_write_seq(ctx, seq...)				\
ctx               133 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	rm68200_dcs_write_buf(ctx, d, ARRAY_SIZE(d));		\
ctx               140 drivers/gpu/drm/panel/panel-raydium-rm68200.c #define dcs_write_cmd_seq(ctx, cmd, seq...)			\
ctx               146 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		rm68200_dcs_write_cmd(ctx, cmd + i, d[i]);	\
ctx               149 drivers/gpu/drm/panel/panel-raydium-rm68200.c static void rm68200_init_sequence(struct rm68200 *ctx)
ctx               152 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_CMD_MODE_SW, MCS_CMD2_P0);
ctx               153 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_EXT_PWR_IC, 0xC0, 0x53, 0x00);
ctx               154 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_BT2CTR, 0xE5);
ctx               155 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SETAVDD, 0x0A);
ctx               156 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SETAVEE, 0x0A);
ctx               157 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SGOPCTR, 0x52);
ctx               158 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_BT3CTR, 0x53);
ctx               159 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_BT4CTR, 0x5A);
ctx               160 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_INVCTR, 0x00);
ctx               161 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_STBCTR, 0x0A);
ctx               162 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SDCTR, 0x06);
ctx               163 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_VCMCTR, 0x56);
ctx               164 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SETVGN, 0xA0, 0x00);
ctx               165 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SETVGP, 0xA0, 0x00);
ctx               166 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_SW_CTRL, 0x11); /* 2 data lanes, see doc */
ctx               168 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_CMD_MODE_SW, MCS_CMD2_P2);
ctx               169 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_VSTV1, 0x05);
ctx               170 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x02, 0x0B);
ctx               171 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x03, 0x0F);
ctx               172 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x04, 0x7D, 0x00, 0x50);
ctx               173 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, GOA_VSTV2, 0x05, 0x16, 0x0D, 0x11, 0x7D, 0x00,
ctx               175 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, GOA_VCLK1, 0x07, 0x08, 0x01, 0x02, 0x00, 0x7D,
ctx               177 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, GOA_VCLK2, 0x03, 0x04, 0x05, 0x06, 0x00, 0x7D,
ctx               179 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_VCLK_OPT1, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               181 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, GOA_BICLK1, 0x07, 0x08);
ctx               182 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x2D, 0x01);
ctx               183 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x2F, 0x02, 0x00, 0x40, 0x05, 0x08, 0x54, 0x7D,
ctx               185 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, GOA_BICLK2, 0x03, 0x04, 0x05, 0x06, 0x00);
ctx               186 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x3D, 0x40);
ctx               187 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x3F, 0x05, 0x08, 0x54, 0x7D, 0x00);
ctx               188 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_BICLK3, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               190 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_BICLK4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               192 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x58, 0x00, 0x00, 0x00);
ctx               193 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_BICLK_OPT1, 0x00, 0x00, 0x00, 0x00, 0x00);
ctx               194 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, GOA_BICLK_OPT2, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
ctx               196 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_GOA_GPO1, 0x00, 0x00, 0x00, 0x00);
ctx               197 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_GOA_GPO2, 0x00, 0x20, 0x00);
ctx               198 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_GOA_EQ, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
ctx               200 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_GOA_CLK_GALLON, 0x00, 0x00);
ctx               201 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_FS_SEL0, 0xBF, 0x02, 0x06, 0x14, 0x10,
ctx               203 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_FS_SEL1, 0x3F, 0x3F, 0x3F, 0x3F, 0x0C,
ctx               205 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_FS_SEL2, 0x04, 0x3F, 0x3F, 0x3F, 0x3F,
ctx               207 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_FS_SEL3, 0x0B, 0x0D, 0x3F, 0x3F, 0x3F,
ctx               209 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, 0xA2, 0x3F, 0x09, 0x13, 0x17, 0x11, 0x15);
ctx               210 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, 0xA9, 0x07, 0x03, 0x3F);
ctx               211 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_BS_SEL0, 0x3F, 0x05, 0x01, 0x17, 0x13,
ctx               213 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_BS_SEL1, 0x3F, 0x3F, 0x3F, 0x3F, 0x0B,
ctx               215 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_BS_SEL2, 0x03, 0x3F, 0x3F, 0x3F, 0x3F,
ctx               217 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_BS_SEL3, 0x0C, 0x0A, 0x3F, 0x3F, 0x3F,
ctx               219 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GOA_BS_SEL4, 0x12, 0x16, 0x00, 0x04, 0x3F);
ctx               220 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0xDC, 0x02);
ctx               221 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0xDE, 0x12);
ctx               223 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_CMD_MODE_SW, 0x0E); /* No documentation */
ctx               224 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, 0x01, 0x75);
ctx               226 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_CMD_MODE_SW, MCS_CMD2_P3);
ctx               227 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GAMMA_VP, 0x00, 0x0C, 0x12, 0x0E, 0x06,
ctx               230 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_cmd_seq(ctx, MCS_GAMMA_VN, 0x00, 0x0C, 0x12, 0x0E, 0x06,
ctx               235 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	dcs_write_seq(ctx, MCS_CMD_MODE_SW, MCS_CMD1_UCS);
ctx               240 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx = panel_to_rm68200(panel);
ctx               242 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (!ctx->enabled)
ctx               245 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	backlight_disable(ctx->backlight);
ctx               247 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->enabled = false;
ctx               254 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx = panel_to_rm68200(panel);
ctx               255 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               258 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (!ctx->prepared)
ctx               271 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (ctx->reset_gpio) {
ctx               272 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 1);
ctx               276 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	regulator_disable(ctx->supply);
ctx               278 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->prepared = false;
ctx               285 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx = panel_to_rm68200(panel);
ctx               286 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               289 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (ctx->prepared)
ctx               292 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ret = regulator_enable(ctx->supply);
ctx               298 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (ctx->reset_gpio) {
ctx               299 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 1);
ctx               301 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		gpiod_set_value_cansleep(ctx->reset_gpio, 0);
ctx               305 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	rm68200_init_sequence(ctx);
ctx               319 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->prepared = true;
ctx               326 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx = panel_to_rm68200(panel);
ctx               328 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (ctx->enabled)
ctx               331 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	backlight_enable(ctx->backlight);
ctx               333 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->enabled = true;
ctx               372 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx;
ctx               375 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               376 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (!ctx)
ctx               379 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->reset_gpio = devm_gpiod_get_optional(dev, "reset", GPIOD_OUT_LOW);
ctx               380 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               381 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		ret = PTR_ERR(ctx->reset_gpio);
ctx               386 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->supply = devm_regulator_get(dev, "power");
ctx               387 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (IS_ERR(ctx->supply)) {
ctx               388 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		ret = PTR_ERR(ctx->supply);
ctx               394 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->backlight = devm_of_find_backlight(dev);
ctx               395 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	if (IS_ERR(ctx->backlight))
ctx               396 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		return PTR_ERR(ctx->backlight);
ctx               398 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               400 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->dev = dev;
ctx               407 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	drm_panel_init(&ctx->panel);
ctx               408 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->panel.dev = dev;
ctx               409 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	ctx->panel.funcs = &rm68200_drm_funcs;
ctx               411 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	drm_panel_add(&ctx->panel);
ctx               416 drivers/gpu/drm/panel/panel-raydium-rm68200.c 		drm_panel_remove(&ctx->panel);
ctx               425 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	struct rm68200 *ctx = mipi_dsi_get_drvdata(dsi);
ctx               428 drivers/gpu/drm/panel/panel-raydium-rm68200.c 	drm_panel_remove(&ctx->panel);
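rm68200's dcs_write_cmd_seq() differs from its dcs_write_seq(): rather than one burst, it writes each payload byte to consecutive command addresses, cmd + i, through rm68200_dcs_write_cmd(). A standalone sketch of that per-byte addressing; the 0xF0 unlock triple is hypothetical:

#include <stdio.h>
#include <stddef.h>

static void write_cmd(unsigned int cmd, unsigned int value)
{
	printf("reg %02x <- %02x\n", cmd, value);
}

#define write_cmd_seq(cmd, seq...)				\
	do {							\
		static const unsigned char d[] = { seq };	\
		for (size_t i = 0; i < sizeof(d); i++)		\
			write_cmd((cmd) + i, d[i]);		\
	} while (0)

int main(void)
{
	write_cmd_seq(0xF0, 0x55, 0xAA, 0x52);	/* hypothetical unlock triple */
	return 0;
}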
ctx                71 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c static int jh057n_init_sequence(struct jh057n *ctx)
ctx                73 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx                74 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct device *dev = ctx->dev;
ctx               145 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = panel_to_jh057n(panel);
ctx               148 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ret = jh057n_init_sequence(ctx);
ctx               150 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		DRM_DEV_ERROR(ctx->dev, "Panel init sequence failed: %d\n",
ctx               155 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	return backlight_enable(ctx->backlight);
ctx               160 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = panel_to_jh057n(panel);
ctx               161 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               163 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	backlight_disable(ctx->backlight);
ctx               169 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = panel_to_jh057n(panel);
ctx               171 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (!ctx->prepared)
ctx               174 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	regulator_disable(ctx->iovcc);
ctx               175 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	regulator_disable(ctx->vcc);
ctx               176 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->prepared = false;
ctx               183 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = panel_to_jh057n(panel);
ctx               186 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (ctx->prepared)
ctx               189 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	DRM_DEV_DEBUG_DRIVER(ctx->dev, "Resetting the panel\n");
ctx               190 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ret = regulator_enable(ctx->vcc);
ctx               192 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		DRM_DEV_ERROR(ctx->dev,
ctx               196 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ret = regulator_enable(ctx->iovcc);
ctx               198 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		DRM_DEV_ERROR(ctx->dev,
ctx               203 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	gpiod_set_value_cansleep(ctx->reset_gpio, 1);
ctx               205 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	gpiod_set_value_cansleep(ctx->reset_gpio, 0);
ctx               208 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->prepared = true;
ctx               213 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	regulator_disable(ctx->vcc);
ctx               235 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = panel_to_jh057n(panel);
ctx               240 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		DRM_DEV_ERROR(ctx->dev, "Failed to add mode %ux%u@%u\n",
ctx               266 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = data;
ctx               267 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               269 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	DRM_DEV_DEBUG_DRIVER(ctx->dev, "Setting all pixels on\n");
ctx               273 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_disable(&ctx->panel);
ctx               274 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_unprepare(&ctx->panel);
ctx               275 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_prepare(&ctx->panel);
ctx               276 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_enable(&ctx->panel);
ctx               284 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c static void jh057n_debugfs_init(struct jh057n *ctx)
ctx               286 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->debugfs = debugfs_create_dir(DRV_NAME, NULL);
ctx               288 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	debugfs_create_file("allpixelson", 0600, ctx->debugfs, ctx,
ctx               292 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c static void jh057n_debugfs_remove(struct jh057n *ctx)
ctx               294 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	debugfs_remove_recursive(ctx->debugfs);
ctx               295 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->debugfs = NULL;
ctx               301 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx;
ctx               304 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               305 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (!ctx)
ctx               308 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_LOW);
ctx               309 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               311 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		return PTR_ERR(ctx->reset_gpio);
ctx               314 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               316 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->dev = dev;
ctx               323 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->backlight = devm_of_find_backlight(dev);
ctx               324 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (IS_ERR(ctx->backlight))
ctx               325 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		return PTR_ERR(ctx->backlight);
ctx               327 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->vcc = devm_regulator_get(dev, "vcc");
ctx               328 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (IS_ERR(ctx->vcc)) {
ctx               329 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		ret = PTR_ERR(ctx->vcc);
ctx               336 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->iovcc = devm_regulator_get(dev, "iovcc");
ctx               337 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	if (IS_ERR(ctx->iovcc)) {
ctx               338 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		ret = PTR_ERR(ctx->iovcc);
ctx               346 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_init(&ctx->panel);
ctx               347 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->panel.dev = dev;
ctx               348 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ctx->panel.funcs = &jh057n_drm_funcs;
ctx               350 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_add(&ctx->panel);
ctx               357 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 		drm_panel_remove(&ctx->panel);
ctx               366 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	jh057n_debugfs_init(ctx);
ctx               372 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = mipi_dsi_get_drvdata(dsi);
ctx               375 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ret = drm_panel_unprepare(&ctx->panel);
ctx               380 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	ret = drm_panel_disable(&ctx->panel);
ctx               388 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	struct jh057n *ctx = mipi_dsi_get_drvdata(dsi);
ctx               398 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	drm_panel_remove(&ctx->panel);
ctx               400 drivers/gpu/drm/panel/panel-rocktech-jh057n00900.c 	jh057n_debugfs_remove(ctx);
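The jh057n entries close out with the driver's debugfs lifecycle: one directory created at probe time, an "allpixelson" control file hung off it, and a recursive teardown that also clears the cached dentry so a repeated remove cannot act on a stale pointer. A minimal sketch of that pattern, assuming a hypothetical my_panel context and test hook; the fops wiring below is illustrative, not copied from the driver:

        #include <linux/debugfs.h>
        #include <linux/fs.h>

        struct my_panel {
                struct dentry *debugfs;
        };

        /* Hypothetical debug hook; the real driver issues an all-pixels-on
         * DCS test command from here. */
        static int my_panel_allpixelson_set(void *data, u64 val)
        {
                struct my_panel *ctx = data;

                (void)ctx;      /* send test-pattern command via ctx */
                return 0;
        }
        DEFINE_SIMPLE_ATTRIBUTE(allpixelson_fops, NULL,
                                my_panel_allpixelson_set, "%llu\n");

        static void my_panel_debugfs_init(struct my_panel *ctx)
        {
                ctx->debugfs = debugfs_create_dir("my_panel", NULL);
                debugfs_create_file("allpixelson", 0600, ctx->debugfs, ctx,
                                    &allpixelson_fops);
        }

        static void my_panel_debugfs_remove(struct my_panel *ctx)
        {
                /* Removes the directory and every file under it. */
                debugfs_remove_recursive(ctx->debugfs);
                ctx->debugfs = NULL;    /* guard against repeated teardown */
        }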
ctx                50 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
ctx                53 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ret = regulator_enable(ctx->supply);
ctx                55 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		DRM_DEV_ERROR(&ctx->dsi->dev, "Failed to enable supply: %d\n", ret);
ctx                60 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	gpiod_set_value(ctx->gpios.power, 1);
ctx                62 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	gpiod_set_value(ctx->gpios.reset, 1);
ctx                69 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
ctx                71 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	gpiod_set_value(ctx->gpios.reset, 0);
ctx                72 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	gpiod_set_value(ctx->gpios.power, 0);
ctx                73 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	regulator_disable(ctx->supply);
ctx                80 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
ctx                83 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ret = mipi_dsi_dcs_exit_sleep_mode(ctx->dsi);
ctx                87 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ret = backlight_enable(ctx->backlight);
ctx                94 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
ctx               100 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
ctx               102 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	backlight_disable(ctx->backlight);
ctx               103 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	return mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
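The rb070d30 enable/disable entries above pair DCS sleep-mode control with the backlight: enable exits sleep mode and lights the backlight, disable does the reverse. The stray enter_sleep_mode entry between them suggests enable rolls the panel back to sleep when the backlight fails; a sketch under that reading:

        static int rb070d30_panel_enable(struct drm_panel *panel)
        {
                struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
                int ret;

                ret = mipi_dsi_dcs_exit_sleep_mode(ctx->dsi);
                if (ret)
                        return ret;

                ret = backlight_enable(ctx->backlight);
                if (ret) {
                        /* Assumed rollback: put the panel back to sleep. */
                        mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
                        return ret;
                }

                return 0;
        }

        static int rb070d30_panel_disable(struct drm_panel *panel)
        {
                struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);

                backlight_disable(ctx->backlight);
                return mipi_dsi_dcs_enter_sleep_mode(ctx->dsi);
        }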
ctx               126 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
ctx               132 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		DRM_DEV_ERROR(&ctx->dsi->dev,
ctx               162 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx;
ctx               165 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx = devm_kzalloc(&dsi->dev, sizeof(*ctx), GFP_KERNEL);
ctx               166 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (!ctx)
ctx               169 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->supply = devm_regulator_get(&dsi->dev, "vcc-lcd");
ctx               170 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->supply))
ctx               171 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->supply);
ctx               173 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               174 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->dsi = dsi;
ctx               176 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	drm_panel_init(&ctx->panel);
ctx               177 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->panel.dev = &dsi->dev;
ctx               178 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->panel.funcs = &rb070d30_panel_funcs;
ctx               180 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->gpios.reset = devm_gpiod_get(&dsi->dev, "reset", GPIOD_OUT_LOW);
ctx               181 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->gpios.reset)) {
ctx               183 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->gpios.reset);
ctx               186 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->gpios.power = devm_gpiod_get(&dsi->dev, "power", GPIOD_OUT_LOW);
ctx               187 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->gpios.power)) {
ctx               189 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->gpios.power);
ctx               196 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->gpios.updn = devm_gpiod_get(&dsi->dev, "updn", GPIOD_OUT_LOW);
ctx               197 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->gpios.updn)) {
ctx               199 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->gpios.updn);
ctx               206 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->gpios.shlr = devm_gpiod_get(&dsi->dev, "shlr", GPIOD_OUT_LOW);
ctx               207 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->gpios.shlr)) {
ctx               209 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->gpios.shlr);
ctx               212 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ctx->backlight = devm_of_find_backlight(&dsi->dev);
ctx               213 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	if (IS_ERR(ctx->backlight)) {
ctx               215 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 		return PTR_ERR(ctx->backlight);
ctx               218 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	ret = drm_panel_add(&ctx->panel);
ctx               231 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	struct rb070d30_panel *ctx = mipi_dsi_get_drvdata(dsi);
ctx               234 drivers/gpu/drm/panel/panel-ronbo-rb070d30.c 	drm_panel_remove(&ctx->panel);
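Power sequencing for the same panel is symmetric: prepare brings up the vcc-lcd supply, then the power GPIO, then releases reset; unprepare tears down in exactly the reverse order. A condensed sketch in which the inter-step delays are assumptions and everything else follows the entries above:

        static int rb070d30_panel_prepare(struct drm_panel *panel)
        {
                struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);
                int ret;

                ret = regulator_enable(ctx->supply);
                if (ret < 0) {
                        DRM_DEV_ERROR(&ctx->dsi->dev,
                                      "Failed to enable supply: %d\n", ret);
                        return ret;
                }

                msleep(20);                             /* delay values assumed */
                gpiod_set_value(ctx->gpios.power, 1);   /* panel power rail */
                msleep(20);
                gpiod_set_value(ctx->gpios.reset, 1);   /* release reset last */
                msleep(20);

                return 0;
        }

        static int rb070d30_panel_unprepare(struct drm_panel *panel)
        {
                struct rb070d30_panel *ctx = panel_to_rb070d30_panel(panel);

                /* Strict reverse of prepare. */
                gpiod_set_value(ctx->gpios.reset, 0);
                gpiod_set_value(ctx->gpios.power, 0);
                regulator_disable(ctx->supply);

                return 0;
        }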
ctx               119 drivers/gpu/drm/panel/panel-samsung-ld9040.c static int ld9040_clear_error(struct ld9040 *ctx)
ctx               121 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	int ret = ctx->error;
ctx               123 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->error = 0;
ctx               127 drivers/gpu/drm/panel/panel-samsung-ld9040.c static int ld9040_spi_write_word(struct ld9040 *ctx, u16 data)
ctx               129 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct spi_device *spi = to_spi_device(ctx->dev);
ctx               142 drivers/gpu/drm/panel/panel-samsung-ld9040.c static void ld9040_dcs_write(struct ld9040 *ctx, const u8 *data, size_t len)
ctx               146 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	if (ctx->error < 0 || len == 0)
ctx               149 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	dev_dbg(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data);
ctx               150 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = ld9040_spi_write_word(ctx, *data);
ctx               154 drivers/gpu/drm/panel/panel-samsung-ld9040.c 		ret = ld9040_spi_write_word(ctx, *data | 0x100);
ctx               158 drivers/gpu/drm/panel/panel-samsung-ld9040.c 		dev_err(ctx->dev, "error %d writing dcs seq: %*ph\n", ret,
ctx               160 drivers/gpu/drm/panel/panel-samsung-ld9040.c 		ctx->error = ret;
ctx               166 drivers/gpu/drm/panel/panel-samsung-ld9040.c #define ld9040_dcs_write_seq_static(ctx, seq...) \
ctx               169 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write(ctx, d, ARRAY_SIZE(d));\
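The ld9040 write path above is worth spelling out: the controller is driven over 3-wire SPI with 9-bit words, where bit 8 is the data/command flag. The first byte of a sequence goes out with the flag clear (command), every following byte with 0x100 OR-ed in (parameter), and any SPI error is latched in ctx->error for ld9040_clear_error() to report later. Reconstructed from the entries above; the 2-byte transfer length assumes the SPI device was configured for 9 bits_per_word at probe:

        static int ld9040_spi_write_word(struct ld9040 *ctx, u16 data)
        {
                struct spi_device *spi = to_spi_device(ctx->dev);
                struct spi_transfer xfer = {
                        .len    = 2,            /* one 9-bit word on the wire */
                        .tx_buf = &data,
                };
                struct spi_message msg;

                spi_message_init(&msg);
                spi_message_add_tail(&xfer, &msg);

                return spi_sync(spi, &msg);
        }

        static void ld9040_dcs_write(struct ld9040 *ctx, const u8 *data, size_t len)
        {
                int ret = 0;

                if (ctx->error < 0 || len == 0)
                        return;

                dev_dbg(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data);

                /* Command byte: D/C flag (bit 8) clear. */
                ret = ld9040_spi_write_word(ctx, *data);

                /* Parameter bytes: D/C flag set. */
                while (!ret && --len) {
                        ++data;
                        ret = ld9040_spi_write_word(ctx, *data | 0x100);
                }

                if (ret) {
                        dev_err(ctx->dev, "error %d writing dcs seq: %*ph\n", ret,
                                (int)len, data);
                        ctx->error = ret;       /* sticky; see ld9040_clear_error() */
                }
        }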
ctx               172 drivers/gpu/drm/panel/panel-samsung-ld9040.c static void ld9040_brightness_set(struct ld9040 *ctx)
ctx               174 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write(ctx, ld9040_gammas[ctx->brightness],
ctx               175 drivers/gpu/drm/panel/panel-samsung-ld9040.c 			 ARRAY_SIZE(ld9040_gammas[ctx->brightness]));
ctx               177 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_GAMMA_CTRL, 0x02, 0x5a);
ctx               180 drivers/gpu/drm/panel/panel-samsung-ld9040.c static void ld9040_init(struct ld9040 *ctx)
ctx               182 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_USER_SETTING, 0x5a, 0x5a);
ctx               183 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_PANEL_CONDITION,
ctx               187 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_DISPCTL,
ctx               189 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_MANPWR, 0x04);
ctx               190 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_POWER_CTRL,
ctx               192 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_ELVSS_ON, 0x0d, 0x00, 0x16);
ctx               193 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MCS_GTCON, 0x09, 0x00, 0x00);
ctx               194 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_brightness_set(ctx);
ctx               195 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
ctx               196 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_ON);
ctx               199 drivers/gpu/drm/panel/panel-samsung-ld9040.c static int ld9040_power_on(struct ld9040 *ctx)
ctx               203 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               207 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	msleep(ctx->power_on_delay);
ctx               208 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               209 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	msleep(ctx->reset_delay);
ctx               210 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               211 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	msleep(ctx->reset_delay);
ctx               216 drivers/gpu/drm/panel/panel-samsung-ld9040.c static int ld9040_power_off(struct ld9040 *ctx)
ctx               218 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
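Power-up for the ld9040 is parameterized by two DT-sourced delays: the supplies come up, the driver waits power_on_delay, then toggles the reset line low and back high with reset_delay on either side (logical GPIO values; the physical polarity comes from the DT flags). Power-down is just the bulk regulator disable:

        static int ld9040_power_on(struct ld9040 *ctx)
        {
                int ret;

                ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
                if (ret < 0)
                        return ret;

                msleep(ctx->power_on_delay);
                gpiod_set_value(ctx->reset_gpio, 0);    /* toggle reset */
                msleep(ctx->reset_delay);
                gpiod_set_value(ctx->reset_gpio, 1);
                msleep(ctx->reset_delay);

                return 0;
        }

        static int ld9040_power_off(struct ld9040 *ctx)
        {
                return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies),
                                              ctx->supplies);
        }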
ctx               228 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct ld9040 *ctx = panel_to_ld9040(panel);
ctx               231 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_OFF);
ctx               232 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_dcs_write_seq_static(ctx, MIPI_DCS_ENTER_SLEEP_MODE);
ctx               235 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_clear_error(ctx);
ctx               237 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	return ld9040_power_off(ctx);
ctx               242 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct ld9040 *ctx = panel_to_ld9040(panel);
ctx               245 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = ld9040_power_on(ctx);
ctx               249 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_init(ctx);
ctx               251 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = ld9040_clear_error(ctx);
ctx               267 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct ld9040 *ctx = panel_to_ld9040(panel);
ctx               276 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	drm_display_mode_from_videomode(&ctx->vm, mode);
ctx               277 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	mode->width_mm = ctx->width_mm;
ctx               278 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	mode->height_mm = ctx->height_mm;
ctx               296 drivers/gpu/drm/panel/panel-samsung-ld9040.c static int ld9040_parse_dt(struct ld9040 *ctx)
ctx               298 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct device *dev = ctx->dev;
ctx               302 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = of_get_videomode(np, &ctx->vm, 0);
ctx               306 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	of_property_read_u32(np, "power-on-delay", &ctx->power_on_delay);
ctx               307 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	of_property_read_u32(np, "reset-delay", &ctx->reset_delay);
ctx               308 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	of_property_read_u32(np, "panel-width-mm", &ctx->width_mm);
ctx               309 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	of_property_read_u32(np, "panel-height-mm", &ctx->height_mm);
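ld9040_parse_dt() pulls everything board-specific from the device tree: the video mode comes from the display-timings node via of_get_videomode(), and the delays and physical dimensions are best-effort u32 reads, so absent properties simply leave the zero-initialized fields alone. A sketch with the property names as they appear above:

        #include <video/of_videomode.h>

        static int ld9040_parse_dt(struct ld9040 *ctx)
        {
                struct device *dev = ctx->dev;
                struct device_node *np = dev->of_node;
                int ret;

                ret = of_get_videomode(np, &ctx->vm, 0);
                if (ret < 0)
                        return ret;

                /* Optional tuning knobs; return values ignored on purpose. */
                of_property_read_u32(np, "power-on-delay", &ctx->power_on_delay);
                of_property_read_u32(np, "reset-delay", &ctx->reset_delay);
                of_property_read_u32(np, "panel-width-mm", &ctx->width_mm);
                of_property_read_u32(np, "panel-height-mm", &ctx->height_mm);

                return 0;
        }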
ctx               317 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct ld9040 *ctx;
ctx               320 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx = devm_kzalloc(dev, sizeof(struct ld9040), GFP_KERNEL);
ctx               321 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	if (!ctx)
ctx               324 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	spi_set_drvdata(spi, ctx);
ctx               326 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->dev = dev;
ctx               327 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->brightness = ARRAY_SIZE(ld9040_gammas) - 1;
ctx               329 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = ld9040_parse_dt(ctx);
ctx               333 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->supplies[0].supply = "vdd3";
ctx               334 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->supplies[1].supply = "vci";
ctx               335 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx               336 drivers/gpu/drm/panel/panel-samsung-ld9040.c 				      ctx->supplies);
ctx               340 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_HIGH);
ctx               341 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               343 drivers/gpu/drm/panel/panel-samsung-ld9040.c 			PTR_ERR(ctx->reset_gpio));
ctx               344 drivers/gpu/drm/panel/panel-samsung-ld9040.c 		return PTR_ERR(ctx->reset_gpio);
ctx               354 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	drm_panel_init(&ctx->panel);
ctx               355 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->panel.dev = dev;
ctx               356 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ctx->panel.funcs = &ld9040_drm_funcs;
ctx               358 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	return drm_panel_add(&ctx->panel);
ctx               363 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	struct ld9040 *ctx = spi_get_drvdata(spi);
ctx               365 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	ld9040_power_off(ctx);
ctx               366 drivers/gpu/drm/panel/panel-samsung-ld9040.c 	drm_panel_remove(&ctx->panel);
ctx               245 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_dcs_write(struct s6e3ha2 *ctx, const void *data, size_t len)
ctx               247 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               252 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c #define s6e3ha2_dcs_write_seq_static(ctx, seq...) do {	\
ctx               255 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = s6e3ha2_dcs_write(ctx, d, ARRAY_SIZE(d));	\
ctx               266 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test_key_on_f0(struct s6e3ha2 *ctx)
ctx               268 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf0, 0x5a, 0x5a);
ctx               272 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test_key_off_f0(struct s6e3ha2 *ctx)
ctx               274 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf0, 0xa5, 0xa5);
ctx               278 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test_key_on_fc(struct s6e3ha2 *ctx)
ctx               280 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfc, 0x5a, 0x5a);
ctx               284 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test_key_off_fc(struct s6e3ha2 *ctx)
ctx               286 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfc, 0xa5, 0xa5);
ctx               290 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_single_dsi_set(struct s6e3ha2 *ctx)
ctx               292 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf2, 0x67);
ctx               293 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf9, 0x09);
ctx               297 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_freq_calibration(struct s6e3ha2 *ctx)
ctx               299 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfd, 0x1c);
ctx               300 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (ctx->desc->type == HF2_TYPE)
ctx               301 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		s6e3ha2_dcs_write_seq_static(ctx, 0xf2, 0x67, 0x40, 0xc5);
ctx               302 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfe, 0x20, 0x39);
ctx               303 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfe, 0xa0);
ctx               304 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfe, 0x20);
ctx               306 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (ctx->desc->type == HA2_TYPE)
ctx               307 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		s6e3ha2_dcs_write_seq_static(ctx, 0xce, 0x03, 0x3b, 0x12, 0x62,
ctx               311 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		s6e3ha2_dcs_write_seq_static(ctx, 0xce, 0x03, 0x3b, 0x14, 0x6d,
ctx               318 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_aor_control(struct s6e3ha2 *ctx)
ctx               320 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb2, 0x03, 0x10);
ctx               324 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_caps_elvss_set(struct s6e3ha2 *ctx)
ctx               326 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb6, 0x9c, 0x0a);
ctx               330 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_acl_off(struct s6e3ha2 *ctx)
ctx               332 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0x55, 0x00);
ctx               336 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_acl_off_opr(struct s6e3ha2 *ctx)
ctx               338 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb5, 0x40);
ctx               342 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test_global(struct s6e3ha2 *ctx)
ctx               344 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb0, 0x07);
ctx               348 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_test(struct s6e3ha2 *ctx)
ctx               350 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb8, 0x19);
ctx               354 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_touch_hsync_on1(struct s6e3ha2 *ctx)
ctx               356 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xbd, 0x33, 0x11, 0x02,
ctx               361 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_pentile_control(struct s6e3ha2 *ctx)
ctx               363 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xc0, 0x00, 0x00, 0xd8, 0xd8);
ctx               367 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_poc_global(struct s6e3ha2 *ctx)
ctx               369 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb0, 0x20);
ctx               373 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_poc_setting(struct s6e3ha2 *ctx)
ctx               375 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xfe, 0x08);
ctx               379 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_pcd_set_off(struct s6e3ha2 *ctx)
ctx               381 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xcc, 0x40, 0x51);
ctx               385 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_err_fg_set(struct s6e3ha2 *ctx)
ctx               387 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xed, 0x44);
ctx               391 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_hbm_off(struct s6e3ha2 *ctx)
ctx               393 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0x53, 0x00);
ctx               397 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_te_start_setting(struct s6e3ha2 *ctx)
ctx               399 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xb9, 0x10, 0x09, 0xff, 0x00, 0x09);
ctx               403 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_gamma_update(struct s6e3ha2 *ctx)
ctx               405 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf7, 0x03);
ctx               407 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_dcs_write_seq_static(ctx, 0xf7, 0x00);
ctx               416 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_set_vint(struct s6e3ha2 *ctx)
ctx               418 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct backlight_device *bl_dev = ctx->bl_dev;
ctx               424 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	return s6e3ha2_dcs_write(ctx, data, ARRAY_SIZE(data));
ctx               433 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_update_gamma(struct s6e3ha2 *ctx, unsigned int brightness)
ctx               435 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct backlight_device *bl_dev = ctx->bl_dev;
ctx               442 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 				s6e3ha2_dcs_write(ctx, data, ARRAY_SIZE(data)));
ctx               444 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_gamma_update(ctx));
ctx               452 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = bl_get_data(bl_dev);
ctx               458 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		dev_err(ctx->dev, "Invalid brightness: %u\n", brightness);
ctx               465 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_f0(ctx));
ctx               466 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_update_gamma(ctx, brightness));
ctx               467 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_aor_control(ctx));
ctx               468 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_set_vint(ctx));
ctx               469 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_f0(ctx));
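s6e3ha2_set_brightness() is a backlight update_status hook: it reads the requested level from the backlight properties, rejects out-of-range values, then wraps the gamma/AOR/VINT updates in the f0 test key. Wiring it up looks roughly like this; the ops-struct name and the minimum-brightness constant are assumptions, while the register call matches the probe entries further down:

        static int s6e3ha2_set_brightness(struct backlight_device *bl_dev)
        {
                struct s6e3ha2 *ctx = bl_get_data(bl_dev);
                unsigned int brightness = bl_dev->props.brightness;
                int ret;

                if (brightness > bl_dev->props.max_brightness) {
                        dev_err(ctx->dev, "Invalid brightness: %u\n", brightness);
                        return -EINVAL;
                }

                /* Vendor registers only accept writes between the f0 key pair. */
                s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_f0(ctx));
                s6e3ha2_call_write_func(ret, s6e3ha2_update_gamma(ctx, brightness));
                s6e3ha2_call_write_func(ret, s6e3ha2_aor_control(ctx));
                s6e3ha2_call_write_func(ret, s6e3ha2_set_vint(ctx));
                s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_f0(ctx));

                return 0;
        }

        /* Assumed struct name; update_status is the standard backlight hook. */
        static const struct backlight_ops s6e3ha2_bl_ops = {
                .update_status = s6e3ha2_set_brightness,
        };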
ctx               479 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_panel_init(struct s6e3ha2 *ctx)
ctx               481 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               487 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_f0(ctx));
ctx               488 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_single_dsi_set(ctx));
ctx               489 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_fc(ctx));
ctx               490 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_freq_calibration(ctx));
ctx               491 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_fc(ctx));
ctx               492 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_f0(ctx));
ctx               497 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_power_off(struct s6e3ha2 *ctx)
ctx               499 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               504 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = container_of(panel, struct s6e3ha2, panel);
ctx               505 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               512 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.power = FB_BLANK_NORMAL;
ctx               519 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = container_of(panel, struct s6e3ha2, panel);
ctx               521 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	return s6e3ha2_power_off(ctx);
ctx               524 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c static int s6e3ha2_power_on(struct s6e3ha2 *ctx)
ctx               528 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               534 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	gpiod_set_value(ctx->enable_gpio, 0);
ctx               536 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	gpiod_set_value(ctx->enable_gpio, 1);
ctx               538 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               540 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	gpiod_set_value(ctx->reset_gpio, 0);
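s6e3ha2_power_on() shows the GPIO choreography around the bulk regulators: the enable line is pulsed low then high, then reset is asserted high and released low. Only the ordering below is taken from the entries above; the delay values are placeholders:

        static int s6e3ha2_power_on(struct s6e3ha2 *ctx)
        {
                int ret;

                ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
                if (ret < 0)
                        return ret;

                msleep(120);                            /* placeholder delay */

                gpiod_set_value(ctx->enable_gpio, 0);
                usleep_range(5000, 6000);               /* placeholder delay */
                gpiod_set_value(ctx->enable_gpio, 1);

                gpiod_set_value(ctx->reset_gpio, 1);    /* assert reset */
                usleep_range(5000, 6000);               /* placeholder delay */
                gpiod_set_value(ctx->reset_gpio, 0);    /* release reset */
                msleep(120);                            /* placeholder delay */

                return 0;
        }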
ctx               547 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = container_of(panel, struct s6e3ha2, panel);
ctx               550 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = s6e3ha2_power_on(ctx);
ctx               554 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = s6e3ha2_panel_init(ctx);
ctx               558 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.power = FB_BLANK_NORMAL;
ctx               563 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_power_off(ctx);
ctx               569 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = container_of(panel, struct s6e3ha2, panel);
ctx               570 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               577 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_f0(ctx));
ctx               578 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_on_fc(ctx));
ctx               579 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_touch_hsync_on1(ctx));
ctx               580 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_pentile_control(ctx));
ctx               581 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_poc_global(ctx));
ctx               582 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_poc_setting(ctx));
ctx               583 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_fc(ctx));
ctx               586 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_pcd_set_off(ctx));
ctx               587 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_err_fg_set(ctx));
ctx               588 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_te_start_setting(ctx));
ctx               591 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_set_brightness(ctx->bl_dev));
ctx               592 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_aor_control(ctx));
ctx               593 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_caps_elvss_set(ctx));
ctx               594 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_gamma_update(ctx));
ctx               595 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_acl_off(ctx));
ctx               596 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_acl_off_opr(ctx));
ctx               597 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_hbm_off(ctx));
ctx               600 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_global(ctx));
ctx               601 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test(ctx));
ctx               602 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	s6e3ha2_call_write_func(ret, s6e3ha2_test_key_off_f0(ctx));
ctx               605 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.power = FB_BLANK_UNBLANK;
ctx               651 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = container_of(panel, struct s6e3ha2, panel);
ctx               654 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	mode = drm_mode_duplicate(panel->drm, ctx->desc->mode);
ctx               657 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 			ctx->desc->mode->hdisplay, ctx->desc->mode->vdisplay,
ctx               658 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 			ctx->desc->mode->vrefresh);
ctx               684 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx;
ctx               687 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               688 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (!ctx)
ctx               691 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               693 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->dev = dev;
ctx               694 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->desc = of_device_get_match_data(dev);
ctx               700 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->supplies[0].supply = "vdd3";
ctx               701 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->supplies[1].supply = "vci";
ctx               703 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx               704 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 				      ctx->supplies);
ctx               710 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_LOW);
ctx               711 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               713 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 			PTR_ERR(ctx->reset_gpio));
ctx               714 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		return PTR_ERR(ctx->reset_gpio);
ctx               717 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->enable_gpio = devm_gpiod_get(dev, "enable", GPIOD_OUT_HIGH);
ctx               718 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (IS_ERR(ctx->enable_gpio)) {
ctx               720 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 			PTR_ERR(ctx->enable_gpio));
ctx               721 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		return PTR_ERR(ctx->enable_gpio);
ctx               724 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev = backlight_device_register("s6e3ha2", dev, ctx,
ctx               726 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	if (IS_ERR(ctx->bl_dev)) {
ctx               728 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 		return PTR_ERR(ctx->bl_dev);
ctx               731 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.max_brightness = S6E3HA2_MAX_BRIGHTNESS;
ctx               732 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.brightness = S6E3HA2_DEFAULT_BRIGHTNESS;
ctx               733 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->bl_dev->props.power = FB_BLANK_POWERDOWN;
ctx               735 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	drm_panel_init(&ctx->panel);
ctx               736 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->panel.dev = dev;
ctx               737 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ctx->panel.funcs = &s6e3ha2_drm_funcs;
ctx               739 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	ret = drm_panel_add(&ctx->panel);
ctx               750 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	drm_panel_remove(&ctx->panel);
ctx               753 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	backlight_device_unregister(ctx->bl_dev);
ctx               760 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	struct s6e3ha2 *ctx = mipi_dsi_get_drvdata(dsi);
ctx               763 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	drm_panel_remove(&ctx->panel);
ctx               764 drivers/gpu/drm/panel/panel-samsung-s6e3ha2.c 	backlight_device_unregister(ctx->bl_dev);
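Two macros carry almost every s6e3ha2 entry in this stretch. s6e3ha2_dcs_write_seq_static() freezes the byte sequence into rodata and pushes it through one mipi_dsi_dcs_write_buffer() call; s6e3ha2_call_write_func() chains helper after helper and stops at the first negative return. The exact macro bodies are not visible in the index, so the early-exit form below is a reconstruction consistent with the call sites:

        static int s6e3ha2_dcs_write(struct s6e3ha2 *ctx, const void *data, size_t len)
        {
                struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);

                return mipi_dsi_dcs_write_buffer(dsi, data, len);
        }

        #define s6e3ha2_dcs_write_seq_static(ctx, seq...) do {  \
                static const u8 d[] = { seq };                  \
                int ret;                                        \
                ret = s6e3ha2_dcs_write(ctx, d, ARRAY_SIZE(d)); \
                if (ret < 0)                                    \
                        return ret;                             \
        } while (0)

        /* Reconstructed: run one write helper, abort the caller on error. */
        #define s6e3ha2_call_write_func(ret, func) do {         \
                (ret) = (func);                                 \
                if ((ret) < 0)                                  \
                        return (ret);                           \
        } while (0)

        /* Every register helper then collapses to one line, e.g.: */
        static int s6e3ha2_test_key_on_f0(struct s6e3ha2 *ctx)
        {
                s6e3ha2_dcs_write_seq_static(ctx, 0xf0, 0x5a, 0x5a);
                return 0;
        }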
ctx               121 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static inline ssize_t s6e63j0x03_dcs_write_seq(struct s6e63j0x03 *ctx,
ctx               124 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               129 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c #define s6e63j0x03_dcs_write_seq_static(ctx, seq...)			\
ctx               132 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		s6e63j0x03_dcs_write_seq(ctx, d, ARRAY_SIZE(d));	\
ctx               135 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static inline int s6e63j0x03_enable_lv2_command(struct s6e63j0x03 *ctx)
ctx               137 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	return s6e63j0x03_dcs_write_seq_static(ctx, MCS_LEVEL2_KEY, 0x5a, 0x5a);
ctx               140 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static inline int s6e63j0x03_apply_mtp_key(struct s6e63j0x03 *ctx, bool on)
ctx               143 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		return s6e63j0x03_dcs_write_seq_static(ctx,
ctx               146 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	return s6e63j0x03_dcs_write_seq_static(ctx, MCS_MTP_KEY, 0xa5, 0xa5);
ctx               149 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static int s6e63j0x03_power_on(struct s6e63j0x03 *ctx)
ctx               153 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               159 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               161 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               167 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static int s6e63j0x03_power_off(struct s6e63j0x03 *ctx)
ctx               169 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               184 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static int s6e63j0x03_update_gamma(struct s6e63j0x03 *ctx,
ctx               187 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct backlight_device *bl_dev = ctx->bl_dev;
ctx               191 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, true);
ctx               195 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq(ctx, gamma_tbl[index], GAMMA_CMD_CNT);
ctx               199 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, false);
ctx               210 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = bl_get_data(bl_dev);
ctx               213 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	return s6e63j0x03_update_gamma(ctx, brightness);
ctx               222 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = panel_to_s6e63j0x03(panel);
ctx               223 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               230 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.power = FB_BLANK_NORMAL;
ctx               243 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = panel_to_s6e63j0x03(panel);
ctx               246 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_power_off(ctx);
ctx               250 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.power = FB_BLANK_POWERDOWN;
ctx               255 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c static int s6e63j0x03_panel_init(struct s6e63j0x03 *ctx)
ctx               257 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               260 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_enable_lv2_command(ctx);
ctx               264 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, true);
ctx               269 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xf2, 0x1c, 0x28);
ctx               274 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xb5, 0x00, 0x02, 0x00);
ctx               289 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xf8, 0x08, 0x08, 0x08, 0x17,
ctx               294 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xf7, 0x02);
ctx               299 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xb0, 0x01);
ctx               304 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xe2, 0x0f);
ctx               309 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xb0, 0x00);
ctx               317 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, false);
ctx               326 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = panel_to_s6e63j0x03(panel);
ctx               329 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_power_on(ctx);
ctx               333 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_panel_init(ctx);
ctx               337 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.power = FB_BLANK_NORMAL;
ctx               342 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	s6e63j0x03_power_off(ctx);
ctx               348 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = panel_to_s6e63j0x03(panel);
ctx               349 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               354 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, true);
ctx               359 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx, 0xb1, 0x00, 0x09);
ctx               364 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx,
ctx               375 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx,
ctx               381 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_dcs_write_seq_static(ctx,
ctx               390 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = s6e63j0x03_apply_mtp_key(ctx, false);
ctx               398 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.power = FB_BLANK_UNBLANK;
ctx               438 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx;
ctx               441 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx = devm_kzalloc(dev, sizeof(struct s6e63j0x03), GFP_KERNEL);
ctx               442 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	if (!ctx)
ctx               445 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               447 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->dev = dev;
ctx               453 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->supplies[0].supply = "vdd3";
ctx               454 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->supplies[1].supply = "vci";
ctx               455 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx               456 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 				      ctx->supplies);
ctx               462 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_LOW);
ctx               463 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               465 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 				PTR_ERR(ctx->reset_gpio));
ctx               466 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		return PTR_ERR(ctx->reset_gpio);
ctx               469 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	drm_panel_init(&ctx->panel);
ctx               470 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->panel.dev = dev;
ctx               471 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->panel.funcs = &s6e63j0x03_funcs;
ctx               473 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev = backlight_device_register("s6e63j0x03", dev, ctx,
ctx               475 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	if (IS_ERR(ctx->bl_dev)) {
ctx               477 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 		return PTR_ERR(ctx->bl_dev);
ctx               480 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.max_brightness = MAX_BRIGHTNESS;
ctx               481 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.brightness = DEFAULT_BRIGHTNESS;
ctx               482 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ctx->bl_dev->props.power = FB_BLANK_POWERDOWN;
ctx               484 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	ret = drm_panel_add(&ctx->panel);
ctx               495 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	drm_panel_remove(&ctx->panel);
ctx               498 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	backlight_device_unregister(ctx->bl_dev);
ctx               505 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	struct s6e63j0x03 *ctx = mipi_dsi_get_drvdata(dsi);
ctx               508 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	drm_panel_remove(&ctx->panel);
ctx               510 drivers/gpu/drm/panel/panel-samsung-s6e63j0x03.c 	backlight_device_unregister(ctx->bl_dev);
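The s6e63j0x03 gamma path above shows the vendor-key bracketing: every gamma write is sandwiched between MTP key-on and key-off, and only on full success is the backlight property updated. A sketch follows; how brightness maps to a gamma-table index is not visible in the index, so the direct use below is an assumption:

        static int s6e63j0x03_update_gamma(struct s6e63j0x03 *ctx,
                                           unsigned int brightness)
        {
                struct backlight_device *bl_dev = ctx->bl_dev;
                unsigned int index = brightness;        /* assumed 1:1 mapping */
                int ret;

                ret = s6e63j0x03_apply_mtp_key(ctx, true);      /* unlock MTP regs */
                if (ret < 0)
                        return ret;

                ret = s6e63j0x03_dcs_write_seq(ctx, gamma_tbl[index], GAMMA_CMD_CNT);
                if (ret < 0)
                        return ret;

                ret = s6e63j0x03_apply_mtp_key(ctx, false);     /* re-lock */
                if (ret < 0)
                        return ret;

                bl_dev->props.brightness = brightness;

                return 0;
        }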
ctx               131 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static int s6e63m0_clear_error(struct s6e63m0 *ctx)
ctx               133 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	int ret = ctx->error;
ctx               135 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->error = 0;
ctx               139 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static int s6e63m0_spi_write_word(struct s6e63m0 *ctx, u16 data)
ctx               141 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct spi_device *spi = to_spi_device(ctx->dev);
ctx               154 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static void s6e63m0_dcs_write(struct s6e63m0 *ctx, const u8 *data, size_t len)
ctx               158 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (ctx->error < 0 || len == 0)
ctx               161 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	DRM_DEV_DEBUG(ctx->dev, "writing dcs seq: %*ph\n", (int)len, data);
ctx               162 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = s6e63m0_spi_write_word(ctx, *data);
ctx               166 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		ret = s6e63m0_spi_write_word(ctx, *data | DATA_MASK);
ctx               170 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		DRM_DEV_ERROR(ctx->dev, "error %d writing dcs seq: %*ph\n", ret,
ctx               172 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		ctx->error = ret;
ctx               178 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c #define s6e63m0_dcs_write_seq_static(ctx, seq ...) \
ctx               181 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		s6e63m0_dcs_write(ctx, d, ARRAY_SIZE(d)); \
ctx               184 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static void s6e63m0_init(struct s6e63m0 *ctx)
ctx               186 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_PANELCTL,
ctx               190 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_DISCTL,
ctx               192 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_IFCTL,
ctx               195 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_PGAMMACTL,
ctx               200 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_PGAMMACTL,
ctx               203 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_SRCCTL,
ctx               205 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb3,
ctx               208 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb5,
ctx               215 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb6,
ctx               220 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb7,
ctx               229 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb9,
ctx               236 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xba,
ctx               241 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_BCMODE,
ctx               247 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, 0xb2,
ctx               250 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_MIECTL1,
ctx               253 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_ELVSS_ON,
ctx               256 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
ctx               259 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static int s6e63m0_power_on(struct s6e63m0 *ctx)
ctx               263 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               269 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               275 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static int s6e63m0_power_off(struct s6e63m0 *ctx)
ctx               279 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               282 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               291 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = panel_to_s6e63m0(panel);
ctx               293 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (!ctx->enabled)
ctx               296 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	backlight_disable(ctx->bl_dev);
ctx               298 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MIPI_DCS_ENTER_SLEEP_MODE);
ctx               301 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->enabled = false;
ctx               308 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = panel_to_s6e63m0(panel);
ctx               311 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (!ctx->prepared)
ctx               314 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_clear_error(ctx);
ctx               316 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = s6e63m0_power_off(ctx);
ctx               320 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->prepared = false;
ctx               327 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = panel_to_s6e63m0(panel);
ctx               330 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (ctx->prepared)
ctx               333 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = s6e63m0_power_on(ctx);
ctx               337 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_init(ctx);
ctx               339 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = s6e63m0_clear_error(ctx);
ctx               344 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->prepared = true;
ctx               351 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = panel_to_s6e63m0(panel);
ctx               353 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (ctx->enabled)
ctx               356 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_ON);
ctx               358 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	backlight_enable(ctx->bl_dev);
ctx               360 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->enabled = true;
ctx               396 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = bl_get_data(bd);
ctx               401 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write(ctx, s6e63m0_gamma_22[brightness],
ctx               405 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	s6e63m0_dcs_write_seq_static(ctx, MCS_PGAMMACTL, 0x01);
ctx               407 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	return s6e63m0_clear_error(ctx);
ctx               414 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c static int s6e63m0_backlight_register(struct s6e63m0 *ctx)
ctx               421 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct device *dev = ctx->dev;
ctx               424 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->bl_dev = devm_backlight_device_register(dev, "panel", dev, ctx,
ctx               427 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (IS_ERR(ctx->bl_dev)) {
ctx               428 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		ret = PTR_ERR(ctx->bl_dev);
ctx               439 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx;
ctx               442 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx = devm_kzalloc(dev, sizeof(struct s6e63m0), GFP_KERNEL);
ctx               443 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (!ctx)
ctx               446 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	spi_set_drvdata(spi, ctx);
ctx               448 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->dev = dev;
ctx               449 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->enabled = false;
ctx               450 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->prepared = false;
ctx               452 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->supplies[0].supply = "vdd3";
ctx               453 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->supplies[1].supply = "vci";
ctx               454 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx               455 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 				      ctx->supplies);
ctx               461 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_HIGH);
ctx               462 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               464 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 			      PTR_ERR(ctx->reset_gpio));
ctx               465 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 		return PTR_ERR(ctx->reset_gpio);
ctx               476 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	drm_panel_init(&ctx->panel);
ctx               477 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->panel.dev = dev;
ctx               478 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ctx->panel.funcs = &s6e63m0_drm_funcs;
ctx               480 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	ret = s6e63m0_backlight_register(ctx);
ctx               484 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	return drm_panel_add(&ctx->panel);
ctx               489 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	struct s6e63m0 *ctx = spi_get_drvdata(spi);
ctx               491 drivers/gpu/drm/panel/panel-samsung-s6e63m0.c 	drm_panel_remove(&ctx->panel);
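s6e63m0 keeps two guard booleans so its drm_panel callbacks stay idempotent: enabled tracks the display-on/backlight state, prepared tracks power and init. Each hook bails out early when there is nothing to do; the unwind of a failed init below is an inference from the sticky-error helpers, not shown verbatim in the index:

        static int s6e63m0_prepare(struct drm_panel *panel)
        {
                struct s6e63m0 *ctx = panel_to_s6e63m0(panel);
                int ret;

                if (ctx->prepared)
                        return 0;               /* nothing to do */

                ret = s6e63m0_power_on(ctx);
                if (ret < 0)
                        return ret;

                s6e63m0_init(ctx);              /* errors latch into ctx->error */

                ret = s6e63m0_clear_error(ctx);
                if (ret < 0) {
                        s6e63m0_power_off(ctx); /* assumed unwind on failed init */
                        return ret;
                }

                ctx->prepared = true;

                return 0;
        }

        static int s6e63m0_enable(struct drm_panel *panel)
        {
                struct s6e63m0 *ctx = panel_to_s6e63m0(panel);

                if (ctx->enabled)
                        return 0;

                s6e63m0_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_ON);
                backlight_enable(ctx->bl_dev);
                ctx->enabled = true;

                return 0;
        }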
ctx               129 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static int s6e8aa0_clear_error(struct s6e8aa0 *ctx)
ctx               131 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	int ret = ctx->error;
ctx               133 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->error = 0;
ctx               137 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_dcs_write(struct s6e8aa0 *ctx, const void *data, size_t len)
ctx               139 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               142 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->error < 0)
ctx               147 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		dev_err(ctx->dev, "error %zd writing dcs seq: %*ph\n", ret,
ctx               149 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		ctx->error = ret;
ctx               153 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static int s6e8aa0_dcs_read(struct s6e8aa0 *ctx, u8 cmd, void *data, size_t len)
ctx               155 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               158 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->error < 0)
ctx               159 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		return ctx->error;
ctx               163 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		dev_err(ctx->dev, "error %d reading dcs seq(%#x)\n", ret, cmd);
ctx               164 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		ctx->error = ret;
ctx               170 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c #define s6e8aa0_dcs_write_seq(ctx, seq...) \
ctx               174 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d));\
ctx               177 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c #define s6e8aa0_dcs_write_seq_static(ctx, seq...) \
ctx               180 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d));\
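s6e8aa0 carries two variants of the sequence-write macro: the plain one builds the array on the stack so it can embed run-time values (the computed aid byte, the elvss id), while the _static one keeps fixed sequences in rodata. The bodies below are the obvious readings of the two #define entries above; the stack-size guard is an assumption:

        #define s6e8aa0_dcs_write_seq(ctx, seq...) \
        ({ \
                const u8 d[] = { seq }; \
                BUILD_BUG_ON_MSG(ARRAY_SIZE(d) > 64, \
                                 "DCS sequence too big for stack"); \
                s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d)); \
        })

        #define s6e8aa0_dcs_write_seq_static(ctx, seq...) \
        ({ \
                static const u8 d[] = { seq }; \
                s6e8aa0_dcs_write(ctx, d, ARRAY_SIZE(d)); \
        })

        /* Run-time values force the non-static variant, e.g. from
         * s6e8aa0_etc_elvss_control():
         *
         *      u8 id = ctx->id ? 0 : 0x95;
         *      s6e8aa0_dcs_write_seq(ctx, 0xb1, 0x04, id);
         */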
ctx               183 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_apply_level_1_key(struct s6e8aa0 *ctx)
ctx               185 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, 0xf0, 0x5a, 0x5a);
ctx               188 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_panel_cond_set_v142(struct s6e8aa0 *ctx)
ctx               193 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	u8 aid = aids[ctx->id >> 5];
ctx               202 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->flip_vertical) {
ctx               208 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->flip_horizontal) {
ctx               214 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->flip_horizontal || ctx->flip_vertical) {
ctx               250 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq(ctx,
ctx               259 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_panel_cond_set(struct s6e8aa0 *ctx)
ctx               261 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->version < 142)
ctx               262 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write_seq_static(ctx,
ctx               270 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_panel_cond_set_v142(ctx);
ctx               273 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_display_condition_set(struct s6e8aa0 *ctx)
ctx               275 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, 0xf2, 0x80, 0x03, 0x0d);
ctx               278 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_etc_source_control(struct s6e8aa0 *ctx)
ctx               280 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, 0xf6, 0x00, 0x02, 0x00);
ctx               283 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_etc_pentile_control(struct s6e8aa0 *ctx)
ctx               293 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->version < 142)
ctx               294 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write(ctx, pent32, ARRAY_SIZE(pent32));
ctx               296 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write(ctx, pent142, ARRAY_SIZE(pent142));
ctx               299 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_etc_power_control(struct s6e8aa0 *ctx)
ctx               309 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->version < 142)
ctx               310 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write(ctx, pwr32, ARRAY_SIZE(pwr32));
ctx               312 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write(ctx, pwr142, ARRAY_SIZE(pwr142));
ctx               315 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_etc_elvss_control(struct s6e8aa0 *ctx)
ctx               317 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	u8 id = ctx->id ? 0 : 0x95;
ctx               319 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq(ctx, 0xb1, 0x04, id);
ctx               322 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_elvss_nvm_set_v142(struct s6e8aa0 *ctx)
ctx               326 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	switch (ctx->brightness) {
ctx               342 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq(ctx, 0xd9, 0x14, 0x40, 0x0c, 0xcb, 0xce, 0x6e,
ctx               346 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_elvss_nvm_set(struct s6e8aa0 *ctx)
ctx               348 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->version < 142)
ctx               349 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_dcs_write_seq_static(ctx,
ctx               353 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_elvss_nvm_set_v142(ctx);
ctx               356 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_apply_level_2_key(struct s6e8aa0 *ctx)
ctx               358 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, 0xfc, 0x5a, 0x5a);
ctx               765 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_brightness_set(struct s6e8aa0 *ctx)
ctx               769 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->error)
ctx               772 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	gamma = ctx->variant->gamma_tables[ctx->brightness];
ctx               774 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->version >= 142)
ctx               775 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		s6e8aa0_elvss_nvm_set(ctx);
ctx               777 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write(ctx, gamma, GAMMA_TABLE_LEN);
ctx               780 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, 0xf7, 0x03);
ctx               783 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_panel_init(struct s6e8aa0 *ctx)
ctx               785 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_apply_level_1_key(ctx);
ctx               786 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_apply_level_2_key(ctx);
ctx               789 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, MIPI_DCS_EXIT_SLEEP_MODE);
ctx               792 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_panel_cond_set(ctx);
ctx               793 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_display_condition_set(ctx);
ctx               794 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_brightness_set(ctx);
ctx               795 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_etc_source_control(ctx);
ctx               796 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_etc_pentile_control(ctx);
ctx               797 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_elvss_nvm_set(ctx);
ctx               798 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_etc_power_control(ctx);
ctx               799 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_etc_elvss_control(ctx);
ctx               800 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	msleep(ctx->init_delay);
ctx               803 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_set_maximum_return_packet_size(struct s6e8aa0 *ctx,
ctx               806 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct mipi_dsi_device *dsi = to_mipi_dsi_device(ctx->dev);
ctx               809 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (ctx->error < 0)
ctx               814 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		dev_err(ctx->dev,
ctx               817 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		ctx->error = ret;
ctx               821 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_read_mtp_id(struct s6e8aa0 *ctx)
ctx               826 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = s6e8aa0_dcs_read(ctx, 0xd1, id, ARRAY_SIZE(id));
ctx               828 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		dev_err(ctx->dev, "read id failed\n");
ctx               829 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		ctx->error = -EIO;
ctx               833 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	dev_info(ctx->dev, "ID: 0x%2x, 0x%2x, 0x%2x\n", id[0], id[1], id[2]);
ctx               840 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		dev_err(ctx->dev, "unsupported display version %d\n", id[1]);
ctx               841 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		ctx->error = -EINVAL;
ctx               845 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->variant = &s6e8aa0_variants[i];
ctx               846 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->version = id[1];
ctx               847 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->id = id[2];
ctx               850 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static void s6e8aa0_set_sequence(struct s6e8aa0 *ctx)
ctx               852 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_set_maximum_return_packet_size(ctx, 3);
ctx               853 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_read_mtp_id(ctx);
ctx               854 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_panel_init(ctx);
ctx               855 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_ON);
ctx               858 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static int s6e8aa0_power_on(struct s6e8aa0 *ctx)
ctx               862 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               866 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	msleep(ctx->power_on_delay);
ctx               868 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               870 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               872 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	msleep(ctx->reset_delay);
ctx               877 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static int s6e8aa0_power_off(struct s6e8aa0 *ctx)
ctx               879 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	return regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               889 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct s6e8aa0 *ctx = panel_to_s6e8aa0(panel);
ctx               891 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, MIPI_DCS_ENTER_SLEEP_MODE);
ctx               892 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_dcs_write_seq_static(ctx, MIPI_DCS_SET_DISPLAY_OFF);
ctx               895 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_clear_error(ctx);
ctx               897 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	return s6e8aa0_power_off(ctx);
ctx               902 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct s6e8aa0 *ctx = panel_to_s6e8aa0(panel);
ctx               905 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = s6e8aa0_power_on(ctx);
ctx               909 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	s6e8aa0_set_sequence(ctx);
ctx               910 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = ctx->error;
ctx               926 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct s6e8aa0 *ctx = panel_to_s6e8aa0(panel);
ctx               935 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	drm_display_mode_from_videomode(&ctx->vm, mode);
ctx               936 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	mode->width_mm = ctx->width_mm;
ctx               937 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	mode->height_mm = ctx->height_mm;
ctx               955 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c static int s6e8aa0_parse_dt(struct s6e8aa0 *ctx)
ctx               957 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct device *dev = ctx->dev;
ctx               961 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = of_get_videomode(np, &ctx->vm, 0);
ctx               965 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	of_property_read_u32(np, "power-on-delay", &ctx->power_on_delay);
ctx               966 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	of_property_read_u32(np, "reset-delay", &ctx->reset_delay);
ctx               967 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	of_property_read_u32(np, "init-delay", &ctx->init_delay);
ctx               968 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	of_property_read_u32(np, "panel-width-mm", &ctx->width_mm);
ctx               969 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	of_property_read_u32(np, "panel-height-mm", &ctx->height_mm);
ctx               971 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->flip_horizontal = of_property_read_bool(np, "flip-horizontal");
ctx               972 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->flip_vertical = of_property_read_bool(np, "flip-vertical");
ctx               980 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct s6e8aa0 *ctx;
ctx               983 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx = devm_kzalloc(dev, sizeof(struct s6e8aa0), GFP_KERNEL);
ctx               984 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (!ctx)
ctx               987 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               989 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->dev = dev;
ctx               998 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = s6e8aa0_parse_dt(ctx);
ctx              1002 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->supplies[0].supply = "vdd3";
ctx              1003 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->supplies[1].supply = "vci";
ctx              1004 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx              1005 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 				      ctx->supplies);
ctx              1011 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_HIGH);
ctx              1012 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx              1014 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 			PTR_ERR(ctx->reset_gpio));
ctx              1015 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		return PTR_ERR(ctx->reset_gpio);
ctx              1018 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->brightness = GAMMA_LEVEL_NUM - 1;
ctx              1020 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	drm_panel_init(&ctx->panel);
ctx              1021 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->panel.dev = dev;
ctx              1022 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ctx->panel.funcs = &s6e8aa0_drm_funcs;
ctx              1024 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	ret = drm_panel_add(&ctx->panel);
ctx              1030 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 		drm_panel_remove(&ctx->panel);
ctx              1037 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	struct s6e8aa0 *ctx = mipi_dsi_get_drvdata(dsi);
ctx              1040 drivers/gpu/drm/panel/panel-samsung-s6e8aa0.c 	drm_panel_remove(&ctx->panel);
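
The s6e8aa0 entries above all follow one latched-error idiom: every DCS helper bails out early once ctx->error is negative, records only the first failure, and s6e8aa0_prepare() reads ctx->error a single time after the whole init sequence. A minimal sketch of that idiom; my_ctx and my_xfer are illustrative stand-ins, not the driver's API:

/* Latched-error idiom: transfers become no-ops after the first failure,
 * so a long command sequence needs only one error check at the end. */
struct my_ctx {
	int error;			/* 0 until the first transfer fails */
};

static int my_xfer(const u8 *data, size_t len);	/* assumed transport call */

static void my_write(struct my_ctx *ctx, const u8 *data, size_t len)
{
	int ret;

	if (ctx->error < 0)		/* an earlier step already failed */
		return;

	ret = my_xfer(data, len);
	if (ret < 0)
		ctx->error = ret;	/* latch the first error only */
}
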
ctx               133 drivers/gpu/drm/panel/panel-sitronix-st7789v.c static int st7789v_spi_write(struct st7789v *ctx, enum st7789v_prefix prefix,
ctx               147 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	return spi_sync(ctx->spi, &msg);
ctx               150 drivers/gpu/drm/panel/panel-sitronix-st7789v.c static int st7789v_write_command(struct st7789v *ctx, u8 cmd)
ctx               152 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	return st7789v_spi_write(ctx, ST7789V_COMMAND, cmd);
ctx               155 drivers/gpu/drm/panel/panel-sitronix-st7789v.c static int st7789v_write_data(struct st7789v *ctx, u8 cmd)
ctx               157 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	return st7789v_spi_write(ctx, ST7789V_DATA, cmd);
ctx               199 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx = panel_to_st7789v(panel);
ctx               202 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ret = regulator_enable(ctx->power);
ctx               206 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	gpiod_set_value(ctx->reset, 1);
ctx               208 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	gpiod_set_value(ctx->reset, 0);
ctx               211 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, MIPI_DCS_EXIT_SLEEP_MODE));
ctx               216 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx,
ctx               218 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0));
ctx               220 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx,
ctx               222 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx,
ctx               226 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_PORCTRL_CMD));
ctx               227 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0xc));
ctx               228 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0xc));
ctx               229 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0));
ctx               230 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PORCTRL_IDLE_BP(3) |
ctx               232 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx,
ctx               236 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_GCTRL_CMD));
ctx               237 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_GCTRL_VGLS(5) |
ctx               240 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_VCOMS_CMD));
ctx               241 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0x2b));
ctx               243 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_LCMCTRL_CMD));
ctx               244 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_LCMCTRL_XMH |
ctx               248 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_VDVVRHEN_CMD));
ctx               249 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_VDVVRHEN_CMDEN));
ctx               251 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_VRHS_CMD));
ctx               252 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0xf));
ctx               254 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_VDVS_CMD));
ctx               255 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0x20));
ctx               257 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_FRCTRL2_CMD));
ctx               258 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, 0xf));
ctx               260 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_PWCTRL1_CMD));
ctx               261 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PWCTRL1_MAGIC));
ctx               262 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PWCTRL1_AVDD(2) |
ctx               266 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_PVGAMCTRL_CMD));
ctx               267 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP63(0xd)));
ctx               268 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP1(0xca)));
ctx               269 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP2(0xe)));
ctx               270 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP4(8)));
ctx               271 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP6(9)));
ctx               272 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP13(7)));
ctx               273 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP20(0x2d)));
ctx               274 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP27(0xb) |
ctx               276 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP43(0x3d)));
ctx               277 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_JP1(3) |
ctx               279 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP57(0xa)));
ctx               280 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP59(0xa)));
ctx               281 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP61(0x1b)));
ctx               282 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_PVGAMCTRL_VP62(0x28)));
ctx               284 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_NVGAMCTRL_CMD));
ctx               285 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN63(0xd)));
ctx               286 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN1(0xca)));
ctx               287 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN2(0xf)));
ctx               288 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN4(8)));
ctx               289 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN6(8)));
ctx               290 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN13(7)));
ctx               291 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN20(0x2e)));
ctx               292 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN27(0xc) |
ctx               294 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN43(0x40)));
ctx               295 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_JN1(3) |
ctx               297 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN57(9)));
ctx               298 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN59(0xb)));
ctx               299 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN61(0x1b)));
ctx               300 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_NVGAMCTRL_VN62(0x28)));
ctx               302 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, MIPI_DCS_ENTER_INVERT_MODE));
ctx               304 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_RAMCTRL_CMD));
ctx               305 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_RAMCTRL_DM_RGB |
ctx               307 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_RAMCTRL_EPF(3) |
ctx               310 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, ST7789V_RGBCTRL_CMD));
ctx               311 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_RGBCTRL_WO |
ctx               316 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_RGBCTRL_VBP(8)));
ctx               317 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_data(ctx, ST7789V_RGBCTRL_HBP(20)));
ctx               324 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx = panel_to_st7789v(panel);
ctx               326 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (ctx->backlight) {
ctx               327 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		ctx->backlight->props.state &= ~BL_CORE_FBBLANK;
ctx               328 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		ctx->backlight->props.power = FB_BLANK_UNBLANK;
ctx               329 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		backlight_update_status(ctx->backlight);
ctx               332 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	return st7789v_write_command(ctx, MIPI_DCS_SET_DISPLAY_ON);
ctx               337 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx = panel_to_st7789v(panel);
ctx               340 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, MIPI_DCS_SET_DISPLAY_OFF));
ctx               342 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (ctx->backlight) {
ctx               343 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		ctx->backlight->props.power = FB_BLANK_POWERDOWN;
ctx               344 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		ctx->backlight->props.state |= BL_CORE_FBBLANK;
ctx               345 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		backlight_update_status(ctx->backlight);
ctx               353 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx = panel_to_st7789v(panel);
ctx               356 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ST7789V_TEST(ret, st7789v_write_command(ctx, MIPI_DCS_ENTER_SLEEP_MODE));
ctx               358 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	regulator_disable(ctx->power);
ctx               374 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx;
ctx               377 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx = devm_kzalloc(&spi->dev, sizeof(*ctx), GFP_KERNEL);
ctx               378 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (!ctx)
ctx               381 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	spi_set_drvdata(spi, ctx);
ctx               382 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx->spi = spi;
ctx               384 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	drm_panel_init(&ctx->panel);
ctx               385 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx->panel.dev = &spi->dev;
ctx               386 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx->panel.funcs = &st7789v_drm_funcs;
ctx               388 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx->power = devm_regulator_get(&spi->dev, "power");
ctx               389 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (IS_ERR(ctx->power))
ctx               390 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		return PTR_ERR(ctx->power);
ctx               392 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ctx->reset = devm_gpiod_get(&spi->dev, "reset", GPIOD_OUT_LOW);
ctx               393 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (IS_ERR(ctx->reset)) {
ctx               395 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		return PTR_ERR(ctx->reset);
ctx               400 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		ctx->backlight = of_find_backlight_by_node(backlight);
ctx               403 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		if (!ctx->backlight)
ctx               407 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	ret = drm_panel_add(&ctx->panel);
ctx               414 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (ctx->backlight)
ctx               415 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		put_device(&ctx->backlight->dev);
ctx               422 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	struct st7789v *ctx = spi_get_drvdata(spi);
ctx               424 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	drm_panel_remove(&ctx->panel);
ctx               426 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 	if (ctx->backlight)
ctx               427 drivers/gpu/drm/panel/panel-sitronix-st7789v.c 		put_device(&ctx->backlight->dev);
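
Nearly every st7789v line above funnels through ST7789V_TEST(), which turns the long register init sequence into straight-line code with early returns. The macro is plausibly of this shape (reconstruction, not a quote):

/* Likely shape of ST7789V_TEST(): evaluate one write, stash the result,
 * and return from the enclosing function on the first failure. */
#define ST7789V_TEST(ret, call)		\
	do {				\
		(ret) = (call);		\
		if (ret)		\
			return ret;	\
	} while (0)
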
ctx               228 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               231 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->dsi); i++) {
ctx               232 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = mipi_dsi_dcs_write(ctx->dsi[i], command, NULL, 0);
ctx               234 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev,
ctx               246 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               250 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->dsi); i++) {
ctx               251 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = mipi_dsi_dcs_write_buffer(ctx->dsi[i], buf, size);
ctx               253 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev,
ctx               262 drivers/gpu/drm/panel/panel-truly-nt35597.c static int truly_35597_power_on(struct truly_nt35597 *ctx)
ctx               266 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->supplies); i++) {
ctx               267 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = regulator_set_load(ctx->supplies[i].consumer,
ctx               273 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = regulator_bulk_enable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               282 drivers/gpu/drm/panel/panel-truly-nt35597.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               284 drivers/gpu/drm/panel/panel-truly-nt35597.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               286 drivers/gpu/drm/panel/panel-truly-nt35597.c 	gpiod_set_value(ctx->reset_gpio, 0);
ctx               292 drivers/gpu/drm/panel/panel-truly-nt35597.c static int truly_nt35597_power_off(struct truly_nt35597 *ctx)
ctx               297 drivers/gpu/drm/panel/panel-truly-nt35597.c 	gpiod_set_value(ctx->reset_gpio, 1);
ctx               299 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->supplies); i++) {
ctx               300 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = regulator_set_load(ctx->supplies[i].consumer,
ctx               303 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev,
ctx               309 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = regulator_bulk_disable(ARRAY_SIZE(ctx->supplies), ctx->supplies);
ctx               311 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               319 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               322 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (!ctx->enabled)
ctx               325 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->backlight) {
ctx               326 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = backlight_disable(ctx->backlight);
ctx               328 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev, "backlight disable failed %d\n",
ctx               332 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->enabled = false;
ctx               338 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               341 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (!ctx->prepared)
ctx               344 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[0]->mode_flags = 0;
ctx               345 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[1]->mode_flags = 0;
ctx               349 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               359 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               363 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = truly_nt35597_power_off(ctx);
ctx               365 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev, "power_off failed ret = %d\n", ret);
ctx               367 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->prepared = false;
ctx               373 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               380 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->prepared)
ctx               383 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = truly_35597_power_on(ctx);
ctx               387 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[0]->mode_flags |= MIPI_DSI_MODE_LPM;
ctx               388 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[1]->mode_flags |= MIPI_DSI_MODE_LPM;
ctx               390 drivers/gpu/drm/panel/panel-truly-nt35597.c 	config = ctx->config;
ctx               399 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev,
ctx               408 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               419 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               427 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->prepared = true;
ctx               432 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (truly_nt35597_power_off(ctx))
ctx               433 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev, "power_off failed\n");
ctx               439 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               442 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->enabled)
ctx               445 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->backlight) {
ctx               446 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ret = backlight_enable(ctx->backlight);
ctx               448 drivers/gpu/drm/panel/panel-truly-nt35597.c 			DRM_DEV_ERROR(ctx->dev, "backlight enable failed %d\n",
ctx               452 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->enabled = true;
ctx               460 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = panel_to_ctx(panel);
ctx               464 drivers/gpu/drm/panel/panel-truly-nt35597.c 	config = ctx->config;
ctx               467 drivers/gpu/drm/panel/panel-truly-nt35597.c 		DRM_DEV_ERROR(ctx->dev,
ctx               489 drivers/gpu/drm/panel/panel-truly-nt35597.c static int truly_nt35597_panel_add(struct truly_nt35597 *ctx)
ctx               491 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct device *dev = ctx->dev;
ctx               495 drivers/gpu/drm/panel/panel-truly-nt35597.c 	config = ctx->config;
ctx               496 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->supplies); i++)
ctx               497 drivers/gpu/drm/panel/panel-truly-nt35597.c 		ctx->supplies[i].supply = regulator_names[i];
ctx               499 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = devm_regulator_bulk_get(dev, ARRAY_SIZE(ctx->supplies),
ctx               500 drivers/gpu/drm/panel/panel-truly-nt35597.c 				      ctx->supplies);
ctx               504 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->reset_gpio = devm_gpiod_get(dev, "reset", GPIOD_OUT_LOW);
ctx               505 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (IS_ERR(ctx->reset_gpio)) {
ctx               507 drivers/gpu/drm/panel/panel-truly-nt35597.c 			PTR_ERR(ctx->reset_gpio));
ctx               508 drivers/gpu/drm/panel/panel-truly-nt35597.c 		return PTR_ERR(ctx->reset_gpio);
ctx               511 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->mode_gpio = devm_gpiod_get(dev, "mode", GPIOD_OUT_LOW);
ctx               512 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (IS_ERR(ctx->mode_gpio)) {
ctx               514 drivers/gpu/drm/panel/panel-truly-nt35597.c 			PTR_ERR(ctx->mode_gpio));
ctx               515 drivers/gpu/drm/panel/panel-truly-nt35597.c 		return PTR_ERR(ctx->mode_gpio);
ctx               519 drivers/gpu/drm/panel/panel-truly-nt35597.c 	gpiod_set_value(ctx->mode_gpio, 0);
ctx               521 drivers/gpu/drm/panel/panel-truly-nt35597.c 	drm_panel_init(&ctx->panel);
ctx               522 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->panel.dev = dev;
ctx               523 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->panel.funcs = &truly_nt35597_drm_funcs;
ctx               524 drivers/gpu/drm/panel/panel-truly-nt35597.c 	drm_panel_add(&ctx->panel);
ctx               556 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx;
ctx               570 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               572 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (!ctx)
ctx               582 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->config = of_device_get_match_data(dev);
ctx               584 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (!ctx->config) {
ctx               610 drivers/gpu/drm/panel/panel-truly-nt35597.c 	mipi_dsi_set_drvdata(dsi, ctx);
ctx               612 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dev = dev;
ctx               613 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[0] = dsi;
ctx               614 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ctx->dsi[1] = dsi1_device;
ctx               616 drivers/gpu/drm/panel/panel-truly-nt35597.c 	ret = truly_nt35597_panel_add(ctx);
ctx               622 drivers/gpu/drm/panel/panel-truly-nt35597.c 	for (i = 0; i < ARRAY_SIZE(ctx->dsi); i++) {
ctx               623 drivers/gpu/drm/panel/panel-truly-nt35597.c 		dsi_dev = ctx->dsi[i];
ctx               639 drivers/gpu/drm/panel/panel-truly-nt35597.c 	drm_panel_remove(&ctx->panel);
ctx               647 drivers/gpu/drm/panel/panel-truly-nt35597.c 	struct truly_nt35597 *ctx = mipi_dsi_get_drvdata(dsi);
ctx               649 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->dsi[0])
ctx               650 drivers/gpu/drm/panel/panel-truly-nt35597.c 		mipi_dsi_detach(ctx->dsi[0]);
ctx               651 drivers/gpu/drm/panel/panel-truly-nt35597.c 	if (ctx->dsi[1]) {
ctx               652 drivers/gpu/drm/panel/panel-truly-nt35597.c 		mipi_dsi_detach(ctx->dsi[1]);
ctx               653 drivers/gpu/drm/panel/panel-truly-nt35597.c 		mipi_dsi_device_unregister(ctx->dsi[1]);
ctx               656 drivers/gpu/drm/panel/panel-truly-nt35597.c 	drm_panel_remove(&ctx->panel);
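
The truly-nt35597 entries above show a dual-link panel: ctx->dsi[] holds both MIPI DSI hosts, and every command is written to each link in turn. A self-contained sketch of that broadcast loop; broadcast_dcs() is an illustrative name, not a driver symbol:

/* Dual-link broadcast as in the nt35597 write helpers above: send one
 * DCS command to both DSI devices, stopping at the first failing link.
 * Requires <drm/drm_mipi_dsi.h>. */
static int broadcast_dcs(struct mipi_dsi_device *dsi[2], u8 command)
{
	int i, ret;

	for (i = 0; i < 2; i++) {
		ret = mipi_dsi_dcs_write(dsi[i], command, NULL, 0);
		if (ret < 0)
			return ret;
	}
	return 0;
}
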
ctx               313 drivers/gpu/drm/qxl/qxl_ioctl.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               331 drivers/gpu/drm/qxl/qxl_ioctl.c 		ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx);
ctx               229 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               238 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               249 drivers/gpu/drm/qxl/qxl_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               262 drivers/gpu/drm/qxl/qxl_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               231 drivers/gpu/drm/qxl/qxl_release.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               236 drivers/gpu/drm/qxl/qxl_release.c 		ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               273 drivers/gpu/drm/qxl/qxl_ttm.c 		       struct ttm_operation_ctx *ctx,
ctx               279 drivers/gpu/drm/qxl/qxl_ttm.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx               287 drivers/gpu/drm/qxl/qxl_ttm.c 	return ttm_bo_move_memcpy(bo, ctx, new_mem);
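
The qxl entries above initialize struct ttm_operation_ctx positionally; the qxl_ttm.c call ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu) pins down what the two booleans mean. Spelled out with designated initializers:

/* What the positional { true, false } initializers above say, written
 * explicitly (field order confirmed by the ttm_bo_wait() call indexed
 * above; remaining fields stay zeroed). Fragment mirroring those calls. */
struct ttm_operation_ctx ctx = {
	.interruptible = true,	/* wait may be interrupted by a signal */
	.no_wait_gpu = false,	/* OK to sleep waiting for the GPU */
};

ret = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
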
ctx                92 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx                99 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->scale_3d_cntl);
ctx               107 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx               114 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->dst_pitch_offset_c);
ctx               115 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->dp_gui_master_cntl_c);
ctx               116 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->sc_top_left_c);
ctx               117 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->sc_bottom_right_c);
ctx               118 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->z_offset_c);
ctx               119 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->z_pitch_c);
ctx               120 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->z_sten_cntl_c);
ctx               121 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->tex_cntl_c);
ctx               122 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->misc_3d_state_cntl_reg);
ctx               123 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->texture_clr_cmp_clr_c);
ctx               124 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->texture_clr_cmp_msk_c);
ctx               125 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->fog_color_c);
ctx               133 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx               140 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->setup_cntl);
ctx               141 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->pm4_vc_fpu_setup);
ctx               149 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx               156 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->dp_write_mask);
ctx               159 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->sten_ref_mask_c);
ctx               160 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->plane_3d_mask_c);
ctx               168 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx               175 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->window_xy_offset);
ctx               183 drivers/gpu/drm/r128/r128_state.c 	drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
ctx               195 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->tex_size_pitch_c);
ctx               200 drivers/gpu/drm/r128/r128_state.c 	OUT_RING(ctx->constant_color_c);
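
Each r128_state.c block above is one state-emission helper: it takes a pointer into the SAREA's saved context registers and replays them into the CCE command ring. A condensed sketch of the idiom; the packet macro and register names follow the r128 driver's conventions but are shown here as assumptions:

/* Ring-emission idiom behind the OUT_RING() runs above: BEGIN_RING()
 * reserves ring space, each OUT_RING() copies one dword of saved state,
 * ADVANCE_RING() commits the packet. Sketch only. */
drm_r128_context_regs_t *ctx = &sarea_priv->context_state;
RING_LOCALS;

BEGIN_RING(3);
OUT_RING(CCE_PACKET1(R128_SC_TOP_LEFT_C, R128_SC_BOTTOM_RIGHT_C));
OUT_RING(ctx->sc_top_left_c);
OUT_RING(ctx->sc_bottom_right_c);
ADVANCE_RING();
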
ctx                32 drivers/gpu/drm/radeon/atom-bits.h #define U8(ptr) get_u8(ctx->ctx->bios, (ptr))
ctx                33 drivers/gpu/drm/radeon/atom-bits.h #define CU8(ptr) get_u8(ctx->bios, (ptr))
ctx                38 drivers/gpu/drm/radeon/atom-bits.h #define U16(ptr) get_u16(ctx->ctx->bios, (ptr))
ctx                39 drivers/gpu/drm/radeon/atom-bits.h #define CU16(ptr) get_u16(ctx->bios, (ptr))
ctx                44 drivers/gpu/drm/radeon/atom-bits.h #define U32(ptr) get_u32(ctx->ctx->bios, (ptr))
ctx                45 drivers/gpu/drm/radeon/atom-bits.h #define CU32(ptr) get_u32(ctx->bios, (ptr))
ctx                46 drivers/gpu/drm/radeon/atom-bits.h #define CSTR(ptr) (((char *)(ctx->bios))+(ptr))
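
The paired macros above differ only in which context they start from: CU8/CU16/CU32 expect ctx to already be the struct atom_context (direct ctx->bios), while U8/U16/U32 are used inside opcode handlers where ctx is an atom_exec_context and the BIOS image sits one level down at ctx->ctx->bios. The accessors they wrap are essentially unchecked little-endian fetches from the BIOS image; a sketch:

/* Essentially what get_u8()/get_u16() behind the macros above do:
 * raw little-endian fetches from the BIOS image at a byte offset.
 * Sketch of the idiom, not a verbatim quote. */
static inline uint8_t get_u8(void *bios, int ptr)
{
	return ((unsigned char *)bios)[ptr];
}

static inline uint16_t get_u16(void *bios, int ptr)
{
	return get_u8(bios, ptr) | (((uint16_t)get_u8(bios, ptr + 1)) << 8);
}
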
ctx                60 drivers/gpu/drm/radeon/atom.c 	struct atom_context *ctx;
ctx                70 drivers/gpu/drm/radeon/atom.c static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params);
ctx                71 drivers/gpu/drm/radeon/atom.c int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params);
ctx               107 drivers/gpu/drm/radeon/atom.c static uint32_t atom_iio_execute(struct atom_context *ctx, int base,
ctx               110 drivers/gpu/drm/radeon/atom.c 	struct radeon_device *rdev = ctx->card->dev->dev_private;
ctx               119 drivers/gpu/drm/radeon/atom.c 			temp = ctx->card->ioreg_read(ctx->card, CU16(base + 1));
ctx               124 drivers/gpu/drm/radeon/atom.c 				(void)ctx->card->ioreg_read(ctx->card, CU16(base + 1));
ctx               125 drivers/gpu/drm/radeon/atom.c 			ctx->card->ioreg_write(ctx->card, CU16(base + 1), temp);
ctx               165 drivers/gpu/drm/radeon/atom.c 			    ((ctx->
ctx               182 drivers/gpu/drm/radeon/atom.c static uint32_t atom_get_src_int(atom_exec_context *ctx, uint8_t attr,
ctx               186 drivers/gpu/drm/radeon/atom.c 	struct atom_context *gctx = ctx->ctx;
ctx               227 drivers/gpu/drm/radeon/atom.c 		val = get_unaligned_le32((u32 *)&ctx->ps[idx]);
ctx               265 drivers/gpu/drm/radeon/atom.c 			val = ctx->ws[idx];
ctx               367 drivers/gpu/drm/radeon/atom.c static void atom_skip_src_int(atom_exec_context *ctx, uint8_t attr, int *ptr)
ctx               403 drivers/gpu/drm/radeon/atom.c static uint32_t atom_get_src(atom_exec_context *ctx, uint8_t attr, int *ptr)
ctx               405 drivers/gpu/drm/radeon/atom.c 	return atom_get_src_int(ctx, attr, ptr, NULL, 1);
ctx               408 drivers/gpu/drm/radeon/atom.c static uint32_t atom_get_src_direct(atom_exec_context *ctx, uint8_t align, int *ptr)
ctx               434 drivers/gpu/drm/radeon/atom.c static uint32_t atom_get_dst(atom_exec_context *ctx, int arg, uint8_t attr,
ctx               437 drivers/gpu/drm/radeon/atom.c 	return atom_get_src_int(ctx,
ctx               443 drivers/gpu/drm/radeon/atom.c static void atom_skip_dst(atom_exec_context *ctx, int arg, uint8_t attr, int *ptr)
ctx               445 drivers/gpu/drm/radeon/atom.c 	atom_skip_src_int(ctx,
ctx               450 drivers/gpu/drm/radeon/atom.c static void atom_put_dst(atom_exec_context *ctx, int arg, uint8_t attr,
ctx               456 drivers/gpu/drm/radeon/atom.c 	struct atom_context *gctx = ctx->ctx;
ctx               500 drivers/gpu/drm/radeon/atom.c 		ctx->ps[idx] = cpu_to_le32(val);
ctx               532 drivers/gpu/drm/radeon/atom.c 			ctx->ws[idx] = val;
ctx               586 drivers/gpu/drm/radeon/atom.c static void atom_op_add(atom_exec_context *ctx, int *ptr, int arg)
ctx               592 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               594 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               597 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               600 drivers/gpu/drm/radeon/atom.c static void atom_op_and(atom_exec_context *ctx, int *ptr, int arg)
ctx               606 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               608 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               611 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               614 drivers/gpu/drm/radeon/atom.c static void atom_op_beep(atom_exec_context *ctx, int *ptr, int arg)
ctx               619 drivers/gpu/drm/radeon/atom.c static void atom_op_calltable(atom_exec_context *ctx, int *ptr, int arg)
ctx               628 drivers/gpu/drm/radeon/atom.c 	if (U16(ctx->ctx->cmd_table + 4 + 2 * idx))
ctx               629 drivers/gpu/drm/radeon/atom.c 		r = atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift);
ctx               631 drivers/gpu/drm/radeon/atom.c 		ctx->abort = true;
ctx               635 drivers/gpu/drm/radeon/atom.c static void atom_op_clear(atom_exec_context *ctx, int *ptr, int arg)
ctx               642 drivers/gpu/drm/radeon/atom.c 	atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
ctx               644 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, 0, saved);
ctx               647 drivers/gpu/drm/radeon/atom.c static void atom_op_compare(atom_exec_context *ctx, int *ptr, int arg)
ctx               652 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               654 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               655 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->cs_equal = (dst == src);
ctx               656 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->cs_above = (dst > src);
ctx               657 drivers/gpu/drm/radeon/atom.c 	SDEBUG("   result: %s %s\n", ctx->ctx->cs_equal ? "EQ" : "NE",
ctx               658 drivers/gpu/drm/radeon/atom.c 	       ctx->ctx->cs_above ? "GT" : "LE");
ctx               661 drivers/gpu/drm/radeon/atom.c static void atom_op_delay(atom_exec_context *ctx, int *ptr, int arg)
ctx               673 drivers/gpu/drm/radeon/atom.c static void atom_op_div(atom_exec_context *ctx, int *ptr, int arg)
ctx               678 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               680 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               682 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->divmul[0] = dst / src;
ctx               683 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->divmul[1] = dst % src;
ctx               685 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->divmul[0] = 0;
ctx               686 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->divmul[1] = 0;
ctx               690 drivers/gpu/drm/radeon/atom.c static void atom_op_eot(atom_exec_context *ctx, int *ptr, int arg)
ctx               695 drivers/gpu/drm/radeon/atom.c static void atom_op_jump(atom_exec_context *ctx, int *ptr, int arg)
ctx               703 drivers/gpu/drm/radeon/atom.c 		execute = ctx->ctx->cs_above;
ctx               706 drivers/gpu/drm/radeon/atom.c 		execute = ctx->ctx->cs_above || ctx->ctx->cs_equal;
ctx               712 drivers/gpu/drm/radeon/atom.c 		execute = !(ctx->ctx->cs_above || ctx->ctx->cs_equal);
ctx               715 drivers/gpu/drm/radeon/atom.c 		execute = !ctx->ctx->cs_above;
ctx               718 drivers/gpu/drm/radeon/atom.c 		execute = ctx->ctx->cs_equal;
ctx               721 drivers/gpu/drm/radeon/atom.c 		execute = !ctx->ctx->cs_equal;
ctx               728 drivers/gpu/drm/radeon/atom.c 		if (ctx->last_jump == (ctx->start + target)) {
ctx               730 drivers/gpu/drm/radeon/atom.c 			if (time_after(cjiffies, ctx->last_jump_jiffies)) {
ctx               731 drivers/gpu/drm/radeon/atom.c 				cjiffies -= ctx->last_jump_jiffies;
ctx               734 drivers/gpu/drm/radeon/atom.c 					ctx->abort = true;
ctx               738 drivers/gpu/drm/radeon/atom.c 				ctx->last_jump_jiffies = jiffies;
ctx               741 drivers/gpu/drm/radeon/atom.c 			ctx->last_jump = ctx->start + target;
ctx               742 drivers/gpu/drm/radeon/atom.c 			ctx->last_jump_jiffies = jiffies;
ctx               744 drivers/gpu/drm/radeon/atom.c 		*ptr = ctx->start + target;
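
The last_jump bookkeeping above is an anti-hang watchdog: a backward jump to the same target is timed against jiffies, and the table is aborted if it spins too long. Filling in the lines this index skips, as a reconstruction (the 5-second threshold follows the upstream abort message, but treat the details as a sketch):

/* Watchdog around repeated jump targets in atom_op_jump(). */
if (ctx->last_jump == (ctx->start + target)) {
	cjiffies = jiffies;
	if (time_after(cjiffies, ctx->last_jump_jiffies)) {
		cjiffies -= ctx->last_jump_jiffies;
		if (jiffies_to_msecs(cjiffies) > 5000) {
			DRM_ERROR("atombios stuck in loop for more than 5secs aborting\n");
			ctx->abort = true;
		}
	} else {
		/* jiffies wrapped around; just restart the timer */
		ctx->last_jump_jiffies = jiffies;
	}
} else {
	ctx->last_jump = ctx->start + target;
	ctx->last_jump_jiffies = jiffies;
}
*ptr = ctx->start + target;
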
ctx               748 drivers/gpu/drm/radeon/atom.c static void atom_op_mask(atom_exec_context *ctx, int *ptr, int arg)
ctx               754 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               755 drivers/gpu/drm/radeon/atom.c 	mask = atom_get_src_direct(ctx, ((attr >> 3) & 7), ptr);
ctx               758 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               762 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               765 drivers/gpu/drm/radeon/atom.c static void atom_op_move(atom_exec_context *ctx, int *ptr, int arg)
ctx               771 drivers/gpu/drm/radeon/atom.c 		atom_get_dst(ctx, arg, attr, ptr, &saved, 0);
ctx               773 drivers/gpu/drm/radeon/atom.c 		atom_skip_dst(ctx, arg, attr, ptr);
ctx               777 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               779 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, src, saved);
ctx               782 drivers/gpu/drm/radeon/atom.c static void atom_op_mul(atom_exec_context *ctx, int *ptr, int arg)
ctx               787 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx               789 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               790 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->divmul[0] = dst * src;
ctx               793 drivers/gpu/drm/radeon/atom.c static void atom_op_nop(atom_exec_context *ctx, int *ptr, int arg)
ctx               798 drivers/gpu/drm/radeon/atom.c static void atom_op_or(atom_exec_context *ctx, int *ptr, int arg)
ctx               804 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               806 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               809 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               812 drivers/gpu/drm/radeon/atom.c static void atom_op_postcard(atom_exec_context *ctx, int *ptr, int arg)
ctx               818 drivers/gpu/drm/radeon/atom.c static void atom_op_repeat(atom_exec_context *ctx, int *ptr, int arg)
ctx               823 drivers/gpu/drm/radeon/atom.c static void atom_op_restorereg(atom_exec_context *ctx, int *ptr, int arg)
ctx               828 drivers/gpu/drm/radeon/atom.c static void atom_op_savereg(atom_exec_context *ctx, int *ptr, int arg)
ctx               833 drivers/gpu/drm/radeon/atom.c static void atom_op_setdatablock(atom_exec_context *ctx, int *ptr, int arg)
ctx               839 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->data_block = 0;
ctx               841 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->data_block = ctx->start;
ctx               843 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->data_block = U16(ctx->ctx->data_table + 4 + 2 * idx);
ctx               844 drivers/gpu/drm/radeon/atom.c 	SDEBUG("   base: 0x%04X\n", ctx->ctx->data_block);
ctx               847 drivers/gpu/drm/radeon/atom.c static void atom_op_setfbbase(atom_exec_context *ctx, int *ptr, int arg)
ctx               851 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->fb_base = atom_get_src(ctx, attr, ptr);
ctx               854 drivers/gpu/drm/radeon/atom.c static void atom_op_setport(atom_exec_context *ctx, int *ptr, int arg)
ctx               865 drivers/gpu/drm/radeon/atom.c 			ctx->ctx->io_mode = ATOM_IO_MM;
ctx               867 drivers/gpu/drm/radeon/atom.c 			ctx->ctx->io_mode = ATOM_IO_IIO | port;
ctx               871 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->io_mode = ATOM_IO_PCI;
ctx               875 drivers/gpu/drm/radeon/atom.c 		ctx->ctx->io_mode = ATOM_IO_SYSIO;
ctx               881 drivers/gpu/drm/radeon/atom.c static void atom_op_setregblock(atom_exec_context *ctx, int *ptr, int arg)
ctx               883 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->reg_block = U16(*ptr);
ctx               885 drivers/gpu/drm/radeon/atom.c 	SDEBUG("   base: 0x%04X\n", ctx->ctx->reg_block);
ctx               888 drivers/gpu/drm/radeon/atom.c static void atom_op_shift_left(atom_exec_context *ctx, int *ptr, int arg)
ctx               896 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               897 drivers/gpu/drm/radeon/atom.c 	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
ctx               901 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               904 drivers/gpu/drm/radeon/atom.c static void atom_op_shift_right(atom_exec_context *ctx, int *ptr, int arg)
ctx               912 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               913 drivers/gpu/drm/radeon/atom.c 	shift = atom_get_src_direct(ctx, ATOM_SRC_BYTE0, ptr);
ctx               917 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               920 drivers/gpu/drm/radeon/atom.c static void atom_op_shl(atom_exec_context *ctx, int *ptr, int arg)
ctx               927 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               930 drivers/gpu/drm/radeon/atom.c 	shift = atom_get_src(ctx, attr, ptr);
ctx               936 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               939 drivers/gpu/drm/radeon/atom.c static void atom_op_shr(atom_exec_context *ctx, int *ptr, int arg)
ctx               946 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               949 drivers/gpu/drm/radeon/atom.c 	shift = atom_get_src(ctx, attr, ptr);
ctx               955 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               958 drivers/gpu/drm/radeon/atom.c static void atom_op_sub(atom_exec_context *ctx, int *ptr, int arg)
ctx               964 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx               966 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               969 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
ctx               972 drivers/gpu/drm/radeon/atom.c static void atom_op_switch(atom_exec_context *ctx, int *ptr, int arg)
ctx               977 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx               983 drivers/gpu/drm/radeon/atom.c 			    atom_get_src(ctx, (attr & 0x38) | ATOM_ARG_IMM,
ctx               988 drivers/gpu/drm/radeon/atom.c 				*ptr = ctx->start + target;
ctx               999 drivers/gpu/drm/radeon/atom.c static void atom_op_test(atom_exec_context *ctx, int *ptr, int arg)
ctx              1004 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, NULL, 1);
ctx              1006 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx              1007 drivers/gpu/drm/radeon/atom.c 	ctx->ctx->cs_equal = ((dst & src) == 0);
ctx              1008 drivers/gpu/drm/radeon/atom.c 	SDEBUG("   result: %s\n", ctx->ctx->cs_equal ? "EQ" : "NE");
ctx              1011 drivers/gpu/drm/radeon/atom.c static void atom_op_xor(atom_exec_context *ctx, int *ptr, int arg)
ctx              1017 drivers/gpu/drm/radeon/atom.c 	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
ctx              1019 drivers/gpu/drm/radeon/atom.c 	src = atom_get_src(ctx, attr, ptr);
ctx              1022 drivers/gpu/drm/radeon/atom.c 	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
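
All the two-operand handlers above (add, and, or, sub, xor, shl, shr) share one read-modify-write skeleton; only the middle line differs per opcode. A composite sketch (atom_op_alu is not a function in atom.c, it condenses the pattern):

/* Shared skeleton of the ALU opcode handlers indexed above: decode the
 * operand-attribute byte, fetch dst (saving the untouched bits) and src,
 * apply the op, write dst back through the same attribute encoding. */
static void atom_op_alu(atom_exec_context *ctx, int *ptr, int arg)
{
	uint8_t attr = U8((*ptr)++);	/* operand encoding byte */
	uint32_t dst, src, saved;
	int dptr = *ptr;

	dst = atom_get_dst(ctx, arg, attr, ptr, &saved, 1);
	src = atom_get_src(ctx, attr, ptr);
	dst += src;			/* op-specific step (here: add) */
	atom_put_dst(ctx, arg, attr, &dptr, dst, saved);
}
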
ctx              1025 drivers/gpu/drm/radeon/atom.c static void atom_op_debug(atom_exec_context *ctx, int *ptr, int arg)
ctx              1158 drivers/gpu/drm/radeon/atom.c static int atom_execute_table_locked(struct atom_context *ctx, int index, uint32_t * params)
ctx              1160 drivers/gpu/drm/radeon/atom.c 	int base = CU16(ctx->cmd_table + 4 + 2 * index);
ctx              1176 drivers/gpu/drm/radeon/atom.c 	ectx.ctx = ctx;
ctx              1219 drivers/gpu/drm/radeon/atom.c int atom_execute_table_scratch_unlocked(struct atom_context *ctx, int index, uint32_t * params)
ctx              1223 drivers/gpu/drm/radeon/atom.c 	mutex_lock(&ctx->mutex);
ctx              1225 drivers/gpu/drm/radeon/atom.c 	ctx->data_block = 0;
ctx              1227 drivers/gpu/drm/radeon/atom.c 	ctx->reg_block = 0;
ctx              1229 drivers/gpu/drm/radeon/atom.c 	ctx->fb_base = 0;
ctx              1231 drivers/gpu/drm/radeon/atom.c 	ctx->io_mode = ATOM_IO_MM;
ctx              1233 drivers/gpu/drm/radeon/atom.c 	ctx->divmul[0] = 0;
ctx              1234 drivers/gpu/drm/radeon/atom.c 	ctx->divmul[1] = 0;
ctx              1235 drivers/gpu/drm/radeon/atom.c 	r = atom_execute_table_locked(ctx, index, params);
ctx              1236 drivers/gpu/drm/radeon/atom.c 	mutex_unlock(&ctx->mutex);
ctx              1240 drivers/gpu/drm/radeon/atom.c int atom_execute_table(struct atom_context *ctx, int index, uint32_t * params)
ctx              1243 drivers/gpu/drm/radeon/atom.c 	mutex_lock(&ctx->scratch_mutex);
ctx              1244 drivers/gpu/drm/radeon/atom.c 	r = atom_execute_table_scratch_unlocked(ctx, index, params);
ctx              1245 drivers/gpu/drm/radeon/atom.c 	mutex_unlock(&ctx->scratch_mutex);
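The three execute-table entry points above form a small locking hierarchy: atom_execute_table() takes ctx->scratch_mutex, atom_execute_table_scratch_unlocked() takes ctx->mutex and zeroes the interpreter's per-run state, and atom_execute_table_locked() actually walks the command table. A sketch of that call chain, assembled from the lines indexed above (struct atom_context fields beyond those shown are assumed):

    int atom_execute_table_scratch_unlocked(struct atom_context *ctx,
                                            int index, uint32_t *params)
    {
            int r;

            mutex_lock(&ctx->mutex);        /* inner lock: one interpreter run */
            ctx->data_block = 0;            /* reset per-run interpreter state */
            ctx->reg_block = 0;
            ctx->fb_base = 0;
            ctx->io_mode = ATOM_IO_MM;
            ctx->divmul[0] = 0;
            ctx->divmul[1] = 0;
            r = atom_execute_table_locked(ctx, index, params);
            mutex_unlock(&ctx->mutex);
            return r;
    }

    int atom_execute_table(struct atom_context *ctx, int index, uint32_t *params)
    {
            int r;

            mutex_lock(&ctx->scratch_mutex); /* outer lock: scratch space */
            r = atom_execute_table_scratch_unlocked(ctx, index, params);
            mutex_unlock(&ctx->scratch_mutex);
            return r;
    }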
ctx              1251 drivers/gpu/drm/radeon/atom.c static void atom_index_iio(struct atom_context *ctx, int base)
ctx              1253 drivers/gpu/drm/radeon/atom.c 	ctx->iio = kzalloc(2 * 256, GFP_KERNEL);
ctx              1254 drivers/gpu/drm/radeon/atom.c 	if (!ctx->iio)
ctx              1257 drivers/gpu/drm/radeon/atom.c 		ctx->iio[CU8(base + 1)] = base + 2;
ctx              1268 drivers/gpu/drm/radeon/atom.c 	struct atom_context *ctx =
ctx              1274 drivers/gpu/drm/radeon/atom.c 	if (!ctx)
ctx              1277 drivers/gpu/drm/radeon/atom.c 	ctx->card = card;
ctx              1278 drivers/gpu/drm/radeon/atom.c 	ctx->bios = bios;
ctx              1282 drivers/gpu/drm/radeon/atom.c 		kfree(ctx);
ctx              1289 drivers/gpu/drm/radeon/atom.c 		kfree(ctx);
ctx              1298 drivers/gpu/drm/radeon/atom.c 		kfree(ctx);
ctx              1302 drivers/gpu/drm/radeon/atom.c 	ctx->cmd_table = CU16(base + ATOM_ROM_CMD_PTR);
ctx              1303 drivers/gpu/drm/radeon/atom.c 	ctx->data_table = CU16(base + ATOM_ROM_DATA_PTR);
ctx              1304 drivers/gpu/drm/radeon/atom.c 	atom_index_iio(ctx, CU16(ctx->data_table + ATOM_DATA_IIO_PTR) + 4);
ctx              1305 drivers/gpu/drm/radeon/atom.c 	if (!ctx->iio) {
ctx              1306 drivers/gpu/drm/radeon/atom.c 		atom_destroy(ctx);
ctx              1323 drivers/gpu/drm/radeon/atom.c 	return ctx;
ctx              1326 drivers/gpu/drm/radeon/atom.c int atom_asic_init(struct atom_context *ctx)
ctx              1328 drivers/gpu/drm/radeon/atom.c 	struct radeon_device *rdev = ctx->card->dev->dev_private;
ctx              1329 drivers/gpu/drm/radeon/atom.c 	int hwi = CU16(ctx->data_table + ATOM_DATA_FWI_PTR);
ctx              1340 drivers/gpu/drm/radeon/atom.c 	if (!CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_INIT))
ctx              1342 drivers/gpu/drm/radeon/atom.c 	ret = atom_execute_table(ctx, ATOM_CMD_INIT, ps);
ctx              1349 drivers/gpu/drm/radeon/atom.c 		if (CU16(ctx->cmd_table + 4 + 2 * ATOM_CMD_SPDFANCNTL))
ctx              1350 drivers/gpu/drm/radeon/atom.c 			atom_execute_table(ctx, ATOM_CMD_SPDFANCNTL, ps);
ctx              1355 drivers/gpu/drm/radeon/atom.c void atom_destroy(struct atom_context *ctx)
ctx              1357 drivers/gpu/drm/radeon/atom.c 	kfree(ctx->iio);
ctx              1358 drivers/gpu/drm/radeon/atom.c 	kfree(ctx);
ctx              1361 drivers/gpu/drm/radeon/atom.c bool atom_parse_data_header(struct atom_context *ctx, int index,
ctx              1366 drivers/gpu/drm/radeon/atom.c 	int idx = CU16(ctx->data_table + offset);
ctx              1367 drivers/gpu/drm/radeon/atom.c 	u16 *mdt = (u16 *)(ctx->bios + ctx->data_table + 4);
ctx              1382 drivers/gpu/drm/radeon/atom.c bool atom_parse_cmd_header(struct atom_context *ctx, int index, uint8_t * frev,
ctx              1386 drivers/gpu/drm/radeon/atom.c 	int idx = CU16(ctx->cmd_table + offset);
ctx              1387 drivers/gpu/drm/radeon/atom.c 	u16 *mct = (u16 *)(ctx->bios + ctx->cmd_table + 4);
ctx              1399 drivers/gpu/drm/radeon/atom.c int atom_allocate_fb_scratch(struct atom_context *ctx)
ctx              1406 drivers/gpu/drm/radeon/atom.c 	if (atom_parse_data_header(ctx, index, NULL, NULL, NULL, &data_offset)) {
ctx              1407 drivers/gpu/drm/radeon/atom.c 		firmware_usage = (struct _ATOM_VRAM_USAGE_BY_FIRMWARE *)(ctx->bios + data_offset);
ctx              1415 drivers/gpu/drm/radeon/atom.c 	ctx->scratch_size_bytes = 0;
ctx              1419 drivers/gpu/drm/radeon/atom.c 	ctx->scratch = kzalloc(usage_bytes, GFP_KERNEL);
ctx              1420 drivers/gpu/drm/radeon/atom.c 	if (!ctx->scratch)
ctx              1422 drivers/gpu/drm/radeon/atom.c 	ctx->scratch_size_bytes = usage_bytes;
ctx               151 drivers/gpu/drm/radeon/atom.h bool atom_parse_data_header(struct atom_context *ctx, int index, uint16_t *size,
ctx               153 drivers/gpu/drm/radeon/atom.h bool atom_parse_cmd_header(struct atom_context *ctx, int index,
ctx               155 drivers/gpu/drm/radeon/atom.h int atom_allocate_fb_scratch(struct atom_context *ctx);
ctx               136 drivers/gpu/drm/radeon/radeon_atombios.c 	struct atom_context *ctx = rdev->mode_info.atom_context;
ctx               147 drivers/gpu/drm/radeon/radeon_atombios.c 	if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               148 drivers/gpu/drm/radeon/radeon_atombios.c 		i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
ctx               172 drivers/gpu/drm/radeon/radeon_atombios.c 	struct atom_context *ctx = rdev->mode_info.atom_context;
ctx               181 drivers/gpu/drm/radeon/radeon_atombios.c 	if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               182 drivers/gpu/drm/radeon/radeon_atombios.c 		i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);
ctx               206 drivers/gpu/drm/radeon/radeon_atombios.c 	struct atom_context *ctx = rdev->mode_info.atom_context;
ctx               217 drivers/gpu/drm/radeon/radeon_atombios.c 	if (atom_parse_data_header(ctx, index, &size, NULL, NULL, &data_offset)) {
ctx               218 drivers/gpu/drm/radeon/radeon_atombios.c 		gpio_info = (struct _ATOM_GPIO_PIN_LUT *)(ctx->bios + data_offset);
ctx               525 drivers/gpu/drm/radeon/radeon_atombios.c 	struct atom_context *ctx = mode_info->atom_context;
ctx               542 drivers/gpu/drm/radeon/radeon_atombios.c 	if (!atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
ctx               548 drivers/gpu/drm/radeon/radeon_atombios.c 	obj_header = (ATOM_OBJECT_HEADER *) (ctx->bios + data_offset);
ctx               550 drivers/gpu/drm/radeon/radeon_atombios.c 	    (ctx->bios + data_offset +
ctx               553 drivers/gpu/drm/radeon/radeon_atombios.c 	    (ctx->bios + data_offset +
ctx               556 drivers/gpu/drm/radeon/radeon_atombios.c 	    (ctx->bios + data_offset +
ctx               559 drivers/gpu/drm/radeon/radeon_atombios.c 		(ctx->bios + data_offset +
ctx               600 drivers/gpu/drm/radeon/radeon_atombios.c 				if (atom_parse_data_header(ctx, index, &size, &frev,
ctx               606 drivers/gpu/drm/radeon/radeon_atombios.c 							 *) (ctx->bios + igp_offset);
ctx               667 drivers/gpu/drm/radeon/radeon_atombios.c 								(ctx->bios + data_offset +
ctx               698 drivers/gpu/drm/radeon/radeon_atombios.c 								(ctx->bios + data_offset +
ctx               706 drivers/gpu/drm/radeon/radeon_atombios.c 								(ctx->bios + data_offset +
ctx               776 drivers/gpu/drm/radeon/radeon_atombios.c 						    (ctx->bios + data_offset +
ctx               863 drivers/gpu/drm/radeon/radeon_atombios.c 		struct atom_context *ctx = mode_info->atom_context;
ctx               869 drivers/gpu/drm/radeon/radeon_atombios.c 		if (atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset)) {
ctx               870 drivers/gpu/drm/radeon/radeon_atombios.c 			xtmds = (ATOM_XTMDS_INFO *)(ctx->bios + data_offset);
ctx               907 drivers/gpu/drm/radeon/radeon_atombios.c 	struct atom_context *ctx = mode_info->atom_context;
ctx               926 drivers/gpu/drm/radeon/radeon_atombios.c 	if (!atom_parse_data_header(ctx, index, &size, &frev, &crev,
ctx               933 drivers/gpu/drm/radeon/radeon_atombios.c 	    (union atom_supported_devices *)(ctx->bios + data_offset);
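The radeon_atombios.c callers above all use the same access idiom: atom_parse_data_header() maps a master-data-table index to a BIOS offset (optionally reporting size and the frev/crev revisions), and the caller then overlays the matching ATOM structure at ctx->bios + data_offset. A minimal sketch of that idiom; the GetIndexIntoMasterTable() macro and the choice of the GPIO_I2C_Info table are illustrative assumptions:

    struct atom_context *ctx = rdev->mode_info.atom_context;
    int index = GetIndexIntoMasterTable(DATA, GPIO_I2C_Info); /* illustrative */
    struct _ATOM_GPIO_I2C_INFO *i2c_info;
    uint16_t size, data_offset;
    uint8_t frev, crev;

    /* Resolve the table; a BIOS that lacks it simply fails the lookup. */
    if (!atom_parse_data_header(ctx, index, &size, &frev, &crev, &data_offset))
            return;

    /* Overlay the ATOM structure directly onto the BIOS image. */
    i2c_info = (struct _ATOM_GPIO_I2C_INFO *)(ctx->bios + data_offset);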
ctx               240 drivers/gpu/drm/radeon/radeon_display.c 				 struct drm_modeset_acquire_ctx *ctx)
ctx               485 drivers/gpu/drm/radeon/radeon_display.c 					struct drm_modeset_acquire_ctx *ctx)
ctx               620 drivers/gpu/drm/radeon/radeon_display.c 		       struct drm_modeset_acquire_ctx *ctx)
ctx               637 drivers/gpu/drm/radeon/radeon_display.c 	ret = drm_crtc_helper_set_config(set, ctx);
ctx               291 drivers/gpu/drm/radeon/radeon_gem.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               352 drivers/gpu/drm/radeon/radeon_gem.c 		r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx                67 drivers/gpu/drm/radeon/radeon_mn.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               114 drivers/gpu/drm/radeon/radeon_mn.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               331 drivers/gpu/drm/radeon/radeon_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               374 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               396 drivers/gpu/drm/radeon/radeon_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               410 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               537 drivers/gpu/drm/radeon/radeon_object.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               579 drivers/gpu/drm/radeon/radeon_object.c 			r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
ctx               799 drivers/gpu/drm/radeon/radeon_object.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               831 drivers/gpu/drm/radeon/radeon_object.c 	r = ttm_bo_validate(bo, &rbo->placement, &ctx);
ctx               834 drivers/gpu/drm/radeon/radeon_object.c 		return ttm_bo_validate(bo, &rbo->placement, &ctx);
ctx               261 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_operation_ctx ctx = { interruptible, no_wait_gpu };
ctx               277 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, &ctx);
ctx               287 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_tt_bind(bo->ttm, &tmp_mem, &ctx);
ctx               295 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_move_ttm(bo, &ctx, new_mem);
ctx               306 drivers/gpu/drm/radeon/radeon_ttm.c 	struct ttm_operation_ctx ctx = { interruptible, no_wait_gpu };
ctx               322 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_mem_space(bo, &placement, &tmp_mem, &ctx);
ctx               326 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_move_ttm(bo, &ctx, &tmp_mem);
ctx               340 drivers/gpu/drm/radeon/radeon_ttm.c 			  struct ttm_operation_ctx *ctx,
ctx               348 drivers/gpu/drm/radeon/radeon_ttm.c 	r = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx               378 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_vram_ram(bo, evict, ctx->interruptible,
ctx               379 drivers/gpu/drm/radeon/radeon_ttm.c 					ctx->no_wait_gpu, new_mem);
ctx               382 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_ram_vram(bo, evict, ctx->interruptible,
ctx               383 drivers/gpu/drm/radeon/radeon_ttm.c 					    ctx->no_wait_gpu, new_mem);
ctx               385 drivers/gpu/drm/radeon/radeon_ttm.c 		r = radeon_move_blit(bo, evict, ctx->no_wait_gpu,
ctx               391 drivers/gpu/drm/radeon/radeon_ttm.c 		r = ttm_bo_move_memcpy(bo, ctx, new_mem);
ctx               667 drivers/gpu/drm/radeon/radeon_ttm.c 			struct ttm_operation_ctx *ctx)
ctx               693 drivers/gpu/drm/radeon/radeon_ttm.c 		return ttm_agp_tt_populate(ttm, ctx);
ctx               699 drivers/gpu/drm/radeon/radeon_ttm.c 		return ttm_dma_populate(&gtt->ttm, rdev->dev, ctx);
ctx               703 drivers/gpu/drm/radeon/radeon_ttm.c 	return ttm_populate_and_map_pages(rdev->dev, &gtt->ttm, ctx);
ctx               390 drivers/gpu/drm/radeon/radeon_vm.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               400 drivers/gpu/drm/radeon/radeon_vm.c 	r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
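Throughout the radeon entries above, struct ttm_operation_ctx is built on the stack from two booleans, { interruptible, no_wait_gpu }, and passed to ttm_bo_validate(). A minimal sketch of the call site, assuming a struct radeon_bo *bo whose placement has already been set up:

    struct ttm_operation_ctx ctx = { true, false }; /* interruptible, may wait on GPU */
    int r;

    /* Ask TTM to (re)validate the buffer against its requested placement. */
    r = ttm_bo_validate(&bo->tbo, &bo->placement, &ctx);
    if (r)
            return r; /* e.g. -ERESTARTSYS if a signal interrupted the wait */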
ctx              1002 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 	struct drm_modeset_acquire_ctx ctx;
ctx              1016 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              1024 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 	state->acquire_ctx = &ctx;
ctx              1042 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 		drm_modeset_backoff(&ctx);
ctx              1049 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 	drm_modeset_drop_locks(&ctx);
ctx              1050 drivers/gpu/drm/rcar-du/rcar_du_crtc.c 	drm_modeset_acquire_fini(&ctx);
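The rcar-du lines above are the canonical DRM acquire-context retry loop: initialize the context, point the atomic state at it, and on -EDEADLK back off (dropping every held modeset lock) before retrying until the full lock set is acquired. A condensed sketch of the loop; the drm_atomic_commit() step stands in for whatever work the real caller does under the locks:

    struct drm_modeset_acquire_ctx ctx;
    int ret;

    drm_modeset_acquire_init(&ctx, 0);
    state->acquire_ctx = &ctx;

    retry:
    ret = drm_atomic_commit(state);         /* illustrative work under the locks */
    if (ret == -EDEADLK) {
            /* Someone else holds part of our lock set: drop, wait, retry. */
            drm_atomic_state_clear(state);
            drm_modeset_backoff(&ctx);
            goto retry;
    }

    drm_modeset_drop_locks(&ctx);
    drm_modeset_acquire_fini(&ctx);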
ctx                82 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h 			     __field(uint64_t, ctx)
ctx                90 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h 			   __entry->ctx = fence->context;
ctx                95 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h 		      __entry->fence, __entry->ctx,
ctx               409 drivers/gpu/drm/shmobile/shmob_drm_crtc.c 				    struct drm_modeset_acquire_ctx *ctx)
ctx               175 drivers/gpu/drm/shmobile/shmob_drm_plane.c 		       struct drm_modeset_acquire_ctx *ctx)
ctx               207 drivers/gpu/drm/shmobile/shmob_drm_plane.c 				   struct drm_modeset_acquire_ctx *ctx)
ctx               137 drivers/gpu/drm/ttm/ttm_agp_backend.c int ttm_agp_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ctx               142 drivers/gpu/drm/ttm/ttm_agp_backend.c 	return ttm_pool_populate(ttm, ctx);
ctx               322 drivers/gpu/drm/ttm/ttm_bo.c 				  struct ttm_operation_ctx *ctx)
ctx               357 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_tt_bind(bo->ttm, mem, ctx);
ctx               376 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_ttm(bo, ctx, mem);
ctx               378 drivers/gpu/drm/ttm/ttm_bo.c 		ret = bdev->driver->move(bo, evict, ctx, mem);
ctx               380 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_memcpy(bo, ctx, mem);
ctx               408 drivers/gpu/drm/ttm/ttm_bo.c 	ctx->bytes_moved += bo->num_pages << PAGE_SHIFT;
ctx               710 drivers/gpu/drm/ttm/ttm_bo.c 			struct ttm_operation_ctx *ctx)
ctx               736 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_mem_space(bo, &placement, &evict_mem, ctx);
ctx               746 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_handle_move_mem(bo, &evict_mem, true, ctx);
ctx               783 drivers/gpu/drm/ttm/ttm_bo.c 			struct ttm_operation_ctx *ctx, bool *locked, bool *busy)
ctx               787 drivers/gpu/drm/ttm/ttm_bo.c 	if (bo->base.resv == ctx->resv) {
ctx               789 drivers/gpu/drm/ttm/ttm_bo.c 		if (ctx->flags & TTM_OPT_FLAG_ALLOW_RES_EVICT
ctx               815 drivers/gpu/drm/ttm/ttm_bo.c 				   struct ttm_operation_ctx *ctx,
ctx               823 drivers/gpu/drm/ttm/ttm_bo.c 	if (ctx->interruptible)
ctx               843 drivers/gpu/drm/ttm/ttm_bo.c 			       struct ttm_operation_ctx *ctx,
ctx               858 drivers/gpu/drm/ttm/ttm_bo.c 			if (!ttm_bo_evict_swapout_allowable(bo, ctx, &locked,
ctx               886 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_mem_evict_wait_busy(busy_bo, ctx, ticket);
ctx               895 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_cleanup_refs(bo, ctx->interruptible,
ctx               896 drivers/gpu/drm/ttm/ttm_bo.c 					  ctx->no_wait_gpu, locked);
ctx               904 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_evict(bo, ctx);
ctx               967 drivers/gpu/drm/ttm/ttm_bo.c 				  struct ttm_operation_ctx *ctx)
ctx               981 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_mem_evict_first(bdev, mem->mem_type, place, ctx,
ctx               987 drivers/gpu/drm/ttm/ttm_bo.c 	return ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
ctx              1048 drivers/gpu/drm/ttm/ttm_bo.c 				struct ttm_operation_ctx *ctx)
ctx              1098 drivers/gpu/drm/ttm/ttm_bo.c 			struct ttm_operation_ctx *ctx)
ctx              1113 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
ctx              1132 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_add_move_fence(bo, man, mem, ctx->no_wait_gpu);
ctx              1146 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_placement(bo, place, mem, ctx);
ctx              1157 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_mem_force_space(bo, place, mem, ctx);
ctx              1184 drivers/gpu/drm/ttm/ttm_bo.c 			      struct ttm_operation_ctx *ctx)
ctx              1199 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_mem_space(bo, placement, &mem, ctx);
ctx              1202 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_bo_handle_move_mem(bo, &mem, false, ctx);
ctx              1254 drivers/gpu/drm/ttm/ttm_bo.c 		    struct ttm_operation_ctx *ctx)
ctx              1264 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_move_buffer(bo, placement, ctx);
ctx              1293 drivers/gpu/drm/ttm/ttm_bo.c 			 struct ttm_operation_ctx *ctx,
ctx              1304 drivers/gpu/drm/ttm/ttm_bo.c 	ret = ttm_mem_global_alloc(mem_glob, acc_size, ctx);
ctx              1382 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_validate(bo, placement, ctx);
ctx              1414 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_operation_ctx ctx = { interruptible, false };
ctx              1418 drivers/gpu/drm/ttm/ttm_bo.c 				   page_alignment, &ctx, acc_size,
ctx              1488 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_operation_ctx ctx = {
ctx              1507 drivers/gpu/drm/ttm/ttm_bo.c 			ret = ttm_mem_evict_first(bdev, mem_type, NULL, &ctx,
ctx              1869 drivers/gpu/drm/ttm/ttm_bo.c int ttm_bo_swapout(struct ttm_bo_global *glob, struct ttm_operation_ctx *ctx)
ctx              1879 drivers/gpu/drm/ttm/ttm_bo.c 			if (ttm_bo_evict_swapout_allowable(bo, ctx, &locked,
ctx              1911 drivers/gpu/drm/ttm/ttm_bo.c 		struct ttm_operation_ctx ctx = { false, false };
ctx              1919 drivers/gpu/drm/ttm/ttm_bo.c 		ret = ttm_bo_handle_move_mem(bo, &evict_mem, true, &ctx);
ctx              1959 drivers/gpu/drm/ttm/ttm_bo.c 	struct ttm_operation_ctx ctx = {
ctx              1964 drivers/gpu/drm/ttm/ttm_bo.c 	while (ttm_bo_swapout(bdev->glob, &ctx) == 0)
ctx                54 drivers/gpu/drm/ttm/ttm_bo_util.c 		   struct ttm_operation_ctx *ctx,
ctx                62 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx                82 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_tt_bind(ttm, new_mem, ctx);
ctx               357 drivers/gpu/drm/ttm/ttm_bo_util.c 		       struct ttm_operation_ctx *ctx,
ctx               373 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_bo_wait(bo, ctx->interruptible, ctx->no_wait_gpu);
ctx               404 drivers/gpu/drm/ttm/ttm_bo_util.c 		ret = ttm_tt_populate(ttm, ctx);
ctx               583 drivers/gpu/drm/ttm/ttm_bo_util.c 	struct ttm_operation_ctx ctx = {
ctx               593 drivers/gpu/drm/ttm/ttm_bo_util.c 	ret = ttm_tt_populate(ttm, &ctx);
ctx               235 drivers/gpu/drm/ttm/ttm_bo_vm.c 		struct ttm_operation_ctx ctx = {
ctx               247 drivers/gpu/drm/ttm/ttm_bo_vm.c 		if (ttm_tt_populate(ttm, &ctx)) {
ctx               270 drivers/gpu/drm/ttm/ttm_memory.c 			uint64_t extra, struct ttm_operation_ctx *ctx)
ctx               278 drivers/gpu/drm/ttm/ttm_memory.c 		ret = ttm_bo_swapout(glob->bo_glob, ctx);
ctx               289 drivers/gpu/drm/ttm/ttm_memory.c 	struct ttm_operation_ctx ctx = {
ctx               296 drivers/gpu/drm/ttm/ttm_memory.c 	ttm_shrink(glob, true, 0ULL, &ctx);
ctx               543 drivers/gpu/drm/ttm/ttm_memory.c 			struct ttm_operation_ctx *ctx)
ctx               547 drivers/gpu/drm/ttm/ttm_memory.c 	if (ctx->flags & TTM_OPT_FLAG_FORCE_ALLOC)
ctx               602 drivers/gpu/drm/ttm/ttm_memory.c 				     struct ttm_operation_ctx *ctx)
ctx               610 drivers/gpu/drm/ttm/ttm_memory.c 		if (ctx->no_wait_gpu)
ctx               614 drivers/gpu/drm/ttm/ttm_memory.c 		ttm_shrink(glob, false, memory + (memory >> 2) + 16, ctx);
ctx               621 drivers/gpu/drm/ttm/ttm_memory.c 			 struct ttm_operation_ctx *ctx)
ctx               628 drivers/gpu/drm/ttm/ttm_memory.c 	return ttm_mem_global_alloc_zone(glob, glob->zone_kernel, memory, ctx);
ctx               634 drivers/gpu/drm/ttm/ttm_memory.c 			      struct ttm_operation_ctx *ctx)
ctx               650 drivers/gpu/drm/ttm/ttm_memory.c 	return ttm_mem_global_alloc_zone(glob, zone, size, ctx);
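ttm_mem_global_alloc(), per the ttm_memory.c lines above, accounts the request against the kernel zone, and the zone allocator refuses to block when ctx->no_wait_gpu is set, otherwise shrinking (swapping buffer objects out) until the request plus roughly 25% slack fits. A sketch of that loop; ttm_mem_global_reserve() and the retry budget are assumptions about internals the listing does not show:

    int count = TTM_MEMORY_ALLOC_RETRIES;           /* assumed retry budget */

    while (ttm_mem_global_reserve(glob, zone, memory, true) != 0) {
            if (ctx->no_wait_gpu)
                    return -ENOMEM;                 /* caller refused to block */
            if (count-- == 0)
                    return -ENOMEM;                 /* give up eventually */
            /* Swap out until the request plus ~25% slack plus 16 bytes fits. */
            ttm_shrink(glob, false, memory + (memory >> 2) + 16, ctx);
    }
    return 0;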
ctx              1050 drivers/gpu/drm/ttm/ttm_page_alloc.c int ttm_pool_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ctx              1059 drivers/gpu/drm/ttm/ttm_page_alloc.c 	if (ttm_check_under_lowerlimit(mem_glob, ttm->num_pages, ctx))
ctx              1071 drivers/gpu/drm/ttm/ttm_page_alloc.c 						PAGE_SIZE, ctx);
ctx              1098 drivers/gpu/drm/ttm/ttm_page_alloc.c 					struct ttm_operation_ctx *ctx)
ctx              1103 drivers/gpu/drm/ttm/ttm_page_alloc.c 	r = ttm_pool_populate(&tt->ttm, ctx);
ctx               887 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 			struct ttm_operation_ctx *ctx)
ctx               901 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	if (ttm_check_under_lowerlimit(mem_glob, num_pages, ctx))
ctx               930 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 						pool->size, ctx);
ctx               967 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 						pool->size, ctx);
ctx               323 drivers/gpu/drm/ttm/ttm_tt.c 		struct ttm_operation_ctx *ctx)
ctx               333 drivers/gpu/drm/ttm/ttm_tt.c 	ret = ttm_tt_populate(ttm, ctx);
ctx               460 drivers/gpu/drm/ttm/ttm_tt.c int ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ctx               468 drivers/gpu/drm/ttm/ttm_tt.c 		ret = ttm->bdev->driver->ttm_tt_populate(ttm, ctx);
ctx               470 drivers/gpu/drm/ttm/ttm_tt.c 		ret = ttm_pool_populate(ttm, ctx);
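ttm_tt_populate(), per the ttm_tt.c lines above, defers to the driver's ttm_tt_populate hook when one is installed and otherwise falls back to the generic page pool. A condensed sketch of that dispatch (the early return for an already-populated ttm is assumed from context):

    int ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
    {
            int ret;

            if (ttm->state != tt_unpopulated)       /* assumed guard */
                    return 0;

            if (ttm->bdev->driver->ttm_tt_populate)
                    ret = ttm->bdev->driver->ttm_tt_populate(ttm, ctx);
            else
                    ret = ttm_pool_populate(ttm, ctx);
            return ret;
    }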
ctx               363 drivers/gpu/drm/udl/udl_modeset.c 			      struct drm_modeset_acquire_ctx *ctx)
ctx                17 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_report_flags_location(struct gen_pool *ctx, u32 location)
ctx                21 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_HGSMI,
ctx                29 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_submit(ctx, p);
ctx                30 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_free(ctx, p);
ctx                41 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_send_caps_info(struct gen_pool *ctx, u32 caps)
ctx                45 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA, VBVA_INFO_CAPS);
ctx                52 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_submit(ctx, p);
ctx                56 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_free(ctx, p);
ctx                61 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_test_query_conf(struct gen_pool *ctx)
ctx                66 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	ret = hgsmi_query_conf(ctx, U32_MAX, &value);
ctx                80 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_query_conf(struct gen_pool *ctx, u32 index, u32 *value_ret)
ctx                84 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA,
ctx                92 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_submit(ctx, p);
ctx                96 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_free(ctx, p);
ctx               113 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_update_pointer_shape(struct gen_pool *ctx, u32 flags,
ctx               138 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p) + pixel_len, HGSMI_CH_VBVA,
ctx               152 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_submit(ctx, p);
ctx               168 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_free(ctx, p);
ctx               185 drivers/gpu/drm/vboxvideo/hgsmi_base.c int hgsmi_cursor_position(struct gen_pool *ctx, bool report_position,
ctx               190 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA,
ctx               199 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_submit(ctx, p);
ctx               204 drivers/gpu/drm/vboxvideo/hgsmi_base.c 	hgsmi_buffer_free(ctx, p);
ctx                27 drivers/gpu/drm/vboxvideo/modesetting.c void hgsmi_process_display_info(struct gen_pool *ctx, u32 display,
ctx                34 drivers/gpu/drm/vboxvideo/modesetting.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA,
ctx                49 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_submit(ctx, p);
ctx                50 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_free(ctx, p);
ctx                65 drivers/gpu/drm/vboxvideo/modesetting.c int hgsmi_update_input_mapping(struct gen_pool *ctx, s32 origin_x, s32 origin_y,
ctx                70 drivers/gpu/drm/vboxvideo/modesetting.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA,
ctx                80 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_submit(ctx, p);
ctx                81 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_free(ctx, p);
ctx                93 drivers/gpu/drm/vboxvideo/modesetting.c int hgsmi_get_mode_hints(struct gen_pool *ctx, unsigned int screens,
ctx               103 drivers/gpu/drm/vboxvideo/modesetting.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p) + size, HGSMI_CH_VBVA,
ctx               112 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_submit(ctx, p);
ctx               115 drivers/gpu/drm/vboxvideo/modesetting.c 		hgsmi_buffer_free(ctx, p);
ctx               120 drivers/gpu/drm/vboxvideo/modesetting.c 	hgsmi_buffer_free(ctx, p);
ctx                30 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_report_flags_location(struct gen_pool *ctx, u32 location);
ctx                31 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_send_caps_info(struct gen_pool *ctx, u32 caps);
ctx                32 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_test_query_conf(struct gen_pool *ctx);
ctx                33 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_query_conf(struct gen_pool *ctx, u32 index, u32 *value_ret);
ctx                34 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_update_pointer_shape(struct gen_pool *ctx, u32 flags,
ctx                37 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_cursor_position(struct gen_pool *ctx, bool report_position,
ctx                40 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h bool vbva_enable(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx                42 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h void vbva_disable(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx                45 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h 			      struct gen_pool *ctx);
ctx                47 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h bool vbva_write(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx                52 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h void hgsmi_process_display_info(struct gen_pool *ctx, u32 display,
ctx                56 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_update_input_mapping(struct gen_pool *ctx, s32 origin_x, s32 origin_y,
ctx                58 drivers/gpu/drm/vboxvideo/vboxvideo_guest.h int hgsmi_get_mode_hints(struct gen_pool *ctx, unsigned int screens,
ctx                47 drivers/gpu/drm/vboxvideo/vbva_base.c static void vbva_buffer_flush(struct gen_pool *ctx)
ctx                51 drivers/gpu/drm/vboxvideo/vbva_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA, VBVA_FLUSH);
ctx                57 drivers/gpu/drm/vboxvideo/vbva_base.c 	hgsmi_buffer_submit(ctx, p);
ctx                58 drivers/gpu/drm/vboxvideo/vbva_base.c 	hgsmi_buffer_free(ctx, p);
ctx                61 drivers/gpu/drm/vboxvideo/vbva_base.c bool vbva_write(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx                81 drivers/gpu/drm/vboxvideo/vbva_base.c 			vbva_buffer_flush(ctx);
ctx               108 drivers/gpu/drm/vboxvideo/vbva_base.c 			     struct gen_pool *ctx, s32 screen, bool enable)
ctx               113 drivers/gpu/drm/vboxvideo/vbva_base.c 	p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA, VBVA_ENABLE);
ctx               125 drivers/gpu/drm/vboxvideo/vbva_base.c 	hgsmi_buffer_submit(ctx, p);
ctx               132 drivers/gpu/drm/vboxvideo/vbva_base.c 	hgsmi_buffer_free(ctx, p);
ctx               137 drivers/gpu/drm/vboxvideo/vbva_base.c bool vbva_enable(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx               147 drivers/gpu/drm/vboxvideo/vbva_base.c 	ret = vbva_inform_host(vbva_ctx, ctx, screen, true);
ctx               149 drivers/gpu/drm/vboxvideo/vbva_base.c 		vbva_disable(vbva_ctx, ctx, screen);
ctx               154 drivers/gpu/drm/vboxvideo/vbva_base.c void vbva_disable(struct vbva_buf_ctx *vbva_ctx, struct gen_pool *ctx,
ctx               161 drivers/gpu/drm/vboxvideo/vbva_base.c 	vbva_inform_host(vbva_ctx, ctx, screen, false);
ctx               165 drivers/gpu/drm/vboxvideo/vbva_base.c 			      struct gen_pool *ctx)
ctx               180 drivers/gpu/drm/vboxvideo/vbva_base.c 		vbva_buffer_flush(ctx);
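Every vboxvideo helper above drives the host through one buffer lifecycle on the shared gen_pool: hgsmi_buffer_alloc() carves a command buffer, the caller fills in the payload, hgsmi_buffer_submit() hands it to the host, and hgsmi_buffer_free() returns it to the pool. A schematic sketch; struct my_payload is a hypothetical stand-in for the per-command payload types:

    struct my_payload *p;   /* hypothetical payload layout */

    p = hgsmi_buffer_alloc(ctx, sizeof(*p), HGSMI_CH_VBVA, VBVA_FLUSH);
    if (!p)
            return -ENOMEM;

    /* ... fill in *p ... */

    hgsmi_buffer_submit(ctx, p);    /* host consumes the command */
    hgsmi_buffer_free(ctx, p);      /* hand the buffer back to the pool */
    return 0;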
ctx               967 drivers/gpu/drm/vc4/vc4_crtc.c 			 struct drm_modeset_acquire_ctx *ctx)
ctx               972 drivers/gpu/drm/vc4/vc4_crtc.c 		return drm_atomic_helper_page_flip(crtc, fb, event, flags, ctx);
ctx                62 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	struct ttm_operation_ctx ctx = { false, false };
ctx                75 drivers/gpu/drm/virtio/virtgpu_ioctl.c 		ret = ttm_bo_validate(bo, &qobj->placement, &ctx);
ctx               363 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               384 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx);
ctx               416 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               434 drivers/gpu/drm/virtio/virtgpu_ioctl.c 	ret = ttm_bo_validate(&qobj->tbo, &qobj->placement, &ctx);
ctx               209 drivers/gpu/drm/virtio/virtgpu_object.c 	struct ttm_operation_ctx ctx = {
ctx               220 drivers/gpu/drm/virtio/virtgpu_object.c 		bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm, &ctx);
ctx               353 drivers/gpu/drm/vmwgfx/ttm_object.c 	struct ttm_operation_ctx ctx = {
ctx               382 drivers/gpu/drm/vmwgfx/ttm_object.c 					   &ctx);
ctx               715 drivers/gpu/drm/vmwgfx/ttm_object.c 		struct ttm_operation_ctx ctx = {
ctx               728 drivers/gpu/drm/vmwgfx/ttm_object.c 					   &ctx);
ctx               250 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 				ctx_list)->ctx;
ctx               285 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	bi->ctx = NULL;
ctx               305 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	if (loc->ctx != NULL)
ctx               329 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	if (loc->ctx != NULL) {
ctx               425 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 			vmw_context_binding_state(entry->ctx);
ctx               496 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx               508 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd->body.cid = bi->ctx->id;
ctx               528 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx               540 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd->body.cid = bi->ctx->id;
ctx               564 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx               579 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd->body.c.cid = bi->ctx->id;
ctx               598 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx               604 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
ctx               627 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx               633 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
ctx               681 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		if (!biv->bi.ctx)
ctx               721 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 			((!biv->bi.ctx || biv->bi.scrubbed) ?
ctx               747 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	const struct vmw_resource *ctx = vmw_cbs_context(cbs);
ctx               757 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
ctx               768 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	vmw_fifo_commit(ctx->dev_priv, cmd_size);
ctx               788 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	const struct vmw_resource *ctx = vmw_cbs_context(cbs);
ctx               793 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
ctx               800 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	if (cbs->ds_view.bi.ctx && !cbs->ds_view.bi.scrubbed)
ctx               807 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	vmw_fifo_commit(ctx->dev_priv, cmd_size);
ctx               841 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		if (!biso->bi.ctx)
ctx               869 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	const struct vmw_resource *ctx = vmw_cbs_context(cbs);
ctx               877 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
ctx               885 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	vmw_fifo_commit(ctx->dev_priv, cmd_size);
ctx               949 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		if (!biv->bi.ctx || biv->bi.scrubbed) {
ctx               982 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	const struct vmw_resource *ctx = vmw_cbs_context(cbs);
ctx               991 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
ctx              1001 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	vmw_fifo_commit(ctx->dev_priv, cmd_size);
ctx              1064 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		vmw_context_binding_state(bi->ctx);
ctx              1084 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		vmw_context_binding_state(bi->ctx);
ctx              1101 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		vmw_context_binding_state(bi->ctx);
ctx              1120 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 		vmw_context_binding_state(bi->ctx);
ctx              1138 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct vmw_private *dev_priv = bi->ctx->dev_priv;
ctx              1144 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
ctx              1177 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 	struct ttm_operation_ctx ctx = {
ctx              1184 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c 				&ctx);
ctx                72 drivers/gpu/drm/vmwgfx/vmwgfx_binding.h 	struct vmw_resource *ctx;
ctx               452 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 	struct ttm_operation_ctx ctx = {
ctx               467 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ret = dst->ttm->bdev->driver->ttm_tt_populate(dst->ttm, &ctx);
ctx               473 drivers/gpu/drm/vmwgfx/vmwgfx_blit.c 		ret = src->ttm->bdev->driver->ttm_tt_populate(src->ttm, &ctx);
ctx                94 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_operation_ctx ctx = {interruptible, false };
ctx               113 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_validate(bo, placement, &ctx);
ctx               143 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_operation_ctx ctx = {interruptible, false };
ctx               164 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx);
ctx               168 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx);
ctx               218 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_operation_ctx ctx = {interruptible, false };
ctx               250 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ctx.interruptible = false;
ctx               251 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		(void) ttm_bo_validate(bo, &vmw_sys_placement, &ctx);
ctx               258 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 		ret = ttm_bo_validate(bo, &placement, &ctx);
ctx               337 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	struct ttm_operation_ctx ctx = { false, true };
ctx               366 drivers/gpu/drm/vmwgfx/vmwgfx_bo.c 	ret = ttm_bo_validate(bo, &placement, &ctx);
ctx               115 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	struct vmw_cmdbuf_context ctx[SVGA_CB_CONTEXT_MAX];
ctx               193 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for (_i = 0, _ctx = &(_man)->ctx[0]; (_i) < (_man)->num_contexts; \
ctx               321 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c static void vmw_cmdbuf_ctx_init(struct vmw_cmdbuf_context *ctx)
ctx               323 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	INIT_LIST_HEAD(&ctx->hw_submitted);
ctx               324 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	INIT_LIST_HEAD(&ctx->submitted);
ctx               325 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	INIT_LIST_HEAD(&ctx->preempted);
ctx               326 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	ctx->num_hw_submitted = 0;
ctx               340 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 				  struct vmw_cmdbuf_context *ctx)
ctx               342 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	while (ctx->num_hw_submitted < man->max_hw_submitted &&
ctx               343 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	       !list_empty(&ctx->submitted) &&
ctx               344 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	       !ctx->block_submission) {
ctx               348 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		entry = list_first_entry(&ctx->submitted,
ctx               361 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		list_add_tail(&entry->list, &ctx->hw_submitted);
ctx               362 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		ctx->num_hw_submitted++;
ctx               378 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 				   struct vmw_cmdbuf_context *ctx,
ctx               383 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	vmw_cmdbuf_ctx_submit(man, ctx);
ctx               385 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	list_for_each_entry_safe(entry, next, &ctx->hw_submitted, list) {
ctx               393 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		ctx->num_hw_submitted--;
ctx               406 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 			list_add_tail(&entry->list, &ctx->preempted);
ctx               419 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	vmw_cmdbuf_ctx_submit(man, ctx);
ctx               420 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	if (!list_empty(&ctx->submitted))
ctx               437 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	struct vmw_cmdbuf_context *ctx;
ctx               442 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx)
ctx               443 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		vmw_cmdbuf_ctx_process(man, ctx, &notempty);
ctx               481 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	list_add_tail(&header->list, &man->ctx[cb_context].submitted);
ctx               521 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	struct vmw_cmdbuf_context *ctx;
ctx               524 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx)
ctx               576 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx)
ctx               577 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		man->ctx[i].block_submission = true;
ctx               586 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx) {
ctx               588 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		vmw_cmdbuf_ctx_process(man, ctx, &dummy);
ctx               594 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		list_splice_init(&ctx->preempted, restart_head[i].prev);
ctx               601 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		ctx->block_submission = false;
ctx               602 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		list_splice_init(&restart_head[i], &ctx->submitted);
ctx               630 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	struct vmw_cmdbuf_context *ctx;
ctx               636 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx) {
ctx               637 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		if (!list_empty(&ctx->submitted) ||
ctx               638 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		    !list_empty(&ctx->hw_submitted) ||
ctx               639 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		    (check_preempted && !list_empty(&ctx->preempted)))
ctx              1301 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	struct vmw_cmdbuf_context *ctx;
ctx              1332 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 	for_each_cmdbuf_ctx(man, i, ctx)
ctx              1333 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c 		vmw_cmdbuf_ctx_init(ctx);
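vmwgfx keeps an array of vmw_cmdbuf_context structures, one per SVGA command-buffer context, and walks them with the for_each_cmdbuf_ctx() helper whose first line appears above. A sketch of the full macro; the continuation line is reconstructed and should be treated as an assumption:

    #define for_each_cmdbuf_ctx(_man, _i, _ctx)                               \
            for (_i = 0, _ctx = &(_man)->ctx[0]; (_i) < (_man)->num_contexts; \
                 ++(_i), ++(_ctx))

Callers such as vmw_cmdbuf_man_process() then drain each context in turn, as in: for_each_cmdbuf_ctx(man, i, ctx) vmw_cmdbuf_ctx_process(man, ctx, &notempty);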
ctx               543 drivers/gpu/drm/vmwgfx/vmwgfx_context.c void vmw_dx_context_scrub_cotables(struct vmw_resource *ctx,
ctx               547 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 		container_of(ctx, struct vmw_user_context, res);
ctx               680 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct vmw_user_context *ctx =
ctx               684 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	if (ctx->cbs)
ctx               685 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 		vmw_binding_state_free(ctx->cbs);
ctx               689 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	ttm_base_object_kfree(ctx, base);
ctx               702 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct vmw_user_context *ctx =
ctx               704 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct vmw_resource *res = &ctx->res;
ctx               723 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	struct vmw_user_context *ctx;
ctx               740 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 		vmw_user_context_size = ttm_round_pot(sizeof(*ctx)) +
ctx               758 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               759 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	if (unlikely(!ctx)) {
ctx               766 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	res = &ctx->res;
ctx               767 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	ctx->base.shareable = false;
ctx               768 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	ctx->base.tfile = NULL;
ctx               778 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	tmp = vmw_resource_reference(&ctx->res);
ctx               779 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	ret = ttm_base_object_init(tfile, &ctx->base, false, VMW_RES_CONTEXT,
ctx               787 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	arg->cid = ctx->base.handle;
ctx               826 drivers/gpu/drm/vmwgfx/vmwgfx_context.c struct list_head *vmw_context_binding_list(struct vmw_resource *ctx)
ctx               829 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 		container_of(ctx, struct vmw_user_context, res);
ctx               834 drivers/gpu/drm/vmwgfx/vmwgfx_context.c struct vmw_cmdbuf_res_manager *vmw_context_res_man(struct vmw_resource *ctx)
ctx               836 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	return container_of(ctx, struct vmw_user_context, res)->man;
ctx               839 drivers/gpu/drm/vmwgfx/vmwgfx_context.c struct vmw_resource *vmw_context_cotable(struct vmw_resource *ctx,
ctx               845 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	return container_of(ctx, struct vmw_user_context, res)->
ctx               859 drivers/gpu/drm/vmwgfx/vmwgfx_context.c vmw_context_binding_state(struct vmw_resource *ctx)
ctx               861 drivers/gpu/drm/vmwgfx/vmwgfx_context.c 	return container_of(ctx, struct vmw_user_context, res)->cbs;
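The vmwgfx_context.c accessors above all reduce to a container_of(): the generic struct vmw_resource embedded in struct vmw_user_context is mapped back to its wrapper to reach the binding state, the command-buffer resource manager, or a cotable. A sketch of the shape these one-liners share, taken from the binding-state accessor indexed above:

    struct vmw_ctx_binding_state *
    vmw_context_binding_state(struct vmw_resource *ctx)
    {
            /* ctx is the 'res' member embedded inside vmw_user_context. */
            return container_of(ctx, struct vmw_user_context, res)->cbs;
    }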
ctx                52 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	struct vmw_resource *ctx;
ctx               180 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	WARN_ON(vcotbl->ctx->id == SVGA3D_INVALID_ID);
ctx               184 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	cmd->body.cid = vcotbl->ctx->id;
ctx               273 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 		cmd0->body.cid = vcotbl->ctx->id;
ctx               280 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	cmd1->body.cid = vcotbl->ctx->id;
ctx               320 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 		vmw_dx_context_scrub_cotables(vcotbl->ctx, readback);
ctx               356 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 		cmd->body.cid = vcotbl->ctx->id;
ctx               383 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	struct ttm_operation_ctx ctx = { false, false };
ctx               452 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	ret = ttm_bo_validate(bo, &vmw_mob_placement, &ctx);
ctx               566 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 				       struct vmw_resource *ctx,
ctx               610 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	vcotbl->ctx = ctx;
ctx               955 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 	if (dev_priv->ctx.staged_bindings)
ctx               956 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 		vmw_binding_state_free(dev_priv->ctx.staged_bindings);
ctx               968 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 	if (dev_priv->ctx.res_ht_initialized)
ctx               969 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 		drm_ht_remove(&dev_priv->ctx.res_ht);
ctx               970 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 	vfree(dev_priv->ctx.cmd_bounce);
ctx              1000 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 	if (dev_priv->ctx.staged_bindings)
ctx              1001 drivers/gpu/drm/vmwgfx/vmwgfx_drv.c 		vmw_binding_state_free(dev_priv->ctx.staged_bindings);
ctx               395 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 	struct vmw_validation_context *ctx;
ctx               545 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 	struct vmw_sw_context ctx;
ctx              1184 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern struct list_head *vmw_context_binding_list(struct vmw_resource *ctx);
ctx              1186 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h vmw_context_res_man(struct vmw_resource *ctx);
ctx              1187 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern struct vmw_resource *vmw_context_cotable(struct vmw_resource *ctx,
ctx              1189 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern struct list_head *vmw_context_binding_list(struct vmw_resource *ctx);
ctx              1192 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h vmw_context_binding_state(struct vmw_resource *ctx);
ctx              1193 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_dx_context_scrub_cotables(struct vmw_resource *ctx,
ctx              1262 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 			     struct vmw_resource *ctx,
ctx              1304 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h 					      struct vmw_resource *ctx,
ctx              1308 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h extern void vmw_cotable_add_resource(struct vmw_resource *ctx,
ctx               117 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx               144 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 					struct vmw_resource *ctx);
ctx               240 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	node->ctx = res;
ctx               314 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			vmw_validation_res_set_dirty(sw_context->ctx,
ctx               321 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_add_resource(sw_context->ctx, res, priv_size,
ctx               363 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			vmw_validation_res_set_dirty(sw_context->ctx,
ctx               368 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_add_resource(sw_context->ctx, res, 0, dirty,
ctx               455 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 					struct vmw_resource *ctx)
ctx               464 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	if (dev_priv->has_dx && vmw_res_type(ctx) == vmw_res_dx_context) {
ctx               466 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			res = vmw_context_cotable(ctx, i);
ctx               479 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	binding_list = vmw_context_binding_list(ctx);
ctx               492 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	if (dev_priv->has_dx && vmw_res_type(ctx) == vmw_res_dx_context) {
ctx               495 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		dx_query_mob = vmw_context_get_dx_query_mob(ctx);
ctx               497 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			ret = vmw_validation_add_bo(sw_context->ctx,
ctx               522 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	rel = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*rel));
ctx               609 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_res_reserve(sw_context->ctx, true);
ctx               668 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			vmw_validation_res_set_dirty(sw_context->ctx,
ctx               673 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ret = vmw_validation_preload_res(sw_context->ctx, size);
ctx               759 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ret = vmw_rebind_all_dx_query(val->ctx);
ctx               806 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		binding.bi.ctx = ctx_node->ctx;
ctx               873 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx               887 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 				&cmd->body.cid, &ctx);
ctx               901 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		node = vmw_execbuf_info_from_res(sw_context, ctx);
ctx               905 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		binding.bi.ctx = ctx;
ctx              1048 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			ret = vmw_validation_add_bo(sw_context->ctx,
ctx              1056 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ret = vmw_validation_add_bo(sw_context->ctx,
ctx              1093 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		struct vmw_resource *ctx;
ctx              1097 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ctx = ctx_entry->res;
ctx              1099 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ret = vmw_fifo_emit_dummy_query(dev_priv, ctx->id);
ctx              1164 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	vmw_validation_preload_bo(sw_context->ctx);
ctx              1171 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, true, false);
ctx              1176 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc));
ctx              1219 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	vmw_validation_preload_bo(sw_context->ctx);
ctx              1226 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_add_bo(sw_context->ctx, vmw_bo, false, false);
ctx              1231 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	reloc = vmw_validation_mem_alloc(sw_context->ctx, sizeof(*reloc));
ctx              1270 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	cotable_res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXQUERY);
ctx              1308 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	sw_context->dx_query_ctx = sw_context->dx_ctx_node->ctx;
ctx              1629 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx              1637 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 				&cmd->body.cid, &ctx);
ctx              1662 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			node = vmw_execbuf_info_from_res(sw_context, ctx);
ctx              1666 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 			binding.bi.ctx = ctx;
ctx              1725 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	vmw_validation_res_switch_backup(sw_context->ctx, info, vbo,
ctx              1915 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx              1921 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 				&cmd->body.cid, &ctx);
ctx              1929 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_compat_shader_add(dev_priv, vmw_context_res_man(ctx),
ctx              1954 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx              1960 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 				&cmd->body.cid, &ctx);
ctx              1967 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_shader_remove(vmw_context_res_man(ctx), cmd->body.shid,
ctx              1991 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx, *res = NULL;
ctx              2005 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 				&cmd->body.cid, &ctx);
ctx              2018 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		res = vmw_shader_lookup(vmw_context_res_man(ctx),
ctx              2045 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ctx_info = vmw_execbuf_info_from_res(sw_context, ctx);
ctx              2049 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	binding.bi.ctx = ctx;
ctx              2134 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	binding.bi.ctx = ctx_node->ctx;
ctx              2229 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	binding.bi.ctx = ctx_node->ctx;
ctx              2281 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		binding.bi.ctx = ctx_node->ctx;
ctx              2322 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	binding.bi.ctx = ctx_node->ctx;
ctx              2441 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	res = vmw_context_cotable(ctx_node->ctx, vmw_view_cotables[view_type]);
ctx              2446 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	return vmw_view_add(sw_context->man, ctx_node->ctx, srf, view_type,
ctx              2492 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		binding.bi.ctx = ctx_node->ctx;
ctx              2526 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	res = vmw_context_cotable(ctx_node->ctx, vmw_so_cotables[so_type]);
ctx              2645 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	res = vmw_context_cotable(ctx_node->ctx, SVGA_COTABLE_DXSHADER);
ctx              2650 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	return vmw_dx_shader_add(sw_context->man, ctx_node->ctx,
ctx              2691 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_resource *ctx;
ctx              2701 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 					&ctx);
ctx              2711 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 		ctx = ctx_node->ctx;
ctx              2714 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	res = vmw_shader_lookup(vmw_context_res_man(ctx), cmd->body.shid, 0);
ctx              3489 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 					  sw_context->dx_ctx_node->ctx->id);
ctx              3522 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	u32 id = ((sw_context->dx_ctx_node) ? sw_context->dx_ctx_node->ctx->id :
ctx              3607 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	ret = vmw_validation_preload_res(sw_context->ctx, size);
ctx              3638 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	struct vmw_sw_context *sw_context = &dev_priv->ctx;
ctx              3722 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	sw_context->ctx = &val_ctx;
ctx              3789 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c 	vmw_validation_bo_fence(sw_context->ctx, fence);
ctx               442 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 	struct drm_modeset_acquire_ctx ctx;
ctx               445 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 	drm_modeset_acquire_init(&ctx, 0);
ctx               448 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 	ret = crtc->funcs->set_config(set, &ctx);
ctx               451 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 		drm_modeset_backoff(&ctx);
ctx               455 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 	drm_modeset_drop_locks(&ctx);
ctx               456 drivers/gpu/drm/vmwgfx/vmwgfx_fb.c 	drm_modeset_acquire_fini(&ctx);
ctx                50 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c 	u64 ctx;
ctx               319 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c 	fman->ctx = dma_fence_context_alloc(1);
ctx               346 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c 		       fman->ctx, seqno);
ctx               610 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c 	struct ttm_operation_ctx ctx = {
ctx               622 drivers/gpu/drm/vmwgfx/vmwgfx_fence.c 				   &ctx);
ctx              2028 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	struct drm_modeset_acquire_ctx ctx;
ctx              2034 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	drm_modeset_acquire_init(&ctx, 0);
ctx              2037 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		ret = drm_modeset_lock(&crtc->mutex, &ctx);
ctx              2040 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 				drm_modeset_backoff(&ctx);
ctx              2088 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	drm_modeset_drop_locks(&ctx);
ctx              2089 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	drm_modeset_acquire_fini(&ctx);
ctx              2098 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 			  struct drm_modeset_acquire_ctx *ctx)
ctx              2556 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 				      struct vmw_validation_context *ctx,
ctx              2565 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	if (file_priv || user_fence_rep || vmw_validation_has_bos(ctx) ||
ctx              2569 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	vmw_validation_done(ctx, fence);
ctx               395 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 			   struct drm_modeset_acquire_ctx *ctx);
ctx               426 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 				      struct vmw_validation_context *ctx,
ctx               241 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_operation_ctx ctx = {
ctx               268 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_driver.ttm_tt_populate(batch->otable_bo->ttm, &ctx);
ctx               436 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_operation_ctx ctx = {
ctx               453 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_driver.ttm_tt_populate(mob->pt_bo->ttm, &ctx);
ctx               479 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	struct ttm_operation_ctx ctx = { true, false };
ctx               506 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 			      &ctx);
ctx               920 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 	struct ttm_operation_ctx ctx = { interruptible, false };
ctx               941 drivers/gpu/drm/vmwgfx/vmwgfx_resource.c 					 &ctx);
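
struct ttm_operation_ctx appears above both positionally ({ true, false }, { interruptible, false }) and with designated initializers; in kernels of this vintage the first two members are interruptible and no_wait_gpu, so the two spellings below are equivalent:

	struct ttm_operation_ctx ctx = { true, false };	/* positional */

	struct ttm_operation_ctx ctx2 = {
		.interruptible = true,	/* waits may be interrupted by signals */
		.no_wait_gpu = false,	/* allowed to sleep on GPU activity */
	};

	ret = ttm_bo_validate(bo, &placement, &ctx);
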
ctx                49 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct vmw_resource *ctx;
ctx               397 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), shader->ctx->id);
ctx               403 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd->body.cid = shader->ctx->id;
ctx               490 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	cmd->body.cid = shader->ctx->id;
ctx               589 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 		      struct vmw_resource *ctx,
ctx               596 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct vmw_private *dev_priv = ctx->dev_priv;
ctx               625 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	shader->ctx = ctx;
ctx               627 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 		(vmw_context_cotable(ctx, SVGA_COTABLE_DXSHADER));
ctx               725 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct ttm_operation_ctx ctx = {
ctx               738 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 				   &ctx);
ctx               796 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct ttm_operation_ctx ctx = {
ctx               809 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 				   &ctx);
ctx               966 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	struct ttm_operation_ctx ctx = { false, true };
ctx              1002 drivers/gpu/drm/vmwgfx/vmwgfx_shader.c 	ret = ttm_bo_validate(&buf->base, &vmw_sys_placement, &ctx);
ctx               152 drivers/gpu/drm/vmwgfx/vmwgfx_simple_resource.c 	struct ttm_operation_ctx ctx = {
ctx               170 drivers/gpu/drm/vmwgfx/vmwgfx_simple_resource.c 				   &ctx);
ctx                63 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 	struct vmw_resource *ctx;      /* Immutable */
ctx               173 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 	cmd = VMW_FIFO_RESERVE_DX(res->dev_priv, view->cmd_size, view->ctx->id);
ctx               216 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), view->ctx->id);
ctx               311 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 		 struct vmw_resource *ctx,
ctx               325 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 	struct vmw_private *dev_priv = ctx->dev_priv;
ctx               362 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 	view->ctx = ctx;
ctx               365 drivers/gpu/drm/vmwgfx/vmwgfx_so.c 		(vmw_context_cotable(ctx, vmw_view_cotables[view_type]));
ctx               138 drivers/gpu/drm/vmwgfx/vmwgfx_so.h 			struct vmw_resource *ctx,
ctx               713 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	struct ttm_operation_ctx ctx = {
ctx               757 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 				   size, &ctx);
ctx              1371 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 	struct ttm_operation_ctx ctx = {
ctx              1417 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c 				   user_accounting_size, &ctx);
ctx               413 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 	struct ttm_operation_ctx ctx = {
ctx               440 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_mem_global_alloc(glob, vmw_tt->sg_alloc_size, &ctx);
ctx               654 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c static int vmw_ttm_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx)
ctx               668 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_mem_global_alloc(glob, size, ctx);
ctx               673 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 					ctx);
ctx               677 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c 		ret = ttm_pool_populate(ttm, ctx);
ctx                41 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_glue.c 	static struct ttm_operation_ctx ctx = {.interruptible = false,
ctx                45 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_glue.c 	return ttm_mem_global_alloc(vmw_mem_glob(dev_priv), size, &ctx);
ctx                99 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void *vmw_validation_mem_alloc(struct vmw_validation_context *ctx,
ctx               108 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->mem_size_left < size) {
ctx               111 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (ctx->vm && ctx->vm_size_left < PAGE_SIZE) {
ctx               112 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			int ret = ctx->vm->reserve_mem(ctx->vm, ctx->vm->gran);
ctx               117 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			ctx->vm_size_left += ctx->vm->gran;
ctx               118 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			ctx->total_mem += ctx->vm->gran;
ctx               125 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (ctx->vm)
ctx               126 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			ctx->vm_size_left -= PAGE_SIZE;
ctx               128 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_add_tail(&page->lru, &ctx->page_list);
ctx               129 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ctx->page_address = page_address(page);
ctx               130 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ctx->mem_size_left = PAGE_SIZE;
ctx               133 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	addr = (void *) (ctx->page_address + (PAGE_SIZE - ctx->mem_size_left));
ctx               134 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->mem_size_left -= size;
ctx               147 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c static void vmw_validation_mem_free(struct vmw_validation_context *ctx)
ctx               151 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry_safe(entry, next, &ctx->page_list, lru) {
ctx               156 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->mem_size_left = 0;
ctx               157 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->vm && ctx->total_mem) {
ctx               158 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ctx->vm->unreserve_mem(ctx->vm, ctx->total_mem);
ctx               159 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ctx->total_mem = 0;
ctx               160 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ctx->vm_size_left = 0;
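
vmw_validation_mem_alloc()/_free() above implement a page-backed bump allocator: small validation nodes are carved sequentially out of whole pages, the pages sit on a list, and everything is freed in one sweep when the context is torn down. A simplified sketch of the idea (hypothetical names, without the reserve_mem/unreserve_mem accounting hooks, assuming size <= PAGE_SIZE):

	void *bump_alloc(struct bump_ctx *c, size_t size)
	{
		void *addr;

		size = ALIGN(size, sizeof(long));
		if (c->mem_size_left < size) {
			struct page *page = alloc_page(GFP_KERNEL | __GFP_ZERO);

			if (!page)
				return NULL;
			list_add_tail(&page->lru, &c->page_list);
			c->page_address = page_address(page);
			c->mem_size_left = PAGE_SIZE;
		}
		addr = c->page_address + (PAGE_SIZE - c->mem_size_left);
		c->mem_size_left -= size;
		return addr;
	}
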
ctx               174 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c vmw_validation_find_bo_dup(struct vmw_validation_context *ctx,
ctx               179 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (!ctx->merge_dups)
ctx               182 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->ht) {
ctx               185 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (!drm_ht_find_item(ctx->ht, (unsigned long) vbo, &hash))
ctx               190 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_for_each_entry(entry, &ctx->bo_list, base.head) {
ctx               211 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c vmw_validation_find_res_dup(struct vmw_validation_context *ctx,
ctx               216 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (!ctx->merge_dups)
ctx               219 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->ht) {
ctx               222 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (!drm_ht_find_item(ctx->ht, (unsigned long) res, &hash))
ctx               227 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_for_each_entry(entry, &ctx->resource_ctx_list, head) {
ctx               234 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_for_each_entry(entry, &ctx->resource_list, head) {
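
vmw_validation_find_bo_dup() and _find_res_dup() above share one shape: skip the search unless the context merges duplicates, probe the drm_open_hash table when the caller supplied one, and fall back to a linear list walk otherwise. Roughly (node and context types hypothetical):

	static struct my_node *find_dup(struct my_ctx *ctx, unsigned long key)
	{
		struct my_node *entry;

		if (!ctx->merge_dups)
			return NULL;		/* always add a fresh node */

		if (ctx->ht) {			/* fast path: hash probe */
			struct drm_hash_item *hash;

			if (!drm_ht_find_item(ctx->ht, key, &hash))
				return container_of(hash, struct my_node,
						    hash);
			return NULL;
		}

		list_for_each_entry(entry, &ctx->node_list, head)
			if (entry->key == key)	/* slow path: linear scan */
				return entry;

		return NULL;
	}
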
ctx               255 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_add_bo(struct vmw_validation_context *ctx,
ctx               262 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	bo_node = vmw_validation_find_bo_dup(ctx, vbo);
ctx               273 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		bo_node = vmw_validation_mem_alloc(ctx, sizeof(*bo_node));
ctx               277 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		if (ctx->ht) {
ctx               279 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			ret = drm_ht_insert_item(ctx->ht, &bo_node->hash);
ctx               291 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_add_tail(&val_buf->head, &ctx->bo_list);
ctx               310 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_add_resource(struct vmw_validation_context *ctx,
ctx               320 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	node = vmw_validation_find_res_dup(ctx, res);
ctx               326 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	node = vmw_validation_mem_alloc(ctx, sizeof(*node) + priv_size);
ctx               332 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->ht) {
ctx               334 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		ret = drm_ht_insert_item(ctx->ht, &node->hash);
ctx               347 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_add_tail(&node->head, &ctx->resource_list);
ctx               352 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			list_add(&node->head, &ctx->resource_ctx_list);
ctx               355 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			list_add_tail(&node->head, &ctx->resource_ctx_list);
ctx               358 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			list_add_tail(&node->head, &ctx->resource_list);
ctx               386 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_res_set_dirty(struct vmw_validation_context *ctx,
ctx               411 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_res_switch_backup(struct vmw_validation_context *ctx,
ctx               437 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_res_reserve(struct vmw_validation_context *ctx,
ctx               443 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_splice_init(&ctx->resource_ctx_list, &ctx->resource_list);
ctx               445 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(val, &ctx->resource_list, head) {
ctx               457 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 				(ctx, vbo, vmw_resource_needs_backup(res),
ctx               467 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_res_unreserve(ctx, true);
ctx               478 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_res_unreserve(struct vmw_validation_context *ctx,
ctx               483 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_splice_init(&ctx->resource_ctx_list, &ctx->resource_list);
ctx               485 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_for_each_entry(val, &ctx->resource_list, head) {
ctx               492 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		list_for_each_entry(val, &ctx->resource_list, head) {
ctx               518 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	struct ttm_operation_ctx ctx = {
ctx               528 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		return ttm_bo_validate(bo, &vmw_mob_placement, &ctx);
ctx               537 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, &ctx);
ctx               546 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = ttm_bo_validate(bo, &vmw_vram_placement, &ctx);
ctx               559 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_bo_validate(struct vmw_validation_context *ctx, bool intr)
ctx               564 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(entry, &ctx->bo_list, base.head) {
ctx               566 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 			struct ttm_operation_ctx ctx = {
ctx               572 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 					      &vmw_nonfixed_placement, &ctx);
ctx               595 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_res_validate(struct vmw_validation_context *ctx, bool intr)
ctx               600 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(val, &ctx->resource_list, head) {
ctx               616 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 				(ctx, vbo, vmw_resource_needs_backup(res),
ctx               637 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_drop_ht(struct vmw_validation_context *ctx)
ctx               642 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (!ctx->ht)
ctx               645 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(entry, &ctx->bo_list, base.head)
ctx               646 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		(void) drm_ht_remove_item(ctx->ht, &entry->hash);
ctx               648 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(val, &ctx->resource_list, head)
ctx               649 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		(void) drm_ht_remove_item(ctx->ht, &val->hash);
ctx               651 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(val, &ctx->resource_ctx_list, head)
ctx               652 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		(void) drm_ht_remove_item(ctx->ht, &val->hash);
ctx               654 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->ht = NULL;
ctx               665 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_unref_lists(struct vmw_validation_context *ctx)
ctx               670 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(entry, &ctx->bo_list, base.head) {
ctx               675 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_splice_init(&ctx->resource_ctx_list, &ctx->resource_list);
ctx               676 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	list_for_each_entry(val, &ctx->resource_list, head)
ctx               683 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	INIT_LIST_HEAD(&ctx->bo_list);
ctx               684 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	INIT_LIST_HEAD(&ctx->resource_list);
ctx               686 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_mem_free(ctx);
ctx               704 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_prepare(struct vmw_validation_context *ctx,
ctx               719 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->res_mutex = mutex;
ctx               720 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = vmw_validation_res_reserve(ctx, intr);
ctx               724 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = vmw_validation_bo_reserve(ctx, intr);
ctx               728 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = vmw_validation_bo_validate(ctx, intr);
ctx               732 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ret = vmw_validation_res_validate(ctx, intr);
ctx               739 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_bo_backoff(ctx);
ctx               741 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_res_unreserve(ctx, true);
ctx               757 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_revert(struct vmw_validation_context *ctx)
ctx               759 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_bo_backoff(ctx);
ctx               760 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_res_unreserve(ctx, true);
ctx               761 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->res_mutex)
ctx               762 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		mutex_unlock(ctx->res_mutex);
ctx               763 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_unref_lists(ctx);
ctx               775 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c void vmw_validation_done(struct vmw_validation_context *ctx,
ctx               778 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_bo_fence(ctx, fence);
ctx               779 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_res_unreserve(ctx, false);
ctx               780 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (ctx->res_mutex)
ctx               781 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 		mutex_unlock(ctx->res_mutex);
ctx               782 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	vmw_validation_unref_lists(ctx);
ctx               796 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_preload_bo(struct vmw_validation_context *ctx)
ctx               800 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (!vmw_validation_mem_alloc(ctx, size))
ctx               803 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->mem_size_left += size;
ctx               819 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c int vmw_validation_preload_res(struct vmw_validation_context *ctx,
ctx               825 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	if (!vmw_validation_mem_alloc(ctx, size))
ctx               828 drivers/gpu/drm/vmwgfx/vmwgfx_validation.c 	ctx->mem_size_left += size;
ctx               126 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_has_bos(struct vmw_validation_context *ctx)
ctx               128 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	return !list_empty(&ctx->bo_list);
ctx               140 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_set_val_mem(struct vmw_validation_context *ctx,
ctx               143 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	ctx->vm = vm;
ctx               153 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h static inline void vmw_validation_set_ht(struct vmw_validation_context *ctx,
ctx               156 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	ctx->ht = ht;
ctx               169 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_bo_reserve(struct vmw_validation_context *ctx,
ctx               172 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	return ttm_eu_reserve_buffers(&ctx->ticket, &ctx->bo_list, intr,
ctx               185 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_bo_backoff(struct vmw_validation_context *ctx)
ctx               187 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	ttm_eu_backoff_reservation(&ctx->ticket, &ctx->bo_list);
ctx               199 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_bo_fence(struct vmw_validation_context *ctx,
ctx               202 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	ttm_eu_fence_buffer_objects(&ctx->ticket, &ctx->bo_list,
ctx               214 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h vmw_validation_context_init(struct vmw_validation_context *ctx)
ctx               216 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	memset(ctx, 0, sizeof(*ctx));
ctx               217 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	INIT_LIST_HEAD(&ctx->resource_list);
ctx               218 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	INIT_LIST_HEAD(&ctx->resource_ctx_list);
ctx               219 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h 	INIT_LIST_HEAD(&ctx->bo_list);
ctx               234 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_add_bo(struct vmw_validation_context *ctx,
ctx               240 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_bo_validate(struct vmw_validation_context *ctx, bool intr);
ctx               241 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_unref_lists(struct vmw_validation_context *ctx);
ctx               242 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_add_resource(struct vmw_validation_context *ctx,
ctx               248 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_drop_ht(struct vmw_validation_context *ctx);
ctx               249 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_res_reserve(struct vmw_validation_context *ctx,
ctx               251 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_res_unreserve(struct vmw_validation_context *ctx,
ctx               253 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_res_switch_backup(struct vmw_validation_context *ctx,
ctx               257 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_res_validate(struct vmw_validation_context *ctx, bool intr);
ctx               259 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_prepare(struct vmw_validation_context *ctx,
ctx               261 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_revert(struct vmw_validation_context *ctx);
ctx               262 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_done(struct vmw_validation_context *ctx,
ctx               265 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void *vmw_validation_mem_alloc(struct vmw_validation_context *ctx,
ctx               267 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_preload_bo(struct vmw_validation_context *ctx);
ctx               268 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h int vmw_validation_preload_res(struct vmw_validation_context *ctx,
ctx               270 drivers/gpu/drm/vmwgfx/vmwgfx_validation.h void vmw_validation_res_set_dirty(struct vmw_validation_context *ctx,
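
Taken together, the vmwgfx_validation entries describe one lifecycle: initialize the context, collect buffers and resources (deduplicated as above), prepare (reserve and validate everything), do the work, then either done (fence and release) on success or revert (back off) on failure. A sketch of a caller, with the trailing parameters of vmw_validation_add_bo() and vmw_validation_prepare() assumed:

	struct vmw_validation_context val_ctx;
	int ret;

	vmw_validation_context_init(&val_ctx);

	ret = vmw_validation_add_bo(&val_ctx, vbo, false, false);
	if (ret)
		goto out_unref;

	ret = vmw_validation_prepare(&val_ctx, &res_mutex, true);
	if (ret)
		goto out_unref;	/* prepare backs itself off on failure */

	/* ... build and submit the command stream, create a fence ... */

	vmw_validation_done(&val_ctx, fence);	/* fence BOs, unreserve, unref */
	return 0;

out_unref:
	vmw_validation_unref_lists(&val_ctx);
	return ret;

Had submission failed after a successful prepare, vmw_validation_revert() would take the place of vmw_validation_done().
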
ctx                45 drivers/gpu/drm/xen/xen_drm_front_conn.c 			    struct drm_modeset_acquire_ctx *ctx,
ctx                34 drivers/gpu/host1x/debug.c 	o->fn(o->ctx, o->buf, len, false);
ctx                46 drivers/gpu/host1x/debug.c 	o->fn(o->ctx, o->buf, len, true);
ctx               117 drivers/gpu/host1x/debug.c 		.ctx = s
ctx               129 drivers/gpu/host1x/debug.c 		.ctx = s
ctx                16 drivers/gpu/host1x/debug.h 	void (*fn)(void *ctx, const char *str, size_t len, bool cont);
ctx                17 drivers/gpu/host1x/debug.h 	void *ctx;
ctx                21 drivers/gpu/host1x/debug.h static inline void write_to_seqfile(void *ctx, const char *str, size_t len,
ctx                24 drivers/gpu/host1x/debug.h 	seq_write((struct seq_file *)ctx, str, len);
ctx                27 drivers/gpu/host1x/debug.h static inline void write_to_printk(void *ctx, const char *str, size_t len,
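
The host1x debug entries build a tiny output-sink abstraction: a function pointer plus an opaque ctx, so the same dump code can write either into a seq_file or to the kernel log. Wiring up the two flavors looks roughly like:

	struct output seq_sink = {
		.fn = write_to_seqfile,	/* forwards to seq_write() */
		.ctx = s,		/* the struct seq_file * */
	};

	struct output printk_sink = {
		.fn = write_to_printk,	/* ctx is unused for printk */
	};
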
ctx               321 drivers/gpu/ipu-v3/ipu-image-convert.c static void dump_format(struct ipu_image_convert_ctx *ctx,
ctx               324 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx               329 drivers/gpu/ipu-v3/ipu-image-convert.c 		chan->ic_task, ctx,
ctx               391 drivers/gpu/ipu-v3/ipu-image-convert.c static int calc_image_resize_coefficients(struct ipu_image_convert_ctx *ctx,
ctx               406 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx               442 drivers/gpu/ipu-v3/ipu-image-convert.c 	dev_dbg(ctx->chan->priv->ipu->dev,
ctx               451 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->downsize_coeff_h = downsize_coeff_h;
ctx               452 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->downsize_coeff_v = downsize_coeff_v;
ctx               453 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->image_resize_coeff_h = resize_coeff_h;
ctx               454 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->image_resize_coeff_v = resize_coeff_v;
ctx               455 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->in.num_cols = cols;
ctx               456 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->in.num_rows = rows;
ctx               482 drivers/gpu/ipu-v3/ipu-image-convert.c static void find_best_seam(struct ipu_image_convert_ctx *ctx,
ctx               495 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct device *dev = ctx->chan->priv->ipu->dev;
ctx               650 drivers/gpu/ipu-v3/ipu-image-convert.c static void fill_tile_column(struct ipu_image_convert_ctx *ctx,
ctx               663 drivers/gpu/ipu-v3/ipu-image-convert.c 		out_tile = &out->tile[ctx->out_tile_map[tile_idx]];
ctx               668 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx               683 drivers/gpu/ipu-v3/ipu-image-convert.c static void fill_tile_row(struct ipu_image_convert_ctx *ctx, unsigned int row,
ctx               695 drivers/gpu/ipu-v3/ipu-image-convert.c 		out_tile = &out->tile[ctx->out_tile_map[tile_idx]];
ctx               700 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx               715 drivers/gpu/ipu-v3/ipu-image-convert.c static void find_seams(struct ipu_image_convert_ctx *ctx,
ctx               719 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct device *dev = ctx->chan->priv->ipu->dev;
ctx               729 drivers/gpu/ipu-v3/ipu-image-convert.c 							ctx->rot_mode);
ctx               731 drivers/gpu/ipu-v3/ipu-image-convert.c 							  ctx->rot_mode);
ctx               739 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx               752 drivers/gpu/ipu-v3/ipu-image-convert.c 		bool allow_in_overshoot = ipu_rot_mode_is_irt(ctx->rot_mode) ||
ctx               753 drivers/gpu/ipu-v3/ipu-image-convert.c 					  !(ctx->rot_mode & IPU_ROT_BIT_HFLIP);
ctx               755 drivers/gpu/ipu-v3/ipu-image-convert.c 					   !(ctx->rot_mode & IPU_ROT_BIT_HFLIP);
ctx               764 drivers/gpu/ipu-v3/ipu-image-convert.c 		find_best_seam(ctx, col,
ctx               769 drivers/gpu/ipu-v3/ipu-image-convert.c 			       ctx->downsize_coeff_h, ctx->image_resize_coeff_h,
ctx               772 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->rot_mode & IPU_ROT_BIT_HFLIP)
ctx               777 drivers/gpu/ipu-v3/ipu-image-convert.c 		fill_tile_column(ctx, col, in, in_left, in_right - in_left,
ctx               788 drivers/gpu/ipu-v3/ipu-image-convert.c 	flipped_out_left = (ctx->rot_mode & IPU_ROT_BIT_HFLIP) ?
ctx               791 drivers/gpu/ipu-v3/ipu-image-convert.c 	fill_tile_column(ctx, 0, in, 0, in_right,
ctx               802 drivers/gpu/ipu-v3/ipu-image-convert.c 		find_best_seam(ctx, row,
ctx               806 drivers/gpu/ipu-v3/ipu-image-convert.c 			       ctx->downsize_coeff_v, ctx->image_resize_coeff_v,
ctx               809 drivers/gpu/ipu-v3/ipu-image-convert.c 		if ((ctx->rot_mode & IPU_ROT_BIT_VFLIP) ^
ctx               810 drivers/gpu/ipu-v3/ipu-image-convert.c 		    ipu_rot_mode_is_irt(ctx->rot_mode))
ctx               815 drivers/gpu/ipu-v3/ipu-image-convert.c 		fill_tile_row(ctx, row, in, in_top, in_bottom - in_top,
ctx               826 drivers/gpu/ipu-v3/ipu-image-convert.c 	if ((ctx->rot_mode & IPU_ROT_BIT_VFLIP) ^
ctx               827 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ipu_rot_mode_is_irt(ctx->rot_mode))
ctx               832 drivers/gpu/ipu-v3/ipu-image-convert.c 	fill_tile_row(ctx, 0, in, 0, in_bottom,
ctx               839 drivers/gpu/ipu-v3/ipu-image-convert.c static int calc_tile_dimensions(struct ipu_image_convert_ctx *ctx,
ctx               842 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx               850 drivers/gpu/ipu-v3/ipu-image-convert.c 		max_width <<= ctx->downsize_coeff_h;
ctx               851 drivers/gpu/ipu-v3/ipu-image-convert.c 		max_height <<= ctx->downsize_coeff_v;
ctx               854 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (i = 0; i < ctx->num_tiles; i++) {
ctx               860 drivers/gpu/ipu-v3/ipu-image-convert.c 			tile = &image->tile[ctx->out_tile_map[i]];
ctx               879 drivers/gpu/ipu-v3/ipu-image-convert.c 			chan->ic_task, ctx,
ctx               902 drivers/gpu/ipu-v3/ipu-image-convert.c static int transform_tile_index(struct ipu_image_convert_ctx *ctx,
ctx               905 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx               907 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *s_image = &ctx->in;
ctx               908 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *d_image = &ctx->out;
ctx               912 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->rot_mode == IPU_ROTATE_NONE)
ctx               923 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->rot_mode & IPU_ROT_BIT_90) {
ctx               932 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->rot_mode & IPU_ROT_BIT_HFLIP)
ctx               934 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->rot_mode & IPU_ROT_BIT_VFLIP)
ctx               938 drivers/gpu/ipu-v3/ipu-image-convert.c 		chan->ic_task, ctx, src_col, src_row, dst_col, dst_row);
ctx               955 drivers/gpu/ipu-v3/ipu-image-convert.c static void calc_out_tile_map(struct ipu_image_convert_ctx *ctx)
ctx               957 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *s_image = &ctx->in;
ctx               962 drivers/gpu/ipu-v3/ipu-image-convert.c 			ctx->out_tile_map[tile] =
ctx               963 drivers/gpu/ipu-v3/ipu-image-convert.c 				transform_tile_index(ctx, row, col);
ctx               969 drivers/gpu/ipu-v3/ipu-image-convert.c static int calc_tile_offsets_planar(struct ipu_image_convert_ctx *ctx,
ctx               972 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1022 drivers/gpu/ipu-v3/ipu-image-convert.c 					chan->ic_task, ctx,
ctx              1034 drivers/gpu/ipu-v3/ipu-image-convert.c static int calc_tile_offsets_packed(struct ipu_image_convert_ctx *ctx,
ctx              1037 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1064 drivers/gpu/ipu-v3/ipu-image-convert.c 					chan->ic_task, ctx,
ctx              1076 drivers/gpu/ipu-v3/ipu-image-convert.c static int calc_tile_offsets(struct ipu_image_convert_ctx *ctx,
ctx              1080 drivers/gpu/ipu-v3/ipu-image-convert.c 		return calc_tile_offsets_planar(ctx, image);
ctx              1082 drivers/gpu/ipu-v3/ipu-image-convert.c 	return calc_tile_offsets_packed(ctx, image);
ctx              1111 drivers/gpu/ipu-v3/ipu-image-convert.c static void calc_tile_resize_coefficients(struct ipu_image_convert_ctx *ctx)
ctx              1113 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1119 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (col = 0; col < ctx->in.num_cols; col++) {
ctx              1120 drivers/gpu/ipu-v3/ipu-image-convert.c 		bool closest = (col < ctx->in.num_cols - 1) &&
ctx              1121 drivers/gpu/ipu-v3/ipu-image-convert.c 			       !(ctx->rot_mode & IPU_ROT_BIT_HFLIP);
ctx              1127 drivers/gpu/ipu-v3/ipu-image-convert.c 		in_tile = &ctx->in.tile[tile_idx];
ctx              1128 drivers/gpu/ipu-v3/ipu-image-convert.c 		out_tile = &ctx->out.tile[ctx->out_tile_map[tile_idx]];
ctx              1130 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1136 drivers/gpu/ipu-v3/ipu-image-convert.c 						   ctx->downsize_coeff_h,
ctx              1158 drivers/gpu/ipu-v3/ipu-image-convert.c 			<< ctx->downsize_coeff_h, 8);
ctx              1160 drivers/gpu/ipu-v3/ipu-image-convert.c 		for (row = 0; row < ctx->in.num_rows; row++) {
ctx              1161 drivers/gpu/ipu-v3/ipu-image-convert.c 			tile_idx = row * ctx->in.num_cols + col;
ctx              1162 drivers/gpu/ipu-v3/ipu-image-convert.c 			in_tile = &ctx->in.tile[tile_idx];
ctx              1163 drivers/gpu/ipu-v3/ipu-image-convert.c 			out_tile = &ctx->out.tile[ctx->out_tile_map[tile_idx]];
ctx              1165 drivers/gpu/ipu-v3/ipu-image-convert.c 			if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1173 drivers/gpu/ipu-v3/ipu-image-convert.c 		ctx->resize_coeffs_h[col] = resize_coeff_h;
ctx              1176 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (row = 0; row < ctx->in.num_rows; row++) {
ctx              1177 drivers/gpu/ipu-v3/ipu-image-convert.c 		bool closest = (row < ctx->in.num_rows - 1) &&
ctx              1178 drivers/gpu/ipu-v3/ipu-image-convert.c 			       !(ctx->rot_mode & IPU_ROT_BIT_VFLIP);
ctx              1183 drivers/gpu/ipu-v3/ipu-image-convert.c 		tile_idx = row * ctx->in.num_cols;
ctx              1184 drivers/gpu/ipu-v3/ipu-image-convert.c 		in_tile = &ctx->in.tile[tile_idx];
ctx              1185 drivers/gpu/ipu-v3/ipu-image-convert.c 		out_tile = &ctx->out.tile[ctx->out_tile_map[tile_idx]];
ctx              1187 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1193 drivers/gpu/ipu-v3/ipu-image-convert.c 						   ctx->downsize_coeff_v,
ctx              1215 drivers/gpu/ipu-v3/ipu-image-convert.c 			<< ctx->downsize_coeff_v, 2);
ctx              1217 drivers/gpu/ipu-v3/ipu-image-convert.c 		for (col = 0; col < ctx->in.num_cols; col++) {
ctx              1218 drivers/gpu/ipu-v3/ipu-image-convert.c 			tile_idx = row * ctx->in.num_cols + col;
ctx              1219 drivers/gpu/ipu-v3/ipu-image-convert.c 			in_tile = &ctx->in.tile[tile_idx];
ctx              1220 drivers/gpu/ipu-v3/ipu-image-convert.c 			out_tile = &ctx->out.tile[ctx->out_tile_map[tile_idx]];
ctx              1222 drivers/gpu/ipu-v3/ipu-image-convert.c 			if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1230 drivers/gpu/ipu-v3/ipu-image-convert.c 		ctx->resize_coeffs_v[row] = resize_coeff_v;
ctx              1238 drivers/gpu/ipu-v3/ipu-image-convert.c static int get_run_count(struct ipu_image_convert_ctx *ctx,
ctx              1244 drivers/gpu/ipu-v3/ipu-image-convert.c 	lockdep_assert_held(&ctx->chan->irqlock);
ctx              1247 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (run->ctx == ctx)
ctx              1256 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx = run->ctx;
ctx              1257 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1261 drivers/gpu/ipu-v3/ipu-image-convert.c 		__func__, chan->ic_task, ctx, run);
ctx              1268 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1277 drivers/gpu/ipu-v3/ipu-image-convert.c static void init_idmac_channel(struct ipu_image_convert_ctx *ctx,
ctx              1284 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1292 drivers/gpu/ipu-v3/ipu-image-convert.c 		tile_idx[0] = ctx->out_tile_map[tile];
ctx              1293 drivers/gpu/ipu-v3/ipu-image-convert.c 		tile_idx[1] = ctx->out_tile_map[1];
ctx              1303 drivers/gpu/ipu-v3/ipu-image-convert.c 		addr0 = ctx->rot_intermediate[0].phys;
ctx              1304 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->double_buffering)
ctx              1305 drivers/gpu/ipu-v3/ipu-image-convert.c 			addr1 = ctx->rot_intermediate[1].phys;
ctx              1312 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->double_buffering)
ctx              1364 drivers/gpu/ipu-v3/ipu-image-convert.c 	ipu_idmac_set_double_buffer(channel, ctx->double_buffering);
ctx              1369 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx = run->ctx;
ctx              1370 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1372 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *s_image = &ctx->in;
ctx              1373 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *d_image = &ctx->out;
ctx              1374 drivers/gpu/ipu-v3/ipu-image-convert.c 	unsigned int dst_tile = ctx->out_tile_map[tile];
ctx              1381 drivers/gpu/ipu-v3/ipu-image-convert.c 		__func__, chan->ic_task, ctx, run, tile, dst_tile);
ctx              1383 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1395 drivers/gpu/ipu-v3/ipu-image-convert.c 	rsc =  (ctx->downsize_coeff_v << 30) |
ctx              1396 drivers/gpu/ipu-v3/ipu-image-convert.c 	       (ctx->resize_coeffs_v[row] << 16) |
ctx              1397 drivers/gpu/ipu-v3/ipu-image-convert.c 	       (ctx->downsize_coeff_h << 14) |
ctx              1398 drivers/gpu/ipu-v3/ipu-image-convert.c 	       (ctx->resize_coeffs_h[col]);
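
Per the shifts above, the rsc word packs four fields: bits 31:30 vertical downsize, 29:16 vertical resize, 15:14 horizontal downsize, 13:0 horizontal resize. Worked through with illustrative values (downsize_v = 1, resize_v = 0x2000, downsize_h = 1, resize_h = 0x2000):

	u32 rsc = (1 << 30) | (0x2000 << 16) | (1 << 14) | 0x2000;
	/* = 0x40000000 | 0x20000000 | 0x00004000 | 0x00002000
	 * = 0x60006000 */
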
ctx              1405 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = ipu_ic_task_init_rsc(chan->ic, &ctx->csc,
ctx              1417 drivers/gpu/ipu-v3/ipu-image-convert.c 	init_idmac_channel(ctx, chan->in_chan, s_image,
ctx              1420 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1422 drivers/gpu/ipu-v3/ipu-image-convert.c 		init_idmac_channel(ctx, chan->out_chan, d_image,
ctx              1426 drivers/gpu/ipu-v3/ipu-image-convert.c 		init_idmac_channel(ctx, chan->rotation_in_chan, d_image,
ctx              1427 drivers/gpu/ipu-v3/ipu-image-convert.c 				   ctx->rot_mode, true, tile);
ctx              1430 drivers/gpu/ipu-v3/ipu-image-convert.c 		init_idmac_channel(ctx, chan->rotation_out_chan, d_image,
ctx              1437 drivers/gpu/ipu-v3/ipu-image-convert.c 		init_idmac_channel(ctx, chan->out_chan, d_image,
ctx              1438 drivers/gpu/ipu-v3/ipu-image-convert.c 				   ctx->rot_mode, false, tile);
ctx              1447 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1449 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->double_buffering) {
ctx              1452 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ipu_rot_mode_is_irt(ctx->rot_mode))
ctx              1459 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1468 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1481 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx = run->ctx;
ctx              1482 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1486 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->in.base.phys0 = run->in_phys;
ctx              1487 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->out.base.phys0 = run->out_phys;
ctx              1489 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->cur_buf_num = 0;
ctx              1490 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->next_tile = 1;
ctx              1510 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (run->ctx->aborting) {
ctx              1513 drivers/gpu/ipu-v3/ipu-image-convert.c 				__func__, chan->ic_task, run->ctx, run);
ctx              1549 drivers/gpu/ipu-v3/ipu-image-convert.c 			__func__, chan->ic_task, run->ctx, run, run->status);
ctx              1553 drivers/gpu/ipu-v3/ipu-image-convert.c 		run->ctx->complete(run, run->ctx->complete_context);
ctx              1568 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              1582 drivers/gpu/ipu-v3/ipu-image-convert.c 	list_for_each_entry(ctx, &chan->ctx_list, list) {
ctx              1583 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->aborting) {
ctx              1586 drivers/gpu/ipu-v3/ipu-image-convert.c 				__func__, chan->ic_task, ctx);
ctx              1587 drivers/gpu/ipu-v3/ipu-image-convert.c 			complete_all(&ctx->aborted);
ctx              1599 drivers/gpu/ipu-v3/ipu-image-convert.c static bool ic_settings_changed(struct ipu_image_convert_ctx *ctx)
ctx              1601 drivers/gpu/ipu-v3/ipu-image-convert.c 	unsigned int cur_tile = ctx->next_tile - 1;
ctx              1602 drivers/gpu/ipu-v3/ipu-image-convert.c 	unsigned int next_tile = ctx->next_tile;
ctx              1604 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->resize_coeffs_h[cur_tile % ctx->in.num_cols] !=
ctx              1605 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->resize_coeffs_h[next_tile % ctx->in.num_cols] ||
ctx              1606 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->resize_coeffs_v[cur_tile / ctx->in.num_cols] !=
ctx              1607 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->resize_coeffs_v[next_tile / ctx->in.num_cols] ||
ctx              1608 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->in.tile[cur_tile].width != ctx->in.tile[next_tile].width ||
ctx              1609 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->in.tile[cur_tile].height != ctx->in.tile[next_tile].height ||
ctx              1610 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->out.tile[cur_tile].width != ctx->out.tile[next_tile].width ||
ctx              1611 drivers/gpu/ipu-v3/ipu-image-convert.c 	    ctx->out.tile[cur_tile].height != ctx->out.tile[next_tile].height)
ctx              1620 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx = run->ctx;
ctx              1621 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1623 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *s_image = &ctx->in;
ctx              1624 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_image *d_image = &ctx->out;
ctx              1630 drivers/gpu/ipu-v3/ipu-image-convert.c 	outch = ipu_rot_mode_is_irt(ctx->rot_mode) ?
ctx              1641 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->aborting && !ctx->double_buffering) {
ctx              1647 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->next_tile == ctx->num_tiles) {
ctx              1659 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (!ctx->double_buffering) {
ctx              1660 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ic_settings_changed(ctx)) {
ctx              1662 drivers/gpu/ipu-v3/ipu-image-convert.c 			convert_start(run, ctx->next_tile);
ctx              1664 drivers/gpu/ipu-v3/ipu-image-convert.c 			src_tile = &s_image->tile[ctx->next_tile];
ctx              1665 drivers/gpu/ipu-v3/ipu-image-convert.c 			dst_idx = ctx->out_tile_map[ctx->next_tile];
ctx              1686 drivers/gpu/ipu-v3/ipu-image-convert.c 	} else if (ctx->next_tile < ctx->num_tiles - 1) {
ctx              1688 drivers/gpu/ipu-v3/ipu-image-convert.c 		src_tile = &s_image->tile[ctx->next_tile + 1];
ctx              1689 drivers/gpu/ipu-v3/ipu-image-convert.c 		dst_idx = ctx->out_tile_map[ctx->next_tile + 1];
ctx              1692 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_cpmem_set_buffer(chan->in_chan, ctx->cur_buf_num,
ctx              1694 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_cpmem_set_buffer(outch, ctx->cur_buf_num,
ctx              1697 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_idmac_select_buffer(chan->in_chan, ctx->cur_buf_num);
ctx              1698 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_idmac_select_buffer(outch, ctx->cur_buf_num);
ctx              1700 drivers/gpu/ipu-v3/ipu-image-convert.c 		ctx->cur_buf_num ^= 1;
ctx              1703 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->next_tile++;
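
The tile-advance entries above are the double-buffered pipeline in miniature: while the hardware converts the current tile out of one half of the buffer pair, software programs tile N+1 into the idle half, marks it ready, and flips. Condensed, with s_phys/d_phys standing in for the tile offset arithmetic:

	/* program the idle buffer of the pair with tile N+1 ... */
	ipu_cpmem_set_buffer(chan->in_chan, ctx->cur_buf_num, s_phys);
	ipu_cpmem_set_buffer(outch, ctx->cur_buf_num, d_phys);

	/* ... hand it to the IDMAC ... */
	ipu_idmac_select_buffer(chan->in_chan, ctx->cur_buf_num);
	ipu_idmac_select_buffer(outch, ctx->cur_buf_num);

	/* ... and ping-pong for the tile after that */
	ctx->cur_buf_num ^= 1;
	ctx->next_tile++;
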
ctx              1715 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              1729 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx = run->ctx;
ctx              1731 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1747 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              1761 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx = run->ctx;
ctx              1763 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (!ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              1780 drivers/gpu/ipu-v3/ipu-image-convert.c static void force_abort(struct ipu_image_convert_ctx *ctx)
ctx              1782 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              1789 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (run && run->ctx == ctx) {
ctx              1890 drivers/gpu/ipu-v3/ipu-image-convert.c static int fill_image(struct ipu_image_convert_ctx *ctx,
ctx              1895 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_priv *priv = ctx->chan->priv;
ctx              2047 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              2068 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2069 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (!ctx)
ctx              2073 drivers/gpu/ipu-v3/ipu-image-convert.c 		chan->ic_task, ctx);
ctx              2075 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->chan = chan;
ctx              2076 drivers/gpu/ipu-v3/ipu-image-convert.c 	init_completion(&ctx->aborted);
ctx              2078 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->rot_mode = rot_mode;
ctx              2081 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = calc_image_resize_coefficients(ctx, in, out);
ctx              2085 drivers/gpu/ipu-v3/ipu-image-convert.c 	s_image = &ctx->in;
ctx              2086 drivers/gpu/ipu-v3/ipu-image-convert.c 	d_image = &ctx->out;
ctx              2097 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->num_tiles = d_image->num_cols * d_image->num_rows;
ctx              2099 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = fill_image(ctx, s_image, in, IMAGE_CONVERT_IN);
ctx              2102 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = fill_image(ctx, d_image, out, IMAGE_CONVERT_OUT);
ctx              2106 drivers/gpu/ipu-v3/ipu-image-convert.c 	calc_out_tile_map(ctx);
ctx              2108 drivers/gpu/ipu-v3/ipu-image-convert.c 	find_seams(ctx, s_image, d_image);
ctx              2110 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = calc_tile_dimensions(ctx, s_image);
ctx              2114 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = calc_tile_offsets(ctx, s_image);
ctx              2118 drivers/gpu/ipu-v3/ipu-image-convert.c 	calc_tile_dimensions(ctx, d_image);
ctx              2119 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = calc_tile_offsets(ctx, d_image);
ctx              2123 drivers/gpu/ipu-v3/ipu-image-convert.c 	calc_tile_resize_coefficients(ctx);
ctx              2125 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = ipu_ic_calc_csc(&ctx->csc,
ctx              2135 drivers/gpu/ipu-v3/ipu-image-convert.c 	dump_format(ctx, s_image);
ctx              2136 drivers/gpu/ipu-v3/ipu-image-convert.c 	dump_format(ctx, d_image);
ctx              2138 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->complete = complete;
ctx              2139 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->complete_context = complete_context;
ctx              2154 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->double_buffering = (ctx->num_tiles > 1 &&
ctx              2157 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (i = 1; i < ctx->num_tiles; i++) {
ctx              2158 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->in.tile[i].width != ctx->in.tile[0].width ||
ctx              2159 drivers/gpu/ipu-v3/ipu-image-convert.c 		    ctx->in.tile[i].height != ctx->in.tile[0].height ||
ctx              2160 drivers/gpu/ipu-v3/ipu-image-convert.c 		    ctx->out.tile[i].width != ctx->out.tile[0].width ||
ctx              2161 drivers/gpu/ipu-v3/ipu-image-convert.c 		    ctx->out.tile[i].height != ctx->out.tile[0].height) {
ctx              2162 drivers/gpu/ipu-v3/ipu-image-convert.c 			ctx->double_buffering = false;
ctx              2166 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (i = 1; i < ctx->in.num_cols; i++) {
ctx              2167 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->resize_coeffs_h[i] != ctx->resize_coeffs_h[0]) {
ctx              2168 drivers/gpu/ipu-v3/ipu-image-convert.c 			ctx->double_buffering = false;
ctx              2172 drivers/gpu/ipu-v3/ipu-image-convert.c 	for (i = 1; i < ctx->in.num_rows; i++) {
ctx              2173 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->resize_coeffs_v[i] != ctx->resize_coeffs_v[0]) {
ctx              2174 drivers/gpu/ipu-v3/ipu-image-convert.c 			ctx->double_buffering = false;
ctx              2179 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx              2182 drivers/gpu/ipu-v3/ipu-image-convert.c 		for (i = 1; i < ctx->num_tiles; i++) {
ctx              2187 drivers/gpu/ipu-v3/ipu-image-convert.c 		ret = alloc_dma_buf(priv, &ctx->rot_intermediate[0],
ctx              2191 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (ctx->double_buffering) {
ctx              2193 drivers/gpu/ipu-v3/ipu-image-convert.c 					    &ctx->rot_intermediate[1],
ctx              2204 drivers/gpu/ipu-v3/ipu-image-convert.c 	list_add_tail(&ctx->list, &chan->ctx_list);
ctx              2214 drivers/gpu/ipu-v3/ipu-image-convert.c 	return ctx;
ctx              2217 drivers/gpu/ipu-v3/ipu-image-convert.c 	free_dma_buf(priv, &ctx->rot_intermediate[1]);
ctx              2219 drivers/gpu/ipu-v3/ipu-image-convert.c 	list_del(&ctx->list);
ctx              2222 drivers/gpu/ipu-v3/ipu-image-convert.c 	free_dma_buf(priv, &ctx->rot_intermediate[0]);
ctx              2224 drivers/gpu/ipu-v3/ipu-image-convert.c 	kfree(ctx);
ctx              2238 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              2242 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (!run || !run->ctx || !run->in_phys || !run->out_phys)
ctx              2245 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx = run->ctx;
ctx              2246 drivers/gpu/ipu-v3/ipu-image-convert.c 	chan = ctx->chan;
ctx              2250 drivers/gpu/ipu-v3/ipu-image-convert.c 		chan->ic_task, ctx, run);
ctx              2256 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (ctx->aborting) {
ctx              2275 drivers/gpu/ipu-v3/ipu-image-convert.c static void __ipu_image_convert_abort(struct ipu_image_convert_ctx *ctx)
ctx              2277 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              2287 drivers/gpu/ipu-v3/ipu-image-convert.c 		if (run->ctx != ctx)
ctx              2293 drivers/gpu/ipu-v3/ipu-image-convert.c 	run_count = get_run_count(ctx, &chan->done_q);
ctx              2294 drivers/gpu/ipu-v3/ipu-image-convert.c 	active_run = (chan->current_run && chan->current_run->ctx == ctx) ?
ctx              2298 drivers/gpu/ipu-v3/ipu-image-convert.c 		reinit_completion(&ctx->aborted);
ctx              2300 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->aborting = true;
ctx              2307 drivers/gpu/ipu-v3/ipu-image-convert.c 			__func__, chan->ic_task, ctx);
ctx              2320 drivers/gpu/ipu-v3/ipu-image-convert.c 	ret = wait_for_completion_timeout(&ctx->aborted,
ctx              2324 drivers/gpu/ipu-v3/ipu-image-convert.c 		force_abort(ctx);
ctx              2328 drivers/gpu/ipu-v3/ipu-image-convert.c void ipu_image_convert_abort(struct ipu_image_convert_ctx *ctx)
ctx              2330 drivers/gpu/ipu-v3/ipu-image-convert.c 	__ipu_image_convert_abort(ctx);
ctx              2331 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx->aborting = false;
ctx              2336 drivers/gpu/ipu-v3/ipu-image-convert.c void ipu_image_convert_unprepare(struct ipu_image_convert_ctx *ctx)
ctx              2338 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_chan *chan = ctx->chan;
ctx              2344 drivers/gpu/ipu-v3/ipu-image-convert.c 	__ipu_image_convert_abort(ctx);
ctx              2347 drivers/gpu/ipu-v3/ipu-image-convert.c 		chan->ic_task, ctx);
ctx              2351 drivers/gpu/ipu-v3/ipu-image-convert.c 	list_del(&ctx->list);
ctx              2360 drivers/gpu/ipu-v3/ipu-image-convert.c 	free_dma_buf(priv, &ctx->rot_intermediate[1]);
ctx              2361 drivers/gpu/ipu-v3/ipu-image-convert.c 	free_dma_buf(priv, &ctx->rot_intermediate[0]);
ctx              2363 drivers/gpu/ipu-v3/ipu-image-convert.c 	kfree(ctx);
ctx              2379 drivers/gpu/ipu-v3/ipu-image-convert.c 	struct ipu_image_convert_ctx *ctx;
ctx              2383 drivers/gpu/ipu-v3/ipu-image-convert.c 	ctx = ipu_image_convert_prepare(ipu, ic_task, in, out, rot_mode,
ctx              2385 drivers/gpu/ipu-v3/ipu-image-convert.c 	if (IS_ERR(ctx))
ctx              2386 drivers/gpu/ipu-v3/ipu-image-convert.c 		return ERR_CAST(ctx);
ctx              2390 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_image_convert_unprepare(ctx);
ctx              2394 drivers/gpu/ipu-v3/ipu-image-convert.c 	run->ctx = ctx;
ctx              2400 drivers/gpu/ipu-v3/ipu-image-convert.c 		ipu_image_convert_unprepare(ctx);
ctx              2436 drivers/gpu/ipu-v3/ipu-image-convert.c 	ipu_image_convert_unprepare(run->ctx);
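
The prepare/queue/abort/unprepare entries above add up to the driver-facing flow. A hypothetical caller, assuming ipu_image_convert_queue() is the submission entry point the run-validity checks above belong to:

	ctx = ipu_image_convert_prepare(ipu, ic_task, &in, &out, rot_mode,
					my_complete, my_priv);
	if (IS_ERR(ctx))
		return PTR_ERR(ctx);

	run = kzalloc(sizeof(*run), GFP_KERNEL);
	if (!run) {
		ipu_image_convert_unprepare(ctx);
		return -ENOMEM;
	}
	run->ctx = ctx;
	run->in_phys = in_dma;		/* DMA addresses of the two frames */
	run->out_phys = out_dma;

	ret = ipu_image_convert_queue(run);	/* completes via my_complete() */

	/* ... when done with the context entirely: */
	ipu_image_convert_unprepare(ctx);	/* aborts pending runs first */
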
ctx                98 drivers/hid/hid-u2fzero.c 	struct u2fzero_transfer_context *ctx = urb->context;
ctx               100 drivers/hid/hid-u2fzero.c 	ctx->status = urb->status;
ctx               101 drivers/hid/hid-u2fzero.c 	complete(&ctx->done);
ctx               110 drivers/hid/hid-u2fzero.c 	struct u2fzero_transfer_context ctx;
ctx               116 drivers/hid/hid-u2fzero.c 	dev->urb->context = &ctx;
ctx               117 drivers/hid/hid-u2fzero.c 	init_completion(&ctx.done);
ctx               134 drivers/hid/hid-u2fzero.c 		&ctx.done, msecs_to_jiffies(USB_CTRL_SET_TIMEOUT)));
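
hid-u2fzero rides a completion through urb->context: the submitter stack-allocates a small transfer context, submits, and sleeps until the URB completion handler fires or a timeout hits. The generic shape (names hypothetical):

	struct xfer_ctx {
		struct completion done;
		int status;
	};

	static void xfer_complete(struct urb *urb)
	{
		struct xfer_ctx *c = urb->context;

		c->status = urb->status;
		complete(&c->done);		/* wake the submitter */
	}

	/* submitter side */
	struct xfer_ctx c;

	init_completion(&c.done);
	urb->context = &c;
	if (usb_submit_urb(urb, GFP_KERNEL))
		return -EIO;
	if (!wait_for_completion_timeout(&c.done, msecs_to_jiffies(5000)))
		usb_kill_urb(urb);	/* timed out: cancel; waits for handler */
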
ctx              1039 drivers/hv/vmbus_drv.c 	struct onmessage_work_context *ctx;
ctx              1045 drivers/hv/vmbus_drv.c 	ctx = container_of(work, struct onmessage_work_context,
ctx              1047 drivers/hv/vmbus_drv.c 	vmbus_onmessage(&ctx->msg);
ctx              1048 drivers/hv/vmbus_drv.c 	kfree(ctx);
ctx              1059 drivers/hv/vmbus_drv.c 	struct onmessage_work_context *ctx;
ctx              1077 drivers/hv/vmbus_drv.c 		ctx = kmalloc(sizeof(*ctx), GFP_ATOMIC);
ctx              1078 drivers/hv/vmbus_drv.c 		if (ctx == NULL)
ctx              1081 drivers/hv/vmbus_drv.c 		INIT_WORK(&ctx->work, vmbus_onmessage_work);
ctx              1082 drivers/hv/vmbus_drv.c 		memcpy(&ctx->msg, msg, sizeof(*msg));
ctx              1097 drivers/hv/vmbus_drv.c 					 &ctx->work);
ctx              1104 drivers/hv/vmbus_drv.c 				      &ctx->work);
ctx              1108 drivers/hv/vmbus_drv.c 			queue_work(vmbus_connection.work_queue, &ctx->work);
ctx              1124 drivers/hv/vmbus_drv.c 	struct onmessage_work_context *ctx;
ctx              1133 drivers/hv/vmbus_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL | __GFP_NOFAIL);
ctx              1139 drivers/hv/vmbus_drv.c 	ctx->msg.header.message_type = 1;
ctx              1140 drivers/hv/vmbus_drv.c 	ctx->msg.header.payload_size = sizeof(*rescind);
ctx              1143 drivers/hv/vmbus_drv.c 	rescind = (struct vmbus_channel_rescind_offer *)ctx->msg.u.payload;
ctx              1147 drivers/hv/vmbus_drv.c 	INIT_WORK(&ctx->work, vmbus_onmessage_work);
ctx              1151 drivers/hv/vmbus_drv.c 		      &ctx->work);
ctx              1896 drivers/hv/vmbus_drv.c static acpi_status vmbus_walk_resources(struct acpi_resource *res, void *ctx)
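
The vmbus_drv.c entries are the classic work-item-with-payload pattern: copy the message into a heap context that embeds the work_struct, queue it, recover the whole thing with container_of() in the handler, and free it there. Roughly (handler name and exact msg field layout assumed):

	struct onmessage_work_context {
		struct work_struct work;
		struct hv_message msg;		/* payload copied at queue time */
	};

	static void onmessage_work(struct work_struct *work)
	{
		struct onmessage_work_context *c =
			container_of(work, struct onmessage_work_context, work);

		/* ... consume c->msg ... */
		kfree(c);			/* the handler owns the context */
	}

	/* producer, possibly in interrupt context, hence GFP_ATOMIC */
	c = kmalloc(sizeof(*c), GFP_ATOMIC);
	if (!c)
		return;
	INIT_WORK(&c->work, onmessage_work);
	memcpy(&c->msg, msg, sizeof(*msg));
	queue_work(wq, &c->work);
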
ctx               118 drivers/hwmon/occ/p8_i2c.c 	struct p8_i2c_occ *ctx = to_p8_i2c_occ(occ);
ctx               119 drivers/hwmon/occ/p8_i2c.c 	struct i2c_client *client = ctx->client;
ctx               210 drivers/hwmon/occ/p8_i2c.c 	struct p8_i2c_occ *ctx = devm_kzalloc(&client->dev, sizeof(*ctx),
ctx               212 drivers/hwmon/occ/p8_i2c.c 	if (!ctx)
ctx               215 drivers/hwmon/occ/p8_i2c.c 	ctx->client = client;
ctx               216 drivers/hwmon/occ/p8_i2c.c 	occ = &ctx->occ;
ctx                22 drivers/hwmon/occ/p9_sbe.c 	struct p9_sbe_occ *ctx = to_p9_sbe_occ(occ);
ctx                26 drivers/hwmon/occ/p9_sbe.c 	rc = fsi_occ_submit(ctx->sbe, cmd, 8, resp, &resp_len);
ctx                63 drivers/hwmon/occ/p9_sbe.c 	struct p9_sbe_occ *ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx),
ctx                65 drivers/hwmon/occ/p9_sbe.c 	if (!ctx)
ctx                68 drivers/hwmon/occ/p9_sbe.c 	ctx->sbe = pdev->dev.parent;
ctx                69 drivers/hwmon/occ/p9_sbe.c 	occ = &ctx->occ;
ctx                87 drivers/hwmon/occ/p9_sbe.c 	struct p9_sbe_occ *ctx = to_p9_sbe_occ(occ);
ctx                89 drivers/hwmon/occ/p9_sbe.c 	ctx->sbe = NULL;
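
to_p8_i2c_occ()/to_p9_sbe_occ() above are almost certainly the usual container_of() accessors: the generic struct occ is embedded in a transport-specific wrapper, which is recovered from a pointer to the embedded member. Presumably:

	struct p8_i2c_occ {
		struct occ occ;			/* embedded generic part */
		struct i2c_client *client;	/* transport-specific part */
	};

	#define to_p8_i2c_occ(x)	container_of((x), struct p8_i2c_occ, occ)
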
ctx                47 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_id;
ctx                49 drivers/hwmon/pwm-fan.c 	atomic_inc(&ctx->pulses);
ctx                56 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = from_timer(ctx, t, rpm_timer);
ctx                60 drivers/hwmon/pwm-fan.c 	pulses = atomic_read(&ctx->pulses);
ctx                61 drivers/hwmon/pwm-fan.c 	atomic_sub(pulses, &ctx->pulses);
ctx                62 drivers/hwmon/pwm-fan.c 	tmp = (u64)pulses * ktime_ms_delta(ktime_get(), ctx->sample_start) * 60;
ctx                63 drivers/hwmon/pwm-fan.c 	do_div(tmp, ctx->pulses_per_revolution * 1000);
ctx                64 drivers/hwmon/pwm-fan.c 	ctx->rpm = tmp;
ctx                66 drivers/hwmon/pwm-fan.c 	ctx->sample_start = ktime_get();
ctx                67 drivers/hwmon/pwm-fan.c 	mod_timer(&ctx->rpm_timer, jiffies + HZ);
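
Plugging numbers through the sample-timer arithmetic above: with pulses_per_revolution = 2 and 50 pulses counted over the nominal ~1000 ms window (the timer re-arms at jiffies + HZ), rpm = 50 * 1000 * 60 / (2 * 1000) = 1500. Note the window length enters as a multiplier, so the quoted expression reduces to pulses * 60 / ppr only while the window stays close to one second:

	/* per the quoted lines: rpm = pulses * delta_ms * 60 / (ppr * 1000) */
	u64 tmp = 50ULL * 1000 * 60;	/* pulses * delta_ms * 60 = 3000000 */
	do_div(tmp, 2 * 1000);		/* ppr * 1000 */
	/* tmp == 1500 */
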
ctx                70 drivers/hwmon/pwm-fan.c static int  __set_pwm(struct pwm_fan_ctx *ctx, unsigned long pwm)
ctx                76 drivers/hwmon/pwm-fan.c 	mutex_lock(&ctx->lock);
ctx                77 drivers/hwmon/pwm-fan.c 	if (ctx->pwm_value == pwm)
ctx                80 drivers/hwmon/pwm-fan.c 	pwm_init_state(ctx->pwm, &state);
ctx                81 drivers/hwmon/pwm-fan.c 	period = ctx->pwm->args.period;
ctx                85 drivers/hwmon/pwm-fan.c 	ret = pwm_apply_state(ctx->pwm, &state);
ctx                87 drivers/hwmon/pwm-fan.c 		ctx->pwm_value = pwm;
ctx                89 drivers/hwmon/pwm-fan.c 	mutex_unlock(&ctx->lock);
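
__set_pwm() above maps the 0..255 sysfs value onto the PWM period through the atomic state API. A plausible body for the elided middle; the duty-cycle formula is an assumption, chosen to be consistent with MAX_PWM = 255 appearing elsewhere in the driver:

	pwm_init_state(ctx->pwm, &state);	/* seed from args/defaults */
	period = ctx->pwm->args.period;
	state.duty_cycle = DIV_ROUND_UP(pwm * (period - 1), MAX_PWM);
	state.enabled = pwm ? true : false;
	ret = pwm_apply_state(ctx->pwm, &state);	/* atomic commit */
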
ctx                93 drivers/hwmon/pwm-fan.c static void pwm_fan_update_state(struct pwm_fan_ctx *ctx, unsigned long pwm)
ctx                97 drivers/hwmon/pwm-fan.c 	for (i = 0; i < ctx->pwm_fan_max_state; ++i)
ctx                98 drivers/hwmon/pwm-fan.c 		if (pwm < ctx->pwm_fan_cooling_levels[i + 1])
ctx               101 drivers/hwmon/pwm-fan.c 	ctx->pwm_fan_state = i;
ctx               107 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               114 drivers/hwmon/pwm-fan.c 	ret = __set_pwm(ctx, pwm);
ctx               118 drivers/hwmon/pwm-fan.c 	pwm_fan_update_state(ctx, pwm);
ctx               125 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               127 drivers/hwmon/pwm-fan.c 	return sprintf(buf, "%u\n", ctx->pwm_value);
ctx               133 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               135 drivers/hwmon/pwm-fan.c 	return sprintf(buf, "%u\n", ctx->rpm);
ctx               151 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               154 drivers/hwmon/pwm-fan.c 	if (n == 1 && ctx->irq <= 0)
ctx               174 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = cdev->devdata;
ctx               176 drivers/hwmon/pwm-fan.c 	if (!ctx)
ctx               179 drivers/hwmon/pwm-fan.c 	*state = ctx->pwm_fan_max_state;
ctx               187 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = cdev->devdata;
ctx               189 drivers/hwmon/pwm-fan.c 	if (!ctx)
ctx               192 drivers/hwmon/pwm-fan.c 	*state = ctx->pwm_fan_state;
ctx               200 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = cdev->devdata;
ctx               203 drivers/hwmon/pwm-fan.c 	if (!ctx || (state > ctx->pwm_fan_max_state))
ctx               206 drivers/hwmon/pwm-fan.c 	if (state == ctx->pwm_fan_state)
ctx               209 drivers/hwmon/pwm-fan.c 	ret = __set_pwm(ctx, ctx->pwm_fan_cooling_levels[state]);
ctx               215 drivers/hwmon/pwm-fan.c 	ctx->pwm_fan_state = state;
ctx               227 drivers/hwmon/pwm-fan.c 				       struct pwm_fan_ctx *ctx)
ctx               242 drivers/hwmon/pwm-fan.c 	ctx->pwm_fan_cooling_levels = devm_kcalloc(dev, num, sizeof(u32),
ctx               244 drivers/hwmon/pwm-fan.c 	if (!ctx->pwm_fan_cooling_levels)
ctx               248 drivers/hwmon/pwm-fan.c 					 ctx->pwm_fan_cooling_levels, num);
ctx               255 drivers/hwmon/pwm-fan.c 		if (ctx->pwm_fan_cooling_levels[i] > MAX_PWM) {
ctx               257 drivers/hwmon/pwm-fan.c 				ctx->pwm_fan_cooling_levels[i], MAX_PWM);
ctx               262 drivers/hwmon/pwm-fan.c 	ctx->pwm_fan_max_state = num - 1;
ctx               274 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = __ctx;
ctx               275 drivers/hwmon/pwm-fan.c 	pwm_disable(ctx->pwm);
ctx               276 drivers/hwmon/pwm-fan.c 	del_timer_sync(&ctx->rpm_timer);
ctx               283 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx;
ctx               289 drivers/hwmon/pwm-fan.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               290 drivers/hwmon/pwm-fan.c 	if (!ctx)
ctx               293 drivers/hwmon/pwm-fan.c 	mutex_init(&ctx->lock);
ctx               295 drivers/hwmon/pwm-fan.c 	ctx->pwm = devm_of_pwm_get(dev, dev->of_node, NULL);
ctx               296 drivers/hwmon/pwm-fan.c 	if (IS_ERR(ctx->pwm)) {
ctx               297 drivers/hwmon/pwm-fan.c 		ret = PTR_ERR(ctx->pwm);
ctx               305 drivers/hwmon/pwm-fan.c 	platform_set_drvdata(pdev, ctx);
ctx               307 drivers/hwmon/pwm-fan.c 	ctx->irq = platform_get_irq_optional(pdev, 0);
ctx               308 drivers/hwmon/pwm-fan.c 	if (ctx->irq == -EPROBE_DEFER)
ctx               309 drivers/hwmon/pwm-fan.c 		return ctx->irq;
ctx               311 drivers/hwmon/pwm-fan.c 	ctx->reg_en = devm_regulator_get_optional(dev, "fan");
ctx               312 drivers/hwmon/pwm-fan.c 	if (IS_ERR(ctx->reg_en)) {
ctx               313 drivers/hwmon/pwm-fan.c 		if (PTR_ERR(ctx->reg_en) != -ENODEV)
ctx               314 drivers/hwmon/pwm-fan.c 			return PTR_ERR(ctx->reg_en);
ctx               316 drivers/hwmon/pwm-fan.c 		ctx->reg_en = NULL;
ctx               318 drivers/hwmon/pwm-fan.c 		ret = regulator_enable(ctx->reg_en);
ctx               324 drivers/hwmon/pwm-fan.c 					       ctx->reg_en);
ctx               329 drivers/hwmon/pwm-fan.c 	ctx->pwm_value = MAX_PWM;
ctx               332 drivers/hwmon/pwm-fan.c 	pwm_init_state(ctx->pwm, &state);
ctx               333 drivers/hwmon/pwm-fan.c 	state.duty_cycle = ctx->pwm->args.period - 1;
ctx               336 drivers/hwmon/pwm-fan.c 	ret = pwm_apply_state(ctx->pwm, &state);
ctx               341 drivers/hwmon/pwm-fan.c 	timer_setup(&ctx->rpm_timer, sample_timer, 0);
ctx               342 drivers/hwmon/pwm-fan.c 	ret = devm_add_action_or_reset(dev, pwm_fan_pwm_disable, ctx);
ctx               347 drivers/hwmon/pwm-fan.c 	ctx->pulses_per_revolution = ppr;
ctx               348 drivers/hwmon/pwm-fan.c 	if (!ctx->pulses_per_revolution) {
ctx               353 drivers/hwmon/pwm-fan.c 	if (ctx->irq > 0) {
ctx               354 drivers/hwmon/pwm-fan.c 		ret = devm_request_irq(dev, ctx->irq, pulse_handler, 0,
ctx               355 drivers/hwmon/pwm-fan.c 				       pdev->name, ctx);
ctx               360 drivers/hwmon/pwm-fan.c 		ctx->sample_start = ktime_get();
ctx               361 drivers/hwmon/pwm-fan.c 		mod_timer(&ctx->rpm_timer, jiffies + HZ);
ctx               365 drivers/hwmon/pwm-fan.c 						       ctx, pwm_fan_groups);
ctx               371 drivers/hwmon/pwm-fan.c 	ret = pwm_fan_of_get_cooling_data(dev, ctx);
ctx               375 drivers/hwmon/pwm-fan.c 	ctx->pwm_fan_state = ctx->pwm_fan_max_state;
ctx               378 drivers/hwmon/pwm-fan.c 			dev->of_node, "pwm-fan", ctx, &pwm_fan_cooling_ops);
ctx               386 drivers/hwmon/pwm-fan.c 		ctx->cdev = cdev;
ctx               396 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               400 drivers/hwmon/pwm-fan.c 	pwm_get_args(ctx->pwm, &args);
ctx               402 drivers/hwmon/pwm-fan.c 	if (ctx->pwm_value) {
ctx               403 drivers/hwmon/pwm-fan.c 		ret = pwm_config(ctx->pwm, 0, args.period);
ctx               407 drivers/hwmon/pwm-fan.c 		pwm_disable(ctx->pwm);
ctx               410 drivers/hwmon/pwm-fan.c 	if (ctx->reg_en) {
ctx               411 drivers/hwmon/pwm-fan.c 		ret = regulator_disable(ctx->reg_en);
ctx               423 drivers/hwmon/pwm-fan.c 	struct pwm_fan_ctx *ctx = dev_get_drvdata(dev);
ctx               428 drivers/hwmon/pwm-fan.c 	if (ctx->reg_en) {
ctx               429 drivers/hwmon/pwm-fan.c 		ret = regulator_enable(ctx->reg_en);
ctx               436 drivers/hwmon/pwm-fan.c 	if (ctx->pwm_value == 0)
ctx               439 drivers/hwmon/pwm-fan.c 	pwm_get_args(ctx->pwm, &pargs);
ctx               440 drivers/hwmon/pwm-fan.c 	duty = DIV_ROUND_UP(ctx->pwm_value * (pargs.period - 1), MAX_PWM);
ctx               441 drivers/hwmon/pwm-fan.c 	ret = pwm_config(ctx->pwm, duty, pargs.period);
ctx               444 drivers/hwmon/pwm-fan.c 	return pwm_enable(ctx->pwm);
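
The pwm-fan lines above sample an interrupt-driven pulse counter roughly once per second (the timer re-arms at jiffies + HZ) and scale it to RPM with 64-bit math via do_div(). A sketch of the standard pulses-to-RPM conversion, under hypothetical names; written with an explicit division by the elapsed time, it coincides with the driver's expression exactly when the sampling window is 1000 ms:

#include <stdint.h>

/* Sketch only: convert pulses counted over delta_ms milliseconds into
 * revolutions per minute.  Two tachometer pulses per revolution is the
 * usual default. */
static uint32_t pulses_to_rpm(uint32_t pulses, uint32_t delta_ms,
			      uint32_t pulses_per_revolution)
{
	if (!delta_ms || !pulses_per_revolution)
		return 0;
	/* revolutions = pulses / ppr; 60000 scales ms up to minutes */
	return (uint32_t)(((uint64_t)pulses * 60000) /
			  ((uint64_t)pulses_per_revolution * delta_ms));
}

The 64-bit intermediate matters: at high pulse counts the product overflows 32 bits, which is why the driver routes it through a u64 and do_div().
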
ctx               131 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_pcc_rd(struct xgene_hwmon_dev *ctx, u32 *msg)
ctx               133 drivers/hwmon/xgene-hwmon.c 	struct acpi_pcct_shared_memory *generic_comm_base = ctx->pcc_comm_addr;
ctx               138 drivers/hwmon/xgene-hwmon.c 	mutex_lock(&ctx->rd_mutex);
ctx               139 drivers/hwmon/xgene-hwmon.c 	init_completion(&ctx->rd_complete);
ctx               140 drivers/hwmon/xgene-hwmon.c 	ctx->resp_pending = true;
ctx               144 drivers/hwmon/xgene-hwmon.c 		   cpu_to_le32(PCC_SIGNATURE_MASK | ctx->mbox_idx));
ctx               160 drivers/hwmon/xgene-hwmon.c 	rc = mbox_send_message(ctx->mbox_chan, msg);
ctx               162 drivers/hwmon/xgene-hwmon.c 		dev_err(ctx->dev, "Mailbox send error %d\n", rc);
ctx               165 drivers/hwmon/xgene-hwmon.c 	if (!wait_for_completion_timeout(&ctx->rd_complete,
ctx               166 drivers/hwmon/xgene-hwmon.c 					 usecs_to_jiffies(ctx->usecs_lat))) {
ctx               167 drivers/hwmon/xgene-hwmon.c 		dev_err(ctx->dev, "Mailbox operation timed out\n");
ctx               173 drivers/hwmon/xgene-hwmon.c 	if (MSG_TYPE(ctx->sync_msg.msg) == MSG_TYPE_ERR) {
ctx               178 drivers/hwmon/xgene-hwmon.c 	msg[0] = ctx->sync_msg.msg;
ctx               179 drivers/hwmon/xgene-hwmon.c 	msg[1] = ctx->sync_msg.param1;
ctx               180 drivers/hwmon/xgene-hwmon.c 	msg[2] = ctx->sync_msg.param2;
ctx               183 drivers/hwmon/xgene-hwmon.c 	mbox_chan_txdone(ctx->mbox_chan, 0);
ctx               184 drivers/hwmon/xgene-hwmon.c 	ctx->resp_pending = false;
ctx               185 drivers/hwmon/xgene-hwmon.c 	mutex_unlock(&ctx->rd_mutex);
ctx               189 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_rd(struct xgene_hwmon_dev *ctx, u32 *msg)
ctx               193 drivers/hwmon/xgene-hwmon.c 	mutex_lock(&ctx->rd_mutex);
ctx               194 drivers/hwmon/xgene-hwmon.c 	init_completion(&ctx->rd_complete);
ctx               195 drivers/hwmon/xgene-hwmon.c 	ctx->resp_pending = true;
ctx               197 drivers/hwmon/xgene-hwmon.c 	rc = mbox_send_message(ctx->mbox_chan, msg);
ctx               199 drivers/hwmon/xgene-hwmon.c 		dev_err(ctx->dev, "Mailbox send error %d\n", rc);
ctx               203 drivers/hwmon/xgene-hwmon.c 	if (!wait_for_completion_timeout(&ctx->rd_complete,
ctx               205 drivers/hwmon/xgene-hwmon.c 		dev_err(ctx->dev, "Mailbox operation timed out\n");
ctx               211 drivers/hwmon/xgene-hwmon.c 	if (MSG_TYPE(ctx->sync_msg.msg) == MSG_TYPE_ERR) {
ctx               216 drivers/hwmon/xgene-hwmon.c 	msg[0] = ctx->sync_msg.msg;
ctx               217 drivers/hwmon/xgene-hwmon.c 	msg[1] = ctx->sync_msg.param1;
ctx               218 drivers/hwmon/xgene-hwmon.c 	msg[2] = ctx->sync_msg.param2;
ctx               221 drivers/hwmon/xgene-hwmon.c 	ctx->resp_pending = false;
ctx               222 drivers/hwmon/xgene-hwmon.c 	mutex_unlock(&ctx->rd_mutex);
ctx               226 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_reg_map_rd(struct xgene_hwmon_dev *ctx, u32 addr,
ctx               237 drivers/hwmon/xgene-hwmon.c 		rc = xgene_hwmon_rd(ctx, msg);
ctx               239 drivers/hwmon/xgene-hwmon.c 		rc = xgene_hwmon_pcc_rd(ctx, msg);
ctx               255 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_get_notification_msg(struct xgene_hwmon_dev *ctx,
ctx               265 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_pcc_rd(ctx, msg);
ctx               276 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_get_cpu_pwr(struct xgene_hwmon_dev *ctx, u32 *val)
ctx               281 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_reg_map_rd(ctx, PMD_PWR_REG, &watt);
ctx               285 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_reg_map_rd(ctx, PMD_PWR_MW_REG, &mwatt);
ctx               293 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_get_io_pwr(struct xgene_hwmon_dev *ctx, u32 *val)
ctx               298 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_reg_map_rd(ctx, SOC_PWR_REG, &watt);
ctx               302 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_reg_map_rd(ctx, SOC_PWR_MW_REG, &mwatt);
ctx               310 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_get_temp(struct xgene_hwmon_dev *ctx, u32 *val)
ctx               312 drivers/hwmon/xgene-hwmon.c 	return xgene_hwmon_reg_map_rd(ctx, SOC_TEMP_REG, val);
ctx               322 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = dev_get_drvdata(dev);
ctx               326 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_get_temp(ctx, &val);
ctx               346 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = dev_get_drvdata(dev);
ctx               348 drivers/hwmon/xgene-hwmon.c 	return snprintf(buf, PAGE_SIZE, "%d\n", ctx->temp_critical_alarm);
ctx               369 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = dev_get_drvdata(dev);
ctx               373 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_get_cpu_pwr(ctx, &val);
ctx               384 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = dev_get_drvdata(dev);
ctx               388 drivers/hwmon/xgene-hwmon.c 	rc = xgene_hwmon_get_io_pwr(ctx, &val);
ctx               416 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_tpc_alarm(struct xgene_hwmon_dev *ctx,
ctx               419 drivers/hwmon/xgene-hwmon.c 	ctx->temp_critical_alarm = !!amsg->param2;
ctx               420 drivers/hwmon/xgene-hwmon.c 	sysfs_notify(&ctx->dev->kobj, NULL, "temp1_critical_alarm");
ctx               425 drivers/hwmon/xgene-hwmon.c static void xgene_hwmon_process_pwrmsg(struct xgene_hwmon_dev *ctx,
ctx               430 drivers/hwmon/xgene-hwmon.c 		xgene_hwmon_tpc_alarm(ctx, amsg);
ctx               439 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx;
ctx               442 drivers/hwmon/xgene-hwmon.c 	ctx = container_of(work, struct xgene_hwmon_dev, workq);
ctx               443 drivers/hwmon/xgene-hwmon.c 	while (kfifo_out_spinlocked(&ctx->async_msg_fifo, &amsg,
ctx               445 drivers/hwmon/xgene-hwmon.c 				    &ctx->kfifo_lock)) {
ctx               451 drivers/hwmon/xgene-hwmon.c 			ret = xgene_hwmon_get_notification_msg(ctx,
ctx               458 drivers/hwmon/xgene-hwmon.c 			xgene_hwmon_process_pwrmsg(ctx, &amsg);
ctx               462 drivers/hwmon/xgene-hwmon.c static int xgene_hwmon_rx_ready(struct xgene_hwmon_dev *ctx, void *msg)
ctx               464 drivers/hwmon/xgene-hwmon.c 	if (IS_ERR_OR_NULL(ctx->hwmon_dev) && !ctx->resp_pending) {
ctx               466 drivers/hwmon/xgene-hwmon.c 		kfifo_in_spinlocked(&ctx->async_msg_fifo, msg,
ctx               468 drivers/hwmon/xgene-hwmon.c 				    &ctx->kfifo_lock);
ctx               480 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = to_xgene_hwmon_dev(cl);
ctx               488 drivers/hwmon/xgene-hwmon.c 	if (xgene_hwmon_rx_ready(ctx, msg) < 0)
ctx               501 drivers/hwmon/xgene-hwmon.c 	if (ctx->resp_pending &&
ctx               508 drivers/hwmon/xgene-hwmon.c 		ctx->sync_msg.msg = ((u32 *)msg)[0];
ctx               509 drivers/hwmon/xgene-hwmon.c 		ctx->sync_msg.param1 = ((u32 *)msg)[1];
ctx               510 drivers/hwmon/xgene-hwmon.c 		ctx->sync_msg.param2 = ((u32 *)msg)[2];
ctx               513 drivers/hwmon/xgene-hwmon.c 		complete(&ctx->rd_complete);
ctx               519 drivers/hwmon/xgene-hwmon.c 	kfifo_in_spinlocked(&ctx->async_msg_fifo, msg,
ctx               520 drivers/hwmon/xgene-hwmon.c 			    sizeof(struct slimpro_resp_msg), &ctx->kfifo_lock);
ctx               522 drivers/hwmon/xgene-hwmon.c 	schedule_work(&ctx->workq);
ctx               530 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = to_xgene_hwmon_dev(cl);
ctx               531 drivers/hwmon/xgene-hwmon.c 	struct acpi_pcct_shared_memory *generic_comm_base = ctx->pcc_comm_addr;
ctx               540 drivers/hwmon/xgene-hwmon.c 	if (xgene_hwmon_rx_ready(ctx, &amsg) < 0)
ctx               559 drivers/hwmon/xgene-hwmon.c 	if (ctx->resp_pending &&
ctx               569 drivers/hwmon/xgene-hwmon.c 			ctx->sync_msg.msg = ((u32 *)msg)[0];
ctx               570 drivers/hwmon/xgene-hwmon.c 			ctx->sync_msg.param1 = ((u32 *)msg)[1];
ctx               571 drivers/hwmon/xgene-hwmon.c 			ctx->sync_msg.param2 = ((u32 *)msg)[2];
ctx               574 drivers/hwmon/xgene-hwmon.c 			complete(&ctx->rd_complete);
ctx               588 drivers/hwmon/xgene-hwmon.c 	kfifo_in_spinlocked(&ctx->async_msg_fifo, &amsg,
ctx               589 drivers/hwmon/xgene-hwmon.c 			    sizeof(struct slimpro_resp_msg), &ctx->kfifo_lock);
ctx               591 drivers/hwmon/xgene-hwmon.c 	schedule_work(&ctx->workq);
ctx               616 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx;
ctx               620 drivers/hwmon/xgene-hwmon.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               621 drivers/hwmon/xgene-hwmon.c 	if (!ctx)
ctx               624 drivers/hwmon/xgene-hwmon.c 	ctx->dev = &pdev->dev;
ctx               625 drivers/hwmon/xgene-hwmon.c 	platform_set_drvdata(pdev, ctx);
ctx               626 drivers/hwmon/xgene-hwmon.c 	cl = &ctx->mbox_client;
ctx               628 drivers/hwmon/xgene-hwmon.c 	spin_lock_init(&ctx->kfifo_lock);
ctx               629 drivers/hwmon/xgene-hwmon.c 	mutex_init(&ctx->rd_mutex);
ctx               631 drivers/hwmon/xgene-hwmon.c 	rc = kfifo_alloc(&ctx->async_msg_fifo,
ctx               637 drivers/hwmon/xgene-hwmon.c 	INIT_WORK(&ctx->workq, xgene_hwmon_evt_work);
ctx               647 drivers/hwmon/xgene-hwmon.c 		ctx->mbox_chan = mbox_request_channel(cl, 0);
ctx               648 drivers/hwmon/xgene-hwmon.c 		if (IS_ERR(ctx->mbox_chan)) {
ctx               667 drivers/hwmon/xgene-hwmon.c 					     &ctx->mbox_idx)) {
ctx               674 drivers/hwmon/xgene-hwmon.c 		ctx->mbox_chan = pcc_mbox_request_channel(cl, ctx->mbox_idx);
ctx               675 drivers/hwmon/xgene-hwmon.c 		if (IS_ERR(ctx->mbox_chan)) {
ctx               688 drivers/hwmon/xgene-hwmon.c 		cppc_ss = ctx->mbox_chan->con_priv;
ctx               695 drivers/hwmon/xgene-hwmon.c 		if (!ctx->mbox_chan->mbox->txdone_irq) {
ctx               705 drivers/hwmon/xgene-hwmon.c 		ctx->comm_base_addr = cppc_ss->base_address;
ctx               706 drivers/hwmon/xgene-hwmon.c 		if (ctx->comm_base_addr) {
ctx               708 drivers/hwmon/xgene-hwmon.c 				ctx->pcc_comm_addr = (void __force *)ioremap(
ctx               709 drivers/hwmon/xgene-hwmon.c 							ctx->comm_base_addr,
ctx               712 drivers/hwmon/xgene-hwmon.c 				ctx->pcc_comm_addr = memremap(
ctx               713 drivers/hwmon/xgene-hwmon.c 							ctx->comm_base_addr,
ctx               722 drivers/hwmon/xgene-hwmon.c 		if (!ctx->pcc_comm_addr) {
ctx               734 drivers/hwmon/xgene-hwmon.c 		ctx->usecs_lat = PCC_NUM_RETRIES * cppc_ss->latency;
ctx               737 drivers/hwmon/xgene-hwmon.c 	ctx->hwmon_dev = hwmon_device_register_with_groups(ctx->dev,
ctx               739 drivers/hwmon/xgene-hwmon.c 							   ctx,
ctx               741 drivers/hwmon/xgene-hwmon.c 	if (IS_ERR(ctx->hwmon_dev)) {
ctx               743 drivers/hwmon/xgene-hwmon.c 		rc = PTR_ERR(ctx->hwmon_dev);
ctx               750 drivers/hwmon/xgene-hwmon.c 	schedule_work(&ctx->workq);
ctx               758 drivers/hwmon/xgene-hwmon.c 		mbox_free_channel(ctx->mbox_chan);
ctx               760 drivers/hwmon/xgene-hwmon.c 		pcc_mbox_free_channel(ctx->mbox_chan);
ctx               762 drivers/hwmon/xgene-hwmon.c 	kfifo_free(&ctx->async_msg_fifo);
ctx               769 drivers/hwmon/xgene-hwmon.c 	struct xgene_hwmon_dev *ctx = platform_get_drvdata(pdev);
ctx               771 drivers/hwmon/xgene-hwmon.c 	hwmon_device_unregister(ctx->hwmon_dev);
ctx               772 drivers/hwmon/xgene-hwmon.c 	kfifo_free(&ctx->async_msg_fifo);
ctx               774 drivers/hwmon/xgene-hwmon.c 		mbox_free_channel(ctx->mbox_chan);
ctx               776 drivers/hwmon/xgene-hwmon.c 		pcc_mbox_free_channel(ctx->mbox_chan);
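
xgene_hwmon_rd() and xgene_hwmon_pcc_rd() above wrap an asynchronous mailbox in a synchronous call: take a mutex so only one request is in flight, mark a response pending, send, then block on a completion that the rx callback fires after copying the reply into ctx->sync_msg. A minimal sketch of that request/response handshake, with illustrative names (struct demo_dev, demo_read, demo_rx) and an assumed 100 ms timeout:

#include <linux/completion.h>
#include <linux/mutex.h>
#include <linux/jiffies.h>
#include <linux/errno.h>
#include <linux/types.h>

struct demo_dev {
	struct mutex rd_mutex;          /* one outstanding request at a time */
	struct completion rd_complete;  /* fired by the rx callback */
	bool resp_pending;              /* lets rx tell replies from events */
	u32 resp[3];
};

static int demo_read(struct demo_dev *d, u32 *msg)
{
	int rc = 0;

	mutex_lock(&d->rd_mutex);
	init_completion(&d->rd_complete);
	d->resp_pending = true;

	/* ... hand msg to the controller here, e.g. mbox_send_message() ... */

	if (!wait_for_completion_timeout(&d->rd_complete,
					 msecs_to_jiffies(100))) {
		rc = -ETIMEDOUT;
		goto out;
	}
	msg[0] = d->resp[0];
	msg[1] = d->resp[1];
	msg[2] = d->resp[2];
out:
	d->resp_pending = false;
	mutex_unlock(&d->rd_mutex);
	return rc;
}

/* rx side, called from the mailbox client's rx_callback */
static void demo_rx(struct demo_dev *d, const u32 *msg)
{
	if (!d->resp_pending)
		return;         /* unsolicited event: queue to a kfifo instead */

	d->resp[0] = msg[0];
	d->resp[1] = msg[1];
	d->resp[2] = msg[2];
	complete(&d->rd_complete);
}

The resp_pending flag is what lets the shared rx path split synchronous replies from asynchronous alarm/power notifications, which the driver parks in a kfifo and drains from a workqueue.
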
ctx               141 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct slimpro_i2c_dev *ctx = to_slimpro_i2c_dev(cl);
ctx               149 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (ctx->resp_msg)
ctx               150 drivers/i2c/busses/i2c-xgene-slimpro.c 		*ctx->resp_msg = ((u32 *)mssg)[1];
ctx               152 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (ctx->mbox_client.tx_block)
ctx               153 drivers/i2c/busses/i2c-xgene-slimpro.c 		complete(&ctx->rd_complete);
ctx               158 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct slimpro_i2c_dev *ctx = to_slimpro_i2c_dev(cl);
ctx               159 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct acpi_pcct_shared_memory *generic_comm_base = ctx->pcc_comm_addr;
ctx               171 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (ctx->resp_msg)
ctx               172 drivers/i2c/busses/i2c-xgene-slimpro.c 			*ctx->resp_msg = ((u32 *)msg)[1];
ctx               174 drivers/i2c/busses/i2c-xgene-slimpro.c 		complete(&ctx->rd_complete);
ctx               178 drivers/i2c/busses/i2c-xgene-slimpro.c static void slimpro_i2c_pcc_tx_prepare(struct slimpro_i2c_dev *ctx, u32 *msg)
ctx               180 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct acpi_pcct_shared_memory *generic_comm_base = ctx->pcc_comm_addr;
ctx               186 drivers/i2c/busses/i2c-xgene-slimpro.c 		   cpu_to_le32(PCC_SIGNATURE | ctx->mbox_idx));
ctx               200 drivers/i2c/busses/i2c-xgene-slimpro.c static int start_i2c_msg_xfer(struct slimpro_i2c_dev *ctx)
ctx               202 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (ctx->mbox_client.tx_block || !acpi_disabled) {
ctx               203 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (!wait_for_completion_timeout(&ctx->rd_complete,
ctx               209 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (*ctx->resp_msg == 0xffffffff)
ctx               215 drivers/i2c/busses/i2c-xgene-slimpro.c static int slimpro_i2c_send_msg(struct slimpro_i2c_dev *ctx,
ctx               221 drivers/i2c/busses/i2c-xgene-slimpro.c 	ctx->resp_msg = data;
ctx               224 drivers/i2c/busses/i2c-xgene-slimpro.c 		reinit_completion(&ctx->rd_complete);
ctx               225 drivers/i2c/busses/i2c-xgene-slimpro.c 		slimpro_i2c_pcc_tx_prepare(ctx, msg);
ctx               228 drivers/i2c/busses/i2c-xgene-slimpro.c 	rc = mbox_send_message(ctx->mbox_chan, msg);
ctx               232 drivers/i2c/busses/i2c-xgene-slimpro.c 	rc = start_i2c_msg_xfer(ctx);
ctx               236 drivers/i2c/busses/i2c-xgene-slimpro.c 		mbox_chan_txdone(ctx->mbox_chan, 0);
ctx               238 drivers/i2c/busses/i2c-xgene-slimpro.c 	ctx->resp_msg = NULL;
ctx               243 drivers/i2c/busses/i2c-xgene-slimpro.c static int slimpro_i2c_rd(struct slimpro_i2c_dev *ctx, u32 chip,
ctx               254 drivers/i2c/busses/i2c-xgene-slimpro.c 	return slimpro_i2c_send_msg(ctx, msg, data);
ctx               257 drivers/i2c/busses/i2c-xgene-slimpro.c static int slimpro_i2c_wr(struct slimpro_i2c_dev *ctx, u32 chip,
ctx               268 drivers/i2c/busses/i2c-xgene-slimpro.c 	return slimpro_i2c_send_msg(ctx, msg, msg);
ctx               271 drivers/i2c/busses/i2c-xgene-slimpro.c static int slimpro_i2c_blkrd(struct slimpro_i2c_dev *ctx, u32 chip, u32 addr,
ctx               279 drivers/i2c/busses/i2c-xgene-slimpro.c 	paddr = dma_map_single(ctx->dev, ctx->dma_buffer, readlen, DMA_FROM_DEVICE);
ctx               280 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (dma_mapping_error(ctx->dev, paddr)) {
ctx               281 drivers/i2c/busses/i2c-xgene-slimpro.c 		dev_err(&ctx->adapter.dev, "Error in mapping dma buffer %p\n",
ctx               282 drivers/i2c/busses/i2c-xgene-slimpro.c 			ctx->dma_buffer);
ctx               294 drivers/i2c/busses/i2c-xgene-slimpro.c 	rc = slimpro_i2c_send_msg(ctx, msg, msg);
ctx               297 drivers/i2c/busses/i2c-xgene-slimpro.c 	memcpy(data, ctx->dma_buffer, readlen);
ctx               299 drivers/i2c/busses/i2c-xgene-slimpro.c 	dma_unmap_single(ctx->dev, paddr, readlen, DMA_FROM_DEVICE);
ctx               303 drivers/i2c/busses/i2c-xgene-slimpro.c static int slimpro_i2c_blkwr(struct slimpro_i2c_dev *ctx, u32 chip,
ctx               311 drivers/i2c/busses/i2c-xgene-slimpro.c 	memcpy(ctx->dma_buffer, data, writelen);
ctx               312 drivers/i2c/busses/i2c-xgene-slimpro.c 	paddr = dma_map_single(ctx->dev, ctx->dma_buffer, writelen,
ctx               314 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (dma_mapping_error(ctx->dev, paddr)) {
ctx               315 drivers/i2c/busses/i2c-xgene-slimpro.c 		dev_err(&ctx->adapter.dev, "Error in mapping dma buffer %p\n",
ctx               316 drivers/i2c/busses/i2c-xgene-slimpro.c 			ctx->dma_buffer);
ctx               327 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (ctx->mbox_client.tx_block)
ctx               328 drivers/i2c/busses/i2c-xgene-slimpro.c 		reinit_completion(&ctx->rd_complete);
ctx               330 drivers/i2c/busses/i2c-xgene-slimpro.c 	rc = slimpro_i2c_send_msg(ctx, msg, msg);
ctx               332 drivers/i2c/busses/i2c-xgene-slimpro.c 	dma_unmap_single(ctx->dev, paddr, writelen, DMA_TO_DEVICE);
ctx               341 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct slimpro_i2c_dev *ctx = i2c_get_adapdata(adap);
ctx               348 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_rd(ctx, addr, 0, 0,
ctx               353 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_wr(ctx, addr, command, SMBUS_CMD_LEN,
ctx               360 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_rd(ctx, addr, command, SMBUS_CMD_LEN,
ctx               366 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_wr(ctx, addr, command, SMBUS_CMD_LEN,
ctx               373 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_rd(ctx, addr, command, SMBUS_CMD_LEN,
ctx               379 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_wr(ctx, addr, command, SMBUS_CMD_LEN,
ctx               386 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_blkrd(ctx, addr, command,
ctx               394 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_blkwr(ctx, addr, command,
ctx               403 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_blkrd(ctx, addr,
ctx               411 drivers/i2c/busses/i2c-xgene-slimpro.c 			ret = slimpro_i2c_blkwr(ctx, addr, command,
ctx               443 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct slimpro_i2c_dev *ctx;
ctx               448 drivers/i2c/busses/i2c-xgene-slimpro.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               449 drivers/i2c/busses/i2c-xgene-slimpro.c 	if (!ctx)
ctx               452 drivers/i2c/busses/i2c-xgene-slimpro.c 	ctx->dev = &pdev->dev;
ctx               453 drivers/i2c/busses/i2c-xgene-slimpro.c 	platform_set_drvdata(pdev, ctx);
ctx               454 drivers/i2c/busses/i2c-xgene-slimpro.c 	cl = &ctx->mbox_client;
ctx               458 drivers/i2c/busses/i2c-xgene-slimpro.c 	init_completion(&ctx->rd_complete);
ctx               464 drivers/i2c/busses/i2c-xgene-slimpro.c 		ctx->mbox_chan = mbox_request_channel(cl, MAILBOX_I2C_INDEX);
ctx               465 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (IS_ERR(ctx->mbox_chan)) {
ctx               467 drivers/i2c/busses/i2c-xgene-slimpro.c 			return PTR_ERR(ctx->mbox_chan);
ctx               482 drivers/i2c/busses/i2c-xgene-slimpro.c 					     &ctx->mbox_idx))
ctx               483 drivers/i2c/busses/i2c-xgene-slimpro.c 			ctx->mbox_idx = MAILBOX_I2C_INDEX;
ctx               487 drivers/i2c/busses/i2c-xgene-slimpro.c 		ctx->mbox_chan = pcc_mbox_request_channel(cl, ctx->mbox_idx);
ctx               488 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (IS_ERR(ctx->mbox_chan)) {
ctx               490 drivers/i2c/busses/i2c-xgene-slimpro.c 			return PTR_ERR(ctx->mbox_chan);
ctx               499 drivers/i2c/busses/i2c-xgene-slimpro.c 		cppc_ss = ctx->mbox_chan->con_priv;
ctx               506 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (!ctx->mbox_chan->mbox->txdone_irq) {
ctx               516 drivers/i2c/busses/i2c-xgene-slimpro.c 		ctx->comm_base_addr = cppc_ss->base_address;
ctx               517 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (ctx->comm_base_addr) {
ctx               519 drivers/i2c/busses/i2c-xgene-slimpro.c 				ctx->pcc_comm_addr = memremap(
ctx               520 drivers/i2c/busses/i2c-xgene-slimpro.c 							ctx->comm_base_addr,
ctx               524 drivers/i2c/busses/i2c-xgene-slimpro.c 				ctx->pcc_comm_addr = memremap(
ctx               525 drivers/i2c/busses/i2c-xgene-slimpro.c 							ctx->comm_base_addr,
ctx               534 drivers/i2c/busses/i2c-xgene-slimpro.c 		if (!ctx->pcc_comm_addr) {
ctx               546 drivers/i2c/busses/i2c-xgene-slimpro.c 	adapter = &ctx->adapter;
ctx               553 drivers/i2c/busses/i2c-xgene-slimpro.c 	i2c_set_adapdata(adapter, ctx);
ctx               563 drivers/i2c/busses/i2c-xgene-slimpro.c 		mbox_free_channel(ctx->mbox_chan);
ctx               565 drivers/i2c/busses/i2c-xgene-slimpro.c 		pcc_mbox_free_channel(ctx->mbox_chan);
ctx               572 drivers/i2c/busses/i2c-xgene-slimpro.c 	struct slimpro_i2c_dev *ctx = platform_get_drvdata(pdev);
ctx               574 drivers/i2c/busses/i2c-xgene-slimpro.c 	i2c_del_adapter(&ctx->adapter);
ctx               577 drivers/i2c/busses/i2c-xgene-slimpro.c 		mbox_free_channel(ctx->mbox_chan);
ctx               579 drivers/i2c/busses/i2c-xgene-slimpro.c 		pcc_mbox_free_channel(ctx->mbox_chan);
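
slimpro_i2c_blkrd()/blkwr() above never hand the caller's buffer to hardware directly: data is staged through a driver-owned DMA-safe bounce buffer whose bus address travels in the mailbox message. A sketch of the write half, assuming a hypothetical demo_blkwr() and leaving the actual message send as a comment:

#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/string.h>
#include <linux/errno.h>
#include <linux/types.h>

static int demo_blkwr(struct device *dev, void *bounce,
		      const void *data, size_t len)
{
	dma_addr_t paddr;
	int rc = 0;

	memcpy(bounce, data, len);      /* stage into the DMA-safe buffer */

	paddr = dma_map_single(dev, bounce, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, paddr))
		return -ENOMEM;         /* mapping must be checked, as above */

	/* ... encode paddr into the mailbox message and send it ... */

	dma_unmap_single(dev, paddr, len, DMA_TO_DEVICE);
	return rc;
}

The read half runs the same steps with DMA_FROM_DEVICE and copies out of the bounce buffer only after the firmware has answered.
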
ctx               116 drivers/iio/adc/envelope-detector.c static irqreturn_t envelope_detector_comp_isr(int irq, void *ctx)
ctx               118 drivers/iio/adc/envelope-detector.c 	struct envelope *env = ctx;
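
envelope_detector_comp_isr() here and pwm-fan's pulse_handler() above lean on the same convention: the dev_id cookie registered with (devm_)request_irq() comes back verbatim as the handler's void * argument. A minimal sketch with illustrative names:

#include <linux/interrupt.h>

struct demo_state { int hits; };

static irqreturn_t demo_isr(int irq, void *dev_id)
{
	struct demo_state *st = dev_id; /* cookie passed at request_irq() time */

	st->hits++;
	return IRQ_HANDLED;
}
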
ctx                26 drivers/iio/chemical/bme680_spi.c 	struct bme680_spi_bus_context *ctx, u8 reg)
ctx                28 drivers/iio/chemical/bme680_spi.c 	struct spi_device *spi = ctx->spi;
ctx                33 drivers/iio/chemical/bme680_spi.c 	if (page == ctx->current_page)
ctx                59 drivers/iio/chemical/bme680_spi.c 	ctx->current_page = page;
ctx                67 drivers/iio/chemical/bme680_spi.c 	struct bme680_spi_bus_context *ctx = context;
ctx                68 drivers/iio/chemical/bme680_spi.c 	struct spi_device *spi = ctx->spi;
ctx                74 drivers/iio/chemical/bme680_spi.c 	ret = bme680_regmap_spi_select_page(ctx, buf[0]);
ctx                90 drivers/iio/chemical/bme680_spi.c 	struct bme680_spi_bus_context *ctx = context;
ctx                91 drivers/iio/chemical/bme680_spi.c 	struct spi_device *spi = ctx->spi;
ctx                95 drivers/iio/chemical/bme680_spi.c 	ret = bme680_regmap_spi_select_page(ctx, addr);
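
The bme680_spi hits show a cached page-select: the chip splits its register map across two SPI pages, so every regmap access first derives the page from the register address and only touches the bus when the cached page differs. A sketch of that cache, assuming the high-address-means-page-0 split and hypothetical names (demo_spi_ctx, demo_select_page):

#include <linux/spi/spi.h>
#include <linux/types.h>

struct demo_spi_ctx {
	struct spi_device *spi;
	u8 current_page;        /* last page written to the status register */
};

static int demo_select_page(struct demo_spi_ctx *ctx, u8 reg)
{
	u8 page = (reg & 0x80) ? 0 : 1; /* assumed address-to-page mapping */

	if (page == ctx->current_page)
		return 0;               /* skip the redundant bus round trip */

	/* ... spi_write() the page bit into the chip's status register ... */

	ctx->current_page = page;
	return 0;
}

Skipping the write when the page is unchanged avoids a bus round trip for runs of same-page accesses, which is the whole point of keeping this state in the bus context.
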
ctx               815 drivers/infiniband/core/addr.c 	struct resolve_cb_context ctx;
ctx               829 drivers/infiniband/core/addr.c 	init_completion(&ctx.comp);
ctx               832 drivers/infiniband/core/addr.c 			      resolve_cb, true, &ctx);
ctx               836 drivers/infiniband/core/addr.c 	wait_for_completion(&ctx.comp);
ctx               838 drivers/infiniband/core/addr.c 	ret = ctx.status;
ctx               848 drivers/infiniband/core/addr.c 	void *ctx)
ctx               853 drivers/infiniband/core/addr.c 		struct neighbour *neigh = ctx;
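
The addr.c lines use a stack-allocated wait context: the caller embeds a completion plus a status slot, passes its address as the callback cookie, and sleeps until the callback stores the result and completes. A minimal sketch under hypothetical names (demo_cb_ctx, demo_resolve_sync); the async kick-off itself is left as a comment:

#include <linux/completion.h>

struct demo_cb_ctx {
	struct completion comp;
	int status;
};

static void demo_resolve_cb(int status, void *context)
{
	struct demo_cb_ctx *ctx = context;

	ctx->status = status;           /* publish the result first */
	complete(&ctx->comp);           /* then wake the waiter */
}

static int demo_resolve_sync(void)
{
	struct demo_cb_ctx ctx;

	init_completion(&ctx.comp);
	/* ... start the async resolve, handing it demo_resolve_cb and &ctx ... */
	wait_for_completion(&ctx.comp); /* callback fires exactly once */
	return ctx.status;
}

Keeping the context on the stack is safe precisely because the caller cannot return before the callback has completed it.
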
ctx               118 drivers/infiniband/core/rw.c static int rdma_rw_init_mr_wrs(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               127 drivers/infiniband/core/rw.c 	ctx->nr_ops = (sg_cnt + pages_per_mr - 1) / pages_per_mr;
ctx               128 drivers/infiniband/core/rw.c 	ctx->reg = kcalloc(ctx->nr_ops, sizeof(*ctx->reg), GFP_KERNEL);
ctx               129 drivers/infiniband/core/rw.c 	if (!ctx->reg) {
ctx               134 drivers/infiniband/core/rw.c 	for (i = 0; i < ctx->nr_ops; i++) {
ctx               135 drivers/infiniband/core/rw.c 		struct rdma_rw_reg_ctx *reg = &ctx->reg[i];
ctx               178 drivers/infiniband/core/rw.c 	ctx->type = RDMA_RW_MR;
ctx               183 drivers/infiniband/core/rw.c 		ib_mr_pool_put(qp, &qp->rdma_mrs, ctx->reg[i].mr);
ctx               184 drivers/infiniband/core/rw.c 	kfree(ctx->reg);
ctx               189 drivers/infiniband/core/rw.c static int rdma_rw_init_map_wrs(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               198 drivers/infiniband/core/rw.c 	ctx->nr_ops = DIV_ROUND_UP(sg_cnt, max_sge);
ctx               200 drivers/infiniband/core/rw.c 	ctx->map.sges = sge = kcalloc(sg_cnt, sizeof(*sge), GFP_KERNEL);
ctx               201 drivers/infiniband/core/rw.c 	if (!ctx->map.sges)
ctx               204 drivers/infiniband/core/rw.c 	ctx->map.wrs = kcalloc(ctx->nr_ops, sizeof(*ctx->map.wrs), GFP_KERNEL);
ctx               205 drivers/infiniband/core/rw.c 	if (!ctx->map.wrs)
ctx               208 drivers/infiniband/core/rw.c 	for (i = 0; i < ctx->nr_ops; i++) {
ctx               209 drivers/infiniband/core/rw.c 		struct ib_rdma_wr *rdma_wr = &ctx->map.wrs[i];
ctx               232 drivers/infiniband/core/rw.c 		rdma_wr->wr.next = i + 1 < ctx->nr_ops ?
ctx               233 drivers/infiniband/core/rw.c 			&ctx->map.wrs[i + 1].wr : NULL;
ctx               236 drivers/infiniband/core/rw.c 	ctx->type = RDMA_RW_MULTI_WR;
ctx               237 drivers/infiniband/core/rw.c 	return ctx->nr_ops;
ctx               240 drivers/infiniband/core/rw.c 	kfree(ctx->map.sges);
ctx               245 drivers/infiniband/core/rw.c static int rdma_rw_init_single_wr(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               249 drivers/infiniband/core/rw.c 	struct ib_rdma_wr *rdma_wr = &ctx->single.wr;
ctx               251 drivers/infiniband/core/rw.c 	ctx->nr_ops = 1;
ctx               253 drivers/infiniband/core/rw.c 	ctx->single.sge.lkey = qp->pd->local_dma_lkey;
ctx               254 drivers/infiniband/core/rw.c 	ctx->single.sge.addr = sg_dma_address(sg) + offset;
ctx               255 drivers/infiniband/core/rw.c 	ctx->single.sge.length = sg_dma_len(sg) - offset;
ctx               262 drivers/infiniband/core/rw.c 	rdma_wr->wr.sg_list = &ctx->single.sge;
ctx               267 drivers/infiniband/core/rw.c 	ctx->type = RDMA_RW_SINGLE_WR;
ctx               303 drivers/infiniband/core/rw.c int rdma_rw_ctx_init(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx               334 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_mr_wrs(ctx, qp, port_num, sg, sg_cnt,
ctx               337 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_map_wrs(ctx, qp, sg, sg_cnt, sg_offset,
ctx               340 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_single_wr(ctx, qp, sg, sg_offset,
ctx               371 drivers/infiniband/core/rw.c int rdma_rw_ctx_signature_init(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               403 drivers/infiniband/core/rw.c 	ctx->type = RDMA_RW_SIG_MR;
ctx               404 drivers/infiniband/core/rw.c 	ctx->nr_ops = 1;
ctx               405 drivers/infiniband/core/rw.c 	ctx->reg = kcalloc(1, sizeof(*ctx->reg), GFP_KERNEL);
ctx               406 drivers/infiniband/core/rw.c 	if (!ctx->reg) {
ctx               411 drivers/infiniband/core/rw.c 	ctx->reg->mr = ib_mr_pool_get(qp, &qp->sig_mrs);
ctx               412 drivers/infiniband/core/rw.c 	if (!ctx->reg->mr) {
ctx               417 drivers/infiniband/core/rw.c 	count += rdma_rw_inv_key(ctx->reg);
ctx               419 drivers/infiniband/core/rw.c 	memcpy(ctx->reg->mr->sig_attrs, sig_attrs, sizeof(struct ib_sig_attrs));
ctx               421 drivers/infiniband/core/rw.c 	ret = ib_map_mr_sg_pi(ctx->reg->mr, sg, sg_cnt, NULL, prot_sg,
ctx               428 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.wr.opcode = IB_WR_REG_MR_INTEGRITY;
ctx               429 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.wr.wr_cqe = NULL;
ctx               430 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.wr.num_sge = 0;
ctx               431 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.wr.send_flags = 0;
ctx               432 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.access = IB_ACCESS_LOCAL_WRITE;
ctx               434 drivers/infiniband/core/rw.c 		ctx->reg->reg_wr.access |= IB_ACCESS_REMOTE_WRITE;
ctx               435 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.mr = ctx->reg->mr;
ctx               436 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.key = ctx->reg->mr->lkey;
ctx               439 drivers/infiniband/core/rw.c 	ctx->reg->sge.addr = ctx->reg->mr->iova;
ctx               440 drivers/infiniband/core/rw.c 	ctx->reg->sge.length = ctx->reg->mr->length;
ctx               442 drivers/infiniband/core/rw.c 		ctx->reg->sge.length -= ctx->reg->mr->sig_attrs->meta_length;
ctx               444 drivers/infiniband/core/rw.c 	rdma_wr = &ctx->reg->wr;
ctx               445 drivers/infiniband/core/rw.c 	rdma_wr->wr.sg_list = &ctx->reg->sge;
ctx               453 drivers/infiniband/core/rw.c 	ctx->reg->reg_wr.wr.next = &rdma_wr->wr;
ctx               459 drivers/infiniband/core/rw.c 	ib_mr_pool_put(qp, &qp->sig_mrs, ctx->reg->mr);
ctx               461 drivers/infiniband/core/rw.c 	kfree(ctx->reg);
ctx               499 drivers/infiniband/core/rw.c struct ib_send_wr *rdma_rw_ctx_wrs(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               505 drivers/infiniband/core/rw.c 	switch (ctx->type) {
ctx               509 drivers/infiniband/core/rw.c 		for (i = 0; i < ctx->nr_ops; i++) {
ctx               510 drivers/infiniband/core/rw.c 			rdma_rw_update_lkey(&ctx->reg[i],
ctx               511 drivers/infiniband/core/rw.c 				ctx->reg[i].wr.wr.opcode !=
ctx               515 drivers/infiniband/core/rw.c 		if (ctx->reg[0].inv_wr.next)
ctx               516 drivers/infiniband/core/rw.c 			first_wr = &ctx->reg[0].inv_wr;
ctx               518 drivers/infiniband/core/rw.c 			first_wr = &ctx->reg[0].reg_wr.wr;
ctx               519 drivers/infiniband/core/rw.c 		last_wr = &ctx->reg[ctx->nr_ops - 1].wr.wr;
ctx               522 drivers/infiniband/core/rw.c 		first_wr = &ctx->map.wrs[0].wr;
ctx               523 drivers/infiniband/core/rw.c 		last_wr = &ctx->map.wrs[ctx->nr_ops - 1].wr;
ctx               526 drivers/infiniband/core/rw.c 		first_wr = &ctx->single.wr.wr;
ctx               527 drivers/infiniband/core/rw.c 		last_wr = &ctx->single.wr.wr;
ctx               558 drivers/infiniband/core/rw.c int rdma_rw_ctx_post(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx               563 drivers/infiniband/core/rw.c 	first_wr = rdma_rw_ctx_wrs(ctx, qp, port_num, cqe, chain_wr);
ctx               577 drivers/infiniband/core/rw.c void rdma_rw_ctx_destroy(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx               582 drivers/infiniband/core/rw.c 	switch (ctx->type) {
ctx               584 drivers/infiniband/core/rw.c 		for (i = 0; i < ctx->nr_ops; i++)
ctx               585 drivers/infiniband/core/rw.c 			ib_mr_pool_put(qp, &qp->rdma_mrs, ctx->reg[i].mr);
ctx               586 drivers/infiniband/core/rw.c 		kfree(ctx->reg);
ctx               589 drivers/infiniband/core/rw.c 		kfree(ctx->map.wrs);
ctx               590 drivers/infiniband/core/rw.c 		kfree(ctx->map.sges);
ctx               615 drivers/infiniband/core/rw.c void rdma_rw_ctx_destroy_signature(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx               620 drivers/infiniband/core/rw.c 	if (WARN_ON_ONCE(ctx->type != RDMA_RW_SIG_MR))
ctx               623 drivers/infiniband/core/rw.c 	ib_mr_pool_put(qp, &qp->sig_mrs, ctx->reg->mr);
ctx               624 drivers/infiniband/core/rw.c 	kfree(ctx->reg);
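
rdma_rw_ctx_init() above picks one of three strategies and records it in ctx->type: memory registrations when the transport requires them (RDMA_RW_MR), a chain of RDMA WRs when the SG list exceeds one WR's max_sge (RDMA_RW_MULTI_WR), or a single WR otherwise (RDMA_RW_SINGLE_WR). The op counts on both multi-op paths are plain ceiling divisions; a sketch, where demo_nr_ops is an illustrative name:

#include <linux/kernel.h>
#include <linux/types.h>

static u32 demo_nr_ops(u32 sg_cnt, u32 pages_per_mr, u32 max_sge,
		       bool use_mrs)
{
	/* MR path: one registration per pages_per_mr SGEs; map path: one
	 * RDMA WR per max_sge SGEs.  Both round up so the tail is covered. */
	return use_mrs ? DIV_ROUND_UP(sg_cnt, pages_per_mr)
		       : DIV_ROUND_UP(sg_cnt, max_sge);
}

The open-coded (sg_cnt + pages_per_mr - 1) / pages_per_mr in rdma_rw_init_mr_wrs() is the same ceiling division that DIV_ROUND_UP() spells out on the map path.
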
ctx               109 drivers/infiniband/core/ucma.c 	struct ucma_context	*ctx;
ctx               120 drivers/infiniband/core/ucma.c 	struct ucma_context	*ctx;
ctx               136 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               138 drivers/infiniband/core/ucma.c 	ctx = xa_load(&ctx_table, id);
ctx               139 drivers/infiniband/core/ucma.c 	if (!ctx)
ctx               140 drivers/infiniband/core/ucma.c 		ctx = ERR_PTR(-ENOENT);
ctx               141 drivers/infiniband/core/ucma.c 	else if (ctx->file != file || !ctx->cm_id)
ctx               142 drivers/infiniband/core/ucma.c 		ctx = ERR_PTR(-EINVAL);
ctx               143 drivers/infiniband/core/ucma.c 	return ctx;
ctx               148 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               151 drivers/infiniband/core/ucma.c 	ctx = _ucma_find_context(id, file);
ctx               152 drivers/infiniband/core/ucma.c 	if (!IS_ERR(ctx)) {
ctx               153 drivers/infiniband/core/ucma.c 		if (ctx->closing)
ctx               154 drivers/infiniband/core/ucma.c 			ctx = ERR_PTR(-EIO);
ctx               156 drivers/infiniband/core/ucma.c 			atomic_inc(&ctx->ref);
ctx               159 drivers/infiniband/core/ucma.c 	return ctx;
ctx               162 drivers/infiniband/core/ucma.c static void ucma_put_ctx(struct ucma_context *ctx)
ctx               164 drivers/infiniband/core/ucma.c 	if (atomic_dec_and_test(&ctx->ref))
ctx               165 drivers/infiniband/core/ucma.c 		complete(&ctx->comp);
ctx               174 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx = ucma_get_ctx(file, id);
ctx               176 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               177 drivers/infiniband/core/ucma.c 		return ctx;
ctx               178 drivers/infiniband/core/ucma.c 	if (!ctx->cm_id->device) {
ctx               179 drivers/infiniband/core/ucma.c 		ucma_put_ctx(ctx);
ctx               182 drivers/infiniband/core/ucma.c 	return ctx;
ctx               195 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx = container_of(work, struct ucma_context, close_work);
ctx               201 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               202 drivers/infiniband/core/ucma.c 	wait_for_completion(&ctx->comp);
ctx               204 drivers/infiniband/core/ucma.c 	rdma_destroy_id(ctx->cm_id);
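
ucma_get_ctx()/ucma_put_ctx() above implement the classic refcount-plus-completion teardown: lookups take a reference under the xa_lock, the last put fires a completion, and the destroy path drops its own reference and then sleeps until every concurrent user is gone before calling rdma_destroy_id(). A minimal sketch with illustrative names (demo_ctx, demo_put, demo_destroy):

#include <linux/atomic.h>
#include <linux/completion.h>

struct demo_ctx {
	atomic_t ref;                   /* starts at 1 for the creator */
	struct completion comp;         /* fired when the last ref drops */
};

static void demo_put(struct demo_ctx *ctx)
{
	if (atomic_dec_and_test(&ctx->ref))
		complete(&ctx->comp);
}

static void demo_destroy(struct demo_ctx *ctx)
{
	demo_put(ctx);                   /* drop the creator's reference */
	wait_for_completion(&ctx->comp); /* wait out concurrent users */
	/* now nothing else can touch ctx: free it / destroy the cm_id */
}

Removing the id from the xarray before the final put (as ucma_destroy_id() does with __xa_erase()) guarantees no new lookup can take a reference while the destroyer is waiting.
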
ctx               209 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               211 drivers/infiniband/core/ucma.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               212 drivers/infiniband/core/ucma.c 	if (!ctx)
ctx               215 drivers/infiniband/core/ucma.c 	INIT_WORK(&ctx->close_work, ucma_close_id);
ctx               216 drivers/infiniband/core/ucma.c 	atomic_set(&ctx->ref, 1);
ctx               217 drivers/infiniband/core/ucma.c 	init_completion(&ctx->comp);
ctx               218 drivers/infiniband/core/ucma.c 	INIT_LIST_HEAD(&ctx->mc_list);
ctx               219 drivers/infiniband/core/ucma.c 	ctx->file = file;
ctx               220 drivers/infiniband/core/ucma.c 	mutex_init(&ctx->mutex);
ctx               222 drivers/infiniband/core/ucma.c 	if (xa_alloc(&ctx_table, &ctx->id, ctx, xa_limit_32b, GFP_KERNEL))
ctx               225 drivers/infiniband/core/ucma.c 	list_add_tail(&ctx->list, &file->ctx_list);
ctx               226 drivers/infiniband/core/ucma.c 	return ctx;
ctx               229 drivers/infiniband/core/ucma.c 	kfree(ctx);
ctx               233 drivers/infiniband/core/ucma.c static struct ucma_multicast* ucma_alloc_multicast(struct ucma_context *ctx)
ctx               241 drivers/infiniband/core/ucma.c 	mc->ctx = ctx;
ctx               245 drivers/infiniband/core/ucma.c 	list_add_tail(&mc->list, &ctx->mc_list);
ctx               282 drivers/infiniband/core/ucma.c static void ucma_set_event_context(struct ucma_context *ctx,
ctx               286 drivers/infiniband/core/ucma.c 	uevent->ctx = ctx;
ctx               296 drivers/infiniband/core/ucma.c 		uevent->resp.uid = ctx->uid;
ctx               297 drivers/infiniband/core/ucma.c 		uevent->resp.id = ctx->id;
ctx               305 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx = cm_id->context;
ctx               309 drivers/infiniband/core/ucma.c 	if (ctx->destroying)
ctx               318 drivers/infiniband/core/ucma.c 	if (ctx->cm_id == cm_id) {
ctx               320 drivers/infiniband/core/ucma.c 		ctx->closing = 1;
ctx               322 drivers/infiniband/core/ucma.c 		queue_work(ctx->file->close_wq, &ctx->close_work);
ctx               326 drivers/infiniband/core/ucma.c 	list_for_each_entry(con_req_eve, &ctx->file->event_list, list) {
ctx               331 drivers/infiniband/core/ucma.c 			queue_work(ctx->file->close_wq, &con_req_eve->close_work);
ctx               344 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx = cm_id->context;
ctx               351 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->file->mut);
ctx               353 drivers/infiniband/core/ucma.c 	ucma_set_event_context(ctx, event, uevent);
ctx               364 drivers/infiniband/core/ucma.c 		if (!ctx->backlog) {
ctx               369 drivers/infiniband/core/ucma.c 		ctx->backlog--;
ctx               370 drivers/infiniband/core/ucma.c 	} else if (!ctx->uid || ctx->cm_id != cm_id) {
ctx               386 drivers/infiniband/core/ucma.c 	list_add_tail(&uevent->list, &ctx->file->event_list);
ctx               387 drivers/infiniband/core/ucma.c 	wake_up_interruptible(&ctx->file->poll_wait);
ctx               391 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->file->mut);
ctx               398 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               430 drivers/infiniband/core/ucma.c 		ctx = ucma_alloc_ctx(file);
ctx               431 drivers/infiniband/core/ucma.c 		if (!ctx) {
ctx               435 drivers/infiniband/core/ucma.c 		uevent->ctx->backlog++;
ctx               436 drivers/infiniband/core/ucma.c 		ctx->cm_id = uevent->cm_id;
ctx               437 drivers/infiniband/core/ucma.c 		ctx->cm_id->context = ctx;
ctx               438 drivers/infiniband/core/ucma.c 		uevent->resp.id = ctx->id;
ctx               449 drivers/infiniband/core/ucma.c 	uevent->ctx->events_reported++;
ctx               481 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               497 drivers/infiniband/core/ucma.c 	ctx = ucma_alloc_ctx(file);
ctx               499 drivers/infiniband/core/ucma.c 	if (!ctx)
ctx               502 drivers/infiniband/core/ucma.c 	ctx->uid = cmd.uid;
ctx               504 drivers/infiniband/core/ucma.c 				 ucma_event_handler, ctx, cmd.ps, qp_type, NULL);
ctx               510 drivers/infiniband/core/ucma.c 	resp.id = ctx->id;
ctx               517 drivers/infiniband/core/ucma.c 	ctx->cm_id = cm_id;
ctx               523 drivers/infiniband/core/ucma.c 	xa_erase(&ctx_table, ctx->id);
ctx               525 drivers/infiniband/core/ucma.c 	list_del(&ctx->list);
ctx               527 drivers/infiniband/core/ucma.c 	kfree(ctx);
ctx               531 drivers/infiniband/core/ucma.c static void ucma_cleanup_multicast(struct ucma_context *ctx)
ctx               535 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->file->mut);
ctx               536 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(mc, tmp, &ctx->mc_list, list) {
ctx               541 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->file->mut);
ctx               548 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(uevent, tmp, &mc->ctx->file->event_list, list) {
ctx               568 drivers/infiniband/core/ucma.c static int ucma_free_ctx(struct ucma_context *ctx)
ctx               575 drivers/infiniband/core/ucma.c 	ucma_cleanup_multicast(ctx);
ctx               578 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->file->mut);
ctx               579 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(uevent, tmp, &ctx->file->event_list, list) {
ctx               580 drivers/infiniband/core/ucma.c 		if (uevent->ctx == ctx)
ctx               583 drivers/infiniband/core/ucma.c 	list_del(&ctx->list);
ctx               584 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->file->mut);
ctx               593 drivers/infiniband/core/ucma.c 	events_reported = ctx->events_reported;
ctx               594 drivers/infiniband/core/ucma.c 	mutex_destroy(&ctx->mutex);
ctx               595 drivers/infiniband/core/ucma.c 	kfree(ctx);
ctx               604 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               614 drivers/infiniband/core/ucma.c 	ctx = _ucma_find_context(cmd.id, file);
ctx               615 drivers/infiniband/core/ucma.c 	if (!IS_ERR(ctx))
ctx               616 drivers/infiniband/core/ucma.c 		__xa_erase(&ctx_table, ctx->id);
ctx               619 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               620 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               622 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->file->mut);
ctx               623 drivers/infiniband/core/ucma.c 	ctx->destroying = 1;
ctx               624 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->file->mut);
ctx               626 drivers/infiniband/core/ucma.c 	flush_workqueue(ctx->file->close_wq);
ctx               630 drivers/infiniband/core/ucma.c 	if (!ctx->closing) {
ctx               632 drivers/infiniband/core/ucma.c 		ucma_put_ctx(ctx);
ctx               633 drivers/infiniband/core/ucma.c 		wait_for_completion(&ctx->comp);
ctx               634 drivers/infiniband/core/ucma.c 		rdma_destroy_id(ctx->cm_id);
ctx               639 drivers/infiniband/core/ucma.c 	resp.events_reported = ucma_free_ctx(ctx);
ctx               651 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               660 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx               661 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               662 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               664 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               665 drivers/infiniband/core/ucma.c 	ret = rdma_bind_addr(ctx->cm_id, (struct sockaddr *) &cmd.addr);
ctx               666 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               668 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               676 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               686 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx               687 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               688 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               690 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               691 drivers/infiniband/core/ucma.c 	ret = rdma_bind_addr(ctx->cm_id, (struct sockaddr *) &cmd.addr);
ctx               692 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               693 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               702 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               712 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx               713 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               714 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               716 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               717 drivers/infiniband/core/ucma.c 	ret = rdma_resolve_addr(ctx->cm_id, (struct sockaddr *) &cmd.src_addr,
ctx               719 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               720 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               729 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               740 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx               741 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               742 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               744 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               745 drivers/infiniband/core/ucma.c 	ret = rdma_resolve_addr(ctx->cm_id, (struct sockaddr *) &cmd.src_addr,
ctx               747 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               748 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               757 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               763 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx               764 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               765 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               767 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               768 drivers/infiniband/core/ucma.c 	ret = rdma_resolve_route(ctx->cm_id, cmd.timeout_ms);
ctx               769 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               770 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               844 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx               854 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx               855 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx               856 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx               858 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx               860 drivers/infiniband/core/ucma.c 	addr = (struct sockaddr *) &ctx->cm_id->route.addr.src_addr;
ctx               864 drivers/infiniband/core/ucma.c 	addr = (struct sockaddr *) &ctx->cm_id->route.addr.dst_addr;
ctx               868 drivers/infiniband/core/ucma.c 	if (!ctx->cm_id->device)
ctx               871 drivers/infiniband/core/ucma.c 	resp.node_guid = (__force __u64) ctx->cm_id->device->node_guid;
ctx               872 drivers/infiniband/core/ucma.c 	resp.port_num = ctx->cm_id->port_num;
ctx               874 drivers/infiniband/core/ucma.c 	if (rdma_cap_ib_sa(ctx->cm_id->device, ctx->cm_id->port_num))
ctx               875 drivers/infiniband/core/ucma.c 		ucma_copy_ib_route(&resp, &ctx->cm_id->route);
ctx               876 drivers/infiniband/core/ucma.c 	else if (rdma_protocol_roce(ctx->cm_id->device, ctx->cm_id->port_num))
ctx               877 drivers/infiniband/core/ucma.c 		ucma_copy_iboe_route(&resp, &ctx->cm_id->route);
ctx               878 drivers/infiniband/core/ucma.c 	else if (rdma_protocol_iwarp(ctx->cm_id->device, ctx->cm_id->port_num))
ctx               879 drivers/infiniband/core/ucma.c 		ucma_copy_iw_route(&resp, &ctx->cm_id->route);
ctx               882 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx               887 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx               903 drivers/infiniband/core/ucma.c static ssize_t ucma_query_addr(struct ucma_context *ctx,
ctx               915 drivers/infiniband/core/ucma.c 	addr = (struct sockaddr *) &ctx->cm_id->route.addr.src_addr;
ctx               919 drivers/infiniband/core/ucma.c 	addr = (struct sockaddr *) &ctx->cm_id->route.addr.dst_addr;
ctx               923 drivers/infiniband/core/ucma.c 	ucma_query_device_addr(ctx->cm_id, &resp);
ctx               931 drivers/infiniband/core/ucma.c static ssize_t ucma_query_path(struct ucma_context *ctx,
ctx               944 drivers/infiniband/core/ucma.c 	resp->num_paths = ctx->cm_id->route.num_paths;
ctx               948 drivers/infiniband/core/ucma.c 		struct sa_path_rec *rec = &ctx->cm_id->route.path_rec[i];
ctx               970 drivers/infiniband/core/ucma.c static ssize_t ucma_query_gid(struct ucma_context *ctx,
ctx               982 drivers/infiniband/core/ucma.c 	ucma_query_device_addr(ctx->cm_id, &resp);
ctx               986 drivers/infiniband/core/ucma.c 	if (ctx->cm_id->route.addr.src_addr.ss_family == AF_IB) {
ctx               987 drivers/infiniband/core/ucma.c 		memcpy(addr, &ctx->cm_id->route.addr.src_addr, resp.src_size);
ctx               991 drivers/infiniband/core/ucma.c 		rdma_read_gids(ctx->cm_id, (union ib_gid *)&addr->sib_addr,
ctx               993 drivers/infiniband/core/ucma.c 		addr->sib_sid = rdma_get_service_id(ctx->cm_id, (struct sockaddr *)
ctx               994 drivers/infiniband/core/ucma.c 						    &ctx->cm_id->route.addr.src_addr);
ctx               999 drivers/infiniband/core/ucma.c 	if (ctx->cm_id->route.addr.dst_addr.ss_family == AF_IB) {
ctx              1000 drivers/infiniband/core/ucma.c 		memcpy(addr, &ctx->cm_id->route.addr.dst_addr, resp.dst_size);
ctx              1004 drivers/infiniband/core/ucma.c 		rdma_read_gids(ctx->cm_id, NULL,
ctx              1006 drivers/infiniband/core/ucma.c 		addr->sib_sid = rdma_get_service_id(ctx->cm_id, (struct sockaddr *)
ctx              1007 drivers/infiniband/core/ucma.c 						    &ctx->cm_id->route.addr.dst_addr);
ctx              1021 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1029 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx              1030 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1031 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1033 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1036 drivers/infiniband/core/ucma.c 		ret = ucma_query_addr(ctx, response, out_len);
ctx              1039 drivers/infiniband/core/ucma.c 		ret = ucma_query_path(ctx, response, out_len);
ctx              1042 drivers/infiniband/core/ucma.c 		ret = ucma_query_gid(ctx, response, out_len);
ctx              1048 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1050 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1075 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1084 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx              1085 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1086 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1088 drivers/infiniband/core/ucma.c 	ucma_copy_conn_param(ctx->cm_id, &conn_param, &cmd.conn_param);
ctx              1089 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1090 drivers/infiniband/core/ucma.c 	ret = rdma_connect(ctx->cm_id, &conn_param);
ctx              1091 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1092 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1100 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1106 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx              1107 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1108 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1110 drivers/infiniband/core/ucma.c 	ctx->backlog = cmd.backlog > 0 && cmd.backlog < max_backlog ?
ctx              1112 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1113 drivers/infiniband/core/ucma.c 	ret = rdma_listen(ctx->cm_id, ctx->backlog);
ctx              1114 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1115 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1124 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1130 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx              1131 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1132 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1135 drivers/infiniband/core/ucma.c 		ucma_copy_conn_param(ctx->cm_id, &conn_param, &cmd.conn_param);
ctx              1137 drivers/infiniband/core/ucma.c 		mutex_lock(&ctx->mutex);
ctx              1138 drivers/infiniband/core/ucma.c 		ret = __rdma_accept(ctx->cm_id, &conn_param, NULL);
ctx              1139 drivers/infiniband/core/ucma.c 		mutex_unlock(&ctx->mutex);
ctx              1141 drivers/infiniband/core/ucma.c 			ctx->uid = cmd.uid;
ctx              1144 drivers/infiniband/core/ucma.c 		mutex_lock(&ctx->mutex);
ctx              1145 drivers/infiniband/core/ucma.c 		ret = __rdma_accept(ctx->cm_id, NULL, NULL);
ctx              1146 drivers/infiniband/core/ucma.c 		mutex_unlock(&ctx->mutex);
ctx              1148 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1156 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1162 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx              1163 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1164 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1166 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1167 drivers/infiniband/core/ucma.c 	ret = rdma_reject(ctx->cm_id, cmd.private_data, cmd.private_data_len);
ctx              1168 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1169 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1177 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1183 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx              1184 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1185 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1187 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1188 drivers/infiniband/core/ucma.c 	ret = rdma_disconnect(ctx->cm_id);
ctx              1189 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1190 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1200 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1213 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd.id);
ctx              1214 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1215 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1220 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1221 drivers/infiniband/core/ucma.c 	ret = rdma_init_qp_attr(ctx->cm_id, &qp_attr, &resp.qp_attr_mask);
ctx              1222 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1226 drivers/infiniband/core/ucma.c 	ib_copy_qp_attr_to_user(ctx->cm_id->device, &resp, &qp_attr);
ctx              1232 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1236 drivers/infiniband/core/ucma.c static int ucma_set_option_id(struct ucma_context *ctx, int optname,
ctx              1247 drivers/infiniband/core/ucma.c 		rdma_set_service_type(ctx->cm_id, *((u8 *) optval));
ctx              1254 drivers/infiniband/core/ucma.c 		ret = rdma_set_reuseaddr(ctx->cm_id, *((int *) optval) ? 1 : 0);
ctx              1261 drivers/infiniband/core/ucma.c 		ret = rdma_set_afonly(ctx->cm_id, *((int *) optval) ? 1 : 0);
ctx              1268 drivers/infiniband/core/ucma.c 		ret = rdma_set_ack_timeout(ctx->cm_id, *((u8 *)optval));
ctx              1277 drivers/infiniband/core/ucma.c static int ucma_set_ib_path(struct ucma_context *ctx,
ctx              1296 drivers/infiniband/core/ucma.c 	if (!ctx->cm_id->device)
ctx              1304 drivers/infiniband/core/ucma.c 	if (rdma_cap_opa_ah(ctx->cm_id->device, ctx->cm_id->port_num)) {
ctx              1308 drivers/infiniband/core/ucma.c 		mutex_lock(&ctx->mutex);
ctx              1309 drivers/infiniband/core/ucma.c 		ret = rdma_set_ib_path(ctx->cm_id, &opa);
ctx              1310 drivers/infiniband/core/ucma.c 		mutex_unlock(&ctx->mutex);
ctx              1312 drivers/infiniband/core/ucma.c 		mutex_lock(&ctx->mutex);
ctx              1313 drivers/infiniband/core/ucma.c 		ret = rdma_set_ib_path(ctx->cm_id, &sa_path);
ctx              1314 drivers/infiniband/core/ucma.c 		mutex_unlock(&ctx->mutex);
ctx              1321 drivers/infiniband/core/ucma.c 	return ucma_event_handler(ctx->cm_id, &event);
ctx              1324 drivers/infiniband/core/ucma.c static int ucma_set_option_ib(struct ucma_context *ctx, int optname,
ctx              1331 drivers/infiniband/core/ucma.c 		ret = ucma_set_ib_path(ctx, optval, optlen);
ctx              1340 drivers/infiniband/core/ucma.c static int ucma_set_option_level(struct ucma_context *ctx, int level,
ctx              1347 drivers/infiniband/core/ucma.c 		mutex_lock(&ctx->mutex);
ctx              1348 drivers/infiniband/core/ucma.c 		ret = ucma_set_option_id(ctx, optname, optval, optlen);
ctx              1349 drivers/infiniband/core/ucma.c 		mutex_unlock(&ctx->mutex);
ctx              1352 drivers/infiniband/core/ucma.c 		ret = ucma_set_option_ib(ctx, optname, optval, optlen);
ctx              1365 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1375 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx              1376 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1377 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1386 drivers/infiniband/core/ucma.c 	ret = ucma_set_option_level(ctx, cmd.level, cmd.optname, optval,
ctx              1391 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1399 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1405 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(file, cmd.id);
ctx              1406 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1407 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1409 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1410 drivers/infiniband/core/ucma.c 	if (ctx->cm_id->device)
ctx              1411 drivers/infiniband/core/ucma.c 		ret = rdma_notify(ctx->cm_id, (enum ib_event_type)cmd.event);
ctx              1412 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1414 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1422 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1442 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx_dev(file, cmd->id);
ctx              1443 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx))
ctx              1444 drivers/infiniband/core/ucma.c 		return PTR_ERR(ctx);
ctx              1447 drivers/infiniband/core/ucma.c 	mc = ucma_alloc_multicast(ctx);
ctx              1455 drivers/infiniband/core/ucma.c 	mutex_lock(&ctx->mutex);
ctx              1456 drivers/infiniband/core/ucma.c 	ret = rdma_join_multicast(ctx->cm_id, (struct sockaddr *)&mc->addr,
ctx              1458 drivers/infiniband/core/ucma.c 	mutex_unlock(&ctx->mutex);
ctx              1472 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1476 drivers/infiniband/core/ucma.c 	rdma_leave_multicast(ctx->cm_id, (struct sockaddr *) &mc->addr);
ctx              1484 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1545 drivers/infiniband/core/ucma.c 	else if (mc->ctx->file != file)
ctx              1547 drivers/infiniband/core/ucma.c 	else if (!atomic_inc_not_zero(&mc->ctx->ref))
ctx              1558 drivers/infiniband/core/ucma.c 	mutex_lock(&mc->ctx->mutex);
ctx              1559 drivers/infiniband/core/ucma.c 	rdma_leave_multicast(mc->ctx->cm_id, (struct sockaddr *) &mc->addr);
ctx              1560 drivers/infiniband/core/ucma.c 	mutex_unlock(&mc->ctx->mutex);
ctx              1562 drivers/infiniband/core/ucma.c 	mutex_lock(&mc->ctx->file->mut);
ctx              1565 drivers/infiniband/core/ucma.c 	mutex_unlock(&mc->ctx->file->mut);
ctx              1567 drivers/infiniband/core/ucma.c 	ucma_put_ctx(mc->ctx);
ctx              1601 drivers/infiniband/core/ucma.c static void ucma_move_events(struct ucma_context *ctx, struct ucma_file *file)
ctx              1605 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(uevent, tmp, &ctx->file->event_list, list)
ctx              1606 drivers/infiniband/core/ucma.c 		if (uevent->ctx == ctx)
ctx              1616 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx;
ctx              1634 drivers/infiniband/core/ucma.c 	ctx = ucma_get_ctx(f.file->private_data, cmd.id);
ctx              1635 drivers/infiniband/core/ucma.c 	if (IS_ERR(ctx)) {
ctx              1636 drivers/infiniband/core/ucma.c 		ret = PTR_ERR(ctx);
ctx              1640 drivers/infiniband/core/ucma.c 	cur_file = ctx->file;
ctx              1642 drivers/infiniband/core/ucma.c 		resp.events_reported = ctx->events_reported;
ctx              1653 drivers/infiniband/core/ucma.c 	list_move_tail(&ctx->list, &new_file->ctx_list);
ctx              1654 drivers/infiniband/core/ucma.c 	ucma_move_events(ctx, new_file);
ctx              1655 drivers/infiniband/core/ucma.c 	ctx->file = new_file;
ctx              1656 drivers/infiniband/core/ucma.c 	resp.events_reported = ctx->events_reported;
ctx              1666 drivers/infiniband/core/ucma.c 	ucma_put_ctx(ctx);
ctx              1786 drivers/infiniband/core/ucma.c 	struct ucma_context *ctx, *tmp;
ctx              1789 drivers/infiniband/core/ucma.c 	list_for_each_entry_safe(ctx, tmp, &file->ctx_list, list) {
ctx              1790 drivers/infiniband/core/ucma.c 		ctx->destroying = 1;
ctx              1793 drivers/infiniband/core/ucma.c 		xa_erase(&ctx_table, ctx->id);
ctx              1800 drivers/infiniband/core/ucma.c 		if (!ctx->closing) {
ctx              1802 drivers/infiniband/core/ucma.c 			ucma_put_ctx(ctx);
ctx              1803 drivers/infiniband/core/ucma.c 			wait_for_completion(&ctx->comp);
ctx              1807 drivers/infiniband/core/ucma.c 			rdma_destroy_id(ctx->cm_id);
ctx              1812 drivers/infiniband/core/ucma.c 		ucma_free_ctx(ctx);
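
The ucma.c hits above all follow one lifetime idiom: look the context up by the user-supplied id, bail out with PTR_ERR() on failure, hold ctx->mutex only around the rdma_* call, and drop the reference with ucma_put_ctx(). Below is a minimal userspace sketch of that shape; the names (uctx, uctx_get, do_disconnect) are invented for illustration, with a pthread mutex and a C11 atomic standing in for the kernel primitives.

    #include <errno.h>
    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    /* Hypothetical stand-in for struct ucma_context. */
    struct uctx {
        atomic_int ref;
        pthread_mutex_t mutex;
        int id;
    };

    static struct uctx *table[16];          /* toy id -> context table */

    /* Mimics ucma_get_ctx(): validate the id and take a reference. */
    static struct uctx *uctx_get(int id)
    {
        if (id < 0 || id >= 16 || !table[id])
            return NULL;                    /* the kernel returns ERR_PTR() here */
        atomic_fetch_add(&table[id]->ref, 1);
        return table[id];
    }

    static void uctx_put(struct uctx *ctx)
    {
        atomic_fetch_sub(&ctx->ref, 1);
    }

    /* The handler shape: get, lock around the operation, unlock, put. */
    static int do_disconnect(int id)
    {
        struct uctx *ctx = uctx_get(id);

        if (!ctx)
            return -ENOENT;
        pthread_mutex_lock(&ctx->mutex);
        printf("disconnect ctx %d\n", ctx->id);   /* rdma_disconnect() goes here */
        pthread_mutex_unlock(&ctx->mutex);
        uctx_put(ctx);
        return 0;
    }

    int main(void)
    {
        struct uctx c = { .ref = 1, .mutex = PTHREAD_MUTEX_INITIALIZER, .id = 3 };

        table[3] = &c;
        return do_disconnect(3) ? 1 : 0;
    }

The narrow lock scope matters: the mutex serializes only the verb call itself, while the refcount keeps the context alive across the whole handler.
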
ctx               664 drivers/infiniband/core/verbs.c 	struct find_gid_index_context *ctx = context;
ctx               668 drivers/infiniband/core/verbs.c 	if (ctx->gid_type != gid_attr->gid_type)
ctx               675 drivers/infiniband/core/verbs.c 	return ctx->vlan_id == vlan_id;
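
The verbs.c hits show the context-plus-callback filter pattern: the caller packs its match criteria (gid_type, vlan_id) into a small struct, a generic walker hands that struct back as void *context, and the filter compares field by field. A self-contained sketch of that shape, with illustrative names rather than the kernel API:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    struct gid_entry { int gid_type; int vlan_id; };

    /* Caller-owned match criteria, passed through as void *context. */
    struct find_gid_ctx { int gid_type; int vlan_id; };

    static bool match_gid(const struct gid_entry *e, void *context)
    {
        struct find_gid_ctx *ctx = context;

        if (ctx->gid_type != e->gid_type)
            return false;
        return ctx->vlan_id == e->vlan_id;
    }

    /* Generic walker: returns the first index the filter accepts, or -1. */
    static int find_entry(const struct gid_entry *tbl, size_t n,
                          bool (*filter)(const struct gid_entry *, void *),
                          void *context)
    {
        for (size_t i = 0; i < n; i++)
            if (filter(&tbl[i], context))
                return (int)i;
        return -1;
    }

    int main(void)
    {
        struct gid_entry tbl[] = { {0, 10}, {1, 20}, {1, 30} };
        struct find_gid_ctx ctx = { .gid_type = 1, .vlan_id = 30 };

        printf("index = %d\n", find_entry(tbl, 3, match_gid, &ctx));
        return 0;
    }
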
ctx               307 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_gid_ctx *ctx, **ctx_tbl;
ctx               314 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx = *context;
ctx               315 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!ctx)
ctx               319 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		if (ctx->idx >= sgid_tbl->max)
ctx               321 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		gid_to_del = &sgid_tbl->tbl[ctx->idx].gid;
ctx               322 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		vlan_id = sgid_tbl->tbl[ctx->idx].vlan_id;
ctx               331 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		if (ctx->idx == 0 &&
ctx               333 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		    ctx->refcnt == 1 && rdev->qp1_sqp) {
ctx               338 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		ctx->refcnt--;
ctx               339 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		if (!ctx->refcnt) {
ctx               346 drivers/infiniband/hw/bnxt_re/ib_verbs.c 				ctx_tbl = sgid_tbl->ctx;
ctx               347 drivers/infiniband/hw/bnxt_re/ib_verbs.c 				ctx_tbl[ctx->idx] = NULL;
ctx               348 drivers/infiniband/hw/bnxt_re/ib_verbs.c 				kfree(ctx);
ctx               362 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct bnxt_re_gid_ctx *ctx, **ctx_tbl;
ctx               374 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		ctx_tbl = sgid_tbl->ctx;
ctx               385 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               386 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	if (!ctx)
ctx               388 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx_tbl = sgid_tbl->ctx;
ctx               389 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx->idx = tbl_idx;
ctx               390 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx->refcnt = 1;
ctx               391 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	ctx_tbl[tbl_idx] = ctx;
ctx               392 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	*context = ctx;
ctx              3603 drivers/infiniband/hw/bnxt_re/ib_verbs.c int bnxt_re_alloc_ucontext(struct ib_ucontext *ctx, struct ib_udata *udata)
ctx              3605 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	struct ib_device *ibdev = ctx->device;
ctx              3607 drivers/infiniband/hw/bnxt_re/ib_verbs.c 		container_of(ctx, struct bnxt_re_ucontext, ib_uctx);
ctx               211 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_alloc_ucontext(struct ib_ucontext *ctx, struct ib_udata *udata);
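
The bnxt_re add/del GID hits revolve around a per-slot context table (sgid_tbl->ctx): add allocates a small { idx, refcnt } record and parks it at the slot, and delete decrements refcnt, only clearing the slot and freeing when the count hits zero. A toy version of that pairing, assuming single-threaded use (the driver holds a lock around these paths):

    #include <stdio.h>
    #include <stdlib.h>

    #define MAX_GIDS 8

    struct gid_ctx { int idx; int refcnt; };

    static struct gid_ctx *ctx_tbl[MAX_GIDS];    /* mirrors sgid_tbl->ctx */

    static struct gid_ctx *gid_add(int tbl_idx)
    {
        struct gid_ctx *ctx;

        if (ctx_tbl[tbl_idx]) {                  /* duplicate add: share it */
            ctx_tbl[tbl_idx]->refcnt++;
            return ctx_tbl[tbl_idx];
        }
        ctx = malloc(sizeof(*ctx));
        if (!ctx)
            return NULL;
        ctx->idx = tbl_idx;
        ctx->refcnt = 1;
        ctx_tbl[tbl_idx] = ctx;
        return ctx;
    }

    static void gid_del(struct gid_ctx *ctx)
    {
        if (--ctx->refcnt)
            return;                              /* still referenced elsewhere */
        ctx_tbl[ctx->idx] = NULL;                /* hardware delete would go here */
        free(ctx);
    }

    int main(void)
    {
        struct gid_ctx *a = gid_add(2), *b = gid_add(2);

        gid_del(b);                              /* slot survives, refcnt 2 -> 1 */
        gid_del(a);                              /* refcnt 0: slot really freed */
        printf("slot 2 %s\n", ctx_tbl[2] ? "busy" : "empty");
        return 0;
    }
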
ctx               477 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 			 struct bnxt_qplib_ctx *ctx, int is_virtfn)
ctx               500 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->qpc_tbl.level;
ctx               502 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->qpc_tbl.pbl[level]);
ctx               503 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->mrw_tbl.level;
ctx               505 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->mrw_tbl.pbl[level]);
ctx               506 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->srqc_tbl.level;
ctx               508 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->srqc_tbl.pbl[level]);
ctx               509 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->cq_tbl.level;
ctx               511 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->cq_tbl.pbl[level]);
ctx               512 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->srqc_tbl.level;
ctx               514 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->srqc_tbl.pbl[level]);
ctx               515 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->cq_tbl.level;
ctx               517 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				__get_pbl_pg_idx(&ctx->cq_tbl.pbl[level]);
ctx               518 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->tim_tbl.level;
ctx               520 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				  __get_pbl_pg_idx(&ctx->tim_tbl.pbl[level]);
ctx               521 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	level = ctx->tqm_pde_level;
ctx               523 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				  __get_pbl_pg_idx(&ctx->tqm_pde.pbl[level]);
ctx               526 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->qpc_tbl.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               528 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->mrw_tbl.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               530 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->srqc_tbl.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               532 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->cq_tbl.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               534 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->tim_tbl.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               536 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 		cpu_to_le64(ctx->tqm_pde.pbl[PBL_LVL_0].pg_map_arr[0]);
ctx               538 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.number_of_qp = cpu_to_le32(ctx->qpc_tbl.max_elements);
ctx               539 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.number_of_mrw = cpu_to_le32(ctx->mrw_tbl.max_elements);
ctx               540 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.number_of_srq = cpu_to_le32(ctx->srqc_tbl.max_elements);
ctx               541 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.number_of_cq = cpu_to_le32(ctx->cq_tbl.max_elements);
ctx               543 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.max_qp_per_vf = cpu_to_le32(ctx->vf_res.max_qp_per_vf);
ctx               544 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.max_mrw_per_vf = cpu_to_le32(ctx->vf_res.max_mrw_per_vf);
ctx               545 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.max_srq_per_vf = cpu_to_le32(ctx->vf_res.max_srq_per_vf);
ctx               546 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.max_cq_per_vf = cpu_to_le32(ctx->vf_res.max_cq_per_vf);
ctx               547 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.max_gid_per_vf = cpu_to_le32(ctx->vf_res.max_gid_per_vf);
ctx               550 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	req.stat_ctx_id = cpu_to_le32(ctx->stats.fw_id);
ctx               570 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 				  struct bnxt_qplib_ctx *ctx,
ctx               586 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c 	if (ctx->hwrm_intf_ver < HWRM_VERSION_RCFW_CMDQ_DEPTH_CHECK)
ctx               273 drivers/infiniband/hw/bnxt_re/qplib_rcfw.h 				  struct bnxt_qplib_ctx *ctx,
ctx               297 drivers/infiniband/hw/bnxt_re/qplib_rcfw.h 			 struct bnxt_qplib_ctx *ctx, int is_virtfn);
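
The qplib_rcfw hits are one long firmware-request fill: every count and DMA address pulled from ctx is byte-swapped to little-endian with cpu_to_le32()/cpu_to_le64() before it goes to the device, so the wire layout is the same on any host. A userspace analog, assuming the glibc htole32()/htole64() helpers from <endian.h>; the struct and field names are invented for the sketch:

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Toy firmware request with a fixed little-endian layout, like req above. */
    struct fw_req {
        uint32_t number_of_qp;
        uint32_t number_of_mrw;
        uint64_t qpc_page_dir;      /* DMA address of the QPC page directory */
    };

    struct soft_ctx { uint32_t qpc_count, mrw_count; uint64_t qpc_map; };

    static void fill_req(struct fw_req *req, const struct soft_ctx *ctx)
    {
        /* Host order -> LE, the userspace cousin of cpu_to_le32/le64. */
        req->number_of_qp  = htole32(ctx->qpc_count);
        req->number_of_mrw = htole32(ctx->mrw_count);
        req->qpc_page_dir  = htole64(ctx->qpc_map);
    }

    int main(void)
    {
        struct soft_ctx ctx = { 256, 1024, 0x1000 };
        struct fw_req req;

        fill_req(&req, &ctx);
        printf("qp field on the wire: %08x\n", (unsigned)req.number_of_qp);
        return 0;
    }
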
ctx               302 drivers/infiniband/hw/bnxt_re/qplib_res.c 			 struct bnxt_qplib_ctx *ctx)
ctx               306 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->qpc_tbl);
ctx               307 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->mrw_tbl);
ctx               308 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->srqc_tbl);
ctx               309 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->cq_tbl);
ctx               310 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->tim_tbl);
ctx               312 drivers/infiniband/hw/bnxt_re/qplib_res.c 		bnxt_qplib_free_hwq(pdev, &ctx->tqm_tbl[i]);
ctx               313 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_hwq(pdev, &ctx->tqm_pde);
ctx               314 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_stats_ctx(pdev, &ctx->stats);
ctx               339 drivers/infiniband/hw/bnxt_re/qplib_res.c 			 struct bnxt_qplib_ctx *ctx,
ctx               350 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->qpc_tbl.max_elements = ctx->qpc_count;
ctx               351 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->qpc_tbl, NULL,
ctx               352 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->qpc_tbl.max_elements,
ctx               359 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->mrw_tbl.max_elements = ctx->mrw_count;
ctx               360 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->mrw_tbl, NULL,
ctx               361 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->mrw_tbl.max_elements,
ctx               368 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->srqc_tbl.max_elements = ctx->srqc_count;
ctx               369 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->srqc_tbl, NULL,
ctx               370 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->srqc_tbl.max_elements,
ctx               377 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->cq_tbl.max_elements = ctx->cq_count;
ctx               378 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->cq_tbl, NULL,
ctx               379 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->cq_tbl.max_elements,
ctx               386 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->tqm_pde.max_elements = 512;
ctx               387 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->tqm_pde, NULL,
ctx               388 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->tqm_pde.max_elements, sizeof(u64),
ctx               394 drivers/infiniband/hw/bnxt_re/qplib_res.c 		if (!ctx->tqm_count[i])
ctx               396 drivers/infiniband/hw/bnxt_re/qplib_res.c 		ctx->tqm_tbl[i].max_elements = ctx->qpc_count *
ctx               397 drivers/infiniband/hw/bnxt_re/qplib_res.c 					       ctx->tqm_count[i];
ctx               398 drivers/infiniband/hw/bnxt_re/qplib_res.c 		rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->tqm_tbl[i], NULL,
ctx               399 drivers/infiniband/hw/bnxt_re/qplib_res.c 					       &ctx->tqm_tbl[i].max_elements, 1,
ctx               404 drivers/infiniband/hw/bnxt_re/qplib_res.c 	pbl_ptr = (__le64 **)ctx->tqm_pde.pbl_ptr;
ctx               407 drivers/infiniband/hw/bnxt_re/qplib_res.c 		if (!ctx->tqm_tbl[i].max_elements)
ctx               411 drivers/infiniband/hw/bnxt_re/qplib_res.c 		switch (ctx->tqm_tbl[i].level) {
ctx               413 drivers/infiniband/hw/bnxt_re/qplib_res.c 			for (k = 0; k < ctx->tqm_tbl[i].pbl[PBL_LVL_1].pg_count;
ctx               417 drivers/infiniband/hw/bnxt_re/qplib_res.c 				    ctx->tqm_tbl[i].pbl[PBL_LVL_1].pg_map_arr[k]
ctx               424 drivers/infiniband/hw/bnxt_re/qplib_res.c 				ctx->tqm_tbl[i].pbl[PBL_LVL_0].pg_map_arr[0] |
ctx               431 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->tqm_pde_level = ctx->tqm_tbl[fnz_idx].level == PBL_LVL_2 ?
ctx               432 drivers/infiniband/hw/bnxt_re/qplib_res.c 			     PBL_LVL_2 : ctx->tqm_tbl[fnz_idx].level + 1;
ctx               435 drivers/infiniband/hw/bnxt_re/qplib_res.c 	ctx->tim_tbl.max_elements = ctx->qpc_count * 16;
ctx               436 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_init_hwq(pdev, &ctx->tim_tbl, NULL,
ctx               437 drivers/infiniband/hw/bnxt_re/qplib_res.c 				       &ctx->tim_tbl.max_elements, 1,
ctx               444 drivers/infiniband/hw/bnxt_re/qplib_res.c 	rc = bnxt_qplib_alloc_stats_ctx(pdev, &ctx->stats);
ctx               451 drivers/infiniband/hw/bnxt_re/qplib_res.c 	bnxt_qplib_free_ctx(pdev, ctx);
ctx               477 drivers/infiniband/hw/bnxt_re/qplib_res.c 	kfree(sgid_tbl->ctx);
ctx               481 drivers/infiniband/hw/bnxt_re/qplib_res.c 	sgid_tbl->ctx = NULL;
ctx               499 drivers/infiniband/hw/bnxt_re/qplib_res.c 	sgid_tbl->ctx = kcalloc(max, sizeof(void *), GFP_KERNEL);
ctx               500 drivers/infiniband/hw/bnxt_re/qplib_res.c 	if (!sgid_tbl->ctx)
ctx               510 drivers/infiniband/hw/bnxt_re/qplib_res.c 	kfree(sgid_tbl->ctx);
ctx               511 drivers/infiniband/hw/bnxt_re/qplib_res.c 	sgid_tbl->ctx = NULL;
ctx               118 drivers/infiniband/hw/bnxt_re/qplib_res.h 	void				*ctx;
ctx               262 drivers/infiniband/hw/bnxt_re/qplib_res.h 			 struct bnxt_qplib_ctx *ctx);
ctx               264 drivers/infiniband/hw/bnxt_re/qplib_res.h 			 struct bnxt_qplib_ctx *ctx,
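
bnxt_qplib_alloc_ctx()/bnxt_qplib_free_ctx() above show the classic paired allocator: several tables are allocated in sequence, any failure jumps to one fail label, and the unwind is simply the free routine, which therefore has to tolerate members that were never allocated. A compact sketch of that goto-unwind discipline (free() tolerating NULL plays the role of the hwq free helpers):

    #include <stdlib.h>

    struct tables { void *qpc, *mrw, *srqc; };

    static void tables_free(struct tables *t)
    {
        /* free(NULL) is a no-op, so a partial unwind is safe. */
        free(t->srqc);
        free(t->mrw);
        free(t->qpc);
    }

    static int tables_alloc(struct tables *t)
    {
        *t = (struct tables){0};     /* so the fail path sees clean members */
        t->qpc = calloc(1, 4096);
        if (!t->qpc)
            goto fail;
        t->mrw = calloc(1, 4096);
        if (!t->mrw)
            goto fail;
        t->srqc = calloc(1, 4096);
        if (!t->srqc)
            goto fail;
        return 0;
    fail:
        tables_free(t);              /* one unwind path, as in free_ctx() */
        return -1;
    }

    int main(void)
    {
        struct tables t;

        if (tables_alloc(&t))
            return 1;
        tables_free(&t);
        return 0;
    }
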
ctx               176 drivers/infiniband/hw/bnxt_re/qplib_sp.c 				  struct bnxt_qplib_ctx *ctx)
ctx               185 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.number_of_qp = cpu_to_le32(ctx->qpc_count);
ctx               186 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.number_of_mrw = cpu_to_le32(ctx->mrw_count);
ctx               187 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.number_of_srq =  cpu_to_le32(ctx->srqc_count);
ctx               188 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.number_of_cq = cpu_to_le32(ctx->cq_count);
ctx               190 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.max_qp_per_vf = cpu_to_le32(ctx->vf_res.max_qp_per_vf);
ctx               191 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.max_mrw_per_vf = cpu_to_le32(ctx->vf_res.max_mrw_per_vf);
ctx               192 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.max_srq_per_vf = cpu_to_le32(ctx->vf_res.max_srq_per_vf);
ctx               193 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.max_cq_per_vf = cpu_to_le32(ctx->vf_res.max_cq_per_vf);
ctx               194 drivers/infiniband/hw/bnxt_re/qplib_sp.c 	req.max_gid_per_vf = cpu_to_le32(ctx->vf_res.max_gid_per_vf);
ctx               248 drivers/infiniband/hw/bnxt_re/qplib_sp.h 				  struct bnxt_qplib_ctx *ctx);
ctx               629 drivers/infiniband/hw/cxgb3/iwch_cm.c static int act_establish(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx               631 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1040 drivers/infiniband/hw/cxgb3/iwch_cm.c static int rx_data(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1042 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1085 drivers/infiniband/hw/cxgb3/iwch_cm.c static int tx_ack(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1087 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1132 drivers/infiniband/hw/cxgb3/iwch_cm.c static int abort_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1134 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1176 drivers/infiniband/hw/cxgb3/iwch_cm.c static int act_open_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1178 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1222 drivers/infiniband/hw/cxgb3/iwch_cm.c static int pass_open_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1224 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_listen_ep *ep = ctx;
ctx              1256 drivers/infiniband/hw/cxgb3/iwch_cm.c 			     void *ctx)
ctx              1258 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_listen_ep *ep = ctx;
ctx              1334 drivers/infiniband/hw/cxgb3/iwch_cm.c static int pass_accept_req(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1336 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *child_ep, *parent_ep = ctx;
ctx              1410 drivers/infiniband/hw/cxgb3/iwch_cm.c static int pass_establish(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1412 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1428 drivers/infiniband/hw/cxgb3/iwch_cm.c static int peer_close(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1430 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1519 drivers/infiniband/hw/cxgb3/iwch_cm.c static int peer_abort(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1522 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1627 drivers/infiniband/hw/cxgb3/iwch_cm.c static int close_con_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1629 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1681 drivers/infiniband/hw/cxgb3/iwch_cm.c static int terminate(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1683 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              1698 drivers/infiniband/hw/cxgb3/iwch_cm.c static int ec_status(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              1701 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              2126 drivers/infiniband/hw/cxgb3/iwch_cm.c int iwch_ep_redirect(void *ctx, struct dst_entry *old, struct dst_entry *new,
ctx              2129 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep *ep = ctx;
ctx              2189 drivers/infiniband/hw/cxgb3/iwch_cm.c static int sched(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx              2191 drivers/infiniband/hw/cxgb3/iwch_cm.c 	struct iwch_ep_common *epc = ctx;
ctx              2198 drivers/infiniband/hw/cxgb3/iwch_cm.c 	*((void **) skb->cb) = ctx;
ctx              2209 drivers/infiniband/hw/cxgb3/iwch_cm.c static int set_tcb_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx               227 drivers/infiniband/hw/cxgb3/iwch_cm.h int iwch_ep_redirect(void *ctx, struct dst_entry *old, struct dst_entry *new, struct l2t_entry *l2t);
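
Every cxgb3 CPL handler above has the same signature, int handler(struct t3cdev *, struct sk_buff *, void *ctx), and the first statement of each recovers the endpoint it registered by casting ctx. That uniform signature is what lets the driver dispatch from a table keyed by message type. A sketch of that table-driven dispatch with an opaque context; all names here are illustrative:

    #include <stdio.h>

    struct ep { int id; };                    /* stands in for struct iwch_ep */

    typedef int (*cpl_handler_t)(void *dev, void *skb, void *ctx);

    static int rx_data(void *dev, void *skb, void *ctx)
    {
        struct ep *ep = ctx;                  /* recover the typed endpoint */

        printf("rx_data on ep %d\n", ep->id);
        return 0;
    }

    static int peer_close(void *dev, void *skb, void *ctx)
    {
        struct ep *ep = ctx;

        printf("peer_close on ep %d\n", ep->id);
        return 0;
    }

    enum { CPL_RX_DATA, CPL_PEER_CLOSE, CPL_MAX };

    static cpl_handler_t handlers[CPL_MAX] = {
        [CPL_RX_DATA]    = rx_data,
        [CPL_PEER_CLOSE] = peer_close,
    };

    int main(void)
    {
        struct ep ep = { .id = 7 };

        handlers[CPL_RX_DATA](NULL, NULL, &ep);
        handlers[CPL_PEER_CLOSE](NULL, NULL, &ep);
        return 0;
    }
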
ctx               936 drivers/infiniband/hw/cxgb4/device.c void c4iw_dealloc(struct uld_ctx *ctx)
ctx               938 drivers/infiniband/hw/cxgb4/device.c 	c4iw_rdev_close(&ctx->dev->rdev);
ctx               939 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->cqs));
ctx               940 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->qps));
ctx               941 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->mrs));
ctx               942 drivers/infiniband/hw/cxgb4/device.c 	wait_event(ctx->dev->wait, xa_empty(&ctx->dev->hwtids));
ctx               943 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->stids));
ctx               944 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->atids));
ctx               945 drivers/infiniband/hw/cxgb4/device.c 	if (ctx->dev->rdev.bar2_kva)
ctx               946 drivers/infiniband/hw/cxgb4/device.c 		iounmap(ctx->dev->rdev.bar2_kva);
ctx               947 drivers/infiniband/hw/cxgb4/device.c 	if (ctx->dev->rdev.oc_mw_kva)
ctx               948 drivers/infiniband/hw/cxgb4/device.c 		iounmap(ctx->dev->rdev.oc_mw_kva);
ctx               949 drivers/infiniband/hw/cxgb4/device.c 	ib_dealloc_device(&ctx->dev->ibdev);
ctx               950 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev = NULL;
ctx               953 drivers/infiniband/hw/cxgb4/device.c static void c4iw_remove(struct uld_ctx *ctx)
ctx               955 drivers/infiniband/hw/cxgb4/device.c 	pr_debug("c4iw_dev %p\n", ctx->dev);
ctx               956 drivers/infiniband/hw/cxgb4/device.c 	c4iw_unregister_device(ctx->dev);
ctx               957 drivers/infiniband/hw/cxgb4/device.c 	c4iw_dealloc(ctx);
ctx              1073 drivers/infiniband/hw/cxgb4/device.c 	struct uld_ctx *ctx;
ctx              1081 drivers/infiniband/hw/cxgb4/device.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1082 drivers/infiniband/hw/cxgb4/device.c 	if (!ctx) {
ctx              1083 drivers/infiniband/hw/cxgb4/device.c 		ctx = ERR_PTR(-ENOMEM);
ctx              1086 drivers/infiniband/hw/cxgb4/device.c 	ctx->lldi = *infop;
ctx              1089 drivers/infiniband/hw/cxgb4/device.c 		 pci_name(ctx->lldi.pdev),
ctx              1090 drivers/infiniband/hw/cxgb4/device.c 		 ctx->lldi.nchan, ctx->lldi.nrxq,
ctx              1091 drivers/infiniband/hw/cxgb4/device.c 		 ctx->lldi.ntxq, ctx->lldi.nports);
ctx              1094 drivers/infiniband/hw/cxgb4/device.c 	list_add_tail(&ctx->entry, &uld_ctx_list);
ctx              1097 drivers/infiniband/hw/cxgb4/device.c 	for (i = 0; i < ctx->lldi.nrxq; i++)
ctx              1098 drivers/infiniband/hw/cxgb4/device.c 		pr_debug("rxqid[%u] %u\n", i, ctx->lldi.rxq_ids[i]);
ctx              1100 drivers/infiniband/hw/cxgb4/device.c 	return ctx;
ctx              1168 drivers/infiniband/hw/cxgb4/device.c 	struct uld_ctx *ctx = handle;
ctx              1169 drivers/infiniband/hw/cxgb4/device.c 	struct c4iw_dev *dev = ctx->dev;
ctx              1193 drivers/infiniband/hw/cxgb4/device.c 			pci_name(ctx->lldi.pdev), gl->va,
ctx              1220 drivers/infiniband/hw/cxgb4/device.c 	struct uld_ctx *ctx = handle;
ctx              1225 drivers/infiniband/hw/cxgb4/device.c 		pr_info("%s: Up\n", pci_name(ctx->lldi.pdev));
ctx              1226 drivers/infiniband/hw/cxgb4/device.c 		if (!ctx->dev) {
ctx              1227 drivers/infiniband/hw/cxgb4/device.c 			ctx->dev = c4iw_alloc(&ctx->lldi);
ctx              1228 drivers/infiniband/hw/cxgb4/device.c 			if (IS_ERR(ctx->dev)) {
ctx              1230 drivers/infiniband/hw/cxgb4/device.c 				       pci_name(ctx->lldi.pdev),
ctx              1231 drivers/infiniband/hw/cxgb4/device.c 				       PTR_ERR(ctx->dev));
ctx              1232 drivers/infiniband/hw/cxgb4/device.c 				ctx->dev = NULL;
ctx              1236 drivers/infiniband/hw/cxgb4/device.c 			INIT_WORK(&ctx->reg_work, c4iw_register_device);
ctx              1237 drivers/infiniband/hw/cxgb4/device.c 			queue_work(reg_workq, &ctx->reg_work);
ctx              1241 drivers/infiniband/hw/cxgb4/device.c 		pr_info("%s: Down\n", pci_name(ctx->lldi.pdev));
ctx              1242 drivers/infiniband/hw/cxgb4/device.c 		if (ctx->dev)
ctx              1243 drivers/infiniband/hw/cxgb4/device.c 			c4iw_remove(ctx);
ctx              1247 drivers/infiniband/hw/cxgb4/device.c 		pr_info("%s: Fatal Error\n", pci_name(ctx->lldi.pdev));
ctx              1248 drivers/infiniband/hw/cxgb4/device.c 		if (ctx->dev) {
ctx              1251 drivers/infiniband/hw/cxgb4/device.c 			ctx->dev->rdev.flags |= T4_FATAL_ERROR;
ctx              1253 drivers/infiniband/hw/cxgb4/device.c 			event.device = &ctx->dev->ibdev;
ctx              1255 drivers/infiniband/hw/cxgb4/device.c 			c4iw_remove(ctx);
ctx              1259 drivers/infiniband/hw/cxgb4/device.c 		pr_info("%s: Detach\n", pci_name(ctx->lldi.pdev));
ctx              1260 drivers/infiniband/hw/cxgb4/device.c 		if (ctx->dev)
ctx              1261 drivers/infiniband/hw/cxgb4/device.c 			c4iw_remove(ctx);
ctx              1267 drivers/infiniband/hw/cxgb4/device.c static void stop_queues(struct uld_ctx *ctx)
ctx              1272 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irqsave(&ctx->dev->qps, flags);
ctx              1273 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev->rdev.stats.db_state_transitions++;
ctx              1274 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev->db_state = STOPPED;
ctx              1275 drivers/infiniband/hw/cxgb4/device.c 	if (ctx->dev->rdev.flags & T4_STATUS_PAGE_DISABLED) {
ctx              1276 drivers/infiniband/hw/cxgb4/device.c 		xa_for_each(&ctx->dev->qps, index, qp)
ctx              1279 drivers/infiniband/hw/cxgb4/device.c 		ctx->dev->rdev.status_page->db_off = 1;
ctx              1281 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irqrestore(&ctx->dev->qps, flags);
ctx              1294 drivers/infiniband/hw/cxgb4/device.c static void resume_a_chunk(struct uld_ctx *ctx)
ctx              1300 drivers/infiniband/hw/cxgb4/device.c 		qp = list_first_entry(&ctx->dev->db_fc_list, struct c4iw_qp,
ctx              1304 drivers/infiniband/hw/cxgb4/device.c 		if (list_empty(&ctx->dev->db_fc_list))
ctx              1309 drivers/infiniband/hw/cxgb4/device.c static void resume_queues(struct uld_ctx *ctx)
ctx              1311 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
ctx              1312 drivers/infiniband/hw/cxgb4/device.c 	if (ctx->dev->db_state != STOPPED)
ctx              1314 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev->db_state = FLOW_CONTROL;
ctx              1316 drivers/infiniband/hw/cxgb4/device.c 		if (list_empty(&ctx->dev->db_fc_list)) {
ctx              1320 drivers/infiniband/hw/cxgb4/device.c 			WARN_ON(ctx->dev->db_state != FLOW_CONTROL);
ctx              1321 drivers/infiniband/hw/cxgb4/device.c 			ctx->dev->db_state = NORMAL;
ctx              1322 drivers/infiniband/hw/cxgb4/device.c 			ctx->dev->rdev.stats.db_state_transitions++;
ctx              1323 drivers/infiniband/hw/cxgb4/device.c 			if (ctx->dev->rdev.flags & T4_STATUS_PAGE_DISABLED) {
ctx              1324 drivers/infiniband/hw/cxgb4/device.c 				xa_for_each(&ctx->dev->qps, index, qp)
ctx              1327 drivers/infiniband/hw/cxgb4/device.c 				ctx->dev->rdev.status_page->db_off = 0;
ctx              1331 drivers/infiniband/hw/cxgb4/device.c 			if (cxgb4_dbfifo_count(ctx->dev->rdev.lldi.ports[0], 1)
ctx              1332 drivers/infiniband/hw/cxgb4/device.c 			    < (ctx->dev->rdev.lldi.dbfifo_int_thresh <<
ctx              1334 drivers/infiniband/hw/cxgb4/device.c 				resume_a_chunk(ctx);
ctx              1336 drivers/infiniband/hw/cxgb4/device.c 			if (!list_empty(&ctx->dev->db_fc_list)) {
ctx              1337 drivers/infiniband/hw/cxgb4/device.c 				xa_unlock_irq(&ctx->dev->qps);
ctx              1342 drivers/infiniband/hw/cxgb4/device.c 				xa_lock_irq(&ctx->dev->qps);
ctx              1343 drivers/infiniband/hw/cxgb4/device.c 				if (ctx->dev->db_state != FLOW_CONTROL)
ctx              1349 drivers/infiniband/hw/cxgb4/device.c 	if (ctx->dev->db_state != NORMAL)
ctx              1350 drivers/infiniband/hw/cxgb4/device.c 		ctx->dev->rdev.stats.db_fc_interruptions++;
ctx              1351 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
ctx              1367 drivers/infiniband/hw/cxgb4/device.c static void recover_lost_dbs(struct uld_ctx *ctx, struct qp_list *qp_list)
ctx              1383 drivers/infiniband/hw/cxgb4/device.c 			       pci_name(ctx->lldi.pdev), qp->wq.sq.qid);
ctx              1397 drivers/infiniband/hw/cxgb4/device.c 			       pci_name(ctx->lldi.pdev), qp->wq.rq.qid);
ctx              1414 drivers/infiniband/hw/cxgb4/device.c static void recover_queues(struct uld_ctx *ctx)
ctx              1427 drivers/infiniband/hw/cxgb4/device.c 	ret = cxgb4_flush_eq_cache(ctx->dev->rdev.lldi.ports[0]);
ctx              1430 drivers/infiniband/hw/cxgb4/device.c 		       pci_name(ctx->lldi.pdev));
ctx              1435 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
ctx              1436 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(ctx->dev->db_state != STOPPED);
ctx              1437 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev->db_state = RECOVERY;
ctx              1438 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&ctx->dev->qps, index, qp)
ctx              1443 drivers/infiniband/hw/cxgb4/device.c 		xa_unlock_irq(&ctx->dev->qps);
ctx              1449 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&ctx->dev->qps, index, qp) {
ctx              1454 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
ctx              1457 drivers/infiniband/hw/cxgb4/device.c 	recover_lost_dbs(ctx, &qp_list);
ctx              1463 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
ctx              1464 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(ctx->dev->db_state != RECOVERY);
ctx              1465 drivers/infiniband/hw/cxgb4/device.c 	ctx->dev->db_state = STOPPED;
ctx              1466 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
ctx              1471 drivers/infiniband/hw/cxgb4/device.c 	struct uld_ctx *ctx = handle;
ctx              1475 drivers/infiniband/hw/cxgb4/device.c 		stop_queues(ctx);
ctx              1476 drivers/infiniband/hw/cxgb4/device.c 		ctx->dev->rdev.stats.db_full++;
ctx              1479 drivers/infiniband/hw/cxgb4/device.c 		resume_queues(ctx);
ctx              1480 drivers/infiniband/hw/cxgb4/device.c 		mutex_lock(&ctx->dev->rdev.stats.lock);
ctx              1481 drivers/infiniband/hw/cxgb4/device.c 		ctx->dev->rdev.stats.db_empty++;
ctx              1482 drivers/infiniband/hw/cxgb4/device.c 		mutex_unlock(&ctx->dev->rdev.stats.lock);
ctx              1485 drivers/infiniband/hw/cxgb4/device.c 		recover_queues(ctx);
ctx              1486 drivers/infiniband/hw/cxgb4/device.c 		mutex_lock(&ctx->dev->rdev.stats.lock);
ctx              1487 drivers/infiniband/hw/cxgb4/device.c 		ctx->dev->rdev.stats.db_drop++;
ctx              1488 drivers/infiniband/hw/cxgb4/device.c 		mutex_unlock(&ctx->dev->rdev.stats.lock);
ctx              1492 drivers/infiniband/hw/cxgb4/device.c 			pci_name(ctx->lldi.pdev), control);
ctx              1555 drivers/infiniband/hw/cxgb4/device.c 	struct uld_ctx *ctx, *tmp;
ctx              1558 drivers/infiniband/hw/cxgb4/device.c 	list_for_each_entry_safe(ctx, tmp, &uld_ctx_list, entry) {
ctx              1559 drivers/infiniband/hw/cxgb4/device.c 		if (ctx->dev)
ctx              1560 drivers/infiniband/hw/cxgb4/device.c 			c4iw_remove(ctx);
ctx              1561 drivers/infiniband/hw/cxgb4/device.c 		kfree(ctx);
ctx               945 drivers/infiniband/hw/cxgb4/iw_cxgb4.h int c4iw_ep_redirect(void *ctx, struct dst_entry *old, struct dst_entry *new,
ctx               987 drivers/infiniband/hw/cxgb4/iw_cxgb4.h void c4iw_dealloc(struct uld_ctx *ctx);
ctx               538 drivers/infiniband/hw/cxgb4/provider.c 	struct uld_ctx *ctx = container_of(work, struct uld_ctx, reg_work);
ctx               539 drivers/infiniband/hw/cxgb4/provider.c 	struct c4iw_dev *dev = ctx->dev;
ctx               592 drivers/infiniband/hw/cxgb4/provider.c 	       pci_name(ctx->lldi.pdev), ret);
ctx               593 drivers/infiniband/hw/cxgb4/provider.c 	c4iw_dealloc(ctx);
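
The cxgb4 state-change hits (Up / Down / Fatal Error / Detach) are a switch over a ULD event that creates the device lazily on the first Up and tears it down idempotently on everything else; the doorbell control path (FULL / EMPTY / DROP) has the same switch shape. A stripped-down sketch of that event switch, with an invented enum standing in for the cxgb4 event codes:

    #include <stdio.h>
    #include <stdlib.h>

    enum uld_event { EV_UP, EV_DOWN, EV_FATAL };

    struct uld_ctx { void *dev; };

    static int state_change(struct uld_ctx *ctx, enum uld_event ev)
    {
        switch (ev) {
        case EV_UP:
            if (!ctx->dev)                    /* allocate on first Up only */
                ctx->dev = malloc(64);
            break;
        case EV_DOWN:
        case EV_FATAL:
            if (ctx->dev) {                   /* idempotent teardown */
                free(ctx->dev);
                ctx->dev = NULL;
            }
            break;
        }
        return 0;
    }

    int main(void)
    {
        struct uld_ctx ctx = { 0 };

        state_change(&ctx, EV_UP);
        state_change(&ctx, EV_FATAL);
        printf("dev %s\n", ctx.dev ? "present" : "gone");
        return 0;
    }
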
ctx              1243 drivers/infiniband/hw/hfi1/chip.c #define OVR_LBL(ctx) C_RCV_HDR_OVF_ ## ctx
ctx              1244 drivers/infiniband/hw/hfi1/chip.c #define OVR_ELM(ctx) \
ctx              1245 drivers/infiniband/hw/hfi1/chip.c CNTR_ELEM("RcvHdrOvr" #ctx, \
ctx              1246 drivers/infiniband/hw/hfi1/chip.c 	  (RCV_HDR_OVFL_CNT + ctx * 0x100), \
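
The hfi1 OVR_LBL/OVR_ELM macros lean entirely on the preprocessor: ## pastes the context number into an identifier, # stringizes it into the counter name, and the bare ctx scales the register offset. A standalone demonstration of the same ## / # machinery (BASE_OFF and the struct are invented for the sketch):

    #include <stdio.h>

    #define BASE_OFF 0x1000

    /* Paste: OVR_LBL(2) expands to the identifier C_RCV_HDR_OVF_2. */
    #define OVR_LBL(ctx) C_RCV_HDR_OVF_ ## ctx

    /* Stringify + arithmetic: name "RcvHdrOvr2", offset BASE_OFF + 2*0x100. */
    #define OVR_ELM(ctx) { "RcvHdrOvr" #ctx, BASE_OFF + (ctx) * 0x100 }

    struct cntr { const char *name; unsigned off; };

    enum { OVR_LBL(0), OVR_LBL(1), OVR_LBL(2) };   /* C_RCV_HDR_OVF_0, ... */

    static const struct cntr cntrs[] = { OVR_ELM(0), OVR_ELM(1), OVR_ELM(2) };

    int main(void)
    {
        printf("%s at 0x%x (index %d)\n",
               cntrs[2].name, cntrs[2].off, C_RCV_HDR_OVF_2);
        return 0;
    }
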
ctx              4392 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	struct hns_roce_v2_qp_context ctx[2];
ctx              4393 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	struct hns_roce_v2_qp_context *context = ctx;
ctx              4394 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	struct hns_roce_v2_qp_context *qpc_mask = ctx + 1;
ctx              4450 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	ret = hns_roce_v2_qp_modify(hr_dev, cur_state, new_state, ctx, hr_qp);
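
The hns_roce hits show a two-element context array: element 0 carries the new QPC field values and element 1 is a mask telling the firmware which fields to apply, and both halves travel in one modify command. A toy value/mask modify in the same spirit; the exact mask polarity is firmware-defined, so this sketch simply assumes a set mask bit means "update this bit":

    #include <stdint.h>
    #include <stdio.h>

    struct qp_context { uint32_t state; uint32_t mtu; };

    /* Apply only the bits the mask exposes, like the QPC value/mask pair. */
    static void qp_modify(struct qp_context *hw,
                          const struct qp_context *val,
                          const struct qp_context *mask)
    {
        hw->state = (hw->state & ~mask->state) | (val->state & mask->state);
        hw->mtu   = (hw->mtu   & ~mask->mtu)   | (val->mtu   & mask->mtu);
    }

    int main(void)
    {
        struct qp_context ctx[2] = {
            { .state = 3,   .mtu = 4096 },   /* context: new values          */
            { .state = ~0u, .mtu = 0 },      /* qpc_mask: touch state only   */
        };
        struct qp_context hw = { .state = 1, .mtu = 1024 };

        qp_modify(&hw, &ctx[0], &ctx[1]);
        printf("state=%u mtu=%u\n", hw.state, hw.mtu);   /* state=3 mtu=1024 */
        return 0;
    }
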
ctx               102 drivers/infiniband/hw/mlx4/mad.c __be64 mlx4_ib_get_new_demux_tid(struct mlx4_ib_demux_ctx *ctx)
ctx               104 drivers/infiniband/hw/mlx4/mad.c 	return cpu_to_be64(atomic_inc_return(&ctx->tid)) |
ctx              1302 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_demux_pv_ctx *ctx = cq->cq_context;
ctx              1303 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev);
ctx              1305 drivers/infiniband/hw/mlx4/mad.c 	if (!dev->sriov.is_going_down && ctx->state == DEMUX_PV_STATE_ACTIVE)
ctx              1306 drivers/infiniband/hw/mlx4/mad.c 		queue_work(ctx->wq, &ctx->work);
ctx              1310 drivers/infiniband/hw/mlx4/mad.c static int mlx4_ib_post_pv_qp_buf(struct mlx4_ib_demux_pv_ctx *ctx,
ctx              1324 drivers/infiniband/hw/mlx4/mad.c 	sg_list.lkey = ctx->pd->local_dma_lkey;
ctx              1331 drivers/infiniband/hw/mlx4/mad.c 	ib_dma_sync_single_for_device(ctx->ib_dev, tun_qp->ring[index].map,
ctx              1481 drivers/infiniband/hw/mlx4/mad.c static void mlx4_ib_multiplex_mad(struct mlx4_ib_demux_pv_ctx *ctx, struct ib_wc *wc)
ctx              1483 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev);
ctx              1484 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_demux_pv_qp *tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc->wr_id)];
ctx              1499 drivers/infiniband/hw/mlx4/mad.c 	    (wc->src_qp & 0x1) != ctx->port - 1 ||
ctx              1501 drivers/infiniband/hw/mlx4/mad.c 		mlx4_ib_warn(ctx->ib_dev, "can't multiplex bad sqp:%d\n", wc->src_qp);
ctx              1505 drivers/infiniband/hw/mlx4/mad.c 	if (slave != ctx->slave) {
ctx              1506 drivers/infiniband/hw/mlx4/mad.c 		mlx4_ib_warn(ctx->ib_dev, "can't multiplex bad sqp:%d: "
ctx              1512 drivers/infiniband/hw/mlx4/mad.c 	ib_dma_sync_single_for_cpu(ctx->ib_dev, tun_qp->ring[wr_ix].map,
ctx              1525 drivers/infiniband/hw/mlx4/mad.c 			mlx4_ib_warn(ctx->ib_dev, "egress mad has non-null tid msb:%d "
ctx              1540 drivers/infiniband/hw/mlx4/mad.c 		    !mlx4_vf_smi_enabled(dev->dev, slave, ctx->port))
ctx              1544 drivers/infiniband/hw/mlx4/mad.c 		if (mlx4_ib_multiplex_sa_handler(ctx->ib_dev, ctx->port, slave,
ctx              1549 drivers/infiniband/hw/mlx4/mad.c 		if (mlx4_ib_multiplex_cm_handler(ctx->ib_dev, ctx->port, slave,
ctx              1561 drivers/infiniband/hw/mlx4/mad.c 			mlx4_ib_warn(ctx->ib_dev, "dropping unsupported egress mad from class:%d "
ctx              1570 drivers/infiniband/hw/mlx4/mad.c 	ah.ibah.device = ctx->ib_dev;
ctx              1581 drivers/infiniband/hw/mlx4/mad.c 		fill_in_real_sgid_index(dev, slave, ctx->port, &ah_attr);
ctx              1587 drivers/infiniband/hw/mlx4/mad.c 	if (mlx4_get_slave_default_vlan(dev->dev, ctx->port, slave,
ctx              1591 drivers/infiniband/hw/mlx4/mad.c 	mlx4_ib_send_to_wire(dev, slave, ctx->port,
ctx              1600 drivers/infiniband/hw/mlx4/mad.c static int mlx4_ib_alloc_pv_bufs(struct mlx4_ib_demux_pv_ctx *ctx,
ctx              1610 drivers/infiniband/hw/mlx4/mad.c 	tun_qp = &ctx->qp[qp_type];
ctx              1639 drivers/infiniband/hw/mlx4/mad.c 		tun_qp->ring[i].map = ib_dma_map_single(ctx->ib_dev,
ctx              1643 drivers/infiniband/hw/mlx4/mad.c 		if (ib_dma_mapping_error(ctx->ib_dev, tun_qp->ring[i].map)) {
ctx              1655 drivers/infiniband/hw/mlx4/mad.c 			ib_dma_map_single(ctx->ib_dev,
ctx              1659 drivers/infiniband/hw/mlx4/mad.c 		if (ib_dma_mapping_error(ctx->ib_dev,
ctx              1676 drivers/infiniband/hw/mlx4/mad.c 		ib_dma_unmap_single(ctx->ib_dev, tun_qp->tx_ring[i].buf.map,
ctx              1684 drivers/infiniband/hw/mlx4/mad.c 		ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map,
ctx              1695 drivers/infiniband/hw/mlx4/mad.c static void mlx4_ib_free_pv_qp_bufs(struct mlx4_ib_demux_pv_ctx *ctx,
ctx              1705 drivers/infiniband/hw/mlx4/mad.c 	tun_qp = &ctx->qp[qp_type];
ctx              1716 drivers/infiniband/hw/mlx4/mad.c 		ib_dma_unmap_single(ctx->ib_dev, tun_qp->ring[i].map,
ctx              1722 drivers/infiniband/hw/mlx4/mad.c 		ib_dma_unmap_single(ctx->ib_dev, tun_qp->tx_ring[i].buf.map,
ctx              1734 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_demux_pv_ctx *ctx;
ctx              1738 drivers/infiniband/hw/mlx4/mad.c 	ctx = container_of(work, struct mlx4_ib_demux_pv_ctx, work);
ctx              1739 drivers/infiniband/hw/mlx4/mad.c 	ib_req_notify_cq(ctx->cq, IB_CQ_NEXT_COMP);
ctx              1741 drivers/infiniband/hw/mlx4/mad.c 	while (ib_poll_cq(ctx->cq, 1, &wc) == 1) {
ctx              1742 drivers/infiniband/hw/mlx4/mad.c 		tun_qp = &ctx->qp[MLX4_TUN_WRID_QPN(wc.wr_id)];
ctx              1746 drivers/infiniband/hw/mlx4/mad.c 				mlx4_ib_multiplex_mad(ctx, &wc);
ctx              1747 drivers/infiniband/hw/mlx4/mad.c 				ret = mlx4_ib_post_pv_qp_buf(ctx, tun_qp,
ctx              1773 drivers/infiniband/hw/mlx4/mad.c 				 ctx->slave, wc.status, wc.wr_id);
ctx              1796 drivers/infiniband/hw/mlx4/mad.c static int create_pv_sqp(struct mlx4_ib_demux_pv_ctx *ctx,
ctx              1808 drivers/infiniband/hw/mlx4/mad.c 	tun_qp = &ctx->qp[qp_type];
ctx              1811 drivers/infiniband/hw/mlx4/mad.c 	qp_init_attr.init_attr.send_cq = ctx->cq;
ctx              1812 drivers/infiniband/hw/mlx4/mad.c 	qp_init_attr.init_attr.recv_cq = ctx->cq;
ctx              1821 drivers/infiniband/hw/mlx4/mad.c 		qp_init_attr.port = ctx->port;
ctx              1822 drivers/infiniband/hw/mlx4/mad.c 		qp_init_attr.slave = ctx->slave;
ctx              1831 drivers/infiniband/hw/mlx4/mad.c 	qp_init_attr.init_attr.port_num = ctx->port;
ctx              1832 drivers/infiniband/hw/mlx4/mad.c 	qp_init_attr.init_attr.qp_context = ctx;
ctx              1834 drivers/infiniband/hw/mlx4/mad.c 	tun_qp->qp = ib_create_qp(ctx->pd, &qp_init_attr.init_attr);
ctx              1847 drivers/infiniband/hw/mlx4/mad.c 		ret = find_slave_port_pkey_ix(to_mdev(ctx->ib_dev), ctx->slave,
ctx              1848 drivers/infiniband/hw/mlx4/mad.c 					      ctx->port, IB_DEFAULT_PKEY_FULL,
ctx              1852 drivers/infiniband/hw/mlx4/mad.c 			to_mdev(ctx->ib_dev)->pkeys.virt2phys_pkey[ctx->slave][ctx->port - 1][0];
ctx              1854 drivers/infiniband/hw/mlx4/mad.c 	attr.port_num = ctx->port;
ctx              1878 drivers/infiniband/hw/mlx4/mad.c 		ret = mlx4_ib_post_pv_qp_buf(ctx, tun_qp, i);
ctx              1898 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_demux_pv_ctx *ctx;
ctx              1904 drivers/infiniband/hw/mlx4/mad.c 	ctx = container_of(work, struct mlx4_ib_demux_pv_ctx, work);
ctx              1905 drivers/infiniband/hw/mlx4/mad.c 	ib_req_notify_cq(ctx->cq, IB_CQ_NEXT_COMP);
ctx              1907 drivers/infiniband/hw/mlx4/mad.c 	while (mlx4_ib_poll_cq(ctx->cq, 1, &wc) == 1) {
ctx              1908 drivers/infiniband/hw/mlx4/mad.c 		sqp = &ctx->qp[MLX4_TUN_WRID_QPN(wc.wr_id)];
ctx              1927 drivers/infiniband/hw/mlx4/mad.c 				mlx4_ib_demux_mad(ctx->ib_dev, ctx->port, &wc, grh, mad);
ctx              1928 drivers/infiniband/hw/mlx4/mad.c 				if (mlx4_ib_post_pv_qp_buf(ctx, sqp, wc.wr_id &
ctx              1939 drivers/infiniband/hw/mlx4/mad.c 				 ctx->slave, wc.status, wc.wr_id);
ctx              1956 drivers/infiniband/hw/mlx4/mad.c 	struct mlx4_ib_demux_pv_ctx *ctx;
ctx              1959 drivers/infiniband/hw/mlx4/mad.c 	ctx = kzalloc(sizeof (struct mlx4_ib_demux_pv_ctx), GFP_KERNEL);
ctx              1960 drivers/infiniband/hw/mlx4/mad.c 	if (!ctx)
ctx              1963 drivers/infiniband/hw/mlx4/mad.c 	ctx->ib_dev = &dev->ib_dev;
ctx              1964 drivers/infiniband/hw/mlx4/mad.c 	ctx->port = port;
ctx              1965 drivers/infiniband/hw/mlx4/mad.c 	ctx->slave = slave;
ctx              1966 drivers/infiniband/hw/mlx4/mad.c 	*ret_ctx = ctx;
ctx              1979 drivers/infiniband/hw/mlx4/mad.c 			       int create_tun, struct mlx4_ib_demux_pv_ctx *ctx)
ctx              1984 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->state != DEMUX_PV_STATE_DOWN)
ctx              1987 drivers/infiniband/hw/mlx4/mad.c 	ctx->state = DEMUX_PV_STATE_STARTING;
ctx              1989 drivers/infiniband/hw/mlx4/mad.c 	if (rdma_port_get_link_layer(ibdev, ctx->port) ==
ctx              1991 drivers/infiniband/hw/mlx4/mad.c 		ctx->has_smi = 1;
ctx              1993 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->has_smi) {
ctx              1994 drivers/infiniband/hw/mlx4/mad.c 		ret = mlx4_ib_alloc_pv_bufs(ctx, IB_QPT_SMI, create_tun);
ctx              2001 drivers/infiniband/hw/mlx4/mad.c 	ret = mlx4_ib_alloc_pv_bufs(ctx, IB_QPT_GSI, create_tun);
ctx              2008 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->has_smi)
ctx              2012 drivers/infiniband/hw/mlx4/mad.c 	ctx->cq = ib_create_cq(ctx->ib_dev, mlx4_ib_tunnel_comp_handler,
ctx              2013 drivers/infiniband/hw/mlx4/mad.c 			       NULL, ctx, &cq_attr);
ctx              2014 drivers/infiniband/hw/mlx4/mad.c 	if (IS_ERR(ctx->cq)) {
ctx              2015 drivers/infiniband/hw/mlx4/mad.c 		ret = PTR_ERR(ctx->cq);
ctx              2020 drivers/infiniband/hw/mlx4/mad.c 	ctx->pd = ib_alloc_pd(ctx->ib_dev, 0);
ctx              2021 drivers/infiniband/hw/mlx4/mad.c 	if (IS_ERR(ctx->pd)) {
ctx              2022 drivers/infiniband/hw/mlx4/mad.c 		ret = PTR_ERR(ctx->pd);
ctx              2027 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->has_smi) {
ctx              2028 drivers/infiniband/hw/mlx4/mad.c 		ret = create_pv_sqp(ctx, IB_QPT_SMI, create_tun);
ctx              2036 drivers/infiniband/hw/mlx4/mad.c 	ret = create_pv_sqp(ctx, IB_QPT_GSI, create_tun);
ctx              2044 drivers/infiniband/hw/mlx4/mad.c 		INIT_WORK(&ctx->work, mlx4_ib_tunnel_comp_worker);
ctx              2046 drivers/infiniband/hw/mlx4/mad.c 		INIT_WORK(&ctx->work, mlx4_ib_sqp_comp_worker);
ctx              2048 drivers/infiniband/hw/mlx4/mad.c 	ctx->wq = to_mdev(ibdev)->sriov.demux[port - 1].wq;
ctx              2050 drivers/infiniband/hw/mlx4/mad.c 	ret = ib_req_notify_cq(ctx->cq, IB_CQ_NEXT_COMP);
ctx              2055 drivers/infiniband/hw/mlx4/mad.c 	ctx->state = DEMUX_PV_STATE_ACTIVE;
ctx              2059 drivers/infiniband/hw/mlx4/mad.c 	ctx->wq = NULL;
ctx              2060 drivers/infiniband/hw/mlx4/mad.c 	ib_destroy_qp(ctx->qp[1].qp);
ctx              2061 drivers/infiniband/hw/mlx4/mad.c 	ctx->qp[1].qp = NULL;
ctx              2065 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->has_smi)
ctx              2066 drivers/infiniband/hw/mlx4/mad.c 		ib_destroy_qp(ctx->qp[0].qp);
ctx              2067 drivers/infiniband/hw/mlx4/mad.c 	ctx->qp[0].qp = NULL;
ctx              2070 drivers/infiniband/hw/mlx4/mad.c 	ib_dealloc_pd(ctx->pd);
ctx              2071 drivers/infiniband/hw/mlx4/mad.c 	ctx->pd = NULL;
ctx              2074 drivers/infiniband/hw/mlx4/mad.c 	ib_destroy_cq(ctx->cq);
ctx              2075 drivers/infiniband/hw/mlx4/mad.c 	ctx->cq = NULL;
ctx              2078 drivers/infiniband/hw/mlx4/mad.c 	mlx4_ib_free_pv_qp_bufs(ctx, IB_QPT_GSI, create_tun);
ctx              2081 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->has_smi)
ctx              2082 drivers/infiniband/hw/mlx4/mad.c 		mlx4_ib_free_pv_qp_bufs(ctx, IB_QPT_SMI, create_tun);
ctx              2084 drivers/infiniband/hw/mlx4/mad.c 	ctx->state = DEMUX_PV_STATE_DOWN;
ctx              2089 drivers/infiniband/hw/mlx4/mad.c 				 struct mlx4_ib_demux_pv_ctx *ctx, int flush)
ctx              2091 drivers/infiniband/hw/mlx4/mad.c 	if (!ctx)
ctx              2093 drivers/infiniband/hw/mlx4/mad.c 	if (ctx->state > DEMUX_PV_STATE_DOWN) {
ctx              2094 drivers/infiniband/hw/mlx4/mad.c 		ctx->state = DEMUX_PV_STATE_DOWNING;
ctx              2096 drivers/infiniband/hw/mlx4/mad.c 			flush_workqueue(ctx->wq);
ctx              2097 drivers/infiniband/hw/mlx4/mad.c 		if (ctx->has_smi) {
ctx              2098 drivers/infiniband/hw/mlx4/mad.c 			ib_destroy_qp(ctx->qp[0].qp);
ctx              2099 drivers/infiniband/hw/mlx4/mad.c 			ctx->qp[0].qp = NULL;
ctx              2100 drivers/infiniband/hw/mlx4/mad.c 			mlx4_ib_free_pv_qp_bufs(ctx, IB_QPT_SMI, 1);
ctx              2102 drivers/infiniband/hw/mlx4/mad.c 		ib_destroy_qp(ctx->qp[1].qp);
ctx              2103 drivers/infiniband/hw/mlx4/mad.c 		ctx->qp[1].qp = NULL;
ctx              2104 drivers/infiniband/hw/mlx4/mad.c 		mlx4_ib_free_pv_qp_bufs(ctx, IB_QPT_GSI, 1);
ctx              2105 drivers/infiniband/hw/mlx4/mad.c 		ib_dealloc_pd(ctx->pd);
ctx              2106 drivers/infiniband/hw/mlx4/mad.c 		ctx->pd = NULL;
ctx              2107 drivers/infiniband/hw/mlx4/mad.c 		ib_destroy_cq(ctx->cq);
ctx              2108 drivers/infiniband/hw/mlx4/mad.c 		ctx->cq = NULL;
ctx              2109 drivers/infiniband/hw/mlx4/mad.c 		ctx->state = DEMUX_PV_STATE_DOWN;
ctx              2153 drivers/infiniband/hw/mlx4/mad.c 				       struct mlx4_ib_demux_ctx *ctx,
ctx              2160 drivers/infiniband/hw/mlx4/mad.c 	ctx->tun = kcalloc(dev->dev->caps.sqp_demux,
ctx              2162 drivers/infiniband/hw/mlx4/mad.c 	if (!ctx->tun)
ctx              2165 drivers/infiniband/hw/mlx4/mad.c 	ctx->dev = dev;
ctx              2166 drivers/infiniband/hw/mlx4/mad.c 	ctx->port = port;
ctx              2167 drivers/infiniband/hw/mlx4/mad.c 	ctx->ib_dev = &dev->ib_dev;
ctx              2179 drivers/infiniband/hw/mlx4/mad.c 		ret = alloc_pv_object(dev, i, port, &ctx->tun[i]);
ctx              2186 drivers/infiniband/hw/mlx4/mad.c 	ret = mlx4_ib_mcg_port_init(ctx);
ctx              2193 drivers/infiniband/hw/mlx4/mad.c 	ctx->wq = alloc_ordered_workqueue(name, WQ_MEM_RECLAIM);
ctx              2194 drivers/infiniband/hw/mlx4/mad.c 	if (!ctx->wq) {
ctx              2201 drivers/infiniband/hw/mlx4/mad.c 	ctx->ud_wq = alloc_ordered_workqueue(name, WQ_MEM_RECLAIM);
ctx              2202 drivers/infiniband/hw/mlx4/mad.c 	if (!ctx->ud_wq) {
ctx              2211 drivers/infiniband/hw/mlx4/mad.c 	destroy_workqueue(ctx->wq);
ctx              2212 drivers/infiniband/hw/mlx4/mad.c 	ctx->wq = NULL;
ctx              2215 drivers/infiniband/hw/mlx4/mad.c 	mlx4_ib_mcg_port_cleanup(ctx, 1);
ctx              2219 drivers/infiniband/hw/mlx4/mad.c 	kfree(ctx->tun);
ctx              2220 drivers/infiniband/hw/mlx4/mad.c 	ctx->tun = NULL;
ctx              2245 drivers/infiniband/hw/mlx4/mad.c static void mlx4_ib_free_demux_ctx(struct mlx4_ib_demux_ctx *ctx)
ctx              2248 drivers/infiniband/hw/mlx4/mad.c 	if (ctx) {
ctx              2249 drivers/infiniband/hw/mlx4/mad.c 		struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev);
ctx              2250 drivers/infiniband/hw/mlx4/mad.c 		mlx4_ib_mcg_port_cleanup(ctx, 1);
ctx              2252 drivers/infiniband/hw/mlx4/mad.c 			if (!ctx->tun[i])
ctx              2254 drivers/infiniband/hw/mlx4/mad.c 			if (ctx->tun[i]->state > DEMUX_PV_STATE_DOWN)
ctx              2255 drivers/infiniband/hw/mlx4/mad.c 				ctx->tun[i]->state = DEMUX_PV_STATE_DOWNING;
ctx              2257 drivers/infiniband/hw/mlx4/mad.c 		flush_workqueue(ctx->wq);
ctx              2259 drivers/infiniband/hw/mlx4/mad.c 			destroy_pv_resources(dev, i, ctx->port, ctx->tun[i], 0);
ctx              2260 drivers/infiniband/hw/mlx4/mad.c 			free_pv_object(dev, i, ctx->port);
ctx              2262 drivers/infiniband/hw/mlx4/mad.c 		kfree(ctx->tun);
ctx              2263 drivers/infiniband/hw/mlx4/mad.c 		destroy_workqueue(ctx->ud_wq);
ctx              2264 drivers/infiniband/hw/mlx4/mad.c 		destroy_workqueue(ctx->wq);
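
Both mlx4 workers above recover their context with container_of(work, struct mlx4_ib_demux_pv_ctx, work): the work item is embedded inside the context, so the worker subtracts the member's offset to get back to the enclosing struct. The same trick in plain C, with a local container_of built from offsetof:

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace container_of: pointer to member -> pointer to container. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct work_struct { int pending; };

    struct demux_ctx {
        int port;
        struct work_struct work;    /* embedded, like ctx->work above */
    };

    static void worker(struct work_struct *work)
    {
        struct demux_ctx *ctx = container_of(work, struct demux_ctx, work);

        printf("worker runs for port %d\n", ctx->port);
    }

    int main(void)
    {
        struct demux_ctx ctx = { .port = 2 };

        worker(&ctx.work);          /* only the member pointer is handed over */
        return 0;
    }
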
ctx               252 drivers/infiniband/hw/mlx4/main.c 	kfree(entry->ctx);
ctx               253 drivers/infiniband/hw/mlx4/main.c 	entry->ctx = NULL;
ctx               293 drivers/infiniband/hw/mlx4/main.c 			port_gid_table->gids[free].ctx = kmalloc(sizeof(*port_gid_table->gids[free].ctx), GFP_ATOMIC);
ctx               294 drivers/infiniband/hw/mlx4/main.c 			if (!port_gid_table->gids[free].ctx) {
ctx               297 drivers/infiniband/hw/mlx4/main.c 				*context = port_gid_table->gids[free].ctx;
ctx               301 drivers/infiniband/hw/mlx4/main.c 				port_gid_table->gids[free].ctx->real_index = free;
ctx               302 drivers/infiniband/hw/mlx4/main.c 				port_gid_table->gids[free].ctx->refcount = 1;
ctx               307 drivers/infiniband/hw/mlx4/main.c 		struct gid_cache_context *ctx = port_gid_table->gids[found].ctx;
ctx               308 drivers/infiniband/hw/mlx4/main.c 		*context = ctx;
ctx               309 drivers/infiniband/hw/mlx4/main.c 		ctx->refcount++;
ctx               343 drivers/infiniband/hw/mlx4/main.c 	struct gid_cache_context *ctx = *context;
ctx               359 drivers/infiniband/hw/mlx4/main.c 	if (ctx) {
ctx               360 drivers/infiniband/hw/mlx4/main.c 		ctx->refcount--;
ctx               361 drivers/infiniband/hw/mlx4/main.c 		if (!ctx->refcount) {
ctx               362 drivers/infiniband/hw/mlx4/main.c 			unsigned int real_index = ctx->real_index;
ctx               398 drivers/infiniband/hw/mlx4/main.c 	struct gid_cache_context *ctx = NULL;
ctx               421 drivers/infiniband/hw/mlx4/main.c 			ctx = port_gid_table->gids[i].ctx;
ctx               424 drivers/infiniband/hw/mlx4/main.c 	if (ctx)
ctx               425 drivers/infiniband/hw/mlx4/main.c 		real_index = ctx->real_index;
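
The gid_cache_context lines above implement a lock-protected find-or-create with a manual refcount, allocated GFP_ATOMIC because the caller holds a spinlock. A hedged sketch of the same pattern (cache_entry, cache_get, cache_put are illustrative names, not the mlx4 ones):

	struct cache_entry {
		int real_index;
		int refcount;
	};

	/* caller holds the table lock for both helpers */
	static struct cache_entry *cache_get(struct cache_entry **slot, int index)
	{
		if (!*slot) {
			*slot = kmalloc(sizeof(**slot), GFP_ATOMIC);
			if (!*slot)
				return NULL;
			(*slot)->real_index = index;
			(*slot)->refcount = 1;	/* first user */
		} else {
			(*slot)->refcount++;	/* existing entry, take a ref */
		}
		return *slot;
	}

	static void cache_put(struct cache_entry **slot)
	{
		if (*slot && !--(*slot)->refcount) {
			kfree(*slot);
			*slot = NULL;	/* last ref gone: free the slot */
		}
	}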
ctx               162 drivers/infiniband/hw/mlx4/mcg.c static struct mcast_group *mcast_find(struct mlx4_ib_demux_ctx *ctx,
ctx               165 drivers/infiniband/hw/mlx4/mcg.c 	struct rb_node *node = ctx->mcg_table.rb_node;
ctx               183 drivers/infiniband/hw/mlx4/mcg.c static struct mcast_group *mcast_insert(struct mlx4_ib_demux_ctx *ctx,
ctx               186 drivers/infiniband/hw/mlx4/mcg.c 	struct rb_node **link = &ctx->mcg_table.rb_node;
ctx               205 drivers/infiniband/hw/mlx4/mcg.c 	rb_insert_color(&group->node, &ctx->mcg_table);
ctx               209 drivers/infiniband/hw/mlx4/mcg.c static int send_mad_to_wire(struct mlx4_ib_demux_ctx *ctx, struct ib_mad *mad)
ctx               211 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_dev *dev = ctx->dev;
ctx               216 drivers/infiniband/hw/mlx4/mcg.c 	if (!dev->sm_ah[ctx->port - 1]) {
ctx               221 drivers/infiniband/hw/mlx4/mcg.c 	mlx4_ib_query_ah(dev->sm_ah[ctx->port - 1], &ah_attr);
ctx               224 drivers/infiniband/hw/mlx4/mcg.c 				    ctx->port, IB_QPT_GSI, 0, 1, IB_QP1_QKEY,
ctx               228 drivers/infiniband/hw/mlx4/mcg.c static int send_mad_to_slave(int slave, struct mlx4_ib_demux_ctx *ctx,
ctx               231 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_dev *dev = ctx->dev;
ctx               232 drivers/infiniband/hw/mlx4/mcg.c 	struct ib_mad_agent *agent = dev->send_agent[ctx->port - 1][1];
ctx               240 drivers/infiniband/hw/mlx4/mcg.c 	rdma_query_ah(dev->sm_ah[ctx->port - 1], &ah_attr);
ctx               242 drivers/infiniband/hw/mlx4/mcg.c 	if (ib_find_cached_pkey(&dev->ib_dev, ctx->port, IB_DEFAULT_PKEY_FULL, &wc.pkey_index))
ctx               246 drivers/infiniband/hw/mlx4/mcg.c 	wc.port_num = ctx->port;
ctx               249 drivers/infiniband/hw/mlx4/mcg.c 	return mlx4_ib_send_to_slave(dev, slave, ctx->port, IB_QPT_GSI, &wc, NULL, mad);
ctx               439 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_demux_ctx *ctx = group->demux;
ctx               442 drivers/infiniband/hw/mlx4/mcg.c 	mutex_lock(&ctx->mcg_table_lock);
ctx               450 drivers/infiniband/hw/mlx4/mcg.c 				mutex_unlock(&ctx->mcg_table_lock);
ctx               457 drivers/infiniband/hw/mlx4/mcg.c 			del_sysfs_port_mcg_attr(ctx->dev, ctx->port, &group->dentry.attr);
ctx               461 drivers/infiniband/hw/mlx4/mcg.c 			rb_erase(&group->node, &ctx->mcg_table);
ctx               464 drivers/infiniband/hw/mlx4/mcg.c 		mutex_unlock(&ctx->mcg_table_lock);
ctx               469 drivers/infiniband/hw/mlx4/mcg.c 		mutex_unlock(&ctx->mcg_table_lock);
ctx               746 drivers/infiniband/hw/mlx4/mcg.c static struct mcast_group *search_relocate_mgid0_group(struct mlx4_ib_demux_ctx *ctx,
ctx               753 drivers/infiniband/hw/mlx4/mcg.c 	mutex_lock(&ctx->mcg_table_lock);
ctx               754 drivers/infiniband/hw/mlx4/mcg.c 	list_for_each_entry_safe(group, n, &ctx->mcg_mgid0_list, mgid0_list) {
ctx               763 drivers/infiniband/hw/mlx4/mcg.c 				cur_group = mcast_insert(ctx, group);
ctx               773 drivers/infiniband/hw/mlx4/mcg.c 					mutex_unlock(&ctx->mcg_table_lock);
ctx               779 drivers/infiniband/hw/mlx4/mcg.c 				add_sysfs_port_mcg_attr(ctx->dev, ctx->port, &group->dentry.attr);
ctx               781 drivers/infiniband/hw/mlx4/mcg.c 				mutex_unlock(&ctx->mcg_table_lock);
ctx               795 drivers/infiniband/hw/mlx4/mcg.c 				mutex_unlock(&ctx->mcg_table_lock);
ctx               802 drivers/infiniband/hw/mlx4/mcg.c 	mutex_unlock(&ctx->mcg_table_lock);
ctx               810 drivers/infiniband/hw/mlx4/mcg.c static struct mcast_group *acquire_group(struct mlx4_ib_demux_ctx *ctx,
ctx               819 drivers/infiniband/hw/mlx4/mcg.c 		group = mcast_find(ctx, mgid);
ctx               831 drivers/infiniband/hw/mlx4/mcg.c 	group->demux = ctx;
ctx               851 drivers/infiniband/hw/mlx4/mcg.c 		list_add(&group->mgid0_list, &ctx->mcg_mgid0_list);
ctx               855 drivers/infiniband/hw/mlx4/mcg.c 	cur_group = mcast_insert(ctx, group);
ctx               862 drivers/infiniband/hw/mlx4/mcg.c 	add_sysfs_port_mcg_attr(ctx->dev, ctx->port, &group->dentry.attr);
ctx               887 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_demux_ctx *ctx = &dev->sriov.demux[port - 1];
ctx               893 drivers/infiniband/hw/mlx4/mcg.c 		mutex_lock(&ctx->mcg_table_lock);
ctx               894 drivers/infiniband/hw/mlx4/mcg.c 		group = acquire_group(ctx, &rec->mgid, 0);
ctx               895 drivers/infiniband/hw/mlx4/mcg.c 		mutex_unlock(&ctx->mcg_table_lock);
ctx               900 drivers/infiniband/hw/mlx4/mcg.c 				group = search_relocate_mgid0_group(ctx, tid, &rec->mgid);
ctx               914 drivers/infiniband/hw/mlx4/mcg.c 		if (!queue_work(ctx->mcg_wq, &group->work))
ctx               936 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_demux_ctx *ctx = &dev->sriov.demux[port - 1];
ctx               941 drivers/infiniband/hw/mlx4/mcg.c 	if (ctx->flushing)
ctx               956 drivers/infiniband/hw/mlx4/mcg.c 		mutex_lock(&ctx->mcg_table_lock);
ctx               957 drivers/infiniband/hw/mlx4/mcg.c 		group = acquire_group(ctx, &rec->mgid, may_create);
ctx               958 drivers/infiniband/hw/mlx4/mcg.c 		mutex_unlock(&ctx->mcg_table_lock);
ctx              1042 drivers/infiniband/hw/mlx4/mcg.c int mlx4_ib_mcg_port_init(struct mlx4_ib_demux_ctx *ctx)
ctx              1046 drivers/infiniband/hw/mlx4/mcg.c 	atomic_set(&ctx->tid, 0);
ctx              1047 drivers/infiniband/hw/mlx4/mcg.c 	sprintf(name, "mlx4_ib_mcg%d", ctx->port);
ctx              1048 drivers/infiniband/hw/mlx4/mcg.c 	ctx->mcg_wq = alloc_ordered_workqueue(name, WQ_MEM_RECLAIM);
ctx              1049 drivers/infiniband/hw/mlx4/mcg.c 	if (!ctx->mcg_wq)
ctx              1052 drivers/infiniband/hw/mlx4/mcg.c 	mutex_init(&ctx->mcg_table_lock);
ctx              1053 drivers/infiniband/hw/mlx4/mcg.c 	ctx->mcg_table = RB_ROOT;
ctx              1054 drivers/infiniband/hw/mlx4/mcg.c 	INIT_LIST_HEAD(&ctx->mcg_mgid0_list);
ctx              1055 drivers/infiniband/hw/mlx4/mcg.c 	ctx->flushing = 0;
ctx              1073 drivers/infiniband/hw/mlx4/mcg.c static void _mlx4_ib_mcg_port_cleanup(struct mlx4_ib_demux_ctx *ctx, int destroy_wq)
ctx              1082 drivers/infiniband/hw/mlx4/mcg.c 		clean_vf_mcast(ctx, i);
ctx              1087 drivers/infiniband/hw/mlx4/mcg.c 		mutex_lock(&ctx->mcg_table_lock);
ctx              1088 drivers/infiniband/hw/mlx4/mcg.c 		for (p = rb_first(&ctx->mcg_table); p; p = rb_next(p))
ctx              1090 drivers/infiniband/hw/mlx4/mcg.c 		mutex_unlock(&ctx->mcg_table_lock);
ctx              1097 drivers/infiniband/hw/mlx4/mcg.c 	flush_workqueue(ctx->mcg_wq);
ctx              1099 drivers/infiniband/hw/mlx4/mcg.c 		destroy_workqueue(ctx->mcg_wq);
ctx              1101 drivers/infiniband/hw/mlx4/mcg.c 	mutex_lock(&ctx->mcg_table_lock);
ctx              1102 drivers/infiniband/hw/mlx4/mcg.c 	while ((p = rb_first(&ctx->mcg_table)) != NULL) {
ctx              1110 drivers/infiniband/hw/mlx4/mcg.c 	mutex_unlock(&ctx->mcg_table_lock);
ctx              1115 drivers/infiniband/hw/mlx4/mcg.c 	struct mlx4_ib_demux_ctx *ctx;
ctx              1123 drivers/infiniband/hw/mlx4/mcg.c 	_mlx4_ib_mcg_port_cleanup(cw->ctx, cw->destroy_wq);
ctx              1124 drivers/infiniband/hw/mlx4/mcg.c 	cw->ctx->flushing = 0;
ctx              1128 drivers/infiniband/hw/mlx4/mcg.c void mlx4_ib_mcg_port_cleanup(struct mlx4_ib_demux_ctx *ctx, int destroy_wq)
ctx              1132 drivers/infiniband/hw/mlx4/mcg.c 	if (ctx->flushing)
ctx              1135 drivers/infiniband/hw/mlx4/mcg.c 	ctx->flushing = 1;
ctx              1138 drivers/infiniband/hw/mlx4/mcg.c 		_mlx4_ib_mcg_port_cleanup(ctx, destroy_wq);
ctx              1139 drivers/infiniband/hw/mlx4/mcg.c 		ctx->flushing = 0;
ctx              1145 drivers/infiniband/hw/mlx4/mcg.c 		ctx->flushing = 0;
ctx              1149 drivers/infiniband/hw/mlx4/mcg.c 	work->ctx = ctx;
ctx              1225 drivers/infiniband/hw/mlx4/mcg.c void clean_vf_mcast(struct mlx4_ib_demux_ctx *ctx, int slave)
ctx              1230 drivers/infiniband/hw/mlx4/mcg.c 	mutex_lock(&ctx->mcg_table_lock);
ctx              1231 drivers/infiniband/hw/mlx4/mcg.c 	for (p = rb_first(&ctx->mcg_table); p; p = rb_next(p)) {
ctx              1241 drivers/infiniband/hw/mlx4/mcg.c 	mutex_unlock(&ctx->mcg_table_lock);
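
mcast_find()/mcast_insert() above are the standard kernel rb-tree walk: descend comparing keys, then rb_link_node() + rb_insert_color() at the empty link. A compact sketch of the idiom with a scalar key (the real code compares a 16-byte MGID with memcmp):

	/* needs linux/rbtree.h */
	struct group {
		struct rb_node node;
		u64 key;
	};

	static struct group *grp_find(struct rb_root *root, u64 key)
	{
		struct rb_node *n = root->rb_node;

		while (n) {
			struct group *g = rb_entry(n, struct group, node);

			if (key < g->key)
				n = n->rb_left;
			else if (key > g->key)
				n = n->rb_right;
			else
				return g;
		}
		return NULL;
	}

	static struct group *grp_insert(struct rb_root *root, struct group *new)
	{
		struct rb_node **link = &root->rb_node, *parent = NULL;

		while (*link) {
			struct group *g = rb_entry(*link, struct group, node);

			parent = *link;
			if (new->key < g->key)
				link = &(*link)->rb_left;
			else if (new->key > g->key)
				link = &(*link)->rb_right;
			else
				return g; /* duplicate: hand back the existing node */
		}
		rb_link_node(&new->node, parent, link);
		rb_insert_color(&new->node, root);
		return NULL;
	}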
ctx               510 drivers/infiniband/hw/mlx4/mlx4_ib.h 	struct gid_cache_context *ctx;
ctx               534 drivers/infiniband/hw/mlx4/mlx4_ib.h 	void *ctx;
ctx               821 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_mcg_port_init(struct mlx4_ib_demux_ctx *ctx);
ctx               822 drivers/infiniband/hw/mlx4/mlx4_ib.h void mlx4_ib_mcg_port_cleanup(struct mlx4_ib_demux_ctx *ctx, int destroy_wq);
ctx               823 drivers/infiniband/hw/mlx4/mlx4_ib.h void clean_vf_mcast(struct mlx4_ib_demux_ctx *ctx, int slave);
ctx               851 drivers/infiniband/hw/mlx4/mlx4_ib.h __be64 mlx4_ib_get_new_demux_tid(struct mlx4_ib_demux_ctx *ctx);
ctx                51 drivers/infiniband/hw/mlx4/sysfs.c 	struct mlx4_ib_iov_port *port = mlx4_ib_iov_dentry->ctx;
ctx                76 drivers/infiniband/hw/mlx4/sysfs.c 	struct mlx4_ib_iov_port *port = mlx4_ib_iov_dentry->ctx;
ctx               117 drivers/infiniband/hw/mlx4/sysfs.c 	struct mlx4_ib_iov_port *port = mlx4_ib_iov_dentry->ctx;
ctx               144 drivers/infiniband/hw/mlx4/sysfs.c 	struct mlx4_ib_iov_port *port = mlx4_ib_iov_dentry->ctx;
ctx               175 drivers/infiniband/hw/mlx4/sysfs.c 	vdentry->ctx = _ctx;
ctx               193 drivers/infiniband/hw/mlx4/sysfs.c 		vdentry->ctx = NULL;
ctx               685 drivers/infiniband/hw/mlx5/devx.c 		rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
ctx               696 drivers/infiniband/hw/mlx5/devx.c 		sqc = MLX5_ADDR_OF(create_sq_in, in, ctx);
ctx               711 drivers/infiniband/hw/mlx5/devx.c 		rmpc = MLX5_ADDR_OF(create_rmp_in, in, ctx);
ctx              1522 drivers/infiniband/hw/mlx5/main.c 	struct mlx5_hca_vport_context ctx = {};
ctx              1531 drivers/infiniband/hw/mlx5/main.c 	err = mlx5_query_hca_vport_context(mdev, 0, mdev_port_num, 0, &ctx);
ctx              1535 drivers/infiniband/hw/mlx5/main.c 	if (~ctx.cap_mask1_perm & mask) {
ctx              1537 drivers/infiniband/hw/mlx5/main.c 			     mask, ctx.cap_mask1_perm);
ctx              1542 drivers/infiniband/hw/mlx5/main.c 	ctx.cap_mask1 = value;
ctx              1543 drivers/infiniband/hw/mlx5/main.c 	ctx.cap_mask1_perm = mask;
ctx              1545 drivers/infiniband/hw/mlx5/main.c 						 0, &ctx);
ctx              2285 drivers/infiniband/hw/mlx5/main.c static int handle_alloc_dm_memic(struct ib_ucontext *ctx,
ctx              2290 drivers/infiniband/hw/mlx5/main.c 	struct mlx5_dm *dm_db = &to_mdev(ctx->device)->dm;
ctx              2319 drivers/infiniband/hw/mlx5/main.c 	bitmap_set(to_mucontext(ctx)->dm_pages, page_idx,
ctx              2330 drivers/infiniband/hw/mlx5/main.c static int handle_alloc_dm_sw_icm(struct ib_ucontext *ctx,
ctx              2336 drivers/infiniband/hw/mlx5/main.c 	struct mlx5_core_dev *dev = to_mdev(ctx->device)->mdev;
ctx              2348 drivers/infiniband/hw/mlx5/main.c 				   to_mucontext(ctx)->devx_uid, &dm->dev_addr,
ctx              2358 drivers/infiniband/hw/mlx5/main.c 				       to_mucontext(ctx)->devx_uid, dm->dev_addr,
ctx              2424 drivers/infiniband/hw/mlx5/main.c 	struct mlx5_ib_ucontext *ctx = rdma_udata_to_drv_context(
ctx              2441 drivers/infiniband/hw/mlx5/main.c 		bitmap_clear(ctx->dm_pages, page_idx,
ctx              2446 drivers/infiniband/hw/mlx5/main.c 					     dm->size, ctx->devx_uid, dm->dev_addr,
ctx              2453 drivers/infiniband/hw/mlx5/main.c 					     dm->size, ctx->devx_uid, dm->dev_addr,
ctx              2638 drivers/infiniband/hw/mlx5/main.c 		action->esp_id = (uintptr_t)maction->esp_aes_gcm.ctx;
ctx              4252 drivers/infiniband/hw/mlx5/main.c 	action->esp_aes_gcm.ctx =
ctx              4254 drivers/infiniband/hw/mlx5/main.c 	if (IS_ERR(action->esp_aes_gcm.ctx)) {
ctx              4255 drivers/infiniband/hw/mlx5/main.c 		err = PTR_ERR(action->esp_aes_gcm.ctx);
ctx              4294 drivers/infiniband/hw/mlx5/main.c 	memcpy(&accel_attrs, &maction->esp_aes_gcm.ctx->attrs,
ctx              4303 drivers/infiniband/hw/mlx5/main.c 	err = mlx5_accel_esp_modify_xfrm(maction->esp_aes_gcm.ctx,
ctx              4326 drivers/infiniband/hw/mlx5/main.c 		mlx5_accel_esp_destroy_xfrm(maction->esp_aes_gcm.ctx);
ctx               867 drivers/infiniband/hw/mlx5/mlx5_ib.h 			struct mlx5_accel_esp_xfrm *ctx;
ctx              1196 drivers/infiniband/hw/mlx5/qp.c 	void *tisc = MLX5_ADDR_OF(create_tis_in, in, ctx);
ctx              1251 drivers/infiniband/hw/mlx5/qp.c 	sqc = MLX5_ADDR_OF(create_sq_in, in, ctx);
ctx              1340 drivers/infiniband/hw/mlx5/qp.c 	rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
ctx              1416 drivers/infiniband/hw/mlx5/qp.c 	tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx              1685 drivers/infiniband/hw/mlx5/qp.c 	tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx              2870 drivers/infiniband/hw/mlx5/qp.c 	tisc = MLX5_ADDR_OF(modify_tis_in, in, ctx);
ctx              2897 drivers/infiniband/hw/mlx5/qp.c 	tisc = MLX5_ADDR_OF(modify_tis_in, in, ctx);
ctx              3166 drivers/infiniband/hw/mlx5/qp.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx              3213 drivers/infiniband/hw/mlx5/qp.c 	sqc = MLX5_ADDR_OF(modify_sq_in, in, ctx);
ctx              5938 drivers/infiniband/hw/mlx5/qp.c 	rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
ctx              6316 drivers/infiniband/hw/mlx5/qp.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx               321 drivers/infiniband/hw/mlx5/srq_cmd.c 	rmpc = MLX5_ADDR_OF(create_rmp_in, create_in, ctx);
ctx               375 drivers/infiniband/hw/mlx5/srq_cmd.c 	rmpc = MLX5_ADDR_OF(modify_rmp_in, in, ctx);
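
Every MLX5_ADDR_OF(type, buf, ctx) call above resolves the byte offset of the ctx field inside a firmware command layout and returns a pointer into the command buffer, so the caller can fill the hardware context in place. The mlx5 macro works from generated bit-offset tables; as a plain-C analogy only, the idiom reduces to offsetof arithmetic:

	#include <stddef.h>

	struct create_rq_in {
		unsigned char hdr[16];
		unsigned char ctx[64];	/* hardware context blob */
	};

	/* hypothetical analogue of MLX5_ADDR_OF(); not the real macro */
	#define ADDR_OF(type, base, field) \
		((void *)((char *)(base) + offsetof(struct type, field)))

	/* usage sketch: void *rqc = ADDR_OF(create_rq_in, in, ctx); */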
ctx               769 drivers/infiniband/hw/mthca/mthca_cq.c 		  struct mthca_ucontext *ctx, u32 pdn,
ctx               777 drivers/infiniband/hw/mthca/mthca_cq.c 	cq->is_kernel = !ctx;
ctx               827 drivers/infiniband/hw/mthca/mthca_cq.c 	if (ctx)
ctx               828 drivers/infiniband/hw/mthca/mthca_cq.c 		cq_context->logsize_usrpage |= cpu_to_be32(ctx->uar.index);
ctx               915 drivers/infiniband/hw/mthca/mthca_cq.c 		__be32 *ctx = mailbox->buf;
ctx               922 drivers/infiniband/hw/mthca/mthca_cq.c 			printk(KERN_ERR "[%2x] %08x\n", j * 4, be32_to_cpu(ctx[j]));
ctx               499 drivers/infiniband/hw/mthca/mthca_dev.h 		  struct mthca_ucontext *ctx, u32 pdn,
ctx               475 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	struct ocrdma_ucontext *ctx = get_ocrdma_ucontext(uctx);
ctx               483 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	INIT_LIST_HEAD(&ctx->mm_head);
ctx               484 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	mutex_init(&ctx->mm_list_lock);
ctx               486 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ctx->ah_tbl.va = dma_alloc_coherent(&pdev->dev, map_len,
ctx               487 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 					    &ctx->ah_tbl.pa, GFP_KERNEL);
ctx               488 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	if (!ctx->ah_tbl.va)
ctx               491 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ctx->ah_tbl.len = map_len;
ctx               493 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	resp.ah_tbl_len = ctx->ah_tbl.len;
ctx               494 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	resp.ah_tbl_page = virt_to_phys(ctx->ah_tbl.va);
ctx               496 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_add_mmap(ctx, resp.ah_tbl_page, resp.ah_tbl_len);
ctx               500 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	status = ocrdma_alloc_ucontext_pd(dev, ctx, udata);
ctx               517 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ocrdma_dealloc_ucontext_pd(ctx);
ctx               519 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	ocrdma_del_mmap(ctx, ctx->ah_tbl.pa, ctx->ah_tbl.len);
ctx               521 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	dma_free_coherent(&pdev->dev, ctx->ah_tbl.len, ctx->ah_tbl.va,
ctx               522 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			  ctx->ah_tbl.pa);
ctx               318 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_ucontext *ctx = get_qedr_ucontext(uctx);
ctx               334 drivers/infiniband/hw/qedr/verbs.c 	ctx->dpi = oparams.dpi;
ctx               335 drivers/infiniband/hw/qedr/verbs.c 	ctx->dpi_addr = oparams.dpi_addr;
ctx               336 drivers/infiniband/hw/qedr/verbs.c 	ctx->dpi_phys_addr = oparams.dpi_phys_addr;
ctx               337 drivers/infiniband/hw/qedr/verbs.c 	ctx->dpi_size = oparams.dpi_size;
ctx               338 drivers/infiniband/hw/qedr/verbs.c 	INIT_LIST_HEAD(&ctx->mm_head);
ctx               339 drivers/infiniband/hw/qedr/verbs.c 	mutex_init(&ctx->mm_list_lock);
ctx               344 drivers/infiniband/hw/qedr/verbs.c 	uresp.db_pa = ctx->dpi_phys_addr;
ctx               345 drivers/infiniband/hw/qedr/verbs.c 	uresp.db_size = ctx->dpi_size;
ctx               358 drivers/infiniband/hw/qedr/verbs.c 	ctx->dev = dev;
ctx               360 drivers/infiniband/hw/qedr/verbs.c 	rc = qedr_add_mmap(ctx, ctx->dpi_phys_addr, ctx->dpi_size);
ctx               365 drivers/infiniband/hw/qedr/verbs.c 		 &ctx->ibucontext);
ctx               749 drivers/infiniband/hw/qedr/verbs.c 				       struct qedr_ucontext *ctx,
ctx               761 drivers/infiniband/hw/qedr/verbs.c 	params->dpi = (ctx) ? ctx->dpi : dev->dpi;
ctx               814 drivers/infiniband/hw/qedr/verbs.c 	struct qedr_ucontext *ctx = rdma_udata_to_drv_context(
ctx               887 drivers/infiniband/hw/qedr/verbs.c 	qedr_init_cq_params(cq, ctx, dev, vector, chain_entries, page_cnt,
ctx               106 drivers/infiniband/hw/usnic/usnic_ib_main.c 	struct usnic_ib_ucontext *ctx;
ctx               113 drivers/infiniband/hw/usnic/usnic_ib_main.c 	list_for_each_entry(ctx, &us_ibdev->ctx_list, link) {
ctx               114 drivers/infiniband/hw/usnic/usnic_ib_main.c 		list_for_each_entry(qp_grp, &ctx->qp_grp_list, link) {
ctx                54 drivers/infiniband/hw/usnic/usnic_ib_qp_grp.h 	struct usnic_ib_ucontext		*ctx;
ctx               220 drivers/infiniband/hw/usnic/usnic_ib_sysfs.c 	return scnprintf(buf, PAGE_SIZE, "0x%p\n", qp_grp->ctx);
ctx               529 drivers/infiniband/hw/usnic/usnic_ib_verbs.c 	qp_grp->ctx = ucontext;
ctx                69 drivers/infiniband/sw/siw/siw_verbs.c int siw_mmap(struct ib_ucontext *ctx, struct vm_area_struct *vma)
ctx                71 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_ucontext *uctx = to_siw_ctx(ctx);
ctx               100 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_ucontext *ctx = to_siw_ctx(base_ctx);
ctx               108 drivers/infiniband/sw/siw/siw_verbs.c 	xa_init_flags(&ctx->xa, XA_FLAGS_ALLOC);
ctx               109 drivers/infiniband/sw/siw/siw_verbs.c 	ctx->uobj_nextkey = 0;
ctx               110 drivers/infiniband/sw/siw/siw_verbs.c 	ctx->sdev = sdev;
ctx              1085 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_ucontext *ctx =
ctx              1093 drivers/infiniband/sw/siw/siw_verbs.c 	if (ctx && cq->xa_cq_index != SIW_INVAL_UOBJ_KEY)
ctx              1094 drivers/infiniband/sw/siw/siw_verbs.c 		kfree(xa_erase(&ctx->xa, cq->xa_cq_index));
ctx              1154 drivers/infiniband/sw/siw/siw_verbs.c 		struct siw_ucontext *ctx =
ctx              1159 drivers/infiniband/sw/siw/siw_verbs.c 			siw_create_uobj(ctx, cq->queue,
ctx              1184 drivers/infiniband/sw/siw/siw_verbs.c 		struct siw_ucontext *ctx =
ctx              1188 drivers/infiniband/sw/siw/siw_verbs.c 			kfree(xa_erase(&ctx->xa, cq->xa_cq_index));
ctx              1575 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_ucontext *ctx =
ctx              1613 drivers/infiniband/sw/siw/siw_verbs.c 			ctx, srq->recvq, srq->num_rqe * sizeof(struct siw_rqe));
ctx              1638 drivers/infiniband/sw/siw/siw_verbs.c 		if (ctx && srq->xa_srq_index != SIW_INVAL_UOBJ_KEY)
ctx              1639 drivers/infiniband/sw/siw/siw_verbs.c 			kfree(xa_erase(&ctx->xa, srq->xa_srq_index));
ctx              1721 drivers/infiniband/sw/siw/siw_verbs.c 	struct siw_ucontext *ctx =
ctx              1725 drivers/infiniband/sw/siw/siw_verbs.c 	if (ctx && srq->xa_srq_index != SIW_INVAL_UOBJ_KEY)
ctx              1726 drivers/infiniband/sw/siw/siw_verbs.c 		kfree(xa_erase(&ctx->xa, srq->xa_srq_index));
ctx                85 drivers/infiniband/sw/siw/siw_verbs.h int siw_mmap(struct ib_ucontext *ctx, struct vm_area_struct *vma);
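
siw keeps per-ucontext user objects in an xarray created with XA_FLAGS_ALLOC and releases them with kfree(xa_erase(...)), as in the lines above. A minimal sketch of that allocate/erase cycle, assuming the xa_alloc() API from linux/xarray.h:

	struct uobj {
		void *addr;
		size_t size;
	};

	static int uobj_add(struct xarray *xa, struct uobj *obj, u32 *key)
	{
		/* an XA_FLAGS_ALLOC xarray hands out the next free index in *key */
		return xa_alloc(xa, key, obj, xa_limit_32b, GFP_KERNEL);
	}

	static void uobj_del(struct xarray *xa, u32 key)
	{
		/* xa_erase() returns the removed entry (or NULL), ready to free */
		kfree(xa_erase(xa, key));
	}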
ctx               233 drivers/infiniband/ulp/ipoib/ipoib_cm.c static void ipoib_cm_rx_event_handler(struct ib_event *event, void *ctx)
ctx               235 drivers/infiniband/ulp/ipoib/ipoib_cm.c 	struct ipoib_cm_rx *p = ctx;
ctx               191 drivers/infiniband/ulp/srpt/ib_srpt.c static void srpt_srq_event(struct ib_event *event, void *ctx)
ctx               908 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct srpt_rw_ctx *ctx = &ioctx->rw_ctxs[i];
ctx               913 drivers/infiniband/ulp/srpt/ib_srpt.c 		ret = target_alloc_sgl(&ctx->sg, &ctx->nents, size, false,
ctx               918 drivers/infiniband/ulp/srpt/ib_srpt.c 		ret = rdma_rw_ctx_init(&ctx->rw, ch->qp, ch->sport->port,
ctx               919 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, 0, remote_addr, rkey, dir);
ctx               921 drivers/infiniband/ulp/srpt/ib_srpt.c 			target_free_sgl(ctx->sg, ctx->nents);
ctx               930 drivers/infiniband/ulp/srpt/ib_srpt.c 			sg_chain(prev, prev_nents + 1, ctx->sg);
ctx               932 drivers/infiniband/ulp/srpt/ib_srpt.c 			*sg = ctx->sg;
ctx               935 drivers/infiniband/ulp/srpt/ib_srpt.c 		prev = ctx->sg;
ctx               936 drivers/infiniband/ulp/srpt/ib_srpt.c 		prev_nents = ctx->nents;
ctx               938 drivers/infiniband/ulp/srpt/ib_srpt.c 		*sg_cnt += ctx->nents;
ctx               945 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct srpt_rw_ctx *ctx = &ioctx->rw_ctxs[i];
ctx               947 drivers/infiniband/ulp/srpt/ib_srpt.c 		rdma_rw_ctx_destroy(&ctx->rw, ch->qp, ch->sport->port,
ctx               948 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, dir);
ctx               949 drivers/infiniband/ulp/srpt/ib_srpt.c 		target_free_sgl(ctx->sg, ctx->nents);
ctx               963 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct srpt_rw_ctx *ctx = &ioctx->rw_ctxs[i];
ctx               965 drivers/infiniband/ulp/srpt/ib_srpt.c 		rdma_rw_ctx_destroy(&ctx->rw, ch->qp, ch->sport->port,
ctx               966 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, dir);
ctx               967 drivers/infiniband/ulp/srpt/ib_srpt.c 		target_free_sgl(ctx->sg, ctx->nents);
ctx              2752 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct srpt_rw_ctx *ctx = &ioctx->rw_ctxs[i];
ctx              2754 drivers/infiniband/ulp/srpt/ib_srpt.c 		first_wr = rdma_rw_ctx_wrs(&ctx->rw, ch->qp, ch->sport->port,
ctx              2828 drivers/infiniband/ulp/srpt/ib_srpt.c 			struct srpt_rw_ctx *ctx = &ioctx->rw_ctxs[i];
ctx              2830 drivers/infiniband/ulp/srpt/ib_srpt.c 			first_wr = rdma_rw_ctx_wrs(&ctx->rw, ch->qp,
ctx                84 drivers/input/rmi4/rmi_bus.h 	irqreturn_t (*attention)(int irq, void *ctx);
ctx               502 drivers/input/rmi4/rmi_driver.c 			     void *ctx,
ctx               504 drivers/input/rmi4/rmi_driver.c 					     void *ctx,
ctx               524 drivers/input/rmi4/rmi_driver.c 		retval = callback(rmi_dev, ctx, &pdt_entry);
ctx               542 drivers/input/rmi4/rmi_driver.c int rmi_scan_pdt(struct rmi_device *rmi_dev, void *ctx,
ctx               544 drivers/input/rmi4/rmi_driver.c 		 void *ctx, const struct pdt_entry *entry))
ctx               552 drivers/input/rmi4/rmi_driver.c 					   ctx, callback);
ctx               777 drivers/input/rmi4/rmi_driver.c 			 void *ctx, const struct pdt_entry *pdt)
ctx               779 drivers/input/rmi4/rmi_driver.c 	int *irq_count = ctx;
ctx               791 drivers/input/rmi4/rmi_driver.c int rmi_initial_reset(struct rmi_device *rmi_dev, void *ctx,
ctx               829 drivers/input/rmi4/rmi_driver.c 			       void *ctx, const struct pdt_entry *pdt)
ctx               833 drivers/input/rmi4/rmi_driver.c 	int *current_irq_count = ctx;
ctx                93 drivers/input/rmi4/rmi_driver.h int rmi_scan_pdt(struct rmi_device *rmi_dev, void *ctx,
ctx                94 drivers/input/rmi4/rmi_driver.h 		 int (*callback)(struct rmi_device *rmi_dev, void *ctx,
ctx               100 drivers/input/rmi4/rmi_driver.h int rmi_initial_reset(struct rmi_device *rmi_dev, void *ctx,
ctx               681 drivers/input/rmi4/rmi_f01.c static irqreturn_t rmi_f01_attention(int irq, void *ctx)
ctx               683 drivers/input/rmi4/rmi_f01.c 	struct rmi_function *fn = ctx;
ctx               244 drivers/input/rmi4/rmi_f03.c static irqreturn_t rmi_f03_attention(int irq, void *ctx)
ctx               246 drivers/input/rmi4/rmi_f03.c 	struct rmi_function *fn = ctx;
ctx              1263 drivers/input/rmi4/rmi_f11.c static irqreturn_t rmi_f11_attention(int irq, void *ctx)
ctx              1265 drivers/input/rmi4/rmi_f11.c 	struct rmi_function *fn = ctx;
ctx               198 drivers/input/rmi4/rmi_f12.c static irqreturn_t rmi_f12_attention(int irq, void *ctx)
ctx               201 drivers/input/rmi4/rmi_f12.c 	struct rmi_function *fn = ctx;
ctx               122 drivers/input/rmi4/rmi_f30.c static irqreturn_t rmi_f30_attention(int irq, void *ctx)
ctx               124 drivers/input/rmi4/rmi_f30.c 	struct rmi_function *fn = ctx;
ctx               100 drivers/input/rmi4/rmi_f34.c static irqreturn_t rmi_f34_attention(int irq, void *ctx)
ctx               102 drivers/input/rmi4/rmi_f34.c 	struct rmi_function *fn = ctx;
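
Each rmi_f* attention handler above receives its registration cookie back as void *ctx and casts it to the owning function device. A hedged sketch of wiring such a handler (my_dev and the IRQF flags are assumptions, not the rmi4 core's exact setup):

	struct my_dev {
		struct device dev;
		int irq;
	};

	static irqreturn_t my_attention(int irq, void *ctx)
	{
		struct my_dev *md = ctx; /* same pointer passed at request time */

		/* ... service this device's interrupt source ... */
		return IRQ_HANDLED;
	}

	static int my_probe(struct my_dev *md)
	{
		/* the final argument is handed back to the handler as ctx */
		return devm_request_threaded_irq(&md->dev, md->irq, NULL,
						 my_attention, IRQF_ONESHOT,
						 "my-dev", md);
	}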
ctx              2097 drivers/input/touchscreen/atmel_mxt_ts.c static void mxt_config_cb(const struct firmware *cfg, void *ctx)
ctx              2099 drivers/input/touchscreen/atmel_mxt_ts.c 	mxt_configure_objects(ctx, cfg);
ctx               816 drivers/input/touchscreen/goodix.c static void goodix_config_cb(const struct firmware *cfg, void *ctx)
ctx               818 drivers/input/touchscreen/goodix.c 	struct goodix_ts_data *ts = ctx;
ctx                34 drivers/interconnect/qcom/smd-rpm.c int qcom_icc_rpm_smd_send(int ctx, int rsc_type, int id, u32 val)
ctx                42 drivers/interconnect/qcom/smd-rpm.c 	return qcom_rpm_smd_write(icc_smd_rpm, ctx, rsc_type, id, &req,
ctx                13 drivers/interconnect/qcom/smd-rpm.h int qcom_icc_rpm_smd_send(int ctx, int rsc_type, int id, u32 val);
ctx                63 drivers/iommu/ipmmu-vmsa.c 	DECLARE_BITMAP(ctx, IPMMU_CTX_MAX);
ctx               388 drivers/iommu/ipmmu-vmsa.c 	ret = find_first_zero_bit(mmu->ctx, mmu->num_ctx);
ctx               391 drivers/iommu/ipmmu-vmsa.c 		set_bit(ret, mmu->ctx);
ctx               407 drivers/iommu/ipmmu-vmsa.c 	clear_bit(context_id, mmu->ctx);
ctx              1061 drivers/iommu/ipmmu-vmsa.c 	bitmap_zero(mmu->ctx, IPMMU_CTX_MAX);
ctx              1122 drivers/iommu/ipmmu-vmsa.c 			set_bit(0, mmu->ctx);
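
ipmmu-vmsa hands out hardware context slots from a DECLARE_BITMAP via find_first_zero_bit()/set_bit() and returns them with clear_bit(), as shown above. A compact sketch of that allocator (the spinlock and CTX_MAX bound are assumptions standing in for the driver's own locking):

	#define CTX_MAX 8

	static DEFINE_SPINLOCK(ctx_lock);
	static DECLARE_BITMAP(ctx_map, CTX_MAX);

	static int ctx_alloc(void)
	{
		int id;

		spin_lock(&ctx_lock);
		id = find_first_zero_bit(ctx_map, CTX_MAX);
		if (id < CTX_MAX)
			set_bit(id, ctx_map); /* claim the free slot */
		spin_unlock(&ctx_lock);

		return id < CTX_MAX ? id : -EBUSY;
	}

	static void ctx_free(int id)
	{
		clear_bit(id, ctx_map); /* atomic, no lock needed to release */
	}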
ctx                81 drivers/iommu/msm_iommu.c 	int ctx;
ctx                95 drivers/iommu/msm_iommu.c 	for (ctx = 0; ctx < ncb; ctx++) {
ctx                96 drivers/iommu/msm_iommu.c 		SET_BPRCOSH(base, ctx, 0);
ctx                97 drivers/iommu/msm_iommu.c 		SET_BPRCISH(base, ctx, 0);
ctx                98 drivers/iommu/msm_iommu.c 		SET_BPRCNSH(base, ctx, 0);
ctx                99 drivers/iommu/msm_iommu.c 		SET_BPSHCFG(base, ctx, 0);
ctx               100 drivers/iommu/msm_iommu.c 		SET_BPMTCFG(base, ctx, 0);
ctx               101 drivers/iommu/msm_iommu.c 		SET_ACTLR(base, ctx, 0);
ctx               102 drivers/iommu/msm_iommu.c 		SET_SCTLR(base, ctx, 0);
ctx               103 drivers/iommu/msm_iommu.c 		SET_FSRRESTORE(base, ctx, 0);
ctx               104 drivers/iommu/msm_iommu.c 		SET_TTBR0(base, ctx, 0);
ctx               105 drivers/iommu/msm_iommu.c 		SET_TTBR1(base, ctx, 0);
ctx               106 drivers/iommu/msm_iommu.c 		SET_TTBCR(base, ctx, 0);
ctx               107 drivers/iommu/msm_iommu.c 		SET_BFBCR(base, ctx, 0);
ctx               108 drivers/iommu/msm_iommu.c 		SET_PAR(base, ctx, 0);
ctx               109 drivers/iommu/msm_iommu.c 		SET_FAR(base, ctx, 0);
ctx               110 drivers/iommu/msm_iommu.c 		SET_CTX_TLBIALL(base, ctx, 0);
ctx               111 drivers/iommu/msm_iommu.c 		SET_TLBFLPTER(base, ctx, 0);
ctx               112 drivers/iommu/msm_iommu.c 		SET_TLBSLPTER(base, ctx, 0);
ctx               113 drivers/iommu/msm_iommu.c 		SET_TLBLKCR(base, ctx, 0);
ctx               114 drivers/iommu/msm_iommu.c 		SET_CONTEXTIDR(base, ctx, 0);
ctx               217 drivers/iommu/msm_iommu.c 	int mid, ctx, i;
ctx               221 drivers/iommu/msm_iommu.c 		ctx = master->num;
ctx               224 drivers/iommu/msm_iommu.c 		SET_CBACR_N(iommu->base, ctx, 0);
ctx               230 drivers/iommu/msm_iommu.c 		SET_CBNDX(iommu->base, mid, ctx);
ctx               233 drivers/iommu/msm_iommu.c 		SET_CBVMID(iommu->base, ctx, 0);
ctx               236 drivers/iommu/msm_iommu.c 		SET_CONTEXTIDR_ASID(iommu->base, ctx, ctx);
ctx               243 drivers/iommu/msm_iommu.c static void __reset_context(void __iomem *base, int ctx)
ctx               245 drivers/iommu/msm_iommu.c 	SET_BPRCOSH(base, ctx, 0);
ctx               246 drivers/iommu/msm_iommu.c 	SET_BPRCISH(base, ctx, 0);
ctx               247 drivers/iommu/msm_iommu.c 	SET_BPRCNSH(base, ctx, 0);
ctx               248 drivers/iommu/msm_iommu.c 	SET_BPSHCFG(base, ctx, 0);
ctx               249 drivers/iommu/msm_iommu.c 	SET_BPMTCFG(base, ctx, 0);
ctx               250 drivers/iommu/msm_iommu.c 	SET_ACTLR(base, ctx, 0);
ctx               251 drivers/iommu/msm_iommu.c 	SET_SCTLR(base, ctx, 0);
ctx               252 drivers/iommu/msm_iommu.c 	SET_FSRRESTORE(base, ctx, 0);
ctx               253 drivers/iommu/msm_iommu.c 	SET_TTBR0(base, ctx, 0);
ctx               254 drivers/iommu/msm_iommu.c 	SET_TTBR1(base, ctx, 0);
ctx               255 drivers/iommu/msm_iommu.c 	SET_TTBCR(base, ctx, 0);
ctx               256 drivers/iommu/msm_iommu.c 	SET_BFBCR(base, ctx, 0);
ctx               257 drivers/iommu/msm_iommu.c 	SET_PAR(base, ctx, 0);
ctx               258 drivers/iommu/msm_iommu.c 	SET_FAR(base, ctx, 0);
ctx               259 drivers/iommu/msm_iommu.c 	SET_CTX_TLBIALL(base, ctx, 0);
ctx               260 drivers/iommu/msm_iommu.c 	SET_TLBFLPTER(base, ctx, 0);
ctx               261 drivers/iommu/msm_iommu.c 	SET_TLBSLPTER(base, ctx, 0);
ctx               262 drivers/iommu/msm_iommu.c 	SET_TLBLKCR(base, ctx, 0);
ctx               265 drivers/iommu/msm_iommu.c static void __program_context(void __iomem *base, int ctx,
ctx               268 drivers/iommu/msm_iommu.c 	__reset_context(base, ctx);
ctx               271 drivers/iommu/msm_iommu.c 	SET_TRE(base, ctx, 1);
ctx               272 drivers/iommu/msm_iommu.c 	SET_AFE(base, ctx, 1);
ctx               276 drivers/iommu/msm_iommu.c 	SET_TLBMCFG(base, ctx, 0x3);
ctx               279 drivers/iommu/msm_iommu.c 	SET_V2PCFG(base, ctx, 0x3);
ctx               281 drivers/iommu/msm_iommu.c 	SET_TTBCR(base, ctx, priv->cfg.arm_v7s_cfg.tcr);
ctx               282 drivers/iommu/msm_iommu.c 	SET_TTBR0(base, ctx, priv->cfg.arm_v7s_cfg.ttbr[0]);
ctx               283 drivers/iommu/msm_iommu.c 	SET_TTBR1(base, ctx, priv->cfg.arm_v7s_cfg.ttbr[1]);
ctx               286 drivers/iommu/msm_iommu.c 	SET_PRRR(base, ctx, priv->cfg.arm_v7s_cfg.prrr);
ctx               287 drivers/iommu/msm_iommu.c 	SET_NMRR(base, ctx, priv->cfg.arm_v7s_cfg.nmrr);
ctx               290 drivers/iommu/msm_iommu.c 	SET_CTX_TLBIALL(base, ctx, 0);
ctx               293 drivers/iommu/msm_iommu.c 	SET_IRPTNDX(base, ctx, 0);
ctx               296 drivers/iommu/msm_iommu.c 	SET_CFEIE(base, ctx, 1);
ctx               299 drivers/iommu/msm_iommu.c 	SET_CFCFG(base, ctx, 1);
ctx               302 drivers/iommu/msm_iommu.c 	SET_RCISH(base, ctx, 1);
ctx               303 drivers/iommu/msm_iommu.c 	SET_RCOSH(base, ctx, 1);
ctx               304 drivers/iommu/msm_iommu.c 	SET_RCNSH(base, ctx, 1);
ctx               307 drivers/iommu/msm_iommu.c 	SET_BFBDFE(base, ctx, 1);
ctx               310 drivers/iommu/msm_iommu.c 	SET_M(base, ctx, 1);
ctx               587 drivers/iommu/msm_iommu.c static void print_ctx_regs(void __iomem *base, int ctx)
ctx               589 drivers/iommu/msm_iommu.c 	unsigned int fsr = GET_FSR(base, ctx);
ctx               591 drivers/iommu/msm_iommu.c 	       GET_FAR(base, ctx), GET_PAR(base, ctx));
ctx               605 drivers/iommu/msm_iommu.c 	       GET_FSYNR0(base, ctx), GET_FSYNR1(base, ctx));
ctx               607 drivers/iommu/msm_iommu.c 	       GET_TTBR0(base, ctx), GET_TTBR1(base, ctx));
ctx               609 drivers/iommu/msm_iommu.c 	       GET_SCTLR(base, ctx), GET_ACTLR(base, ctx));
ctx                11 drivers/iommu/msm_iommu_hw-8xxx.h #define GET_CTX_REG(reg, base, ctx) \
ctx                12 drivers/iommu/msm_iommu_hw-8xxx.h 				(readl((base) + (reg) + ((ctx) << CTX_SHIFT)))
ctx                16 drivers/iommu/msm_iommu_hw-8xxx.h #define SET_CTX_REG(reg, base, ctx, val) \
ctx                17 drivers/iommu/msm_iommu_hw-8xxx.h 			writel((val), ((base) + (reg) + ((ctx) << CTX_SHIFT)))
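
The GET_CTX_REG/SET_CTX_REG macros above bank one register layout per context by shifting the context number into the MMIO offset. The same idiom as inline helpers, sketched with a hypothetical 4 KiB-per-context bank (the real CTX_SHIFT value is hardware-specific):

	#define CTX_SHIFT 12	/* assumed: 4 KiB of registers per context */

	static inline u32 ctx_readl(void __iomem *base, unsigned int reg, int ctx)
	{
		/* context N's copy of reg lives N banks past the base copy */
		return readl(base + reg + (ctx << CTX_SHIFT));
	}

	static inline void ctx_writel(void __iomem *base, unsigned int reg,
				      int ctx, u32 val)
	{
		writel(val, base + reg + (ctx << CTX_SHIFT));
	}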
ctx                95 drivers/iommu/omap-iommu.c 		p = obj->ctx;
ctx               125 drivers/iommu/omap-iommu.c 		p = obj->ctx;
ctx              1211 drivers/iommu/omap-iommu.c 	obj->ctx = (void *)obj + sizeof(*obj);
ctx                74 drivers/iommu/omap-iommu.h 	void *ctx; /* iommu context: registers saved area */
ctx                93 drivers/iommu/qcom_iommu.c iommu_writel(struct qcom_iommu_ctx *ctx, unsigned reg, u32 val)
ctx                95 drivers/iommu/qcom_iommu.c 	writel_relaxed(val, ctx->base + reg);
ctx                99 drivers/iommu/qcom_iommu.c iommu_writeq(struct qcom_iommu_ctx *ctx, unsigned reg, u64 val)
ctx               101 drivers/iommu/qcom_iommu.c 	writeq_relaxed(val, ctx->base + reg);
ctx               105 drivers/iommu/qcom_iommu.c iommu_readl(struct qcom_iommu_ctx *ctx, unsigned reg)
ctx               107 drivers/iommu/qcom_iommu.c 	return readl_relaxed(ctx->base + reg);
ctx               111 drivers/iommu/qcom_iommu.c iommu_readq(struct qcom_iommu_ctx *ctx, unsigned reg)
ctx               113 drivers/iommu/qcom_iommu.c 	return readq_relaxed(ctx->base + reg);
ctx               122 drivers/iommu/qcom_iommu.c 		struct qcom_iommu_ctx *ctx = to_ctx(fwspec, fwspec->ids[i]);
ctx               125 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_TLBSYNC, 0);
ctx               127 drivers/iommu/qcom_iommu.c 		ret = readl_poll_timeout(ctx->base + ARM_SMMU_CB_TLBSTATUS, val,
ctx               130 drivers/iommu/qcom_iommu.c 			dev_err(ctx->dev, "timeout waiting for TLB SYNC\n");
ctx               140 drivers/iommu/qcom_iommu.c 		struct qcom_iommu_ctx *ctx = to_ctx(fwspec, fwspec->ids[i]);
ctx               141 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_S1_TLBIASID, ctx->asid);
ctx               156 drivers/iommu/qcom_iommu.c 		struct qcom_iommu_ctx *ctx = to_ctx(fwspec, fwspec->ids[i]);
ctx               160 drivers/iommu/qcom_iommu.c 		iova |= ctx->asid;
ctx               162 drivers/iommu/qcom_iommu.c 			iommu_writel(ctx, reg, iova);
ctx               198 drivers/iommu/qcom_iommu.c 	struct qcom_iommu_ctx *ctx = dev;
ctx               202 drivers/iommu/qcom_iommu.c 	fsr = iommu_readl(ctx, ARM_SMMU_CB_FSR);
ctx               207 drivers/iommu/qcom_iommu.c 	fsynr = iommu_readl(ctx, ARM_SMMU_CB_FSYNR0);
ctx               208 drivers/iommu/qcom_iommu.c 	iova = iommu_readq(ctx, ARM_SMMU_CB_FAR);
ctx               210 drivers/iommu/qcom_iommu.c 	if (!report_iommu_fault(ctx->domain, ctx->dev, iova, 0)) {
ctx               211 drivers/iommu/qcom_iommu.c 		dev_err_ratelimited(ctx->dev,
ctx               214 drivers/iommu/qcom_iommu.c 				    fsr, iova, fsynr, ctx->asid);
ctx               217 drivers/iommu/qcom_iommu.c 	iommu_writel(ctx, ARM_SMMU_CB_FSR, fsr);
ctx               218 drivers/iommu/qcom_iommu.c 	iommu_writel(ctx, ARM_SMMU_CB_RESUME, RESUME_TERMINATE);
ctx               259 drivers/iommu/qcom_iommu.c 		struct qcom_iommu_ctx *ctx = to_ctx(fwspec, fwspec->ids[i]);
ctx               261 drivers/iommu/qcom_iommu.c 		if (!ctx->secure_init) {
ctx               262 drivers/iommu/qcom_iommu.c 			ret = qcom_scm_restore_sec_cfg(qcom_iommu->sec_id, ctx->asid);
ctx               267 drivers/iommu/qcom_iommu.c 			ctx->secure_init = true;
ctx               271 drivers/iommu/qcom_iommu.c 		iommu_writeq(ctx, ARM_SMMU_CB_TTBR0,
ctx               273 drivers/iommu/qcom_iommu.c 				FIELD_PREP(TTBRn_ASID, ctx->asid));
ctx               274 drivers/iommu/qcom_iommu.c 		iommu_writeq(ctx, ARM_SMMU_CB_TTBR1,
ctx               276 drivers/iommu/qcom_iommu.c 				FIELD_PREP(TTBRn_ASID, ctx->asid));
ctx               279 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_TCR2,
ctx               282 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_TCR,
ctx               286 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_S1_MAIR0,
ctx               288 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_S1_MAIR1,
ctx               298 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_SCTLR, reg);
ctx               300 drivers/iommu/qcom_iommu.c 		ctx->domain = domain;
ctx               411 drivers/iommu/qcom_iommu.c 		struct qcom_iommu_ctx *ctx = to_ctx(fwspec, fwspec->ids[i]);
ctx               414 drivers/iommu/qcom_iommu.c 		iommu_writel(ctx, ARM_SMMU_CB_SCTLR, 0);
ctx               416 drivers/iommu/qcom_iommu.c 		ctx->domain = NULL;
ctx               705 drivers/iommu/qcom_iommu.c 	struct qcom_iommu_ctx *ctx;
ctx               711 drivers/iommu/qcom_iommu.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               712 drivers/iommu/qcom_iommu.c 	if (!ctx)
ctx               715 drivers/iommu/qcom_iommu.c 	ctx->dev = dev;
ctx               716 drivers/iommu/qcom_iommu.c 	platform_set_drvdata(pdev, ctx);
ctx               719 drivers/iommu/qcom_iommu.c 	ctx->base = devm_ioremap_resource(dev, res);
ctx               720 drivers/iommu/qcom_iommu.c 	if (IS_ERR(ctx->base))
ctx               721 drivers/iommu/qcom_iommu.c 		return PTR_ERR(ctx->base);
ctx               730 drivers/iommu/qcom_iommu.c 	iommu_writel(ctx, ARM_SMMU_CB_FSR, iommu_readl(ctx, ARM_SMMU_CB_FSR));
ctx               736 drivers/iommu/qcom_iommu.c 			       ctx);
ctx               748 drivers/iommu/qcom_iommu.c 	ctx->asid = ret;
ctx               750 drivers/iommu/qcom_iommu.c 	dev_dbg(dev, "found asid %u\n", ctx->asid);
ctx               752 drivers/iommu/qcom_iommu.c 	qcom_iommu->ctxs[ctx->asid - 1] = ctx;
ctx               760 drivers/iommu/qcom_iommu.c 	struct qcom_iommu_ctx *ctx = platform_get_drvdata(pdev);
ctx               764 drivers/iommu/qcom_iommu.c 	qcom_iommu->ctxs[ctx->asid - 1] = NULL;
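
qcom_iommu's TLB maintenance above kicks ARM_SMMU_CB_TLBSYNC and then spins on TLBSTATUS with readl_poll_timeout() until the sync completes. A sketch of that kick-and-poll shape (the status bit mask and the 10 us/1 ms poll parameters are assumptions, not the driver's exact values):

	/* needs linux/iopoll.h */
	static int tlb_sync(struct qcom_iommu_ctx *ctx)
	{
		unsigned int val;

		iommu_writel(ctx, ARM_SMMU_CB_TLBSYNC, 0); /* start the sync */

		/* poll until the assumed active bit clears, up to ~1 ms */
		return readl_poll_timeout(ctx->base + ARM_SMMU_CB_TLBSTATUS, val,
					  !(val & 0x1), 10, 1000);
	}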
ctx               178 drivers/irqchip/qcom-irq-combiner.c 	struct get_registers_context *ctx = context;
ctx               191 drivers/irqchip/qcom-irq-combiner.c 		dev_err(ctx->dev, "Bad register resource @%pa\n", &paddr);
ctx               192 drivers/irqchip/qcom-irq-combiner.c 		ctx->err = -EINVAL;
ctx               196 drivers/irqchip/qcom-irq-combiner.c 	vaddr = devm_ioremap(ctx->dev, reg->address, REG_SIZE);
ctx               198 drivers/irqchip/qcom-irq-combiner.c 		dev_err(ctx->dev, "Can't map register @%pa\n", &paddr);
ctx               199 drivers/irqchip/qcom-irq-combiner.c 		ctx->err = -ENOMEM;
ctx               203 drivers/irqchip/qcom-irq-combiner.c 	ctx->combiner->regs[ctx->combiner->nregs].addr = vaddr;
ctx               204 drivers/irqchip/qcom-irq-combiner.c 	ctx->combiner->nirqs += reg->bit_width;
ctx               205 drivers/irqchip/qcom-irq-combiner.c 	ctx->combiner->nregs++;
ctx               213 drivers/irqchip/qcom-irq-combiner.c 	struct get_registers_context ctx;
ctx               218 drivers/irqchip/qcom-irq-combiner.c 	ctx.dev = &pdev->dev;
ctx               219 drivers/irqchip/qcom-irq-combiner.c 	ctx.combiner = comb;
ctx               220 drivers/irqchip/qcom-irq-combiner.c 	ctx.err = 0;
ctx               223 drivers/irqchip/qcom-irq-combiner.c 				     get_registers_cb, &ctx);
ctx               225 drivers/irqchip/qcom-irq-combiner.c 		return ctx.err;
ctx               162 drivers/mailbox/bcm-pdc-mailbox.c 	void *ctx;          /* opaque context associated with frame */
ctx               623 drivers/mailbox/bcm-pdc-mailbox.c 	mssg.ctx = rx_ctx->rxp_ctx;
ctx               815 drivers/mailbox/bcm-pdc-mailbox.c 			    void *ctx)
ctx               852 drivers/mailbox/bcm-pdc-mailbox.c 	rx_ctx->rxp_ctx = ctx;
ctx              1242 drivers/mailbox/bcm-pdc-mailbox.c 	err = pdc_rx_list_init(pdcs, mssg->spu.dst, mssg->ctx);
ctx               172 drivers/mailbox/mailbox-xgene-slimpro.c 	struct slimpro_mbox *ctx;
ctx               178 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(struct slimpro_mbox), GFP_KERNEL);
ctx               179 drivers/mailbox/mailbox-xgene-slimpro.c 	if (!ctx)
ctx               182 drivers/mailbox/mailbox-xgene-slimpro.c 	platform_set_drvdata(pdev, ctx);
ctx               191 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].irq = platform_get_irq(pdev, i);
ctx               192 drivers/mailbox/mailbox-xgene-slimpro.c 		if (ctx->mc[i].irq < 0) {
ctx               201 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].dev = &pdev->dev;
ctx               202 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].reg = mb_base + i * MBOX_REG_SET_OFFSET;
ctx               203 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->mc[i].chan = &ctx->chans[i];
ctx               204 drivers/mailbox/mailbox-xgene-slimpro.c 		ctx->chans[i].con_priv = &ctx->mc[i];
ctx               208 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx->mb_ctrl.dev = &pdev->dev;
ctx               209 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx->mb_ctrl.chans = ctx->chans;
ctx               210 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx->mb_ctrl.txdone_irq = true;
ctx               211 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx->mb_ctrl.ops = &slimpro_mbox_ops;
ctx               212 drivers/mailbox/mailbox-xgene-slimpro.c 	ctx->mb_ctrl.num_chans = i;
ctx               214 drivers/mailbox/mailbox-xgene-slimpro.c 	rc = devm_mbox_controller_register(&pdev->dev, &ctx->mb_ctrl);
ctx                71 drivers/md/dm-crypt.c 	struct convert_context ctx;
ctx                81 drivers/md/dm-crypt.c 	struct convert_context *ctx;
ctx               508 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
ctx               526 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
ctx               657 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
ctx               680 drivers/md/dm-crypt.c 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE)
ctx               893 drivers/md/dm-crypt.c 			       struct convert_context *ctx,
ctx               897 drivers/md/dm-crypt.c 	ctx->bio_in = bio_in;
ctx               898 drivers/md/dm-crypt.c 	ctx->bio_out = bio_out;
ctx               900 drivers/md/dm-crypt.c 		ctx->iter_in = bio_in->bi_iter;
ctx               902 drivers/md/dm-crypt.c 		ctx->iter_out = bio_out->bi_iter;
ctx               903 drivers/md/dm-crypt.c 	ctx->cc_sector = sector + cc->iv_offset;
ctx               904 drivers/md/dm-crypt.c 	init_completion(&ctx->restart);
ctx               953 drivers/md/dm-crypt.c 	struct convert_context *ctx = dmreq->ctx;
ctx               954 drivers/md/dm-crypt.c 	struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
ctx               967 drivers/md/dm-crypt.c 				     struct convert_context *ctx,
ctx               971 drivers/md/dm-crypt.c 	struct bio_vec bv_in = bio_iter_iovec(ctx->bio_in, ctx->iter_in);
ctx               972 drivers/md/dm-crypt.c 	struct bio_vec bv_out = bio_iter_iovec(ctx->bio_out, ctx->iter_out);
ctx               985 drivers/md/dm-crypt.c 	dmreq->iv_sector = ctx->cc_sector;
ctx               988 drivers/md/dm-crypt.c 	dmreq->ctx = ctx;
ctx               993 drivers/md/dm-crypt.c 	*sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset);
ctx              1019 drivers/md/dm-crypt.c 		if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) {
ctx              1034 drivers/md/dm-crypt.c 	if (bio_data_dir(ctx->bio_in) == WRITE) {
ctx              1049 drivers/md/dm-crypt.c 		DMERR_LIMIT("%s: INTEGRITY AEAD ERROR, sector %llu", bio_devname(ctx->bio_in, b),
ctx              1056 drivers/md/dm-crypt.c 	bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size);
ctx              1057 drivers/md/dm-crypt.c 	bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size);
ctx              1063 drivers/md/dm-crypt.c 					struct convert_context *ctx,
ctx              1067 drivers/md/dm-crypt.c 	struct bio_vec bv_in = bio_iter_iovec(ctx->bio_in, ctx->iter_in);
ctx              1068 drivers/md/dm-crypt.c 	struct bio_vec bv_out = bio_iter_iovec(ctx->bio_out, ctx->iter_out);
ctx              1080 drivers/md/dm-crypt.c 	dmreq->iv_sector = ctx->cc_sector;
ctx              1083 drivers/md/dm-crypt.c 	dmreq->ctx = ctx;
ctx              1092 drivers/md/dm-crypt.c 	*sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset);
ctx              1106 drivers/md/dm-crypt.c 		if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) {
ctx              1122 drivers/md/dm-crypt.c 	if (bio_data_dir(ctx->bio_in) == WRITE)
ctx              1130 drivers/md/dm-crypt.c 	bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size);
ctx              1131 drivers/md/dm-crypt.c 	bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size);
ctx              1140 drivers/md/dm-crypt.c 				     struct convert_context *ctx)
ctx              1142 drivers/md/dm-crypt.c 	unsigned key_index = ctx->cc_sector & (cc->tfms_count - 1);
ctx              1144 drivers/md/dm-crypt.c 	if (!ctx->r.req)
ctx              1145 drivers/md/dm-crypt.c 		ctx->r.req = mempool_alloc(&cc->req_pool, GFP_NOIO);
ctx              1147 drivers/md/dm-crypt.c 	skcipher_request_set_tfm(ctx->r.req, cc->cipher_tfm.tfms[key_index]);
ctx              1153 drivers/md/dm-crypt.c 	skcipher_request_set_callback(ctx->r.req,
ctx              1155 drivers/md/dm-crypt.c 	    kcryptd_async_done, dmreq_of_req(cc, ctx->r.req));
ctx              1159 drivers/md/dm-crypt.c 				 struct convert_context *ctx)
ctx              1161 drivers/md/dm-crypt.c 	if (!ctx->r.req_aead)
ctx              1162 drivers/md/dm-crypt.c 		ctx->r.req_aead = mempool_alloc(&cc->req_pool, GFP_NOIO);
ctx              1164 drivers/md/dm-crypt.c 	aead_request_set_tfm(ctx->r.req_aead, cc->cipher_tfm.tfms_aead[0]);
ctx              1170 drivers/md/dm-crypt.c 	aead_request_set_callback(ctx->r.req_aead,
ctx              1172 drivers/md/dm-crypt.c 	    kcryptd_async_done, dmreq_of_req(cc, ctx->r.req_aead));
ctx              1176 drivers/md/dm-crypt.c 			    struct convert_context *ctx)
ctx              1179 drivers/md/dm-crypt.c 		crypt_alloc_req_aead(cc, ctx);
ctx              1181 drivers/md/dm-crypt.c 		crypt_alloc_req_skcipher(cc, ctx);
ctx              1214 drivers/md/dm-crypt.c 			 struct convert_context *ctx)
ctx              1220 drivers/md/dm-crypt.c 	atomic_set(&ctx->cc_pending, 1);
ctx              1222 drivers/md/dm-crypt.c 	while (ctx->iter_in.bi_size && ctx->iter_out.bi_size) {
ctx              1224 drivers/md/dm-crypt.c 		crypt_alloc_req(cc, ctx);
ctx              1225 drivers/md/dm-crypt.c 		atomic_inc(&ctx->cc_pending);
ctx              1228 drivers/md/dm-crypt.c 			r = crypt_convert_block_aead(cc, ctx, ctx->r.req_aead, tag_offset);
ctx              1230 drivers/md/dm-crypt.c 			r = crypt_convert_block_skcipher(cc, ctx, ctx->r.req, tag_offset);
ctx              1238 drivers/md/dm-crypt.c 			wait_for_completion(&ctx->restart);
ctx              1239 drivers/md/dm-crypt.c 			reinit_completion(&ctx->restart);
ctx              1246 drivers/md/dm-crypt.c 			ctx->r.req = NULL;
ctx              1247 drivers/md/dm-crypt.c 			ctx->cc_sector += sector_step;
ctx              1254 drivers/md/dm-crypt.c 			atomic_dec(&ctx->cc_pending);
ctx              1255 drivers/md/dm-crypt.c 			ctx->cc_sector += sector_step;
ctx              1263 drivers/md/dm-crypt.c 			atomic_dec(&ctx->cc_pending);
ctx              1269 drivers/md/dm-crypt.c 			atomic_dec(&ctx->cc_pending);
ctx              1364 drivers/md/dm-crypt.c 	io->ctx.r.req = NULL;
ctx              1388 drivers/md/dm-crypt.c 	if (io->ctx.r.req)
ctx              1389 drivers/md/dm-crypt.c 		crypt_free_req(cc, io->ctx.r.req, base_bio);
ctx              1504 drivers/md/dm-crypt.c 	struct bio *clone = io->ctx.bio_out;
ctx              1565 drivers/md/dm-crypt.c 	struct bio *clone = io->ctx.bio_out;
ctx              1579 drivers/md/dm-crypt.c 	BUG_ON(io->ctx.iter_out.bi_size);
ctx              1618 drivers/md/dm-crypt.c 	crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector);
ctx              1626 drivers/md/dm-crypt.c 	io->ctx.bio_out = clone;
ctx              1627 drivers/md/dm-crypt.c 	io->ctx.iter_out = clone->bi_iter;
ctx              1632 drivers/md/dm-crypt.c 	r = crypt_convert(cc, &io->ctx);
ctx              1635 drivers/md/dm-crypt.c 	crypt_finished = atomic_dec_and_test(&io->ctx.cc_pending);
ctx              1659 drivers/md/dm-crypt.c 	crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio,
ctx              1662 drivers/md/dm-crypt.c 	r = crypt_convert(cc, &io->ctx);
ctx              1666 drivers/md/dm-crypt.c 	if (atomic_dec_and_test(&io->ctx.cc_pending))
ctx              1676 drivers/md/dm-crypt.c 	struct convert_context *ctx = dmreq->ctx;
ctx              1677 drivers/md/dm-crypt.c 	struct dm_crypt_io *io = container_of(ctx, struct dm_crypt_io, ctx);
ctx              1686 drivers/md/dm-crypt.c 		complete(&ctx->restart);
ctx              1695 drivers/md/dm-crypt.c 		DMERR_LIMIT("%s: INTEGRITY AEAD ERROR, sector %llu", bio_devname(ctx->bio_in, b),
ctx              1703 drivers/md/dm-crypt.c 	if (!atomic_dec_and_test(&ctx->cc_pending))
ctx              2801 drivers/md/dm-crypt.c 		io->ctx.r.req_aead = (struct aead_request *)(io + 1);
ctx              2803 drivers/md/dm-crypt.c 		io->ctx.r.req = (struct skcipher_request *)(io + 1);
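
dm-crypt embeds its convert_context directly inside dm_crypt_io and recovers the owner with container_of(), as in lines 954 and 1677 above, so the async crypto completion gets the whole I/O back from just the context pointer. The embedding pattern in isolation (struct names abbreviated for illustration):

	struct convert_ctx {
		sector_t cc_sector;
	};

	struct crypt_io {
		struct bio *base_bio;
		struct convert_ctx ctx;	/* embedded by value, not pointed-to */
	};

	static struct crypt_io *io_from_ctx(struct convert_ctx *ctx)
	{
		/* walk back from the member to the enclosing structure */
		return container_of(ctx, struct crypt_io, ctx);
	}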
ctx              1635 drivers/md/raid5-cache.c 					    struct r5l_recovery_ctx *ctx)
ctx              1639 drivers/md/raid5-cache.c 	ctx->ra_bio = bio_alloc_bioset(GFP_KERNEL, BIO_MAX_PAGES, &log->bs);
ctx              1640 drivers/md/raid5-cache.c 	if (!ctx->ra_bio)
ctx              1643 drivers/md/raid5-cache.c 	ctx->valid_pages = 0;
ctx              1644 drivers/md/raid5-cache.c 	ctx->total_pages = 0;
ctx              1645 drivers/md/raid5-cache.c 	while (ctx->total_pages < R5L_RECOVERY_PAGE_POOL_SIZE) {
ctx              1650 drivers/md/raid5-cache.c 		ctx->ra_pool[ctx->total_pages] = page;
ctx              1651 drivers/md/raid5-cache.c 		ctx->total_pages += 1;
ctx              1654 drivers/md/raid5-cache.c 	if (ctx->total_pages == 0) {
ctx              1655 drivers/md/raid5-cache.c 		bio_put(ctx->ra_bio);
ctx              1659 drivers/md/raid5-cache.c 	ctx->pool_offset = 0;
ctx              1664 drivers/md/raid5-cache.c 					struct r5l_recovery_ctx *ctx)
ctx              1668 drivers/md/raid5-cache.c 	for (i = 0; i < ctx->total_pages; ++i)
ctx              1669 drivers/md/raid5-cache.c 		put_page(ctx->ra_pool[i]);
ctx              1670 drivers/md/raid5-cache.c 	bio_put(ctx->ra_bio);
ctx              1680 drivers/md/raid5-cache.c 				      struct r5l_recovery_ctx *ctx,
ctx              1683 drivers/md/raid5-cache.c 	bio_reset(ctx->ra_bio);
ctx              1684 drivers/md/raid5-cache.c 	bio_set_dev(ctx->ra_bio, log->rdev->bdev);
ctx              1685 drivers/md/raid5-cache.c 	bio_set_op_attrs(ctx->ra_bio, REQ_OP_READ, 0);
ctx              1686 drivers/md/raid5-cache.c 	ctx->ra_bio->bi_iter.bi_sector = log->rdev->data_offset + offset;
ctx              1688 drivers/md/raid5-cache.c 	ctx->valid_pages = 0;
ctx              1689 drivers/md/raid5-cache.c 	ctx->pool_offset = offset;
ctx              1691 drivers/md/raid5-cache.c 	while (ctx->valid_pages < ctx->total_pages) {
ctx              1692 drivers/md/raid5-cache.c 		bio_add_page(ctx->ra_bio,
ctx              1693 drivers/md/raid5-cache.c 			     ctx->ra_pool[ctx->valid_pages], PAGE_SIZE, 0);
ctx              1694 drivers/md/raid5-cache.c 		ctx->valid_pages += 1;
ctx              1702 drivers/md/raid5-cache.c 	return submit_bio_wait(ctx->ra_bio);
ctx              1710 drivers/md/raid5-cache.c 				  struct r5l_recovery_ctx *ctx,
ctx              1716 drivers/md/raid5-cache.c 	if (offset < ctx->pool_offset ||
ctx              1717 drivers/md/raid5-cache.c 	    offset >= ctx->pool_offset + ctx->valid_pages * BLOCK_SECTORS) {
ctx              1718 drivers/md/raid5-cache.c 		ret = r5l_recovery_fetch_ra_pool(log, ctx, offset);
ctx              1723 drivers/md/raid5-cache.c 	BUG_ON(offset < ctx->pool_offset ||
ctx              1724 drivers/md/raid5-cache.c 	       offset >= ctx->pool_offset + ctx->valid_pages * BLOCK_SECTORS);
ctx              1727 drivers/md/raid5-cache.c 	       page_address(ctx->ra_pool[(offset - ctx->pool_offset) >>
ctx              1734 drivers/md/raid5-cache.c 					struct r5l_recovery_ctx *ctx)
ctx              1736 drivers/md/raid5-cache.c 	struct page *page = ctx->meta_page;
ctx              1741 drivers/md/raid5-cache.c 	ret = r5l_recovery_read_page(log, ctx, page, ctx->pos);
ctx              1750 drivers/md/raid5-cache.c 	    le64_to_cpu(mb->seq) != ctx->seq ||
ctx              1752 drivers/md/raid5-cache.c 	    le64_to_cpu(mb->position) != ctx->pos)
ctx              1762 drivers/md/raid5-cache.c 	ctx->meta_total_blocks = BLOCK_SECTORS;
ctx              1814 drivers/md/raid5-cache.c 				   struct r5l_recovery_ctx *ctx,
ctx              1825 drivers/md/raid5-cache.c 	r5l_recovery_read_page(log, ctx, sh->dev[dd_idx].page, log_offset);
ctx              1828 drivers/md/raid5-cache.c 	ctx->meta_total_blocks += BLOCK_SECTORS;
ctx              1836 drivers/md/raid5-cache.c 				     struct r5l_recovery_ctx *ctx,
ctx              1843 drivers/md/raid5-cache.c 	ctx->meta_total_blocks += BLOCK_SECTORS * conf->max_degraded;
ctx              1844 drivers/md/raid5-cache.c 	r5l_recovery_read_page(log, ctx, sh->dev[sh->pd_idx].page, log_offset);
ctx              1851 drivers/md/raid5-cache.c 			log, ctx, sh->dev[sh->qd_idx].page,
ctx              1873 drivers/md/raid5-cache.c 			       struct r5l_recovery_ctx *ctx)
ctx              1923 drivers/md/raid5-cache.c 	ctx->data_parity_stripes++;
ctx              1958 drivers/md/raid5-cache.c 			  struct r5l_recovery_ctx *ctx)
ctx              1971 drivers/md/raid5-cache.c 			    struct r5l_recovery_ctx *ctx)
ctx              1977 drivers/md/raid5-cache.c 			r5l_recovery_replay_one_stripe(sh->raid_conf, sh, ctx);
ctx              1986 drivers/md/raid5-cache.c 				  struct r5l_recovery_ctx *ctx,
ctx              1993 drivers/md/raid5-cache.c 	r5l_recovery_read_page(log, ctx, page, log_offset);
ctx              2006 drivers/md/raid5-cache.c 					 struct r5l_recovery_ctx *ctx)
ctx              2010 drivers/md/raid5-cache.c 	struct r5l_meta_block *mb = page_address(ctx->meta_page);
ctx              2012 drivers/md/raid5-cache.c 	sector_t log_offset = r5l_ring_add(log, ctx->pos, BLOCK_SECTORS);
ctx              2027 drivers/md/raid5-cache.c 				    log, ctx, page, log_offset,
ctx              2032 drivers/md/raid5-cache.c 				    log, ctx, page, log_offset,
ctx              2037 drivers/md/raid5-cache.c 				    log, ctx, page,
ctx              2079 drivers/md/raid5-cache.c 				struct r5l_recovery_ctx *ctx,
ctx              2098 drivers/md/raid5-cache.c 	ret = r5l_recovery_verify_data_checksum_for_mb(log, ctx);
ctx              2104 drivers/md/raid5-cache.c 	mb = page_address(ctx->meta_page);
ctx              2106 drivers/md/raid5-cache.c 	log_offset = r5l_ring_add(log, ctx->pos, BLOCK_SECTORS);
ctx              2153 drivers/md/raid5-cache.c 					cached_stripe_list, ctx);
ctx              2186 drivers/md/raid5-cache.c 				r5l_recovery_replay_one_stripe(conf, sh, ctx);
ctx              2189 drivers/md/raid5-cache.c 			r5l_recovery_load_data(log, sh, ctx, payload,
ctx              2192 drivers/md/raid5-cache.c 			r5l_recovery_load_parity(log, sh, ctx, payload,
ctx              2244 drivers/md/raid5-cache.c 				  struct r5l_recovery_ctx *ctx)
ctx              2251 drivers/md/raid5-cache.c 		if (r5l_recovery_read_meta_block(log, ctx))
ctx              2254 drivers/md/raid5-cache.c 		ret = r5c_recovery_analyze_meta_block(log, ctx,
ctx              2255 drivers/md/raid5-cache.c 						      &ctx->cached_list);
ctx              2262 drivers/md/raid5-cache.c 		ctx->seq++;
ctx              2263 drivers/md/raid5-cache.c 		ctx->pos = r5l_ring_add(log, ctx->pos, ctx->meta_total_blocks);
ctx              2267 drivers/md/raid5-cache.c 		r5c_recovery_drop_stripes(&ctx->cached_list, ctx);
ctx              2272 drivers/md/raid5-cache.c 	r5c_recovery_replay_stripes(&ctx->cached_list, ctx);
ctx              2275 drivers/md/raid5-cache.c 	list_for_each_entry(sh, &ctx->cached_list, lru) {
ctx              2278 drivers/md/raid5-cache.c 		ctx->data_only_stripes++;
ctx              2355 drivers/md/raid5-cache.c 				       struct r5l_recovery_ctx *ctx)
ctx              2369 drivers/md/raid5-cache.c 	WARN_ON(list_empty(&ctx->cached_list));
ctx              2371 drivers/md/raid5-cache.c 	list_for_each_entry(sh, &ctx->cached_list, lru) {
ctx              2379 drivers/md/raid5-cache.c 						     ctx->pos, ctx->seq);
ctx              2382 drivers/md/raid5-cache.c 		write_pos = r5l_ring_add(log, ctx->pos, BLOCK_SECTORS);
ctx              2413 drivers/md/raid5-cache.c 		sync_page_io(log->rdev, ctx->pos, PAGE_SIZE, page,
ctx              2415 drivers/md/raid5-cache.c 		sh->log_start = ctx->pos;
ctx              2418 drivers/md/raid5-cache.c 		ctx->pos = write_pos;
ctx              2419 drivers/md/raid5-cache.c 		ctx->seq += 1;
ctx              2428 drivers/md/raid5-cache.c 						 struct r5l_recovery_ctx *ctx)
ctx              2434 drivers/md/raid5-cache.c 	if (ctx->data_only_stripes == 0)
ctx              2439 drivers/md/raid5-cache.c 	list_for_each_entry_safe(sh, next, &ctx->cached_list, lru) {
ctx              2456 drivers/md/raid5-cache.c 	struct r5l_recovery_ctx *ctx;
ctx              2460 drivers/md/raid5-cache.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2461 drivers/md/raid5-cache.c 	if (!ctx)
ctx              2464 drivers/md/raid5-cache.c 	ctx->pos = log->last_checkpoint;
ctx              2465 drivers/md/raid5-cache.c 	ctx->seq = log->last_cp_seq;
ctx              2466 drivers/md/raid5-cache.c 	INIT_LIST_HEAD(&ctx->cached_list);
ctx              2467 drivers/md/raid5-cache.c 	ctx->meta_page = alloc_page(GFP_KERNEL);
ctx              2469 drivers/md/raid5-cache.c 	if (!ctx->meta_page) {
ctx              2474 drivers/md/raid5-cache.c 	if (r5l_recovery_allocate_ra_pool(log, ctx) != 0) {
ctx              2479 drivers/md/raid5-cache.c 	ret = r5c_recovery_flush_log(log, ctx);
ctx              2484 drivers/md/raid5-cache.c 	pos = ctx->pos;
ctx              2485 drivers/md/raid5-cache.c 	ctx->seq += 10000;
ctx              2487 drivers/md/raid5-cache.c 	if ((ctx->data_only_stripes == 0) && (ctx->data_parity_stripes == 0))
ctx              2492 drivers/md/raid5-cache.c 			 mdname(mddev), ctx->data_only_stripes,
ctx              2493 drivers/md/raid5-cache.c 			 ctx->data_parity_stripes);
ctx              2495 drivers/md/raid5-cache.c 	if (ctx->data_only_stripes == 0) {
ctx              2496 drivers/md/raid5-cache.c 		log->next_checkpoint = ctx->pos;
ctx              2497 drivers/md/raid5-cache.c 		r5l_log_write_empty_meta_block(log, ctx->pos, ctx->seq++);
ctx              2498 drivers/md/raid5-cache.c 		ctx->pos = r5l_ring_add(log, ctx->pos, BLOCK_SECTORS);
ctx              2499 drivers/md/raid5-cache.c 	} else if (r5c_recovery_rewrite_data_only_stripes(log, ctx)) {
ctx              2506 drivers/md/raid5-cache.c 	log->log_start = ctx->pos;
ctx              2507 drivers/md/raid5-cache.c 	log->seq = ctx->seq;
ctx              2511 drivers/md/raid5-cache.c 	r5c_recovery_flush_data_only_stripes(log, ctx);
ctx              2514 drivers/md/raid5-cache.c 	r5l_recovery_free_ra_pool(log, ctx);
ctx              2516 drivers/md/raid5-cache.c 	__free_page(ctx->meta_page);
ctx              2518 drivers/md/raid5-cache.c 	kfree(ctx);
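
The raid5-cache lines above show a read-ahead pool: r5l_recovery_read_page() serves log reads out of a window of pre-fetched pages and calls r5l_recovery_fetch_ra_pool() on a miss. Below is a minimal userspace sketch of that pattern, not the kernel code; the pool size, the flat 'disk' byte array standing in for submit_bio_wait(), and all names are assumptions for illustration.

#include <stdint.h>
#include <string.h>

#define PAGE_SIZE     4096
#define POOL_PAGES    8
#define SECTOR_SHIFT  9                      /* 512-byte sectors */
#define PAGE_SECTORS  (PAGE_SIZE >> SECTOR_SHIFT)

struct ra_pool {
	uint8_t  pages[POOL_PAGES][PAGE_SIZE];
	uint64_t pool_offset;                /* first sector cached */
	int      valid_pages;
};

/* Stand-in for the bio submission: refill the pool from 'disk',
 * which the caller must keep large enough for the copy. */
static void fetch_pool(struct ra_pool *p, const uint8_t *disk, uint64_t sector)
{
	p->pool_offset = sector;
	p->valid_pages = POOL_PAGES;
	memcpy(p->pages, disk + (sector << SECTOR_SHIFT),
	       (size_t)POOL_PAGES * PAGE_SIZE);
}

static void read_page(struct ra_pool *p, const uint8_t *disk,
		      uint64_t sector, uint8_t *out)
{
	uint64_t end = p->pool_offset + (uint64_t)p->valid_pages * PAGE_SECTORS;

	if (sector < p->pool_offset || sector >= end)
		fetch_pool(p, disk, sector);  /* miss: refill from here */

	/* hit: copy straight out of the cached window */
	memcpy(out, p->pages[(sector - p->pool_offset) / PAGE_SECTORS],
	       PAGE_SIZE);
}

During recovery the log is scanned mostly sequentially, so a simple "refill the whole window on miss" policy like this keeps almost every read a memcpy.
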
ctx               439 drivers/media/dvb-core/dmxdev.c 	struct dvb_vb2_ctx *ctx;
ctx               453 drivers/media/dvb-core/dmxdev.c 		ctx = &dmxdevfilter->vb2_ctx;
ctx               458 drivers/media/dvb-core/dmxdev.c 		ctx = &dmxdevfilter->dev->dvr_vb2_ctx;
ctx               462 drivers/media/dvb-core/dmxdev.c 	if (dvb_vb2_is_streaming(ctx)) {
ctx               463 drivers/media/dvb-core/dmxdev.c 		ret = dvb_vb2_fill_buffer(ctx, buffer1, buffer1_len,
ctx               466 drivers/media/dvb-core/dmxdev.c 			ret = dvb_vb2_fill_buffer(ctx, buffer2, buffer2_len,
ctx                37 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);
ctx                39 drivers/media/dvb-core/dvb_vb2.c 	ctx->buf_cnt = *nbuffers;
ctx                41 drivers/media/dvb-core/dvb_vb2.c 	sizes[0] = ctx->buf_siz;
ctx                48 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] count=%d, size=%d\n", ctx->name,
ctx                56 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx                57 drivers/media/dvb-core/dvb_vb2.c 	unsigned long size = ctx->buf_siz;
ctx                61 drivers/media/dvb-core/dvb_vb2.c 			ctx->name, vb2_plane_size(vb, 0), size);
ctx                66 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx                73 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx                77 drivers/media/dvb-core/dvb_vb2.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx                78 drivers/media/dvb-core/dvb_vb2.c 	list_add_tail(&buf->list, &ctx->dvb_q);
ctx                79 drivers/media/dvb-core/dvb_vb2.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx                81 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx                86 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);
ctx                88 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] count=%d\n", ctx->name, count);
ctx                94 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);
ctx                98 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               100 drivers/media/dvb-core/dvb_vb2.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx               101 drivers/media/dvb-core/dvb_vb2.c 	while (!list_empty(&ctx->dvb_q)) {
ctx               102 drivers/media/dvb-core/dvb_vb2.c 		buf = list_entry(ctx->dvb_q.next,
ctx               107 drivers/media/dvb-core/dvb_vb2.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx               112 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);
ctx               114 drivers/media/dvb-core/dvb_vb2.c 	mutex_lock(&ctx->mutex);
ctx               115 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               120 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);
ctx               122 drivers/media/dvb-core/dvb_vb2.c 	if (mutex_is_locked(&ctx->mutex))
ctx               123 drivers/media/dvb-core/dvb_vb2.c 		mutex_unlock(&ctx->mutex);
ctx               124 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               139 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               146 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               151 drivers/media/dvb-core/dvb_vb2.c 	struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               154 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               167 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_init(struct dvb_vb2_ctx *ctx, const char *name, int nonblocking)
ctx               169 drivers/media/dvb-core/dvb_vb2.c 	struct vb2_queue *q = &ctx->vb_q;
ctx               172 drivers/media/dvb-core/dvb_vb2.c 	memset(ctx, 0, sizeof(struct dvb_vb2_ctx));
ctx               178 drivers/media/dvb-core/dvb_vb2.c 	q->drv_priv = ctx;
ctx               187 drivers/media/dvb-core/dvb_vb2.c 		ctx->state = DVB_VB2_STATE_NONE;
ctx               188 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] errno=%d\n", ctx->name, ret);
ctx               192 drivers/media/dvb-core/dvb_vb2.c 	mutex_init(&ctx->mutex);
ctx               193 drivers/media/dvb-core/dvb_vb2.c 	spin_lock_init(&ctx->slock);
ctx               194 drivers/media/dvb-core/dvb_vb2.c 	INIT_LIST_HEAD(&ctx->dvb_q);
ctx               196 drivers/media/dvb-core/dvb_vb2.c 	strscpy(ctx->name, name, DVB_VB2_NAME_MAX);
ctx               197 drivers/media/dvb-core/dvb_vb2.c 	ctx->nonblocking = nonblocking;
ctx               198 drivers/media/dvb-core/dvb_vb2.c 	ctx->state = DVB_VB2_STATE_INIT;
ctx               200 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               205 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_release(struct dvb_vb2_ctx *ctx)
ctx               207 drivers/media/dvb-core/dvb_vb2.c 	struct vb2_queue *q = (struct vb2_queue *)&ctx->vb_q;
ctx               209 drivers/media/dvb-core/dvb_vb2.c 	if (ctx->state & DVB_VB2_STATE_INIT)
ctx               212 drivers/media/dvb-core/dvb_vb2.c 	ctx->state = DVB_VB2_STATE_NONE;
ctx               213 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               218 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_stream_on(struct dvb_vb2_ctx *ctx)
ctx               220 drivers/media/dvb-core/dvb_vb2.c 	struct vb2_queue *q = &ctx->vb_q;
ctx               225 drivers/media/dvb-core/dvb_vb2.c 		ctx->state = DVB_VB2_STATE_NONE;
ctx               226 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] errno=%d\n", ctx->name, ret);
ctx               229 drivers/media/dvb-core/dvb_vb2.c 	ctx->state |= DVB_VB2_STATE_STREAMON;
ctx               230 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               235 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_stream_off(struct dvb_vb2_ctx *ctx)
ctx               237 drivers/media/dvb-core/dvb_vb2.c 	struct vb2_queue *q = (struct vb2_queue *)&ctx->vb_q;
ctx               240 drivers/media/dvb-core/dvb_vb2.c 	ctx->state &= ~DVB_VB2_STATE_STREAMON;
ctx               243 drivers/media/dvb-core/dvb_vb2.c 		ctx->state = DVB_VB2_STATE_NONE;
ctx               244 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] errno=%d\n", ctx->name, ret);
ctx               247 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               252 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_is_streaming(struct dvb_vb2_ctx *ctx)
ctx               254 drivers/media/dvb-core/dvb_vb2.c 	return (ctx->state & DVB_VB2_STATE_STREAMON);
ctx               257 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_fill_buffer(struct dvb_vb2_ctx *ctx,
ctx               273 drivers/media/dvb-core/dvb_vb2.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx               275 drivers/media/dvb-core/dvb_vb2.c 		ctx->flags |= *buffer_flags;
ctx               279 drivers/media/dvb-core/dvb_vb2.c 		if (!ctx->buf) {
ctx               280 drivers/media/dvb-core/dvb_vb2.c 			if (list_empty(&ctx->dvb_q)) {
ctx               282 drivers/media/dvb-core/dvb_vb2.c 					ctx->name);
ctx               286 drivers/media/dvb-core/dvb_vb2.c 			ctx->buf = list_entry(ctx->dvb_q.next,
ctx               288 drivers/media/dvb-core/dvb_vb2.c 			ctx->remain = vb2_plane_size(&ctx->buf->vb, 0);
ctx               289 drivers/media/dvb-core/dvb_vb2.c 			ctx->offset = 0;
ctx               292 drivers/media/dvb-core/dvb_vb2.c 		if (!dvb_vb2_is_streaming(ctx)) {
ctx               293 drivers/media/dvb-core/dvb_vb2.c 			vb2_buffer_done(&ctx->buf->vb, VB2_BUF_STATE_ERROR);
ctx               294 drivers/media/dvb-core/dvb_vb2.c 			list_del(&ctx->buf->list);
ctx               295 drivers/media/dvb-core/dvb_vb2.c 			ctx->buf = NULL;
ctx               300 drivers/media/dvb-core/dvb_vb2.c 		ll = min(todo, ctx->remain);
ctx               301 drivers/media/dvb-core/dvb_vb2.c 		vbuf = vb2_plane_vaddr(&ctx->buf->vb, 0);
ctx               302 drivers/media/dvb-core/dvb_vb2.c 		memcpy(vbuf + ctx->offset, psrc, ll);
ctx               306 drivers/media/dvb-core/dvb_vb2.c 		ctx->remain -= ll;
ctx               307 drivers/media/dvb-core/dvb_vb2.c 		ctx->offset += ll;
ctx               309 drivers/media/dvb-core/dvb_vb2.c 		if (ctx->remain == 0) {
ctx               310 drivers/media/dvb-core/dvb_vb2.c 			vb2_buffer_done(&ctx->buf->vb, VB2_BUF_STATE_DONE);
ctx               311 drivers/media/dvb-core/dvb_vb2.c 			list_del(&ctx->buf->list);
ctx               312 drivers/media/dvb-core/dvb_vb2.c 			ctx->buf = NULL;
ctx               316 drivers/media/dvb-core/dvb_vb2.c 	if (ctx->nonblocking && ctx->buf) {
ctx               317 drivers/media/dvb-core/dvb_vb2.c 		vb2_set_plane_payload(&ctx->buf->vb, 0, ll);
ctx               318 drivers/media/dvb-core/dvb_vb2.c 		vb2_buffer_done(&ctx->buf->vb, VB2_BUF_STATE_DONE);
ctx               319 drivers/media/dvb-core/dvb_vb2.c 		list_del(&ctx->buf->list);
ctx               320 drivers/media/dvb-core/dvb_vb2.c 		ctx->buf = NULL;
ctx               322 drivers/media/dvb-core/dvb_vb2.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx               325 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] %d bytes are dropped.\n", ctx->name, todo);
ctx               327 drivers/media/dvb-core/dvb_vb2.c 		dprintk(3, "[%s]\n", ctx->name);
ctx               329 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] %d bytes are copied\n", ctx->name, len - todo);
ctx               333 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_reqbufs(struct dvb_vb2_ctx *ctx, struct dmx_requestbuffers *req)
ctx               343 drivers/media/dvb-core/dvb_vb2.c 	ctx->buf_siz = req->size;
ctx               344 drivers/media/dvb-core/dvb_vb2.c 	ctx->buf_cnt = req->count;
ctx               345 drivers/media/dvb-core/dvb_vb2.c 	ret = vb2_core_reqbufs(&ctx->vb_q, VB2_MEMORY_MMAP, &req->count);
ctx               347 drivers/media/dvb-core/dvb_vb2.c 		ctx->state = DVB_VB2_STATE_NONE;
ctx               348 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] count=%d size=%d errno=%d\n", ctx->name,
ctx               349 drivers/media/dvb-core/dvb_vb2.c 			ctx->buf_cnt, ctx->buf_siz, ret);
ctx               352 drivers/media/dvb-core/dvb_vb2.c 	ctx->state |= DVB_VB2_STATE_REQBUFS;
ctx               353 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] count=%d size=%d\n", ctx->name,
ctx               354 drivers/media/dvb-core/dvb_vb2.c 		ctx->buf_cnt, ctx->buf_siz);
ctx               359 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_querybuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b)
ctx               361 drivers/media/dvb-core/dvb_vb2.c 	vb2_core_querybuf(&ctx->vb_q, b->index, b);
ctx               362 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] index=%d\n", ctx->name, b->index);
ctx               366 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_expbuf(struct dvb_vb2_ctx *ctx, struct dmx_exportbuffer *exp)
ctx               368 drivers/media/dvb-core/dvb_vb2.c 	struct vb2_queue *q = &ctx->vb_q;
ctx               371 drivers/media/dvb-core/dvb_vb2.c 	ret = vb2_core_expbuf(&ctx->vb_q, &exp->fd, q->type, exp->index,
ctx               374 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] index=%d errno=%d\n", ctx->name,
ctx               378 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] index=%d fd=%d\n", ctx->name, exp->index, exp->fd);
ctx               383 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_qbuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b)
ctx               387 drivers/media/dvb-core/dvb_vb2.c 	ret = vb2_core_qbuf(&ctx->vb_q, b->index, b, NULL);
ctx               389 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] index=%d errno=%d\n", ctx->name,
ctx               393 drivers/media/dvb-core/dvb_vb2.c 	dprintk(5, "[%s] index=%d\n", ctx->name, b->index);
ctx               398 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_dqbuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b)
ctx               403 drivers/media/dvb-core/dvb_vb2.c 	ret = vb2_core_dqbuf(&ctx->vb_q, &b->index, b, ctx->nonblocking);
ctx               405 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] errno=%d\n", ctx->name, ret);
ctx               409 drivers/media/dvb-core/dvb_vb2.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx               410 drivers/media/dvb-core/dvb_vb2.c 	b->count = ctx->count++;
ctx               411 drivers/media/dvb-core/dvb_vb2.c 	b->flags = ctx->flags;
ctx               412 drivers/media/dvb-core/dvb_vb2.c 	ctx->flags = 0;
ctx               413 drivers/media/dvb-core/dvb_vb2.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx               416 drivers/media/dvb-core/dvb_vb2.c 		ctx->name, b->index, ctx->count, b->flags);
ctx               422 drivers/media/dvb-core/dvb_vb2.c int dvb_vb2_mmap(struct dvb_vb2_ctx *ctx, struct vm_area_struct *vma)
ctx               426 drivers/media/dvb-core/dvb_vb2.c 	ret = vb2_mmap(&ctx->vb_q, vma);
ctx               428 drivers/media/dvb-core/dvb_vb2.c 		dprintk(1, "[%s] errno=%d\n", ctx->name, ret);
ctx               431 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s] ret=%d\n", ctx->name, ret);
ctx               436 drivers/media/dvb-core/dvb_vb2.c __poll_t dvb_vb2_poll(struct dvb_vb2_ctx *ctx, struct file *file,
ctx               439 drivers/media/dvb-core/dvb_vb2.c 	dprintk(3, "[%s]\n", ctx->name);
ctx               440 drivers/media/dvb-core/dvb_vb2.c 	return vb2_core_poll(&ctx->vb_q, file, wait);
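
The dvb_vb2.c lines above center on dvb_vb2_fill_buffer(): incoming demux data is spread across a queue of fixed-size buffers, with offset/remain tracking the partially filled head buffer, and data is dropped when the queue runs dry. This is a compilable userspace sketch of just that copy loop, under assumed types; the queue, the done() callback, and the buffer size stand in for the vb2 machinery, and the nonblocking partial-completion path is omitted.

#include <stddef.h>
#include <string.h>

#define BUF_SIZE (188 * 10)       /* illustrative TS-aligned size */

struct sink {
	unsigned char (*bufs)[BUF_SIZE]; /* queued, empty buffers   */
	int    nbufs, head;              /* next buffer to fill     */
	int    cur;                      /* -1: none in progress    */
	size_t offset, remain;
	void  (*done)(int index);        /* completion callback     */
};

/* Returns the number of bytes dropped for lack of a buffer. */
static size_t fill(struct sink *s, const unsigned char *src, size_t len)
{
	while (len) {
		if (s->cur < 0) {
			if (s->head == s->nbufs)
				return len;      /* queue empty: drop */
			s->cur = s->head++;
			s->offset = 0;
			s->remain = BUF_SIZE;
		}
		size_t ll = len < s->remain ? len : s->remain;
		memcpy(s->bufs[s->cur] + s->offset, src, ll);
		src       += ll;
		len       -= ll;
		s->offset += ll;
		s->remain -= ll;
		if (!s->remain) {           /* buffer full: hand it off */
			s->done(s->cur);
			s->cur = -1;
		}
	}
	return 0;
}
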
ctx                85 drivers/media/firewire/firedtv-fw.c static int queue_iso(struct fdtv_ir_context *ctx, int index)
ctx                90 drivers/media/firewire/firedtv-fw.c 	p.interrupt = !(++ctx->interrupt_packet & (IRQ_INTERVAL - 1));
ctx                94 drivers/media/firewire/firedtv-fw.c 	return fw_iso_context_queue(ctx->context, &p, &ctx->buffer,
ctx               102 drivers/media/firewire/firedtv-fw.c 	struct fdtv_ir_context *ctx = fdtv->ir_context;
ctx               104 drivers/media/firewire/firedtv-fw.c 	int length, err, i = ctx->current_packet;
ctx               114 drivers/media/firewire/firedtv-fw.c 		p = ctx->pages[i / PACKETS_PER_PAGE]
ctx               122 drivers/media/firewire/firedtv-fw.c 		err = queue_iso(ctx, i);
ctx               128 drivers/media/firewire/firedtv-fw.c 	fw_iso_context_queue_flush(ctx->context);
ctx               129 drivers/media/firewire/firedtv-fw.c 	ctx->current_packet = i;
ctx               134 drivers/media/firewire/firedtv-fw.c 	struct fdtv_ir_context *ctx;
ctx               138 drivers/media/firewire/firedtv-fw.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               139 drivers/media/firewire/firedtv-fw.c 	if (!ctx)
ctx               142 drivers/media/firewire/firedtv-fw.c 	ctx->context = fw_iso_context_create(device->card,
ctx               145 drivers/media/firewire/firedtv-fw.c 	if (IS_ERR(ctx->context)) {
ctx               146 drivers/media/firewire/firedtv-fw.c 		err = PTR_ERR(ctx->context);
ctx               150 drivers/media/firewire/firedtv-fw.c 	err = fw_iso_buffer_init(&ctx->buffer, device->card,
ctx               155 drivers/media/firewire/firedtv-fw.c 	ctx->interrupt_packet = 0;
ctx               156 drivers/media/firewire/firedtv-fw.c 	ctx->current_packet = 0;
ctx               159 drivers/media/firewire/firedtv-fw.c 		ctx->pages[i] = page_address(ctx->buffer.pages[i]);
ctx               162 drivers/media/firewire/firedtv-fw.c 		err = queue_iso(ctx, i);
ctx               167 drivers/media/firewire/firedtv-fw.c 	err = fw_iso_context_start(ctx->context, -1, 0,
ctx               172 drivers/media/firewire/firedtv-fw.c 	fdtv->ir_context = ctx;
ctx               176 drivers/media/firewire/firedtv-fw.c 	fw_iso_buffer_destroy(&ctx->buffer, device->card);
ctx               178 drivers/media/firewire/firedtv-fw.c 	fw_iso_context_destroy(ctx->context);
ctx               180 drivers/media/firewire/firedtv-fw.c 	kfree(ctx);
ctx               187 drivers/media/firewire/firedtv-fw.c 	struct fdtv_ir_context *ctx = fdtv->ir_context;
ctx               189 drivers/media/firewire/firedtv-fw.c 	fw_iso_context_stop(ctx->context);
ctx               190 drivers/media/firewire/firedtv-fw.c 	fw_iso_buffer_destroy(&ctx->buffer, device_of(fdtv)->card);
ctx               191 drivers/media/firewire/firedtv-fw.c 	fw_iso_context_destroy(ctx->context);
ctx               192 drivers/media/firewire/firedtv-fw.c 	kfree(ctx);
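
The firedtv-fw.c lines above (kmalloc, fw_iso_context_create, fw_iso_buffer_init, fw_iso_context_start, then fw_iso_buffer_destroy/fw_iso_context_destroy/kfree on the failure path) are a textbook goto-unwind ladder. A minimal sketch of the structure, with the firewire resources modeled by plain allocations; only the control flow is the point, none of the fw_* calls are real here.

#include <stdlib.h>

struct ir_ctx {
	void *context;  /* stands in for struct fw_iso_context * */
	void *buffer;   /* stands in for the iso DMA buffer      */
};

static int start_ir(struct ir_ctx **out)
{
	struct ir_ctx *ctx;
	int err = -1;

	ctx = malloc(sizeof(*ctx));
	if (!ctx)
		return -1;

	ctx->context = malloc(64);      /* "fw_iso_context_create" */
	if (!ctx->context)
		goto fail_free;

	ctx->buffer = malloc(4096);     /* "fw_iso_buffer_init" */
	if (!ctx->buffer)
		goto fail_context_destroy;

	/* queueing packets and starting the context would follow;
	 * a failure there would unwind through the buffer as well */

	*out = ctx;
	return 0;

fail_context_destroy:
	free(ctx->context);             /* "fw_iso_context_destroy" */
fail_free:
	free(ctx);
	return err;
}

Each label undoes exactly the allocations that succeeded before the jump, so every failure path releases resources in reverse order of acquisition.
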
ctx                30 drivers/media/i2c/max2175.c #define mxm_dbg(ctx, fmt, arg...) dev_dbg(&ctx->client->dev, fmt, ## arg)
ctx                31 drivers/media/i2c/max2175.c #define mxm_err(ctx, fmt, arg...) dev_err(&ctx->client->dev, fmt, ## arg)
ctx               316 drivers/media/i2c/max2175.c static int max2175_read(struct max2175 *ctx, u8 idx, u8 *val)
ctx               321 drivers/media/i2c/max2175.c 	ret = regmap_read(ctx->regmap, idx, &regval);
ctx               323 drivers/media/i2c/max2175.c 		mxm_err(ctx, "read ret(%d): idx 0x%02x\n", ret, idx);
ctx               330 drivers/media/i2c/max2175.c static int max2175_write(struct max2175 *ctx, u8 idx, u8 val)
ctx               334 drivers/media/i2c/max2175.c 	ret = regmap_write(ctx->regmap, idx, val);
ctx               336 drivers/media/i2c/max2175.c 		mxm_err(ctx, "write ret(%d): idx 0x%02x val 0x%02x\n",
ctx               342 drivers/media/i2c/max2175.c static u8 max2175_read_bits(struct max2175 *ctx, u8 idx, u8 msb, u8 lsb)
ctx               346 drivers/media/i2c/max2175.c 	if (max2175_read(ctx, idx, &val))
ctx               352 drivers/media/i2c/max2175.c static int max2175_write_bits(struct max2175 *ctx, u8 idx,
ctx               355 drivers/media/i2c/max2175.c 	int ret = regmap_update_bits(ctx->regmap, idx, GENMASK(msb, lsb),
ctx               359 drivers/media/i2c/max2175.c 		mxm_err(ctx, "wbits ret(%d): idx 0x%02x\n", ret, idx);
ctx               364 drivers/media/i2c/max2175.c static int max2175_write_bit(struct max2175 *ctx, u8 idx, u8 bit, u8 newval)
ctx               366 drivers/media/i2c/max2175.c 	return max2175_write_bits(ctx, idx, bit, bit, newval);
ctx               370 drivers/media/i2c/max2175.c static int max2175_poll_timeout(struct max2175 *ctx, u8 idx, u8 msb, u8 lsb,
ctx               375 drivers/media/i2c/max2175.c 	return regmap_read_poll_timeout(ctx->regmap, idx, val,
ctx               380 drivers/media/i2c/max2175.c static int max2175_poll_csm_ready(struct max2175 *ctx)
ctx               384 drivers/media/i2c/max2175.c 	ret = max2175_poll_timeout(ctx, 69, 1, 1, 0, 50000);
ctx               386 drivers/media/i2c/max2175.c 		mxm_err(ctx, "csm not ready\n");
ctx               391 drivers/media/i2c/max2175.c #define MAX2175_IS_BAND_AM(ctx)		\
ctx               392 drivers/media/i2c/max2175.c 	(max2175_read_bits(ctx, 5, 1, 0) == MAX2175_BAND_AM)
ctx               394 drivers/media/i2c/max2175.c #define MAX2175_IS_BAND_VHF(ctx)	\
ctx               395 drivers/media/i2c/max2175.c 	(max2175_read_bits(ctx, 5, 1, 0) == MAX2175_BAND_VHF)
ctx               397 drivers/media/i2c/max2175.c #define MAX2175_IS_FM_MODE(ctx)		\
ctx               398 drivers/media/i2c/max2175.c 	(max2175_read_bits(ctx, 12, 5, 4) == 0)
ctx               400 drivers/media/i2c/max2175.c #define MAX2175_IS_FMHD_MODE(ctx)	\
ctx               401 drivers/media/i2c/max2175.c 	(max2175_read_bits(ctx, 12, 5, 4) == 1)
ctx               403 drivers/media/i2c/max2175.c #define MAX2175_IS_DAB_MODE(ctx)	\
ctx               404 drivers/media/i2c/max2175.c 	(max2175_read_bits(ctx, 12, 5, 4) == 2)
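
The max2175 helpers above (max2175_read_bits(), max2175_write_bits(), the MAX2175_IS_* macros) all address a bit-field inside an 8-bit register by its msb/lsb pair, building the mask GENMASK-style and writing via a read-modify-write. A self-contained userspace model of those helpers, with a plain reg[] array standing in for the regmap:

#include <stdint.h>
#include <assert.h>

static uint8_t reg[256];

static uint8_t genmask8(unsigned msb, unsigned lsb)
{
	return (uint8_t)(((1u << (msb - lsb + 1)) - 1) << lsb);
}

static uint8_t read_bits(uint8_t idx, unsigned msb, unsigned lsb)
{
	return (uint8_t)((reg[idx] & genmask8(msb, lsb)) >> lsb);
}

/* read-modify-write, as regmap_update_bits() does for the driver */
static void write_bits(uint8_t idx, unsigned msb, unsigned lsb, uint8_t val)
{
	uint8_t mask = genmask8(msb, lsb);

	reg[idx] = (uint8_t)((reg[idx] & ~mask) | ((val << lsb) & mask));
}

int main(void)
{
	write_bits(5, 1, 0, 2);          /* e.g. select a band code */
	assert(read_bits(5, 1, 0) == 2); /* other bits untouched    */
	return 0;
}
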
ctx               416 drivers/media/i2c/max2175.c static void max2175_i2s_enable(struct max2175 *ctx, bool enable)
ctx               420 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 104, 3, 0, 2);
ctx               423 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 104, 3, 0, 9);
ctx               424 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "i2s %sabled\n", enable ? "en" : "dis");
ctx               427 drivers/media/i2c/max2175.c static void max2175_set_filter_coeffs(struct max2175 *ctx, u8 m_sel,
ctx               433 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set_filter_coeffs: m_sel %d bank %d\n", m_sel, bank);
ctx               434 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 114, 5, 4, m_sel);
ctx               441 drivers/media/i2c/max2175.c 		max2175_write(ctx, 115, coeffs[i] >> 8);
ctx               442 drivers/media/i2c/max2175.c 		max2175_write(ctx, 116, coeffs[i]);
ctx               443 drivers/media/i2c/max2175.c 		max2175_write(ctx, 117, coeff_addr | 1 << 7);
ctx               445 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 117, 7, 0);
ctx               448 drivers/media/i2c/max2175.c static void max2175_load_fmeu_1p2(struct max2175 *ctx)
ctx               453 drivers/media/i2c/max2175.c 		max2175_write(ctx, fmeu1p2_map[i].idx, fmeu1p2_map[i].val);
ctx               455 drivers/media/i2c/max2175.c 	ctx->decim_ratio = 36;
ctx               458 drivers/media/i2c/max2175.c 	max2175_set_filter_coeffs(ctx, MAX2175_CH_MSEL, 0, ch_coeff_fmeu);
ctx               459 drivers/media/i2c/max2175.c 	max2175_set_filter_coeffs(ctx, MAX2175_EQ_MSEL, 0,
ctx               463 drivers/media/i2c/max2175.c static void max2175_load_dab_1p2(struct max2175 *ctx)
ctx               468 drivers/media/i2c/max2175.c 		max2175_write(ctx, dab12_map[i].idx, dab12_map[i].val);
ctx               470 drivers/media/i2c/max2175.c 	ctx->decim_ratio = 1;
ctx               473 drivers/media/i2c/max2175.c 	max2175_set_filter_coeffs(ctx, MAX2175_CH_MSEL, 2, ch_coeff_dab1);
ctx               476 drivers/media/i2c/max2175.c static void max2175_load_fmna_1p0(struct max2175 *ctx)
ctx               481 drivers/media/i2c/max2175.c 		max2175_write(ctx, fmna1p0_map[i].idx, fmna1p0_map[i].val);
ctx               484 drivers/media/i2c/max2175.c static void max2175_load_fmna_2p0(struct max2175 *ctx)
ctx               489 drivers/media/i2c/max2175.c 		max2175_write(ctx, fmna2p0_map[i].idx, fmna2p0_map[i].val);
ctx               492 drivers/media/i2c/max2175.c static void max2175_set_bbfilter(struct max2175 *ctx)
ctx               494 drivers/media/i2c/max2175.c 	if (MAX2175_IS_BAND_AM(ctx)) {
ctx               495 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 12, 3, 0, ctx->rom_bbf_bw_am);
ctx               496 drivers/media/i2c/max2175.c 		mxm_dbg(ctx, "set_bbfilter AM: rom %d\n", ctx->rom_bbf_bw_am);
ctx               497 drivers/media/i2c/max2175.c 	} else if (MAX2175_IS_DAB_MODE(ctx)) {
ctx               498 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 12, 3, 0, ctx->rom_bbf_bw_dab);
ctx               499 drivers/media/i2c/max2175.c 		mxm_dbg(ctx, "set_bbfilter DAB: rom %d\n", ctx->rom_bbf_bw_dab);
ctx               501 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 12, 3, 0, ctx->rom_bbf_bw_fm);
ctx               502 drivers/media/i2c/max2175.c 		mxm_dbg(ctx, "set_bbfilter FM: rom %d\n", ctx->rom_bbf_bw_fm);
ctx               506 drivers/media/i2c/max2175.c static bool max2175_set_csm_mode(struct max2175 *ctx,
ctx               509 drivers/media/i2c/max2175.c 	int ret = max2175_poll_csm_ready(ctx);
ctx               514 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 0, 2, 0, new_mode);
ctx               515 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set csm new mode %d\n", new_mode);
ctx               530 drivers/media/i2c/max2175.c 	return max2175_poll_csm_ready(ctx);
ctx               533 drivers/media/i2c/max2175.c static int max2175_csm_action(struct max2175 *ctx,
ctx               538 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "csm_action: %d\n", action);
ctx               541 drivers/media/i2c/max2175.c 	ret = max2175_set_csm_mode(ctx, MAX2175_LOAD_TO_BUFFER);
ctx               545 drivers/media/i2c/max2175.c 	return max2175_set_csm_mode(ctx, MAX2175_PRESET_TUNE);
ctx               548 drivers/media/i2c/max2175.c static int max2175_set_lo_freq(struct max2175 *ctx, u32 lo_freq)
ctx               555 drivers/media/i2c/max2175.c 	band = max2175_read_bits(ctx, 5, 1, 0);
ctx               593 drivers/media/i2c/max2175.c 	int_desired = lo_freq / ctx->xtal_freq;
ctx               594 drivers/media/i2c/max2175.c 	frac_desired = div_u64((u64)(lo_freq % ctx->xtal_freq) << 20,
ctx               595 drivers/media/i2c/max2175.c 			       ctx->xtal_freq);
ctx               598 drivers/media/i2c/max2175.c 	ret = max2175_poll_csm_ready(ctx);
ctx               602 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "lo_mult %u int %u  frac %u\n",
ctx               606 drivers/media/i2c/max2175.c 	max2175_write(ctx, 1, int_desired);
ctx               607 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 2, 3, 0, (frac_desired >> 16) & 0xf);
ctx               608 drivers/media/i2c/max2175.c 	max2175_write(ctx, 3, frac_desired >> 8);
ctx               609 drivers/media/i2c/max2175.c 	max2175_write(ctx, 4, frac_desired);
ctx               610 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 5, 3, 2, loband_bits);
ctx               611 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 6, 7, 6, vcodiv_bits);
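
The max2175_set_lo_freq() lines above compute the PLL divider as lo_freq = xtal * (int + frac / 2^20), then spread the 20-bit fraction over registers 2..4 as 4 + 8 + 8 bits, with the integer part in register 1. A worked example of that arithmetic; the LO and crystal frequencies below are assumed values for illustration only.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t lo_freq = 228000000;    /* assumed LO, in Hz  */
	uint32_t xtal    = 36864000;     /* assumed crystal Hz */

	uint32_t int_part  = (uint32_t)(lo_freq / xtal);
	uint32_t frac_part = (uint32_t)(((lo_freq % xtal) << 20) / xtal);

	printf("int=%u frac=%u (reg2 nibble=%u reg3=%u reg4=%u)\n",
	       int_part, frac_part,
	       (frac_part >> 16) & 0xf,  /* bits 19:16 -> reg 2 */
	       (frac_part >> 8) & 0xff,  /* bits 15:8  -> reg 3 */
	       frac_part & 0xff);        /* bits  7:0  -> reg 4 */
	return 0;
}

With these inputs, int is 6 and the remainder is scaled by 2^20 and divided by the crystal rate, so the synthesized frequency lands within one part in 2^20 of the request.
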
ctx               628 drivers/media/i2c/max2175.c static int max2175_set_nco_freq(struct max2175 *ctx, s32 nco_freq)
ctx               630 drivers/media/i2c/max2175.c 	s32 clock_rate = ctx->xtal_freq / ctx->decim_ratio;
ctx               649 drivers/media/i2c/max2175.c 	ret = max2175_poll_csm_ready(ctx);
ctx               653 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "freq %d desired %lld reg %u\n",
ctx               657 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 7, 4, 0, (nco_reg >> 16) & 0x1f);
ctx               658 drivers/media/i2c/max2175.c 	max2175_write(ctx, 8, nco_reg >> 8);
ctx               659 drivers/media/i2c/max2175.c 	max2175_write(ctx, 9, nco_reg);
ctx               664 drivers/media/i2c/max2175.c static int max2175_set_rf_freq_non_am_bands(struct max2175 *ctx, u64 freq,
ctx               670 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "rf_freq: non AM bands\n");
ctx               672 drivers/media/i2c/max2175.c 	if (MAX2175_IS_FM_MODE(ctx))
ctx               674 drivers/media/i2c/max2175.c 	else if (MAX2175_IS_FMHD_MODE(ctx))
ctx               677 drivers/media/i2c/max2175.c 		return max2175_set_lo_freq(ctx, freq);
ctx               679 drivers/media/i2c/max2175.c 	if (MAX2175_IS_BAND_VHF(ctx) == (lo_pos == MAX2175_LO_ABOVE_DESIRED))
ctx               684 drivers/media/i2c/max2175.c 	ret = max2175_set_lo_freq(ctx, adj_freq);
ctx               688 drivers/media/i2c/max2175.c 	return max2175_set_nco_freq(ctx, -low_if_freq);
ctx               691 drivers/media/i2c/max2175.c static int max2175_set_rf_freq(struct max2175 *ctx, u64 freq, u32 lo_pos)
ctx               695 drivers/media/i2c/max2175.c 	if (MAX2175_IS_BAND_AM(ctx))
ctx               696 drivers/media/i2c/max2175.c 		ret = max2175_set_nco_freq(ctx, freq);
ctx               698 drivers/media/i2c/max2175.c 		ret = max2175_set_rf_freq_non_am_bands(ctx, freq, lo_pos);
ctx               700 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set_rf_freq: ret %d freq %llu\n", ret, freq);
ctx               705 drivers/media/i2c/max2175.c static int max2175_tune_rf_freq(struct max2175 *ctx, u64 freq, u32 hsls)
ctx               709 drivers/media/i2c/max2175.c 	ret = max2175_set_rf_freq(ctx, freq, hsls);
ctx               713 drivers/media/i2c/max2175.c 	ret = max2175_csm_action(ctx, MAX2175_BUFFER_PLUS_PRESET_TUNE);
ctx               717 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "tune_rf_freq: old %u new %llu\n", ctx->freq, freq);
ctx               718 drivers/media/i2c/max2175.c 	ctx->freq = freq;
ctx               723 drivers/media/i2c/max2175.c static void max2175_set_hsls(struct max2175 *ctx, u32 lo_pos)
ctx               725 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set_hsls: lo_pos %u\n", lo_pos);
ctx               727 drivers/media/i2c/max2175.c 	if ((lo_pos == MAX2175_LO_BELOW_DESIRED) == MAX2175_IS_BAND_VHF(ctx))
ctx               728 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 5, 4, 1);
ctx               730 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 5, 4, 0);
ctx               733 drivers/media/i2c/max2175.c static void max2175_set_eu_rx_mode(struct max2175 *ctx, u32 rx_mode)
ctx               737 drivers/media/i2c/max2175.c 		max2175_load_fmeu_1p2(ctx);
ctx               741 drivers/media/i2c/max2175.c 		max2175_load_dab_1p2(ctx);
ctx               745 drivers/media/i2c/max2175.c 	if (!ctx->master)
ctx               746 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 30, 7, 1);
ctx               749 drivers/media/i2c/max2175.c static void max2175_set_na_rx_mode(struct max2175 *ctx, u32 rx_mode)
ctx               753 drivers/media/i2c/max2175.c 		max2175_load_fmna_1p0(ctx);
ctx               756 drivers/media/i2c/max2175.c 		max2175_load_fmna_2p0(ctx);
ctx               760 drivers/media/i2c/max2175.c 	if (!ctx->master)
ctx               761 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 30, 7, 1);
ctx               763 drivers/media/i2c/max2175.c 	ctx->decim_ratio = 27;
ctx               766 drivers/media/i2c/max2175.c 	max2175_set_filter_coeffs(ctx, MAX2175_CH_MSEL, 0, ch_coeff_fmna);
ctx               767 drivers/media/i2c/max2175.c 	max2175_set_filter_coeffs(ctx, MAX2175_EQ_MSEL, 0,
ctx               771 drivers/media/i2c/max2175.c static int max2175_set_rx_mode(struct max2175 *ctx, u32 rx_mode)
ctx               773 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set_rx_mode: %u am_hiz %u\n", rx_mode, ctx->am_hiz);
ctx               774 drivers/media/i2c/max2175.c 	if (ctx->xtal_freq == MAX2175_EU_XTAL_FREQ)
ctx               775 drivers/media/i2c/max2175.c 		max2175_set_eu_rx_mode(ctx, rx_mode);
ctx               777 drivers/media/i2c/max2175.c 		max2175_set_na_rx_mode(ctx, rx_mode);
ctx               779 drivers/media/i2c/max2175.c 	if (ctx->am_hiz) {
ctx               780 drivers/media/i2c/max2175.c 		mxm_dbg(ctx, "setting AM HiZ related config\n");
ctx               781 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 50, 5, 1);
ctx               782 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 90, 7, 1);
ctx               783 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 73, 1, 0, 2);
ctx               784 drivers/media/i2c/max2175.c 		max2175_write_bits(ctx, 80, 5, 0, 33);
ctx               788 drivers/media/i2c/max2175.c 	max2175_set_bbfilter(ctx);
ctx               791 drivers/media/i2c/max2175.c 	max2175_set_hsls(ctx, ctx->hsls->cur.val);
ctx               794 drivers/media/i2c/max2175.c 	max2175_i2s_enable(ctx, ctx->i2s_en->cur.val);
ctx               796 drivers/media/i2c/max2175.c 	ctx->mode_resolved = true;
ctx               801 drivers/media/i2c/max2175.c static int max2175_rx_mode_from_freq(struct max2175 *ctx, u32 freq, u32 *mode)
ctx               807 drivers/media/i2c/max2175.c 	for (i = 0; i <= ctx->rx_mode->maximum; i++) {
ctx               808 drivers/media/i2c/max2175.c 		if (ctx->rx_modes[i].band == band) {
ctx               810 drivers/media/i2c/max2175.c 			mxm_dbg(ctx, "rx_mode_from_freq: freq %u mode %d\n",
ctx               819 drivers/media/i2c/max2175.c static bool max2175_freq_rx_mode_valid(struct max2175 *ctx,
ctx               824 drivers/media/i2c/max2175.c 	return (ctx->rx_modes[mode].band == band);
ctx               827 drivers/media/i2c/max2175.c static void max2175_load_adc_presets(struct max2175 *ctx)
ctx               833 drivers/media/i2c/max2175.c 			max2175_write(ctx, 146 + j + i * 55, adc_presets[i][j]);
ctx               836 drivers/media/i2c/max2175.c static int max2175_init_power_manager(struct max2175 *ctx)
ctx               841 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 99, 2, 0);
ctx               843 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 99, 2, 1);
ctx               846 drivers/media/i2c/max2175.c 	ret = max2175_poll_timeout(ctx, 69, 7, 7, 1, 50000);
ctx               848 drivers/media/i2c/max2175.c 		mxm_err(ctx, "init pm failed\n");
ctx               853 drivers/media/i2c/max2175.c static int max2175_recalibrate_adc(struct max2175 *ctx)
ctx               858 drivers/media/i2c/max2175.c 	max2175_write(ctx, 150, 0xff);
ctx               859 drivers/media/i2c/max2175.c 	max2175_write(ctx, 205, 0xff);
ctx               860 drivers/media/i2c/max2175.c 	max2175_write(ctx, 147, 0x20);
ctx               861 drivers/media/i2c/max2175.c 	max2175_write(ctx, 147, 0x00);
ctx               862 drivers/media/i2c/max2175.c 	max2175_write(ctx, 202, 0x20);
ctx               863 drivers/media/i2c/max2175.c 	max2175_write(ctx, 202, 0x00);
ctx               865 drivers/media/i2c/max2175.c 	ret = max2175_poll_timeout(ctx, 69, 4, 3, 3, 50000);
ctx               867 drivers/media/i2c/max2175.c 		mxm_err(ctx, "adc recalibration failed\n");
ctx               872 drivers/media/i2c/max2175.c static u8 max2175_read_rom(struct max2175 *ctx, u8 row)
ctx               876 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 56, 4, 0);
ctx               877 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 56, 3, 0, row);
ctx               880 drivers/media/i2c/max2175.c 	max2175_read(ctx, 58, &data);
ctx               882 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 56, 3, 0, 0);
ctx               884 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "read_rom: row %d data 0x%02x\n", row, data);
ctx               889 drivers/media/i2c/max2175.c static void max2175_load_from_rom(struct max2175 *ctx)
ctx               893 drivers/media/i2c/max2175.c 	data = max2175_read_rom(ctx, 0);
ctx               894 drivers/media/i2c/max2175.c 	ctx->rom_bbf_bw_am = data & 0x0f;
ctx               895 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 81, 3, 0, data >> 4);
ctx               897 drivers/media/i2c/max2175.c 	data = max2175_read_rom(ctx, 1);
ctx               898 drivers/media/i2c/max2175.c 	ctx->rom_bbf_bw_fm = data & 0x0f;
ctx               899 drivers/media/i2c/max2175.c 	ctx->rom_bbf_bw_dab = data >> 4;
ctx               901 drivers/media/i2c/max2175.c 	data = max2175_read_rom(ctx, 2);
ctx               902 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 82, 4, 0, data & 0x1f);
ctx               903 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 82, 7, 5, data >> 5);
ctx               905 drivers/media/i2c/max2175.c 	data = max2175_read_rom(ctx, 3);
ctx               906 drivers/media/i2c/max2175.c 	if (ctx->am_hiz) {
ctx               908 drivers/media/i2c/max2175.c 		data |= (max2175_read_rom(ctx, 7) & 0x40) >> 2;
ctx               913 drivers/media/i2c/max2175.c 		data |= (max2175_read_rom(ctx, 7) & 0x80) >> 3;
ctx               917 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 80, 5, 0, data + 31);
ctx               919 drivers/media/i2c/max2175.c 	data = max2175_read_rom(ctx, 6);
ctx               920 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 81, 7, 6, data >> 6);
ctx               923 drivers/media/i2c/max2175.c static void max2175_load_full_fm_eu_1p0(struct max2175 *ctx)
ctx               928 drivers/media/i2c/max2175.c 		max2175_write(ctx, i + 1, full_fm_eu_1p0[i]);
ctx               931 drivers/media/i2c/max2175.c 	ctx->decim_ratio = 36;
ctx               934 drivers/media/i2c/max2175.c static void max2175_load_full_fm_na_1p0(struct max2175 *ctx)
ctx               939 drivers/media/i2c/max2175.c 		max2175_write(ctx, i + 1, full_fm_na_1p0[i]);
ctx               942 drivers/media/i2c/max2175.c 	ctx->decim_ratio = 27;
ctx               945 drivers/media/i2c/max2175.c static int max2175_core_init(struct max2175 *ctx, u32 refout_bits)
ctx               950 drivers/media/i2c/max2175.c 	if (ctx->xtal_freq == MAX2175_EU_XTAL_FREQ)
ctx               951 drivers/media/i2c/max2175.c 		max2175_load_full_fm_eu_1p0(ctx);
ctx               953 drivers/media/i2c/max2175.c 		max2175_load_full_fm_na_1p0(ctx);
ctx               956 drivers/media/i2c/max2175.c 	if (!ctx->master)
ctx               957 drivers/media/i2c/max2175.c 		max2175_write_bit(ctx, 30, 7, 1);
ctx               959 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "refout_bits %u\n", refout_bits);
ctx               962 drivers/media/i2c/max2175.c 	max2175_write_bits(ctx, 56, 7, 5, refout_bits);
ctx               965 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 99, 1, 0);
ctx               967 drivers/media/i2c/max2175.c 	max2175_write_bit(ctx, 99, 1, 1);
ctx               970 drivers/media/i2c/max2175.c 	max2175_load_adc_presets(ctx);
ctx               973 drivers/media/i2c/max2175.c 	ret = max2175_init_power_manager(ctx);
ctx               978 drivers/media/i2c/max2175.c 	ret = max2175_recalibrate_adc(ctx);
ctx               983 drivers/media/i2c/max2175.c 	max2175_load_from_rom(ctx);
ctx               985 drivers/media/i2c/max2175.c 	if (ctx->xtal_freq == MAX2175_EU_XTAL_FREQ) {
ctx               987 drivers/media/i2c/max2175.c 		max2175_set_filter_coeffs(ctx, MAX2175_CH_MSEL, 0,
ctx               989 drivers/media/i2c/max2175.c 		max2175_set_filter_coeffs(ctx, MAX2175_EQ_MSEL, 0,
ctx               993 drivers/media/i2c/max2175.c 		max2175_set_filter_coeffs(ctx, MAX2175_CH_MSEL, 0,
ctx               995 drivers/media/i2c/max2175.c 		max2175_set_filter_coeffs(ctx, MAX2175_EQ_MSEL, 0,
ctx               998 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "core initialized\n");
ctx              1003 drivers/media/i2c/max2175.c static void max2175_s_ctrl_rx_mode(struct max2175 *ctx, u32 rx_mode)
ctx              1006 drivers/media/i2c/max2175.c 	max2175_set_rx_mode(ctx, rx_mode);
ctx              1008 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "s_ctrl_rx_mode: %u curr freq %u\n", rx_mode, ctx->freq);
ctx              1011 drivers/media/i2c/max2175.c 	if (max2175_freq_rx_mode_valid(ctx, rx_mode, ctx->freq))
ctx              1012 drivers/media/i2c/max2175.c 		max2175_tune_rf_freq(ctx, ctx->freq, ctx->hsls->cur.val);
ctx              1015 drivers/media/i2c/max2175.c 		max2175_tune_rf_freq(ctx, ctx->rx_modes[rx_mode].freq,
ctx              1016 drivers/media/i2c/max2175.c 				     ctx->hsls->cur.val);
ctx              1021 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_ctrl_hdl(ctrl->handler);
ctx              1023 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "s_ctrl: id 0x%x, val %u\n", ctrl->id, ctrl->val);
ctx              1026 drivers/media/i2c/max2175.c 		max2175_i2s_enable(ctx, ctrl->val);
ctx              1029 drivers/media/i2c/max2175.c 		max2175_set_hsls(ctx, ctrl->val);
ctx              1032 drivers/media/i2c/max2175.c 		max2175_s_ctrl_rx_mode(ctx, ctrl->val);
ctx              1039 drivers/media/i2c/max2175.c static u32 max2175_get_lna_gain(struct max2175 *ctx)
ctx              1041 drivers/media/i2c/max2175.c 	enum max2175_band band = max2175_read_bits(ctx, 5, 1, 0);
ctx              1045 drivers/media/i2c/max2175.c 		return max2175_read_bits(ctx, 51, 3, 0);
ctx              1047 drivers/media/i2c/max2175.c 		return max2175_read_bits(ctx, 50, 3, 0);
ctx              1049 drivers/media/i2c/max2175.c 		return max2175_read_bits(ctx, 52, 5, 0);
ctx              1057 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_ctrl_hdl(ctrl->handler);
ctx              1061 drivers/media/i2c/max2175.c 		ctrl->val = max2175_get_lna_gain(ctx);
ctx              1064 drivers/media/i2c/max2175.c 		ctrl->val = max2175_read_bits(ctx, 49, 4, 0);
ctx              1067 drivers/media/i2c/max2175.c 		ctrl->val = (max2175_read_bits(ctx, 60, 7, 6) == 3);
ctx              1074 drivers/media/i2c/max2175.c static int max2175_set_freq_and_mode(struct max2175 *ctx, u32 freq)
ctx              1080 drivers/media/i2c/max2175.c 	ret = max2175_rx_mode_from_freq(ctx, freq, &rx_mode);
ctx              1084 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "set_freq_and_mode: freq %u rx_mode %d\n", freq, rx_mode);
ctx              1087 drivers/media/i2c/max2175.c 	max2175_set_rx_mode(ctx, rx_mode);
ctx              1088 drivers/media/i2c/max2175.c 	ctx->rx_mode->cur.val = rx_mode;
ctx              1091 drivers/media/i2c/max2175.c 	return max2175_tune_rf_freq(ctx, freq, ctx->hsls->cur.val);
ctx              1097 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_sd(sd);
ctx              1101 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "s_freq: new %u curr %u, mode_resolved %d\n",
ctx              1102 drivers/media/i2c/max2175.c 		vf->frequency, ctx->freq, ctx->mode_resolved);
ctx              1107 drivers/media/i2c/max2175.c 	freq = clamp(vf->frequency, ctx->bands_rf->rangelow,
ctx              1108 drivers/media/i2c/max2175.c 		     ctx->bands_rf->rangehigh);
ctx              1111 drivers/media/i2c/max2175.c 	if (ctx->mode_resolved &&
ctx              1112 drivers/media/i2c/max2175.c 	    max2175_freq_rx_mode_valid(ctx, ctx->rx_mode->cur.val, freq))
ctx              1113 drivers/media/i2c/max2175.c 		ret = max2175_tune_rf_freq(ctx, freq, ctx->hsls->cur.val);
ctx              1116 drivers/media/i2c/max2175.c 		ret = max2175_set_freq_and_mode(ctx, freq);
ctx              1118 drivers/media/i2c/max2175.c 	mxm_dbg(ctx, "s_freq: ret %d curr %u mode_resolved %d mode %u\n",
ctx              1119 drivers/media/i2c/max2175.c 		ret, ctx->freq, ctx->mode_resolved, ctx->rx_mode->cur.val);
ctx              1127 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_sd(sd);
ctx              1135 drivers/media/i2c/max2175.c 	vf->frequency = ctx->freq;
ctx              1143 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_sd(sd);
ctx              1148 drivers/media/i2c/max2175.c 	*band = *ctx->bands_rf;
ctx              1155 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_sd(sd);
ctx              1163 drivers/media/i2c/max2175.c 	vt->rangelow = ctx->bands_rf->rangelow;
ctx              1164 drivers/media/i2c/max2175.c 	vt->rangehigh = ctx->bands_rf->rangehigh;
ctx              1283 drivers/media/i2c/max2175.c 	struct max2175 *ctx;
ctx              1324 drivers/media/i2c/max2175.c 	ctx = devm_kzalloc(&client->dev, sizeof(*ctx), GFP_KERNEL);
ctx              1325 drivers/media/i2c/max2175.c 	if (ctx == NULL)
ctx              1328 drivers/media/i2c/max2175.c 	sd = &ctx->sd;
ctx              1329 drivers/media/i2c/max2175.c 	ctx->master = master;
ctx              1330 drivers/media/i2c/max2175.c 	ctx->am_hiz = am_hiz;
ctx              1331 drivers/media/i2c/max2175.c 	ctx->mode_resolved = false;
ctx              1332 drivers/media/i2c/max2175.c 	ctx->regmap = regmap;
ctx              1333 drivers/media/i2c/max2175.c 	ctx->xtal_freq = clk_get_rate(clk);
ctx              1334 drivers/media/i2c/max2175.c 	dev_info(&client->dev, "xtal freq %luHz\n", ctx->xtal_freq);
ctx              1337 drivers/media/i2c/max2175.c 	ctx->client = client;
ctx              1342 drivers/media/i2c/max2175.c 	hdl = &ctx->ctrl_hdl;
ctx              1347 drivers/media/i2c/max2175.c 	ctx->lna_gain = v4l2_ctrl_new_std(hdl, &max2175_ctrl_ops,
ctx              1350 drivers/media/i2c/max2175.c 	ctx->lna_gain->flags |= (V4L2_CTRL_FLAG_VOLATILE |
ctx              1352 drivers/media/i2c/max2175.c 	ctx->if_gain = v4l2_ctrl_new_std(hdl, &max2175_ctrl_ops,
ctx              1355 drivers/media/i2c/max2175.c 	ctx->if_gain->flags |= (V4L2_CTRL_FLAG_VOLATILE |
ctx              1357 drivers/media/i2c/max2175.c 	ctx->pll_lock = v4l2_ctrl_new_std(hdl, &max2175_ctrl_ops,
ctx              1360 drivers/media/i2c/max2175.c 	ctx->pll_lock->flags |= (V4L2_CTRL_FLAG_VOLATILE |
ctx              1362 drivers/media/i2c/max2175.c 	ctx->i2s_en = v4l2_ctrl_new_custom(hdl, &max2175_i2s_en, NULL);
ctx              1363 drivers/media/i2c/max2175.c 	ctx->hsls = v4l2_ctrl_new_custom(hdl, &max2175_hsls, NULL);
ctx              1365 drivers/media/i2c/max2175.c 	if (ctx->xtal_freq == MAX2175_EU_XTAL_FREQ) {
ctx              1366 drivers/media/i2c/max2175.c 		ctx->rx_mode = v4l2_ctrl_new_custom(hdl,
ctx              1368 drivers/media/i2c/max2175.c 		ctx->rx_modes = eu_rx_modes;
ctx              1369 drivers/media/i2c/max2175.c 		ctx->bands_rf = &eu_bands_rf;
ctx              1371 drivers/media/i2c/max2175.c 		ctx->rx_mode = v4l2_ctrl_new_custom(hdl,
ctx              1373 drivers/media/i2c/max2175.c 		ctx->rx_modes = na_rx_modes;
ctx              1374 drivers/media/i2c/max2175.c 		ctx->bands_rf = &na_bands_rf;
ctx              1376 drivers/media/i2c/max2175.c 	ctx->sd.ctrl_handler = &ctx->ctrl_hdl;
ctx              1379 drivers/media/i2c/max2175.c 	ctx->freq = ctx->bands_rf->rangelow;
ctx              1389 drivers/media/i2c/max2175.c 	ret = max2175_core_init(ctx, refout_bits);
ctx              1402 drivers/media/i2c/max2175.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx              1410 drivers/media/i2c/max2175.c 	struct max2175 *ctx = max2175_from_sd(sd);
ctx              1412 drivers/media/i2c/max2175.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx               233 drivers/media/i2c/mt9m111.c 	struct mt9m111_context *ctx;
ctx               377 drivers/media/i2c/mt9m111.c 			       struct mt9m111_context *ctx)
ctx               380 drivers/media/i2c/mt9m111.c 	return reg_write(CONTEXT_CONTROL, ctx->control);
ctx               384 drivers/media/i2c/mt9m111.c 			struct mt9m111_context *ctx, struct v4l2_rect *rect,
ctx               388 drivers/media/i2c/mt9m111.c 	int ret = mt9m111_reg_write(client, ctx->reducer_xzoom, rect->width);
ctx               390 drivers/media/i2c/mt9m111.c 		ret = mt9m111_reg_write(client, ctx->reducer_yzoom, rect->height);
ctx               392 drivers/media/i2c/mt9m111.c 		ret = mt9m111_reg_write(client, ctx->reducer_xsize, width);
ctx               394 drivers/media/i2c/mt9m111.c 		ret = mt9m111_reg_write(client, ctx->reducer_ysize, height);
ctx               747 drivers/media/i2c/mt9m111.c 	mt9m111->ctx = (best_gap_idx == MT9M111_MODE_QSXGA_30FPS) ? &context_a :
ctx               793 drivers/media/i2c/mt9m111.c 		ret = mt9m111_reg_set(client, mt9m111->ctx->read_mode, mask);
ctx               795 drivers/media/i2c/mt9m111.c 		ret = mt9m111_reg_clear(client, mt9m111->ctx->read_mode, mask);
ctx               939 drivers/media/i2c/mt9m111.c 	mt9m111_set_context(mt9m111, mt9m111->ctx);
ctx               944 drivers/media/i2c/mt9m111.c 	mt9m111_reg_mask(client, mt9m111->ctx->read_mode,
ctx               969 drivers/media/i2c/mt9m111.c 		ret = mt9m111_set_context(mt9m111, mt9m111->ctx);
ctx              1280 drivers/media/i2c/mt9m111.c 	mt9m111->ctx = &context_b;
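
The mt9m111 lines above use a small chained-write idiom in mt9m111_setup_rect_ctx(): each register write runs only if the previous one succeeded, so the first error propagates without extra control flow. A sketch of the shape, with reg_write() as a stand-in and illustrative register numbers:

static int reg_write(unsigned reg, unsigned val)
{
	(void)reg; (void)val;
	return 0;                 /* pretend the I2C write succeeded */
}

static int setup_rect(unsigned xzoom, unsigned yzoom,
		      unsigned xsize, unsigned ysize)
{
	int ret = reg_write(0xa6, xzoom);  /* register numbers are */
	if (!ret)                          /* illustrative only    */
		ret = reg_write(0xa9, yzoom);
	if (!ret)
		ret = reg_write(0xa7, xsize);
	if (!ret)
		ret = reg_write(0xaa, ysize);
	return ret;               /* first failure, or 0 */
}
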
ctx                40 drivers/media/platform/coda/coda-bit.c static void coda_free_bitstream_buffer(struct coda_ctx *ctx);
ctx                63 drivers/media/platform/coda/coda-bit.c static void coda_command_async(struct coda_ctx *ctx, int cmd)
ctx                65 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx                71 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->bit_stream_param,
ctx                73 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->frm_dis_flg,
ctx                74 drivers/media/platform/coda/coda-bit.c 				CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx));
ctx                75 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->frame_mem_ctrl,
ctx                77 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->workbuf.paddr, CODA_REG_BIT_WORK_BUF_ADDR);
ctx                87 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->idx, CODA_REG_BIT_RUN_INDEX);
ctx                88 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->params.codec_mode, CODA_REG_BIT_RUN_COD_STD);
ctx                89 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->params.codec_mode_aux, CODA7_REG_BIT_RUN_AUX_STD);
ctx                91 drivers/media/platform/coda/coda-bit.c 	trace_coda_bit_run(ctx, cmd);
ctx                96 drivers/media/platform/coda/coda-bit.c static int coda_command_sync(struct coda_ctx *ctx, int cmd)
ctx                98 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               103 drivers/media/platform/coda/coda-bit.c 	coda_command_async(ctx, cmd);
ctx               105 drivers/media/platform/coda/coda-bit.c 	trace_coda_bit_done(ctx);
ctx               110 drivers/media/platform/coda/coda-bit.c int coda_hw_reset(struct coda_ctx *ctx)
ctx               112 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               148 drivers/media/platform/coda/coda-bit.c static void coda_kfifo_sync_from_device(struct coda_ctx *ctx)
ctx               150 drivers/media/platform/coda/coda-bit.c 	struct __kfifo *kfifo = &ctx->bitstream_fifo.kfifo;
ctx               151 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               154 drivers/media/platform/coda/coda-bit.c 	rd_ptr = coda_read(dev, CODA_REG_BIT_RD_PTR(ctx->reg_idx));
ctx               156 drivers/media/platform/coda/coda-bit.c 		      (rd_ptr - ctx->bitstream.paddr);
ctx               161 drivers/media/platform/coda/coda-bit.c static void coda_kfifo_sync_to_device_full(struct coda_ctx *ctx)
ctx               163 drivers/media/platform/coda/coda-bit.c 	struct __kfifo *kfifo = &ctx->bitstream_fifo.kfifo;
ctx               164 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               167 drivers/media/platform/coda/coda-bit.c 	rd_ptr = ctx->bitstream.paddr + (kfifo->out & kfifo->mask);
ctx               168 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, rd_ptr, CODA_REG_BIT_RD_PTR(ctx->reg_idx));
ctx               169 drivers/media/platform/coda/coda-bit.c 	wr_ptr = ctx->bitstream.paddr + (kfifo->in & kfifo->mask);
ctx               170 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, wr_ptr, CODA_REG_BIT_WR_PTR(ctx->reg_idx));
ctx               173 drivers/media/platform/coda/coda-bit.c static void coda_kfifo_sync_to_device_write(struct coda_ctx *ctx)
ctx               175 drivers/media/platform/coda/coda-bit.c 	struct __kfifo *kfifo = &ctx->bitstream_fifo.kfifo;
ctx               176 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               179 drivers/media/platform/coda/coda-bit.c 	wr_ptr = ctx->bitstream.paddr + (kfifo->in & kfifo->mask);
ctx               180 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, wr_ptr, CODA_REG_BIT_WR_PTR(ctx->reg_idx));
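
The coda_kfifo_sync_* helpers above translate between the bitstream kfifo's free-running in/out counters and the device's read/write pointer registers: counter & mask gives the offset inside the ring, and paddr + offset gives the bus address the hardware sees. A sketch of that mapping with plain variables standing in for the registers; the wrap handling in sync_from_device() is one plausible reconstruction, since the listing truncates that line.

#include <stdint.h>

struct bitstream {
	uint64_t paddr;           /* base bus address of the ring  */
	uint32_t mask;            /* ring size - 1 (power of two)  */
	uint32_t in, out;         /* free-running kfifo counters   */
	uint64_t dev_rd, dev_wr;  /* stand-ins for RD/WR registers */
};

/* kfifo -> device: publish both pointers (the "full" sync) */
static void sync_to_device(struct bitstream *b)
{
	b->dev_rd = b->paddr + (b->out & b->mask);
	b->dev_wr = b->paddr + (b->in  & b->mask);
}

/* device -> kfifo: rebuild 'out' from the offset the device
 * reports, keeping it behind 'in' across ring wrap-around */
static void sync_from_device(struct bitstream *b)
{
	uint32_t rd_off = (uint32_t)(b->dev_rd - b->paddr);

	b->out = (b->in & ~b->mask) | rd_off;
	if (b->out > b->in)
		b->out -= b->mask + 1;
}

Keeping the counters free-running and masking only at the boundary means emptiness (in == out) and fullness (in - out == size) stay unambiguous, which is exactly why the driver syncs raw counters rather than ring offsets.
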
ctx               183 drivers/media/platform/coda/coda-bit.c static int coda_h264_bitstream_pad(struct coda_ctx *ctx, u32 size)
ctx               196 drivers/media/platform/coda/coda-bit.c 	n = kfifo_in(&ctx->bitstream_fifo, buf, size);
ctx               202 drivers/media/platform/coda/coda-bit.c int coda_bitstream_flush(struct coda_ctx *ctx)
ctx               206 drivers/media/platform/coda/coda-bit.c 	if (ctx->inst_type != CODA_INST_DECODER || !ctx->use_bit)
ctx               209 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_DEC_BUF_FLUSH);
ctx               211 drivers/media/platform/coda/coda-bit.c 		v4l2_err(&ctx->dev->v4l2_dev, "failed to flush bitstream\n");
ctx               215 drivers/media/platform/coda/coda-bit.c 	kfifo_init(&ctx->bitstream_fifo, ctx->bitstream.vaddr,
ctx               216 drivers/media/platform/coda/coda-bit.c 		   ctx->bitstream.size);
ctx               217 drivers/media/platform/coda/coda-bit.c 	coda_kfifo_sync_to_device_full(ctx);
ctx               222 drivers/media/platform/coda/coda-bit.c static int coda_bitstream_queue(struct coda_ctx *ctx, const u8 *buf, u32 size)
ctx               224 drivers/media/platform/coda/coda-bit.c 	u32 n = kfifo_in(&ctx->bitstream_fifo, buf, size);
ctx               229 drivers/media/platform/coda/coda-bit.c static u32 coda_buffer_parse_headers(struct coda_ctx *ctx,
ctx               236 drivers/media/platform/coda/coda-bit.c 	switch (ctx->codec->src_fourcc) {
ctx               238 drivers/media/platform/coda/coda-bit.c 		size = coda_mpeg2_parse_headers(ctx, vaddr, payload);
ctx               241 drivers/media/platform/coda/coda-bit.c 		size = coda_mpeg4_parse_headers(ctx, vaddr, payload);
ctx               250 drivers/media/platform/coda/coda-bit.c static bool coda_bitstream_try_queue(struct coda_ctx *ctx,
ctx               258 drivers/media/platform/coda/coda-bit.c 	if (coda_get_bitstream_payload(ctx) + payload + 512 >=
ctx               259 drivers/media/platform/coda/coda-bit.c 	    ctx->bitstream.size)
ctx               263 drivers/media/platform/coda/coda-bit.c 		v4l2_err(&ctx->dev->v4l2_dev, "trying to queue empty buffer\n");
ctx               267 drivers/media/platform/coda/coda-bit.c 	if (ctx->qsequence == 0 && payload < 512) {
ctx               275 drivers/media/platform/coda/coda-bit.c 		u32 header_size = coda_buffer_parse_headers(ctx, src_buf,
ctx               279 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "pad with %u-byte header\n",
ctx               282 drivers/media/platform/coda/coda-bit.c 				ret = coda_bitstream_queue(ctx, vaddr,
ctx               285 drivers/media/platform/coda/coda-bit.c 					v4l2_err(&ctx->dev->v4l2_dev,
ctx               289 drivers/media/platform/coda/coda-bit.c 				if (ctx->dev->devtype->product == CODA_960)
ctx               293 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx,
ctx               299 drivers/media/platform/coda/coda-bit.c 	if (ctx->qsequence == 0 && payload < 512 &&
ctx               300 drivers/media/platform/coda/coda-bit.c 	    ctx->codec->src_fourcc == V4L2_PIX_FMT_H264)
ctx               301 drivers/media/platform/coda/coda-bit.c 		coda_h264_bitstream_pad(ctx, 512 - payload);
ctx               303 drivers/media/platform/coda/coda-bit.c 	ret = coda_bitstream_queue(ctx, vaddr, payload);
ctx               305 drivers/media/platform/coda/coda-bit.c 		v4l2_err(&ctx->dev->v4l2_dev, "bitstream buffer overflow\n");
ctx               309 drivers/media/platform/coda/coda-bit.c 	src_buf->sequence = ctx->qsequence++;
ctx               312 drivers/media/platform/coda/coda-bit.c 	if (ctx == v4l2_m2m_get_curr_priv(ctx->dev->m2m_dev))
ctx               313 drivers/media/platform/coda/coda-bit.c 		coda_kfifo_sync_to_device_write(ctx);
ctx               317 drivers/media/platform/coda/coda-bit.c 		coda_bit_stream_end_flag(ctx);
ctx               318 drivers/media/platform/coda/coda-bit.c 	ctx->hold = false;
ctx               323 drivers/media/platform/coda/coda-bit.c void coda_fill_bitstream(struct coda_ctx *ctx, struct list_head *buffer_list)
ctx               329 drivers/media/platform/coda/coda-bit.c 	if (ctx->bit_stream_param & CODA_BIT_STREAM_END_FLAG)
ctx               332 drivers/media/platform/coda/coda-bit.c 	while (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) > 0) {
ctx               338 drivers/media/platform/coda/coda-bit.c 		if (ctx->codec->src_fourcc == V4L2_PIX_FMT_JPEG &&
ctx               339 drivers/media/platform/coda/coda-bit.c 		    ctx->num_metas > 1)
ctx               342 drivers/media/platform/coda/coda-bit.c 		if (ctx->num_internal_frames &&
ctx               343 drivers/media/platform/coda/coda-bit.c 		    ctx->num_metas >= ctx->num_internal_frames) {
ctx               344 drivers/media/platform/coda/coda-bit.c 			meta = list_first_entry(&ctx->buffer_meta_list,
ctx               355 drivers/media/platform/coda/coda-bit.c 			if (coda_bitstream_can_fetch_past(ctx, meta->end))
ctx               359 drivers/media/platform/coda/coda-bit.c 		src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               362 drivers/media/platform/coda/coda-bit.c 		if (ctx->codec->src_fourcc == V4L2_PIX_FMT_JPEG &&
ctx               363 drivers/media/platform/coda/coda-bit.c 		    !coda_jpeg_check_buffer(ctx, &src_buf->vb2_buf)) {
ctx               364 drivers/media/platform/coda/coda-bit.c 			v4l2_err(&ctx->dev->v4l2_dev,
ctx               366 drivers/media/platform/coda/coda-bit.c 				 ctx->qsequence);
ctx               367 drivers/media/platform/coda/coda-bit.c 			src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               383 drivers/media/platform/coda/coda-bit.c 			src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               389 drivers/media/platform/coda/coda-bit.c 		start = ctx->bitstream_fifo.kfifo.in;
ctx               391 drivers/media/platform/coda/coda-bit.c 		if (coda_bitstream_try_queue(ctx, src_buf)) {
ctx               396 drivers/media/platform/coda/coda-bit.c 			src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               404 drivers/media/platform/coda/coda-bit.c 				meta->end = ctx->bitstream_fifo.kfifo.in;
ctx               407 drivers/media/platform/coda/coda-bit.c 					coda_dbg(1, ctx, "marking last meta");
ctx               408 drivers/media/platform/coda/coda-bit.c 				spin_lock(&ctx->buffer_meta_lock);
ctx               410 drivers/media/platform/coda/coda-bit.c 					      &ctx->buffer_meta_list);
ctx               411 drivers/media/platform/coda/coda-bit.c 				ctx->num_metas++;
ctx               412 drivers/media/platform/coda/coda-bit.c 				spin_unlock(&ctx->buffer_meta_lock);
ctx               414 drivers/media/platform/coda/coda-bit.c 				trace_coda_bit_queue(ctx, src_buf, meta);
ctx               433 drivers/media/platform/coda/coda-bit.c void coda_bit_stream_end_flag(struct coda_ctx *ctx)
ctx               435 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               437 drivers/media/platform/coda/coda-bit.c 	ctx->bit_stream_param |= CODA_BIT_STREAM_END_FLAG;
ctx               442 drivers/media/platform/coda/coda-bit.c 	    (ctx->idx == coda_read(dev, CODA_REG_BIT_RUN_INDEX))) {
ctx               443 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->bit_stream_param,
ctx               448 drivers/media/platform/coda/coda-bit.c static void coda_parabuf_write(struct coda_ctx *ctx, int index, u32 value)
ctx               450 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               451 drivers/media/platform/coda/coda-bit.c 	u32 *p = ctx->parabuf.vaddr;
ctx               459 drivers/media/platform/coda/coda-bit.c static inline int coda_alloc_context_buf(struct coda_ctx *ctx,
ctx               463 drivers/media/platform/coda/coda-bit.c 	return coda_alloc_aux_buf(ctx->dev, buf, size, name, ctx->debugfs_entry);
ctx               467 drivers/media/platform/coda/coda-bit.c static void coda_free_framebuffers(struct coda_ctx *ctx)
ctx               472 drivers/media/platform/coda/coda-bit.c 		coda_free_aux_buf(ctx->dev, &ctx->internal_frames[i].buf);
ctx               475 drivers/media/platform/coda/coda-bit.c static int coda_alloc_framebuffers(struct coda_ctx *ctx,
ctx               478 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               483 drivers/media/platform/coda/coda-bit.c 	if (ctx->codec->src_fourcc == V4L2_PIX_FMT_H264 ||
ctx               484 drivers/media/platform/coda/coda-bit.c 	    ctx->codec->dst_fourcc == V4L2_PIX_FMT_H264 ||
ctx               485 drivers/media/platform/coda/coda-bit.c 	    ctx->codec->src_fourcc == V4L2_PIX_FMT_MPEG4 ||
ctx               486 drivers/media/platform/coda/coda-bit.c 	    ctx->codec->dst_fourcc == V4L2_PIX_FMT_MPEG4)
ctx               492 drivers/media/platform/coda/coda-bit.c 	if (ctx->tiled_map_type == GDI_TILED_FRAME_MB_RASTER_MAP)
ctx               498 drivers/media/platform/coda/coda-bit.c 	for (i = 0; i < ctx->num_internal_frames; i++) {
ctx               504 drivers/media/platform/coda/coda-bit.c 		    (ctx->codec->src_fourcc == V4L2_PIX_FMT_H264 ||
ctx               505 drivers/media/platform/coda/coda-bit.c 		     (ctx->codec->src_fourcc == V4L2_PIX_FMT_MPEG4 && i == 0)))
ctx               509 drivers/media/platform/coda/coda-bit.c 			coda_free_framebuffers(ctx);
ctx               512 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buf(ctx, &ctx->internal_frames[i].buf,
ctx               516 drivers/media/platform/coda/coda-bit.c 			coda_free_framebuffers(ctx);
ctx               522 drivers/media/platform/coda/coda-bit.c 	for (i = 0; i < ctx->num_internal_frames; i++) {
ctx               526 drivers/media/platform/coda/coda-bit.c 		y = ctx->internal_frames[i].buf.paddr;
ctx               530 drivers/media/platform/coda/coda-bit.c 		if (ctx->tiled_map_type == GDI_TILED_FRAME_MB_RASTER_MAP) {
ctx               539 drivers/media/platform/coda/coda-bit.c 		coda_parabuf_write(ctx, i * 3 + 0, y);
ctx               540 drivers/media/platform/coda/coda-bit.c 		coda_parabuf_write(ctx, i * 3 + 1, cb);
ctx               541 drivers/media/platform/coda/coda-bit.c 		coda_parabuf_write(ctx, i * 3 + 2, cr);
ctx               547 drivers/media/platform/coda/coda-bit.c 		if (ctx->codec->src_fourcc == V4L2_PIX_FMT_H264)
ctx               548 drivers/media/platform/coda/coda-bit.c 			coda_parabuf_write(ctx, 96 + i, mvcol);
ctx               549 drivers/media/platform/coda/coda-bit.c 		if (ctx->codec->src_fourcc == V4L2_PIX_FMT_MPEG4 && i == 0)
ctx               550 drivers/media/platform/coda/coda-bit.c 			coda_parabuf_write(ctx, 97, mvcol);
ctx               556 drivers/media/platform/coda/coda-bit.c static void coda_free_context_buffers(struct coda_ctx *ctx)
ctx               558 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               560 drivers/media/platform/coda/coda-bit.c 	coda_free_aux_buf(dev, &ctx->slicebuf);
ctx               561 drivers/media/platform/coda/coda-bit.c 	coda_free_aux_buf(dev, &ctx->psbuf);
ctx               563 drivers/media/platform/coda/coda-bit.c 		coda_free_aux_buf(dev, &ctx->workbuf);
ctx               564 drivers/media/platform/coda/coda-bit.c 	coda_free_aux_buf(dev, &ctx->parabuf);
ctx               567 drivers/media/platform/coda/coda-bit.c static int coda_alloc_context_buffers(struct coda_ctx *ctx,
ctx               570 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               574 drivers/media/platform/coda/coda-bit.c 	if (!ctx->parabuf.vaddr) {
ctx               575 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buf(ctx, &ctx->parabuf,
ctx               584 drivers/media/platform/coda/coda-bit.c 	if (!ctx->slicebuf.vaddr && q_data->fourcc == V4L2_PIX_FMT_H264) {
ctx               588 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buf(ctx, &ctx->slicebuf, size,
ctx               594 drivers/media/platform/coda/coda-bit.c 	if (!ctx->psbuf.vaddr && (dev->devtype->product == CODA_HX4 ||
ctx               596 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buf(ctx, &ctx->psbuf,
ctx               602 drivers/media/platform/coda/coda-bit.c 	if (!ctx->workbuf.vaddr) {
ctx               607 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buf(ctx, &ctx->workbuf, size,
ctx               616 drivers/media/platform/coda/coda-bit.c 	coda_free_context_buffers(ctx);
ctx               620 drivers/media/platform/coda/coda-bit.c static int coda_encode_header(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx               624 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               641 drivers/media/platform/coda/coda-bit.c 	    ctx->codec->dst_fourcc == V4L2_PIX_FMT_H264 &&
ctx               643 drivers/media/platform/coda/coda-bit.c 		q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               658 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_ENCODE_HEADER);
ctx               670 drivers/media/platform/coda/coda-bit.c 		*size = coda_read(dev, CODA_REG_BIT_WR_PTR(ctx->reg_idx)) -
ctx               678 drivers/media/platform/coda/coda-bit.c static u32 coda_slice_mode(struct coda_ctx *ctx)
ctx               682 drivers/media/platform/coda/coda-bit.c 	switch (ctx->params.slice_mode) {
ctx               687 drivers/media/platform/coda/coda-bit.c 		size = ctx->params.slice_max_mb;
ctx               691 drivers/media/platform/coda/coda-bit.c 		size = ctx->params.slice_max_bits;
ctx               701 drivers/media/platform/coda/coda-bit.c static int coda_enc_param_change(struct coda_ctx *ctx)
ctx               703 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               708 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.gop_size_changed) {
ctx               710 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->params.gop_size,
ctx               712 drivers/media/platform/coda/coda-bit.c 		ctx->gopcounter = ctx->params.gop_size - 1;
ctx               713 drivers/media/platform/coda/coda-bit.c 		ctx->params.gop_size_changed = false;
ctx               715 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.h264_intra_qp_changed) {
ctx               716 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "parameter change: intra Qp %u\n",
ctx               717 drivers/media/platform/coda/coda-bit.c 			 ctx->params.h264_intra_qp);
ctx               719 drivers/media/platform/coda/coda-bit.c 		if (ctx->params.bitrate) {
ctx               721 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->params.h264_intra_qp,
ctx               724 drivers/media/platform/coda/coda-bit.c 		ctx->params.h264_intra_qp_changed = false;
ctx               726 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.bitrate_changed) {
ctx               727 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "parameter change: bitrate %u kbit/s\n",
ctx               728 drivers/media/platform/coda/coda-bit.c 			 ctx->params.bitrate);
ctx               730 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->params.bitrate,
ctx               732 drivers/media/platform/coda/coda-bit.c 		ctx->params.bitrate_changed = false;
ctx               734 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.framerate_changed) {
ctx               735 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "parameter change: frame rate %u/%u Hz\n",
ctx               736 drivers/media/platform/coda/coda-bit.c 			 ctx->params.framerate & 0xffff,
ctx               737 drivers/media/platform/coda/coda-bit.c 			 (ctx->params.framerate >> 16) + 1);
ctx               739 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->params.framerate,
ctx               741 drivers/media/platform/coda/coda-bit.c 		ctx->params.framerate_changed = false;
ctx               743 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.intra_refresh_changed) {
ctx               744 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "parameter change: intra refresh MBs %u\n",
ctx               745 drivers/media/platform/coda/coda-bit.c 			 ctx->params.intra_refresh);
ctx               747 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->params.intra_refresh,
ctx               749 drivers/media/platform/coda/coda-bit.c 		ctx->params.intra_refresh_changed = false;
ctx               751 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.slice_mode_changed) {
ctx               753 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, coda_slice_mode(ctx),
ctx               755 drivers/media/platform/coda/coda-bit.c 		ctx->params.slice_mode_changed = false;
ctx               763 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_RC_CHANGE_PARAMETER);
ctx               769 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "parameter change failed: %u\n", success);
ctx               789 drivers/media/platform/coda/coda-bit.c static void coda_setup_iram(struct coda_ctx *ctx)
ctx               791 drivers/media/platform/coda/coda-bit.c 	struct coda_iram_info *iram_info = &ctx->iram_info;
ctx               792 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx               830 drivers/media/platform/coda/coda-bit.c 	if (ctx->inst_type == CODA_INST_ENCODER) {
ctx               833 drivers/media/platform/coda/coda-bit.c 		q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               870 drivers/media/platform/coda/coda-bit.c 	} else if (ctx->inst_type == CODA_INST_DECODER) {
ctx               873 drivers/media/platform/coda/coda-bit.c 		q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               898 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "IRAM smaller than needed\n");
ctx               903 drivers/media/platform/coda/coda-bit.c 		if (ctx->inst_type == CODA_INST_DECODER) {
ctx              1008 drivers/media/platform/coda/coda-bit.c static void coda9_set_frame_cache(struct coda_ctx *ctx, u32 fourcc)
ctx              1012 drivers/media/platform/coda/coda-bit.c 	if (ctx->tiled_map_type == GDI_LINEAR_FRAME_MAP) {
ctx              1021 drivers/media/platform/coda/coda-bit.c 	coda_write(ctx->dev, cache_size, CODA9_CMD_SET_FRAME_CACHE_SIZE);
ctx              1031 drivers/media/platform/coda/coda-bit.c 	coda_write(ctx->dev, cache_config, CODA9_CMD_SET_FRAME_CACHE_CONFIG);
ctx              1038 drivers/media/platform/coda/coda-bit.c static int coda_encoder_reqbufs(struct coda_ctx *ctx,
ctx              1048 drivers/media/platform/coda/coda-bit.c 		q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1049 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buffers(ctx, q_data_src);
ctx              1053 drivers/media/platform/coda/coda-bit.c 		coda_free_context_buffers(ctx);
ctx              1059 drivers/media/platform/coda/coda-bit.c static int coda_start_encoding(struct coda_ctx *ctx)
ctx              1061 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1071 drivers/media/platform/coda/coda-bit.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1072 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1075 drivers/media/platform/coda/coda-bit.c 	buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              1085 drivers/media/platform/coda/coda-bit.c 		if (!ctx->params.jpeg_qmat_tab[0])
ctx              1086 drivers/media/platform/coda/coda-bit.c 			ctx->params.jpeg_qmat_tab[0] = kmalloc(64, GFP_KERNEL);
ctx              1087 drivers/media/platform/coda/coda-bit.c 		if (!ctx->params.jpeg_qmat_tab[1])
ctx              1088 drivers/media/platform/coda/coda-bit.c 			ctx->params.jpeg_qmat_tab[1] = kmalloc(64, GFP_KERNEL);
ctx              1089 drivers/media/platform/coda/coda-bit.c 		coda_set_jpeg_compression_quality(ctx, ctx->params.jpeg_quality);
ctx              1094 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->parabuf.paddr, CODA_REG_BIT_PARA_BUF_ADDR);
ctx              1095 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, bitstream_buf, CODA_REG_BIT_RD_PTR(ctx->reg_idx));
ctx              1096 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, bitstream_buf, CODA_REG_BIT_WR_PTR(ctx->reg_idx));
ctx              1112 drivers/media/platform/coda/coda-bit.c 	ctx->frame_mem_ctrl &= ~(CODA_FRAME_CHROMA_INTERLEAVE | (0x3 << 9) |
ctx              1115 drivers/media/platform/coda/coda-bit.c 		ctx->frame_mem_ctrl |= CODA_FRAME_CHROMA_INTERLEAVE;
ctx              1116 drivers/media/platform/coda/coda-bit.c 	if (ctx->tiled_map_type == GDI_TILED_FRAME_MB_RASTER_MAP)
ctx              1117 drivers/media/platform/coda/coda-bit.c 		ctx->frame_mem_ctrl |= (0x3 << 9) | CODA9_FRAME_TILED2LINEAR;
ctx              1118 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->frame_mem_ctrl, CODA_REG_BIT_FRAME_MEM_CTRL);
ctx              1153 drivers/media/platform/coda/coda-bit.c 		ctx->params.framerate = 0;
ctx              1154 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->params.framerate,
ctx              1157 drivers/media/platform/coda/coda-bit.c 	ctx->params.codec_mode = ctx->codec->mode;
ctx              1175 drivers/media/platform/coda/coda-bit.c 		value = ((ctx->params.h264_disable_deblocking_filter_idc &
ctx              1178 drivers/media/platform/coda/coda-bit.c 			((ctx->params.h264_slice_alpha_c0_offset_div2 &
ctx              1181 drivers/media/platform/coda/coda-bit.c 			((ctx->params.h264_slice_beta_offset_div2 &
ctx              1184 drivers/media/platform/coda/coda-bit.c 			(ctx->params.h264_constrained_intra_pred_flag <<
ctx              1186 drivers/media/platform/coda/coda-bit.c 			(ctx->params.h264_chroma_qp_index_offset &
ctx              1192 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->params.jpeg_restart_interval,
ctx              1198 drivers/media/platform/coda/coda-bit.c 		coda_jpeg_write_tables(ctx);
ctx              1212 drivers/media/platform/coda/coda-bit.c 		value = coda_slice_mode(ctx);
ctx              1214 drivers/media/platform/coda/coda-bit.c 		value = ctx->params.gop_size;
ctx              1218 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.bitrate) {
ctx              1219 drivers/media/platform/coda/coda-bit.c 		ctx->params.bitrate_changed = false;
ctx              1220 drivers/media/platform/coda/coda-bit.c 		ctx->params.h264_intra_qp_changed = false;
ctx              1223 drivers/media/platform/coda/coda-bit.c 		value = (ctx->params.bitrate & CODA_RATECONTROL_BITRATE_MASK)
ctx              1226 drivers/media/platform/coda/coda-bit.c 		value |= (ctx->params.vbv_delay &
ctx              1236 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->params.vbv_size, CODA_CMD_ENC_SEQ_RC_BUF_SIZE);
ctx              1237 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->params.intra_refresh,
ctx              1254 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.h264_min_qp || ctx->params.h264_max_qp) {
ctx              1256 drivers/media/platform/coda/coda-bit.c 			   ctx->params.h264_min_qp << CODA_QPMIN_OFFSET |
ctx              1257 drivers/media/platform/coda/coda-bit.c 			   ctx->params.h264_max_qp << CODA_QPMAX_OFFSET,
ctx              1261 drivers/media/platform/coda/coda-bit.c 		if (ctx->params.h264_max_qp)
ctx              1272 drivers/media/platform/coda/coda-bit.c 		if (ctx->params.h264_min_qp)
ctx              1274 drivers/media/platform/coda/coda-bit.c 		if (ctx->params.h264_max_qp)
ctx              1281 drivers/media/platform/coda/coda-bit.c 	coda_setup_iram(ctx);
ctx              1291 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->iram_info.search_ram_paddr,
ctx              1293 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->iram_info.search_ram_size,
ctx              1302 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_SEQ_INIT);
ctx              1313 drivers/media/platform/coda/coda-bit.c 	ctx->initialized = 1;
ctx              1317 drivers/media/platform/coda/coda-bit.c 			ctx->num_internal_frames = 4;
ctx              1319 drivers/media/platform/coda/coda-bit.c 			ctx->num_internal_frames = 2;
ctx              1320 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_framebuffers(ctx, q_data_src, dst_fourcc);
ctx              1328 drivers/media/platform/coda/coda-bit.c 		ctx->num_internal_frames = 0;
ctx              1341 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_bit_use,
ctx              1343 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_ip_ac_dc_use,
ctx              1345 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_dbk_y_use,
ctx              1347 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_dbk_c_use,
ctx              1349 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_ovl_use,
ctx              1352 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->iram_info.buf_btp_use,
ctx              1355 drivers/media/platform/coda/coda-bit.c 			coda9_set_frame_cache(ctx, q_data_src->fourcc);
ctx              1358 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->internal_frames[2].buf.paddr,
ctx              1360 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->internal_frames[3].buf.paddr,
ctx              1365 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_SET_FRAME_BUF);
ctx              1371 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "start encoding %dx%d %4.4s->%4.4s @ %d/%d Hz\n",
ctx              1373 drivers/media/platform/coda/coda-bit.c 		 (char *)&ctx->codec->src_fourcc, (char *)&dst_fourcc,
ctx              1374 drivers/media/platform/coda/coda-bit.c 		 ctx->params.framerate & 0xffff,
ctx              1375 drivers/media/platform/coda/coda-bit.c 		 (ctx->params.framerate >> 16) + 1);
ctx              1378 drivers/media/platform/coda/coda-bit.c 	buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              1385 drivers/media/platform/coda/coda-bit.c 		ret = coda_encode_header(ctx, buf, CODA_HEADER_H264_SPS,
ctx              1386 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header[0][0],
ctx              1387 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header_size[0]);
ctx              1400 drivers/media/platform/coda/coda-bit.c 		if (ctx->dev->devtype->product != CODA_960 &&
ctx              1403 drivers/media/platform/coda/coda-bit.c 			ret = coda_h264_sps_fixup(ctx, q_data_src->rect.width,
ctx              1405 drivers/media/platform/coda/coda-bit.c 						  &ctx->vpu_header[0][0],
ctx              1406 drivers/media/platform/coda/coda-bit.c 						  &ctx->vpu_header_size[0],
ctx              1407 drivers/media/platform/coda/coda-bit.c 						  sizeof(ctx->vpu_header[0]));
ctx              1416 drivers/media/platform/coda/coda-bit.c 		ret = coda_encode_header(ctx, buf, CODA_HEADER_H264_PPS,
ctx              1417 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header[1][0],
ctx              1418 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header_size[1]);
ctx              1427 drivers/media/platform/coda/coda-bit.c 		ctx->vpu_header_size[2] = coda_h264_padding(
ctx              1428 drivers/media/platform/coda/coda-bit.c 					(ctx->vpu_header_size[0] +
ctx              1429 drivers/media/platform/coda/coda-bit.c 					 ctx->vpu_header_size[1]),
ctx              1430 drivers/media/platform/coda/coda-bit.c 					 ctx->vpu_header[2]);
ctx              1437 drivers/media/platform/coda/coda-bit.c 		ret = coda_encode_header(ctx, buf, CODA_HEADER_MP4V_VOS,
ctx              1438 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header[0][0],
ctx              1439 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header_size[0]);
ctx              1443 drivers/media/platform/coda/coda-bit.c 		ret = coda_encode_header(ctx, buf, CODA_HEADER_MP4V_VIS,
ctx              1444 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header[1][0],
ctx              1445 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header_size[1]);
ctx              1449 drivers/media/platform/coda/coda-bit.c 		ret = coda_encode_header(ctx, buf, CODA_HEADER_MP4V_VOL,
ctx              1450 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header[2][0],
ctx              1451 drivers/media/platform/coda/coda-bit.c 					 &ctx->vpu_header_size[2]);
ctx              1465 drivers/media/platform/coda/coda-bit.c static int coda_prepare_encode(struct coda_ctx *ctx)
ctx              1469 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1478 drivers/media/platform/coda/coda-bit.c 	ret = coda_enc_param_change(ctx);
ctx              1480 drivers/media/platform/coda/coda-bit.c 		v4l2_warn(&ctx->dev->v4l2_dev, "parameter change failed: %d\n",
ctx              1484 drivers/media/platform/coda/coda-bit.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              1485 drivers/media/platform/coda/coda-bit.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              1486 drivers/media/platform/coda/coda-bit.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1487 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1490 drivers/media/platform/coda/coda-bit.c 	src_buf->sequence = ctx->osequence;
ctx              1491 drivers/media/platform/coda/coda-bit.c 	dst_buf->sequence = ctx->osequence;
ctx              1492 drivers/media/platform/coda/coda-bit.c 	ctx->osequence++;
ctx              1494 drivers/media/platform/coda/coda-bit.c 	force_ipicture = ctx->params.force_ipicture;
ctx              1496 drivers/media/platform/coda/coda-bit.c 		ctx->params.force_ipicture = false;
ctx              1497 drivers/media/platform/coda/coda-bit.c 	else if (ctx->params.gop_size != 0 &&
ctx              1498 drivers/media/platform/coda/coda-bit.c 		 (src_buf->sequence % ctx->params.gop_size) == 0)
ctx              1515 drivers/media/platform/coda/coda-bit.c 		coda_set_gdi_regs(ctx);
ctx              1524 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[0] +
ctx              1525 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[1] +
ctx              1526 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[2];
ctx              1528 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[0] -
ctx              1529 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[1] -
ctx              1530 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[2];
ctx              1532 drivers/media/platform/coda/coda-bit.c 		       &ctx->vpu_header[0][0], ctx->vpu_header_size[0]);
ctx              1534 drivers/media/platform/coda/coda-bit.c 			+ ctx->vpu_header_size[0], &ctx->vpu_header[1][0],
ctx              1535 drivers/media/platform/coda/coda-bit.c 			ctx->vpu_header_size[1]);
ctx              1537 drivers/media/platform/coda/coda-bit.c 			+ ctx->vpu_header_size[0] + ctx->vpu_header_size[1],
ctx              1538 drivers/media/platform/coda/coda-bit.c 			&ctx->vpu_header[2][0], ctx->vpu_header_size[2]);
ctx              1548 drivers/media/platform/coda/coda-bit.c 			quant_param = ctx->params.h264_intra_qp;
ctx              1551 drivers/media/platform/coda/coda-bit.c 			quant_param = ctx->params.mpeg4_intra_qp;
ctx              1557 drivers/media/platform/coda/coda-bit.c 			v4l2_warn(&ctx->dev->v4l2_dev,
ctx              1564 drivers/media/platform/coda/coda-bit.c 			quant_param = ctx->params.h264_inter_qp;
ctx              1567 drivers/media/platform/coda/coda-bit.c 			quant_param = ctx->params.mpeg4_inter_qp;
ctx              1570 drivers/media/platform/coda/coda-bit.c 			v4l2_warn(&ctx->dev->v4l2_dev,
ctx              1577 drivers/media/platform/coda/coda-bit.c 	if (ctx->params.rot_mode)
ctx              1578 drivers/media/platform/coda/coda-bit.c 		rot_mode = CODA_ROT_MIR_ENABLE | ctx->params.rot_mode;
ctx              1592 drivers/media/platform/coda/coda-bit.c 	coda_write_base(ctx, q_data_src, src_buf, reg);
ctx              1601 drivers/media/platform/coda/coda-bit.c 	if (!ctx->streamon_out) {
ctx              1603 drivers/media/platform/coda/coda-bit.c 		ctx->bit_stream_param |= CODA_BIT_STREAM_END_FLAG;
ctx              1604 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->bit_stream_param,
ctx              1609 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.axi_sram_use,
ctx              1612 drivers/media/platform/coda/coda-bit.c 	trace_coda_enc_pic_run(ctx, src_buf);
ctx              1614 drivers/media/platform/coda/coda-bit.c 	coda_command_async(ctx, CODA_COMMAND_PIC_RUN);
ctx              1626 drivers/media/platform/coda/coda-bit.c static void coda_finish_encode(struct coda_ctx *ctx)
ctx              1629 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1637 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->wakeup_mutex);
ctx              1638 drivers/media/platform/coda/coda-bit.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1639 drivers/media/platform/coda/coda-bit.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              1641 drivers/media/platform/coda/coda-bit.c 	trace_coda_enc_pic_done(ctx, dst_buf);
ctx              1645 drivers/media/platform/coda/coda-bit.c 	wr_ptr = coda_read(dev, CODA_REG_BIT_WR_PTR(ctx->reg_idx));
ctx              1651 drivers/media/platform/coda/coda-bit.c 					ctx->vpu_header_size[0] +
ctx              1652 drivers/media/platform/coda/coda-bit.c 					ctx->vpu_header_size[1] +
ctx              1653 drivers/media/platform/coda/coda-bit.c 					ctx->vpu_header_size[2]);
ctx              1658 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "frame size = %u\n", wr_ptr - start_ptr);
ctx              1676 drivers/media/platform/coda/coda-bit.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1677 drivers/media/platform/coda/coda-bit.c 	coda_m2m_buf_done(ctx, dst_buf, VB2_BUF_STATE_DONE);
ctx              1678 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->wakeup_mutex);
ctx              1680 drivers/media/platform/coda/coda-bit.c 	ctx->gopcounter--;
ctx              1681 drivers/media/platform/coda/coda-bit.c 	if (ctx->gopcounter < 0)
ctx              1682 drivers/media/platform/coda/coda-bit.c 		ctx->gopcounter = ctx->params.gop_size - 1;
ctx              1684 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "job finished: encoded %c frame (%d)%s\n",
ctx              1691 drivers/media/platform/coda/coda-bit.c 	struct coda_ctx *ctx = container_of(work, struct coda_ctx, seq_end_work);
ctx              1692 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1694 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->buffer_mutex);
ctx              1697 drivers/media/platform/coda/coda-bit.c 	if (ctx->initialized == 0)
ctx              1700 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "%s: sent command 'SEQ_END' to coda\n", __func__);
ctx              1701 drivers/media/platform/coda/coda-bit.c 	if (coda_command_sync(ctx, CODA_COMMAND_SEQ_END)) {
ctx              1712 drivers/media/platform/coda/coda-bit.c 		coda_hw_reset(ctx);
ctx              1714 drivers/media/platform/coda/coda-bit.c 	kfifo_init(&ctx->bitstream_fifo,
ctx              1715 drivers/media/platform/coda/coda-bit.c 		ctx->bitstream.vaddr, ctx->bitstream.size);
ctx              1717 drivers/media/platform/coda/coda-bit.c 	coda_free_framebuffers(ctx);
ctx              1719 drivers/media/platform/coda/coda-bit.c 	ctx->initialized = 0;
ctx              1723 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->buffer_mutex);
ctx              1726 drivers/media/platform/coda/coda-bit.c static void coda_bit_release(struct coda_ctx *ctx)
ctx              1728 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->buffer_mutex);
ctx              1729 drivers/media/platform/coda/coda-bit.c 	coda_free_framebuffers(ctx);
ctx              1730 drivers/media/platform/coda/coda-bit.c 	coda_free_context_buffers(ctx);
ctx              1731 drivers/media/platform/coda/coda-bit.c 	coda_free_bitstream_buffer(ctx);
ctx              1732 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->buffer_mutex);
ctx              1749 drivers/media/platform/coda/coda-bit.c static int coda_alloc_bitstream_buffer(struct coda_ctx *ctx,
ctx              1752 drivers/media/platform/coda/coda-bit.c 	if (ctx->bitstream.vaddr)
ctx              1755 drivers/media/platform/coda/coda-bit.c 	ctx->bitstream.size = roundup_pow_of_two(q_data->sizeimage * 2);
ctx              1756 drivers/media/platform/coda/coda-bit.c 	ctx->bitstream.vaddr = dma_alloc_wc(ctx->dev->dev, ctx->bitstream.size,
ctx              1757 drivers/media/platform/coda/coda-bit.c 					    &ctx->bitstream.paddr, GFP_KERNEL);
ctx              1758 drivers/media/platform/coda/coda-bit.c 	if (!ctx->bitstream.vaddr) {
ctx              1759 drivers/media/platform/coda/coda-bit.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx              1763 drivers/media/platform/coda/coda-bit.c 	kfifo_init(&ctx->bitstream_fifo,
ctx              1764 drivers/media/platform/coda/coda-bit.c 		   ctx->bitstream.vaddr, ctx->bitstream.size);
ctx              1769 drivers/media/platform/coda/coda-bit.c static void coda_free_bitstream_buffer(struct coda_ctx *ctx)
ctx              1771 drivers/media/platform/coda/coda-bit.c 	if (ctx->bitstream.vaddr == NULL)
ctx              1774 drivers/media/platform/coda/coda-bit.c 	dma_free_wc(ctx->dev->dev, ctx->bitstream.size, ctx->bitstream.vaddr,
ctx              1775 drivers/media/platform/coda/coda-bit.c 		    ctx->bitstream.paddr);
ctx              1776 drivers/media/platform/coda/coda-bit.c 	ctx->bitstream.vaddr = NULL;
ctx              1777 drivers/media/platform/coda/coda-bit.c 	kfifo_init(&ctx->bitstream_fifo, NULL, 0);
ctx              1780 drivers/media/platform/coda/coda-bit.c static int coda_decoder_reqbufs(struct coda_ctx *ctx,
ctx              1790 drivers/media/platform/coda/coda-bit.c 		q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1791 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_context_buffers(ctx, q_data_src);
ctx              1794 drivers/media/platform/coda/coda-bit.c 		ret = coda_alloc_bitstream_buffer(ctx, q_data_src);
ctx              1796 drivers/media/platform/coda/coda-bit.c 			coda_free_context_buffers(ctx);
ctx              1800 drivers/media/platform/coda/coda-bit.c 		coda_free_bitstream_buffer(ctx);
ctx              1801 drivers/media/platform/coda/coda-bit.c 		coda_free_context_buffers(ctx);
ctx              1807 drivers/media/platform/coda/coda-bit.c static bool coda_reorder_enable(struct coda_ctx *ctx)
ctx              1809 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1817 drivers/media/platform/coda/coda-bit.c 	if (ctx->codec->src_fourcc == V4L2_PIX_FMT_JPEG)
ctx              1820 drivers/media/platform/coda/coda-bit.c 	if (ctx->codec->src_fourcc != V4L2_PIX_FMT_H264)
ctx              1823 drivers/media/platform/coda/coda-bit.c 	profile = coda_h264_profile(ctx->params.h264_profile_idc);
ctx              1826 drivers/media/platform/coda/coda-bit.c 			  ctx->params.h264_profile_idc);
ctx              1832 drivers/media/platform/coda/coda-bit.c static int __coda_decoder_seq_init(struct coda_ctx *ctx)
ctx              1836 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              1844 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "Video Data Order Adapter: %s\n",
ctx              1845 drivers/media/platform/coda/coda-bit.c 		 ctx->use_vdoa ? "Enabled" : "Disabled");
ctx              1848 drivers/media/platform/coda/coda-bit.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1849 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1850 drivers/media/platform/coda/coda-bit.c 	bitstream_buf = ctx->bitstream.paddr;
ctx              1851 drivers/media/platform/coda/coda-bit.c 	bitstream_size = ctx->bitstream.size;
ctx              1856 drivers/media/platform/coda/coda-bit.c 	coda_kfifo_sync_to_device_full(ctx);
ctx              1858 drivers/media/platform/coda/coda-bit.c 	ctx->frame_mem_ctrl &= ~(CODA_FRAME_CHROMA_INTERLEAVE | (0x3 << 9) |
ctx              1861 drivers/media/platform/coda/coda-bit.c 		ctx->frame_mem_ctrl |= CODA_FRAME_CHROMA_INTERLEAVE;
ctx              1862 drivers/media/platform/coda/coda-bit.c 	if (ctx->tiled_map_type == GDI_TILED_FRAME_MB_RASTER_MAP)
ctx              1863 drivers/media/platform/coda/coda-bit.c 		ctx->frame_mem_ctrl |= (0x3 << 9) |
ctx              1864 drivers/media/platform/coda/coda-bit.c 			((ctx->use_vdoa) ? 0 : CODA9_FRAME_TILED2LINEAR);
ctx              1865 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->frame_mem_ctrl, CODA_REG_BIT_FRAME_MEM_CTRL);
ctx              1867 drivers/media/platform/coda/coda-bit.c 	ctx->display_idx = -1;
ctx              1868 drivers/media/platform/coda/coda-bit.c 	ctx->frm_dis_flg = 0;
ctx              1869 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, 0, CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx));
ctx              1874 drivers/media/platform/coda/coda-bit.c 	if (coda_reorder_enable(ctx))
ctx              1876 drivers/media/platform/coda/coda-bit.c 	if (ctx->codec->src_fourcc == V4L2_PIX_FMT_JPEG)
ctx              1880 drivers/media/platform/coda/coda-bit.c 	ctx->params.codec_mode = ctx->codec->mode;
ctx              1883 drivers/media/platform/coda/coda-bit.c 		ctx->params.codec_mode_aux = CODA_MP4_AUX_MPEG4;
ctx              1885 drivers/media/platform/coda/coda-bit.c 		ctx->params.codec_mode_aux = 0;
ctx              1893 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->psbuf.paddr,
ctx              1908 drivers/media/platform/coda/coda-bit.c 	ctx->bit_stream_param = CODA_BIT_DEC_SEQ_INIT_ESCAPE;
ctx              1909 drivers/media/platform/coda/coda-bit.c 	ret = coda_command_sync(ctx, CODA_COMMAND_SEQ_INIT);
ctx              1910 drivers/media/platform/coda/coda-bit.c 	ctx->bit_stream_param = 0;
ctx              1915 drivers/media/platform/coda/coda-bit.c 	ctx->sequence_offset = ~0U;
ctx              1916 drivers/media/platform/coda/coda-bit.c 	ctx->initialized = 1;
ctx              1919 drivers/media/platform/coda/coda-bit.c 	coda_kfifo_sync_from_device(ctx);
ctx              1947 drivers/media/platform/coda/coda-bit.c 	coda_dbg(1, ctx, "start decoding: %dx%d\n", width, height);
ctx              1949 drivers/media/platform/coda/coda-bit.c 	ctx->num_internal_frames = coda_read(dev, CODA_RET_DEC_SEQ_FRAME_NEED);
ctx              1957 drivers/media/platform/coda/coda-bit.c 	if (ctx->use_vdoa)
ctx              1958 drivers/media/platform/coda/coda-bit.c 		ctx->num_internal_frames += 1;
ctx              1959 drivers/media/platform/coda/coda-bit.c 	if (ctx->num_internal_frames > CODA_MAX_FRAMEBUFFERS) {
ctx              1962 drivers/media/platform/coda/coda-bit.c 			 CODA_MAX_FRAMEBUFFERS, ctx->num_internal_frames);
ctx              1989 drivers/media/platform/coda/coda-bit.c 			coda_update_profile_level_ctrls(ctx, profile, level);
ctx              1997 drivers/media/platform/coda/coda-bit.c 	struct coda_ctx *ctx = container_of(work,
ctx              1999 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              2002 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->buffer_mutex);
ctx              2005 drivers/media/platform/coda/coda-bit.c 	if (ctx->initialized == 1)
ctx              2008 drivers/media/platform/coda/coda-bit.c 	ret = __coda_decoder_seq_init(ctx);
ctx              2012 drivers/media/platform/coda/coda-bit.c 	ctx->initialized = 1;
ctx              2016 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->buffer_mutex);
ctx              2019 drivers/media/platform/coda/coda-bit.c static int __coda_start_decoding(struct coda_ctx *ctx)
ctx              2022 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              2026 drivers/media/platform/coda/coda-bit.c 	if (!ctx->initialized) {
ctx              2027 drivers/media/platform/coda/coda-bit.c 		ret = __coda_decoder_seq_init(ctx);
ctx              2032 drivers/media/platform/coda/coda-bit.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              2033 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              2037 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->parabuf.paddr, CODA_REG_BIT_PARA_BUF_ADDR);
ctx              2039 drivers/media/platform/coda/coda-bit.c 	ret = coda_alloc_framebuffers(ctx, q_data_dst, src_fourcc);
ctx              2046 drivers/media/platform/coda/coda-bit.c 	coda_write(dev, ctx->num_internal_frames, CODA_CMD_SET_FRAME_BUF_NUM);
ctx              2052 drivers/media/platform/coda/coda-bit.c 		coda_setup_iram(ctx);
ctx              2054 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_bit_use,
ctx              2056 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_ip_ac_dc_use,
ctx              2058 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_dbk_y_use,
ctx              2060 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_dbk_c_use,
ctx              2062 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.buf_ovl_use,
ctx              2065 drivers/media/platform/coda/coda-bit.c 			coda_write(dev, ctx->iram_info.buf_btp_use,
ctx              2069 drivers/media/platform/coda/coda-bit.c 			coda9_set_frame_cache(ctx, dst_fourcc);
ctx              2074 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->slicebuf.paddr,
ctx              2076 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->slicebuf.size / 1024,
ctx              2097 drivers/media/platform/coda/coda-bit.c 	if (coda_command_sync(ctx, CODA_COMMAND_SET_FRAME_BUF)) {
ctx              2098 drivers/media/platform/coda/coda-bit.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx              2106 drivers/media/platform/coda/coda-bit.c static int coda_start_decoding(struct coda_ctx *ctx)
ctx              2108 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              2112 drivers/media/platform/coda/coda-bit.c 	ret = __coda_start_decoding(ctx);
ctx              2118 drivers/media/platform/coda/coda-bit.c static int coda_prepare_decode(struct coda_ctx *ctx)
ctx              2121 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              2127 drivers/media/platform/coda/coda-bit.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2128 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              2131 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->bitstream_mutex);
ctx              2132 drivers/media/platform/coda/coda-bit.c 	coda_fill_bitstream(ctx, NULL);
ctx              2133 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->bitstream_mutex);
ctx              2135 drivers/media/platform/coda/coda-bit.c 	if (coda_get_bitstream_payload(ctx) < 512 &&
ctx              2136 drivers/media/platform/coda/coda-bit.c 	    (!(ctx->bit_stream_param & CODA_BIT_STREAM_END_FLAG))) {
ctx              2137 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "bitstream payload: %d, skipping\n",
ctx              2138 drivers/media/platform/coda/coda-bit.c 			 coda_get_bitstream_payload(ctx));
ctx              2139 drivers/media/platform/coda/coda-bit.c 		v4l2_m2m_job_finish(ctx->dev->m2m_dev, ctx->fh.m2m_ctx);
ctx              2144 drivers/media/platform/coda/coda-bit.c 	if (!ctx->initialized) {
ctx              2145 drivers/media/platform/coda/coda-bit.c 		int ret = __coda_start_decoding(ctx);
ctx              2149 drivers/media/platform/coda/coda-bit.c 			v4l2_m2m_job_finish(ctx->dev->m2m_dev, ctx->fh.m2m_ctx);
ctx              2152 drivers/media/platform/coda/coda-bit.c 			ctx->initialized = 1;
ctx              2157 drivers/media/platform/coda/coda-bit.c 		coda_set_gdi_regs(ctx);
ctx              2159 drivers/media/platform/coda/coda-bit.c 	if (ctx->use_vdoa &&
ctx              2160 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx >= 0 &&
ctx              2161 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx < ctx->num_internal_frames) {
ctx              2162 drivers/media/platform/coda/coda-bit.c 		vdoa_device_run(ctx->vdoa,
ctx              2164 drivers/media/platform/coda/coda-bit.c 				ctx->internal_frames[ctx->display_idx].buf.paddr);
ctx              2186 drivers/media/platform/coda/coda-bit.c 		coda_write_base(ctx, q_data_dst, dst_buf, reg_addr);
ctx              2189 drivers/media/platform/coda/coda-bit.c 		rot_mode = CODA_ROT_MIR_ENABLE | ctx->params.rot_mode;
ctx              2213 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->iram_info.axi_sram_use,
ctx              2216 drivers/media/platform/coda/coda-bit.c 	spin_lock(&ctx->buffer_meta_lock);
ctx              2217 drivers/media/platform/coda/coda-bit.c 	meta = list_first_entry_or_null(&ctx->buffer_meta_list,
ctx              2220 drivers/media/platform/coda/coda-bit.c 	if (meta && ctx->codec->src_fourcc == V4L2_PIX_FMT_JPEG) {
ctx              2223 drivers/media/platform/coda/coda-bit.c 		if (meta->end == ctx->bitstream_fifo.kfifo.in) {
ctx              2232 drivers/media/platform/coda/coda-bit.c 			kfifo_in(&ctx->bitstream_fifo, buf, pad);
ctx              2235 drivers/media/platform/coda/coda-bit.c 	spin_unlock(&ctx->buffer_meta_lock);
ctx              2237 drivers/media/platform/coda/coda-bit.c 	coda_kfifo_sync_to_device_full(ctx);
ctx              2245 drivers/media/platform/coda/coda-bit.c 	trace_coda_dec_pic_run(ctx, meta);
ctx              2247 drivers/media/platform/coda/coda-bit.c 	coda_command_async(ctx, CODA_COMMAND_PIC_RUN);
ctx              2252 drivers/media/platform/coda/coda-bit.c static void coda_finish_decode(struct coda_ctx *ctx)
ctx              2254 drivers/media/platform/coda/coda-bit.c 	struct coda_dev *dev = ctx->dev;
ctx              2270 drivers/media/platform/coda/coda-bit.c 	coda_kfifo_sync_from_device(ctx);
ctx              2276 drivers/media/platform/coda/coda-bit.c 	if (ctx->bit_stream_param & CODA_BIT_STREAM_END_FLAG) {
ctx              2277 drivers/media/platform/coda/coda-bit.c 		if (coda_get_bitstream_payload(ctx) >= ctx->bitstream.size - 512)
ctx              2278 drivers/media/platform/coda/coda-bit.c 			kfifo_init(&ctx->bitstream_fifo,
ctx              2279 drivers/media/platform/coda/coda-bit.c 				ctx->bitstream.vaddr, ctx->bitstream.size);
ctx              2282 drivers/media/platform/coda/coda-bit.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              2297 drivers/media/platform/coda/coda-bit.c 				 ctx->psbuf.size);
ctx              2301 drivers/media/platform/coda/coda-bit.c 				 ctx->slicebuf.size);
ctx              2308 drivers/media/platform/coda/coda-bit.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              2344 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "prescan failed: %d\n", val);
ctx              2345 drivers/media/platform/coda/coda-bit.c 			ctx->hold = true;
ctx              2351 drivers/media/platform/coda/coda-bit.c 	if (ctx->use_vdoa &&
ctx              2352 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx >= 0 &&
ctx              2353 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx < ctx->num_internal_frames) {
ctx              2354 drivers/media/platform/coda/coda-bit.c 		err_vdoa = vdoa_wait_for_completion(ctx->vdoa);
ctx              2357 drivers/media/platform/coda/coda-bit.c 	ctx->frm_dis_flg = coda_read(dev,
ctx              2358 drivers/media/platform/coda/coda-bit.c 				     CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx));
ctx              2361 drivers/media/platform/coda/coda-bit.c 	if (ctx->display_idx >= 0 &&
ctx              2362 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx < ctx->num_internal_frames) {
ctx              2363 drivers/media/platform/coda/coda-bit.c 		ctx->frm_dis_flg &= ~(1 << ctx->display_idx);
ctx              2364 drivers/media/platform/coda/coda-bit.c 		coda_write(dev, ctx->frm_dis_flg,
ctx              2365 drivers/media/platform/coda/coda-bit.c 				CODA_REG_BIT_FRM_DIS_FLG(ctx->reg_idx));
ctx              2378 drivers/media/platform/coda/coda-bit.c 		if (display_idx >= 0 && display_idx < ctx->num_internal_frames)
ctx              2379 drivers/media/platform/coda/coda-bit.c 			ctx->sequence_offset++;
ctx              2380 drivers/media/platform/coda/coda-bit.c 		else if (ctx->display_idx < 0)
ctx              2381 drivers/media/platform/coda/coda-bit.c 			ctx->hold = true;
ctx              2383 drivers/media/platform/coda/coda-bit.c 		if (ctx->display_idx >= 0 &&
ctx              2384 drivers/media/platform/coda/coda-bit.c 		    ctx->display_idx < ctx->num_internal_frames)
ctx              2385 drivers/media/platform/coda/coda-bit.c 			ctx->sequence_offset++;
ctx              2387 drivers/media/platform/coda/coda-bit.c 	} else if (decoded_idx < 0 || decoded_idx >= ctx->num_internal_frames) {
ctx              2391 drivers/media/platform/coda/coda-bit.c 		decoded_frame = &ctx->internal_frames[decoded_idx];
ctx              2394 drivers/media/platform/coda/coda-bit.c 		if (ctx->sequence_offset == -1)
ctx              2395 drivers/media/platform/coda/coda-bit.c 			ctx->sequence_offset = val;
ctx              2396 drivers/media/platform/coda/coda-bit.c 		val -= ctx->sequence_offset;
ctx              2397 drivers/media/platform/coda/coda-bit.c 		spin_lock(&ctx->buffer_meta_lock);
ctx              2398 drivers/media/platform/coda/coda-bit.c 		if (!list_empty(&ctx->buffer_meta_list)) {
ctx              2399 drivers/media/platform/coda/coda-bit.c 			meta = list_first_entry(&ctx->buffer_meta_list,
ctx              2402 drivers/media/platform/coda/coda-bit.c 			ctx->num_metas--;
ctx              2403 drivers/media/platform/coda/coda-bit.c 			spin_unlock(&ctx->buffer_meta_lock);
ctx              2414 drivers/media/platform/coda/coda-bit.c 					 val, ctx->sequence_offset,
ctx              2420 drivers/media/platform/coda/coda-bit.c 			spin_unlock(&ctx->buffer_meta_lock);
ctx              2426 drivers/media/platform/coda/coda-bit.c 			ctx->sequence_offset++;
ctx              2429 drivers/media/platform/coda/coda-bit.c 		trace_coda_dec_pic_done(ctx, &decoded_frame->meta);
ctx              2444 drivers/media/platform/coda/coda-bit.c 		ctx->hold = true;
ctx              2447 drivers/media/platform/coda/coda-bit.c 	} else if (display_idx < 0 || display_idx >= ctx->num_internal_frames) {
ctx              2454 drivers/media/platform/coda/coda-bit.c 	if (ctx->display_idx >= 0 &&
ctx              2455 drivers/media/platform/coda/coda-bit.c 	    ctx->display_idx < ctx->num_internal_frames) {
ctx              2458 drivers/media/platform/coda/coda-bit.c 		ready_frame = &ctx->internal_frames[ctx->display_idx];
ctx              2460 drivers/media/platform/coda/coda-bit.c 		dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              2461 drivers/media/platform/coda/coda-bit.c 		dst_buf->sequence = ctx->osequence++;
ctx              2469 drivers/media/platform/coda/coda-bit.c 		if (meta->last && !coda_reorder_enable(ctx)) {
ctx              2474 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "last meta, marking as last frame\n");
ctx              2476 drivers/media/platform/coda/coda-bit.c 		} else if (ctx->bit_stream_param & CODA_BIT_STREAM_END_FLAG &&
ctx              2482 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx,
ctx              2489 drivers/media/platform/coda/coda-bit.c 		trace_coda_dec_rot_done(ctx, dst_buf, meta);
ctx              2495 drivers/media/platform/coda/coda-bit.c 			coda_m2m_buf_done(ctx, dst_buf, VB2_BUF_STATE_ERROR);
ctx              2497 drivers/media/platform/coda/coda-bit.c 			coda_m2m_buf_done(ctx, dst_buf, VB2_BUF_STATE_DONE);
ctx              2500 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "job finished: decoded %c frame %u, returned %c frame %u (%u/%u)%s\n",
ctx              2505 drivers/media/platform/coda/coda-bit.c 				 dst_buf->sequence, ctx->qsequence,
ctx              2509 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "job finished: no frame decoded (%d), returned %c frame %u (%u/%u)%s\n",
ctx              2513 drivers/media/platform/coda/coda-bit.c 				 dst_buf->sequence, ctx->qsequence,
ctx              2519 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "job finished: decoded %c frame %u, no frame returned (%d)\n",
ctx              2522 drivers/media/platform/coda/coda-bit.c 				 ctx->display_idx);
ctx              2524 drivers/media/platform/coda/coda-bit.c 			coda_dbg(1, ctx, "job finished: no frame decoded (%d) or returned (%d)\n",
ctx              2525 drivers/media/platform/coda/coda-bit.c 				 decoded_idx, ctx->display_idx);
ctx              2530 drivers/media/platform/coda/coda-bit.c 	ctx->display_idx = display_idx;
ctx              2539 drivers/media/platform/coda/coda-bit.c 	mutex_lock(&ctx->bitstream_mutex);
ctx              2540 drivers/media/platform/coda/coda-bit.c 	coda_fill_bitstream(ctx, NULL);
ctx              2541 drivers/media/platform/coda/coda-bit.c 	mutex_unlock(&ctx->bitstream_mutex);
ctx              2544 drivers/media/platform/coda/coda-bit.c static void coda_decode_timeout(struct coda_ctx *ctx)
ctx              2554 drivers/media/platform/coda/coda-bit.c 	if (!(ctx->bit_stream_param & CODA_BIT_STREAM_END_FLAG))
ctx              2557 drivers/media/platform/coda/coda-bit.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              2558 drivers/media/platform/coda/coda-bit.c 	dst_buf->sequence = ctx->qsequence - 1;
ctx              2560 drivers/media/platform/coda/coda-bit.c 	coda_m2m_buf_done(ctx, dst_buf, VB2_BUF_STATE_ERROR);
ctx              2578 drivers/media/platform/coda/coda-bit.c 	struct coda_ctx *ctx;
ctx              2586 drivers/media/platform/coda/coda-bit.c 	ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
ctx              2587 drivers/media/platform/coda/coda-bit.c 	if (ctx == NULL) {
ctx              2593 drivers/media/platform/coda/coda-bit.c 	trace_coda_bit_done(ctx);
ctx              2595 drivers/media/platform/coda/coda-bit.c 	if (ctx->aborting) {
ctx              2596 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "task has been aborted\n");
ctx              2599 drivers/media/platform/coda/coda-bit.c 	if (coda_isbusy(ctx->dev)) {
ctx              2600 drivers/media/platform/coda/coda-bit.c 		coda_dbg(1, ctx, "coda is still busy!!!!\n");
ctx              2604 drivers/media/platform/coda/coda-bit.c 	complete(&ctx->completion);
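
The coda-bit.c hits end in the interrupt path: the handler looks up whichever context the mem2mem framework currently has scheduled and wakes the picture-run worker through its completion. A minimal sketch of that handoff, with hypothetical my_dev/my_ctx structs standing in for the driver's own:

#include <linux/interrupt.h>
#include <linux/completion.h>
#include <media/v4l2-mem2mem.h>

struct my_ctx {
	struct completion completion;
};

struct my_dev {
	struct v4l2_m2m_dev *m2m_dev;
};

static irqreturn_t my_irq_handler(int irq, void *data)
{
	struct my_dev *dev = data;
	struct my_ctx *ctx;

	/* the m2m core tracks which context owns the running job */
	ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
	if (!ctx)
		return IRQ_HANDLED;	/* spurious: no job scheduled */

	/* wake the worker blocked in wait_for_completion_timeout() */
	complete(&ctx->completion);
	return IRQ_HANDLED;
}
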
ctx                92 drivers/media/platform/coda/coda-common.c void coda_write_base(struct coda_ctx *ctx, struct coda_q_data *q_data,
ctx               117 drivers/media/platform/coda/coda-common.c 	coda_write(ctx->dev, base_y, reg_y);
ctx               118 drivers/media/platform/coda/coda-common.c 	coda_write(ctx->dev, base_cb, reg_y + 4);
ctx               119 drivers/media/platform/coda/coda-common.c 	coda_write(ctx->dev, base_cr, reg_y + 8);
ctx               387 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               390 drivers/media/platform/coda/coda-common.c 	strscpy(cap->card, coda_product_name(ctx->dev->devtype->product),
ctx               401 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               415 drivers/media/platform/coda/coda-common.c 	if (!ctx->vdoa && f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
ctx               428 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               430 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, f->type);
ctx               441 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.colorspace	= ctx->colorspace;
ctx               442 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.xfer_func	= ctx->xfer_func;
ctx               443 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.ycbcr_enc	= ctx->ycbcr_enc;
ctx               444 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.quantization	= ctx->quantization;
ctx               449 drivers/media/platform/coda/coda-common.c static int coda_try_pixelformat(struct coda_ctx *ctx, struct v4l2_format *f)
ctx               456 drivers/media/platform/coda/coda-common.c 		formats = ctx->cvd->src_formats;
ctx               458 drivers/media/platform/coda/coda-common.c 		formats = ctx->cvd->dst_formats;
ctx               464 drivers/media/platform/coda/coda-common.c 		if (!ctx->vdoa && f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
ctx               475 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, f->type);
ctx               481 drivers/media/platform/coda/coda-common.c static int coda_try_fmt_vdoa(struct coda_ctx *ctx, struct v4l2_format *f,
ctx               492 drivers/media/platform/coda/coda-common.c 	if (!ctx->vdoa) {
ctx               508 drivers/media/platform/coda/coda-common.c static unsigned int coda_estimate_sizeimage(struct coda_ctx *ctx, u32 sizeimage,
ctx               520 drivers/media/platform/coda/coda-common.c static int coda_try_fmt(struct coda_ctx *ctx, const struct coda_codec *codec,
ctx               523 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx               569 drivers/media/platform/coda/coda-common.c 		f->fmt.pix.sizeimage = coda_estimate_sizeimage(ctx,
ctx               584 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               591 drivers/media/platform/coda/coda-common.c 	ret = coda_try_pixelformat(ctx, f);
ctx               595 drivers/media/platform/coda/coda-common.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               601 drivers/media/platform/coda/coda-common.c 	src_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               607 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx               608 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.xfer_func = ctx->xfer_func;
ctx               609 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
ctx               610 drivers/media/platform/coda/coda-common.c 	f->fmt.pix.quantization = ctx->quantization;
ctx               612 drivers/media/platform/coda/coda-common.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               613 drivers/media/platform/coda/coda-common.c 	codec = coda_find_codec(ctx->dev, q_data_src->fourcc,
ctx               618 drivers/media/platform/coda/coda-common.c 	ret = coda_try_fmt(ctx, codec, f);
ctx               629 drivers/media/platform/coda/coda-common.c 		ret = coda_try_fmt_vdoa(ctx, f, &use_vdoa);
ctx               666 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               667 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx               672 drivers/media/platform/coda/coda-common.c 	ret = coda_try_pixelformat(ctx, f);
ctx               679 drivers/media/platform/coda/coda-common.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               682 drivers/media/platform/coda/coda-common.c 	return coda_try_fmt(ctx, codec, f);
ctx               685 drivers/media/platform/coda/coda-common.c static int coda_s_fmt(struct coda_ctx *ctx, struct v4l2_format *f,
ctx               691 drivers/media/platform/coda/coda-common.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               695 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, f->type);
ctx               700 drivers/media/platform/coda/coda-common.c 		v4l2_err(&ctx->dev->v4l2_dev, "%s: %s queue busy: %d\n",
ctx               721 drivers/media/platform/coda/coda-common.c 		ctx->tiled_map_type = GDI_TILED_FRAME_MB_RASTER_MAP;
ctx               724 drivers/media/platform/coda/coda-common.c 		if (!disable_tiling && ctx->dev->devtype->product == CODA_960) {
ctx               725 drivers/media/platform/coda/coda-common.c 			ctx->tiled_map_type = GDI_TILED_FRAME_MB_RASTER_MAP;
ctx               731 drivers/media/platform/coda/coda-common.c 		ctx->tiled_map_type = GDI_LINEAR_FRAME_MAP;
ctx               737 drivers/media/platform/coda/coda-common.c 	if (ctx->tiled_map_type == GDI_TILED_FRAME_MB_RASTER_MAP &&
ctx               738 drivers/media/platform/coda/coda-common.c 	    !coda_try_fmt_vdoa(ctx, f, &ctx->use_vdoa) &&
ctx               739 drivers/media/platform/coda/coda-common.c 	    ctx->use_vdoa)
ctx               740 drivers/media/platform/coda/coda-common.c 		vdoa_context_configure(ctx->vdoa,
ctx               745 drivers/media/platform/coda/coda-common.c 		ctx->use_vdoa = false;
ctx               747 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "Setting %s format, wxh: %dx%d, fmt: %4.4s %c\n",
ctx               750 drivers/media/platform/coda/coda-common.c 		 (ctx->tiled_map_type == GDI_LINEAR_FRAME_MAP) ? 'L' : 'T');
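
coda_s_fmt resolves the vb2 queue for the requested buffer type, refuses format changes while buffers are allocated, then logs the chosen linear ('L') or tiled ('T') mapping. The busy guard is the standard vb2 idiom; a sketch under an invented helper name:

#include <linux/errno.h>
#include <linux/videodev2.h>
#include <media/videobuf2-core.h>
#include <media/v4l2-mem2mem.h>

static int my_s_fmt_check(struct v4l2_m2m_ctx *m2m_ctx,
			  enum v4l2_buf_type type)
{
	struct vb2_queue *vq = v4l2_m2m_get_vq(m2m_ctx, type);

	if (!vq)
		return -EINVAL;

	/* changing the format under already-allocated buffers is refused */
	if (vb2_is_busy(vq))
		return -EBUSY;

	return 0;
}
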
ctx               758 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               768 drivers/media/platform/coda/coda-common.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               774 drivers/media/platform/coda/coda-common.c 	ret = coda_s_fmt(ctx, f, &r);
ctx               778 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type != CODA_INST_ENCODER)
ctx               782 drivers/media/platform/coda/coda-common.c 	codec = coda_find_codec(ctx->dev, q_data_src->fourcc,
ctx               785 drivers/media/platform/coda/coda-common.c 		v4l2_err(&ctx->dev->v4l2_dev, "failed to determine codec\n");
ctx               788 drivers/media/platform/coda/coda-common.c 	ctx->codec = codec;
ctx               790 drivers/media/platform/coda/coda-common.c 	ctx->colorspace = f->fmt.pix.colorspace;
ctx               791 drivers/media/platform/coda/coda-common.c 	ctx->xfer_func = f->fmt.pix.xfer_func;
ctx               792 drivers/media/platform/coda/coda-common.c 	ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx               793 drivers/media/platform/coda/coda-common.c 	ctx->quantization = f->fmt.pix.quantization;
ctx               801 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               811 drivers/media/platform/coda/coda-common.c 	ret = coda_s_fmt(ctx, f, NULL);
ctx               815 drivers/media/platform/coda/coda-common.c 	ctx->colorspace = f->fmt.pix.colorspace;
ctx               816 drivers/media/platform/coda/coda-common.c 	ctx->xfer_func = f->fmt.pix.xfer_func;
ctx               817 drivers/media/platform/coda/coda-common.c 	ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx               818 drivers/media/platform/coda/coda-common.c 	ctx->quantization = f->fmt.pix.quantization;
ctx               820 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type != CODA_INST_DECODER)
ctx               824 drivers/media/platform/coda/coda-common.c 	codec = coda_find_codec(ctx->dev, f->fmt.pix.pixelformat,
ctx               827 drivers/media/platform/coda/coda-common.c 		v4l2_err(&ctx->dev->v4l2_dev, "failed to determine codec\n");
ctx               830 drivers/media/platform/coda/coda-common.c 	ctx->codec = codec;
ctx               832 drivers/media/platform/coda/coda-common.c 	dst_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               857 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               860 drivers/media/platform/coda/coda-common.c 	ret = v4l2_m2m_reqbufs(file, ctx->fh.m2m_ctx, rb);
ctx               868 drivers/media/platform/coda/coda-common.c 	if (rb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT && ctx->ops->reqbufs)
ctx               869 drivers/media/platform/coda/coda-common.c 		return ctx->ops->reqbufs(ctx, rb);
ctx               877 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               879 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER &&
ctx               883 drivers/media/platform/coda/coda-common.c 	return v4l2_m2m_qbuf(file, ctx->fh.m2m_ctx, buf);
ctx               888 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(priv);
ctx               891 drivers/media/platform/coda/coda-common.c 	ret = v4l2_m2m_dqbuf(file, ctx->fh.m2m_ctx, buf);
ctx               893 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER &&
ctx               900 drivers/media/platform/coda/coda-common.c void coda_m2m_buf_done(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx               908 drivers/media/platform/coda/coda-common.c 		v4l2_event_queue_fh(&ctx->fh, &eos_event);
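
coda_m2m_buf_done queues a V4L2_EVENT_EOS on the file handle when it hands back a buffer marked as the last one, so a poll()ing application learns about end-of-stream before the final DQBUF. The pattern reduced to its essentials (my_buf_done is a hypothetical wrapper, not the driver's exact logic):

#include <linux/videodev2.h>
#include <media/v4l2-event.h>
#include <media/v4l2-fh.h>
#include <media/videobuf2-v4l2.h>
#include <media/v4l2-mem2mem.h>

static void my_buf_done(struct v4l2_fh *fh, struct vb2_v4l2_buffer *buf,
			enum vb2_buffer_state state)
{
	const struct v4l2_event eos_event = {
		.type = V4L2_EVENT_EOS,
	};

	/* announce end-of-stream before returning the last buffer */
	if (buf->flags & V4L2_BUF_FLAG_LAST)
		v4l2_event_queue_fh(fh, &eos_event);

	v4l2_m2m_buf_done(buf, state);
}
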
ctx               916 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx               920 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, s->type);
ctx               960 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx               965 drivers/media/platform/coda/coda-common.c 		if (ctx->inst_type == CODA_INST_ENCODER &&
ctx               967 drivers/media/platform/coda/coda-common.c 			q_data = get_q_data(ctx, s->type);
ctx               986 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx, "Setting crop rectangle: %dx%d\n",
ctx              1004 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1006 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type != CODA_INST_ENCODER)
ctx              1012 drivers/media/platform/coda/coda-common.c static void coda_wake_up_capture_queue(struct coda_ctx *ctx)
ctx              1016 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "waking up capture queue\n");
ctx              1018 drivers/media/platform/coda/coda-common.c 	dst_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1026 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1034 drivers/media/platform/coda/coda-common.c 	mutex_lock(&ctx->wakeup_mutex);
ctx              1035 drivers/media/platform/coda/coda-common.c 	buf = v4l2_m2m_last_src_buf(ctx->fh.m2m_ctx);
ctx              1045 drivers/media/platform/coda/coda-common.c 		ctx->bit_stream_param |= CODA_BIT_STREAM_END_FLAG;
ctx              1052 drivers/media/platform/coda/coda-common.c 		coda_wake_up_capture_queue(ctx);
ctx              1054 drivers/media/platform/coda/coda-common.c 	mutex_unlock(&ctx->wakeup_mutex);
ctx              1062 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1064 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type != CODA_INST_DECODER)
ctx              1073 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1074 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx              1088 drivers/media/platform/coda/coda-common.c 		mutex_lock(&ctx->bitstream_mutex);
ctx              1089 drivers/media/platform/coda/coda-common.c 		coda_bitstream_flush(ctx);
ctx              1090 drivers/media/platform/coda/coda-common.c 		dst_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx              1093 drivers/media/platform/coda/coda-common.c 		ctx->bit_stream_param &= ~CODA_BIT_STREAM_END_FLAG;
ctx              1094 drivers/media/platform/coda/coda-common.c 		coda_fill_bitstream(ctx, NULL);
ctx              1095 drivers/media/platform/coda/coda-common.c 		mutex_unlock(&ctx->bitstream_mutex);
ctx              1102 drivers/media/platform/coda/coda-common.c 		buf = v4l2_m2m_last_src_buf(ctx->fh.m2m_ctx);
ctx              1104 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx, "marking last pending buffer\n");
ctx              1109 drivers/media/platform/coda/coda-common.c 			if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) == 0) {
ctx              1110 drivers/media/platform/coda/coda-common.c 				coda_dbg(1, ctx, "all remaining buffers queued\n");
ctx              1114 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx, "marking last meta\n");
ctx              1117 drivers/media/platform/coda/coda-common.c 			spin_lock(&ctx->buffer_meta_lock);
ctx              1118 drivers/media/platform/coda/coda-common.c 			if (!list_empty(&ctx->buffer_meta_list)) {
ctx              1121 drivers/media/platform/coda/coda-common.c 				meta = list_last_entry(&ctx->buffer_meta_list,
ctx              1129 drivers/media/platform/coda/coda-common.c 			spin_unlock(&ctx->buffer_meta_lock);
ctx              1133 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx, "all remaining buffers queued\n");
ctx              1136 drivers/media/platform/coda/coda-common.c 			coda_bit_stream_end_flag(ctx);
ctx              1137 drivers/media/platform/coda/coda-common.c 			ctx->hold = false;
ctx              1138 drivers/media/platform/coda/coda-common.c 			v4l2_m2m_try_schedule(ctx->fh.m2m_ctx);
ctx              1143 drivers/media/platform/coda/coda-common.c 			coda_wake_up_capture_queue(ctx);
ctx              1157 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1161 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type != CODA_INST_ENCODER)
ctx              1169 drivers/media/platform/coda/coda-common.c 		q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1170 drivers/media/platform/coda/coda-common.c 		codec = coda_find_codec(ctx->dev, fsize->pixel_format,
ctx              1173 drivers/media/platform/coda/coda-common.c 		codec = coda_find_codec(ctx->dev, V4L2_PIX_FMT_YUV420,
ctx              1193 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1200 drivers/media/platform/coda/coda-common.c 	if (!ctx->vdoa && f->pixel_format == V4L2_PIX_FMT_YUYV)
ctx              1204 drivers/media/platform/coda/coda-common.c 		if (f->pixel_format == ctx->cvd->src_formats[i] ||
ctx              1205 drivers/media/platform/coda/coda-common.c 		    f->pixel_format == ctx->cvd->dst_formats[i])
ctx              1224 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1232 drivers/media/platform/coda/coda-common.c 	tpf->denominator = ctx->params.framerate & CODA_FRATE_RES_MASK;
ctx              1233 drivers/media/platform/coda/coda-common.c 	tpf->numerator = 1 + (ctx->params.framerate >>
ctx              1307 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1316 drivers/media/platform/coda/coda-common.c 	ctx->params.framerate = coda_timeperframe_to_frate(tpf);
ctx              1317 drivers/media/platform/coda/coda-common.c 	ctx->params.framerate_changed = true;
ctx              1325 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(fh);
ctx              1331 drivers/media/platform/coda/coda-common.c 		if (ctx->inst_type == CODA_INST_DECODER)
ctx              1389 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = m2m_priv;
ctx              1390 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx              1392 drivers/media/platform/coda/coda-common.c 	queue_work(dev->workqueue, &ctx->pic_run_work);
ctx              1397 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = container_of(work, struct coda_ctx, pic_run_work);
ctx              1398 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx              1401 drivers/media/platform/coda/coda-common.c 	mutex_lock(&ctx->buffer_mutex);
ctx              1404 drivers/media/platform/coda/coda-common.c 	ret = ctx->ops->prepare_run(ctx);
ctx              1405 drivers/media/platform/coda/coda-common.c 	if (ret < 0 && ctx->inst_type == CODA_INST_DECODER) {
ctx              1407 drivers/media/platform/coda/coda-common.c 		mutex_unlock(&ctx->buffer_mutex);
ctx              1412 drivers/media/platform/coda/coda-common.c 	if (!wait_for_completion_timeout(&ctx->completion,
ctx              1416 drivers/media/platform/coda/coda-common.c 		ctx->hold = true;
ctx              1418 drivers/media/platform/coda/coda-common.c 		coda_hw_reset(ctx);
ctx              1420 drivers/media/platform/coda/coda-common.c 		if (ctx->ops->run_timeout)
ctx              1421 drivers/media/platform/coda/coda-common.c 			ctx->ops->run_timeout(ctx);
ctx              1422 drivers/media/platform/coda/coda-common.c 	} else if (!ctx->aborting) {
ctx              1423 drivers/media/platform/coda/coda-common.c 		ctx->ops->finish_run(ctx);
ctx              1426 drivers/media/platform/coda/coda-common.c 	if ((ctx->aborting || (!ctx->streamon_cap && !ctx->streamon_out)) &&
ctx              1427 drivers/media/platform/coda/coda-common.c 	    ctx->ops->seq_end_work)
ctx              1428 drivers/media/platform/coda/coda-common.c 		queue_work(dev->workqueue, &ctx->seq_end_work);
ctx              1431 drivers/media/platform/coda/coda-common.c 	mutex_unlock(&ctx->buffer_mutex);
ctx              1433 drivers/media/platform/coda/coda-common.c 	v4l2_m2m_job_finish(ctx->dev->m2m_dev, ctx->fh.m2m_ctx);
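
coda_pic_run_work is the deferred half of a job: prepare_run() kicks the hardware under buffer_mutex, the worker then sleeps on the completion signaled from the interrupt handler, and a timeout forces a hardware reset instead of finish_run(). A skeleton of the same shape (the timeout value and reset helper are made up for the sketch):

#include <linux/workqueue.h>
#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/mutex.h>

#define MY_PIC_RUN_TIMEOUT_MS	1000	/* illustrative, not coda's value */

struct my_ctx {
	struct work_struct pic_run_work;
	struct completion completion;
	struct mutex buffer_mutex;
};

static void my_pic_run_work(struct work_struct *work)
{
	struct my_ctx *ctx = container_of(work, struct my_ctx, pic_run_work);

	mutex_lock(&ctx->buffer_mutex);

	/* ... program the codec and start the run (prepare_run) ... */

	if (!wait_for_completion_timeout(&ctx->completion,
			msecs_to_jiffies(MY_PIC_RUN_TIMEOUT_MS))) {
		/* no done-interrupt arrived in time: recover the hardware */
		/* my_hw_reset(ctx); */
	} else {
		/* ... collect results from the hardware (finish_run) ... */
	}

	mutex_unlock(&ctx->buffer_mutex);
}
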
ctx              1438 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = m2m_priv;
ctx              1439 drivers/media/platform/coda/coda-common.c 	int src_bufs = v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx);
ctx              1446 drivers/media/platform/coda/coda-common.c 	if (!src_bufs && ctx->inst_type != CODA_INST_DECODER) {
ctx              1447 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "not ready: not enough vid-out buffers.\n");
ctx              1451 drivers/media/platform/coda/coda-common.c 	if (!v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx)) {
ctx              1452 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "not ready: not enough vid-cap buffers.\n");
ctx              1456 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER && ctx->use_bit) {
ctx              1457 drivers/media/platform/coda/coda-common.c 		bool stream_end = ctx->bit_stream_param &
ctx              1459 drivers/media/platform/coda/coda-common.c 		int num_metas = ctx->num_metas;
ctx              1463 drivers/media/platform/coda/coda-common.c 		count = hweight32(ctx->frm_dis_flg);
ctx              1464 drivers/media/platform/coda/coda-common.c 		if (ctx->use_vdoa && count >= (ctx->num_internal_frames - 1)) {
ctx              1465 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx,
ctx              1467 drivers/media/platform/coda/coda-common.c 				 count, ctx->num_internal_frames,
ctx              1468 drivers/media/platform/coda/coda-common.c 				 ctx->frm_dis_flg);
ctx              1472 drivers/media/platform/coda/coda-common.c 		if (ctx->hold && !src_bufs) {
ctx              1473 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx,
ctx              1479 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx,
ctx              1485 drivers/media/platform/coda/coda-common.c 		meta = list_first_entry(&ctx->buffer_meta_list,
ctx              1487 drivers/media/platform/coda/coda-common.c 		if (!coda_bitstream_can_fetch_past(ctx, meta->end) &&
ctx              1489 drivers/media/platform/coda/coda-common.c 			coda_dbg(1, ctx,
ctx              1491 drivers/media/platform/coda/coda-common.c 				 meta->end, ctx->bitstream_fifo.kfifo.in);
ctx              1496 drivers/media/platform/coda/coda-common.c 	if (ctx->aborting) {
ctx              1497 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "not ready: aborting\n");
ctx              1501 drivers/media/platform/coda/coda-common.c 	coda_dbg(2, ctx, "job ready\n");
ctx              1508 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = priv;
ctx              1510 drivers/media/platform/coda/coda-common.c 	ctx->aborting = 1;
ctx              1512 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "job abort\n");
ctx              1521 drivers/media/platform/coda/coda-common.c static void set_default_params(struct coda_ctx *ctx)
ctx              1525 drivers/media/platform/coda/coda-common.c 	ctx->codec = coda_find_codec(ctx->dev, ctx->cvd->src_formats[0],
ctx              1526 drivers/media/platform/coda/coda-common.c 				     ctx->cvd->dst_formats[0]);
ctx              1527 drivers/media/platform/coda/coda-common.c 	max_w = min(ctx->codec->max_w, 1920U);
ctx              1528 drivers/media/platform/coda/coda-common.c 	max_h = min(ctx->codec->max_h, 1088U);
ctx              1530 drivers/media/platform/coda/coda-common.c 	csize = coda_estimate_sizeimage(ctx, usize, max_w, max_h);
ctx              1532 drivers/media/platform/coda/coda-common.c 	ctx->params.codec_mode = ctx->codec->mode;
ctx              1533 drivers/media/platform/coda/coda-common.c 	if (ctx->cvd->src_formats[0] == V4L2_PIX_FMT_JPEG)
ctx              1534 drivers/media/platform/coda/coda-common.c 		ctx->colorspace = V4L2_COLORSPACE_JPEG;
ctx              1536 drivers/media/platform/coda/coda-common.c 		ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx              1537 drivers/media/platform/coda/coda-common.c 	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
ctx              1538 drivers/media/platform/coda/coda-common.c 	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
ctx              1539 drivers/media/platform/coda/coda-common.c 	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;
ctx              1540 drivers/media/platform/coda/coda-common.c 	ctx->params.framerate = 30;
ctx              1543 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_SRC].fourcc = ctx->cvd->src_formats[0];
ctx              1544 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_DST].fourcc = ctx->cvd->dst_formats[0];
ctx              1545 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_SRC].width = max_w;
ctx              1546 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_SRC].height = max_h;
ctx              1547 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_DST].width = max_w;
ctx              1548 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_DST].height = max_h;
ctx              1549 drivers/media/platform/coda/coda-common.c 	if (ctx->codec->src_fourcc == V4L2_PIX_FMT_YUV420) {
ctx              1550 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_SRC].bytesperline = max_w;
ctx              1551 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_SRC].sizeimage = usize;
ctx              1552 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_DST].bytesperline = 0;
ctx              1553 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_DST].sizeimage = csize;
ctx              1555 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_SRC].bytesperline = 0;
ctx              1556 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_SRC].sizeimage = csize;
ctx              1557 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_DST].bytesperline = max_w;
ctx              1558 drivers/media/platform/coda/coda-common.c 		ctx->q_data[V4L2_M2M_DST].sizeimage = usize;
ctx              1560 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_SRC].rect.width = max_w;
ctx              1561 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_SRC].rect.height = max_h;
ctx              1562 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_DST].rect.width = max_w;
ctx              1563 drivers/media/platform/coda/coda-common.c 	ctx->q_data[V4L2_M2M_DST].rect.height = max_h;
ctx              1569 drivers/media/platform/coda/coda-common.c 	ctx->tiled_map_type = GDI_LINEAR_FRAME_MAP;
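
set_default_params clamps the default frame size to 1920x1088 and sizes the uncompressed buffer from it. With 4:2:0 subsampling the Y plane takes width*height bytes and each of the two chroma planes a quarter of that, hence the usual w*h*3/2. A standalone check of the arithmetic (plain C, not driver code):

#include <stdio.h>

int main(void)
{
	unsigned int w = 1920, h = 1088;
	/* YUV 4:2:0: one full-resolution Y plane plus two quarter-size
	 * chroma planes = 1.5 bytes per pixel */
	unsigned int usize = w * h * 3 / 2;

	printf("%ux%u YUV420 image: %u bytes\n", w, h, usize); /* 3133440 */
	return 0;
}
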
ctx              1579 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1583 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, vq->type);
ctx              1592 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "get %d buffer(s) of size %d each.\n", *nbuffers,
ctx              1601 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1604 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1609 drivers/media/platform/coda/coda-common.c 			v4l2_warn(&ctx->dev->v4l2_dev,
ctx              1616 drivers/media/platform/coda/coda-common.c 		v4l2_warn(&ctx->dev->v4l2_dev,
ctx              1652 drivers/media/platform/coda/coda-common.c void coda_update_profile_level_ctrls(struct coda_ctx *ctx, u8 profile_idc,
ctx              1665 drivers/media/platform/coda/coda-common.c 	switch (ctx->codec->src_fourcc) {
ctx              1670 drivers/media/platform/coda/coda-common.c 		profile_ctrl = ctx->h264_profile_ctrl;
ctx              1671 drivers/media/platform/coda/coda-common.c 		level_ctrl = ctx->h264_level_ctrl;
ctx              1679 drivers/media/platform/coda/coda-common.c 		profile_ctrl = ctx->mpeg2_profile_ctrl;
ctx              1680 drivers/media/platform/coda/coda-common.c 		level_ctrl = ctx->mpeg2_level_ctrl;
ctx              1688 drivers/media/platform/coda/coda-common.c 		profile_ctrl = ctx->mpeg4_profile_ctrl;
ctx              1689 drivers/media/platform/coda/coda-common.c 		level_ctrl = ctx->mpeg4_level_ctrl;
ctx              1701 drivers/media/platform/coda/coda-common.c 		v4l2_warn(&ctx->dev->v4l2_dev, "Invalid %s profile: %u\n",
ctx              1704 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "Parsed %s profile: %s\n", codec_name,
ctx              1710 drivers/media/platform/coda/coda-common.c 		v4l2_warn(&ctx->dev->v4l2_dev, "Invalid %s level: %u\n",
ctx              1713 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "Parsed %s level: %s\n", codec_name,
ctx              1719 drivers/media/platform/coda/coda-common.c static void coda_queue_source_change_event(struct coda_ctx *ctx)
ctx              1726 drivers/media/platform/coda/coda-common.c 	v4l2_event_queue_fh(&ctx->fh, &source_change_event);
ctx              1732 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1736 drivers/media/platform/coda/coda-common.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1742 drivers/media/platform/coda/coda-common.c 	if (ctx->bitstream.size && vq->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
ctx              1748 drivers/media/platform/coda/coda-common.c 			coda_bit_stream_end_flag(ctx);
ctx              1757 drivers/media/platform/coda/coda-common.c 			if (!ctx->params.h264_profile_idc) {
ctx              1758 drivers/media/platform/coda/coda-common.c 				coda_sps_parse_profile(ctx, vb);
ctx              1759 drivers/media/platform/coda/coda-common.c 				coda_update_profile_level_ctrls(ctx,
ctx              1760 drivers/media/platform/coda/coda-common.c 						ctx->params.h264_profile_idc,
ctx              1761 drivers/media/platform/coda/coda-common.c 						ctx->params.h264_level_idc);
ctx              1765 drivers/media/platform/coda/coda-common.c 		mutex_lock(&ctx->bitstream_mutex);
ctx              1766 drivers/media/platform/coda/coda-common.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1769 drivers/media/platform/coda/coda-common.c 			coda_fill_bitstream(ctx, NULL);
ctx              1770 drivers/media/platform/coda/coda-common.c 		mutex_unlock(&ctx->bitstream_mutex);
ctx              1772 drivers/media/platform/coda/coda-common.c 		if (!ctx->initialized) {
ctx              1778 drivers/media/platform/coda/coda-common.c 			    ctx->ops->seq_init_work) {
ctx              1779 drivers/media/platform/coda/coda-common.c 				queue_work(ctx->dev->workqueue,
ctx              1780 drivers/media/platform/coda/coda-common.c 					   &ctx->seq_init_work);
ctx              1781 drivers/media/platform/coda/coda-common.c 				flush_work(&ctx->seq_init_work);
ctx              1784 drivers/media/platform/coda/coda-common.c 			if (ctx->initialized)
ctx              1785 drivers/media/platform/coda/coda-common.c 				coda_queue_source_change_event(ctx);
ctx              1788 drivers/media/platform/coda/coda-common.c 		if (ctx->inst_type == CODA_INST_ENCODER &&
ctx              1790 drivers/media/platform/coda/coda-common.c 			vbuf->sequence = ctx->qsequence++;
ctx              1791 drivers/media/platform/coda/coda-common.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1836 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = vb2_get_drv_priv(q);
ctx              1837 drivers/media/platform/coda/coda-common.c 	struct v4l2_device *v4l2_dev = &ctx->dev->v4l2_dev;
ctx              1847 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "start streaming %s\n", v4l2_type_names[q->type]);
ctx              1851 drivers/media/platform/coda/coda-common.c 	q_data_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx              1853 drivers/media/platform/coda/coda-common.c 		if (ctx->inst_type == CODA_INST_DECODER && ctx->use_bit) {
ctx              1855 drivers/media/platform/coda/coda-common.c 			mutex_lock(&ctx->bitstream_mutex);
ctx              1856 drivers/media/platform/coda/coda-common.c 			coda_fill_bitstream(ctx, &list);
ctx              1857 drivers/media/platform/coda/coda-common.c 			mutex_unlock(&ctx->bitstream_mutex);
ctx              1859 drivers/media/platform/coda/coda-common.c 			if (ctx->dev->devtype->product != CODA_960 &&
ctx              1860 drivers/media/platform/coda/coda-common.c 			    coda_get_bitstream_payload(ctx) < 512) {
ctx              1866 drivers/media/platform/coda/coda-common.c 			if (!ctx->initialized) {
ctx              1868 drivers/media/platform/coda/coda-common.c 				if (ctx->ops->seq_init_work) {
ctx              1869 drivers/media/platform/coda/coda-common.c 					queue_work(ctx->dev->workqueue,
ctx              1870 drivers/media/platform/coda/coda-common.c 						   &ctx->seq_init_work);
ctx              1871 drivers/media/platform/coda/coda-common.c 					flush_work(&ctx->seq_init_work);
ctx              1876 drivers/media/platform/coda/coda-common.c 		ctx->streamon_out = 1;
ctx              1878 drivers/media/platform/coda/coda-common.c 		ctx->streamon_cap = 1;
ctx              1882 drivers/media/platform/coda/coda-common.c 	if (!(ctx->streamon_out && ctx->streamon_cap))
ctx              1885 drivers/media/platform/coda/coda-common.c 	q_data_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1898 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER && ctx->use_bit)
ctx              1899 drivers/media/platform/coda/coda-common.c 		v4l2_m2m_set_src_buffered(ctx->fh.m2m_ctx, true);
ctx              1901 drivers/media/platform/coda/coda-common.c 	ctx->gopcounter = ctx->params.gop_size - 1;
ctx              1904 drivers/media/platform/coda/coda-common.c 		ctx->params.gop_size = 1;
ctx              1905 drivers/media/platform/coda/coda-common.c 	ctx->gopcounter = ctx->params.gop_size - 1;
ctx              1907 drivers/media/platform/coda/coda-common.c 	ret = ctx->ops->start_streaming(ctx);
ctx              1908 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER) {
ctx              1930 drivers/media/platform/coda/coda-common.c 		while ((buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1933 drivers/media/platform/coda/coda-common.c 		while ((buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx              1941 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = vb2_get_drv_priv(q);
ctx              1942 drivers/media/platform/coda/coda-common.c 	struct coda_dev *dev = ctx->dev;
ctx              1946 drivers/media/platform/coda/coda-common.c 	stop = ctx->streamon_out && ctx->streamon_cap;
ctx              1948 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "stop streaming %s\n", v4l2_type_names[q->type]);
ctx              1951 drivers/media/platform/coda/coda-common.c 		ctx->streamon_out = 0;
ctx              1953 drivers/media/platform/coda/coda-common.c 		coda_bit_stream_end_flag(ctx);
ctx              1955 drivers/media/platform/coda/coda-common.c 		ctx->qsequence = 0;
ctx              1957 drivers/media/platform/coda/coda-common.c 		while ((buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1960 drivers/media/platform/coda/coda-common.c 		ctx->streamon_cap = 0;
ctx              1962 drivers/media/platform/coda/coda-common.c 		ctx->osequence = 0;
ctx              1963 drivers/media/platform/coda/coda-common.c 		ctx->sequence_offset = 0;
ctx              1965 drivers/media/platform/coda/coda-common.c 		while ((buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx              1972 drivers/media/platform/coda/coda-common.c 		if (ctx->ops->seq_end_work) {
ctx              1973 drivers/media/platform/coda/coda-common.c 			queue_work(dev->workqueue, &ctx->seq_end_work);
ctx              1974 drivers/media/platform/coda/coda-common.c 			flush_work(&ctx->seq_end_work);
ctx              1976 drivers/media/platform/coda/coda-common.c 		spin_lock(&ctx->buffer_meta_lock);
ctx              1977 drivers/media/platform/coda/coda-common.c 		while (!list_empty(&ctx->buffer_meta_list)) {
ctx              1978 drivers/media/platform/coda/coda-common.c 			meta = list_first_entry(&ctx->buffer_meta_list,
ctx              1983 drivers/media/platform/coda/coda-common.c 		ctx->num_metas = 0;
ctx              1984 drivers/media/platform/coda/coda-common.c 		spin_unlock(&ctx->buffer_meta_lock);
ctx              1985 drivers/media/platform/coda/coda-common.c 		kfifo_init(&ctx->bitstream_fifo,
ctx              1986 drivers/media/platform/coda/coda-common.c 			ctx->bitstream.vaddr, ctx->bitstream.size);
ctx              1987 drivers/media/platform/coda/coda-common.c 		ctx->runcounter = 0;
ctx              1988 drivers/media/platform/coda/coda-common.c 		ctx->aborting = 0;
ctx              1989 drivers/media/platform/coda/coda-common.c 		ctx->hold = false;
ctx              1992 drivers/media/platform/coda/coda-common.c 	if (!ctx->streamon_out && !ctx->streamon_cap)
ctx              1993 drivers/media/platform/coda/coda-common.c 		ctx->bit_stream_param &= ~CODA_BIT_STREAM_END_FLAG;
ctx              2009 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx =
ctx              2013 drivers/media/platform/coda/coda-common.c 		coda_dbg(2, ctx, "s_ctrl: id = 0x%x, name = \"%s\", val = %d (\"%s\")\n",
ctx              2016 drivers/media/platform/coda/coda-common.c 		coda_dbg(2, ctx, "s_ctrl: id = 0x%x, name = \"%s\", val = %d\n",
ctx              2022 drivers/media/platform/coda/coda-common.c 			ctx->params.rot_mode |= CODA_MIR_HOR;
ctx              2024 drivers/media/platform/coda/coda-common.c 			ctx->params.rot_mode &= ~CODA_MIR_HOR;
ctx              2028 drivers/media/platform/coda/coda-common.c 			ctx->params.rot_mode |= CODA_MIR_VER;
ctx              2030 drivers/media/platform/coda/coda-common.c 			ctx->params.rot_mode &= ~CODA_MIR_VER;
ctx              2033 drivers/media/platform/coda/coda-common.c 		ctx->params.bitrate = ctrl->val / 1000;
ctx              2034 drivers/media/platform/coda/coda-common.c 		ctx->params.bitrate_changed = true;
ctx              2037 drivers/media/platform/coda/coda-common.c 		ctx->params.gop_size = ctrl->val;
ctx              2040 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_intra_qp = ctrl->val;
ctx              2041 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_intra_qp_changed = true;
ctx              2044 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_inter_qp = ctrl->val;
ctx              2047 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_min_qp = ctrl->val;
ctx              2050 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_max_qp = ctrl->val;
ctx              2053 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_slice_alpha_c0_offset_div2 = ctrl->val;
ctx              2056 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_slice_beta_offset_div2 = ctrl->val;
ctx              2059 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_disable_deblocking_filter_idc = ctrl->val;
ctx              2062 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_constrained_intra_pred_flag = ctrl->val;
ctx              2065 drivers/media/platform/coda/coda-common.c 		ctx->params.h264_chroma_qp_index_offset = ctrl->val;
ctx              2069 drivers/media/platform/coda/coda-common.c 		if (ctx->inst_type == CODA_INST_ENCODER)
ctx              2070 drivers/media/platform/coda/coda-common.c 			ctx->params.h264_profile_idc = 66;
ctx              2076 drivers/media/platform/coda/coda-common.c 		ctx->params.mpeg4_intra_qp = ctrl->val;
ctx              2079 drivers/media/platform/coda/coda-common.c 		ctx->params.mpeg4_inter_qp = ctrl->val;
ctx              2088 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_mode = ctrl->val;
ctx              2089 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_mode_changed = true;
ctx              2092 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_max_mb = ctrl->val;
ctx              2093 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_mode_changed = true;
ctx              2096 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_max_bits = ctrl->val * 8;
ctx              2097 drivers/media/platform/coda/coda-common.c 		ctx->params.slice_mode_changed = true;
ctx              2102 drivers/media/platform/coda/coda-common.c 		ctx->params.intra_refresh = ctrl->val;
ctx              2103 drivers/media/platform/coda/coda-common.c 		ctx->params.intra_refresh_changed = true;
ctx              2106 drivers/media/platform/coda/coda-common.c 		ctx->params.force_ipicture = true;
ctx              2109 drivers/media/platform/coda/coda-common.c 		coda_set_jpeg_compression_quality(ctx, ctrl->val);
ctx              2112 drivers/media/platform/coda/coda-common.c 		ctx->params.jpeg_restart_interval = ctrl->val;
ctx              2115 drivers/media/platform/coda/coda-common.c 		ctx->params.vbv_delay = ctrl->val;
ctx              2118 drivers/media/platform/coda/coda-common.c 		ctx->params.vbv_size = min(ctrl->val * 8192, 0x7fffffff);
ctx              2121 drivers/media/platform/coda/coda-common.c 		coda_dbg(1, ctx, "Invalid control, id=%d, val=%d\n",
ctx              2133 drivers/media/platform/coda/coda-common.c static void coda_encode_ctrls(struct coda_ctx *ctx)
ctx              2135 drivers/media/platform/coda/coda-common.c 	int max_gop_size = (ctx->dev->devtype->product == CODA_DX6) ? 60 : 99;
ctx              2137 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2139 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2141 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2143 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2145 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product != CODA_960) {
ctx              2146 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2149 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2151 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2153 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2155 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2159 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2162 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2164 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2168 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product == CODA_HX4 ||
ctx              2169 drivers/media/platform/coda/coda-common.c 	    ctx->dev->devtype->product == CODA_7541) {
ctx              2170 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2178 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product == CODA_960) {
ctx              2179 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2189 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2191 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2193 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2197 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product == CODA_HX4 ||
ctx              2198 drivers/media/platform/coda/coda-common.c 	    ctx->dev->devtype->product == CODA_7541 ||
ctx              2199 drivers/media/platform/coda/coda-common.c 	    ctx->dev->devtype->product == CODA_960) {
ctx              2200 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2206 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2210 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2212 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2215 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std_menu(&ctx->ctrls, &coda_ctrl_ops,
ctx              2220 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2223 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2229 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2233 drivers/media/platform/coda/coda-common.c static void coda_jpeg_encode_ctrls(struct coda_ctx *ctx)
ctx              2235 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2237 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2241 drivers/media/platform/coda/coda-common.c static void coda_decode_ctrls(struct coda_ctx *ctx)
ctx              2245 drivers/media/platform/coda/coda-common.c 	ctx->h264_profile_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2252 drivers/media/platform/coda/coda-common.c 	if (ctx->h264_profile_ctrl)
ctx              2253 drivers/media/platform/coda/coda-common.c 		ctx->h264_profile_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2255 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product == CODA_HX4 ||
ctx              2256 drivers/media/platform/coda/coda-common.c 	    ctx->dev->devtype->product == CODA_7541)
ctx              2258 drivers/media/platform/coda/coda-common.c 	else if (ctx->dev->devtype->product == CODA_960)
ctx              2262 drivers/media/platform/coda/coda-common.c 	ctx->h264_level_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2264 drivers/media/platform/coda/coda-common.c 	if (ctx->h264_level_ctrl)
ctx              2265 drivers/media/platform/coda/coda-common.c 		ctx->h264_level_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2267 drivers/media/platform/coda/coda-common.c 	ctx->mpeg2_profile_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2271 drivers/media/platform/coda/coda-common.c 	if (ctx->mpeg2_profile_ctrl)
ctx              2272 drivers/media/platform/coda/coda-common.c 		ctx->mpeg2_profile_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2274 drivers/media/platform/coda/coda-common.c 	ctx->mpeg2_level_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2278 drivers/media/platform/coda/coda-common.c 	if (ctx->mpeg2_level_ctrl)
ctx              2279 drivers/media/platform/coda/coda-common.c 		ctx->mpeg2_level_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2281 drivers/media/platform/coda/coda-common.c 	ctx->mpeg4_profile_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2285 drivers/media/platform/coda/coda-common.c 	if (ctx->mpeg4_profile_ctrl)
ctx              2286 drivers/media/platform/coda/coda-common.c 		ctx->mpeg4_profile_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2288 drivers/media/platform/coda/coda-common.c 	ctx->mpeg4_level_ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrls,
ctx              2292 drivers/media/platform/coda/coda-common.c 	if (ctx->mpeg4_level_ctrl)
ctx              2293 drivers/media/platform/coda/coda-common.c 		ctx->mpeg4_level_ctrl->flags |= V4L2_CTRL_FLAG_READ_ONLY;
ctx              2296 drivers/media/platform/coda/coda-common.c static int coda_ctrls_setup(struct coda_ctx *ctx)
ctx              2298 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_handler_init(&ctx->ctrls, 2);
ctx              2300 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2302 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2304 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_ENCODER) {
ctx              2305 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2308 drivers/media/platform/coda/coda-common.c 		if (ctx->cvd->dst_formats[0] == V4L2_PIX_FMT_JPEG)
ctx              2309 drivers/media/platform/coda/coda-common.c 			coda_jpeg_encode_ctrls(ctx);
ctx              2311 drivers/media/platform/coda/coda-common.c 			coda_encode_ctrls(ctx);
ctx              2313 drivers/media/platform/coda/coda-common.c 		v4l2_ctrl_new_std(&ctx->ctrls, &coda_ctrl_ops,
ctx              2316 drivers/media/platform/coda/coda-common.c 		if (ctx->cvd->src_formats[0] == V4L2_PIX_FMT_H264)
ctx              2317 drivers/media/platform/coda/coda-common.c 			coda_decode_ctrls(ctx);
ctx              2320 drivers/media/platform/coda/coda-common.c 	if (ctx->ctrls.error) {
ctx              2321 drivers/media/platform/coda/coda-common.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx              2323 drivers/media/platform/coda/coda-common.c 			ctx->ctrls.error);
ctx              2327 drivers/media/platform/coda/coda-common.c 	return v4l2_ctrl_handler_setup(&ctx->ctrls);
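
coda_ctrls_setup follows the canonical v4l2-ctrls sequence: init the handler with a size hint, add controls via v4l2_ctrl_new_std()/v4l2_ctrl_new_std_menu(), check the handler's latched error once, then apply all defaults through v4l2_ctrl_handler_setup(), which invokes s_ctrl for every control. A minimal version with two standard controls:

#include <media/v4l2-ctrls.h>

static int my_ctrls_setup(struct v4l2_ctrl_handler *hdl,
			  const struct v4l2_ctrl_ops *ops)
{
	v4l2_ctrl_handler_init(hdl, 2);

	v4l2_ctrl_new_std(hdl, ops, V4L2_CID_HFLIP, 0, 1, 1, 0);
	v4l2_ctrl_new_std(hdl, ops, V4L2_CID_VFLIP, 0, 1, 1, 0);

	/* each new_std call latches its first failure in hdl->error,
	 * so a single check after all additions is sufficient */
	if (hdl->error)
		return hdl->error;

	/* run s_ctrl once per control to push the default values */
	return v4l2_ctrl_handler_setup(hdl);
}
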
ctx              2330 drivers/media/platform/coda/coda-common.c static int coda_queue_init(struct coda_ctx *ctx, struct vb2_queue *vq)
ctx              2332 drivers/media/platform/coda/coda-common.c 	vq->drv_priv = ctx;
ctx              2336 drivers/media/platform/coda/coda-common.c 	vq->lock = &ctx->dev->dev_mutex;
ctx              2350 drivers/media/platform/coda/coda-common.c 	vq->dev = ctx->dev->dev;
ctx              2403 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx;
ctx              2409 drivers/media/platform/coda/coda-common.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2410 drivers/media/platform/coda/coda-common.c 	if (!ctx)
ctx              2427 drivers/media/platform/coda/coda-common.c 	ctx->debugfs_entry = debugfs_create_dir(name, dev->debugfs_root);
ctx              2430 drivers/media/platform/coda/coda-common.c 	ctx->cvd = to_coda_video_device(vdev);
ctx              2431 drivers/media/platform/coda/coda-common.c 	ctx->inst_type = ctx->cvd->type;
ctx              2432 drivers/media/platform/coda/coda-common.c 	ctx->ops = ctx->cvd->ops;
ctx              2433 drivers/media/platform/coda/coda-common.c 	ctx->use_bit = !ctx->cvd->direct;
ctx              2434 drivers/media/platform/coda/coda-common.c 	init_completion(&ctx->completion);
ctx              2435 drivers/media/platform/coda/coda-common.c 	INIT_WORK(&ctx->pic_run_work, coda_pic_run_work);
ctx              2436 drivers/media/platform/coda/coda-common.c 	if (ctx->ops->seq_init_work)
ctx              2437 drivers/media/platform/coda/coda-common.c 		INIT_WORK(&ctx->seq_init_work, ctx->ops->seq_init_work);
ctx              2438 drivers/media/platform/coda/coda-common.c 	if (ctx->ops->seq_end_work)
ctx              2439 drivers/media/platform/coda/coda-common.c 		INIT_WORK(&ctx->seq_end_work, ctx->ops->seq_end_work);
ctx              2440 drivers/media/platform/coda/coda-common.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              2441 drivers/media/platform/coda/coda-common.c 	file->private_data = &ctx->fh;
ctx              2442 drivers/media/platform/coda/coda-common.c 	v4l2_fh_add(&ctx->fh);
ctx              2443 drivers/media/platform/coda/coda-common.c 	ctx->dev = dev;
ctx              2444 drivers/media/platform/coda/coda-common.c 	ctx->idx = idx;
ctx              2446 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "open instance (%p)\n", ctx);
ctx              2456 drivers/media/platform/coda/coda-common.c 		if (enable_bwb || ctx->inst_type == CODA_INST_ENCODER)
ctx              2457 drivers/media/platform/coda/coda-common.c 			ctx->frame_mem_ctrl = CODA9_FRAME_ENABLE_BWB;
ctx              2461 drivers/media/platform/coda/coda-common.c 		ctx->reg_idx = 0;
ctx              2464 drivers/media/platform/coda/coda-common.c 		ctx->reg_idx = idx;
ctx              2466 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->vdoa && !disable_vdoa) {
ctx              2467 drivers/media/platform/coda/coda-common.c 		ctx->vdoa = vdoa_context_create(dev->vdoa);
ctx              2468 drivers/media/platform/coda/coda-common.c 		if (!ctx->vdoa)
ctx              2472 drivers/media/platform/coda/coda-common.c 	ctx->use_vdoa = false;
ctx              2489 drivers/media/platform/coda/coda-common.c 	set_default_params(ctx);
ctx              2490 drivers/media/platform/coda/coda-common.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx,
ctx              2491 drivers/media/platform/coda/coda-common.c 					    ctx->ops->queue_init);
ctx              2492 drivers/media/platform/coda/coda-common.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              2493 drivers/media/platform/coda/coda-common.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              2500 drivers/media/platform/coda/coda-common.c 	ret = coda_ctrls_setup(ctx);
ctx              2506 drivers/media/platform/coda/coda-common.c 	ctx->fh.ctrl_handler = &ctx->ctrls;
ctx              2508 drivers/media/platform/coda/coda-common.c 	mutex_init(&ctx->bitstream_mutex);
ctx              2509 drivers/media/platform/coda/coda-common.c 	mutex_init(&ctx->buffer_mutex);
ctx              2510 drivers/media/platform/coda/coda-common.c 	mutex_init(&ctx->wakeup_mutex);
ctx              2511 drivers/media/platform/coda/coda-common.c 	INIT_LIST_HEAD(&ctx->buffer_meta_list);
ctx              2512 drivers/media/platform/coda/coda-common.c 	spin_lock_init(&ctx->buffer_meta_lock);
ctx              2517 drivers/media/platform/coda/coda-common.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              2525 drivers/media/platform/coda/coda-common.c 	v4l2_fh_del(&ctx->fh);
ctx              2526 drivers/media/platform/coda/coda-common.c 	v4l2_fh_exit(&ctx->fh);
ctx              2528 drivers/media/platform/coda/coda-common.c 	ida_free(&dev->ida, ctx->idx);
ctx              2530 drivers/media/platform/coda/coda-common.c 	kfree(ctx);
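
coda_open's tail is the classic reverse-order unwind: each error label undoes exactly the steps that succeeded before it, ending in kfree(). A reduced skeleton of the same ladder (struct and helper names invented for the sketch):

#include <linux/fs.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-mem2mem.h>

struct my_dev {
	struct v4l2_m2m_dev *m2m_dev;
};

struct my_ctx {
	struct v4l2_fh fh;
	struct my_dev *dev;
};

static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq);

static int my_open(struct file *file)
{
	struct my_ctx *ctx;
	int ret;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	ctx->dev = video_drvdata(file);
	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(ctx->dev->m2m_dev, ctx,
					    my_queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		goto err_fh;	/* undo only what already succeeded */
	}

	return 0;

err_fh:
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
	return ret;
}
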
ctx              2537 drivers/media/platform/coda/coda-common.c 	struct coda_ctx *ctx = fh_to_ctx(file->private_data);
ctx              2539 drivers/media/platform/coda/coda-common.c 	coda_dbg(1, ctx, "release instance (%p)\n", ctx);
ctx              2541 drivers/media/platform/coda/coda-common.c 	if (ctx->inst_type == CODA_INST_DECODER && ctx->use_bit)
ctx              2542 drivers/media/platform/coda/coda-common.c 		coda_bit_stream_end_flag(ctx);
ctx              2545 drivers/media/platform/coda/coda-common.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              2547 drivers/media/platform/coda/coda-common.c 	if (ctx->vdoa)
ctx              2548 drivers/media/platform/coda/coda-common.c 		vdoa_context_destroy(ctx->vdoa);
ctx              2551 drivers/media/platform/coda/coda-common.c 	if (ctx->ops->seq_end_work) {
ctx              2552 drivers/media/platform/coda/coda-common.c 		queue_work(dev->workqueue, &ctx->seq_end_work);
ctx              2553 drivers/media/platform/coda/coda-common.c 		flush_work(&ctx->seq_end_work);
ctx              2556 drivers/media/platform/coda/coda-common.c 	if (ctx->dev->devtype->product == CODA_DX6)
ctx              2557 drivers/media/platform/coda/coda-common.c 		coda_free_aux_buf(dev, &ctx->workbuf);
ctx              2559 drivers/media/platform/coda/coda-common.c 	v4l2_ctrl_handler_free(&ctx->ctrls);
ctx              2563 drivers/media/platform/coda/coda-common.c 	v4l2_fh_del(&ctx->fh);
ctx              2564 drivers/media/platform/coda/coda-common.c 	v4l2_fh_exit(&ctx->fh);
ctx              2565 drivers/media/platform/coda/coda-common.c 	ida_free(&dev->ida, ctx->idx);
ctx              2566 drivers/media/platform/coda/coda-common.c 	if (ctx->ops->release)
ctx              2567 drivers/media/platform/coda/coda-common.c 		ctx->ops->release(ctx);
ctx              2568 drivers/media/platform/coda/coda-common.c 	debugfs_remove_recursive(ctx->debugfs_entry);
ctx              2569 drivers/media/platform/coda/coda-common.c 	kfree(ctx);
ctx               110 drivers/media/platform/coda/coda-gdi.c void coda_set_gdi_regs(struct coda_ctx *ctx)
ctx               112 drivers/media/platform/coda/coda-gdi.c 	struct coda_dev *dev = ctx->dev;
ctx               117 drivers/media/platform/coda/coda-gdi.c 	switch (ctx->tiled_map_type) {
ctx                31 drivers/media/platform/coda/coda-h264.c int coda_sps_parse_profile(struct coda_ctx *ctx, struct vb2_buffer *vb)
ctx                43 drivers/media/platform/coda/coda-h264.c 	ctx->params.h264_profile_idc = buf[0];
ctx                44 drivers/media/platform/coda/coda-h264.c 	ctx->params.h264_level_idc = buf[2];
ctx               250 drivers/media/platform/coda/coda-h264.c int coda_h264_sps_fixup(struct coda_ctx *ctx, int width, int height, char *buf,
ctx               284 drivers/media/platform/coda/coda-h264.c 		dev_err(ctx->fh.vdev->dev_parent,
ctx               386 drivers/media/platform/coda/coda-h264.c 		dev_err(ctx->fh.vdev->dev_parent,
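
coda_sps_parse_profile reads profile_idc from byte 0 and level_idc from byte 2, matching the fixed head of an H.264 SPS RBSP: profile_idc, one byte of constraint_set flags and reserved bits, then level_idc (the driver locates the SPS NAL first; only the fixed offsets are shown here). A standalone illustration:

#include <stdio.h>
#include <stdint.h>

/* First three bytes of an SPS RBSP, after the NAL unit header:
 *   [0] profile_idc  [1] constraint flags/reserved  [2] level_idc */
static void parse_sps_head(const uint8_t *rbsp,
			   uint8_t *profile_idc, uint8_t *level_idc)
{
	*profile_idc = rbsp[0];
	*level_idc = rbsp[2];
}

int main(void)
{
	/* hypothetical SPS start: High profile (100), level 4.0 (40) */
	const uint8_t rbsp[] = { 100, 0x0c, 40 };
	uint8_t profile, level;

	parse_sps_head(rbsp, &profile, &level);
	printf("profile_idc=%u level_idc=%u\n", profile, level);
	return 0;
}
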
ctx               147 drivers/media/platform/coda/coda-jpeg.c int coda_jpeg_write_tables(struct coda_ctx *ctx)
ctx               161 drivers/media/platform/coda/coda-jpeg.c 		{ 512, ctx->params.jpeg_qmat_tab[0], 64 },
ctx               162 drivers/media/platform/coda/coda-jpeg.c 		{ 576, ctx->params.jpeg_qmat_tab[1], 64 },
ctx               163 drivers/media/platform/coda/coda-jpeg.c 		{ 640, ctx->params.jpeg_qmat_tab[1], 64 },
ctx               168 drivers/media/platform/coda/coda-jpeg.c 		coda_memcpy_parabuf(ctx->parabuf.vaddr, huff + i);
ctx               172 drivers/media/platform/coda/coda-jpeg.c 		coda_memcpy_parabuf(ctx->parabuf.vaddr, qmat + i);
ctx               177 drivers/media/platform/coda/coda-jpeg.c bool coda_jpeg_check_buffer(struct coda_ctx *ctx, struct vb2_buffer *vb)
ctx               220 drivers/media/platform/coda/coda-jpeg.c void coda_set_jpeg_compression_quality(struct coda_ctx *ctx, int quality)
ctx               224 drivers/media/platform/coda/coda-jpeg.c 	ctx->params.jpeg_quality = quality;
ctx               241 drivers/media/platform/coda/coda-jpeg.c 	if (ctx->params.jpeg_qmat_tab[0]) {
ctx               242 drivers/media/platform/coda/coda-jpeg.c 		memcpy(ctx->params.jpeg_qmat_tab[0], luma_q, 64);
ctx               243 drivers/media/platform/coda/coda-jpeg.c 		coda_scale_quant_table(ctx->params.jpeg_qmat_tab[0], scale);
ctx               245 drivers/media/platform/coda/coda-jpeg.c 	if (ctx->params.jpeg_qmat_tab[1]) {
ctx               246 drivers/media/platform/coda/coda-jpeg.c 		memcpy(ctx->params.jpeg_qmat_tab[1], chroma_q, 64);
ctx               247 drivers/media/platform/coda/coda-jpeg.c 		coda_scale_quant_table(ctx->params.jpeg_qmat_tab[1], scale);
ctx                64 drivers/media/platform/coda/coda-mpeg2.c u32 coda_mpeg2_parse_headers(struct coda_ctx *ctx, u8 *buf, u32 size)
ctx                61 drivers/media/platform/coda/coda-mpeg4.c u32 coda_mpeg4_parse_headers(struct coda_ctx *ctx, u8 *buf, u32 size)
ctx               190 drivers/media/platform/coda/coda.h 	int (*reqbufs)(struct coda_ctx *ctx, struct v4l2_requestbuffers *rb);
ctx               191 drivers/media/platform/coda/coda.h 	int (*start_streaming)(struct coda_ctx *ctx);
ctx               192 drivers/media/platform/coda/coda.h 	int (*prepare_run)(struct coda_ctx *ctx);
ctx               193 drivers/media/platform/coda/coda.h 	void (*finish_run)(struct coda_ctx *ctx);
ctx               194 drivers/media/platform/coda/coda.h 	void (*run_timeout)(struct coda_ctx *ctx);
ctx               197 drivers/media/platform/coda/coda.h 	void (*release)(struct coda_ctx *ctx);
ctx               279 drivers/media/platform/coda/coda.h #define coda_dbg(level, ctx, fmt, arg...)				\
ctx               282 drivers/media/platform/coda/coda.h 			v4l2_dbg((level), coda_debug, &(ctx)->dev->v4l2_dev, \
ctx               283 drivers/media/platform/coda/coda.h 			 "%u: " fmt, (ctx)->idx, ##arg);		\
ctx               288 drivers/media/platform/coda/coda.h void coda_write_base(struct coda_ctx *ctx, struct coda_q_data *q_data,
ctx               300 drivers/media/platform/coda/coda.h int coda_hw_reset(struct coda_ctx *ctx);
ctx               302 drivers/media/platform/coda/coda.h void coda_fill_bitstream(struct coda_ctx *ctx, struct list_head *buffer_list);
ctx               304 drivers/media/platform/coda/coda.h void coda_set_gdi_regs(struct coda_ctx *ctx);
ctx               306 drivers/media/platform/coda/coda.h static inline struct coda_q_data *get_q_data(struct coda_ctx *ctx,
ctx               311 drivers/media/platform/coda/coda.h 		return &(ctx->q_data[V4L2_M2M_SRC]);
ctx               313 drivers/media/platform/coda/coda.h 		return &(ctx->q_data[V4L2_M2M_DST]);
ctx               323 drivers/media/platform/coda/coda.h static inline unsigned int coda_get_bitstream_payload(struct coda_ctx *ctx)
ctx               325 drivers/media/platform/coda/coda.h 	return kfifo_len(&ctx->bitstream_fifo);
ctx               332 drivers/media/platform/coda/coda.h static inline bool coda_bitstream_can_fetch_past(struct coda_ctx *ctx,
ctx               335 drivers/media/platform/coda/coda.h 	return (int)(ctx->bitstream_fifo.kfifo.in - ALIGN(pos, 256)) > 512;
ctx               338 drivers/media/platform/coda/coda.h bool coda_bitstream_can_fetch_past(struct coda_ctx *ctx, unsigned int pos);
ctx               339 drivers/media/platform/coda/coda.h int coda_bitstream_flush(struct coda_ctx *ctx);
ctx               341 drivers/media/platform/coda/coda.h void coda_bit_stream_end_flag(struct coda_ctx *ctx);
ctx               343 drivers/media/platform/coda/coda.h void coda_m2m_buf_done(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx               350 drivers/media/platform/coda/coda.h int coda_sps_parse_profile(struct coda_ctx *ctx, struct vb2_buffer *vb);
ctx               351 drivers/media/platform/coda/coda.h int coda_h264_sps_fixup(struct coda_ctx *ctx, int width, int height, char *buf,
ctx               356 drivers/media/platform/coda/coda.h u32 coda_mpeg2_parse_headers(struct coda_ctx *ctx, u8 *buf, u32 size);
ctx               359 drivers/media/platform/coda/coda.h u32 coda_mpeg4_parse_headers(struct coda_ctx *ctx, u8 *buf, u32 size);
ctx               361 drivers/media/platform/coda/coda.h void coda_update_profile_level_ctrls(struct coda_ctx *ctx, u8 profile_idc,
ctx               364 drivers/media/platform/coda/coda.h bool coda_jpeg_check_buffer(struct coda_ctx *ctx, struct vb2_buffer *vb);
ctx               365 drivers/media/platform/coda/coda.h int coda_jpeg_write_tables(struct coda_ctx *ctx);
ctx               366 drivers/media/platform/coda/coda.h void coda_set_jpeg_compression_quality(struct coda_ctx *ctx, int quality);
ctx               130 drivers/media/platform/coda/imx-vdoa.c int vdoa_wait_for_completion(struct vdoa_ctx *ctx)
ctx               132 drivers/media/platform/coda/imx-vdoa.c 	struct vdoa_data *vdoa = ctx->vdoa;
ctx               134 drivers/media/platform/coda/imx-vdoa.c 	if (ctx->submitted_job == ctx->completed_job)
ctx               137 drivers/media/platform/coda/imx-vdoa.c 	if (!wait_for_completion_timeout(&ctx->completion,
ctx               148 drivers/media/platform/coda/imx-vdoa.c void vdoa_device_run(struct vdoa_ctx *ctx, dma_addr_t dst, dma_addr_t src)
ctx               151 drivers/media/platform/coda/imx-vdoa.c 	struct vdoa_data *vdoa = ctx->vdoa;
ctx               157 drivers/media/platform/coda/imx-vdoa.c 	vdoa->curr_ctx = ctx;
ctx               159 drivers/media/platform/coda/imx-vdoa.c 	reinit_completion(&ctx->completion);
ctx               160 drivers/media/platform/coda/imx-vdoa.c 	ctx->submitted_job++;
ctx               162 drivers/media/platform/coda/imx-vdoa.c 	src_q_data = &ctx->q_data[V4L2_M2M_SRC];
ctx               163 drivers/media/platform/coda/imx-vdoa.c 	dst_q_data = &ctx->q_data[V4L2_M2M_DST];
ctx               201 drivers/media/platform/coda/imx-vdoa.c 	struct vdoa_ctx *ctx;
ctx               204 drivers/media/platform/coda/imx-vdoa.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               205 drivers/media/platform/coda/imx-vdoa.c 	if (!ctx)
ctx               210 drivers/media/platform/coda/imx-vdoa.c 		kfree(ctx);
ctx               214 drivers/media/platform/coda/imx-vdoa.c 	init_completion(&ctx->completion);
ctx               215 drivers/media/platform/coda/imx-vdoa.c 	ctx->vdoa = vdoa;
ctx               217 drivers/media/platform/coda/imx-vdoa.c 	return ctx;
ctx               221 drivers/media/platform/coda/imx-vdoa.c void vdoa_context_destroy(struct vdoa_ctx *ctx)
ctx               223 drivers/media/platform/coda/imx-vdoa.c 	struct vdoa_data *vdoa = ctx->vdoa;
ctx               225 drivers/media/platform/coda/imx-vdoa.c 	if (vdoa->curr_ctx == ctx) {
ctx               231 drivers/media/platform/coda/imx-vdoa.c 	kfree(ctx);
ctx               235 drivers/media/platform/coda/imx-vdoa.c int vdoa_context_configure(struct vdoa_ctx *ctx,
ctx               251 drivers/media/platform/coda/imx-vdoa.c 	if (!ctx)
ctx               254 drivers/media/platform/coda/imx-vdoa.c 	src_q_data = &ctx->q_data[V4L2_M2M_SRC];
ctx               255 drivers/media/platform/coda/imx-vdoa.c 	dst_q_data = &ctx->q_data[V4L2_M2M_DST];
ctx                15 drivers/media/platform/coda/imx-vdoa.h int vdoa_context_configure(struct vdoa_ctx *ctx,
ctx                18 drivers/media/platform/coda/imx-vdoa.h void vdoa_context_destroy(struct vdoa_ctx *ctx);
ctx                20 drivers/media/platform/coda/imx-vdoa.h void vdoa_device_run(struct vdoa_ctx *ctx, dma_addr_t dst, dma_addr_t src);
ctx                21 drivers/media/platform/coda/imx-vdoa.h int vdoa_wait_for_completion(struct vdoa_ctx *ctx);
ctx                30 drivers/media/platform/coda/imx-vdoa.h static inline int vdoa_context_configure(struct vdoa_ctx *ctx,
ctx                38 drivers/media/platform/coda/imx-vdoa.h static inline void vdoa_context_destroy(struct vdoa_ctx *ctx) { };
ctx                40 drivers/media/platform/coda/imx-vdoa.h static inline void vdoa_device_run(struct vdoa_ctx *ctx,
ctx                43 drivers/media/platform/coda/imx-vdoa.h static inline int vdoa_wait_for_completion(struct vdoa_ctx *ctx)
ctx                14 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, int cmd),
ctx                16 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, cmd),
ctx                20 drivers/media/platform/coda/trace.h 		__field(int, ctx)
ctx                25 drivers/media/platform/coda/trace.h 		__entry->minor = ctx->fh.vdev->minor;
ctx                26 drivers/media/platform/coda/trace.h 		__entry->ctx = ctx->idx;
ctx                31 drivers/media/platform/coda/trace.h 		  __entry->minor, __entry->ctx, __entry->cmd)
ctx                35 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx),
ctx                37 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx),
ctx                41 drivers/media/platform/coda/trace.h 		__field(int, ctx)
ctx                45 drivers/media/platform/coda/trace.h 		__entry->minor = ctx->fh.vdev->minor;
ctx                46 drivers/media/platform/coda/trace.h 		__entry->ctx = ctx->idx;
ctx                49 drivers/media/platform/coda/trace.h 	TP_printk("minor = %d, ctx = %d", __entry->minor, __entry->ctx)
ctx                53 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf),
ctx                55 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf),
ctx                60 drivers/media/platform/coda/trace.h 		__field(int, ctx)
ctx                64 drivers/media/platform/coda/trace.h 		__entry->minor = ctx->fh.vdev->minor;
ctx                66 drivers/media/platform/coda/trace.h 		__entry->ctx = ctx->idx;
ctx                70 drivers/media/platform/coda/trace.h 		  __entry->minor, __entry->index, __entry->ctx)
ctx                74 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf),
ctx                75 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf)
ctx                79 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf),
ctx                80 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf)
ctx                84 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx                87 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf, meta),
ctx                94 drivers/media/platform/coda/trace.h 		__field(int, ctx)
ctx                98 drivers/media/platform/coda/trace.h 		__entry->minor = ctx->fh.vdev->minor;
ctx               100 drivers/media/platform/coda/trace.h 		__entry->start = meta->start & ctx->bitstream_fifo.kfifo.mask;
ctx               101 drivers/media/platform/coda/trace.h 		__entry->end = meta->end & ctx->bitstream_fifo.kfifo.mask;
ctx               102 drivers/media/platform/coda/trace.h 		__entry->ctx = ctx->idx;
ctx               107 drivers/media/platform/coda/trace.h 		  __entry->ctx)
ctx               111 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx               113 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf, meta)
ctx               117 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct coda_buffer_meta *meta),
ctx               119 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, meta),
ctx               125 drivers/media/platform/coda/trace.h 		__field(int, ctx)
ctx               129 drivers/media/platform/coda/trace.h 		__entry->minor = ctx->fh.vdev->minor;
ctx               131 drivers/media/platform/coda/trace.h 					 ctx->bitstream_fifo.kfifo.mask) : 0;
ctx               133 drivers/media/platform/coda/trace.h 				       ctx->bitstream_fifo.kfifo.mask) : 0;
ctx               134 drivers/media/platform/coda/trace.h 		__entry->ctx = ctx->idx;
ctx               138 drivers/media/platform/coda/trace.h 		  __entry->minor, __entry->start, __entry->end, __entry->ctx)
ctx               142 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct coda_buffer_meta *meta),
ctx               143 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, meta)
ctx               147 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct coda_buffer_meta *meta),
ctx               148 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, meta)
ctx               152 drivers/media/platform/coda/trace.h 	TP_PROTO(struct coda_ctx *ctx, struct vb2_v4l2_buffer *buf,
ctx               154 drivers/media/platform/coda/trace.h 	TP_ARGS(ctx, buf, meta)
ctx               390 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f)
ctx               392 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               431 drivers/media/platform/exynos-gsc/gsc-core.c 		pix_mp->colorspace = ctx->out_colorspace;
ctx               451 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->out_colorspace = pix_mp->colorspace;
ctx               479 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_g_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f)
ctx               485 drivers/media/platform/exynos-gsc/gsc-core.c 	frame = ctx_get_frame(ctx, f->type);
ctx               496 drivers/media/platform/exynos-gsc/gsc-core.c 	pix_mp->colorspace = ctx->out_colorspace;
ctx               518 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_try_selection(struct gsc_ctx *ctx, struct v4l2_selection *s)
ctx               521 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               533 drivers/media/platform/exynos-gsc/gsc-core.c 		f = &ctx->d_frame;
ctx               535 drivers/media/platform/exynos-gsc/gsc-core.c 		f = &ctx->s_frame;
ctx               560 drivers/media/platform/exynos-gsc/gsc-core.c 		if (ctx->gsc_ctrls.rotate->val == 90 ||
ctx               561 drivers/media/platform/exynos-gsc/gsc-core.c 		    ctx->gsc_ctrls.rotate->val == 270) {
ctx               581 drivers/media/platform/exynos-gsc/gsc-core.c 	    (ctx->gsc_ctrls.rotate->val == 90 ||
ctx               582 drivers/media/platform/exynos-gsc/gsc-core.c 	     ctx->gsc_ctrls.rotate->val == 270))
ctx               634 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_set_scaler_info(struct gsc_ctx *ctx)
ctx               636 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_scaler *sc = &ctx->scaler;
ctx               637 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_frame *s_frame = &ctx->s_frame;
ctx               638 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_frame *d_frame = &ctx->d_frame;
ctx               639 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_variant *variant = ctx->gsc_dev->variant;
ctx               640 drivers/media/platform/exynos-gsc/gsc-core.c 	struct device *dev = &ctx->gsc_dev->pdev->dev;
ctx               646 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->gsc_ctrls.rotate->val, ctx->out_path);
ctx               652 drivers/media/platform/exynos-gsc/gsc-core.c 	if (ctx->gsc_ctrls.rotate->val == 90 ||
ctx               653 drivers/media/platform/exynos-gsc/gsc-core.c 	    ctx->gsc_ctrls.rotate->val == 270) {
ctx               699 drivers/media/platform/exynos-gsc/gsc-core.c static int __gsc_s_ctrl(struct gsc_ctx *ctx, struct v4l2_ctrl *ctrl)
ctx               701 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               711 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->hflip = ctrl->val;
ctx               715 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->vflip = ctrl->val;
ctx               719 drivers/media/platform/exynos-gsc/gsc-core.c 		if ((ctx->state & flags) == flags) {
ctx               721 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->s_frame.crop.width,
ctx               722 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->s_frame.crop.height,
ctx               723 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->d_frame.crop.width,
ctx               724 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->d_frame.crop.height,
ctx               725 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->gsc_ctrls.rotate->val,
ctx               726 drivers/media/platform/exynos-gsc/gsc-core.c 					ctx->out_path);
ctx               732 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->rotation = ctrl->val;
ctx               736 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->d_frame.alpha = ctrl->val;
ctx               740 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->state |= GSC_PARAMS;
ctx               746 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               750 drivers/media/platform/exynos-gsc/gsc-core.c 	spin_lock_irqsave(&ctx->gsc_dev->slock, flags);
ctx               751 drivers/media/platform/exynos-gsc/gsc-core.c 	ret = __gsc_s_ctrl(ctx, ctrl);
ctx               752 drivers/media/platform/exynos-gsc/gsc-core.c 	spin_unlock_irqrestore(&ctx->gsc_dev->slock, flags);
ctx               761 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_ctrls_create(struct gsc_ctx *ctx)
ctx               763 drivers/media/platform/exynos-gsc/gsc-core.c 	if (ctx->ctrls_rdy) {
ctx               768 drivers/media/platform/exynos-gsc/gsc-core.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, GSC_MAX_CTRL_NUM);
ctx               770 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->gsc_ctrls.rotate = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               772 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->gsc_ctrls.hflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               774 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->gsc_ctrls.vflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               776 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->gsc_ctrls.global_alpha = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               779 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx->ctrls_rdy = ctx->ctrl_handler.error == 0;
ctx               781 drivers/media/platform/exynos-gsc/gsc-core.c 	if (ctx->ctrl_handler.error) {
ctx               782 drivers/media/platform/exynos-gsc/gsc-core.c 		int err = ctx->ctrl_handler.error;
ctx               783 drivers/media/platform/exynos-gsc/gsc-core.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               791 drivers/media/platform/exynos-gsc/gsc-core.c void gsc_ctrls_delete(struct gsc_ctx *ctx)
ctx               793 drivers/media/platform/exynos-gsc/gsc-core.c 	if (ctx->ctrls_rdy) {
ctx               794 drivers/media/platform/exynos-gsc/gsc-core.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               795 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx->ctrls_rdy = false;
ctx               800 drivers/media/platform/exynos-gsc/gsc-core.c int gsc_prepare_addr(struct gsc_ctx *ctx, struct vb2_buffer *vb,
ctx               864 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_ctx *ctx;
ctx               886 drivers/media/platform/exynos-gsc/gsc-core.c 		ctx = v4l2_m2m_get_curr_priv(gsc->m2m.m2m_dev);
ctx               888 drivers/media/platform/exynos-gsc/gsc-core.c 		if (!ctx || !ctx->m2m_ctx)
ctx               892 drivers/media/platform/exynos-gsc/gsc-core.c 		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_DONE);
ctx               895 drivers/media/platform/exynos-gsc/gsc-core.c 		if (ctx->state & GSC_CTX_STOP_REQ) {
ctx               896 drivers/media/platform/exynos-gsc/gsc-core.c 			ctx->state &= ~GSC_CTX_STOP_REQ;
ctx              1254 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_ctx *ctx;
ctx              1259 drivers/media/platform/exynos-gsc/gsc-core.c 	ctx = gsc->m2m.ctx;
ctx              1260 drivers/media/platform/exynos-gsc/gsc-core.c 	gsc->m2m.ctx = NULL;
ctx              1264 drivers/media/platform/exynos-gsc/gsc-core.c 		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx               215 drivers/media/platform/exynos-gsc/gsc-core.h 	struct gsc_ctx		*ctx;
ctx               381 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state);
ctx               387 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_try_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
ctx               389 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_g_fmt_mplane(struct gsc_ctx *ctx, struct v4l2_format *f);
ctx               391 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_try_selection(struct gsc_ctx *ctx, struct v4l2_selection *s);
ctx               400 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_set_scaler_info(struct gsc_ctx *ctx);
ctx               401 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_ctrls_create(struct gsc_ctx *ctx);
ctx               402 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_ctrls_delete(struct gsc_ctx *ctx);
ctx               403 drivers/media/platform/exynos-gsc/gsc-core.h int gsc_prepare_addr(struct gsc_ctx *ctx, struct vb2_buffer *vb,
ctx               406 drivers/media/platform/exynos-gsc/gsc-core.h static inline void gsc_ctx_state_lock_set(u32 state, struct gsc_ctx *ctx)
ctx               410 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_lock_irqsave(&ctx->gsc_dev->slock, flags);
ctx               411 drivers/media/platform/exynos-gsc/gsc-core.h 	ctx->state |= state;
ctx               412 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_unlock_irqrestore(&ctx->gsc_dev->slock, flags);
ctx               415 drivers/media/platform/exynos-gsc/gsc-core.h static inline void gsc_ctx_state_lock_clear(u32 state, struct gsc_ctx *ctx)
ctx               419 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_lock_irqsave(&ctx->gsc_dev->slock, flags);
ctx               420 drivers/media/platform/exynos-gsc/gsc-core.h 	ctx->state &= ~state;
ctx               421 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_unlock_irqrestore(&ctx->gsc_dev->slock, flags);
ctx               461 drivers/media/platform/exynos-gsc/gsc-core.h static inline bool gsc_ctx_state_is_set(u32 mask, struct gsc_ctx *ctx)
ctx               466 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_lock_irqsave(&ctx->gsc_dev->slock, flags);
ctx               467 drivers/media/platform/exynos-gsc/gsc-core.h 	ret = (ctx->state & mask) == mask;
ctx               468 drivers/media/platform/exynos-gsc/gsc-core.h 	spin_unlock_irqrestore(&ctx->gsc_dev->slock, flags);
ctx               472 drivers/media/platform/exynos-gsc/gsc-core.h static inline struct gsc_frame *ctx_get_frame(struct gsc_ctx *ctx,
ctx               478 drivers/media/platform/exynos-gsc/gsc-core.h 		frame = &ctx->s_frame;
ctx               480 drivers/media/platform/exynos-gsc/gsc-core.h 		frame = &ctx->d_frame;
ctx               500 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_input_path(struct gsc_ctx *ctx);
ctx               501 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_in_size(struct gsc_ctx *ctx);
ctx               502 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_in_image_rgb(struct gsc_ctx *ctx);
ctx               503 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_in_image_format(struct gsc_ctx *ctx);
ctx               504 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_output_path(struct gsc_ctx *ctx);
ctx               505 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_out_size(struct gsc_ctx *ctx);
ctx               506 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_out_image_rgb(struct gsc_ctx *ctx);
ctx               507 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_out_image_format(struct gsc_ctx *ctx);
ctx               508 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_prescaler(struct gsc_ctx *ctx);
ctx               509 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_mainscaler(struct gsc_ctx *ctx);
ctx               510 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_rotation(struct gsc_ctx *ctx);
ctx               511 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_global_alpha(struct gsc_ctx *ctx);
ctx               512 drivers/media/platform/exynos-gsc/gsc-core.h void gsc_hw_set_sfr_update(struct gsc_ctx *ctx);
ctx                27 drivers/media/platform/exynos-gsc/gsc-m2m.c static int gsc_m2m_ctx_stop_req(struct gsc_ctx *ctx)
ctx                30 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx                34 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (!gsc_m2m_pending(gsc) || (curr_ctx != ctx))
ctx                37 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_ctx_state_lock_set(GSC_CTX_STOP_REQ, ctx);
ctx                39 drivers/media/platform/exynos-gsc/gsc-m2m.c 			!gsc_ctx_state_is_set(GSC_CTX_STOP_REQ, ctx),
ctx                45 drivers/media/platform/exynos-gsc/gsc-m2m.c static void __gsc_m2m_job_abort(struct gsc_ctx *ctx)
ctx                49 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_m2m_ctx_stop_req(ctx);
ctx                50 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if ((ret == -ETIMEDOUT) || (ctx->state & GSC_CTX_ABORT)) {
ctx                51 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_ctx_state_lock_clear(GSC_CTX_STOP_REQ | GSC_CTX_ABORT, ctx);
ctx                52 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx                58 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = q->drv_priv;
ctx                61 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = pm_runtime_get_sync(&ctx->gsc_dev->pdev->dev);
ctx                65 drivers/media/platform/exynos-gsc/gsc-m2m.c static void __gsc_m2m_cleanup_queue(struct gsc_ctx *ctx)
ctx                69 drivers/media/platform/exynos-gsc/gsc-m2m.c 	while (v4l2_m2m_num_src_bufs_ready(ctx->m2m_ctx) > 0) {
ctx                70 drivers/media/platform/exynos-gsc/gsc-m2m.c 		src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx                74 drivers/media/platform/exynos-gsc/gsc-m2m.c 	while (v4l2_m2m_num_dst_bufs_ready(ctx->m2m_ctx) > 0) {
ctx                75 drivers/media/platform/exynos-gsc/gsc-m2m.c 		dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx                82 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = q->drv_priv;
ctx                84 drivers/media/platform/exynos-gsc/gsc-m2m.c 	__gsc_m2m_job_abort(ctx);
ctx                86 drivers/media/platform/exynos-gsc/gsc-m2m.c 	__gsc_m2m_cleanup_queue(ctx);
ctx                88 drivers/media/platform/exynos-gsc/gsc-m2m.c 	pm_runtime_put(&ctx->gsc_dev->pdev->dev);
ctx                91 drivers/media/platform/exynos-gsc/gsc-m2m.c void gsc_m2m_job_finish(struct gsc_ctx *ctx, int vb_state)
ctx                95 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (!ctx || !ctx->m2m_ctx)
ctx                98 drivers/media/platform/exynos-gsc/gsc-m2m.c 	src_vb = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx                99 drivers/media/platform/exynos-gsc/gsc-m2m.c 	dst_vb = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               112 drivers/media/platform/exynos-gsc/gsc-m2m.c 		v4l2_m2m_job_finish(ctx->gsc_dev->m2m.m2m_dev,
ctx               113 drivers/media/platform/exynos-gsc/gsc-m2m.c 				    ctx->m2m_ctx);
ctx               122 drivers/media/platform/exynos-gsc/gsc-m2m.c static int gsc_get_bufs(struct gsc_ctx *ctx)
ctx               128 drivers/media/platform/exynos-gsc/gsc-m2m.c 	s_frame = &ctx->s_frame;
ctx               129 drivers/media/platform/exynos-gsc/gsc-m2m.c 	d_frame = &ctx->d_frame;
ctx               131 drivers/media/platform/exynos-gsc/gsc-m2m.c 	src_vb = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx               132 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_prepare_addr(ctx, &src_vb->vb2_buf, s_frame, &s_frame->addr);
ctx               136 drivers/media/platform/exynos-gsc/gsc-m2m.c 	dst_vb = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
ctx               137 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_prepare_addr(ctx, &dst_vb->vb2_buf, d_frame, &d_frame->addr);
ctx               148 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = priv;
ctx               154 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (WARN(!ctx, "null hardware context\n"))
ctx               157 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc = ctx->gsc_dev;
ctx               163 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (gsc->m2m.ctx != ctx) {
ctx               165 drivers/media/platform/exynos-gsc/gsc-m2m.c 				gsc->m2m.ctx, ctx);
ctx               166 drivers/media/platform/exynos-gsc/gsc-m2m.c 		ctx->state |= GSC_PARAMS;
ctx               167 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc->m2m.ctx = ctx;
ctx               170 drivers/media/platform/exynos-gsc/gsc-m2m.c 	is_set = ctx->state & GSC_CTX_STOP_REQ;
ctx               172 drivers/media/platform/exynos-gsc/gsc-m2m.c 		ctx->state &= ~GSC_CTX_STOP_REQ;
ctx               173 drivers/media/platform/exynos-gsc/gsc-m2m.c 		ctx->state |= GSC_CTX_ABORT;
ctx               178 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_get_bufs(ctx);
ctx               184 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_set_prefbuf(gsc, &ctx->s_frame);
ctx               185 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_hw_set_input_addr(gsc, &ctx->s_frame.addr, GSC_M2M_BUF_NUM);
ctx               186 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_hw_set_output_addr(gsc, &ctx->d_frame.addr, GSC_M2M_BUF_NUM);
ctx               188 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (ctx->state & GSC_PARAMS) {
ctx               194 drivers/media/platform/exynos-gsc/gsc-m2m.c 		if (gsc_set_scaler_info(ctx)) {
ctx               199 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_input_path(ctx);
ctx               200 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_in_size(ctx);
ctx               201 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_in_image_format(ctx);
ctx               203 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_output_path(ctx);
ctx               204 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_out_size(ctx);
ctx               205 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_out_image_format(ctx);
ctx               207 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_prescaler(ctx);
ctx               208 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_mainscaler(ctx);
ctx               209 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_rotation(ctx);
ctx               210 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_hw_set_global_alpha(ctx);
ctx               214 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_hw_set_sfr_update(ctx);
ctx               216 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->state &= ~GSC_PARAMS;
ctx               223 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->state &= ~GSC_PARAMS;
ctx               231 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = vb2_get_drv_priv(vq);
ctx               235 drivers/media/platform/exynos-gsc/gsc-m2m.c 	frame = ctx_get_frame(ctx, vq->type);
ctx               250 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               254 drivers/media/platform/exynos-gsc/gsc-m2m.c 	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
ctx               269 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               271 drivers/media/platform/exynos-gsc/gsc-m2m.c 	pr_debug("ctx: %p, ctx->state: 0x%x", ctx, ctx->state);
ctx               273 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (ctx->m2m_ctx)
ctx               274 drivers/media/platform/exynos-gsc/gsc-m2m.c 		v4l2_m2m_buf_queue(ctx->m2m_ctx, vbuf);
ctx               290 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               291 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               309 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               311 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return gsc_g_fmt_mplane(ctx, f);
ctx               317 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               319 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return gsc_try_fmt_mplane(ctx, f);
ctx               325 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               335 drivers/media/platform/exynos-gsc/gsc-m2m.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               343 drivers/media/platform/exynos-gsc/gsc-m2m.c 		frame = &ctx->s_frame;
ctx               345 drivers/media/platform/exynos-gsc/gsc-m2m.c 		frame = &ctx->d_frame;
ctx               359 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_DST_FMT, ctx);
ctx               361 drivers/media/platform/exynos-gsc/gsc-m2m.c 		gsc_ctx_state_lock_set(GSC_PARAMS | GSC_SRC_FMT, ctx);
ctx               371 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               372 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               380 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
ctx               386 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               387 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_expbuf(file, ctx->m2m_ctx, eb);
ctx               393 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               394 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_querybuf(file, ctx->m2m_ctx, buf);
ctx               400 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               401 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
ctx               407 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               408 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
ctx               414 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               418 drivers/media/platform/exynos-gsc/gsc-m2m.c 		if (!gsc_ctx_state_is_set(GSC_SRC_FMT, ctx))
ctx               420 drivers/media/platform/exynos-gsc/gsc-m2m.c 	} else if (!gsc_ctx_state_is_set(GSC_DST_FMT, ctx)) {
ctx               424 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
ctx               430 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               431 drivers/media/platform/exynos-gsc/gsc-m2m.c 	return v4l2_m2m_streamoff(file, ctx->m2m_ctx, type);
ctx               453 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               459 drivers/media/platform/exynos-gsc/gsc-m2m.c 	frame = ctx_get_frame(ctx, s->type);
ctx               490 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(fh);
ctx               491 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_variant *variant = ctx->gsc_dev->variant;
ctx               499 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_try_selection(ctx, &sel);
ctx               517 drivers/media/platform/exynos-gsc/gsc-m2m.c 		frame = &ctx->s_frame;
ctx               523 drivers/media/platform/exynos-gsc/gsc-m2m.c 		frame = &ctx->d_frame;
ctx               531 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (gsc_ctx_state_is_set(GSC_DST_FMT | GSC_SRC_FMT, ctx)) {
ctx               534 drivers/media/platform/exynos-gsc/gsc-m2m.c 				sel.r.height, ctx->d_frame.crop.width,
ctx               535 drivers/media/platform/exynos-gsc/gsc-m2m.c 				ctx->d_frame.crop.height,
ctx               536 drivers/media/platform/exynos-gsc/gsc-m2m.c 				ctx->gsc_ctrls.rotate->val, ctx->out_path);
ctx               539 drivers/media/platform/exynos-gsc/gsc-m2m.c 				ctx->s_frame.crop.width,
ctx               540 drivers/media/platform/exynos-gsc/gsc-m2m.c 				ctx->s_frame.crop.height, sel.r.width,
ctx               541 drivers/media/platform/exynos-gsc/gsc-m2m.c 				sel.r.height, ctx->gsc_ctrls.rotate->val,
ctx               542 drivers/media/platform/exynos-gsc/gsc-m2m.c 				ctx->out_path);
ctx               553 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_ctx_state_lock_set(GSC_PARAMS, ctx);
ctx               581 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = priv;
ctx               587 drivers/media/platform/exynos-gsc/gsc-m2m.c 	src_vq->drv_priv = ctx;
ctx               592 drivers/media/platform/exynos-gsc/gsc-m2m.c 	src_vq->lock = &ctx->gsc_dev->lock;
ctx               593 drivers/media/platform/exynos-gsc/gsc-m2m.c 	src_vq->dev = &ctx->gsc_dev->pdev->dev;
ctx               602 drivers/media/platform/exynos-gsc/gsc-m2m.c 	dst_vq->drv_priv = ctx;
ctx               607 drivers/media/platform/exynos-gsc/gsc-m2m.c 	dst_vq->lock = &ctx->gsc_dev->lock;
ctx               608 drivers/media/platform/exynos-gsc/gsc-m2m.c 	dst_vq->dev = &ctx->gsc_dev->pdev->dev;
ctx               616 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = NULL;
ctx               624 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               625 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (!ctx) {
ctx               630 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_init(&ctx->fh, gsc->m2m.vfd);
ctx               631 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = gsc_ctrls_create(ctx);
ctx               636 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               637 drivers/media/platform/exynos-gsc/gsc-m2m.c 	file->private_data = &ctx->fh;
ctx               638 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_add(&ctx->fh);
ctx               640 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->gsc_dev = gsc;
ctx               642 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->s_frame.fmt = get_format(0);
ctx               643 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->d_frame.fmt = get_format(0);
ctx               645 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->state = GSC_CTX_M2M;
ctx               646 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->flags = 0;
ctx               647 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->in_path = GSC_DMA;
ctx               648 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->out_path = GSC_DMA;
ctx               650 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ctx->m2m_ctx = v4l2_m2m_ctx_init(gsc->m2m.m2m_dev, ctx, queue_init);
ctx               651 drivers/media/platform/exynos-gsc/gsc-m2m.c 	if (IS_ERR(ctx->m2m_ctx)) {
ctx               653 drivers/media/platform/exynos-gsc/gsc-m2m.c 		ret = PTR_ERR(ctx->m2m_ctx);
ctx               660 drivers/media/platform/exynos-gsc/gsc-m2m.c 	pr_debug("gsc m2m driver is opened, ctx(0x%p)", ctx);
ctx               666 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_ctrls_delete(ctx);
ctx               667 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_del(&ctx->fh);
ctx               669 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx               670 drivers/media/platform/exynos-gsc/gsc-m2m.c 	kfree(ctx);
ctx               678 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               679 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               686 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx               687 drivers/media/platform/exynos-gsc/gsc-m2m.c 	gsc_ctrls_delete(ctx);
ctx               688 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_del(&ctx->fh);
ctx               689 drivers/media/platform/exynos-gsc/gsc-m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx               693 drivers/media/platform/exynos-gsc/gsc-m2m.c 	kfree(ctx);
ctx               702 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               703 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               709 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = v4l2_m2m_poll(file, ctx->m2m_ctx, wait);
ctx               717 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               718 drivers/media/platform/exynos-gsc/gsc-m2m.c 	struct gsc_dev *gsc = ctx->gsc_dev;
ctx               724 drivers/media/platform/exynos-gsc/gsc-m2m.c 	ret = v4l2_m2m_mmap(file, ctx->m2m_ctx, vma);
ctx               107 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_input_path(struct gsc_ctx *ctx)
ctx               109 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               114 drivers/media/platform/exynos-gsc/gsc-regs.c 	if (ctx->in_path == GSC_DMA)
ctx               120 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_in_size(struct gsc_ctx *ctx)
ctx               122 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               123 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->s_frame;
ctx               142 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_in_image_rgb(struct gsc_ctx *ctx)
ctx               144 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               145 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->s_frame;
ctx               162 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_in_image_format(struct gsc_ctx *ctx)
ctx               164 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               165 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->s_frame;
ctx               176 drivers/media/platform/exynos-gsc/gsc-regs.c 		gsc_hw_set_in_image_rgb(ctx);
ctx               218 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_output_path(struct gsc_ctx *ctx)
ctx               220 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               225 drivers/media/platform/exynos-gsc/gsc-regs.c 	if (ctx->out_path == GSC_DMA)
ctx               233 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_out_size(struct gsc_ctx *ctx)
ctx               235 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               236 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->d_frame;
ctx               240 drivers/media/platform/exynos-gsc/gsc-regs.c 	if (ctx->out_path == GSC_DMA) {
ctx               251 drivers/media/platform/exynos-gsc/gsc-regs.c 	if (ctx->gsc_ctrls.rotate->val == 90 ||
ctx               252 drivers/media/platform/exynos-gsc/gsc-regs.c 	    ctx->gsc_ctrls.rotate->val == 270) {
ctx               262 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_out_image_rgb(struct gsc_ctx *ctx)
ctx               264 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               265 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->d_frame;
ctx               282 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_out_image_format(struct gsc_ctx *ctx)
ctx               284 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               285 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->d_frame;
ctx               296 drivers/media/platform/exynos-gsc/gsc-regs.c 		gsc_hw_set_out_image_rgb(ctx);
ctx               300 drivers/media/platform/exynos-gsc/gsc-regs.c 	if (ctx->out_path != GSC_DMA) {
ctx               342 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_prescaler(struct gsc_ctx *ctx)
ctx               344 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               345 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_scaler *sc = &ctx->scaler;
ctx               354 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_mainscaler(struct gsc_ctx *ctx)
ctx               356 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               357 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_scaler *sc = &ctx->scaler;
ctx               367 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_rotation(struct gsc_ctx *ctx)
ctx               369 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               375 drivers/media/platform/exynos-gsc/gsc-regs.c 	switch (ctx->gsc_ctrls.rotate->val) {
ctx               383 drivers/media/platform/exynos-gsc/gsc-regs.c 		if (ctx->gsc_ctrls.hflip->val)
ctx               385 drivers/media/platform/exynos-gsc/gsc-regs.c 		else if (ctx->gsc_ctrls.vflip->val)
ctx               391 drivers/media/platform/exynos-gsc/gsc-regs.c 		if (ctx->gsc_ctrls.hflip->val)
ctx               393 drivers/media/platform/exynos-gsc/gsc-regs.c 		else if (ctx->gsc_ctrls.vflip->val)
ctx               400 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_global_alpha(struct gsc_ctx *ctx)
ctx               402 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx               403 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_frame *frame = &ctx->d_frame;
ctx               414 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg |= GSC_OUT_GLOBAL_ALPHA(ctx->gsc_ctrls.global_alpha->val);
ctx               418 drivers/media/platform/exynos-gsc/gsc-regs.c void gsc_hw_set_sfr_update(struct gsc_ctx *ctx)
ctx               420 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_dev *dev = ctx->gsc_dev;
ctx                35 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx                39 drivers/media/platform/exynos4-is/fimc-capture.c 	if (ctx == NULL || ctx->s_frame.fmt == NULL)
ctx                49 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_prepare_dma_offset(ctx, &ctx->d_frame);
ctx                50 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_set_yuv_order(ctx);
ctx                55 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_camera_offset(fimc, &ctx->s_frame);
ctx                57 drivers/media/platform/exynos4-is/fimc-capture.c 	ret = fimc_set_scaler_info(ctx);
ctx                59 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_input_path(ctx);
ctx                60 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_prescaler(ctx);
ctx                61 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_mainscaler(ctx);
ctx                62 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_target_format(ctx);
ctx                63 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_rotation(ctx);
ctx                64 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_effect(ctx);
ctx                65 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_output_path(ctx);
ctx                66 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_out_dma(ctx);
ctx                68 drivers/media/platform/exynos4-is/fimc-capture.c 			fimc_hw_set_rgb_alpha(ctx);
ctx               152 drivers/media/platform/exynos4-is/fimc-capture.c static int fimc_capture_config_update(struct fimc_ctx *ctx)
ctx               154 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               157 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_camera_offset(fimc, &ctx->s_frame);
ctx               159 drivers/media/platform/exynos4-is/fimc-capture.c 	ret = fimc_set_scaler_info(ctx);
ctx               163 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_prescaler(ctx);
ctx               164 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_mainscaler(ctx);
ctx               165 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_target_format(ctx);
ctx               166 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_rotation(ctx);
ctx               167 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_effect(ctx);
ctx               168 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_prepare_dma_offset(ctx, &ctx->d_frame);
ctx               169 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_hw_set_out_dma(ctx);
ctx               171 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_hw_set_rgb_alpha(ctx);
ctx               182 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *f = &cap->ctx->d_frame;
ctx               246 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_capture_config_update(cap->ctx);
ctx               260 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = q->drv_priv;
ctx               261 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               280 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_activate_capture(ctx);
ctx               291 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = q->drv_priv;
ctx               292 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               343 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vq->drv_priv;
ctx               344 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *frame = &ctx->d_frame;
ctx               378 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vq->drv_priv;
ctx               381 drivers/media/platform/exynos4-is/fimc-capture.c 	if (ctx->d_frame.fmt == NULL)
ctx               384 drivers/media/platform/exynos4-is/fimc-capture.c 	for (i = 0; i < ctx->d_frame.fmt->memplanes; i++) {
ctx               385 drivers/media/platform/exynos4-is/fimc-capture.c 		unsigned long size = ctx->d_frame.payload[i];
ctx               388 drivers/media/platform/exynos4-is/fimc-capture.c 			v4l2_err(&ctx->fimc_dev->vid_cap.ve.vdev,
ctx               404 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               405 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               412 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_prepare_addr(ctx, &buf->vb.vb2_buf, &ctx->d_frame, &buf->paddr);
ctx               439 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_activate_capture(ctx);
ctx               500 drivers/media/platform/exynos4-is/fimc-capture.c 			fimc_ctrls_delete(vc->ctx);
ctx               502 drivers/media/platform/exynos4-is/fimc-capture.c 			ret = fimc_ctrls_create(vc->ctx);
ctx               572 drivers/media/platform/exynos4-is/fimc-capture.c static struct fimc_fmt *fimc_capture_try_format(struct fimc_ctx *ctx,
ctx               576 drivers/media/platform/exynos4-is/fimc-capture.c 	bool rotation = ctx->rotation == 90 || ctx->rotation == 270;
ctx               577 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               580 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *dst = &ctx->d_frame;
ctx               586 drivers/media/platform/exynos4-is/fimc-capture.c 	if (code && ctx->s_frame.fmt && pad == FIMC_SD_PAD_SOURCE &&
ctx               587 drivers/media/platform/exynos4-is/fimc-capture.c 	    fimc_fmt_is_user_defined(ctx->s_frame.fmt->color))
ctx               588 drivers/media/platform/exynos4-is/fimc-capture.c 		*code = ctx->s_frame.fmt->mbus_code;
ctx               619 drivers/media/platform/exynos4-is/fimc-capture.c 		*width  = ctx->s_frame.f_width;
ctx               620 drivers/media/platform/exynos4-is/fimc-capture.c 		*height = ctx->s_frame.f_height;
ctx               625 drivers/media/platform/exynos4-is/fimc-capture.c 	if (ctx->state & FIMC_COMPOSE) {
ctx               649 drivers/media/platform/exynos4-is/fimc-capture.c static void fimc_capture_try_selection(struct fimc_ctx *ctx,
ctx               653 drivers/media/platform/exynos4-is/fimc-capture.c 	bool rotate = ctx->rotation == 90 || ctx->rotation == 270;
ctx               654 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               657 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *sink = &ctx->s_frame;
ctx               663 drivers/media/platform/exynos4-is/fimc-capture.c 	if (fimc_fmt_is_user_defined(ctx->d_frame.fmt->color)) {
ctx               673 drivers/media/platform/exynos4-is/fimc-capture.c 		if (ctx->rotation != 90 && ctx->rotation != 270)
ctx               768 drivers/media/platform/exynos4-is/fimc-capture.c static int fimc_pipeline_try_format(struct fimc_ctx *ctx,
ctx               773 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               835 drivers/media/platform/exynos4-is/fimc-capture.c 		ffmt = fimc_capture_try_format(ctx, &tfmt->width, &tfmt->height,
ctx               837 drivers/media/platform/exynos4-is/fimc-capture.c 		ffmt = fimc_capture_try_format(ctx, &tfmt->width, &tfmt->height,
ctx               904 drivers/media/platform/exynos4-is/fimc-capture.c 	__fimc_get_format(&fimc->vid_cap.ctx->d_frame, f);
ctx               921 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vc->ctx;
ctx               927 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_capture_try_format(ctx, &pix->width, &pix->height,
ctx               934 drivers/media/platform/exynos4-is/fimc-capture.c 			ctx->s_frame.f_width = pix->width;
ctx               935 drivers/media/platform/exynos4-is/fimc-capture.c 			ctx->s_frame.f_height = pix->height;
ctx               940 drivers/media/platform/exynos4-is/fimc-capture.c 	*out_fmt = fimc_capture_try_format(ctx, &pix->width, &pix->height,
ctx               964 drivers/media/platform/exynos4-is/fimc-capture.c 		ret = fimc_pipeline_try_format(ctx, mf, inp_fmt, try);
ctx              1003 drivers/media/platform/exynos4-is/fimc-capture.c static void fimc_capture_mark_jpeg_xfer(struct fimc_ctx *ctx,
ctx              1008 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->scaler.enabled = !jpeg;
ctx              1009 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_ctrls_activate(ctx, !jpeg);
ctx              1012 drivers/media/platform/exynos4-is/fimc-capture.c 		set_bit(ST_CAPT_JPEG, &ctx->fimc_dev->state);
ctx              1014 drivers/media/platform/exynos4-is/fimc-capture.c 		clear_bit(ST_CAPT_JPEG, &ctx->fimc_dev->state);
ctx              1021 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vc->ctx;
ctx              1023 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *ff = &ctx->d_frame;
ctx              1035 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_alpha_ctrl_update(ctx);
ctx              1044 drivers/media/platform/exynos4-is/fimc-capture.c 	if (!(ctx->state & FIMC_COMPOSE))
ctx              1047 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_capture_mark_jpeg_xfer(ctx, ff->fmt->color);
ctx              1051 drivers/media/platform/exynos4-is/fimc-capture.c 		ctx->s_frame.fmt = inp_fmt;
ctx              1052 drivers/media/platform/exynos4-is/fimc-capture.c 		set_frame_bounds(&ctx->s_frame, pix->width, pix->height);
ctx              1053 drivers/media/platform/exynos4-is/fimc-capture.c 		set_frame_crop(&ctx->s_frame, 0, 0, pix->width, pix->height);
ctx              1139 drivers/media/platform/exynos4-is/fimc-capture.c 			struct fimc_frame *ff = &vc->ctx->s_frame;
ctx              1167 drivers/media/platform/exynos4-is/fimc-capture.c 			struct fimc_frame *frame = &vc->ctx->d_frame;
ctx              1269 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx              1270 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *f = &ctx->s_frame;
ctx              1278 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1289 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1319 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx              1328 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1330 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->s_frame;
ctx              1334 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_capture_try_selection(ctx, &rect, s->target);
ctx              1419 drivers/media/platform/exynos4-is/fimc-capture.c 	return v4l2_ctrl_add_handler(&vc->ctx->ctrls.handler,
ctx              1494 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx              1495 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *ff = &ctx->s_frame;
ctx              1537 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = vc->ctx;
ctx              1548 drivers/media/platform/exynos4-is/fimc-capture.c 	ffmt = fimc_capture_try_format(ctx, &mf->width, &mf->height,
ctx              1563 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_alpha_ctrl_update(ctx);
ctx              1565 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_capture_mark_jpeg_xfer(ctx, ffmt->color);
ctx              1567 drivers/media/platform/exynos4-is/fimc-capture.c 		ff = &ctx->d_frame;
ctx              1569 drivers/media/platform/exynos4-is/fimc-capture.c 		mf->width = ctx->s_frame.width;
ctx              1570 drivers/media/platform/exynos4-is/fimc-capture.c 		mf->height = ctx->s_frame.height;
ctx              1572 drivers/media/platform/exynos4-is/fimc-capture.c 		ff = &ctx->s_frame;
ctx              1586 drivers/media/platform/exynos4-is/fimc-capture.c 	if (!(fmt->pad == FIMC_SD_PAD_SOURCE && (ctx->state & FIMC_COMPOSE)))
ctx              1590 drivers/media/platform/exynos4-is/fimc-capture.c 		ctx->state &= ~FIMC_COMPOSE;
ctx              1601 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx              1602 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *f = &ctx->s_frame;
ctx              1613 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1628 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1657 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx = fimc->vid_cap.ctx;
ctx              1658 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_frame *f = &ctx->s_frame;
ctx              1667 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_capture_try_selection(ctx, r, V4L2_SEL_TGT_CROP);
ctx              1675 drivers/media/platform/exynos4-is/fimc-capture.c 		f = &ctx->d_frame;
ctx              1689 drivers/media/platform/exynos4-is/fimc-capture.c 			ctx->state |= FIMC_COMPOSE;
ctx              1735 drivers/media/platform/exynos4-is/fimc-capture.c 	struct fimc_ctx *ctx;
ctx              1740 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1741 drivers/media/platform/exynos4-is/fimc-capture.c 	if (!ctx)
ctx              1744 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->fimc_dev	 = fimc;
ctx              1745 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->in_path	 = FIMC_IO_CAMERA;
ctx              1746 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->out_path	 = FIMC_IO_DMA;
ctx              1747 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->state	 = FIMC_CTX_CAP;
ctx              1748 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->s_frame.fmt = fimc_find_format(NULL, NULL, FMT_FLAGS_CAM, 0);
ctx              1749 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->d_frame.fmt = ctx->s_frame.fmt;
ctx              1767 drivers/media/platform/exynos4-is/fimc-capture.c 	vid_cap->ctx = ctx;
ctx              1775 drivers/media/platform/exynos4-is/fimc-capture.c 	q->drv_priv = ctx;
ctx              1793 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->s_frame.width = FIMC_DEFAULT_WIDTH;
ctx              1794 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->s_frame.height = FIMC_DEFAULT_HEIGHT;
ctx              1795 drivers/media/platform/exynos4-is/fimc-capture.c 	ctx->s_frame.fmt = fmt;
ctx              1807 drivers/media/platform/exynos4-is/fimc-capture.c 	ret = fimc_ctrls_create(ctx);
ctx              1818 drivers/media/platform/exynos4-is/fimc-capture.c 	vfd->ctrl_handler = &ctx->ctrls.handler;
ctx              1822 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc_ctrls_delete(ctx);
ctx              1826 drivers/media/platform/exynos4-is/fimc-capture.c 	kfree(ctx);
ctx              1869 drivers/media/platform/exynos4-is/fimc-capture.c 		fimc_ctrls_delete(fimc->vid_cap.ctx);
ctx              1872 drivers/media/platform/exynos4-is/fimc-capture.c 	kfree(fimc->vid_cap.ctx);
ctx              1873 drivers/media/platform/exynos4-is/fimc-capture.c 	fimc->vid_cap.ctx = NULL;
ctx               192 drivers/media/platform/exynos4-is/fimc-core.c int fimc_check_scaler_ratio(struct fimc_ctx *ctx, int sw, int sh,
ctx               198 drivers/media/platform/exynos4-is/fimc-core.c 	if (!ctx->scaler.enabled)
ctx               225 drivers/media/platform/exynos4-is/fimc-core.c int fimc_set_scaler_info(struct fimc_ctx *ctx)
ctx               227 drivers/media/platform/exynos4-is/fimc-core.c 	const struct fimc_variant *variant = ctx->fimc_dev->variant;
ctx               228 drivers/media/platform/exynos4-is/fimc-core.c 	struct device *dev = &ctx->fimc_dev->pdev->dev;
ctx               229 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_scaler *sc = &ctx->scaler;
ctx               230 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_frame *s_frame = &ctx->s_frame;
ctx               231 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_frame *d_frame = &ctx->d_frame;
ctx               235 drivers/media/platform/exynos4-is/fimc-core.c 	if (ctx->rotation == 90 || ctx->rotation == 270) {
ctx               293 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctx *ctx;
ctx               305 drivers/media/platform/exynos4-is/fimc-core.c 		ctx = v4l2_m2m_get_curr_priv(fimc->m2m.m2m_dev);
ctx               306 drivers/media/platform/exynos4-is/fimc-core.c 		if (ctx != NULL) {
ctx               308 drivers/media/platform/exynos4-is/fimc-core.c 			fimc_m2m_job_finish(ctx, VB2_BUF_STATE_DONE);
ctx               310 drivers/media/platform/exynos4-is/fimc-core.c 			if (ctx->state & FIMC_CTX_SHUT) {
ctx               311 drivers/media/platform/exynos4-is/fimc-core.c 				ctx->state &= ~FIMC_CTX_SHUT;
ctx               327 drivers/media/platform/exynos4-is/fimc-core.c int fimc_prepare_addr(struct fimc_ctx *ctx, struct vb2_buffer *vb,
ctx               382 drivers/media/platform/exynos4-is/fimc-core.c void fimc_set_yuv_order(struct fimc_ctx *ctx)
ctx               385 drivers/media/platform/exynos4-is/fimc-core.c 	ctx->in_order_2p = FIMC_REG_CIOCTRL_ORDER422_2P_LSB_CRCB;
ctx               386 drivers/media/platform/exynos4-is/fimc-core.c 	ctx->out_order_2p = FIMC_REG_CIOCTRL_ORDER422_2P_LSB_CRCB;
ctx               389 drivers/media/platform/exynos4-is/fimc-core.c 	switch (ctx->s_frame.fmt->color) {
ctx               391 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->in_order_1p = FIMC_REG_MSCTRL_ORDER422_YCRYCB;
ctx               394 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->in_order_1p = FIMC_REG_MSCTRL_ORDER422_CBYCRY;
ctx               397 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->in_order_1p = FIMC_REG_MSCTRL_ORDER422_CRYCBY;
ctx               401 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->in_order_1p = FIMC_REG_MSCTRL_ORDER422_YCBYCR;
ctx               404 drivers/media/platform/exynos4-is/fimc-core.c 	dbg("ctx->in_order_1p= %d", ctx->in_order_1p);
ctx               406 drivers/media/platform/exynos4-is/fimc-core.c 	switch (ctx->d_frame.fmt->color) {
ctx               408 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->out_order_1p = FIMC_REG_CIOCTRL_ORDER422_YCRYCB;
ctx               411 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->out_order_1p = FIMC_REG_CIOCTRL_ORDER422_CBYCRY;
ctx               414 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->out_order_1p = FIMC_REG_CIOCTRL_ORDER422_CRYCBY;
ctx               418 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->out_order_1p = FIMC_REG_CIOCTRL_ORDER422_YCBYCR;
ctx               421 drivers/media/platform/exynos4-is/fimc-core.c 	dbg("ctx->out_order_1p= %d", ctx->out_order_1p);
ctx               424 drivers/media/platform/exynos4-is/fimc-core.c void fimc_prepare_dma_offset(struct fimc_ctx *ctx, struct fimc_frame *f)
ctx               426 drivers/media/platform/exynos4-is/fimc-core.c 	bool pix_hoff = ctx->fimc_dev->drv_data->dma_pix_hoff;
ctx               459 drivers/media/platform/exynos4-is/fimc-core.c static int fimc_set_color_effect(struct fimc_ctx *ctx, enum v4l2_colorfx colorfx)
ctx               461 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_effect *effect = &ctx->effect;
ctx               491 drivers/media/platform/exynos4-is/fimc-core.c 		effect->pat_cb = ctx->ctrls.colorfx_cbcr->val >> 8;
ctx               492 drivers/media/platform/exynos4-is/fimc-core.c 		effect->pat_cr = ctx->ctrls.colorfx_cbcr->val & 0xff;
ctx               507 drivers/media/platform/exynos4-is/fimc-core.c static int __fimc_s_ctrl(struct fimc_ctx *ctx, struct v4l2_ctrl *ctrl)
ctx               509 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               518 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->hflip = ctrl->val;
ctx               522 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->vflip = ctrl->val;
ctx               527 drivers/media/platform/exynos4-is/fimc-core.c 			ret = fimc_check_scaler_ratio(ctx, ctx->s_frame.width,
ctx               528 drivers/media/platform/exynos4-is/fimc-core.c 					ctx->s_frame.height, ctx->d_frame.width,
ctx               529 drivers/media/platform/exynos4-is/fimc-core.c 					ctx->d_frame.height, ctrl->val);
ctx               537 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->rotation = ctrl->val;
ctx               541 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->d_frame.alpha = ctrl->val;
ctx               545 drivers/media/platform/exynos4-is/fimc-core.c 		ret = fimc_set_color_effect(ctx, ctrl->val);
ctx               551 drivers/media/platform/exynos4-is/fimc-core.c 	ctx->state |= FIMC_PARAMS;
ctx               558 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               562 drivers/media/platform/exynos4-is/fimc-core.c 	spin_lock_irqsave(&ctx->fimc_dev->slock, flags);
ctx               563 drivers/media/platform/exynos4-is/fimc-core.c 	ret = __fimc_s_ctrl(ctx, ctrl);
ctx               564 drivers/media/platform/exynos4-is/fimc-core.c 	spin_unlock_irqrestore(&ctx->fimc_dev->slock, flags);
ctx               573 drivers/media/platform/exynos4-is/fimc-core.c int fimc_ctrls_create(struct fimc_ctx *ctx)
ctx               575 drivers/media/platform/exynos4-is/fimc-core.c 	unsigned int max_alpha = fimc_get_alpha_mask(ctx->d_frame.fmt);
ctx               576 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctrls *ctrls = &ctx->ctrls;
ctx               579 drivers/media/platform/exynos4-is/fimc-core.c 	if (ctx->ctrls.ready)
ctx               591 drivers/media/platform/exynos4-is/fimc-core.c 	if (ctx->fimc_dev->drv_data->alpha_color)
ctx               605 drivers/media/platform/exynos4-is/fimc-core.c 	ctx->effect.type = FIMC_REG_CIIMGEFF_FIN_BYPASS;
ctx               615 drivers/media/platform/exynos4-is/fimc-core.c void fimc_ctrls_delete(struct fimc_ctx *ctx)
ctx               617 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctrls *ctrls = &ctx->ctrls;
ctx               626 drivers/media/platform/exynos4-is/fimc-core.c void fimc_ctrls_activate(struct fimc_ctx *ctx, bool active)
ctx               628 drivers/media/platform/exynos4-is/fimc-core.c 	unsigned int has_alpha = ctx->d_frame.fmt->flags & FMT_HAS_ALPHA;
ctx               629 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctrls *ctrls = &ctx->ctrls;
ctx               643 drivers/media/platform/exynos4-is/fimc-core.c 		fimc_set_color_effect(ctx, ctrls->colorfx->cur.val);
ctx               644 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->rotation = ctrls->rotate->val;
ctx               645 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->hflip    = ctrls->hflip->val;
ctx               646 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->vflip    = ctrls->vflip->val;
ctx               648 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->effect.type = FIMC_REG_CIIMGEFF_FIN_BYPASS;
ctx               649 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->rotation = 0;
ctx               650 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->hflip    = 0;
ctx               651 drivers/media/platform/exynos4-is/fimc-core.c 		ctx->vflip    = 0;
ctx               657 drivers/media/platform/exynos4-is/fimc-core.c void fimc_alpha_ctrl_update(struct fimc_ctx *ctx)
ctx               659 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               660 drivers/media/platform/exynos4-is/fimc-core.c 	struct v4l2_ctrl *ctrl = ctx->ctrls.alpha;
ctx               666 drivers/media/platform/exynos4-is/fimc-core.c 	ctrl->maximum = fimc_get_alpha_mask(ctx->d_frame.fmt);
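/*
 * A minimal sketch (not the driver's code) of the same job done with the
 * generic v4l2-ctrls helper: clamp an alpha control's range to whatever
 * the newly selected destination format can express. demo_* names are
 * hypothetical; v4l2_ctrl_modify_range() itself is the real API.
 */
#include <media/v4l2-ctrls.h>

static int demo_update_alpha_range(struct v4l2_ctrl *alpha, u32 max_alpha)
{
	/* keeps the current value if it still fits the new range */
	return v4l2_ctrl_modify_range(alpha, 0, max_alpha, 1, 0);
}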
ctx               846 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_ctx *ctx;
ctx               851 drivers/media/platform/exynos4-is/fimc-core.c 	ctx = fimc->m2m.ctx;
ctx               852 drivers/media/platform/exynos4-is/fimc-core.c 	fimc->m2m.ctx = NULL;
ctx               856 drivers/media/platform/exynos4-is/fimc-core.c 		fimc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx               273 drivers/media/platform/exynos4-is/fimc-core.h 	struct fimc_ctx		*ctx;
ctx               306 drivers/media/platform/exynos4-is/fimc-core.h 	struct fimc_ctx			*ctx;
ctx               543 drivers/media/platform/exynos4-is/fimc-core.h static inline void fimc_ctx_state_set(u32 state, struct fimc_ctx *ctx)
ctx               547 drivers/media/platform/exynos4-is/fimc-core.h 	spin_lock_irqsave(&ctx->fimc_dev->slock, flags);
ctx               548 drivers/media/platform/exynos4-is/fimc-core.h 	ctx->state |= state;
ctx               549 drivers/media/platform/exynos4-is/fimc-core.h 	spin_unlock_irqrestore(&ctx->fimc_dev->slock, flags);
ctx               552 drivers/media/platform/exynos4-is/fimc-core.h static inline bool fimc_ctx_state_is_set(u32 mask, struct fimc_ctx *ctx)
ctx               557 drivers/media/platform/exynos4-is/fimc-core.h 	spin_lock_irqsave(&ctx->fimc_dev->slock, flags);
ctx               558 drivers/media/platform/exynos4-is/fimc-core.h 	ret = (ctx->state & mask) == mask;
ctx               559 drivers/media/platform/exynos4-is/fimc-core.h 	spin_unlock_irqrestore(&ctx->fimc_dev->slock, flags);
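/*
 * Sketch of the natural third helper in the family above: clearing a
 * state bit under the same IRQ-safe spinlock that fimc_ctx_state_set()
 * and fimc_ctx_state_is_set() take. The reduced structures are
 * illustrative, not the real fimc_ctx/fimc_dev layout.
 */
#include <linux/spinlock.h>
#include <linux/types.h>

struct demo_dev { spinlock_t slock; };
struct demo_ctx { struct demo_dev *dev; u32 state; };

static inline void demo_ctx_state_clear(u32 state, struct demo_ctx *ctx)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->slock, flags);
	ctx->state &= ~state;	/* mirrors the |= in the set helper */
	spin_unlock_irqrestore(&ctx->dev->slock, flags);
}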
ctx               591 drivers/media/platform/exynos4-is/fimc-core.h static inline struct fimc_frame *ctx_get_frame(struct fimc_ctx *ctx,
ctx               598 drivers/media/platform/exynos4-is/fimc-core.h 		if (fimc_ctx_state_is_set(FIMC_CTX_M2M, ctx))
ctx               599 drivers/media/platform/exynos4-is/fimc-core.h 			frame = &ctx->s_frame;
ctx               604 drivers/media/platform/exynos4-is/fimc-core.h 		frame = &ctx->d_frame;
ctx               606 drivers/media/platform/exynos4-is/fimc-core.h 		v4l2_err(ctx->fimc_dev->v4l2_dev,
ctx               618 drivers/media/platform/exynos4-is/fimc-core.h int fimc_ctrls_create(struct fimc_ctx *ctx);
ctx               619 drivers/media/platform/exynos4-is/fimc-core.h void fimc_ctrls_delete(struct fimc_ctx *ctx);
ctx               620 drivers/media/platform/exynos4-is/fimc-core.h void fimc_ctrls_activate(struct fimc_ctx *ctx, bool active);
ctx               621 drivers/media/platform/exynos4-is/fimc-core.h void fimc_alpha_ctrl_update(struct fimc_ctx *ctx);
ctx               629 drivers/media/platform/exynos4-is/fimc-core.h int fimc_check_scaler_ratio(struct fimc_ctx *ctx, int sw, int sh,
ctx               631 drivers/media/platform/exynos4-is/fimc-core.h int fimc_set_scaler_info(struct fimc_ctx *ctx);
ctx               632 drivers/media/platform/exynos4-is/fimc-core.h int fimc_prepare_config(struct fimc_ctx *ctx, u32 flags);
ctx               633 drivers/media/platform/exynos4-is/fimc-core.h int fimc_prepare_addr(struct fimc_ctx *ctx, struct vb2_buffer *vb,
ctx               635 drivers/media/platform/exynos4-is/fimc-core.h void fimc_prepare_dma_offset(struct fimc_ctx *ctx, struct fimc_frame *f);
ctx               636 drivers/media/platform/exynos4-is/fimc-core.h void fimc_set_yuv_order(struct fimc_ctx *ctx);
ctx               656 drivers/media/platform/exynos4-is/fimc-core.h void fimc_m2m_job_finish(struct fimc_ctx *ctx, int vb_state);
ctx                39 drivers/media/platform/exynos4-is/fimc-m2m.c void fimc_m2m_job_finish(struct fimc_ctx *ctx, int vb_state)
ctx                43 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (!ctx || !ctx->fh.m2m_ctx)
ctx                46 drivers/media/platform/exynos4-is/fimc-m2m.c 	src_vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx                47 drivers/media/platform/exynos4-is/fimc-m2m.c 	dst_vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx                54 drivers/media/platform/exynos4-is/fimc-m2m.c 		v4l2_m2m_job_finish(ctx->fimc_dev->m2m.m2m_dev,
ctx                55 drivers/media/platform/exynos4-is/fimc-m2m.c 				    ctx->fh.m2m_ctx);
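/*
 * Reduced sketch of the job-finish pattern listed above (helper name is
 * illustrative): both buffers come off the m2m queues, are returned to
 * vb2 with the final state, and only then is the m2m core told the job
 * is done so it can schedule the next one.
 */
#include <media/v4l2-mem2mem.h>

static void demo_job_finish(struct v4l2_m2m_dev *m2m_dev,
			    struct v4l2_m2m_ctx *m2m_ctx,
			    enum vb2_buffer_state state)
{
	struct vb2_v4l2_buffer *src, *dst;

	src = v4l2_m2m_src_buf_remove(m2m_ctx);
	dst = v4l2_m2m_dst_buf_remove(m2m_ctx);
	if (src)
		v4l2_m2m_buf_done(src, state);
	if (dst)
		v4l2_m2m_buf_done(dst, state);

	v4l2_m2m_job_finish(m2m_dev, m2m_ctx);
}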
ctx                59 drivers/media/platform/exynos4-is/fimc-m2m.c static void fimc_m2m_shutdown(struct fimc_ctx *ctx)
ctx                61 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx                66 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_ctx_state_set(FIMC_CTX_SHUT, ctx);
ctx                69 drivers/media/platform/exynos4-is/fimc-m2m.c 			!fimc_ctx_state_is_set(FIMC_CTX_SHUT, ctx),
ctx                75 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = q->drv_priv;
ctx                78 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = pm_runtime_get_sync(&ctx->fimc_dev->pdev->dev);
ctx                84 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = q->drv_priv;
ctx                87 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_m2m_shutdown(ctx);
ctx                88 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_m2m_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx                89 drivers/media/platform/exynos4-is/fimc-m2m.c 	pm_runtime_put(&ctx->fimc_dev->pdev->dev);
ctx                95 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = priv;
ctx               101 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (WARN(!ctx, "Null context\n"))
ctx               104 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc = ctx->fimc_dev;
ctx               108 drivers/media/platform/exynos4-is/fimc-m2m.c 	sf = &ctx->s_frame;
ctx               109 drivers/media/platform/exynos4-is/fimc-m2m.c 	df = &ctx->d_frame;
ctx               111 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (ctx->state & FIMC_PARAMS) {
ctx               113 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_prepare_dma_offset(ctx, sf);
ctx               114 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_prepare_dma_offset(ctx, df);
ctx               117 drivers/media/platform/exynos4-is/fimc-m2m.c 	src_vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               118 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_prepare_addr(ctx, &src_vb->vb2_buf, sf, &sf->paddr);
ctx               122 drivers/media/platform/exynos4-is/fimc-m2m.c 	dst_vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               123 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_prepare_addr(ctx, &dst_vb->vb2_buf, df, &df->paddr);
ctx               133 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (fimc->m2m.ctx != ctx) {
ctx               134 drivers/media/platform/exynos4-is/fimc-m2m.c 		ctx->state |= FIMC_PARAMS;
ctx               135 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc->m2m.ctx = ctx;
ctx               138 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (ctx->state & FIMC_PARAMS) {
ctx               139 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_set_yuv_order(ctx);
ctx               140 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_input_path(ctx);
ctx               141 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_in_dma(ctx);
ctx               142 drivers/media/platform/exynos4-is/fimc-m2m.c 		ret = fimc_set_scaler_info(ctx);
ctx               145 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_prescaler(ctx);
ctx               146 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_mainscaler(ctx);
ctx               147 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_target_format(ctx);
ctx               148 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_rotation(ctx);
ctx               149 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_effect(ctx);
ctx               150 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_out_dma(ctx);
ctx               152 drivers/media/platform/exynos4-is/fimc-m2m.c 			fimc_hw_set_rgb_alpha(ctx);
ctx               153 drivers/media/platform/exynos4-is/fimc-m2m.c 		fimc_hw_set_output_path(ctx);
ctx               158 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_activate_capture(ctx);
ctx               159 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->state &= (FIMC_CTX_M2M | FIMC_CTX_CAP);
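/*
 * Reduced shape of the device_run sequence above, with hypothetical
 * demo_* stubs standing in for the register programming: fetch the next
 * buffer pair, set up the DMA addresses, reprogram the pipeline only
 * when parameters changed (the FIMC_PARAMS-style flag), then start the
 * hardware.
 */
#include <media/v4l2-mem2mem.h>
#include <media/v4l2-fh.h>

struct demo_run_ctx {
	struct v4l2_fh fh;	/* fh.m2m_ctx, as in the drivers above */
	bool params_dirty;	/* "reprogram the pipeline" flag */
};

static void demo_program_addresses(struct demo_run_ctx *ctx,
				   struct vb2_buffer *src,
				   struct vb2_buffer *dst) { /* stub */ }
static void demo_program_pipeline(struct demo_run_ctx *ctx) { /* stub */ }
static void demo_kick(struct demo_run_ctx *ctx) { /* stub */ }

static void demo_device_run(void *priv)
{
	struct demo_run_ctx *ctx = priv;
	struct vb2_v4l2_buffer *src, *dst;

	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);

	demo_program_addresses(ctx, &src->vb2_buf, &dst->vb2_buf);
	if (ctx->params_dirty) {
		demo_program_pipeline(ctx);
		ctx->params_dirty = false;
	}
	demo_kick(ctx);
}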
ctx               175 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = vb2_get_drv_priv(vq);
ctx               179 drivers/media/platform/exynos4-is/fimc-m2m.c 	f = ctx_get_frame(ctx, vq->type);
ctx               197 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               201 drivers/media/platform/exynos4-is/fimc-m2m.c 	frame = ctx_get_frame(ctx, vb->vb2_queue->type);
ctx               214 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               215 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               257 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(fh);
ctx               258 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_frame *frame = ctx_get_frame(ctx, f->type);
ctx               267 drivers/media/platform/exynos4-is/fimc-m2m.c static int fimc_try_fmt_mplane(struct fimc_ctx *ctx, struct v4l2_format *f)
ctx               269 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               316 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(fh);
ctx               317 drivers/media/platform/exynos4-is/fimc-m2m.c 	return fimc_try_fmt_mplane(ctx, f);
ctx               344 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(fh);
ctx               345 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               351 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_try_fmt_mplane(ctx, f);
ctx               355 drivers/media/platform/exynos4-is/fimc-m2m.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               363 drivers/media/platform/exynos4-is/fimc-m2m.c 		frame = &ctx->s_frame;
ctx               365 drivers/media/platform/exynos4-is/fimc-m2m.c 		frame = &ctx->d_frame;
ctx               375 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_alpha_ctrl_update(ctx);
ctx               383 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(fh);
ctx               386 drivers/media/platform/exynos4-is/fimc-m2m.c 	frame = ctx_get_frame(ctx, s->type);
ctx               430 drivers/media/platform/exynos4-is/fimc-m2m.c static int fimc_m2m_try_selection(struct fimc_ctx *ctx,
ctx               433 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               444 drivers/media/platform/exynos4-is/fimc-m2m.c 		f = &ctx->d_frame;
ctx               448 drivers/media/platform/exynos4-is/fimc-m2m.c 		f = &ctx->s_frame;
ctx               455 drivers/media/platform/exynos4-is/fimc-m2m.c 	min_size = (f == &ctx->s_frame) ?
ctx               491 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(fh);
ctx               492 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               496 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_m2m_try_selection(ctx, s);
ctx               501 drivers/media/platform/exynos4-is/fimc-m2m.c 		&ctx->s_frame : &ctx->d_frame;
ctx               505 drivers/media/platform/exynos4-is/fimc-m2m.c 		ret = fimc_check_scaler_ratio(ctx, s->r.width,
ctx               506 drivers/media/platform/exynos4-is/fimc-m2m.c 				s->r.height, ctx->d_frame.width,
ctx               507 drivers/media/platform/exynos4-is/fimc-m2m.c 				ctx->d_frame.height, ctx->rotation);
ctx               509 drivers/media/platform/exynos4-is/fimc-m2m.c 		ret = fimc_check_scaler_ratio(ctx, ctx->s_frame.width,
ctx               510 drivers/media/platform/exynos4-is/fimc-m2m.c 				ctx->s_frame.height, s->r.width,
ctx               511 drivers/media/platform/exynos4-is/fimc-m2m.c 				s->r.height, ctx->rotation);
ctx               523 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_ctx_state_set(FIMC_PARAMS, ctx);
ctx               553 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = priv;
ctx               558 drivers/media/platform/exynos4-is/fimc-m2m.c 	src_vq->drv_priv = ctx;
ctx               563 drivers/media/platform/exynos4-is/fimc-m2m.c 	src_vq->lock = &ctx->fimc_dev->lock;
ctx               564 drivers/media/platform/exynos4-is/fimc-m2m.c 	src_vq->dev = &ctx->fimc_dev->pdev->dev;
ctx               572 drivers/media/platform/exynos4-is/fimc-m2m.c 	dst_vq->drv_priv = ctx;
ctx               577 drivers/media/platform/exynos4-is/fimc-m2m.c 	dst_vq->lock = &ctx->fimc_dev->lock;
ctx               578 drivers/media/platform/exynos4-is/fimc-m2m.c 	dst_vq->dev = &ctx->fimc_dev->pdev->dev;
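/*
 * Sketch of an m2m queue_init callback like the listing above: the
 * source and destination vb2 queues are configured identically except
 * for the buffer type, and share the driver's lock and DMA device.
 * demo_vb2_ops is hypothetical and assumed to supply at least
 * queue_setup and buf_queue, which vb2_queue_init() requires.
 */
#include <linux/mutex.h>
#include <media/videobuf2-v4l2.h>
#include <media/videobuf2-dma-contig.h>
#include <media/v4l2-mem2mem.h>

extern const struct vb2_ops demo_vb2_ops;	/* hypothetical */

static int demo_queue_init_one(struct vb2_queue *vq, void *priv,
			       enum v4l2_buf_type type,
			       struct mutex *lock, struct device *dev)
{
	vq->type = type;
	vq->io_modes = VB2_MMAP | VB2_DMABUF;
	vq->drv_priv = priv;
	vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	vq->ops = &demo_vb2_ops;
	vq->mem_ops = &vb2_dma_contig_memops;
	vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	vq->lock = lock;
	vq->dev = dev;
	return vb2_queue_init(vq);
}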
ctx               583 drivers/media/platform/exynos4-is/fimc-m2m.c static int fimc_m2m_set_default_format(struct fimc_ctx *ctx)
ctx               600 drivers/media/platform/exynos4-is/fimc-m2m.c 	__set_frame_format(&ctx->s_frame, fmt, &pixm);
ctx               601 drivers/media/platform/exynos4-is/fimc-m2m.c 	__set_frame_format(&ctx->d_frame, fmt, &pixm);
ctx               609 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx;
ctx               623 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               624 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (!ctx) {
ctx               628 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_init(&ctx->fh, &fimc->m2m.vfd);
ctx               629 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->fimc_dev = fimc;
ctx               632 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->s_frame.fmt = fimc_get_format(0);
ctx               633 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->d_frame.fmt = fimc_get_format(0);
ctx               635 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_ctrls_create(ctx);
ctx               640 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->fh.ctrl_handler = &ctx->ctrls.handler;
ctx               641 drivers/media/platform/exynos4-is/fimc-m2m.c 	file->private_data = &ctx->fh;
ctx               642 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_add(&ctx->fh);
ctx               645 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->state = FIMC_CTX_M2M;
ctx               646 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->flags = 0;
ctx               647 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->in_path = FIMC_IO_DMA;
ctx               648 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->out_path = FIMC_IO_DMA;
ctx               649 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->scaler.enabled = 1;
ctx               651 drivers/media/platform/exynos4-is/fimc-m2m.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(fimc->m2m.m2m_dev, ctx, queue_init);
ctx               652 drivers/media/platform/exynos4-is/fimc-m2m.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               653 drivers/media/platform/exynos4-is/fimc-m2m.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               660 drivers/media/platform/exynos4-is/fimc-m2m.c 	ret = fimc_m2m_set_default_format(ctx);
ctx               668 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               670 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_ctrls_delete(ctx);
ctx               671 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_del(&ctx->fh);
ctx               673 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx               674 drivers/media/platform/exynos4-is/fimc-m2m.c 	kfree(ctx);
ctx               682 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               683 drivers/media/platform/exynos4-is/fimc-m2m.c 	struct fimc_dev *fimc = ctx->fimc_dev;
ctx               690 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               691 drivers/media/platform/exynos4-is/fimc-m2m.c 	fimc_ctrls_delete(ctx);
ctx               692 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_del(&ctx->fh);
ctx               693 drivers/media/platform/exynos4-is/fimc-m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx               697 drivers/media/platform/exynos4-is/fimc-m2m.c 	kfree(ctx);
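/*
 * Reduced sketch of the open()/release() pairing above: the error
 * ladder in open() undoes, in reverse order, exactly the steps that
 * succeeded, and release() performs the same teardown for the success
 * case. All demo_* names are illustrative stubs.
 */
#include <linux/slab.h>

struct demo_fh_ctx { int dummy; };

static int demo_setup_ctrls(struct demo_fh_ctx *ctx) { return 0; }	/* stub */
static void demo_delete_ctrls(struct demo_fh_ctx *ctx) { }		/* stub */
static int demo_init_m2m(struct demo_fh_ctx *ctx) { return 0; }		/* stub */

static int demo_open(void)
{
	struct demo_fh_ctx *ctx;
	int ret;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	ret = demo_setup_ctrls(ctx);	/* step 1 */
	if (ret)
		goto err_free;

	ret = demo_init_m2m(ctx);	/* step 2 */
	if (ret)
		goto err_ctrls;

	return 0;

err_ctrls:
	demo_delete_ctrls(ctx);		/* undo step 1 */
err_free:
	kfree(ctx);
	return ret;
}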
ctx                41 drivers/media/platform/exynos4-is/fimc-reg.c static u32 fimc_hw_get_in_flip(struct fimc_ctx *ctx)
ctx                45 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->hflip)
ctx                47 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->vflip)
ctx                50 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->rotation <= 90)
ctx                56 drivers/media/platform/exynos4-is/fimc-reg.c static u32 fimc_hw_get_target_flip(struct fimc_ctx *ctx)
ctx                60 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->hflip)
ctx                62 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->vflip)
ctx                65 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->rotation <= 90)
ctx                71 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_rotation(struct fimc_ctx *ctx)
ctx                74 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx                85 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->rotation == 90 || ctx->rotation == 270) {
ctx                86 drivers/media/platform/exynos4-is/fimc-reg.c 		if (ctx->out_path == FIMC_IO_LCDFIFO)
ctx                92 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->out_path == FIMC_IO_DMA) {
ctx                93 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= fimc_hw_get_target_flip(ctx);
ctx                99 drivers/media/platform/exynos4-is/fimc-reg.c 		flip |= fimc_hw_get_in_flip(ctx);
ctx               104 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_target_format(struct fimc_ctx *ctx)
ctx               107 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               108 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->d_frame;
ctx               134 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->rotation == 90 || ctx->rotation == 270)
ctx               147 drivers/media/platform/exynos4-is/fimc-reg.c static void fimc_hw_set_out_dma_size(struct fimc_ctx *ctx)
ctx               149 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               150 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->d_frame;
ctx               166 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_out_dma(struct fimc_ctx *ctx)
ctx               168 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               169 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->d_frame;
ctx               184 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_set_out_dma_size(ctx);
ctx               195 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= ctx->out_order_1p;
ctx               197 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= ctx->out_order_2p | FIMC_REG_CIOCTRL_YCBCR_2PLANE;
ctx               231 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_prescaler(struct fimc_ctx *ctx)
ctx               233 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev =  ctx->fimc_dev;
ctx               234 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
ctx               247 drivers/media/platform/exynos4-is/fimc-reg.c static void fimc_hw_set_scaler(struct fimc_ctx *ctx)
ctx               249 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               250 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
ctx               251 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *src_frame = &ctx->s_frame;
ctx               252 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *dst_frame = &ctx->d_frame;
ctx               262 drivers/media/platform/exynos4-is/fimc-reg.c 	if (!(ctx->flags & FIMC_COLOR_RANGE_NARROW))
ctx               278 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->in_path == FIMC_IO_DMA) {
ctx               292 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->out_path == FIMC_IO_DMA) {
ctx               304 drivers/media/platform/exynos4-is/fimc-reg.c 		if (ctx->flags & FIMC_SCAN_MODE_INTERLACED)
ctx               311 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_mainscaler(struct fimc_ctx *ctx)
ctx               313 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               315 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
ctx               321 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_set_scaler(ctx);
ctx               346 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_enable_capture(struct fimc_ctx *ctx)
ctx               348 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               354 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->scaler.enabled)
ctx               371 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_effect(struct fimc_ctx *ctx)
ctx               373 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               374 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_effect *effect = &ctx->effect;
ctx               388 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_rgb_alpha(struct fimc_ctx *ctx)
ctx               390 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               391 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->d_frame;
ctx               403 drivers/media/platform/exynos4-is/fimc-reg.c static void fimc_hw_set_in_dma_size(struct fimc_ctx *ctx)
ctx               405 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               406 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->s_frame;
ctx               410 drivers/media/platform/exynos4-is/fimc-reg.c 	if (FIMC_IO_LCDFIFO == ctx->out_path)
ctx               420 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_in_dma(struct fimc_ctx *ctx)
ctx               422 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               423 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *frame = &ctx->s_frame;
ctx               438 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_set_in_dma_size(ctx);
ctx               441 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_en_autoload(dev, ctx->out_path == FIMC_IO_LCDFIFO);
ctx               464 drivers/media/platform/exynos4-is/fimc-reg.c 			cfg |= ctx->in_order_2p | FIMC_REG_MSCTRL_C_INT_IN_2PLANE;
ctx               471 drivers/media/platform/exynos4-is/fimc-reg.c 			cfg |= ctx->in_order_1p
ctx               477 drivers/media/platform/exynos4-is/fimc-reg.c 				cfg |= ctx->in_order_2p
ctx               493 drivers/media/platform/exynos4-is/fimc-reg.c 	if (tiled_fmt(ctx->s_frame.fmt))
ctx               496 drivers/media/platform/exynos4-is/fimc-reg.c 	if (tiled_fmt(ctx->d_frame.fmt))
ctx               503 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_input_path(struct fimc_ctx *ctx)
ctx               505 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               510 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->in_path == FIMC_IO_DMA)
ctx               518 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_hw_set_output_path(struct fimc_ctx *ctx)
ctx               520 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_dev *dev = ctx->fimc_dev;
ctx               524 drivers/media/platform/exynos4-is/fimc-reg.c 	if (ctx->out_path == FIMC_IO_LCDFIFO)
ctx               602 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_frame *f = &vc->ctx->s_frame;
ctx               787 drivers/media/platform/exynos4-is/fimc-reg.c void fimc_activate_capture(struct fimc_ctx *ctx)
ctx               789 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_enable_scaler(ctx->fimc_dev, ctx->scaler.enabled);
ctx               790 drivers/media/platform/exynos4-is/fimc-reg.c 	fimc_hw_enable_capture(ctx);
ctx               292 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_rotation(struct fimc_ctx *ctx);
ctx               293 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_target_format(struct fimc_ctx *ctx);
ctx               294 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_out_dma(struct fimc_ctx *ctx);
ctx               297 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_prescaler(struct fimc_ctx *ctx);
ctx               298 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_mainscaler(struct fimc_ctx *ctx);
ctx               299 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_enable_capture(struct fimc_ctx *ctx);
ctx               300 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_effect(struct fimc_ctx *ctx);
ctx               301 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_rgb_alpha(struct fimc_ctx *ctx);
ctx               302 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_in_dma(struct fimc_ctx *ctx);
ctx               303 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_input_path(struct fimc_ctx *ctx);
ctx               304 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_hw_set_output_path(struct fimc_ctx *ctx);
ctx               322 drivers/media/platform/exynos4-is/fimc-reg.h void fimc_activate_capture(struct fimc_ctx *ctx);
ctx               227 drivers/media/platform/imx-pxp.c static struct pxp_q_data *get_q_data(struct pxp_ctx *ctx,
ctx               231 drivers/media/platform/imx-pxp.c 		return &ctx->q_data[V4L2_M2M_SRC];
ctx               233 drivers/media/platform/imx-pxp.c 		return &ctx->q_data[V4L2_M2M_DST];
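/*
 * A reduced sketch of the per-direction queue lookup above: in a V4L2
 * mem2mem driver the OUTPUT queue feeds the device (source) and the
 * CAPTURE queue receives from it (destination). The demo_* structures
 * are illustrative, not the real pxp_q_data layout.
 */
#include <linux/videodev2.h>

enum { DEMO_SRC, DEMO_DST, DEMO_NUM_Q };

struct demo_q_data { u32 fourcc, width, height; };
struct demo_q_ctx { struct demo_q_data q_data[DEMO_NUM_Q]; };

static struct demo_q_data *demo_get_q_data(struct demo_q_ctx *ctx,
					   enum v4l2_buf_type type)
{
	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:	/* userspace -> device */
		return &ctx->q_data[DEMO_SRC];
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:	/* device -> userspace */
		return &ctx->q_data[DEMO_DST];
	default:
		return NULL;
	}
}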
ctx               309 drivers/media/platform/imx-pxp.c static void pxp_setup_csc(struct pxp_ctx *ctx)
ctx               311 drivers/media/platform/imx-pxp.c 	struct pxp_dev *dev = ctx->dev;
ctx               315 drivers/media/platform/imx-pxp.c 	if (pxp_v4l2_pix_fmt_is_yuv(ctx->q_data[V4L2_M2M_SRC].fmt->fourcc) &&
ctx               316 drivers/media/platform/imx-pxp.c 	    !pxp_v4l2_pix_fmt_is_yuv(ctx->q_data[V4L2_M2M_DST].fmt->fourcc)) {
ctx               463 drivers/media/platform/imx-pxp.c 		ycbcr_enc = ctx->q_data[V4L2_M2M_SRC].ycbcr_enc;
ctx               464 drivers/media/platform/imx-pxp.c 		quantization = ctx->q_data[V4L2_M2M_SRC].quant;
ctx               495 drivers/media/platform/imx-pxp.c 	if (!pxp_v4l2_pix_fmt_is_yuv(ctx->q_data[V4L2_M2M_SRC].fmt->fourcc) &&
ctx               496 drivers/media/platform/imx-pxp.c 	    pxp_v4l2_pix_fmt_is_yuv(ctx->q_data[V4L2_M2M_DST].fmt->fourcc)) {
ctx               676 drivers/media/platform/imx-pxp.c 		ycbcr_enc = ctx->q_data[V4L2_M2M_DST].ycbcr_enc;
ctx               677 drivers/media/platform/imx-pxp.c 		quantization = ctx->q_data[V4L2_M2M_DST].quant;
ctx               720 drivers/media/platform/imx-pxp.c static int pxp_start(struct pxp_ctx *ctx, struct vb2_v4l2_buffer *in_vb,
ctx               723 drivers/media/platform/imx-pxp.c 	struct pxp_dev *dev = ctx->dev;
ctx               735 drivers/media/platform/imx-pxp.c 	q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               737 drivers/media/platform/imx-pxp.c 	src_width = ctx->q_data[V4L2_M2M_SRC].width;
ctx               738 drivers/media/platform/imx-pxp.c 	dst_width = ctx->q_data[V4L2_M2M_DST].width;
ctx               739 drivers/media/platform/imx-pxp.c 	src_height = ctx->q_data[V4L2_M2M_SRC].height;
ctx               740 drivers/media/platform/imx-pxp.c 	dst_height = ctx->q_data[V4L2_M2M_DST].height;
ctx               741 drivers/media/platform/imx-pxp.c 	src_stride = ctx->q_data[V4L2_M2M_SRC].bytesperline;
ctx               742 drivers/media/platform/imx-pxp.c 	dst_stride = ctx->q_data[V4L2_M2M_DST].bytesperline;
ctx               743 drivers/media/platform/imx-pxp.c 	src_fourcc = ctx->q_data[V4L2_M2M_SRC].fmt->fourcc;
ctx               744 drivers/media/platform/imx-pxp.c 	dst_fourcc = ctx->q_data[V4L2_M2M_DST].fmt->fourcc;
ctx               756 drivers/media/platform/imx-pxp.c 		get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE)->sequence++;
ctx               771 drivers/media/platform/imx-pxp.c 	ctrl = BF_PXP_CTRL_VFLIP0(!!(ctx->mode & MEM2MEM_VFLIP)) |
ctx               772 drivers/media/platform/imx-pxp.c 	       BF_PXP_CTRL_HFLIP0(!!(ctx->mode & MEM2MEM_HFLIP));
ctx               774 drivers/media/platform/imx-pxp.c 	out_ctrl = BF_PXP_OUT_CTRL_ALPHA(ctx->alpha_component) |
ctx               895 drivers/media/platform/imx-pxp.c 	pxp_setup_csc(ctx);
ctx               962 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = priv;
ctx               965 drivers/media/platform/imx-pxp.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               966 drivers/media/platform/imx-pxp.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               968 drivers/media/platform/imx-pxp.c 	pxp_start(ctx, src_buf, dst_buf);
ctx               973 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = priv;
ctx               975 drivers/media/platform/imx-pxp.c 	if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < 1 ||
ctx               976 drivers/media/platform/imx-pxp.c 	    v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx) < 1) {
ctx               977 drivers/media/platform/imx-pxp.c 		dprintk(ctx->dev, "Not enough buffers available\n");
ctx               986 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = priv;
ctx               989 drivers/media/platform/imx-pxp.c 	ctx->aborting = 1;
ctx              1078 drivers/media/platform/imx-pxp.c static int pxp_g_fmt(struct pxp_ctx *ctx, struct v4l2_format *f)
ctx              1083 drivers/media/platform/imx-pxp.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx              1087 drivers/media/platform/imx-pxp.c 	q_data = get_q_data(ctx, f->type);
ctx              1095 drivers/media/platform/imx-pxp.c 	f->fmt.pix.colorspace	= ctx->colorspace;
ctx              1096 drivers/media/platform/imx-pxp.c 	f->fmt.pix.xfer_func	= ctx->xfer_func;
ctx              1149 drivers/media/platform/imx-pxp.c pxp_fixup_colorimetry_cap(struct pxp_ctx *ctx, u32 dst_fourcc,
ctx              1155 drivers/media/platform/imx-pxp.c 	if (pxp_v4l2_pix_fmt_is_yuv(ctx->q_data[V4L2_M2M_SRC].fmt->fourcc) ==
ctx              1161 drivers/media/platform/imx-pxp.c 		*ycbcr_enc = ctx->q_data[V4L2_M2M_SRC].ycbcr_enc;
ctx              1162 drivers/media/platform/imx-pxp.c 		*quantization = ctx->q_data[V4L2_M2M_SRC].quant;
ctx              1164 drivers/media/platform/imx-pxp.c 		*ycbcr_enc = V4L2_MAP_YCBCR_ENC_DEFAULT(ctx->colorspace);
ctx              1166 drivers/media/platform/imx-pxp.c 							      ctx->colorspace,
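/*
 * Reduced sketch of the defaulting logic above: when the conversion
 * crosses the RGB/YUV boundary, the encoding and quantization are
 * derived from the colorspace with the standard v4l2 defaulting macros
 * (both live in include/uapi/linux/videodev2.h). The helper wrapper is
 * hypothetical.
 */
#include <linux/videodev2.h>

static void demo_default_colorimetry(enum v4l2_colorspace cs, bool is_rgb,
				     enum v4l2_ycbcr_encoding *enc,
				     enum v4l2_quantization *quant)
{
	*enc = V4L2_MAP_YCBCR_ENC_DEFAULT(cs);
	*quant = V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb, cs, *enc);
}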
ctx              1175 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = file2ctx(file);
ctx              1183 drivers/media/platform/imx-pxp.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx              1189 drivers/media/platform/imx-pxp.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx              1190 drivers/media/platform/imx-pxp.c 	f->fmt.pix.xfer_func = ctx->xfer_func;
ctx              1192 drivers/media/platform/imx-pxp.c 	pxp_fixup_colorimetry_cap(ctx, fmt->fourcc,
ctx              1203 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = file2ctx(file);
ctx              1211 drivers/media/platform/imx-pxp.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx              1223 drivers/media/platform/imx-pxp.c static int pxp_s_fmt(struct pxp_ctx *ctx, struct v4l2_format *f)
ctx              1228 drivers/media/platform/imx-pxp.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx              1232 drivers/media/platform/imx-pxp.c 	q_data = get_q_data(ctx, f->type);
ctx              1237 drivers/media/platform/imx-pxp.c 		v4l2_err(&ctx->dev->v4l2_dev, "%s queue busy\n", __func__);
ctx              1247 drivers/media/platform/imx-pxp.c 	dprintk(ctx->dev,
ctx              1257 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = file2ctx(file);
ctx              1268 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_DST].ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx              1269 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_DST].quant = f->fmt.pix.quantization;
ctx              1277 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = file2ctx(file);
ctx              1288 drivers/media/platform/imx-pxp.c 	ctx->colorspace = f->fmt.pix.colorspace;
ctx              1289 drivers/media/platform/imx-pxp.c 	ctx->xfer_func = f->fmt.pix.xfer_func;
ctx              1290 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx              1291 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].quant = f->fmt.pix.quantization;
ctx              1293 drivers/media/platform/imx-pxp.c 	pxp_fixup_colorimetry_cap(ctx, ctx->q_data[V4L2_M2M_DST].fmt->fourcc,
ctx              1294 drivers/media/platform/imx-pxp.c 				  &ctx->q_data[V4L2_M2M_DST].ycbcr_enc,
ctx              1295 drivers/media/platform/imx-pxp.c 				  &ctx->q_data[V4L2_M2M_DST].quant);
ctx              1302 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx =
ctx              1308 drivers/media/platform/imx-pxp.c 			ctx->mode |= MEM2MEM_HFLIP;
ctx              1310 drivers/media/platform/imx-pxp.c 			ctx->mode &= ~MEM2MEM_HFLIP;
ctx              1315 drivers/media/platform/imx-pxp.c 			ctx->mode |= MEM2MEM_VFLIP;
ctx              1317 drivers/media/platform/imx-pxp.c 			ctx->mode &= ~MEM2MEM_VFLIP;
ctx              1321 drivers/media/platform/imx-pxp.c 		ctx->alpha_component = ctrl->val;
ctx              1325 drivers/media/platform/imx-pxp.c 		v4l2_err(&ctx->dev->v4l2_dev, "Invalid control\n");
ctx              1371 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1375 drivers/media/platform/imx-pxp.c 	q_data = get_q_data(ctx, vq->type);
ctx              1387 drivers/media/platform/imx-pxp.c 	dprintk(ctx->dev, "get %d buffer(s) of size %d each.\n", count, size);
ctx              1395 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1396 drivers/media/platform/imx-pxp.c 	struct pxp_dev *dev = ctx->dev;
ctx              1399 drivers/media/platform/imx-pxp.c 	dprintk(ctx->dev, "type: %d\n", vb->vb2_queue->type);
ctx              1401 drivers/media/platform/imx-pxp.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1426 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1428 drivers/media/platform/imx-pxp.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1433 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = vb2_get_drv_priv(q);
ctx              1434 drivers/media/platform/imx-pxp.c 	struct pxp_q_data *q_data = get_q_data(ctx, q->type);
ctx              1442 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = vb2_get_drv_priv(q);
ctx              1448 drivers/media/platform/imx-pxp.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1450 drivers/media/platform/imx-pxp.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1453 drivers/media/platform/imx-pxp.c 		spin_lock_irqsave(&ctx->dev->irqlock, flags);
ctx              1455 drivers/media/platform/imx-pxp.c 		spin_unlock_irqrestore(&ctx->dev->irqlock, flags);
ctx              1472 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = priv;
ctx              1477 drivers/media/platform/imx-pxp.c 	src_vq->drv_priv = ctx;
ctx              1482 drivers/media/platform/imx-pxp.c 	src_vq->lock = &ctx->dev->dev_mutex;
ctx              1483 drivers/media/platform/imx-pxp.c 	src_vq->dev = ctx->dev->v4l2_dev.dev;
ctx              1491 drivers/media/platform/imx-pxp.c 	dst_vq->drv_priv = ctx;
ctx              1496 drivers/media/platform/imx-pxp.c 	dst_vq->lock = &ctx->dev->dev_mutex;
ctx              1497 drivers/media/platform/imx-pxp.c 	dst_vq->dev = ctx->dev->v4l2_dev.dev;
ctx              1508 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = NULL;
ctx              1514 drivers/media/platform/imx-pxp.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1515 drivers/media/platform/imx-pxp.c 	if (!ctx) {
ctx              1520 drivers/media/platform/imx-pxp.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              1521 drivers/media/platform/imx-pxp.c 	file->private_data = &ctx->fh;
ctx              1522 drivers/media/platform/imx-pxp.c 	ctx->dev = dev;
ctx              1523 drivers/media/platform/imx-pxp.c 	hdl = &ctx->hdl;
ctx              1532 drivers/media/platform/imx-pxp.c 		kfree(ctx);
ctx              1535 drivers/media/platform/imx-pxp.c 	ctx->fh.ctrl_handler = hdl;
ctx              1538 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].fmt = &formats[0];
ctx              1539 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].width = 640;
ctx              1540 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].height = 480;
ctx              1541 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].bytesperline =
ctx              1543 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_SRC].sizeimage =
ctx              1545 drivers/media/platform/imx-pxp.c 	ctx->q_data[V4L2_M2M_DST] = ctx->q_data[V4L2_M2M_SRC];
ctx              1546 drivers/media/platform/imx-pxp.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx              1548 drivers/media/platform/imx-pxp.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
ctx              1550 drivers/media/platform/imx-pxp.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1551 drivers/media/platform/imx-pxp.c 		rc = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1554 drivers/media/platform/imx-pxp.c 		v4l2_fh_exit(&ctx->fh);
ctx              1555 drivers/media/platform/imx-pxp.c 		kfree(ctx);
ctx              1559 drivers/media/platform/imx-pxp.c 	v4l2_fh_add(&ctx->fh);
ctx              1563 drivers/media/platform/imx-pxp.c 		ctx, ctx->fh.m2m_ctx);
ctx              1573 drivers/media/platform/imx-pxp.c 	struct pxp_ctx *ctx = file2ctx(file);
ctx              1575 drivers/media/platform/imx-pxp.c 	dprintk(dev, "Releasing instance %p\n", ctx);
ctx              1577 drivers/media/platform/imx-pxp.c 	v4l2_fh_del(&ctx->fh);
ctx              1578 drivers/media/platform/imx-pxp.c 	v4l2_fh_exit(&ctx->fh);
ctx              1579 drivers/media/platform/imx-pxp.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx              1581 drivers/media/platform/imx-pxp.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1583 drivers/media/platform/imx-pxp.c 	kfree(ctx);
ctx               150 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               151 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_dev *pcdev = ctx->dev;
ctx               153 drivers/media/platform/m2m-deinterlace.c 	if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) > 0 &&
ctx               154 drivers/media/platform/m2m-deinterlace.c 	    v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx) > 0 &&
ctx               155 drivers/media/platform/m2m-deinterlace.c 	    !atomic_read(&ctx->dev->busy)) {
ctx               167 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               168 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_dev *pcdev = ctx->dev;
ctx               170 drivers/media/platform/m2m-deinterlace.c 	ctx->aborting = 1;
ctx               174 drivers/media/platform/m2m-deinterlace.c 	v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->fh.m2m_ctx);
ctx               202 drivers/media/platform/m2m-deinterlace.c static void deinterlace_issue_dma(struct deinterlace_ctx *ctx, int op,
ctx               207 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_dev *pcdev = ctx->dev;
ctx               216 drivers/media/platform/m2m-deinterlace.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               217 drivers/media/platform/m2m-deinterlace.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               235 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               236 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width;
ctx               237 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width;
ctx               238 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in;
ctx               239 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out;
ctx               242 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               243 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width;
ctx               244 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width;
ctx               245 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + s_size / 2;
ctx               246 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_width;
ctx               249 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               250 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               251 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               252 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + s_size;
ctx               253 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_size;
ctx               256 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               257 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               258 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               259 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + (9 * s_size) / 8;
ctx               260 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_size + s_width / 2;
ctx               263 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               264 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               265 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               266 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + (5 * s_size) / 4;
ctx               267 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + (5 * s_size) / 4;
ctx               270 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               271 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               272 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               273 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + (11 * s_size) / 8;
ctx               274 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + (5 * s_size) / 4 + s_width / 2;
ctx               277 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               278 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width;
ctx               279 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width;
ctx               280 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in;
ctx               281 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_width;
ctx               284 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               285 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               286 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               287 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + s_size;
ctx               288 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_size + s_width / 2;
ctx               291 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 4;
ctx               292 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width / 2;
ctx               293 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width / 2;
ctx               294 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + (5 * s_size) / 4;
ctx               295 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + (5 * s_size) / 4 + s_width / 2;
ctx               298 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               299 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width * 2;
ctx               300 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width * 2;
ctx               301 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in;
ctx               302 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out;
ctx               305 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               306 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width * 2;
ctx               307 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width * 2;
ctx               308 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in + s_size;
ctx               309 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_width * 2;
ctx               313 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->numf = s_height / 2;
ctx               314 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].size = s_width * 2;
ctx               315 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->sgl[0].icg = s_width * 2;
ctx               316 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->src_start = p_in;
ctx               317 drivers/media/platform/m2m-deinterlace.c 		ctx->xt->dst_start = p_out + s_width * 2;
ctx               322 drivers/media/platform/m2m-deinterlace.c 	ctx->xt->frame_size = 1;
ctx               323 drivers/media/platform/m2m-deinterlace.c 	ctx->xt->dir = DMA_MEM_TO_MEM;
ctx               324 drivers/media/platform/m2m-deinterlace.c 	ctx->xt->src_sgl = false;
ctx               325 drivers/media/platform/m2m-deinterlace.c 	ctx->xt->dst_sgl = true;
ctx               328 drivers/media/platform/m2m-deinterlace.c 	tx = dmadev->device_prep_interleaved_dma(chan, ctx->xt, flags);
ctx               336 drivers/media/platform/m2m-deinterlace.c 		tx->callback_param = ctx;
ctx               339 drivers/media/platform/m2m-deinterlace.c 	ctx->cookie = dmaengine_submit(tx);
ctx               340 drivers/media/platform/m2m-deinterlace.c 	if (dma_submit_error(ctx->cookie)) {
ctx               343 drivers/media/platform/m2m-deinterlace.c 			  ctx->cookie, (unsigned)p_in, (unsigned)p_out,
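
The tail of deinterlace_issue_dma() above is the standard dmaengine interleaved-transfer sequence: finish filling in the dma_interleaved_template, prepare a descriptor, attach a completion callback, submit, and check the returned cookie. A condensed sketch of that sequence; dmaengine_prep_interleaved_dma() is the generic wrapper around the device_prep_interleaved_dma op the driver calls directly, the channel, template and callback are taken as already set up, and the error values are illustrative:

#include <linux/dmaengine.h>

static int demo_issue_interleaved(struct dma_chan *chan,
				  struct dma_interleaved_template *xt,
				  dma_async_tx_callback cb, void *cb_arg)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	/* One frame of numf chunks, memory to memory, scattered dst side. */
	xt->frame_size = 1;
	xt->dir = DMA_MEM_TO_MEM;
	xt->src_sgl = false;
	xt->dst_sgl = true;

	tx = dmaengine_prep_interleaved_dma(chan, xt,
					    DMA_CTRL_ACK | DMA_PREP_INTERRUPT);
	if (!tx)
		return -EINVAL;

	tx->callback = cb;		/* runs when the copy completes */
	tx->callback_param = cb_arg;

	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* actually start the engine */
	return 0;
}
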
ctx               353 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               356 drivers/media/platform/m2m-deinterlace.c 	atomic_set(&ctx->dev->busy, 1);
ctx               358 drivers/media/platform/m2m-deinterlace.c 	dprintk(ctx->dev, "%s: DMA try issue.\n", __func__);
ctx               382 drivers/media/platform/m2m-deinterlace.c 			dprintk(ctx->dev, "%s: yuv420 interlaced tb.\n",
ctx               384 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD, 0);
ctx               385 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_Y_EVEN, 0);
ctx               386 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD, 0);
ctx               387 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_U_EVEN, 0);
ctx               388 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD, 0);
ctx               389 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_V_EVEN, 1);
ctx               393 drivers/media/platform/m2m-deinterlace.c 			dprintk(ctx->dev, "%s: yuv420 interlaced line doubling.\n",
ctx               395 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD, 0);
ctx               396 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_Y_ODD_DOUBLING, 0);
ctx               397 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD, 0);
ctx               398 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_U_ODD_DOUBLING, 0);
ctx               399 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD, 0);
ctx               400 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUV420_DMA_V_ODD_DOUBLING, 1);
ctx               409 drivers/media/platform/m2m-deinterlace.c 			dprintk(ctx->dev, "%s: yuyv interlaced_tb.\n",
ctx               411 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUYV_DMA_ODD, 0);
ctx               412 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUYV_DMA_EVEN, 1);
ctx               416 drivers/media/platform/m2m-deinterlace.c 			dprintk(ctx->dev, "%s: yuyv interlaced line doubling.\n",
ctx               418 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUYV_DMA_ODD, 0);
ctx               419 drivers/media/platform/m2m-deinterlace.c 			deinterlace_issue_dma(ctx, YUYV_DMA_EVEN_DOUBLING, 1);
ctx               425 drivers/media/platform/m2m-deinterlace.c 	dprintk(ctx->dev, "%s: DMA issue done.\n", __func__);
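
The constants in the deinterlace_issue_dma() switch above fall out of planar YUV 4:2:0 layout: for an s_width x s_height frame the luma plane is s_size = s_width * s_height bytes, each chroma plane is s_size/4, and one field of a chroma plane is s_size/8, which is where the 9/8, 5/4 and 11/8 factors come from. A self-contained check in plain userspace C; the geometry is an arbitrary example and the field labels are inferred from the offsets:

#include <stdio.h>

int main(void)
{
	unsigned int s_width = 720, s_height = 576;	/* example frame */
	unsigned int s_size = s_width * s_height;	/* luma plane bytes */

	printf("Y  second field: +%u (s_size/2)\n", s_size / 2);
	printf("U  first field:  +%u (s_size)\n", s_size);
	printf("U  second field: +%u (9*s_size/8)\n", 9 * s_size / 8);
	printf("V  first field:  +%u (5*s_size/4)\n", 5 * s_size / 4);
	printf("V  second field: +%u (11*s_size/8)\n", 11 * s_size / 8);
	return 0;
}
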
ctx               481 drivers/media/platform/m2m-deinterlace.c static int vidioc_g_fmt(struct deinterlace_ctx *ctx, struct v4l2_format *f)
ctx               486 drivers/media/platform/m2m-deinterlace.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               507 drivers/media/platform/m2m-deinterlace.c 	f->fmt.pix.colorspace	= ctx->colorspace;
ctx               543 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               549 drivers/media/platform/m2m-deinterlace.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx               578 drivers/media/platform/m2m-deinterlace.c static int vidioc_s_fmt(struct deinterlace_ctx *ctx, struct v4l2_format *f)
ctx               583 drivers/media/platform/m2m-deinterlace.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               592 drivers/media/platform/m2m-deinterlace.c 		v4l2_err(&ctx->dev->v4l2_dev, "%s queue busy\n", __func__);
ctx               598 drivers/media/platform/m2m-deinterlace.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               620 drivers/media/platform/m2m-deinterlace.c 	dprintk(ctx->dev,
ctx               642 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               651 drivers/media/platform/m2m-deinterlace.c 		ctx->colorspace = f->fmt.pix.colorspace;
ctx               660 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               667 drivers/media/platform/m2m-deinterlace.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               677 drivers/media/platform/m2m-deinterlace.c 			v4l2_err(&ctx->dev->v4l2_dev,
ctx               686 drivers/media/platform/m2m-deinterlace.c 			v4l2_err(&ctx->dev->v4l2_dev,
ctx               696 drivers/media/platform/m2m-deinterlace.c 	return v4l2_m2m_streamon(file, ctx->fh.m2m_ctx, type);
ctx               735 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = vb2_get_drv_priv(vq);
ctx               754 drivers/media/platform/m2m-deinterlace.c 	dprintk(ctx->dev, "get %d buffer(s) of size %d each.\n", count, size);
ctx               761 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               764 drivers/media/platform/m2m-deinterlace.c 	dprintk(ctx->dev, "type: %d\n", vb->vb2_queue->type);
ctx               769 drivers/media/platform/m2m-deinterlace.c 		dprintk(ctx->dev, "%s data will not fit into plane (%lu < %lu)\n",
ctx               782 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               784 drivers/media/platform/m2m-deinterlace.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               798 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = priv;
ctx               803 drivers/media/platform/m2m-deinterlace.c 	src_vq->drv_priv = ctx;
ctx               808 drivers/media/platform/m2m-deinterlace.c 	src_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               809 drivers/media/platform/m2m-deinterlace.c 	src_vq->lock = &ctx->dev->dev_mutex;
ctx               822 drivers/media/platform/m2m-deinterlace.c 	dst_vq->drv_priv = ctx;
ctx               827 drivers/media/platform/m2m-deinterlace.c 	dst_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               828 drivers/media/platform/m2m-deinterlace.c 	dst_vq->lock = &ctx->dev->dev_mutex;
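
The queue_init fragments above configure the two vb2 queues every m2m context owns: an OUTPUT (source) queue and a CAPTURE (destination) queue sharing the driver context, device mutex and struct device. A sketch of the full shape, assuming a dma-contig driver whose device struct exposes its v4l2_dev and a dev_mutex as in the deinterlace listing; demo_qops is a hypothetical vb2_ops table:

#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-dma-contig.h>

static int demo_queue_init(void *priv, struct vb2_queue *src_vq,
			   struct vb2_queue *dst_vq)
{
	struct demo_ctx *ctx = priv;
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &demo_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->dev = ctx->dev->v4l2_dev.dev;
	src_vq->lock = &ctx->dev->dev_mutex;	/* serialise with ioctls */

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	/* The capture queue differs only in its type. */
	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &demo_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->dev = ctx->dev->v4l2_dev.dev;
	dst_vq->lock = &ctx->dev->dev_mutex;

	return vb2_queue_init(dst_vq);
}
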
ctx               844 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = NULL;
ctx               846 drivers/media/platform/m2m-deinterlace.c 	ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
ctx               847 drivers/media/platform/m2m-deinterlace.c 	if (!ctx)
ctx               850 drivers/media/platform/m2m-deinterlace.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               851 drivers/media/platform/m2m-deinterlace.c 	file->private_data = &ctx->fh;
ctx               852 drivers/media/platform/m2m-deinterlace.c 	ctx->dev = pcdev;
ctx               854 drivers/media/platform/m2m-deinterlace.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(pcdev->m2m_dev, ctx, &queue_init);
ctx               855 drivers/media/platform/m2m-deinterlace.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               856 drivers/media/platform/m2m-deinterlace.c 		int ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               858 drivers/media/platform/m2m-deinterlace.c 		kfree(ctx);
ctx               862 drivers/media/platform/m2m-deinterlace.c 	ctx->xt = kzalloc(sizeof(struct dma_interleaved_template) +
ctx               864 drivers/media/platform/m2m-deinterlace.c 	if (!ctx->xt) {
ctx               865 drivers/media/platform/m2m-deinterlace.c 		kfree(ctx);
ctx               869 drivers/media/platform/m2m-deinterlace.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx               870 drivers/media/platform/m2m-deinterlace.c 	v4l2_fh_add(&ctx->fh);
ctx               873 drivers/media/platform/m2m-deinterlace.c 		ctx, ctx->fh.m2m_ctx);
ctx               881 drivers/media/platform/m2m-deinterlace.c 	struct deinterlace_ctx *ctx = file->private_data;
ctx               883 drivers/media/platform/m2m-deinterlace.c 	dprintk(pcdev, "Releasing instance %p\n", ctx);
ctx               885 drivers/media/platform/m2m-deinterlace.c 	v4l2_fh_del(&ctx->fh);
ctx               886 drivers/media/platform/m2m-deinterlace.c 	v4l2_fh_exit(&ctx->fh);
ctx               887 drivers/media/platform/m2m-deinterlace.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               888 drivers/media/platform/m2m-deinterlace.c 	kfree(ctx->xt);
ctx               889 drivers/media/platform/m2m-deinterlace.c 	kfree(ctx);
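
The open/release fragments above follow the common file-handle lifecycle: allocate a context, init and register a v4l2_fh, create the m2m context (which runs queue_init), and undo all of it on release. A skeleton of that ordering; the demo_* names are placeholders and demo_device() stands in for however the driver finds its device from the file:

static int demo_open(struct file *file)
{
	struct demo_dev *dev = demo_device(file);	/* hypothetical lookup */
	struct demo_ctx *ctx;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	ctx->dev = dev;

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx,
					    &demo_queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		int ret = PTR_ERR(ctx->fh.m2m_ctx);

		v4l2_fh_exit(&ctx->fh);
		kfree(ctx);
		return ret;
	}

	v4l2_fh_add(&ctx->fh);	/* only once the context is fully usable */
	return 0;
}

static int demo_release(struct file *file)
{
	struct v4l2_fh *fh = file->private_data;
	struct demo_ctx *ctx = container_of(fh, struct demo_ctx, fh);

	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	kfree(ctx);
	return 0;
}
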
ctx               132 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static struct mtk_jpeg_q_data *mtk_jpeg_get_q_data(struct mtk_jpeg_ctx *ctx,
ctx               136 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		return &ctx->out_q;
ctx               137 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	return &ctx->cap_q;
ctx               140 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static struct mtk_jpeg_fmt *mtk_jpeg_find_format(struct mtk_jpeg_ctx *ctx,
ctx               179 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static void mtk_jpeg_adjust_fmt_mplane(struct mtk_jpeg_ctx *ctx,
ctx               186 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = mtk_jpeg_get_q_data(ctx, f->type);
ctx               201 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 				   struct mtk_jpeg_ctx *ctx, int q_type)
ctx               204 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               210 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (ctx->state != MTK_JPEG_INIT) {
ctx               211 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		mtk_jpeg_adjust_fmt_mplane(ctx, f);
ctx               269 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               270 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               273 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               277 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = mtk_jpeg_get_q_data(ctx, f->type);
ctx               285 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	pix_mp->colorspace = ctx->colorspace;
ctx               286 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	pix_mp->ycbcr_enc = ctx->ycbcr_enc;
ctx               287 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	pix_mp->xfer_func = ctx->xfer_func;
ctx               288 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	pix_mp->quantization = ctx->quantization;
ctx               317 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               320 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	fmt = mtk_jpeg_find_format(ctx, f->fmt.pix_mp.pixelformat,
ctx               323 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		fmt = ctx->cap_q.fmt;
ctx               325 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_dbg(2, debug, &ctx->jpeg->v4l2_dev, "(%d) try_fmt:%c%c%c%c\n",
ctx               332 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	return mtk_jpeg_try_fmt_mplane(f, fmt, ctx, MTK_JPEG_FMT_TYPE_CAPTURE);
ctx               338 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               341 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	fmt = mtk_jpeg_find_format(ctx, f->fmt.pix_mp.pixelformat,
ctx               344 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		fmt = ctx->out_q.fmt;
ctx               346 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_dbg(2, debug, &ctx->jpeg->v4l2_dev, "(%d) try_fmt:%c%c%c%c\n",
ctx               353 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	return mtk_jpeg_try_fmt_mplane(f, fmt, ctx, MTK_JPEG_FMT_TYPE_OUTPUT);
ctx               356 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static int mtk_jpeg_s_fmt_mplane(struct mtk_jpeg_ctx *ctx,
ctx               362 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               366 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               370 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = mtk_jpeg_get_q_data(ctx, f->type);
ctx               380 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data->fmt = mtk_jpeg_find_format(ctx, pix_mp->pixelformat, f_type);
ctx               383 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->colorspace = pix_mp->colorspace;
ctx               384 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->ycbcr_enc = pix_mp->ycbcr_enc;
ctx               385 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->xfer_func = pix_mp->xfer_func;
ctx               386 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->quantization = pix_mp->quantization;
ctx               432 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static void mtk_jpeg_queue_src_chg_event(struct mtk_jpeg_ctx *ctx)
ctx               440 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_event_queue_fh(&ctx->fh, &ev_src_ch);
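
mtk_jpeg_queue_src_chg_event() above is the stock V4L2 source-change notification: once the decoder has parsed the JPEG headers it tells userspace the resolution is now known, so the application can re-negotiate the capture format. The whole mechanism is a few lines:

#include <media/v4l2-event.h>

static void demo_queue_src_chg_event(struct v4l2_fh *fh)
{
	static const struct v4l2_event ev_src_ch = {
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
	};

	/* Wakes any poll()er that subscribed to the event. */
	v4l2_event_queue_fh(fh, &ev_src_ch);
}
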
ctx               457 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               465 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.width = ctx->out_q.w;
ctx               466 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.height = ctx->out_q.h;
ctx               472 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.width = ctx->cap_q.w;
ctx               473 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.height = ctx->cap_q.h;
ctx               486 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               495 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.width = ctx->out_q.w;
ctx               496 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		s->r.height = ctx->out_q.h;
ctx               507 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(priv);
ctx               517 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		dev_err(ctx->jpeg->dev, "buffer index out of range\n");
ctx               562 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = vb2_get_drv_priv(q);
ctx               564 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               570 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = mtk_jpeg_get_q_data(ctx, q->type);
ctx               586 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               590 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = mtk_jpeg_get_q_data(ctx, vb->vb2_queue->type);
ctx               600 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static bool mtk_jpeg_check_resolution_change(struct mtk_jpeg_ctx *ctx,
ctx               603 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               606 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = &ctx->out_q;
ctx               612 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = &ctx->cap_q;
ctx               613 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (q_data->fmt != mtk_jpeg_find_format(ctx, param->dst_fourcc,
ctx               621 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static void mtk_jpeg_set_queue_data(struct mtk_jpeg_ctx *ctx,
ctx               624 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               628 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = &ctx->out_q;
ctx               632 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data = &ctx->cap_q;
ctx               635 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q_data->fmt = mtk_jpeg_find_format(ctx,
ctx               656 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               658 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               684 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (ctx->state == MTK_JPEG_INIT) {
ctx               686 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 			ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
ctx               688 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		mtk_jpeg_queue_src_chg_event(ctx);
ctx               689 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		mtk_jpeg_set_queue_data(ctx, param);
ctx               690 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		ctx->state = vb2_is_streaming(dst_vq) ?
ctx               694 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, to_vb2_v4l2_buffer(vb));
ctx               697 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static struct vb2_v4l2_buffer *mtk_jpeg_buf_remove(struct mtk_jpeg_ctx *ctx,
ctx               701 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		return v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               703 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		return v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               708 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = vb2_get_drv_priv(q);
ctx               712 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ret = pm_runtime_get_sync(ctx->jpeg->dev);
ctx               718 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	while ((vb = mtk_jpeg_buf_remove(ctx, q->type)))
ctx               725 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = vb2_get_drv_priv(q);
ctx               733 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (ctx->state == MTK_JPEG_SOURCE_CHANGE &&
ctx               737 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               739 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		mtk_jpeg_set_queue_data(ctx, &src_buf->dec_param);
ctx               740 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		ctx->state = MTK_JPEG_RUNNING;
ctx               742 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		ctx->state = MTK_JPEG_INIT;
ctx               745 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	while ((vb = mtk_jpeg_buf_remove(ctx, q->type)))
ctx               748 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	pm_runtime_put_sync(ctx->jpeg->dev);
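
The start/stop streaming fragments above pair runtime PM with queue draining: start_streaming powers the block up and, on failure, must hand every already-queued buffer back to vb2, and stop_streaming does the same drain before dropping the PM reference. A sketch of the stop side, reusing the src/dst remove switch shown in mtk_jpeg_buf_remove() above; ctx->sysdev is an assumed struct device pointer for runtime PM, and marking drained buffers as errored is the conventional choice when tearing down:

static struct vb2_v4l2_buffer *demo_buf_remove(struct demo_ctx *ctx,
					       enum v4l2_buf_type type)
{
	if (V4L2_TYPE_IS_OUTPUT(type))
		return v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
	else
		return v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
}

static void demo_stop_streaming(struct vb2_queue *q)
{
	struct demo_ctx *ctx = vb2_get_drv_priv(q);
	struct vb2_v4l2_buffer *vb;

	/* Give every still-queued buffer back before powering down. */
	while ((vb = demo_buf_remove(ctx, q->type)))
		vb2_buffer_done(&vb->vb2_buf, VB2_BUF_STATE_ERROR);

	pm_runtime_put_sync(ctx->sysdev);	/* balances start_streaming */
}
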
ctx               761 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static void mtk_jpeg_set_dec_src(struct mtk_jpeg_ctx *ctx,
ctx               771 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static int mtk_jpeg_set_dec_dst(struct mtk_jpeg_ctx *ctx,
ctx               779 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		dev_err(ctx->jpeg->dev, "plane number mismatch (%u != %u)\n",
ctx               786 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 			dev_err(ctx->jpeg->dev,
ctx               800 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = priv;
ctx               801 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_dev *jpeg = ctx->jpeg;
ctx               810 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               811 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               821 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (mtk_jpeg_check_resolution_change(ctx, &jpeg_src_buf->dec_param)) {
ctx               822 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		mtk_jpeg_queue_src_chg_event(ctx);
ctx               823 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		ctx->state = MTK_JPEG_SOURCE_CHANGE;
ctx               824 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		v4l2_m2m_job_finish(jpeg->m2m_dev, ctx->fh.m2m_ctx);
ctx               828 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	mtk_jpeg_set_dec_src(ctx, &src_buf->vb2_buf, &bs);
ctx               829 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (mtk_jpeg_set_dec_dst(ctx, &jpeg_src_buf->dec_param, &dst_buf->vb2_buf, &fb))
ctx               842 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               843 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               846 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_job_finish(jpeg->m2m_dev, ctx->fh.m2m_ctx);
ctx               851 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = priv;
ctx               853 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	return (ctx->state == MTK_JPEG_RUNNING) ? 1 : 0;
ctx               864 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = priv;
ctx               869 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	src_vq->drv_priv = ctx;
ctx               874 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	src_vq->lock = &ctx->jpeg->lock;
ctx               875 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	src_vq->dev = ctx->jpeg->dev;
ctx               882 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	dst_vq->drv_priv = ctx;
ctx               887 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	dst_vq->lock = &ctx->jpeg->lock;
ctx               888 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	dst_vq->dev = ctx->jpeg->dev;
ctx               915 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx;
ctx               925 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx = v4l2_m2m_get_curr_priv(jpeg->m2m_dev);
ctx               926 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (!ctx) {
ctx               931 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               932 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               952 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_job_finish(jpeg->m2m_dev, ctx->fh.m2m_ctx);
ctx               956 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c static void mtk_jpeg_set_default_params(struct mtk_jpeg_ctx *ctx)
ctx               958 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_q_data *q = &ctx->out_q;
ctx               961 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->colorspace = V4L2_COLORSPACE_JPEG,
ctx               961 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->colorspace = V4L2_COLORSPACE_JPEG;
ctx               962 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
ctx               963 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;
ctx               964 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
ctx               966 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q->fmt = mtk_jpeg_find_format(ctx, V4L2_PIX_FMT_JPEG,
ctx               973 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q = &ctx->cap_q;
ctx               974 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	q->fmt = mtk_jpeg_find_format(ctx, V4L2_PIX_FMT_YUV420M,
ctx               992 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx;
ctx               995 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               996 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (!ctx)
ctx              1004 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_init(&ctx->fh, vfd);
ctx              1005 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	file->private_data = &ctx->fh;
ctx              1006 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_add(&ctx->fh);
ctx              1008 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->jpeg = jpeg;
ctx              1009 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(jpeg->m2m_dev, ctx,
ctx              1011 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1012 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1016 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	mtk_jpeg_set_default_params(ctx);
ctx              1021 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_del(&ctx->fh);
ctx              1022 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_exit(&ctx->fh);
ctx              1025 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	kfree(ctx);
ctx              1032 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	struct mtk_jpeg_ctx *ctx = mtk_jpeg_fh_to_ctx(file->private_data);
ctx              1035 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1036 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_del(&ctx->fh);
ctx              1037 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	v4l2_fh_exit(&ctx->fh);
ctx              1038 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c 	kfree(ctx);
ctx                77 drivers/media/platform/mtk-mdp/mtk_mdp_core.c 	struct mtk_mdp_ctx *ctx;
ctx                81 drivers/media/platform/mtk-mdp/mtk_mdp_core.c 	list_for_each_entry(ctx, &mdp->ctx_list, list) {
ctx                82 drivers/media/platform/mtk-mdp/mtk_mdp_core.c 		mtk_mdp_dbg(0, "[%d] Change as state error", ctx->id);
ctx                83 drivers/media/platform/mtk-mdp/mtk_mdp_core.c 		mtk_mdp_ctx_state_lock_set(ctx, MTK_MDP_CTX_ERROR);
ctx               173 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static const struct mtk_mdp_fmt *mtk_mdp_try_fmt_mplane(struct mtk_mdp_ctx *ctx,
ctx               176 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx               188 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		dev_dbg(&ctx->mdp_dev->pdev->dev,
ctx               197 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		pix_mp->colorspace = ctx->colorspace;
ctx               198 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		pix_mp->xfer_func = ctx->xfer_func;
ctx               199 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		pix_mp->ycbcr_enc = ctx->ycbcr_enc;
ctx               200 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		pix_mp->quantization = ctx->quant;
ctx               225 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		    ctx->id, f->type, pix_mp->width, pix_mp->height,
ctx               238 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_dbg(1, "[%d] size change:%ux%u to %ux%u", ctx->id,
ctx               252 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_dbg(2, "[%d] p%d, bpl:%d, sizeimage:%u (%u)", ctx->id,
ctx               259 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static struct mtk_mdp_frame *mtk_mdp_ctx_get_frame(struct mtk_mdp_ctx *ctx,
ctx               263 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		return &ctx->s_frame;
ctx               264 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	return &ctx->d_frame;
ctx               278 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static int mtk_mdp_try_crop(struct mtk_mdp_ctx *ctx, u32 type,
ctx               282 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx               288 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		dev_err(&ctx->mdp_dev->pdev->dev,
ctx               293 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] type:%d, set wxh:%dx%d", ctx->id, type,
ctx               296 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, type);
ctx               310 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		if (ctx->ctrls.rotate->val == 90 ||
ctx               311 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		    ctx->ctrls.rotate->val == 270) {
ctx               324 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] align:%dx%d, min:%dx%d, new:%dx%d", ctx->id,
ctx               331 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		(ctx->ctrls.rotate->val == 90 ||
ctx               332 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->ctrls.rotate->val == 270))
ctx               349 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] crop l,t,w,h:%d,%d,%d,%d, max:%dx%d", ctx->id,
ctx               365 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c void mtk_mdp_ctx_state_lock_set(struct mtk_mdp_ctx *ctx, u32 state)
ctx               367 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_lock(&ctx->slock);
ctx               368 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->state |= state;
ctx               369 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_unlock(&ctx->slock);
ctx               372 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static void mtk_mdp_ctx_state_lock_clear(struct mtk_mdp_ctx *ctx, u32 state)
ctx               374 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_lock(&ctx->slock);
ctx               375 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->state &= ~state;
ctx               376 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_unlock(&ctx->slock);
ctx               379 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static bool mtk_mdp_ctx_state_is_set(struct mtk_mdp_ctx *ctx, u32 mask)
ctx               383 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_lock(&ctx->slock);
ctx               384 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ret = (ctx->state & mask) == mask;
ctx               385 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_unlock(&ctx->slock);
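
The three mtk_mdp helpers above implement a mutex-guarded flag word: set, clear, and an all-bits-set test. The rest of the driver composes them into gates; for instance, the streaming and selection paths require both formats to have been negotiated, which reduces to the following sketch, reusing the driver's own helper and flag names:

static bool demo_formats_negotiated(struct mtk_mdp_ctx *ctx)
{
	/* True only once S_FMT has been done on both queues. */
	return mtk_mdp_ctx_state_is_set(ctx,
					MTK_MDP_SRC_FMT | MTK_MDP_DST_FMT);
}
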
ctx               402 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = q->drv_priv;
ctx               405 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ret = pm_runtime_get_sync(&ctx->mdp_dev->pdev->dev);
ctx               408 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			    ctx->id, ret);
ctx               413 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static void *mtk_mdp_m2m_buf_remove(struct mtk_mdp_ctx *ctx,
ctx               417 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		return v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx               419 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		return v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               424 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = q->drv_priv;
ctx               427 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	vb = mtk_mdp_m2m_buf_remove(ctx, q->type);
ctx               430 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		vb = mtk_mdp_m2m_buf_remove(ctx, q->type);
ctx               433 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	pm_runtime_put(&ctx->mdp_dev->pdev->dev);
ctx               437 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static void mtk_mdp_prepare_addr(struct mtk_mdp_ctx *ctx,
ctx               455 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			dev_err(&ctx->mdp_dev->pdev->dev,
ctx               461 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		    ctx->id, planes, pix_size, (void *)addr->addr[0],
ctx               465 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static void mtk_mdp_m2m_get_bufs(struct mtk_mdp_ctx *ctx)
ctx               470 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	s_frame = &ctx->s_frame;
ctx               471 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	d_frame = &ctx->d_frame;
ctx               473 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	src_vbuf = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx               474 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_prepare_addr(ctx, &src_vbuf->vb2_buf, s_frame, &s_frame->addr);
ctx               476 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	dst_vbuf = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
ctx               477 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_prepare_addr(ctx, &dst_vbuf->vb2_buf, d_frame, &d_frame->addr);
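
mtk_mdp_m2m_get_bufs() above peeks at, rather than removes, the next source and destination buffers so the worker can program their DMA addresses before the job runs. For a dma-contig queue the per-plane address comes from vb2_dma_contig_plane_dma_addr(); a sketch under that assumption, where the addr array mirrors the mtk_mdp_addr usage in the listing:

#include <media/videobuf2-dma-contig.h>

static void demo_get_bufs(struct mtk_mdp_ctx *ctx)
{
	struct vb2_v4l2_buffer *src = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
	struct vb2_v4l2_buffer *dst = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
	unsigned int i;

	/* Buffers stay on the ready lists; job-done removes them later. */
	for (i = 0; i < src->vb2_buf.num_planes; i++)
		ctx->s_frame.addr.addr[i] =
			vb2_dma_contig_plane_dma_addr(&src->vb2_buf, i);
	for (i = 0; i < dst->vb2_buf.num_planes; i++)
		ctx->d_frame.addr.addr[i] =
			vb2_dma_contig_plane_dma_addr(&dst->vb2_buf, i);
}
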
ctx               485 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx;
ctx               488 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx = v4l2_m2m_get_curr_priv(mdp->m2m_dev);
ctx               489 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (!ctx)
ctx               492 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	src_vbuf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx               493 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	dst_vbuf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               502 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_m2m_job_finish(ctx->mdp_dev->m2m_dev, ctx->m2m_ctx);
ctx               507 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx =
ctx               509 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx               513 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_CTX_ERROR)) {
ctx               518 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_m2m_get_bufs(ctx);
ctx               520 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_input_addr(ctx, &ctx->s_frame.addr);
ctx               521 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_output_addr(ctx, &ctx->d_frame.addr);
ctx               523 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_in_size(ctx);
ctx               524 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_in_image_format(ctx);
ctx               526 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_out_size(ctx);
ctx               527 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_out_image_format(ctx);
ctx               529 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_rotation(ctx);
ctx               530 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_hw_set_global_alpha(ctx);
ctx               532 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ret = mtk_mdp_vpu_process(&ctx->vpu);
ctx               546 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = priv;
ctx               548 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	queue_work(ctx->mdp_dev->job_wq, &ctx->work);
ctx               555 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = vb2_get_drv_priv(vq);
ctx               559 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, vq->type);
ctx               564 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		    ctx->id, vq->type, *num_planes, *num_buffers,
ctx               571 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               575 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, vb->vb2_queue->type);
ctx               587 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               589 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_m2m_buf_queue(ctx->m2m_ctx, to_vb2_v4l2_buffer(vb));
ctx               605 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               606 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx               643 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               648 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] type:%d", ctx->id, f->type);
ctx               650 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, f->type);
ctx               658 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	pix_mp->colorspace = ctx->colorspace;
ctx               659 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	pix_mp->xfer_func = ctx->xfer_func;
ctx               660 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	pix_mp->ycbcr_enc = ctx->ycbcr_enc;
ctx               661 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	pix_mp->quantization = ctx->quant;
ctx               662 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] wxh:%dx%d", ctx->id,
ctx               671 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_dbg(2, "[%d] p%d, bpl:%d, sizeimage:%d", ctx->id, i,
ctx               682 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               684 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (!mtk_mdp_try_fmt_mplane(ctx, f))
ctx               692 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               699 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] type:%d", ctx->id, f->type);
ctx               701 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, f->type);
ctx               702 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	fmt = mtk_mdp_try_fmt_mplane(ctx, f);
ctx               704 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_err("[%d] try_fmt failed, type:%d", ctx->id, f->type);
ctx               709 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               711 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		dev_info(&ctx->mdp_dev->pdev->dev, "queue %d busy", f->type);
ctx               723 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->colorspace = pix_mp->colorspace;
ctx               724 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->xfer_func = pix_mp->xfer_func;
ctx               725 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->ycbcr_enc = pix_mp->ycbcr_enc;
ctx               726 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->quant = pix_mp->quantization;
ctx               730 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_ctx_state_lock_set(ctx, MTK_MDP_SRC_FMT);
ctx               732 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_ctx_state_lock_set(ctx, MTK_MDP_DST_FMT);
ctx               734 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(2, "[%d] type:%d, frame:%dx%d", ctx->id, f->type,
ctx               743 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               747 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			mtk_mdp_ctx_state_lock_clear(ctx, MTK_MDP_SRC_FMT);
ctx               749 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			mtk_mdp_ctx_state_lock_clear(ctx, MTK_MDP_DST_FMT);
ctx               752 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	return v4l2_m2m_reqbufs(file, ctx->m2m_ctx, reqbufs);
ctx               758 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               763 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		if (!mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_SRC_FMT))
ctx               765 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	} else if (!mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_DST_FMT)) {
ctx               769 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (!mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_VPU_INIT)) {
ctx               770 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ret = mtk_mdp_vpu_init(&ctx->vpu);
ctx               772 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			dev_err(&ctx->mdp_dev->pdev->dev,
ctx               777 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_ctx_state_lock_set(ctx, MTK_MDP_VPU_INIT);
ctx               780 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
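
The streamon fragment above shows a lazy one-shot initialisation keyed on the same state flags: the VPU session is created on the first STREAMON only, and the MTK_MDP_VPU_INIT bit keeps later calls from re-initialising it. Reduced to its skeleton; the error value is illustrative:

static int demo_streamon(struct file *file, void *fh,
			 enum v4l2_buf_type type)
{
	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
	int ret;

	if (!mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_VPU_INIT)) {
		ret = mtk_mdp_vpu_init(&ctx->vpu);
		if (ret < 0)
			return -EAGAIN;		/* VPU firmware not up yet */
		mtk_mdp_ctx_state_lock_set(ctx, MTK_MDP_VPU_INIT);
	}

	return v4l2_m2m_streamon(file, ctx->m2m_ctx, type);
}
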
ctx               805 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               816 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_dbg(1, "[%d] invalid type:%d,%u", ctx->id, s->type,
ctx               821 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, s->type);
ctx               872 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(fh);
ctx               874 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_variant *variant = ctx->mdp_dev->variant;
ctx               886 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		mtk_mdp_dbg(1, "[%d] invalid type:%d,%u", ctx->id, s->type,
ctx               892 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ret = mtk_mdp_try_crop(ctx, s->type, &new_r);
ctx               897 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		frame = &ctx->s_frame;
ctx               899 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		frame = &ctx->d_frame;
ctx               902 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (mtk_mdp_ctx_state_is_set(ctx, MTK_MDP_DST_FMT | MTK_MDP_SRC_FMT)) {
ctx               905 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				new_r.height, ctx->d_frame.crop.width,
ctx               906 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				ctx->d_frame.crop.height,
ctx               907 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				ctx->ctrls.rotate->val);
ctx               910 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				ctx->s_frame.crop.width,
ctx               911 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				ctx->s_frame.crop.height, new_r.width,
ctx               912 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 				new_r.height, ctx->ctrls.rotate->val);
ctx               916 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 			dev_info(&ctx->mdp_dev->pdev->dev,
ctx               955 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = priv;
ctx               961 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	src_vq->drv_priv = ctx;
ctx               966 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	src_vq->dev = &ctx->mdp_dev->pdev->dev;
ctx               967 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	src_vq->lock = &ctx->mdp_dev->lock;
ctx               976 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	dst_vq->drv_priv = ctx;
ctx               981 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	dst_vq->dev = &ctx->mdp_dev->pdev->dev;
ctx               982 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	dst_vq->lock = &ctx->mdp_dev->lock;
ctx               989 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               990 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx              1000 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->hflip = ctrl->val;
ctx              1003 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->vflip = ctrl->val;
ctx              1006 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		if (mtk_mdp_ctx_state_is_set(ctx, state)) {
ctx              1008 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 					ctx->s_frame.crop.width,
ctx              1009 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 					ctx->s_frame.crop.height,
ctx              1010 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 					ctx->d_frame.crop.width,
ctx              1011 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 					ctx->d_frame.crop.height,
ctx              1012 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 					ctx->ctrls.rotate->val);
ctx              1018 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->rotation = ctrl->val;
ctx              1021 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ctx->d_frame.alpha = ctrl->val;
ctx              1032 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static int mtk_mdp_ctrls_create(struct mtk_mdp_ctx *ctx)
ctx              1034 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, MTK_MDP_MAX_CTRL_NUM);
ctx              1036 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->ctrls.rotate = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx              1038 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->ctrls.hflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx              1042 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->ctrls.vflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx              1046 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->ctrls.global_alpha = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx              1050 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->ctrls_rdy = ctx->ctrl_handler.error == 0;
ctx              1052 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (ctx->ctrl_handler.error) {
ctx              1053 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		int err = ctx->ctrl_handler.error;
ctx              1055 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1056 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		dev_err(&ctx->mdp_dev->pdev->dev,
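
mtk_mdp_ctrls_create() above follows the usual v4l2_ctrl_handler recipe: init the handler, add each standard control, then check the handler's sticky error field once at the end instead of after every call. A sketch with assumed ranges (90-degree rotation steps, boolean flips, 8-bit global alpha); demo_ctrl_ops is a hypothetical v4l2_ctrl_ops providing s_ctrl:

static int demo_ctrls_create(struct v4l2_ctrl_handler *hdl)
{
	v4l2_ctrl_handler_init(hdl, 4);

	v4l2_ctrl_new_std(hdl, &demo_ctrl_ops, V4L2_CID_ROTATE, 0, 270, 90, 0);
	v4l2_ctrl_new_std(hdl, &demo_ctrl_ops, V4L2_CID_HFLIP, 0, 1, 1, 0);
	v4l2_ctrl_new_std(hdl, &demo_ctrl_ops, V4L2_CID_VFLIP, 0, 1, 1, 0);
	v4l2_ctrl_new_std(hdl, &demo_ctrl_ops, V4L2_CID_ALPHA_COMPONENT,
			  0, 255, 1, 0);

	if (hdl->error) {
		int err = hdl->error;	/* first failure, latched */

		v4l2_ctrl_handler_free(hdl);
		return err;
	}
	return 0;
}
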
ctx              1064 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static void mtk_mdp_set_default_params(struct mtk_mdp_ctx *ctx)
ctx              1066 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx              1069 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
ctx              1077 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	frame = mtk_mdp_ctx_get_frame(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
ctx              1091 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = NULL;
ctx              1094 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1095 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (!ctx)
ctx              1103 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mutex_init(&ctx->slock);
ctx              1104 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->id = mdp->id_counter++;
ctx              1105 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_init(&ctx->fh, vfd);
ctx              1106 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	file->private_data = &ctx->fh;
ctx              1107 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ret = mtk_mdp_ctrls_create(ctx);
ctx              1112 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx              1113 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_add(&ctx->fh);
ctx              1114 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	INIT_LIST_HEAD(&ctx->list);
ctx              1116 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->mdp_dev = mdp;
ctx              1117 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_set_default_params(ctx);
ctx              1119 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	INIT_WORK(&ctx->work, mtk_mdp_m2m_worker);
ctx              1120 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->m2m_ctx = v4l2_m2m_ctx_init(mdp->m2m_dev, ctx,
ctx              1122 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	if (IS_ERR(ctx->m2m_ctx)) {
ctx              1124 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 		ret = PTR_ERR(ctx->m2m_ctx);
ctx              1127 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	ctx->fh.m2m_ctx = ctx->m2m_ctx;
ctx              1144 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	list_add(&ctx->list, &mdp->ctx_list);
ctx              1147 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(0, "%s [%d]", dev_name(&mdp->pdev->dev), ctx->id);
ctx              1153 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx              1155 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1157 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_del(&ctx->fh);
ctx              1158 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx              1161 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	kfree(ctx);
ctx              1168 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1169 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	struct mtk_mdp_dev *mdp = ctx->mdp_dev;
ctx              1173 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx              1174 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1175 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_del(&ctx->fh);
ctx              1176 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	v4l2_fh_exit(&ctx->fh);
ctx              1177 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_vpu_deinit(&ctx->vpu);
ctx              1179 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	list_del_init(&ctx->list);
ctx              1181 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	mtk_mdp_dbg(0, "%s [%d]", dev_name(&mdp->pdev->dev), ctx->id);
ctx              1184 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c 	kfree(ctx);
ctx                10 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.h void mtk_mdp_ctx_state_lock_set(struct mtk_mdp_ctx *ctx, u32 state);
ctx                48 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_input_addr(struct mtk_mdp_ctx *ctx,
ctx                51 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_buffer *src_buf = &ctx->vpu.vsi->src_buffer;
ctx                58 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_output_addr(struct mtk_mdp_ctx *ctx,
ctx                61 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_buffer *dst_buf = &ctx->vpu.vsi->dst_buffer;
ctx                68 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_in_size(struct mtk_mdp_ctx *ctx)
ctx                70 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mtk_mdp_frame *frame = &ctx->s_frame;
ctx                71 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config *config = &ctx->vpu.vsi->src_config;
ctx                88 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_in_image_format(struct mtk_mdp_ctx *ctx)
ctx                91 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mtk_mdp_frame *frame = &ctx->s_frame;
ctx                92 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config *config = &ctx->vpu.vsi->src_config;
ctx                93 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_buffer *src_buf = &ctx->vpu.vsi->src_buffer;
ctx               104 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_out_size(struct mtk_mdp_ctx *ctx)
ctx               106 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mtk_mdp_frame *frame = &ctx->d_frame;
ctx               107 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config *config = &ctx->vpu.vsi->dst_config;
ctx               119 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_out_image_format(struct mtk_mdp_ctx *ctx)
ctx               122 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mtk_mdp_frame *frame = &ctx->d_frame;
ctx               123 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config *config = &ctx->vpu.vsi->dst_config;
ctx               124 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_buffer *dst_buf = &ctx->vpu.vsi->dst_buffer;
ctx               134 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_rotation(struct mtk_mdp_ctx *ctx)
ctx               136 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config_misc *misc = &ctx->vpu.vsi->misc;
ctx               138 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	misc->orientation = ctx->ctrls.rotate->val;
ctx               139 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	misc->hflip = ctx->ctrls.hflip->val;
ctx               140 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	misc->vflip = ctx->ctrls.vflip->val;
ctx               143 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c void mtk_mdp_hw_set_global_alpha(struct mtk_mdp_ctx *ctx)
ctx               145 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	struct mdp_config_misc *misc = &ctx->vpu.vsi->misc;
ctx               147 drivers/media/platform/mtk-mdp/mtk_mdp_regs.c 	misc->alpha = ctx->ctrls.global_alpha->val;
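
Despite living in a regs.c, the mtk_mdp "hw_set" helpers above never touch MMIO: they write into the vsi structure shared with the VPU co-processor, and the values only take effect when the worker later calls mtk_mdp_vpu_process(). Condensed to one helper, with field names as in the listing:

static void demo_hw_set_rotation(struct mtk_mdp_ctx *ctx)
{
	struct mdp_config_misc *misc = &ctx->vpu.vsi->misc;

	/* Plain memory writes; the VPU picks them up at job time. */
	misc->orientation = ctx->ctrls.rotate->val;
	misc->hflip = ctx->ctrls.hflip->val;
	misc->vflip = ctx->ctrls.vflip->val;
	misc->alpha = ctx->ctrls.global_alpha->val;
}
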
ctx                11 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_input_addr(struct mtk_mdp_ctx *ctx,
ctx                13 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_output_addr(struct mtk_mdp_ctx *ctx,
ctx                15 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_in_size(struct mtk_mdp_ctx *ctx);
ctx                16 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_in_image_format(struct mtk_mdp_ctx *ctx);
ctx                17 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_out_size(struct mtk_mdp_ctx *ctx);
ctx                18 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_out_image_format(struct mtk_mdp_ctx *ctx);
ctx                19 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_rotation(struct mtk_mdp_ctx *ctx);
ctx                20 drivers/media/platform/mtk-mdp/mtk_mdp_regs.h void mtk_mdp_hw_set_global_alpha(struct mtk_mdp_ctx *ctx);
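Note: the mtk_mdp_hw_set_* entries above share one shape: none of them touches MMIO directly; each copies V4L2 state (frame geometry, control values) into the "vsi" structure shared with the VPU coprocessor (src_config, dst_config, misc), which the firmware consumes later. Below is a minimal standalone C model of that pattern; every type and field name is a simplified stand-in invented for illustration, not the kernel's definition.

#include <stdio.h>

/* Stand-ins for ctx->vpu.vsi->misc and the v4l2 controls. */
struct mdp_config_misc {
        int orientation;        /* rotation, degrees */
        int hflip;
        int vflip;
        int alpha;              /* global alpha */
};

struct fake_ctrl { int val; };

struct fake_ctx {
        struct mdp_config_misc misc;    /* stands in for ctx->vpu.vsi->misc */
        struct fake_ctrl rotate, hflip, vflip, global_alpha;
};

/* Mirrors mtk_mdp_hw_set_rotation(): copy control values into the vsi. */
static void set_rotation(struct fake_ctx *ctx)
{
        struct mdp_config_misc *misc = &ctx->misc;

        misc->orientation = ctx->rotate.val;
        misc->hflip = ctx->hflip.val;
        misc->vflip = ctx->vflip.val;
}

/* Mirrors mtk_mdp_hw_set_global_alpha(). */
static void set_global_alpha(struct fake_ctx *ctx)
{
        ctx->misc.alpha = ctx->global_alpha.val;
}

int main(void)
{
        struct fake_ctx ctx = { .rotate.val = 90, .hflip.val = 1 };

        set_rotation(&ctx);
        set_global_alpha(&ctx);
        printf("orientation=%d hflip=%d vflip=%d alpha=%d\n",
               ctx.misc.orientation, ctx.misc.hflip, ctx.misc.vflip,
               ctx.misc.alpha);
        return 0;
}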
ctx                35 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	struct mtk_mdp_ctx *ctx;
ctx                47 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 			ctx = vpu_to_ctx(vpu);
ctx                48 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 			dev_err(&ctx->mdp_dev->pdev->dev,
ctx                54 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 		ctx = vpu_to_ctx(vpu);
ctx                55 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 		mtk_mdp_dbg(0, "[%d]:msg 0x%x, failure:%d", ctx->id,
ctx                77 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	struct mtk_mdp_ctx *ctx = vpu_to_ctx(vpu);
ctx                81 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 		mtk_mdp_dbg(1, "[%d]:vpu pdev is NULL", ctx->id);
ctx                85 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	mutex_lock(&ctx->mdp_dev->vpulock);
ctx                88 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 		dev_err(&ctx->mdp_dev->pdev->dev,
ctx                90 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	mutex_unlock(&ctx->mdp_dev->vpulock);
ctx               115 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	struct mtk_mdp_ctx *ctx = vpu_to_ctx(vpu);
ctx               117 drivers/media/platform/mtk-mdp/mtk_mdp_vpu.c 	vpu->pdev = ctx->mdp_dev->vpu_dev;
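Note: vpu_to_ctx() above (mtk_mdp_vpu.c lines 47, 54, 77, 115) recovers the owning mtk_mdp_ctx from an embedded vpu instance; in kernel code that back-pointer lookup is almost always container_of(). A standalone sketch of the idiom, assuming the vpu struct is embedded in the ctx (the index does not show the struct definitions):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct mdp_vpu { int failure; };

struct mdp_ctx {
        int id;
        struct mdp_vpu vpu;     /* embedded, as mtk_mdp_ctx appears to do */
};

/* Likely shape of vpu_to_ctx(): walk back from the member to its parent. */
static struct mdp_ctx *vpu_to_ctx(struct mdp_vpu *vpu)
{
        return container_of(vpu, struct mdp_ctx, vpu);
}

int main(void)
{
        struct mdp_ctx c = { .id = 7 };

        printf("ctx id=%d\n", vpu_to_ctx(&c.vpu)->id);
        return 0;
}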
ctx                88 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static struct mtk_q_data *mtk_vdec_get_q_data(struct mtk_vcodec_ctx *ctx,
ctx                92 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		return &ctx->q_data[MTK_Q_DATA_SRC];
ctx                94 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	return &ctx->q_data[MTK_Q_DATA_DST];
ctx               103 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static struct vb2_buffer *get_display_buffer(struct mtk_vcodec_ctx *ctx)
ctx               108 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(3, "[%d]", ctx->id);
ctx               109 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (vdec_if_get_param(ctx,
ctx               113 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id);
ctx               124 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_lock(&ctx->lock);
ctx               127 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					ctx->picinfo.fb_sz[0]);
ctx               128 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (ctx->q_data[MTK_Q_DATA_DST].fmt->num_planes == 2)
ctx               130 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					      ctx->picinfo.fb_sz[1]);
ctx               134 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, disp_frame_buffer->status,
ctx               139 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->decoded_frame_cnt++;
ctx               141 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_unlock(&ctx->lock);
ctx               153 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static struct vb2_buffer *get_free_buffer(struct mtk_vcodec_ctx *ctx)
ctx               158 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (vdec_if_get_param(ctx,
ctx               161 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_v4l2_err("[%d] Error!! Cannot get param", ctx->id);
ctx               170 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, free_frame_buffer);
ctx               175 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_lock(&ctx->lock);
ctx               189 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, free_frame_buffer->status,
ctx               192 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			v4l2_m2m_buf_queue(ctx->m2m_ctx, &dstbuf->vb);
ctx               207 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					ctx->id, free_frame_buffer->status,
ctx               209 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			v4l2_m2m_buf_queue(ctx->m2m_ctx, &dstbuf->vb);
ctx               221 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					ctx->id, free_frame_buffer->status,
ctx               228 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_unlock(&ctx->lock);
ctx               232 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static void clean_display_buffer(struct mtk_vcodec_ctx *ctx)
ctx               237 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		framptr = get_display_buffer(ctx);
ctx               241 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static void clean_free_buffer(struct mtk_vcodec_ctx *ctx)
ctx               246 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		framptr = get_free_buffer(ctx);
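Note: clean_display_buffer() and clean_free_buffer() (lines 232-246) each reduce to a drain loop: keep asking the codec for buffers until it returns NULL. The loop construct itself is elided by the index; a probable full body, using the driver-local types shown above:

static void clean_display_buffer(struct mtk_vcodec_ctx *ctx)
{
        struct vb2_buffer *framptr;

        /* Drain every displayable buffer the codec still holds. */
        do {
                framptr = get_display_buffer(ctx);
        } while (framptr);
}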
ctx               250 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static void mtk_vdec_queue_res_chg_event(struct mtk_vcodec_ctx *ctx)
ctx               258 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(1, "[%d]", ctx->id);
ctx               259 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_event_queue_fh(&ctx->fh, &ev_src_ch);
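Note: mtk_vdec_queue_res_chg_event() (lines 250-259) presumably queues the standard V4L2 source-change event so userspace can renegotiate the CAPTURE format. A fragment-level sketch using the in-tree v4l2-event API; the event initializer is inferred from the function's name and the V4L2 spec, since the index drops those lines:

#include <media/v4l2-event.h>

static void queue_res_chg_event(struct v4l2_fh *fh)
{
        static const struct v4l2_event ev_src_ch = {
                .type = V4L2_EVENT_SOURCE_CHANGE,
                .u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
        };

        /* Userspace sees this via VIDIOC_DQEVENT after subscribing. */
        v4l2_event_queue_fh(fh, &ev_src_ch);
}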
ctx               262 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static void mtk_vdec_flush_decoder(struct mtk_vcodec_ctx *ctx)
ctx               267 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ret = vdec_if_decode(ctx, NULL, NULL, &res_chg);
ctx               271 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	clean_display_buffer(ctx);
ctx               272 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	clean_free_buffer(ctx);
ctx               275 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static void mtk_vdec_update_fmt(struct mtk_vcodec_ctx *ctx,
ctx               282 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_q_data = &ctx->q_data[MTK_Q_DATA_DST];
ctx               296 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c static int mtk_vdec_pic_info_update(struct mtk_vcodec_ctx *ctx)
ctx               301 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (vdec_if_get_param(ctx,
ctx               303 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				&ctx->last_decoded_picinfo)) {
ctx               305 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id);
ctx               309 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->last_decoded_picinfo.pic_w == 0 ||
ctx               310 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->last_decoded_picinfo.pic_h == 0 ||
ctx               311 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->last_decoded_picinfo.buf_w == 0 ||
ctx               312 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->last_decoded_picinfo.buf_h == 0) {
ctx               317 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->last_decoded_picinfo.cap_fourcc != ctx->picinfo.cap_fourcc &&
ctx               318 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->picinfo.cap_fourcc != 0)
ctx               319 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_vdec_update_fmt(ctx, ctx->picinfo.cap_fourcc);
ctx               321 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if ((ctx->last_decoded_picinfo.pic_w == ctx->picinfo.pic_w) ||
ctx               322 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	    (ctx->last_decoded_picinfo.pic_h == ctx->picinfo.pic_h))
ctx               327 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, ctx->last_decoded_picinfo.pic_w,
ctx               328 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->last_decoded_picinfo.pic_h,
ctx               329 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->picinfo.pic_w, ctx->picinfo.pic_h,
ctx               330 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->last_decoded_picinfo.buf_w,
ctx               331 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->last_decoded_picinfo.buf_h);
ctx               333 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ret = vdec_if_get_param(ctx, GET_PARAM_DPB_SIZE, &dpbsize);
ctx               337 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->dpb_size = dpbsize;
ctx               344 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = container_of(work, struct mtk_vcodec_ctx,
ctx               346 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_dev *dev = ctx->dev;
ctx               354 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	src_buf = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx               356 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_job_finish(dev->m2m_dev_dec, ctx->m2m_ctx);
ctx               357 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_v4l2_debug(1, "[%d] src_buf empty!!", ctx->id);
ctx               361 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->m2m_ctx);
ctx               363 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_job_finish(dev->m2m_dev_dec, ctx->m2m_ctx);
ctx               364 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_v4l2_debug(1, "[%d] dst_buf empty!!", ctx->id);
ctx               374 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pfb->base_y.size = ctx->picinfo.fb_sz[0];
ctx               378 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pfb->base_c.size = ctx->picinfo.fb_sz[1];
ctx               380 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(3, "===>[%d] vdec_if_decode() ===>", ctx->id);
ctx               390 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx               393 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               394 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mutex_lock(&ctx->lock);
ctx               396 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mutex_unlock(&ctx->lock);
ctx               398 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		vdec_if_decode(ctx, NULL, NULL, &res_chg);
ctx               399 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		clean_display_buffer(ctx);
ctx               401 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (ctx->q_data[MTK_Q_DATA_DST].fmt->num_planes == 2)
ctx               405 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		clean_free_buffer(ctx);
ctx               406 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_job_finish(dev->m2m_dev_dec, ctx->m2m_ctx);
ctx               413 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_job_finish(dev->m2m_dev_dec, ctx->m2m_ctx);
ctx               415 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, src_buf->vb2_buf.index);
ctx               419 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, buf.va, &buf.dma_addr, buf.size, src_buf);
ctx               424 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_lock(&ctx->lock);
ctx               426 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_unlock(&ctx->lock);
ctx               429 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ret = vdec_if_decode(ctx, &buf, pfb, &res_chg);
ctx               434 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id,
ctx               440 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx               442 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			mutex_lock(&ctx->lock);
ctx               444 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			mutex_unlock(&ctx->lock);
ctx               452 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx               456 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               457 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	clean_display_buffer(ctx);
ctx               458 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	clean_free_buffer(ctx);
ctx               461 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_vdec_pic_info_update(ctx);
ctx               468 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_vdec_flush_decoder(ctx);
ctx               476 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_vdec_queue_res_chg_event(ctx);
ctx               478 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_m2m_job_finish(dev->m2m_dev_dec, ctx->m2m_ctx);
ctx               502 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               511 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_vq = v4l2_m2m_get_vq(ctx->m2m_ctx,
ctx               515 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		src_vq = v4l2_m2m_get_vq(ctx->m2m_ctx,
ctx               525 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_buf_queue(ctx->m2m_ctx, &ctx->empty_flush_buf->vb);
ctx               526 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_try_schedule(ctx->m2m_ctx);
ctx               540 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c void mtk_vdec_unlock(struct mtk_vcodec_ctx *ctx)
ctx               542 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_unlock(&ctx->dev->dec_mutex);
ctx               545 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c void mtk_vdec_lock(struct mtk_vcodec_ctx *ctx)
ctx               547 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_lock(&ctx->dev->dec_mutex);
ctx               550 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c void mtk_vcodec_dec_release(struct mtk_vcodec_ctx *ctx)
ctx               552 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	vdec_if_deinit(ctx);
ctx               553 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->state = MTK_STATE_FREE;
ctx               556 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c void mtk_vcodec_dec_set_default_params(struct mtk_vcodec_ctx *ctx)
ctx               560 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->m2m_ctx->q_lock = &ctx->dev->dev_mutex;
ctx               561 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->fh.m2m_ctx = ctx->m2m_ctx;
ctx               562 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->fh.ctrl_handler = &ctx->ctrl_hdl;
ctx               563 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	INIT_WORK(&ctx->decode_work, mtk_vdec_worker);
ctx               564 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx               565 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
ctx               566 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;
ctx               567 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
ctx               569 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = &ctx->q_data[MTK_Q_DATA_SRC];
ctx               579 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = &ctx->q_data[MTK_Q_DATA_DST];
ctx               604 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               606 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state == MTK_STATE_ABORT) {
ctx               608 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id);
ctx               612 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
ctx               618 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               620 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state == MTK_STATE_ABORT) {
ctx               622 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id);
ctx               626 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
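Note: the qbuf/dqbuf handlers (lines 604-626) share one guard: once the instance has reached MTK_STATE_ABORT, buffer I/O is refused; otherwise the call is delegated to the mem2mem helpers. Reconstructed below with the error path condensed; the exact error code is an assumption, as the index keeps only the state check and the delegation:

static int vidioc_vdec_qbuf(struct file *file, void *priv,
                            struct v4l2_buffer *buf)
{
        struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);

        /* No more I/O after an unrecoverable error. */
        if (ctx->state == MTK_STATE_ABORT)
                return -EIO;

        return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
}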
ctx               760 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               766 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = &ctx->q_data[MTK_Q_DATA_DST];
ctx               772 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.width = ctx->picinfo.pic_w;
ctx               773 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.height = ctx->picinfo.pic_h;
ctx               778 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.width = ctx->picinfo.buf_w;
ctx               779 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.height = ctx->picinfo.buf_h;
ctx               782 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (vdec_if_get_param(ctx, GET_PARAM_CROP_INFO, &(s->r))) {
ctx               794 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state < MTK_STATE_HEADER) {
ctx               809 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               818 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.width = ctx->picinfo.pic_w;
ctx               819 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		s->r.height = ctx->picinfo.pic_h;
ctx               831 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               837 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(3, "[%d]", ctx->id);
ctx               839 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = mtk_vdec_get_q_data(ctx, f->type);
ctx               845 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	    vb2_is_busy(&ctx->m2m_ctx->out_q_ctx.q)) {
ctx               851 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	    vb2_is_busy(&ctx->m2m_ctx->cap_q_ctx.q)) {
ctx               876 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->colorspace = f->fmt.pix_mp.colorspace;
ctx               877 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
ctx               878 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->quantization = f->fmt.pix_mp.quantization;
ctx               879 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->xfer_func = f->fmt.pix_mp.xfer_func;
ctx               881 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (ctx->state == MTK_STATE_FREE) {
ctx               882 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ret = vdec_if_init(ctx, q_data->fmt->fourcc);
ctx               885 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					ctx->id, ret);
ctx               888 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->state = MTK_STATE_INIT;
ctx               899 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               910 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (!(ctx->dev->dec_capability &
ctx               919 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->dev->dec_capability,
ctx               974 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               979 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               985 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = mtk_vdec_get_q_data(ctx, f->type);
ctx               988 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pix_mp->colorspace = ctx->colorspace;
ctx               989 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pix_mp->ycbcr_enc = ctx->ycbcr_enc;
ctx               990 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pix_mp->quantization = ctx->quantization;
ctx               991 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	pix_mp->xfer_func = ctx->xfer_func;
ctx               994 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	    (ctx->state >= MTK_STATE_HEADER)) {
ctx              1001 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->sizeimage[0] = ctx->picinfo.fb_sz[0];
ctx              1002 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->sizeimage[1] = ctx->picinfo.fb_sz[1];
ctx              1003 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->bytesperline[0] = ctx->last_decoded_picinfo.buf_w;
ctx              1004 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->bytesperline[1] = ctx->last_decoded_picinfo.buf_w;
ctx              1005 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->coded_width = ctx->picinfo.buf_w;
ctx              1006 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		q_data->coded_height = ctx->picinfo.buf_h;
ctx              1007 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->last_decoded_picinfo.cap_fourcc = q_data->fmt->fourcc;
ctx              1053 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, f->type, ctx->state);
ctx              1065 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1069 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = mtk_vdec_get_q_data(ctx, vq->type);
ctx              1093 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, vq->type, *nplanes, *nbuffers,
ctx              1101 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1106 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, vb->vb2_queue->type, vb->index);
ctx              1108 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	q_data = mtk_vdec_get_q_data(ctx, vb->vb2_queue->type);
ctx              1128 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1134 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, vb->vb2_queue->type,
ctx              1142 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mutex_lock(&ctx->lock);
ctx              1144 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			v4l2_m2m_buf_queue(ctx->m2m_ctx, vb2_v4l2);
ctx              1151 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mutex_unlock(&ctx->lock);
ctx              1155 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_m2m_buf_queue(ctx->m2m_ctx, to_vb2_v4l2_buffer(vb));
ctx              1157 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state != MTK_STATE_INIT) {
ctx              1159 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, ctx->state);
ctx              1163 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	src_buf = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx              1172 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx              1181 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, src_buf->vb2_buf.index,
ctx              1185 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ret = vdec_if_decode(ctx, &src_mem, NULL, &res_chg);
ctx              1194 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx              1197 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 					ctx->id);
ctx              1198 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->state = MTK_STATE_ABORT;
ctx              1205 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			       ctx->id, src_buf->vb2_buf.index,
ctx              1210 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (vdec_if_get_param(ctx, GET_PARAM_PIC_INFO, &ctx->picinfo)) {
ctx              1212 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id);
ctx              1216 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->last_decoded_picinfo = ctx->picinfo;
ctx              1217 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_q_data = &ctx->q_data[MTK_Q_DATA_DST];
ctx              1219 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		dst_q_data->sizeimage[i] = ctx->picinfo.fb_sz[i];
ctx              1220 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		dst_q_data->bytesperline[i] = ctx->picinfo.buf_w;
ctx              1224 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id,
ctx              1225 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->picinfo.buf_w, ctx->picinfo.buf_h,
ctx              1226 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->picinfo.pic_w, ctx->picinfo.pic_h,
ctx              1230 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ret = vdec_if_get_param(ctx, GET_PARAM_DPB_SIZE, &dpbsize);
ctx              1232 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_v4l2_err("[%d] GET_PARAM_DPB_SIZE fail=%d", ctx->id, ret);
ctx              1234 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->dpb_size = dpbsize;
ctx              1235 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->state = MTK_STATE_HEADER;
ctx              1236 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(1, "[%d] dpbsize=%d", ctx->id, ctx->dpb_size);
ctx              1238 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_vdec_queue_res_chg_event(ctx);
ctx              1243 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1250 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_lock(&ctx->lock);
ctx              1256 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mutex_unlock(&ctx->lock);
ctx              1260 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->state = MTK_STATE_ABORT;
ctx              1283 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(q);
ctx              1285 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state == MTK_STATE_FLUSH)
ctx              1286 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->state = MTK_STATE_HEADER;
ctx              1294 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(q);
ctx              1297 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctx->id, q->type, ctx->state, ctx->decoded_frame_cnt);
ctx              1300 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		while ((src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx))) {
ctx              1310 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state >= MTK_STATE_HEADER) {
ctx              1318 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		ctx->picinfo = ctx->last_decoded_picinfo;
ctx              1322 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->id, ctx->last_decoded_picinfo.pic_w,
ctx              1323 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->last_decoded_picinfo.pic_h,
ctx              1324 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->picinfo.pic_w, ctx->picinfo.pic_h,
ctx              1325 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->last_decoded_picinfo.buf_w,
ctx              1326 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->last_decoded_picinfo.buf_h);
ctx              1328 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		mtk_vdec_flush_decoder(ctx);
ctx              1330 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->state = MTK_STATE_FLUSH;
ctx              1332 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	while ((dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx))) {
ctx              1334 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (ctx->q_data[MTK_Q_DATA_DST].fmt->num_planes == 2)
ctx              1343 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx              1344 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_dev *dev = ctx->dev;
ctx              1346 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	queue_work(dev->decode_workqueue, &ctx->decode_work);
ctx              1351 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = m2m_priv;
ctx              1353 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(3, "[%d]", ctx->id);
ctx              1355 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state == MTK_STATE_ABORT)
ctx              1358 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if ((ctx->last_decoded_picinfo.pic_w != ctx->picinfo.pic_w) ||
ctx              1359 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	    (ctx->last_decoded_picinfo.pic_h != ctx->picinfo.pic_h))
ctx              1362 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->state != MTK_STATE_HEADER)
ctx              1370 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = priv;
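Note: the job_ready entries (lines 1351-1362) outline a three-way gate for the m2m scheduler: no job while aborted, none mid resolution change, none before the header is parsed. A probable reconstruction, with the function name assumed:

static int m2mops_vdec_job_ready(void *m2m_priv)
{
        struct mtk_vcodec_ctx *ctx = m2m_priv;

        /* Dead instance: never schedule. */
        if (ctx->state == MTK_STATE_ABORT)
                return 0;

        /* Mid resolution change: wait for queue renegotiation. */
        if (ctx->last_decoded_picinfo.pic_w != ctx->picinfo.pic_w ||
            ctx->last_decoded_picinfo.pic_h != ctx->picinfo.pic_h)
                return 0;

        /* Header not parsed yet: no picture geometry to decode into. */
        if (ctx->state != MTK_STATE_HEADER)
                return 0;

        return 1;
}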
ctx              1372 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctx->state = MTK_STATE_ABORT;
ctx              1377 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              1382 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		if (ctx->state >= MTK_STATE_HEADER) {
ctx              1383 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 			ctrl->val = ctx->dpb_size;
ctx              1399 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c int mtk_vcodec_dec_ctrls_setup(struct mtk_vcodec_ctx *ctx)
ctx              1403 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_ctrl_handler_init(&ctx->ctrl_hdl, 1);
ctx              1405 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	ctrl = v4l2_ctrl_new_std(&ctx->ctrl_hdl,
ctx              1410 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_hdl,
ctx              1416 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	if (ctx->ctrl_hdl.error) {
ctx              1418 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 				ctx->ctrl_hdl.error);
ctx              1419 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 		return ctx->ctrl_hdl.error;
ctx              1422 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_hdl);
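Note: mtk_vcodec_dec_ctrls_setup() (lines 1399-1422) registers one standard control and one menu control, checks the handler for errors, then latches defaults with a setup pass. Because s_ctrl (lines 1377-1383) answers from ctx->dpb_size, the standard control is very likely V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; the control ID, its range, and the ops name below are inferences, since the index truncates those arguments:

int mtk_vcodec_dec_ctrls_setup(struct mtk_vcodec_ctx *ctx)
{
        struct v4l2_ctrl *ctrl;

        v4l2_ctrl_handler_init(&ctx->ctrl_hdl, 1);

        /* Volatile: answered from ctx->dpb_size on every read. */
        ctrl = v4l2_ctrl_new_std(&ctx->ctrl_hdl, &mtk_vcodec_dec_ctrl_ops,
                                 V4L2_CID_MIN_BUFFERS_FOR_CAPTURE,
                                 0, 32, 1, 1);
        if (ctrl)
                ctrl->flags |= V4L2_CTRL_FLAG_VOLATILE;

        /* A v4l2_ctrl_new_std_menu() call follows in the original
         * (line 1410); its control ID is not recoverable from the index. */

        if (ctx->ctrl_hdl.error)
                return ctx->ctrl_hdl.error;

        v4l2_ctrl_handler_setup(&ctx->ctrl_hdl);
        return 0;
}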
ctx              1481 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx              1484 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	mtk_v4l2_debug(3, "[%d]", ctx->id);
ctx              1488 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	src_vq->drv_priv	= ctx;
ctx              1493 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	src_vq->lock		= &ctx->dev->dev_mutex;
ctx              1494 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	src_vq->dev             = &ctx->dev->plat_dev->dev;
ctx              1503 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_vq->drv_priv	= ctx;
ctx              1508 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_vq->lock		= &ctx->dev->dev_mutex;
ctx              1509 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c 	dst_vq->dev             = &ctx->dev->plat_dev->dev;
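Note: the queue-init callback (lines 1481-1509) configures the OUTPUT and CAPTURE vb2 queues symmetrically; the index keeps only the ctx-related assignments (drv_priv, lock, dev). The remaining fields below (type, io_modes, ops, mem_ops, timestamp_flags) are filled in from the usual vb2 mem2mem boilerplate and are assumptions, as are the ops name and the omission of buf_struct_size:

static void fill_vq(struct vb2_queue *vq, struct mtk_vcodec_ctx *ctx,
                    unsigned int type)
{
        vq->type            = type;
        vq->io_modes        = VB2_DMABUF | VB2_MMAP;
        vq->drv_priv        = ctx;
        vq->ops             = &mtk_vdec_vb2_ops;        /* assumed name */
        vq->mem_ops         = &vb2_dma_contig_memops;
        vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
        vq->lock            = &ctx->dev->dev_mutex;
        vq->dev             = &ctx->dev->plat_dev->dev;
}

static int m2mops_vdec_queue_init(void *priv, struct vb2_queue *src_vq,
                                  struct vb2_queue *dst_vq)
{
        struct mtk_vcodec_ctx *ctx = priv;
        int ret;

        fill_vq(src_vq, ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
        ret = vb2_queue_init(src_vq);
        if (ret)
                return ret;

        fill_vq(dst_vq, ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
        return vb2_queue_init(dst_vq);
}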
ctx                71 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.h void mtk_vdec_unlock(struct mtk_vcodec_ctx *ctx);
ctx                72 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.h void mtk_vdec_lock(struct mtk_vcodec_ctx *ctx);
ctx                75 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.h void mtk_vcodec_dec_set_default_params(struct mtk_vcodec_ctx *ctx);
ctx                76 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.h void mtk_vcodec_dec_release(struct mtk_vcodec_ctx *ctx);
ctx                77 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.h int mtk_vcodec_dec_ctrls_setup(struct mtk_vcodec_ctx *ctx);
ctx                34 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c static void wake_up_ctx(struct mtk_vcodec_ctx *ctx)
ctx                36 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->int_cond = 1;
ctx                37 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	wake_up_interruptible(&ctx->queue);
ctx                43 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	struct mtk_vcodec_ctx *ctx;
ctx                49 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx = mtk_vcodec_get_curr_ctx(dev);
ctx                60 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->irq_status = dec_done_status;
ctx                71 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	wake_up_ctx(ctx);
ctx                75 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 			ctx->id, dec_done_status);
ctx                83 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	struct mtk_vcodec_ctx *ctx;
ctx                88 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	list_for_each_entry(ctx, &dev->ctx_list, list) {
ctx                89 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 		ctx->state = MTK_STATE_ABORT;
ctx                91 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 				ctx->id);
ctx                99 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	struct mtk_vcodec_ctx *ctx = NULL;
ctx               104 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               105 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	if (!ctx)
ctx               109 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 		kfree(ctx);
ctx               114 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->empty_flush_buf = mtk_buf;
ctx               115 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->id = dev->id_counter++;
ctx               116 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               117 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	file->private_data = &ctx->fh;
ctx               118 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_add(&ctx->fh);
ctx               119 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	INIT_LIST_HEAD(&ctx->list);
ctx               120 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->dev = dev;
ctx               121 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	init_waitqueue_head(&ctx->queue);
ctx               122 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	mutex_init(&ctx->lock);
ctx               124 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->type = MTK_INST_DECODER;
ctx               125 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ret = mtk_vcodec_dec_ctrls_setup(ctx);
ctx               130 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev_dec, ctx,
ctx               132 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	if (IS_ERR((__force void *)ctx->m2m_ctx)) {
ctx               133 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 		ret = PTR_ERR((__force void *)ctx->m2m_ctx);
ctx               138 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	src_vq = v4l2_m2m_get_vq(ctx->m2m_ctx,
ctx               140 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->empty_flush_buf->vb.vb2_buf.vb2_queue = src_vq;
ctx               141 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	ctx->empty_flush_buf->lastframe = true;
ctx               142 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	mtk_vcodec_dec_set_default_params(ctx);
ctx               144 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	if (v4l2_fh_is_singular(&ctx->fh)) {
ctx               165 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	list_add(&ctx->list, &dev->ctx_list);
ctx               169 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 			ctx->id);
ctx               174 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx               176 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx               178 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               179 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               180 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	kfree(ctx->empty_flush_buf);
ctx               181 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	kfree(ctx);
ctx               190 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(file->private_data);
ctx               192 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	mtk_v4l2_debug(0, "[%d] decoder", ctx->id);
ctx               201 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx               202 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	mtk_vcodec_dec_release(ctx);
ctx               204 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	if (v4l2_fh_is_singular(&ctx->fh))
ctx               206 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               207 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               208 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx               210 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	list_del_init(&ctx->list);
ctx               211 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	kfree(ctx->empty_flush_buf);
ctx               212 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec_drv.c 	kfree(ctx);
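Note: wake_up_ctx() (mtk_vcodec_dec_drv.c lines 34-37) and init_waitqueue_head() (line 121) form the two ends of an IRQ-to-worker handshake: the interrupt handler latches the hardware status, sets int_cond and wakes ctx->queue. The matching wait side does not appear in this index; the sketch below assumes the usual wait_event pattern, and read_dec_status() is a hypothetical helper standing in for the register reads elided around line 60:

#include <linux/interrupt.h>
#include <linux/wait.h>
#include <linux/jiffies.h>

/* IRQ side: latch status, mark the condition, wake the waiter. */
static irqreturn_t mtk_vcodec_dec_irq_handler(int irq, void *priv)
{
        struct mtk_vcodec_dev *dev = priv;
        struct mtk_vcodec_ctx *ctx = mtk_vcodec_get_curr_ctx(dev);

        ctx->irq_status = read_dec_status(dev); /* hypothetical helper */
        ctx->int_cond = 1;
        wake_up_interruptible(&ctx->queue);

        return IRQ_HANDLED;
}

/* Presumed consumer: sleep until the IRQ fires or a timeout elapses,
 * then clear the condition for the next decode. */
static int wait_decode_done(struct mtk_vcodec_ctx *ctx, unsigned int ms)
{
        int ret = wait_event_interruptible_timeout(ctx->queue, ctx->int_cond,
                                                   msecs_to_jiffies(ms));

        ctx->int_cond = 0;
        return ret > 0 ? 0 : -ETIMEDOUT;
}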
ctx                84 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = ctrl_to_ctx(ctrl);
ctx                85 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_enc_params *p = &ctx->enc_params;
ctx                93 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->param_change |= MTK_ENCODE_PARAM_BITRATE;
ctx               134 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->param_change |= MTK_ENCODE_PARAM_INTRA_PERIOD;
ctx               140 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->param_change |= MTK_ENCODE_PARAM_GOP_SIZE;
ctx               145 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->param_change |= MTK_ENCODE_PARAM_FORCE_INTRA;
ctx               227 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               232 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->enc_params.framerate_num =
ctx               234 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->enc_params.framerate_denom =
ctx               236 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->param_change |= MTK_ENCODE_PARAM_FRAMERATE;
ctx               246 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               253 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			ctx->enc_params.framerate_num;
ctx               255 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			ctx->enc_params.framerate_denom;
ctx               260 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c static struct mtk_q_data *mtk_venc_get_q_data(struct mtk_vcodec_ctx *ctx,
ctx               264 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		return &ctx->q_data[MTK_Q_DATA_SRC];
ctx               266 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	return &ctx->q_data[MTK_Q_DATA_DST];
ctx               369 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c static void mtk_venc_set_param(struct mtk_vcodec_ctx *ctx,
ctx               372 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_q_data *q_data_src = &ctx->q_data[MTK_Q_DATA_SRC];
ctx               373 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_enc_params *enc_params = &ctx->enc_params;
ctx               419 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               425 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               436 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, f->type);
ctx               465 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if (ctx->state == MTK_STATE_FREE) {
ctx               466 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ret = venc_if_init(ctx, q_data->fmt->fourcc);
ctx               472 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->state = MTK_STATE_INIT;
ctx               481 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               488 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               499 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, f->type);
ctx               529 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->colorspace = f->fmt.pix_mp.colorspace;
ctx               530 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->ycbcr_enc = f->fmt.pix_mp.ycbcr_enc;
ctx               531 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->quantization = f->fmt.pix_mp.quantization;
ctx               532 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->xfer_func = f->fmt.pix_mp.xfer_func;
ctx               549 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               554 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	vq = v4l2_m2m_get_vq(ctx->m2m_ctx, f->type);
ctx               558 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, f->type);
ctx               573 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	pix->colorspace = ctx->colorspace;
ctx               574 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	pix->ycbcr_enc = ctx->ycbcr_enc;
ctx               575 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	pix->quantization = ctx->quantization;
ctx               576 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	pix->xfer_func = ctx->xfer_func;
ctx               585 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               592 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	f->fmt.pix_mp.colorspace = ctx->colorspace;
ctx               593 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	f->fmt.pix_mp.ycbcr_enc = ctx->ycbcr_enc;
ctx               594 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	f->fmt.pix_mp.quantization = ctx->quantization;
ctx               595 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	f->fmt.pix_mp.xfer_func = ctx->xfer_func;
ctx               623 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               629 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, s->type);
ctx               657 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               663 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, s->type);
ctx               686 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               688 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if (ctx->state == MTK_STATE_ABORT) {
ctx               690 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 				ctx->id);
ctx               694 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	return v4l2_m2m_qbuf(file, ctx->m2m_ctx, buf);
ctx               700 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(priv);
ctx               702 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if (ctx->state == MTK_STATE_ABORT) {
ctx               704 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 				ctx->id);
ctx               708 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	return v4l2_m2m_dqbuf(file, ctx->m2m_ctx, buf);
ctx               752 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vq);
ctx               756 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, vq->type);
ctx               776 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               780 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = mtk_venc_get_q_data(ctx, vb->vb2_queue->type);
ctx               796 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               804 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	    (ctx->param_change != MTK_ENCODE_PARAM_NONE)) {
ctx               806 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       ctx->id,
ctx               808 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       ctx->param_change);
ctx               809 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		mtk_buf->param_change = ctx->param_change;
ctx               810 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		mtk_buf->enc_params = ctx->enc_params;
ctx               811 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->param_change = MTK_ENCODE_PARAM_NONE;
ctx               814 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	v4l2_m2m_buf_queue(ctx->m2m_ctx, to_vb2_v4l2_buffer(vb));
ctx               819 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(q);
ctx               827 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if ((ctx->state == MTK_STATE_ABORT) || (ctx->state == MTK_STATE_FREE)) {
ctx               834 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		if (!vb2_start_streaming_called(&ctx->m2m_ctx->cap_q_ctx.q))
ctx               837 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		if (!vb2_start_streaming_called(&ctx->m2m_ctx->out_q_ctx.q))
ctx               841 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	mtk_venc_set_param(ctx, &param);
ctx               842 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ret = venc_if_set_param(ctx, VENC_SET_PARAM_ENC, &param);
ctx               845 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->state = MTK_STATE_ABORT;
ctx               848 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->param_change = MTK_ENCODE_PARAM_NONE;
ctx               850 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if ((ctx->q_data[MTK_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_H264) &&
ctx               851 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	    (ctx->enc_params.seq_hdr_mode !=
ctx               853 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ret = venc_if_set_param(ctx,
ctx               858 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			ctx->state = MTK_STATE_ABORT;
ctx               861 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->state = MTK_STATE_HEADER;
ctx               876 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 					ctx->id, i, q->type,
ctx               888 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = vb2_get_drv_priv(q);
ctx               892 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	mtk_v4l2_debug(2, "[%d]-> type=%d", ctx->id, q->type);
ctx               895 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		while ((dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx))) {
ctx               900 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		while ((src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx)))
ctx               905 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	     vb2_is_streaming(&ctx->m2m_ctx->out_q_ctx.q)) ||
ctx               907 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	     vb2_is_streaming(&ctx->m2m_ctx->cap_q_ctx.q))) {
ctx               909 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       ctx->id, q->type,
ctx               910 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       vb2_is_streaming(&ctx->m2m_ctx->out_q_ctx.q),
ctx               911 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       vb2_is_streaming(&ctx->m2m_ctx->cap_q_ctx.q));
ctx               916 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ret = venc_if_deinit(ctx);
ctx               920 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->state = MTK_STATE_FREE;
ctx               935 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx               941 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx               953 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			ctx->id,
ctx               958 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ret = venc_if_encode(ctx,
ctx               964 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->state = MTK_STATE_ABORT;
ctx               969 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	src_buf = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx               977 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->state = MTK_STATE_HEADER;
ctx               984 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c static int mtk_venc_param_change(struct mtk_vcodec_ctx *ctx)
ctx               987 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct vb2_v4l2_buffer *vb2_v4l2 = v4l2_m2m_next_src_buf(ctx->m2m_ctx);
ctx              1000 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 				ctx->id,
ctx              1003 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ret |= venc_if_set_param(ctx,
ctx              1011 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       ctx->id,
ctx              1014 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ret |= venc_if_set_param(ctx,
ctx              1022 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ret |= venc_if_set_param(ctx,
ctx              1028 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 				ctx->id,
ctx              1032 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			ret |= venc_if_set_param(ctx,
ctx              1040 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		ctx->state = MTK_STATE_ABORT;
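Note: the encoder's deferred parameter-change scheme is spread across s_ctrl (lines 84-145, which only sets bits in ctx->param_change), buf_queue (lines 804-811, which snapshots the mask and parameters onto the queued buffer) and mtk_venc_param_change (lines 984-1040, which pushes them to the codec just before that frame is encoded). A simplified standalone model of the bitmask-plus-snapshot idea; all names below are invented:

#include <stdint.h>
#include <stdio.h>

#define PARAM_BITRATE   (1u << 0)
#define PARAM_GOP       (1u << 1)

struct enc_params { int bitrate; int gop; };

struct enc_buf {
        uint32_t param_change;          /* which params changed */
        struct enc_params enc_params;   /* snapshot at queue time */
};

/* Apply only the parameters flagged on this buffer, so a control write
 * takes effect exactly at the frame that was queued after it. */
static void apply_param_change(const struct enc_buf *buf)
{
        if (buf->param_change & PARAM_BITRATE)
                printf("set bitrate %d\n", buf->enc_params.bitrate);
        if (buf->param_change & PARAM_GOP)
                printf("set gop %d\n", buf->enc_params.gop);
}

int main(void)
{
        struct enc_buf b = { PARAM_BITRATE, { 4000000, 30 } };

        apply_param_change(&b);
        return 0;
}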
ctx              1059 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = container_of(work, struct mtk_vcodec_ctx,
ctx              1071 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->m2m_ctx);
ctx              1073 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		v4l2_m2m_job_finish(ctx->dev->m2m_dev_enc, ctx->m2m_ctx);
ctx              1077 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->m2m_ctx);
ctx              1098 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ret = venc_if_encode(ctx, VENC_START_OPT_ENCODE_FRAME,
ctx              1120 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	v4l2_m2m_job_finish(ctx->dev->m2m_dev_enc, ctx->m2m_ctx);
ctx              1129 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx              1131 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if ((ctx->q_data[MTK_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_H264) &&
ctx              1132 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	    (ctx->state != MTK_STATE_HEADER)) {
ctx              1134 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		mtk_venc_encode_header(ctx);
ctx              1135 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 		queue_work(ctx->dev->encode_workqueue, &ctx->encode_work);
ctx              1139 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	mtk_venc_param_change(ctx);
ctx              1140 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	queue_work(ctx->dev->encode_workqueue, &ctx->encode_work);
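Note: the encoder device_run entries (lines 1129-1140) show that H.264 streams get their stream header emitted once, lazily, before the first frame, and that every path ends by queueing the per-frame work item. A reconstruction with the function name assumed and the lines between the shown ones filled in to match:

static void m2mops_venc_device_run(void *priv)
{
        struct mtk_vcodec_ctx *ctx = priv;

        if (ctx->q_data[MTK_Q_DATA_DST].fmt->fourcc == V4L2_PIX_FMT_H264 &&
            ctx->state != MTK_STATE_HEADER) {
                /* First job on an H.264 stream: emit the header first. */
                mtk_venc_encode_header(ctx);
                queue_work(ctx->dev->encode_workqueue, &ctx->encode_work);
                return;
        }

        /* Push any pending per-frame parameter changes, then encode. */
        mtk_venc_param_change(ctx);
        queue_work(ctx->dev->encode_workqueue, &ctx->encode_work);
}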
ctx              1145 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = m2m_priv;
ctx              1147 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	if (ctx->state == MTK_STATE_ABORT || ctx->state == MTK_STATE_FREE) {
ctx              1149 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 			       ctx->id, ctx->state);
ctx              1158 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx              1160 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->state = MTK_STATE_ABORT;
ctx              1169 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c void mtk_vcodec_enc_set_default_params(struct mtk_vcodec_ctx *ctx)
ctx              1173 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->m2m_ctx->q_lock = &ctx->dev->dev_mutex;
ctx              1174 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->fh.m2m_ctx = ctx->m2m_ctx;
ctx              1175 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->fh.ctrl_handler = &ctx->ctrl_hdl;
ctx              1176 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	INIT_WORK(&ctx->encode_work, mtk_venc_worker);
ctx              1178 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx              1179 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
ctx              1180 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;
ctx              1181 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
ctx              1183 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = &ctx->q_data[MTK_Q_DATA_SRC];
ctx              1216 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	q_data = &ctx->q_data[MTK_Q_DATA_DST];
ctx              1222 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->q_data[MTK_Q_DATA_DST].sizeimage[0] =
ctx              1224 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->q_data[MTK_Q_DATA_DST].bytesperline[0] = 0;
ctx              1228 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c int mtk_vcodec_enc_ctrls_setup(struct mtk_vcodec_ctx *ctx)
ctx              1231 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct v4l2_ctrl_handler *handler = &ctx->ctrl_hdl;
ctx              1267 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_hdl);
ctx              1275 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_ctx *ctx = priv;
ctx              1285 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	src_vq->drv_priv	= ctx;
ctx              1290 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	src_vq->lock		= &ctx->dev->dev_mutex;
ctx              1291 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	src_vq->dev		= &ctx->dev->plat_dev->dev;
ctx              1299 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	dst_vq->drv_priv	= ctx;
ctx              1304 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	dst_vq->lock		= &ctx->dev->dev_mutex;
ctx              1305 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	dst_vq->dev		= &ctx->dev->plat_dev->dev;
ctx              1310 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c int mtk_venc_unlock(struct mtk_vcodec_ctx *ctx)
ctx              1312 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_dev *dev = ctx->dev;
ctx              1318 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c int mtk_venc_lock(struct mtk_vcodec_ctx *ctx)
ctx              1320 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	struct mtk_vcodec_dev *dev = ctx->dev;
ctx              1326 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c void mtk_vcodec_enc_release(struct mtk_vcodec_ctx *ctx)
ctx              1328 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	int ret = venc_if_deinit(ctx);
ctx              1333 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c 	ctx->state = MTK_STATE_FREE;
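Note: mtk_venc_lock()/mtk_venc_unlock() (lines 1310-1320) serialize access to the single encoder core across contexts. The index drops the mutex call itself, so the enc_mutex field name below is inferred from the decoder's dec_mutex counterpart (mtk_vcodec_dec.c lines 540-547):

int mtk_venc_lock(struct mtk_vcodec_ctx *ctx)
{
        struct mtk_vcodec_dev *dev = ctx->dev;

        mutex_lock(&dev->enc_mutex);    /* inferred field name */
        return 0;
}

int mtk_venc_unlock(struct mtk_vcodec_ctx *ctx)
{
        struct mtk_vcodec_dev *dev = ctx->dev;

        mutex_unlock(&dev->enc_mutex);  /* inferred field name */
        return 0;
}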
ctx                42 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.h int mtk_venc_unlock(struct mtk_vcodec_ctx *ctx);
ctx                43 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.h int mtk_venc_lock(struct mtk_vcodec_ctx *ctx);
ctx                46 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.h void mtk_vcodec_enc_release(struct mtk_vcodec_ctx *ctx);
ctx                47 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.h int mtk_vcodec_enc_ctrls_setup(struct mtk_vcodec_ctx *ctx);
ctx                48 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.h void mtk_vcodec_enc_set_default_params(struct mtk_vcodec_ctx *ctx);
ctx                30 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c static void wake_up_ctx(struct mtk_vcodec_ctx *ctx, unsigned int reason)
ctx                32 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->int_cond = 1;
ctx                33 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->int_type = reason;
ctx                34 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	wake_up_interruptible(&ctx->queue);
ctx                61 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	struct mtk_vcodec_ctx *ctx;
ctx                66 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx = dev->curr_ctx;
ctx                69 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	mtk_v4l2_debug(1, "id=%d", ctx->id);
ctx                72 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->irq_status = readl(dev->reg_base[VENC_SYS] +
ctx                75 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	clean_irq_status(ctx->irq_status, addr);
ctx                77 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	wake_up_ctx(ctx, MTK_INST_IRQ_RECEIVED);
ctx                84 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	struct mtk_vcodec_ctx *ctx;
ctx                89 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx = dev->curr_ctx;
ctx                92 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	mtk_v4l2_debug(1, "id=%d", ctx->id);
ctx                93 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->irq_status = readl(dev->reg_base[VENC_LT_SYS] +
ctx                98 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	clean_irq_status(ctx->irq_status, addr);
ctx               100 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	wake_up_ctx(ctx, MTK_INST_IRQ_RECEIVED);
ctx               107 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	struct mtk_vcodec_ctx *ctx;
ctx               112 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	list_for_each_entry(ctx, &dev->ctx_list, list) {
ctx               113 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 		ctx->state = MTK_STATE_ABORT;
ctx               115 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 				ctx->id);
ctx               123 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	struct mtk_vcodec_ctx *ctx = NULL;
ctx               126 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               127 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	if (!ctx)
ctx               135 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->id = dev->id_counter++;
ctx               136 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               137 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	file->private_data = &ctx->fh;
ctx               138 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_add(&ctx->fh);
ctx               139 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	INIT_LIST_HEAD(&ctx->list);
ctx               140 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->dev = dev;
ctx               141 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	init_waitqueue_head(&ctx->queue);
ctx               143 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->type = MTK_INST_ENCODER;
ctx               144 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ret = mtk_vcodec_enc_ctrls_setup(ctx);
ctx               150 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	ctx->m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev_enc, ctx,
ctx               152 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	if (IS_ERR((__force void *)ctx->m2m_ctx)) {
ctx               153 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 		ret = PTR_ERR((__force void *)ctx->m2m_ctx);
ctx               158 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	mtk_vcodec_enc_set_default_params(ctx);
ctx               160 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	if (v4l2_fh_is_singular(&ctx->fh)) {
ctx               181 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 			ctx->id, ctx, ctx->m2m_ctx);
ctx               183 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	list_add(&ctx->list, &dev->ctx_list);
ctx               187 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 			ctx->id);
ctx               192 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx               194 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx               196 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               197 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               198 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	kfree(ctx);
ctx               207 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	struct mtk_vcodec_ctx *ctx = fh_to_ctx(file->private_data);
ctx               209 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	mtk_v4l2_debug(1, "[%d] encoder", ctx->id);
ctx               212 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	mtk_vcodec_enc_release(ctx);
ctx               213 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               214 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               215 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
ctx               216 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	v4l2_m2m_ctx_release(ctx->m2m_ctx);
ctx               218 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	list_del_init(&ctx->list);
ctx               219 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc_drv.c 	kfree(ctx);
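
The fops open()/release() pair above follows the usual m2m lifecycle: allocate the context, register the file handle, set up controls, create the m2m context, and on any failure unwind in exact reverse order (the err_* tail visible above). A condensed sketch under those assumptions, with demo_ placeholders standing in for the driver's types and helpers:

	#include <linux/err.h>
	#include <linux/slab.h>
	#include <media/v4l2-ctrls.h>
	#include <media/v4l2-dev.h>
	#include <media/v4l2-fh.h>
	#include <media/v4l2-mem2mem.h>

	struct demo_dev {				/* hypothetical */
		struct v4l2_m2m_dev *m2m_dev;
	};

	struct demo_ctx {				/* hypothetical */
		struct v4l2_fh fh;
		struct v4l2_ctrl_handler ctrl_hdl;
		struct v4l2_m2m_ctx *m2m_ctx;
	};

	extern int demo_ctrls_setup(struct demo_ctx *ctx);	/* assumed */
	extern int demo_queue_init(void *priv, struct vb2_queue *src_vq,
				   struct vb2_queue *dst_vq);

	static int demo_open(struct file *file)
	{
		struct demo_dev *dev = video_drvdata(file);
		struct demo_ctx *ctx;
		int ret;

		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
		if (!ctx)
			return -ENOMEM;

		v4l2_fh_init(&ctx->fh, video_devdata(file));
		file->private_data = &ctx->fh;
		v4l2_fh_add(&ctx->fh);

		ret = demo_ctrls_setup(ctx);
		if (ret)
			goto err_ctrls;

		ctx->m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx,
						 demo_queue_init);
		if (IS_ERR(ctx->m2m_ctx)) {
			ret = PTR_ERR(ctx->m2m_ctx);
			goto err_m2m;
		}
		return 0;

	err_m2m:
		v4l2_ctrl_handler_free(&ctx->ctrl_hdl);
	err_ctrls:
		v4l2_fh_del(&ctx->fh);
		v4l2_fh_exit(&ctx->fh);
		kfree(ctx);
		return ret;
	}

release() then performs the same teardown unconditionally, as the listing's fops release excerpt does.
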
ctx                14 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c int mtk_vcodec_wait_for_done_ctx(struct mtk_vcodec_ctx  *ctx, int command,
ctx                21 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 	waitqueue = (wait_queue_head_t *)&ctx->queue;
ctx                25 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 				ctx->int_cond,
ctx                31 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 				ctx->id, ctx->type, command, timeout_ms,
ctx                32 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 				ctx->int_cond, ctx->int_type);
ctx                35 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 				ctx->id, ctx->type, command, ctx->int_cond,
ctx                36 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 				ctx->int_type);
ctx                40 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 	ctx->int_cond = 0;
ctx                41 drivers/media/platform/mtk-vcodec/mtk_vcodec_intr.c 	ctx->int_type = 0;
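
wake_up_ctx() in the enc_drv.c excerpt and mtk_vcodec_wait_for_done_ctx() here are the two halves of one handshake: the ISR sets a condition flag plus a reason code and wakes the queue; the submitting thread waits with a timeout and clears both for the next job. A minimal sketch of that pattern, assuming the wait queue was initialized with init_waitqueue_head() at open time as the listing shows; a plain struct completion would also work, but the reason code (int_type) is carried alongside here, as in the original:

	#include <linux/errno.h>
	#include <linux/jiffies.h>
	#include <linux/wait.h>

	struct demo_ctx {			/* hypothetical */
		wait_queue_head_t queue;
		int int_cond;
		unsigned int int_type;
	};

	/* interrupt side: record the reason, then wake the waiter */
	static void demo_wake(struct demo_ctx *ctx, unsigned int reason)
	{
		ctx->int_cond = 1;
		ctx->int_type = reason;
		wake_up_interruptible(&ctx->queue);
	}

	/* process side: block until the ISR fired or the timeout hit */
	static int demo_wait_done(struct demo_ctx *ctx, unsigned int timeout_ms)
	{
		long ret;

		ret = wait_event_interruptible_timeout(ctx->queue,
						       ctx->int_cond,
						       msecs_to_jiffies(timeout_ms));
		/* 0 = timeout, <0 = interrupted, >0 = condition met */
		ctx->int_cond = 0;
		ctx->int_type = 0;
		if (ret == 0)
			return -ETIMEDOUT;
		if (ret < 0)
			return ret;
		return 0;
	}
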
ctx                27 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct mtk_vcodec_ctx *ctx = (struct mtk_vcodec_ctx *)data;
ctx                33 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	return ctx->dev->reg_base[reg_idx];
ctx                41 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct mtk_vcodec_ctx *ctx = (struct mtk_vcodec_ctx *)data;
ctx                42 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct device *dev = &ctx->dev->plat_dev->dev;
ctx                51 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - va      = %p", ctx->id, mem->va);
ctx                52 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - dma     = 0x%lx", ctx->id,
ctx                54 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]    size = 0x%lx", ctx->id, size);
ctx                64 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct mtk_vcodec_ctx *ctx = (struct mtk_vcodec_ctx *)data;
ctx                65 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct device *dev = &ctx->dev->plat_dev->dev;
ctx                73 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - va      = %p", ctx->id, mem->va);
ctx                74 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]  - dma     = 0x%lx", ctx->id,
ctx                76 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	mtk_v4l2_debug(3, "[%d]    size = 0x%lx", ctx->id, size);
ctx                86 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct mtk_vcodec_ctx *ctx)
ctx                91 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	dev->curr_ctx = ctx;
ctx                99 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	struct mtk_vcodec_ctx *ctx;
ctx               102 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	ctx = dev->curr_ctx;
ctx               104 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.c 	return ctx;
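
mtk_vcodec_set_curr_ctx()/get above publish which instance currently owns the hardware, so the IRQ handlers earlier in the listing can resolve dev->curr_ctx back to a context. The spinlock (dev->irqlock in the source) keeps publication and ISR lookup coherent. A sketch with assumed demo_ names:

	#include <linux/spinlock.h>

	struct demo_ctx;			/* hypothetical */

	struct demo_dev {			/* hypothetical */
		spinlock_t irqlock;
		struct demo_ctx *curr_ctx;
	};

	static void demo_set_curr_ctx(struct demo_dev *dev,
				      struct demo_ctx *ctx)
	{
		unsigned long flags;

		spin_lock_irqsave(&dev->irqlock, flags);
		dev->curr_ctx = ctx;		/* NULL when hardware is idle */
		spin_unlock_irqrestore(&dev->irqlock, flags);
	}

	static struct demo_ctx *demo_get_curr_ctx(struct demo_dev *dev)
	{
		struct demo_ctx *ctx;
		unsigned long flags;

		spin_lock_irqsave(&dev->irqlock, flags);
		ctx = dev->curr_ctx;
		spin_unlock_irqrestore(&dev->irqlock, flags);
		return ctx;
	}

The irqsave variants are required because the reader runs in interrupt context.
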
ctx                38 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h 	       ((struct mtk_vcodec_ctx *)h->ctx)->id, __func__, ##args)
ctx                57 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h 				((struct mtk_vcodec_ctx *)h->ctx)->id, \
ctx                83 drivers/media/platform/mtk-vcodec/mtk_vcodec_util.h 	struct mtk_vcodec_ctx *ctx);
ctx               128 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	struct mtk_vcodec_ctx *ctx;
ctx               145 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	err = mtk_vcodec_mem_alloc(inst->ctx, &inst->pred_buf);
ctx               164 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               177 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 			mtk_vcodec_mem_free(inst->ctx, mem);
ctx               179 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		err = mtk_vcodec_mem_alloc(inst->ctx, mem);
ctx               199 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 			mtk_vcodec_mem_free(inst->ctx, mem);
ctx               272 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c static int vdec_h264_init(struct mtk_vcodec_ctx *ctx)
ctx               281 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	inst->ctx = ctx;
ctx               284 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	inst->vpu.dev = ctx->dev->vpu_plat_dev;
ctx               285 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	inst->vpu.ctx = ctx;
ctx               301 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 	ctx->drv_handle = inst;
ctx               416 drivers/media/platform/mtk-vcodec/vdec/vdec_h264_if.c 		err = mtk_vcodec_wait_for_done_ctx(inst->ctx,
ctx               163 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	struct mtk_vcodec_ctx *ctx;
ctx               170 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.top = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_TOP);
ctx               171 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.cm = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_CM);
ctx               172 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.hwd = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_HWD);
ctx               173 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.sys = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_SYS);
ctx               174 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.misc = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_MISC);
ctx               175 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.ld = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_LD);
ctx               176 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->reg_base.hwb = mtk_vcodec_get_reg_addr(inst->ctx, VDEC_HWB);
ctx               371 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	err = mtk_vcodec_mem_alloc(inst->ctx, mem);
ctx               386 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               391 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c static int vdec_vp8_init(struct mtk_vcodec_ctx *ctx)
ctx               400 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->ctx = ctx;
ctx               403 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->vpu.dev = ctx->dev->vpu_plat_dev;
ctx               404 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	inst->vpu.ctx = ctx;
ctx               422 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	ctx->drv_handle = inst;
ctx               492 drivers/media/platform/mtk-vcodec/vdec/vdec_vp8_if.c 	mtk_vcodec_wait_for_done_ctx(inst->ctx, MTK_INST_IRQ_RECEIVED,
ctx               195 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_ctx *ctx;
ctx               291 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 			mtk_vcodec_mem_free(inst->ctx,
ctx               293 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 			mtk_vcodec_mem_free(inst->ctx,
ctx               337 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mtk_vcodec_mem_alloc(inst->ctx, mem_basy_y)) {
ctx               346 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (mtk_vcodec_mem_alloc(inst->ctx, mem_basy_c)) {
ctx               365 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (!(inst->ctx->dev->dec_capability &
ctx               390 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               394 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(inst->ctx, mem);
ctx               408 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               411 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(inst->ctx, mem);
ctx               534 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	struct mtk_vcodec_ctx *ctx = inst->ctx;
ctx               536 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	mtk_vcodec_wait_for_done_ctx(inst->ctx,
ctx               540 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	if (ctx->irq_status & MTK_VDEC_IRQ_STATUS_DEC_SUCCESS)
ctx               546 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c static struct vdec_vp9_inst *vp9_alloc_inst(struct mtk_vcodec_ctx *ctx)
ctx               554 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	result = mtk_vcodec_mem_alloc(ctx, &mem);
ctx               570 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, &mem);
ctx               772 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               776 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 		mtk_vcodec_mem_free(inst->ctx, mem);
ctx               782 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c static int vdec_vp9_init(struct mtk_vcodec_ctx *ctx)
ctx               786 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst = vp9_alloc_inst(ctx);
ctx               791 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst->ctx = ctx;
ctx               794 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst->vpu.dev = ctx->dev->vpu_plat_dev;
ctx               795 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	inst->vpu.ctx = ctx;
ctx               806 drivers/media/platform/mtk-vcodec/vdec/vdec_vp9_if.c 	ctx->drv_handle = inst;
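
The H.264, VP8 and VP9 init paths above all share one shape: allocate a per-instance struct, point it back at the mtk_vcodec_ctx (and at the VPU), and only publish it through ctx->drv_handle once setup succeeded, since deinit and decode dereference that handle. A minimal sketch of the shape, with demo_ types and the firmware-session helper as labeled assumptions:

	#include <linux/errno.h>
	#include <linux/slab.h>

	struct demo_ctx {			/* hypothetical */
		void *drv_handle;
	};

	struct demo_inst {			/* hypothetical per-codec state */
		struct demo_ctx *ctx;		/* back-pointer to the owner */
	};

	extern int demo_fw_session_open(struct demo_inst *inst);  /* assumed */

	static int demo_codec_init(struct demo_ctx *ctx)
	{
		struct demo_inst *inst;
		int ret;

		inst = kzalloc(sizeof(*inst), GFP_KERNEL);
		if (!inst)
			return -ENOMEM;

		inst->ctx = ctx;

		ret = demo_fw_session_open(inst);
		if (ret) {
			kfree(inst);
			return ret;
		}

		/* publish only once everything is ready */
		ctx->drv_handle = inst;
		return 0;
	}
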
ctx                20 drivers/media/platform/mtk-vcodec/vdec_drv_base.h 	int (*init)(struct mtk_vcodec_ctx *ctx);
ctx                18 drivers/media/platform/mtk-vcodec/vdec_drv_if.c int vdec_if_init(struct mtk_vcodec_ctx *ctx, unsigned int fourcc)
ctx                24 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 		ctx->dec_if = &vdec_h264_if;
ctx                27 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 		ctx->dec_if = &vdec_vp8_if;
ctx                30 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 		ctx->dec_if = &vdec_vp9_if;
ctx                36 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_lock(ctx);
ctx                37 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_on(&ctx->dev->pm);
ctx                38 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	ret = ctx->dec_if->init(ctx);
ctx                39 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_off(&ctx->dev->pm);
ctx                40 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_unlock(ctx);
ctx                45 drivers/media/platform/mtk-vcodec/vdec_drv_if.c int vdec_if_decode(struct mtk_vcodec_ctx *ctx, struct mtk_vcodec_mem *bs,
ctx                65 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	if (!ctx->drv_handle)
ctx                68 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_lock(ctx);
ctx                70 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_set_curr_ctx(ctx->dev, ctx);
ctx                71 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_on(&ctx->dev->pm);
ctx                72 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	enable_irq(ctx->dev->dec_irq);
ctx                73 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	ret = ctx->dec_if->decode(ctx->drv_handle, bs, fb, res_chg);
ctx                74 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	disable_irq(ctx->dev->dec_irq);
ctx                75 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_off(&ctx->dev->pm);
ctx                76 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_set_curr_ctx(ctx->dev, NULL);
ctx                78 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_unlock(ctx);
ctx                83 drivers/media/platform/mtk-vcodec/vdec_drv_if.c int vdec_if_get_param(struct mtk_vcodec_ctx *ctx, enum vdec_get_param_type type,
ctx                88 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	if (!ctx->drv_handle)
ctx                91 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_lock(ctx);
ctx                92 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	ret = ctx->dec_if->get_param(ctx->drv_handle, type, out);
ctx                93 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_unlock(ctx);
ctx                98 drivers/media/platform/mtk-vcodec/vdec_drv_if.c void vdec_if_deinit(struct mtk_vcodec_ctx *ctx)
ctx               100 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	if (!ctx->drv_handle)
ctx               103 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_lock(ctx);
ctx               104 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_on(&ctx->dev->pm);
ctx               105 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	ctx->dec_if->deinit(ctx->drv_handle);
ctx               106 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vcodec_dec_clock_off(&ctx->dev->pm);
ctx               107 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	mtk_vdec_unlock(ctx);
ctx               109 drivers/media/platform/mtk-vcodec/vdec_drv_if.c 	ctx->drv_handle = NULL;
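
vdec_if_decode() above brackets the hardware job precisely: take the instance lock, publish curr_ctx for the ISR, gate the clocks and the decoder IRQ on only for the duration of ->decode(), then undo everything in reverse and clear curr_ctx. A sketch of that bracket; the demo_ helpers and field names mirror the listing's mtk_* calls but are placeholders:

	#include <linux/errno.h>
	#include <linux/interrupt.h>
	#include <linux/mutex.h>
	#include <linux/types.h>

	struct demo_bs;				/* bitstream, opaque here */
	struct demo_fb;				/* frame buffer, opaque here */

	struct demo_dev {			/* hypothetical */
		struct mutex dec_mutex;
		int dec_irq;
	};

	struct demo_ctx {			/* hypothetical */
		struct demo_dev *dev;
		void *drv_handle;
	};

	/* assumed helpers standing in for the driver's lock/clock/ctx ops */
	extern void demo_set_curr_ctx(struct demo_dev *dev, struct demo_ctx *ctx);
	extern void demo_clock_on(struct demo_dev *dev);
	extern void demo_clock_off(struct demo_dev *dev);
	extern int demo_hw_decode(void *handle, struct demo_bs *bs,
				  struct demo_fb *fb, bool *res_chg);

	static int demo_decode(struct demo_ctx *ctx, struct demo_bs *bs,
			       struct demo_fb *fb, bool *res_chg)
	{
		int ret;

		if (!ctx->drv_handle)
			return -EINVAL;

		mutex_lock(&ctx->dev->dec_mutex);
		demo_set_curr_ctx(ctx->dev, ctx);	/* let the ISR find us */
		demo_clock_on(ctx->dev);
		enable_irq(ctx->dev->dec_irq);

		ret = demo_hw_decode(ctx->drv_handle, bs, fb, res_chg);

		disable_irq(ctx->dev->dec_irq);		/* strict reverse order */
		demo_clock_off(ctx->dev);
		demo_set_curr_ctx(ctx->dev, NULL);
		mutex_unlock(&ctx->dev->dec_mutex);

		return ret;
	}

Keeping the IRQ and clocks off outside the bracket means an idle device draws no power and a stray interrupt can never observe a stale curr_ctx.
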
ctx                66 drivers/media/platform/mtk-vcodec/vdec_drv_if.h int vdec_if_init(struct mtk_vcodec_ctx *ctx, unsigned int fourcc);
ctx                73 drivers/media/platform/mtk-vcodec/vdec_drv_if.h void vdec_if_deinit(struct mtk_vcodec_ctx *ctx);
ctx                87 drivers/media/platform/mtk-vcodec/vdec_drv_if.h int vdec_if_decode(struct mtk_vcodec_ctx *ctx, struct mtk_vcodec_mem *bs,
ctx                96 drivers/media/platform/mtk-vcodec/vdec_drv_if.h int vdec_if_get_param(struct mtk_vcodec_ctx *ctx, enum vdec_get_param_type type,
ctx                31 drivers/media/platform/mtk-vcodec/vdec_vpu_if.h 	struct mtk_vcodec_ctx *ctx;
ctx               145 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	struct mtk_vcodec_ctx *ctx;
ctx               225 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 			mtk_vcodec_mem_free(inst->ctx, &inst->work_bufs[i]);
ctx               228 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	mtk_vcodec_mem_free(inst->ctx, &inst->pps_buf);
ctx               264 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 			ret = mtk_vcodec_mem_alloc(inst->ctx,
ctx               297 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	ret = mtk_vcodec_mem_alloc(inst->ctx, &inst->pps_buf);
ctx               316 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	struct mtk_vcodec_ctx *ctx = (struct mtk_vcodec_ctx *)inst->ctx;
ctx               318 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	if (!mtk_vcodec_wait_for_done_ctx(ctx, MTK_INST_IRQ_RECEIVED,
ctx               320 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 		irq_status = ctx->irq_status;
ctx               461 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c static int h264_enc_init(struct mtk_vcodec_ctx *ctx)
ctx               470 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	inst->ctx = ctx;
ctx               471 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	inst->vpu_inst.ctx = ctx;
ctx               472 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	inst->vpu_inst.dev = ctx->dev->vpu_plat_dev;
ctx               474 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	inst->hw_base = mtk_vcodec_get_reg_addr(inst->ctx, VENC_SYS);
ctx               487 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 		ctx->drv_handle = inst;
ctx               500 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	struct mtk_vcodec_ctx *ctx = inst->ctx;
ctx               504 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	enable_irq(ctx->dev->enc_irq);
ctx               581 drivers/media/platform/mtk-vcodec/venc/venc_h264_if.c 	disable_irq(ctx->dev->enc_irq);
ctx               133 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	struct mtk_vcodec_ctx *ctx;
ctx               151 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 		mtk_vcodec_mem_free(inst->ctx, &inst->work_bufs[i]);
ctx               179 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 		ret = mtk_vcodec_mem_alloc(inst->ctx, &inst->work_bufs[i]);
ctx               221 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	struct mtk_vcodec_ctx *ctx = (struct mtk_vcodec_ctx *)inst->ctx;
ctx               223 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	if (!mtk_vcodec_wait_for_done_ctx(ctx, MTK_INST_IRQ_RECEIVED,
ctx               225 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 		irq_status = ctx->irq_status;
ctx               326 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c static int vp8_enc_init(struct mtk_vcodec_ctx *ctx)
ctx               335 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	inst->ctx = ctx;
ctx               336 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	inst->vpu_inst.ctx = ctx;
ctx               337 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	inst->vpu_inst.dev = ctx->dev->vpu_plat_dev;
ctx               339 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	inst->hw_base = mtk_vcodec_get_reg_addr(inst->ctx, VENC_LT_SYS);
ctx               352 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 		ctx->drv_handle = inst;
ctx               365 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	struct mtk_vcodec_ctx *ctx = inst->ctx;
ctx               369 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	enable_irq(ctx->dev->enc_lt_irq);
ctx               388 drivers/media/platform/mtk-vcodec/venc/venc_vp8_if.c 	disable_irq(ctx->dev->enc_lt_irq);
ctx                22 drivers/media/platform/mtk-vcodec/venc_drv_base.h 	int (*init)(struct mtk_vcodec_ctx *ctx);
ctx                20 drivers/media/platform/mtk-vcodec/venc_drv_if.c int venc_if_init(struct mtk_vcodec_ctx *ctx, unsigned int fourcc)
ctx                26 drivers/media/platform/mtk-vcodec/venc_drv_if.c 		ctx->enc_if = &venc_vp8_if;
ctx                29 drivers/media/platform/mtk-vcodec/venc_drv_if.c 		ctx->enc_if = &venc_h264_if;
ctx                35 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_lock(ctx);
ctx                36 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_on(&ctx->dev->pm);
ctx                37 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ret = ctx->enc_if->init(ctx);
ctx                38 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_off(&ctx->dev->pm);
ctx                39 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_unlock(ctx);
ctx                44 drivers/media/platform/mtk-vcodec/venc_drv_if.c int venc_if_set_param(struct mtk_vcodec_ctx *ctx,
ctx                49 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_lock(ctx);
ctx                50 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_on(&ctx->dev->pm);
ctx                51 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ret = ctx->enc_if->set_param(ctx->drv_handle, type, in);
ctx                52 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_off(&ctx->dev->pm);
ctx                53 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_unlock(ctx);
ctx                58 drivers/media/platform/mtk-vcodec/venc_drv_if.c int venc_if_encode(struct mtk_vcodec_ctx *ctx,
ctx                66 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_lock(ctx);
ctx                68 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	spin_lock_irqsave(&ctx->dev->irqlock, flags);
ctx                69 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ctx->dev->curr_ctx = ctx;
ctx                70 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	spin_unlock_irqrestore(&ctx->dev->irqlock, flags);
ctx                72 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_on(&ctx->dev->pm);
ctx                73 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ret = ctx->enc_if->encode(ctx->drv_handle, opt, frm_buf,
ctx                75 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_off(&ctx->dev->pm);
ctx                77 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	spin_lock_irqsave(&ctx->dev->irqlock, flags);
ctx                78 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ctx->dev->curr_ctx = NULL;
ctx                79 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	spin_unlock_irqrestore(&ctx->dev->irqlock, flags);
ctx                81 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_unlock(ctx);
ctx                85 drivers/media/platform/mtk-vcodec/venc_drv_if.c int venc_if_deinit(struct mtk_vcodec_ctx *ctx)
ctx                89 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	if (!ctx->drv_handle)
ctx                92 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_lock(ctx);
ctx                93 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_on(&ctx->dev->pm);
ctx                94 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ret = ctx->enc_if->deinit(ctx->drv_handle);
ctx                95 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_vcodec_enc_clock_off(&ctx->dev->pm);
ctx                96 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	mtk_venc_unlock(ctx);
ctx                98 drivers/media/platform/mtk-vcodec/venc_drv_if.c 	ctx->drv_handle = NULL;
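
Both vdec_if_init() and venc_if_init() above select a per-codec ops table by fourcc before taking the lock and calling ->init(). A sketch of that dispatch, with hypothetical demo_ ops tables (the lock/clock bracket the listing wraps around ->init() is omitted here for brevity):

	#include <linux/errno.h>
	#include <linux/videodev2.h>

	struct demo_ctx;

	struct demo_enc_if {			/* assumed ops-table shape */
		int (*init)(struct demo_ctx *ctx);
	};

	struct demo_ctx {			/* hypothetical */
		const struct demo_enc_if *enc_if;
	};

	extern const struct demo_enc_if demo_vp8_ops;	/* hypothetical */
	extern const struct demo_enc_if demo_h264_ops;	/* hypothetical */

	static int demo_enc_if_init(struct demo_ctx *ctx, unsigned int fourcc)
	{
		switch (fourcc) {
		case V4L2_PIX_FMT_VP8:
			ctx->enc_if = &demo_vp8_ops;
			break;
		case V4L2_PIX_FMT_H264:
			ctx->enc_if = &demo_h264_ops;
			break;
		default:
			return -EINVAL;
		}

		return ctx->enc_if->init(ctx);
	}

After this, every entry point (set_param, encode, deinit) goes through ctx->enc_if, so adding a codec means adding one case and one ops table.
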
ctx               122 drivers/media/platform/mtk-vcodec/venc_drv_if.h int venc_if_init(struct mtk_vcodec_ctx *ctx, unsigned int fourcc);
ctx               129 drivers/media/platform/mtk-vcodec/venc_drv_if.h int venc_if_deinit(struct mtk_vcodec_ctx *ctx);
ctx               138 drivers/media/platform/mtk-vcodec/venc_drv_if.h int venc_if_set_param(struct mtk_vcodec_ctx *ctx,
ctx               151 drivers/media/platform/mtk-vcodec/venc_drv_if.h int venc_if_encode(struct mtk_vcodec_ctx *ctx,
ctx                38 drivers/media/platform/mtk-vcodec/venc_vpu_if.h 	struct mtk_vcodec_ctx *ctx;
ctx               217 drivers/media/platform/mx2_emmaprp.c static struct emmaprp_q_data *get_q_data(struct emmaprp_ctx *ctx,
ctx               222 drivers/media/platform/mx2_emmaprp.c 		return &(ctx->q_data[V4L2_M2M_SRC]);
ctx               224 drivers/media/platform/mx2_emmaprp.c 		return &(ctx->q_data[V4L2_M2M_DST]);
ctx               236 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = priv;
ctx               237 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_dev *pcdev = ctx->dev;
ctx               239 drivers/media/platform/mx2_emmaprp.c 	ctx->aborting = 1;
ctx               243 drivers/media/platform/mx2_emmaprp.c 	v4l2_m2m_job_finish(pcdev->m2m_dev, ctx->fh.m2m_ctx);
ctx               268 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = priv;
ctx               271 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_dev *pcdev = ctx->dev;
ctx               278 drivers/media/platform/mx2_emmaprp.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               279 drivers/media/platform/mx2_emmaprp.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               281 drivers/media/platform/mx2_emmaprp.c 	s_q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               285 drivers/media/platform/mx2_emmaprp.c 	d_q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               427 drivers/media/platform/mx2_emmaprp.c static int vidioc_g_fmt(struct emmaprp_ctx *ctx, struct v4l2_format *f)
ctx               432 drivers/media/platform/mx2_emmaprp.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               436 drivers/media/platform/mx2_emmaprp.c 	q_data = get_q_data(ctx, f->type);
ctx               501 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = priv;
ctx               505 drivers/media/platform/mx2_emmaprp.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               518 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = priv;
ctx               522 drivers/media/platform/mx2_emmaprp.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               531 drivers/media/platform/mx2_emmaprp.c static int vidioc_s_fmt(struct emmaprp_ctx *ctx, struct v4l2_format *f)
ctx               537 drivers/media/platform/mx2_emmaprp.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               541 drivers/media/platform/mx2_emmaprp.c 	q_data = get_q_data(ctx, f->type);
ctx               546 drivers/media/platform/mx2_emmaprp.c 		v4l2_err(&ctx->dev->v4l2_dev, "%s queue busy\n", __func__);
ctx               562 drivers/media/platform/mx2_emmaprp.c 	dprintk(ctx->dev,
ctx               624 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = vb2_get_drv_priv(vq);
ctx               628 drivers/media/platform/mx2_emmaprp.c 	q_data = get_q_data(ctx, vq->type);
ctx               642 drivers/media/platform/mx2_emmaprp.c 	dprintk(ctx->dev, "get %d buffer(s) of size %d each.\n", count, size);
ctx               649 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               652 drivers/media/platform/mx2_emmaprp.c 	dprintk(ctx->dev, "type: %d\n", vb->vb2_queue->type);
ctx               654 drivers/media/platform/mx2_emmaprp.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx               657 drivers/media/platform/mx2_emmaprp.c 		dprintk(ctx->dev,
ctx               672 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               673 drivers/media/platform/mx2_emmaprp.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               687 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = priv;
ctx               692 drivers/media/platform/mx2_emmaprp.c 	src_vq->drv_priv = ctx;
ctx               697 drivers/media/platform/mx2_emmaprp.c 	src_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               698 drivers/media/platform/mx2_emmaprp.c 	src_vq->lock = &ctx->dev->dev_mutex;
ctx               706 drivers/media/platform/mx2_emmaprp.c 	dst_vq->drv_priv = ctx;
ctx               711 drivers/media/platform/mx2_emmaprp.c 	dst_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               712 drivers/media/platform/mx2_emmaprp.c 	dst_vq->lock = &ctx->dev->dev_mutex;
ctx               723 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx;
ctx               725 drivers/media/platform/mx2_emmaprp.c 	ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
ctx               726 drivers/media/platform/mx2_emmaprp.c 	if (!ctx)
ctx               729 drivers/media/platform/mx2_emmaprp.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               730 drivers/media/platform/mx2_emmaprp.c 	file->private_data = &ctx->fh;
ctx               731 drivers/media/platform/mx2_emmaprp.c 	ctx->dev = pcdev;
ctx               734 drivers/media/platform/mx2_emmaprp.c 		kfree(ctx);
ctx               738 drivers/media/platform/mx2_emmaprp.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(pcdev->m2m_dev, ctx, &queue_init);
ctx               740 drivers/media/platform/mx2_emmaprp.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               741 drivers/media/platform/mx2_emmaprp.c 		int ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               744 drivers/media/platform/mx2_emmaprp.c 		kfree(ctx);
ctx               750 drivers/media/platform/mx2_emmaprp.c 	ctx->q_data[V4L2_M2M_SRC].fmt = &formats[1];
ctx               751 drivers/media/platform/mx2_emmaprp.c 	ctx->q_data[V4L2_M2M_DST].fmt = &formats[0];
ctx               752 drivers/media/platform/mx2_emmaprp.c 	v4l2_fh_add(&ctx->fh);
ctx               755 drivers/media/platform/mx2_emmaprp.c 	dprintk(pcdev, "Created instance %p, m2m_ctx: %p\n", ctx, ctx->fh.m2m_ctx);
ctx               763 drivers/media/platform/mx2_emmaprp.c 	struct emmaprp_ctx *ctx = file->private_data;
ctx               765 drivers/media/platform/mx2_emmaprp.c 	dprintk(pcdev, "Releasing instance %p\n", ctx);
ctx               770 drivers/media/platform/mx2_emmaprp.c 	v4l2_fh_del(&ctx->fh);
ctx               771 drivers/media/platform/mx2_emmaprp.c 	v4l2_fh_exit(&ctx->fh);
ctx               772 drivers/media/platform/mx2_emmaprp.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               774 drivers/media/platform/mx2_emmaprp.c 	kfree(ctx);
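
The get_q_data() helper opening the mx2_emmaprp section recurs almost verbatim in rcar_fdp1 and rcar_jpu below: map a V4L2 buffer type onto per-direction queue state. emmaprp switches on the two exact single-planar types and returns NULL otherwise; the sketch below collapses that with V4L2_TYPE_IS_OUTPUT(), which also covers the _MPLANE variants the R-Car drivers use. Field and enum names are illustrative:

	#include <linux/videodev2.h>

	enum { DEMO_Q_SRC, DEMO_Q_DST };	/* per-direction slots, assumed */

	struct demo_q_data {			/* illustrative format state */
		unsigned int width;
		unsigned int height;
		unsigned int sizeimage;
	};

	struct demo_ctx {			/* hypothetical */
		struct demo_q_data q_data[2];
	};

	static struct demo_q_data *demo_get_q_data(struct demo_ctx *ctx,
						   enum v4l2_buf_type type)
	{
		if (V4L2_TYPE_IS_OUTPUT(type))
			return &ctx->q_data[DEMO_Q_SRC];	/* app -> device */
		return &ctx->q_data[DEMO_Q_DST];		/* device -> app */
	}
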
ctx               220 drivers/media/platform/omap3isp/ispcsi2.c 	struct isp_csi2_ctx_cfg *ctx = &csi2->contexts[0];
ctx               222 drivers/media/platform/omap3isp/ispcsi2.c 	ctx->ping_addr = addr;
ctx               223 drivers/media/platform/omap3isp/ispcsi2.c 	ctx->pong_addr = addr;
ctx               224 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, ctx->ping_addr,
ctx               225 drivers/media/platform/omap3isp/ispcsi2.c 		       csi2->regs1, ISPCSI2_CTX_DAT_PING_ADDR(ctx->ctxnum));
ctx               226 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, ctx->pong_addr,
ctx               227 drivers/media/platform/omap3isp/ispcsi2.c 		       csi2->regs1, ISPCSI2_CTX_DAT_PONG_ADDR(ctx->ctxnum));
ctx               250 drivers/media/platform/omap3isp/ispcsi2.c 	struct isp_csi2_ctx_cfg *ctx = &csi2->contexts[ctxnum];
ctx               271 drivers/media/platform/omap3isp/ispcsi2.c 	ctx->enabled = enable;
ctx               281 drivers/media/platform/omap3isp/ispcsi2.c 			    struct isp_csi2_ctx_cfg *ctx)
ctx               286 drivers/media/platform/omap3isp/ispcsi2.c 	reg = isp_reg_readl(isp, csi2->regs1, ISPCSI2_CTX_CTRL1(ctx->ctxnum));
ctx               288 drivers/media/platform/omap3isp/ispcsi2.c 	if (ctx->eof_enabled)
ctx               293 drivers/media/platform/omap3isp/ispcsi2.c 	if (ctx->eol_enabled)
ctx               298 drivers/media/platform/omap3isp/ispcsi2.c 	if (ctx->checksum_enabled)
ctx               303 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, reg, csi2->regs1, ISPCSI2_CTX_CTRL1(ctx->ctxnum));
ctx               306 drivers/media/platform/omap3isp/ispcsi2.c 	reg = isp_reg_readl(isp, csi2->regs1, ISPCSI2_CTX_CTRL2(ctx->ctxnum));
ctx               309 drivers/media/platform/omap3isp/ispcsi2.c 	reg |= ctx->virtual_id << ISPCSI2_CTX_CTRL2_VIRTUAL_ID_SHIFT;
ctx               312 drivers/media/platform/omap3isp/ispcsi2.c 	reg |= ctx->format_id << ISPCSI2_CTX_CTRL2_FORMAT_SHIFT;
ctx               314 drivers/media/platform/omap3isp/ispcsi2.c 	if (ctx->dpcm_decompress) {
ctx               315 drivers/media/platform/omap3isp/ispcsi2.c 		if (ctx->dpcm_predictor)
ctx               321 drivers/media/platform/omap3isp/ispcsi2.c 	if (is_usr_def_mapping(ctx->format_id)) {
ctx               326 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, reg, csi2->regs1, ISPCSI2_CTX_CTRL2(ctx->ctxnum));
ctx               329 drivers/media/platform/omap3isp/ispcsi2.c 	reg = isp_reg_readl(isp, csi2->regs1, ISPCSI2_CTX_CTRL3(ctx->ctxnum));
ctx               331 drivers/media/platform/omap3isp/ispcsi2.c 	reg |= (ctx->alpha << ISPCSI2_CTX_CTRL3_ALPHA_SHIFT);
ctx               333 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, reg, csi2->regs1, ISPCSI2_CTX_CTRL3(ctx->ctxnum));
ctx               337 drivers/media/platform/omap3isp/ispcsi2.c 			    ISPCSI2_CTX_DAT_OFST(ctx->ctxnum));
ctx               339 drivers/media/platform/omap3isp/ispcsi2.c 	reg |= ctx->data_offset << ISPCSI2_CTX_DAT_OFST_OFST_SHIFT;
ctx               341 drivers/media/platform/omap3isp/ispcsi2.c 		       ISPCSI2_CTX_DAT_OFST(ctx->ctxnum));
ctx               343 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, ctx->ping_addr,
ctx               344 drivers/media/platform/omap3isp/ispcsi2.c 		       csi2->regs1, ISPCSI2_CTX_DAT_PING_ADDR(ctx->ctxnum));
ctx               346 drivers/media/platform/omap3isp/ispcsi2.c 	isp_reg_writel(isp, ctx->pong_addr,
ctx               347 drivers/media/platform/omap3isp/ispcsi2.c 		       csi2->regs1, ISPCSI2_CTX_DAT_PONG_ADDR(ctx->ctxnum));
ctx               696 drivers/media/platform/omap3isp/ispcsi2.c 			 struct isp_csi2_ctx_cfg *ctx)
ctx               699 drivers/media/platform/omap3isp/ispcsi2.c 	unsigned int n = ctx->ctxnum;
ctx               721 drivers/media/platform/omap3isp/ispcsi2.c 			ctx->format_id = csi2_ctx_map_format(csi2);
ctx               722 drivers/media/platform/omap3isp/ispcsi2.c 			csi2_ctx_config(isp, csi2, ctx);
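
csi2_ctx_config() above is a chain of read-modify-write updates per context register: read, clear a field, OR in the new value, write back, so neighboring fields in the same register survive. A generic sketch of one such update over an ioremapped base; the offset and field layout are invented for illustration, not the OMAP3 ISP's:

	#include <linux/io.h>

	#define DEMO_CTX_CTRL(n)	(0x70 + (n) * 0x20)	/* made up */
	#define DEMO_CTRL_VID_MASK	(0x3 << 8)		/* made up */
	#define DEMO_CTRL_VID(v)	(((v) & 0x3) << 8)

	static void demo_ctx_set_virtual_id(void __iomem *base,
					    unsigned int n, u32 vid)
	{
		u32 reg = readl(base + DEMO_CTX_CTRL(n));

		reg &= ~DEMO_CTRL_VID_MASK;	/* clear the old field */
		reg |= DEMO_CTRL_VID(vid);	/* install the new value */
		writel(reg, base + DEMO_CTX_CTRL(n));
	}
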
ctx               638 drivers/media/platform/rcar_fdp1.c static struct fdp1_q_data *get_q_data(struct fdp1_ctx *ctx,
ctx               642 drivers/media/platform/rcar_fdp1.c 		return &ctx->out_q;
ctx               644 drivers/media/platform/rcar_fdp1.c 		return &ctx->cap_q;
ctx               719 drivers/media/platform/rcar_fdp1.c static void fdp1_field_complete(struct fdp1_ctx *ctx,
ctx               730 drivers/media/platform/rcar_fdp1.c static void fdp1_queue_field(struct fdp1_ctx *ctx,
ctx               735 drivers/media/platform/rcar_fdp1.c 	spin_lock_irqsave(&ctx->fdp1->irqlock, flags);
ctx               736 drivers/media/platform/rcar_fdp1.c 	list_add_tail(&fbuf->list, &ctx->fields_queue);
ctx               737 drivers/media/platform/rcar_fdp1.c 	spin_unlock_irqrestore(&ctx->fdp1->irqlock, flags);
ctx               739 drivers/media/platform/rcar_fdp1.c 	ctx->buffers_queued++;
ctx               742 drivers/media/platform/rcar_fdp1.c static struct fdp1_field_buffer *fdp1_dequeue_field(struct fdp1_ctx *ctx)
ctx               747 drivers/media/platform/rcar_fdp1.c 	ctx->buffers_queued--;
ctx               749 drivers/media/platform/rcar_fdp1.c 	spin_lock_irqsave(&ctx->fdp1->irqlock, flags);
ctx               750 drivers/media/platform/rcar_fdp1.c 	fbuf = list_first_entry_or_null(&ctx->fields_queue,
ctx               754 drivers/media/platform/rcar_fdp1.c 	spin_unlock_irqrestore(&ctx->fdp1->irqlock, flags);
ctx               763 drivers/media/platform/rcar_fdp1.c static struct fdp1_field_buffer *fdp1_peek_queued_field(struct fdp1_ctx *ctx)
ctx               768 drivers/media/platform/rcar_fdp1.c 	spin_lock_irqsave(&ctx->fdp1->irqlock, flags);
ctx               769 drivers/media/platform/rcar_fdp1.c 	fbuf = list_first_entry_or_null(&ctx->fields_queue,
ctx               771 drivers/media/platform/rcar_fdp1.c 	spin_unlock_irqrestore(&ctx->fdp1->irqlock, flags);
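
fdp1_queue_field(), fdp1_dequeue_field() and fdp1_peek_queued_field() above form a small IRQ-safe FIFO of field buffers: producers append under the spinlock, consumers take or inspect the head with list_first_entry_or_null(). A self-contained sketch of the same structure with demo_ names:

	#include <linux/list.h>
	#include <linux/spinlock.h>

	struct demo_field {			/* hypothetical queue entry */
		struct list_head list;
	};

	struct demo_fifo {			/* hypothetical */
		spinlock_t lock;
		struct list_head queue;
	};

	static void demo_fifo_init(struct demo_fifo *f)
	{
		spin_lock_init(&f->lock);
		INIT_LIST_HEAD(&f->queue);
	}

	static void demo_fifo_push(struct demo_fifo *f, struct demo_field *fld)
	{
		unsigned long flags;

		spin_lock_irqsave(&f->lock, flags);
		list_add_tail(&fld->list, &f->queue);
		spin_unlock_irqrestore(&f->lock, flags);
	}

	static struct demo_field *demo_fifo_pop(struct demo_fifo *f)
	{
		struct demo_field *fld;
		unsigned long flags;

		spin_lock_irqsave(&f->lock, flags);
		fld = list_first_entry_or_null(&f->queue,
					       struct demo_field, list);
		if (fld)
			list_del(&fld->list);	/* consume the head */
		spin_unlock_irqrestore(&f->lock, flags);
		return fld;
	}

	static struct demo_field *demo_fifo_peek(struct demo_fifo *f)
	{
		struct demo_field *fld;
		unsigned long flags;

		spin_lock_irqsave(&f->lock, flags);
		fld = list_first_entry_or_null(&f->queue,
					       struct demo_field, list);
		spin_unlock_irqrestore(&f->lock, flags);
		return fld;
	}

The peek variant leaves the entry owned by the queue, which matches how fdp1_prepare_job() later references the next field without consuming it.
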
ctx               795 drivers/media/platform/rcar_fdp1.c static void fdp1_set_ipc_dli(struct fdp1_ctx *ctx)
ctx               797 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx               812 drivers/media/platform/rcar_fdp1.c static void fdp1_set_ipc_sensor(struct fdp1_ctx *ctx)
ctx               814 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx               815 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *src_q_data = &ctx->out_q;
ctx               879 drivers/media/platform/rcar_fdp1.c static void fdp1_configure_rpf(struct fdp1_ctx *ctx,
ctx               882 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx               888 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *q_data = &ctx->out_q;
ctx               909 drivers/media/platform/rcar_fdp1.c 		smsk_addr = ctx->smsk_addr[0];
ctx               911 drivers/media/platform/rcar_fdp1.c 		smsk_addr = ctx->smsk_addr[1];
ctx               915 drivers/media/platform/rcar_fdp1.c 	if (ctx->deint_mode)
ctx               938 drivers/media/platform/rcar_fdp1.c static void fdp1_configure_wpf(struct fdp1_ctx *ctx,
ctx               941 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx               942 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *src_q_data = &ctx->out_q;
ctx               943 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *q_data = &ctx->cap_q;
ctx               979 drivers/media/platform/rcar_fdp1.c 	format |= ctx->alpha << FD1_WPF_FORMAT_PDV_SHIFT;
ctx               999 drivers/media/platform/rcar_fdp1.c static void fdp1_configure_deint_mode(struct fdp1_ctx *ctx,
ctx              1002 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx              1008 drivers/media/platform/rcar_fdp1.c 	switch (ctx->deint_mode) {
ctx              1017 drivers/media/platform/rcar_fdp1.c 		if (ctx->sequence == 0 || ctx->aborting)
ctx              1022 drivers/media/platform/rcar_fdp1.c 		if (ctx->sequence > 1) {
ctx              1027 drivers/media/platform/rcar_fdp1.c 		if (ctx->sequence > 2)
ctx              1035 drivers/media/platform/rcar_fdp1.c 		if (!(ctx->sequence == 0 || ctx->aborting))
ctx              1066 drivers/media/platform/rcar_fdp1.c static int fdp1_device_process(struct fdp1_ctx *ctx)
ctx              1069 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx              1090 drivers/media/platform/rcar_fdp1.c 	fdp1_configure_deint_mode(ctx, job);
ctx              1093 drivers/media/platform/rcar_fdp1.c 	fdp1_set_ipc_dli(ctx);
ctx              1096 drivers/media/platform/rcar_fdp1.c 	fdp1_set_ipc_sensor(ctx);
ctx              1099 drivers/media/platform/rcar_fdp1.c 	fdp1_configure_rpf(ctx, job);
ctx              1102 drivers/media/platform/rcar_fdp1.c 	fdp1_configure_wpf(ctx, job);
ctx              1138 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = priv;
ctx              1139 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *src_q_data = &ctx->out_q;
ctx              1143 drivers/media/platform/rcar_fdp1.c 	dprintk(ctx->fdp1, "+ Src: %d : Dst: %d\n",
ctx              1144 drivers/media/platform/rcar_fdp1.c 		v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx),
ctx              1145 drivers/media/platform/rcar_fdp1.c 		v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx));
ctx              1151 drivers/media/platform/rcar_fdp1.c 	if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < srcbufs
ctx              1152 drivers/media/platform/rcar_fdp1.c 	    || v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx) < dstbufs) {
ctx              1153 drivers/media/platform/rcar_fdp1.c 		dprintk(ctx->fdp1, "Not enough buffers available\n");
ctx              1162 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = priv;
ctx              1164 drivers/media/platform/rcar_fdp1.c 	dprintk(ctx->fdp1, "+\n");
ctx              1167 drivers/media/platform/rcar_fdp1.c 	ctx->aborting = 1;
ctx              1170 drivers/media/platform/rcar_fdp1.c 	fdp1_write(ctx->fdp1, 0, FD1_CTL_SGCMD);
ctx              1171 drivers/media/platform/rcar_fdp1.c 	fdp1_write(ctx->fdp1, FD1_CTL_SRESET_SRST, FD1_CTL_SRESET);
ctx              1180 drivers/media/platform/rcar_fdp1.c static struct fdp1_job *fdp1_prepare_job(struct fdp1_ctx *ctx)
ctx              1184 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx              1190 drivers/media/platform/rcar_fdp1.c 	if (FDP1_DEINT_MODE_USES_NEXT(ctx->deint_mode))
ctx              1193 drivers/media/platform/rcar_fdp1.c 	if (ctx->buffers_queued < buffers_required)
ctx              1202 drivers/media/platform/rcar_fdp1.c 	job->active = fdp1_dequeue_field(ctx);
ctx              1216 drivers/media/platform/rcar_fdp1.c 	vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1220 drivers/media/platform/rcar_fdp1.c 	job->active->vb->sequence = ctx->sequence;
ctx              1221 drivers/media/platform/rcar_fdp1.c 	job->dst->vb->sequence = ctx->sequence;
ctx              1222 drivers/media/platform/rcar_fdp1.c 	ctx->sequence++;
ctx              1224 drivers/media/platform/rcar_fdp1.c 	if (FDP1_DEINT_MODE_USES_PREV(ctx->deint_mode)) {
ctx              1225 drivers/media/platform/rcar_fdp1.c 		job->previous = ctx->previous;
ctx              1228 drivers/media/platform/rcar_fdp1.c 		ctx->previous = job->active;
ctx              1231 drivers/media/platform/rcar_fdp1.c 	if (FDP1_DEINT_MODE_USES_NEXT(ctx->deint_mode)) {
ctx              1233 drivers/media/platform/rcar_fdp1.c 		job->next = fdp1_peek_queued_field(ctx);
ctx              1246 drivers/media/platform/rcar_fdp1.c 	ctx->translen++;
ctx              1251 drivers/media/platform/rcar_fdp1.c 	dprintk(fdp1, "Job Queued translen = %d\n", ctx->translen);
ctx              1264 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = priv;
ctx              1265 drivers/media/platform/rcar_fdp1.c 	struct fdp1_dev *fdp1 = ctx->fdp1;
ctx              1272 drivers/media/platform/rcar_fdp1.c 	ctx->translen = 0;
ctx              1275 drivers/media/platform/rcar_fdp1.c 	src_vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1281 drivers/media/platform/rcar_fdp1.c 		fdp1_queue_field(ctx, fbuf);
ctx              1287 drivers/media/platform/rcar_fdp1.c 	while (fdp1_prepare_job(ctx))
ctx              1290 drivers/media/platform/rcar_fdp1.c 	if (ctx->translen == 0) {
ctx              1292 drivers/media/platform/rcar_fdp1.c 		v4l2_m2m_job_finish(fdp1->m2m_dev, ctx->fh.m2m_ctx);
ctx              1297 drivers/media/platform/rcar_fdp1.c 	fdp1_device_process(ctx);
ctx              1308 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx;
ctx              1314 drivers/media/platform/rcar_fdp1.c 	ctx = v4l2_m2m_get_curr_priv(fdp1->m2m_dev);
ctx              1316 drivers/media/platform/rcar_fdp1.c 	if (ctx == NULL) {
ctx              1322 drivers/media/platform/rcar_fdp1.c 	ctx->num_processed++;
ctx              1328 drivers/media/platform/rcar_fdp1.c 	if (FDP1_DEINT_MODE_USES_PREV(ctx->deint_mode))
ctx              1329 drivers/media/platform/rcar_fdp1.c 		fdp1_field_complete(ctx, job->previous);
ctx              1331 drivers/media/platform/rcar_fdp1.c 		fdp1_field_complete(ctx, job->active);
ctx              1342 drivers/media/platform/rcar_fdp1.c 		ctx->num_processed, ctx->translen);
ctx              1344 drivers/media/platform/rcar_fdp1.c 	if (ctx->num_processed == ctx->translen ||
ctx              1345 drivers/media/platform/rcar_fdp1.c 			ctx->aborting) {
ctx              1346 drivers/media/platform/rcar_fdp1.c 		dprintk(ctx->fdp1, "Finishing transaction\n");
ctx              1347 drivers/media/platform/rcar_fdp1.c 		ctx->num_processed = 0;
ctx              1348 drivers/media/platform/rcar_fdp1.c 		v4l2_m2m_job_finish(fdp1->m2m_dev, ctx->fh.m2m_ctx);
ctx              1354 drivers/media/platform/rcar_fdp1.c 		fdp1_device_process(ctx);
ctx              1410 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = fh_to_ctx(priv);
ctx              1412 drivers/media/platform/rcar_fdp1.c 	if (!v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type))
ctx              1415 drivers/media/platform/rcar_fdp1.c 	q_data = get_q_data(ctx, f->type);
ctx              1456 drivers/media/platform/rcar_fdp1.c static void fdp1_try_fmt_output(struct fdp1_ctx *ctx,
ctx              1512 drivers/media/platform/rcar_fdp1.c static void fdp1_try_fmt_capture(struct fdp1_ctx *ctx,
ctx              1516 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *src_data = &ctx->out_q;
ctx              1591 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = fh_to_ctx(priv);
ctx              1594 drivers/media/platform/rcar_fdp1.c 		fdp1_try_fmt_output(ctx, NULL, &f->fmt.pix_mp);
ctx              1596 drivers/media/platform/rcar_fdp1.c 		fdp1_try_fmt_capture(ctx, NULL, &f->fmt.pix_mp);
ctx              1598 drivers/media/platform/rcar_fdp1.c 	dprintk(ctx->fdp1, "Try %s format: %4.4s (0x%08x) %ux%u field %u\n",
ctx              1606 drivers/media/platform/rcar_fdp1.c static void fdp1_set_format(struct fdp1_ctx *ctx,
ctx              1610 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *q_data = get_q_data(ctx, type);
ctx              1614 drivers/media/platform/rcar_fdp1.c 		fdp1_try_fmt_output(ctx, &fmtinfo, pix);
ctx              1616 drivers/media/platform/rcar_fdp1.c 		fdp1_try_fmt_capture(ctx, &fmtinfo, pix);
ctx              1638 drivers/media/platform/rcar_fdp1.c 		struct fdp1_q_data *dst_data = &ctx->cap_q;
ctx              1652 drivers/media/platform/rcar_fdp1.c 		fdp1_try_fmt_capture(ctx, &dst_data->fmt, &dst_data->format);
ctx              1662 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = fh_to_ctx(priv);
ctx              1663 drivers/media/platform/rcar_fdp1.c 	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
ctx              1667 drivers/media/platform/rcar_fdp1.c 		v4l2_err(&ctx->fdp1->v4l2_dev, "%s queue busy\n", __func__);
ctx              1671 drivers/media/platform/rcar_fdp1.c 	fdp1_set_format(ctx, &f->fmt.pix_mp, f->type);
ctx              1673 drivers/media/platform/rcar_fdp1.c 	dprintk(ctx->fdp1, "Set %s format: %4.4s (0x%08x) %ux%u field %u\n",
ctx              1683 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx =
ctx              1685 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *src_q_data = &ctx->out_q;
ctx              1701 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx =
ctx              1706 drivers/media/platform/rcar_fdp1.c 		ctx->alpha = ctrl->val;
ctx              1710 drivers/media/platform/rcar_fdp1.c 		ctx->deint_mode = ctrl->val;
ctx              1768 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1772 drivers/media/platform/rcar_fdp1.c 	q_data = get_q_data(ctx, vq->type);
ctx              1853 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1854 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1886 drivers/media/platform/rcar_fdp1.c 			dprintk(ctx->fdp1,
ctx              1900 drivers/media/platform/rcar_fdp1.c 			dprintk(ctx->fdp1,
ctx              1921 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1923 drivers/media/platform/rcar_fdp1.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1928 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = vb2_get_drv_priv(q);
ctx              1929 drivers/media/platform/rcar_fdp1.c 	struct fdp1_q_data *q_data = get_q_data(ctx, q->type);
ctx              1938 drivers/media/platform/rcar_fdp1.c 			ctx->deint_mode = FDP1_PROGRESSIVE;
ctx              1940 drivers/media/platform/rcar_fdp1.c 		if (ctx->deint_mode == FDP1_ADAPT2D3D) {
ctx              1947 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_size = bpp * stride * q_data->vsize;
ctx              1949 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_cpu = dma_alloc_coherent(ctx->fdp1->dev,
ctx              1950 drivers/media/platform/rcar_fdp1.c 				ctx->smsk_size, &smsk_base, GFP_KERNEL);
ctx              1952 drivers/media/platform/rcar_fdp1.c 			if (ctx->smsk_cpu == NULL) {
ctx              1953 drivers/media/platform/rcar_fdp1.c 				dprintk(ctx->fdp1, "Failed to alloc smsk\n");
ctx              1957 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_addr[0] = smsk_base;
ctx              1958 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_addr[1] = smsk_base + (ctx->smsk_size/2);
ctx              1967 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = vb2_get_drv_priv(q);
ctx              1973 drivers/media/platform/rcar_fdp1.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1975 drivers/media/platform/rcar_fdp1.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1978 drivers/media/platform/rcar_fdp1.c 		spin_lock_irqsave(&ctx->fdp1->irqlock, flags);
ctx              1980 drivers/media/platform/rcar_fdp1.c 		spin_unlock_irqrestore(&ctx->fdp1->irqlock, flags);
ctx              1989 drivers/media/platform/rcar_fdp1.c 		fbuf = fdp1_dequeue_field(ctx);
ctx              1991 drivers/media/platform/rcar_fdp1.c 			fdp1_field_complete(ctx, fbuf);
ctx              1992 drivers/media/platform/rcar_fdp1.c 			fbuf = fdp1_dequeue_field(ctx);
ctx              1996 drivers/media/platform/rcar_fdp1.c 		if (ctx->smsk_cpu) {
ctx              1997 drivers/media/platform/rcar_fdp1.c 			dma_free_coherent(ctx->fdp1->dev, ctx->smsk_size,
ctx              1998 drivers/media/platform/rcar_fdp1.c 					  ctx->smsk_cpu, ctx->smsk_addr[0]);
ctx              1999 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_addr[0] = ctx->smsk_addr[1] = 0;
ctx              2000 drivers/media/platform/rcar_fdp1.c 			ctx->smsk_cpu = NULL;
ctx              2003 drivers/media/platform/rcar_fdp1.c 		WARN(!list_empty(&ctx->fields_queue),
ctx              2009 drivers/media/platform/rcar_fdp1.c 		job = get_queued_job(ctx->fdp1);
ctx              2011 drivers/media/platform/rcar_fdp1.c 			if (FDP1_DEINT_MODE_USES_PREV(ctx->deint_mode))
ctx              2012 drivers/media/platform/rcar_fdp1.c 				fdp1_field_complete(ctx, job->previous);
ctx              2014 drivers/media/platform/rcar_fdp1.c 				fdp1_field_complete(ctx, job->active);
ctx              2019 drivers/media/platform/rcar_fdp1.c 			job = get_queued_job(ctx->fdp1);
ctx              2023 drivers/media/platform/rcar_fdp1.c 		fdp1_field_complete(ctx, ctx->previous);
ctx              2025 drivers/media/platform/rcar_fdp1.c 		WARN(!list_empty(&ctx->fdp1->queued_job_list),
ctx              2028 drivers/media/platform/rcar_fdp1.c 		WARN(!list_empty(&ctx->fdp1->hw_job_list),
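
The start_streaming()/stop_streaming() pair above also shows a coherent-DMA buffer lifetime: one allocation is split into two half-size planes (ctx->smsk_addr[0] and [1]) for the 2D/3D still mask, and the free must use the base DMA address, not the second-half alias. A condensed sketch of that lifetime, with the struct layout assumed from the listing:

	#include <linux/device.h>
	#include <linux/dma-mapping.h>
	#include <linux/errno.h>

	struct demo_ctx {			/* hypothetical */
		struct device *dev;
		void *smsk_cpu;
		size_t smsk_size;
		dma_addr_t smsk_addr[2];
	};

	static int demo_smsk_alloc(struct demo_ctx *ctx, size_t size)
	{
		dma_addr_t base;

		ctx->smsk_cpu = dma_alloc_coherent(ctx->dev, size, &base,
						   GFP_KERNEL);
		if (!ctx->smsk_cpu)
			return -ENOMEM;

		ctx->smsk_size = size;
		ctx->smsk_addr[0] = base;		/* first half */
		ctx->smsk_addr[1] = base + size / 2;	/* second half */
		return 0;
	}

	static void demo_smsk_free(struct demo_ctx *ctx)
	{
		if (!ctx->smsk_cpu)
			return;

		/* free with the base address, never the [1] alias */
		dma_free_coherent(ctx->dev, ctx->smsk_size, ctx->smsk_cpu,
				  ctx->smsk_addr[0]);
		ctx->smsk_cpu = NULL;
		ctx->smsk_addr[0] = ctx->smsk_addr[1] = 0;
	}

Nulling smsk_cpu after the free is what lets stop_streaming() run safely even when ADAPT2D3D mode never allocated the mask.
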
ctx              2046 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = priv;
ctx              2051 drivers/media/platform/rcar_fdp1.c 	src_vq->drv_priv = ctx;
ctx              2056 drivers/media/platform/rcar_fdp1.c 	src_vq->lock = &ctx->fdp1->dev_mutex;
ctx              2057 drivers/media/platform/rcar_fdp1.c 	src_vq->dev = ctx->fdp1->dev;
ctx              2065 drivers/media/platform/rcar_fdp1.c 	dst_vq->drv_priv = ctx;
ctx              2070 drivers/media/platform/rcar_fdp1.c 	dst_vq->lock = &ctx->fdp1->dev_mutex;
ctx              2071 drivers/media/platform/rcar_fdp1.c 	dst_vq->dev = ctx->fdp1->dev;
ctx              2083 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = NULL;
ctx              2090 drivers/media/platform/rcar_fdp1.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2091 drivers/media/platform/rcar_fdp1.c 	if (!ctx) {
ctx              2096 drivers/media/platform/rcar_fdp1.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              2097 drivers/media/platform/rcar_fdp1.c 	file->private_data = &ctx->fh;
ctx              2098 drivers/media/platform/rcar_fdp1.c 	ctx->fdp1 = fdp1;
ctx              2101 drivers/media/platform/rcar_fdp1.c 	INIT_LIST_HEAD(&ctx->fields_queue);
ctx              2103 drivers/media/platform/rcar_fdp1.c 	ctx->translen = 1;
ctx              2104 drivers/media/platform/rcar_fdp1.c 	ctx->sequence = 0;
ctx              2108 drivers/media/platform/rcar_fdp1.c 	v4l2_ctrl_handler_init(&ctx->hdl, 3);
ctx              2109 drivers/media/platform/rcar_fdp1.c 	v4l2_ctrl_new_std_menu_items(&ctx->hdl, &fdp1_ctrl_ops,
ctx              2114 drivers/media/platform/rcar_fdp1.c 	ctrl = v4l2_ctrl_new_std(&ctx->hdl, &fdp1_ctrl_ops,
ctx              2119 drivers/media/platform/rcar_fdp1.c 	v4l2_ctrl_new_std(&ctx->hdl, &fdp1_ctrl_ops,
ctx              2122 drivers/media/platform/rcar_fdp1.c 	if (ctx->hdl.error) {
ctx              2123 drivers/media/platform/rcar_fdp1.c 		ret = ctx->hdl.error;
ctx              2124 drivers/media/platform/rcar_fdp1.c 		v4l2_ctrl_handler_free(&ctx->hdl);
ctx              2125 drivers/media/platform/rcar_fdp1.c 		kfree(ctx);
ctx              2129 drivers/media/platform/rcar_fdp1.c 	ctx->fh.ctrl_handler = &ctx->hdl;
ctx              2130 drivers/media/platform/rcar_fdp1.c 	v4l2_ctrl_handler_setup(&ctx->hdl);
ctx              2134 drivers/media/platform/rcar_fdp1.c 	fdp1_set_format(ctx, &format, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
ctx              2136 drivers/media/platform/rcar_fdp1.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(fdp1->m2m_dev, ctx, &queue_init);
ctx              2138 drivers/media/platform/rcar_fdp1.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              2139 drivers/media/platform/rcar_fdp1.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              2141 drivers/media/platform/rcar_fdp1.c 		v4l2_ctrl_handler_free(&ctx->hdl);
ctx              2142 drivers/media/platform/rcar_fdp1.c 		kfree(ctx);
ctx              2149 drivers/media/platform/rcar_fdp1.c 	v4l2_fh_add(&ctx->fh);
ctx              2152 drivers/media/platform/rcar_fdp1.c 		ctx, ctx->fh.m2m_ctx);
ctx              2162 drivers/media/platform/rcar_fdp1.c 	struct fdp1_ctx *ctx = fh_to_ctx(file->private_data);
ctx              2164 drivers/media/platform/rcar_fdp1.c 	dprintk(fdp1, "Releasing instance %p\n", ctx);
ctx              2166 drivers/media/platform/rcar_fdp1.c 	v4l2_fh_del(&ctx->fh);
ctx              2167 drivers/media/platform/rcar_fdp1.c 	v4l2_fh_exit(&ctx->fh);
ctx              2168 drivers/media/platform/rcar_fdp1.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx              2170 drivers/media/platform/rcar_fdp1.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              2172 drivers/media/platform/rcar_fdp1.c 	kfree(ctx);
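
The rcar_fdp1.c entries above walk the standard V4L2 mem2mem context lifecycle: open() allocates a per-file ctx, initialises the v4l2_fh and control handler, and calls v4l2_m2m_ctx_init(), whose queue_init callback configures both vb2 queues; release() unwinds the same objects in reverse. A minimal sketch of such a queue_init callback follows. It is illustrative only: every my_* identifier and the my_dev field layout are assumptions, not code from the driver.

#include <linux/mutex.h>
#include <media/v4l2-device.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-dma-contig.h>

struct my_dev {
	struct v4l2_device v4l2_dev;
	struct mutex dev_mutex;		/* serialises queue ops and ioctls */
};

struct my_ctx {
	struct v4l2_fh fh;		/* fh.m2m_ctx is set by v4l2_m2m_ctx_init() */
	struct my_dev *dev;
};

static const struct vb2_ops my_qops;	/* queue_setup/buf_queue etc. elided */

static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq)
{
	struct my_ctx *ctx = priv;	/* the drv_priv given to v4l2_m2m_ctx_init() */
	int ret;

	src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
	src_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	src_vq->drv_priv = ctx;		/* later recovered via vb2_get_drv_priv() */
	src_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	src_vq->ops = &my_qops;
	src_vq->mem_ops = &vb2_dma_contig_memops;
	src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	src_vq->lock = &ctx->dev->dev_mutex;
	src_vq->dev = ctx->dev->v4l2_dev.dev;

	ret = vb2_queue_init(src_vq);
	if (ret)
		return ret;

	dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dst_vq->io_modes = VB2_MMAP | VB2_DMABUF;
	dst_vq->drv_priv = ctx;
	dst_vq->buf_struct_size = sizeof(struct v4l2_m2m_buffer);
	dst_vq->ops = &my_qops;
	dst_vq->mem_ops = &vb2_dma_contig_memops;
	dst_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
	dst_vq->lock = &ctx->dev->dev_mutex;
	dst_vq->dev = ctx->dev->v4l2_dev.dev;

	return vb2_queue_init(dst_vq);
}
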
ctx               664 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               666 drivers/media/platform/rcar_jpu.c 	if (ctx->encoder)
ctx               673 drivers/media/platform/rcar_jpu.c 		 dev_name(ctx->jpu->dev));
ctx               724 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               726 drivers/media/platform/rcar_jpu.c 	return jpu_enum_fmt(f, ctx->encoder ? JPU_ENC_CAPTURE :
ctx               733 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               735 drivers/media/platform/rcar_jpu.c 	return jpu_enum_fmt(f, ctx->encoder ? JPU_ENC_OUTPUT : JPU_DEC_OUTPUT);
ctx               738 drivers/media/platform/rcar_jpu.c static struct jpu_q_data *jpu_get_q_data(struct jpu_ctx *ctx,
ctx               742 drivers/media/platform/rcar_jpu.c 		return &ctx->out_q;
ctx               744 drivers/media/platform/rcar_jpu.c 		return &ctx->cap_q;
ctx               768 drivers/media/platform/rcar_jpu.c static int __jpu_try_fmt(struct jpu_ctx *ctx, struct jpu_fmt **fmtinfo,
ctx               778 drivers/media/platform/rcar_jpu.c 	fmt = jpu_find_format(ctx->encoder, pix->pixelformat, f_type);
ctx               782 drivers/media/platform/rcar_jpu.c 		dev_dbg(ctx->jpu->dev, "unknown format; set default format\n");
ctx               783 drivers/media/platform/rcar_jpu.c 		if (ctx->encoder)
ctx               789 drivers/media/platform/rcar_jpu.c 		fmt = jpu_find_format(ctx->encoder, pixelformat, f_type);
ctx               807 drivers/media/platform/rcar_jpu.c 		if (pix->plane_fmt[0].sizeimage <= 0 || ctx->encoder)
ctx               838 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               840 drivers/media/platform/rcar_jpu.c 	if (!v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type))
ctx               843 drivers/media/platform/rcar_jpu.c 	return __jpu_try_fmt(ctx, NULL, &f->fmt.pix_mp, f->type);
ctx               849 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               850 drivers/media/platform/rcar_jpu.c 	struct v4l2_m2m_ctx *m2m_ctx = ctx->fh.m2m_ctx;
ctx               860 drivers/media/platform/rcar_jpu.c 		v4l2_err(&ctx->jpu->v4l2_dev, "%s queue busy\n", __func__);
ctx               864 drivers/media/platform/rcar_jpu.c 	ret = __jpu_try_fmt(ctx, &fmtinfo, &f->fmt.pix_mp, f->type);
ctx               868 drivers/media/platform/rcar_jpu.c 	q_data = jpu_get_q_data(ctx, f->type);
ctx               879 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               881 drivers/media/platform/rcar_jpu.c 	if (!v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type))
ctx               884 drivers/media/platform/rcar_jpu.c 	q_data = jpu_get_q_data(ctx, f->type);
ctx               895 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               898 drivers/media/platform/rcar_jpu.c 	spin_lock_irqsave(&ctx->jpu->lock, flags);
ctx               900 drivers/media/platform/rcar_jpu.c 		ctx->compr_quality = ctrl->val;
ctx               901 drivers/media/platform/rcar_jpu.c 	spin_unlock_irqrestore(&ctx->jpu->lock, flags);
ctx               912 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(priv);
ctx               916 drivers/media/platform/rcar_jpu.c 	src_q_data = jpu_get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
ctx               917 drivers/media/platform/rcar_jpu.c 	dst_q_data = jpu_get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
ctx               919 drivers/media/platform/rcar_jpu.c 	if (ctx->encoder) {
ctx               934 drivers/media/platform/rcar_jpu.c 	__jpu_try_fmt(ctx, NULL, &adj.format, adj_type);
ctx               938 drivers/media/platform/rcar_jpu.c 		dev_err(ctx->jpu->dev, "src and dst formats do not match.\n");
ctx               943 drivers/media/platform/rcar_jpu.c 	return v4l2_m2m_streamon(file, ctx->fh.m2m_ctx, type);
ctx               972 drivers/media/platform/rcar_jpu.c static int jpu_controls_create(struct jpu_ctx *ctx)
ctx               977 drivers/media/platform/rcar_jpu.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 1);
ctx               979 drivers/media/platform/rcar_jpu.c 	ctrl = v4l2_ctrl_new_std(&ctx->ctrl_handler, &jpu_ctrl_ops,
ctx               983 drivers/media/platform/rcar_jpu.c 	if (ctx->ctrl_handler.error) {
ctx               984 drivers/media/platform/rcar_jpu.c 		ret = ctx->ctrl_handler.error;
ctx               988 drivers/media/platform/rcar_jpu.c 	if (!ctx->encoder)
ctx               992 drivers/media/platform/rcar_jpu.c 	ret = v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx               999 drivers/media/platform/rcar_jpu.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1012 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1016 drivers/media/platform/rcar_jpu.c 	q_data = jpu_get_q_data(ctx, vq->type);
ctx              1042 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1046 drivers/media/platform/rcar_jpu.c 	q_data = jpu_get_q_data(ctx, vb->vb2_queue->type);
ctx              1052 drivers/media/platform/rcar_jpu.c 			dev_err(ctx->jpu->dev, "%s field isn't supported\n",
ctx              1062 drivers/media/platform/rcar_jpu.c 			dev_err(ctx->jpu->dev,
ctx              1069 drivers/media/platform/rcar_jpu.c 		if (!ctx->encoder && !V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type))
ctx              1079 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1081 drivers/media/platform/rcar_jpu.c 	if (!ctx->encoder && V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
ctx              1095 drivers/media/platform/rcar_jpu.c 		q_data = &ctx->out_q;
ctx              1101 drivers/media/platform/rcar_jpu.c 		__jpu_try_fmt(ctx, &adjust.fmtinfo, &adjust.format,
ctx              1115 drivers/media/platform/rcar_jpu.c 	if (ctx->fh.m2m_ctx)
ctx              1116 drivers/media/platform/rcar_jpu.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1121 drivers/media/platform/rcar_jpu.c 	dev_err(ctx->jpu->dev, "incompatible or corrupted JPEG data\n");
ctx              1129 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1130 drivers/media/platform/rcar_jpu.c 	struct jpu_q_data *q_data = &ctx->out_q;
ctx              1135 drivers/media/platform/rcar_jpu.c 		vbuf->sequence = jpu_get_q_data(ctx, type)->sequence++;
ctx              1137 drivers/media/platform/rcar_jpu.c 	if (!ctx->encoder || vb->state != VB2_BUF_STATE_DONE ||
ctx              1153 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1154 drivers/media/platform/rcar_jpu.c 	struct jpu_q_data *q_data = jpu_get_q_data(ctx, vq->type);
ctx              1162 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1168 drivers/media/platform/rcar_jpu.c 			vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1170 drivers/media/platform/rcar_jpu.c 			vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1173 drivers/media/platform/rcar_jpu.c 		spin_lock_irqsave(&ctx->jpu->lock, flags);
ctx              1175 drivers/media/platform/rcar_jpu.c 		spin_unlock_irqrestore(&ctx->jpu->lock, flags);
ctx              1193 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = priv;
ctx              1199 drivers/media/platform/rcar_jpu.c 	src_vq->drv_priv = ctx;
ctx              1204 drivers/media/platform/rcar_jpu.c 	src_vq->lock = &ctx->jpu->mutex;
ctx              1205 drivers/media/platform/rcar_jpu.c 	src_vq->dev = ctx->jpu->v4l2_dev.dev;
ctx              1214 drivers/media/platform/rcar_jpu.c 	dst_vq->drv_priv = ctx;
ctx              1219 drivers/media/platform/rcar_jpu.c 	dst_vq->lock = &ctx->jpu->mutex;
ctx              1220 drivers/media/platform/rcar_jpu.c 	dst_vq->dev = ctx->jpu->v4l2_dev.dev;
ctx              1234 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx;
ctx              1237 drivers/media/platform/rcar_jpu.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1238 drivers/media/platform/rcar_jpu.c 	if (!ctx)
ctx              1241 drivers/media/platform/rcar_jpu.c 	v4l2_fh_init(&ctx->fh, vfd);
ctx              1242 drivers/media/platform/rcar_jpu.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx              1243 drivers/media/platform/rcar_jpu.c 	file->private_data = &ctx->fh;
ctx              1244 drivers/media/platform/rcar_jpu.c 	v4l2_fh_add(&ctx->fh);
ctx              1246 drivers/media/platform/rcar_jpu.c 	ctx->jpu = jpu;
ctx              1247 drivers/media/platform/rcar_jpu.c 	ctx->encoder = vfd == &jpu->vfd_encoder;
ctx              1249 drivers/media/platform/rcar_jpu.c 	__jpu_try_fmt(ctx, &ctx->out_q.fmtinfo, &ctx->out_q.format,
ctx              1251 drivers/media/platform/rcar_jpu.c 	__jpu_try_fmt(ctx, &ctx->cap_q.fmtinfo, &ctx->cap_q.format,
ctx              1254 drivers/media/platform/rcar_jpu.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(jpu->m2m_dev, ctx, jpu_queue_init);
ctx              1255 drivers/media/platform/rcar_jpu.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1256 drivers/media/platform/rcar_jpu.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1260 drivers/media/platform/rcar_jpu.c 	ret = jpu_controls_create(ctx);
ctx              1289 drivers/media/platform/rcar_jpu.c 	v4l2_fh_del(&ctx->fh);
ctx              1290 drivers/media/platform/rcar_jpu.c 	v4l2_fh_exit(&ctx->fh);
ctx              1291 drivers/media/platform/rcar_jpu.c 	kfree(ctx);
ctx              1298 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1300 drivers/media/platform/rcar_jpu.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1301 drivers/media/platform/rcar_jpu.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1302 drivers/media/platform/rcar_jpu.c 	v4l2_fh_del(&ctx->fh);
ctx              1303 drivers/media/platform/rcar_jpu.c 	v4l2_fh_exit(&ctx->fh);
ctx              1304 drivers/media/platform/rcar_jpu.c 	kfree(ctx);
ctx              1328 drivers/media/platform/rcar_jpu.c static void jpu_cleanup(struct jpu_ctx *ctx, bool reset)
ctx              1334 drivers/media/platform/rcar_jpu.c 	spin_lock_irqsave(&ctx->jpu->lock, flags);
ctx              1336 drivers/media/platform/rcar_jpu.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1337 drivers/media/platform/rcar_jpu.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1344 drivers/media/platform/rcar_jpu.c 		jpu_write(ctx->jpu, JCCMD_SRST, JCCMD);
ctx              1346 drivers/media/platform/rcar_jpu.c 	spin_unlock_irqrestore(&ctx->jpu->lock, flags);
ctx              1348 drivers/media/platform/rcar_jpu.c 	v4l2_m2m_job_finish(ctx->jpu->m2m_dev, ctx->fh.m2m_ctx);
ctx              1353 drivers/media/platform/rcar_jpu.c 	struct jpu_ctx *ctx = priv;
ctx              1354 drivers/media/platform/rcar_jpu.c 	struct jpu *jpu = ctx->jpu;
ctx              1364 drivers/media/platform/rcar_jpu.c 		jpu_cleanup(ctx, true);
ctx              1368 drivers/media/platform/rcar_jpu.c 	spin_lock_irqsave(&ctx->jpu->lock, flags);
ctx              1370 drivers/media/platform/rcar_jpu.c 	jpu->curr = ctx;
ctx              1372 drivers/media/platform/rcar_jpu.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              1373 drivers/media/platform/rcar_jpu.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              1375 drivers/media/platform/rcar_jpu.c 	if (ctx->encoder) {
ctx              1377 drivers/media/platform/rcar_jpu.c 		q_data = &ctx->out_q;
ctx              1380 drivers/media/platform/rcar_jpu.c 		q_data = &ctx->cap_q;
ctx              1389 drivers/media/platform/rcar_jpu.c 	if (ctx->encoder) {
ctx              1402 drivers/media/platform/rcar_jpu.c 		jpu_buf->compr_quality = ctx->compr_quality;
ctx              1446 drivers/media/platform/rcar_jpu.c 		jpu_set_qtbl(jpu, ctx->compr_quality);
ctx              1452 drivers/media/platform/rcar_jpu.c 			dev_err(ctx->jpu->dev,
ctx              1454 drivers/media/platform/rcar_jpu.c 			spin_unlock_irqrestore(&ctx->jpu->lock, flags);
ctx              1455 drivers/media/platform/rcar_jpu.c 			jpu_cleanup(ctx, false);
ctx              1487 drivers/media/platform/rcar_jpu.c 	spin_unlock_irqrestore(&ctx->jpu->lock, flags);
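
The rcar_jpu.c entries show the other half of the mem2mem contract: device_run() takes the next source/destination buffers and programs the hardware under the device spinlock, while the completion and error paths (jpu_cleanup() above) remove those buffers, mark them done, and call v4l2_m2m_job_finish() so the core can schedule the next queued context. A condensed sketch of such a completion path, reusing the hypothetical my_ctx from the previous sketch and assuming my_dev also carries a spinlock and the m2m device pointer:

/* Sketch only: mirrors the jpu_cleanup() pattern, not the driver's code. */
static void my_job_done(struct my_ctx *ctx, enum vb2_buffer_state state)
{
	struct vb2_v4l2_buffer *src, *dst;
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->lock, flags);
	src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
	dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
	spin_unlock_irqrestore(&ctx->dev->lock, flags);

	if (src)
		v4l2_m2m_buf_done(src, state);	/* VB2_BUF_STATE_DONE or _ERROR */
	if (dst)
		v4l2_m2m_buf_done(dst, state);

	/* Lets the m2m core pick the next ready context for device_run(). */
	v4l2_m2m_job_finish(ctx->dev->m2m_dev, ctx->fh.m2m_ctx);
}
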
ctx                23 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(vq);
ctx                24 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_frame *f = rga_get_frame(ctx, vq->type);
ctx                40 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx                41 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_frame *f = rga_get_frame(ctx, vb->vb2_queue->type);
ctx                54 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx                56 drivers/media/platform/rockchip/rga/rga-buf.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx                62 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(q);
ctx                67 drivers/media/platform/rockchip/rga/rga-buf.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx                69 drivers/media/platform/rockchip/rga/rga-buf.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx                78 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(q);
ctx                79 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rockchip_rga *rga = ctx->rga;
ctx                93 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(q);
ctx                94 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rockchip_rga *rga = ctx->rga;
ctx               115 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rga_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               116 drivers/media/platform/rockchip/rga/rga-buf.c 	struct rockchip_rga *rga = ctx->rga;
ctx               122 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set_src_addr(struct rga_ctx *ctx, void *mmu_pages)
ctx               124 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               135 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set_src1_addr(struct rga_ctx *ctx, void *mmu_pages)
ctx               137 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               148 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set_dst_addr(struct rga_ctx *ctx, void *mmu_pages)
ctx               150 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               161 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set_trans_info(struct rga_ctx *ctx)
ctx               163 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               180 drivers/media/platform/rockchip/rga/rga-hw.c 	src_h = ctx->in.crop.height;
ctx               181 drivers/media/platform/rockchip/rga/rga-hw.c 	src_w = ctx->in.crop.width;
ctx               182 drivers/media/platform/rockchip/rga/rga-hw.c 	src_x = ctx->in.crop.left;
ctx               183 drivers/media/platform/rockchip/rga/rga-hw.c 	src_y = ctx->in.crop.top;
ctx               184 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_h = ctx->out.crop.height;
ctx               185 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_w = ctx->out.crop.width;
ctx               186 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_x = ctx->out.crop.left;
ctx               187 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_y = ctx->out.crop.top;
ctx               198 drivers/media/platform/rockchip/rga/rga-hw.c 	src_info.data.format = ctx->in.fmt->hw_format;
ctx               199 drivers/media/platform/rockchip/rga/rga-hw.c 	src_info.data.swap = ctx->in.fmt->color_swap;
ctx               200 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_info.data.format = ctx->out.fmt->hw_format;
ctx               201 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_info.data.swap = ctx->out.fmt->color_swap;
ctx               203 drivers/media/platform/rockchip/rga/rga-hw.c 	if (ctx->in.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
ctx               204 drivers/media/platform/rockchip/rga/rga-hw.c 		if (ctx->out.fmt->hw_format < RGA_COLOR_FMT_YUV422SP) {
ctx               205 drivers/media/platform/rockchip/rga/rga-hw.c 			switch (ctx->in.colorspace) {
ctx               218 drivers/media/platform/rockchip/rga/rga-hw.c 	if (ctx->out.fmt->hw_format >= RGA_COLOR_FMT_YUV422SP) {
ctx               219 drivers/media/platform/rockchip/rga/rga-hw.c 		switch (ctx->out.colorspace) {
ctx               229 drivers/media/platform/rockchip/rga/rga-hw.c 	if (ctx->vflip)
ctx               232 drivers/media/platform/rockchip/rga/rga-hw.c 	if (ctx->hflip)
ctx               235 drivers/media/platform/rockchip/rga/rga-hw.c 	switch (ctx->rotate) {
ctx               302 drivers/media/platform/rockchip/rga/rga-hw.c 	src_vir_info.data.vir_stride = ctx->in.stride >> 2;
ctx               303 drivers/media/platform/rockchip/rga/rga-hw.c 	src_vir_info.data.vir_width = ctx->in.stride >> 2;
ctx               308 drivers/media/platform/rockchip/rga/rga-hw.c 	dst_vir_info.data.vir_stride = ctx->out.stride >> 2;
ctx               315 drivers/media/platform/rockchip/rga/rga-hw.c 	src_offsets = rga_get_addr_offset(&ctx->in, src_x, src_y,
ctx               321 drivers/media/platform/rockchip/rga/rga-hw.c 	offsets = rga_get_addr_offset(&ctx->out, dst_x, dst_y, dst_w, dst_h);
ctx               352 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set_mode(struct rga_ctx *ctx)
ctx               354 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               375 drivers/media/platform/rockchip/rga/rga-hw.c static void rga_cmd_set(struct rga_ctx *ctx)
ctx               377 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rockchip_rga *rga = ctx->rga;
ctx               381 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set_src_addr(ctx, rga->src_mmu_pages);
ctx               386 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set_src1_addr(ctx, rga->dst_mmu_pages);
ctx               388 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set_dst_addr(ctx, rga->dst_mmu_pages);
ctx               389 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set_mode(ctx);
ctx               391 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set_trans_info(ctx);
ctx               402 drivers/media/platform/rockchip/rga/rga-hw.c 	struct rga_ctx *ctx = rga->curr;
ctx               404 drivers/media/platform/rockchip/rga/rga-hw.c 	rga_cmd_set(ctx);
ctx                36 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = prv;
ctx                37 drivers/media/platform/rockchip/rga/rga.c 	struct rockchip_rga *rga = ctx->rga;
ctx                43 drivers/media/platform/rockchip/rga/rga.c 	rga->curr = ctx;
ctx                45 drivers/media/platform/rockchip/rga/rga.c 	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx                46 drivers/media/platform/rockchip/rga/rga.c 	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx                67 drivers/media/platform/rockchip/rga/rga.c 		struct rga_ctx *ctx = rga->curr;
ctx                69 drivers/media/platform/rockchip/rga/rga.c 		WARN_ON(!ctx);
ctx                73 drivers/media/platform/rockchip/rga/rga.c 		src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx                74 drivers/media/platform/rockchip/rga/rga.c 		dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx                86 drivers/media/platform/rockchip/rga/rga.c 		v4l2_m2m_job_finish(rga->m2m_dev, ctx->fh.m2m_ctx);
ctx                99 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = priv;
ctx               104 drivers/media/platform/rockchip/rga/rga.c 	src_vq->drv_priv = ctx;
ctx               109 drivers/media/platform/rockchip/rga/rga.c 	src_vq->lock = &ctx->rga->mutex;
ctx               110 drivers/media/platform/rockchip/rga/rga.c 	src_vq->dev = ctx->rga->v4l2_dev.dev;
ctx               118 drivers/media/platform/rockchip/rga/rga.c 	dst_vq->drv_priv = ctx;
ctx               123 drivers/media/platform/rockchip/rga/rga.c 	dst_vq->lock = &ctx->rga->mutex;
ctx               124 drivers/media/platform/rockchip/rga/rga.c 	dst_vq->dev = ctx->rga->v4l2_dev.dev;
ctx               131 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = container_of(ctrl->handler, struct rga_ctx,
ctx               135 drivers/media/platform/rockchip/rga/rga.c 	spin_lock_irqsave(&ctx->rga->ctrl_lock, flags);
ctx               138 drivers/media/platform/rockchip/rga/rga.c 		ctx->hflip = ctrl->val;
ctx               141 drivers/media/platform/rockchip/rga/rga.c 		ctx->vflip = ctrl->val;
ctx               144 drivers/media/platform/rockchip/rga/rga.c 		ctx->rotate = ctrl->val;
ctx               147 drivers/media/platform/rockchip/rga/rga.c 		ctx->fill_color = ctrl->val;
ctx               150 drivers/media/platform/rockchip/rga/rga.c 	spin_unlock_irqrestore(&ctx->rga->ctrl_lock, flags);
ctx               158 drivers/media/platform/rockchip/rga/rga.c static int rga_setup_ctrls(struct rga_ctx *ctx)
ctx               160 drivers/media/platform/rockchip/rga/rga.c 	struct rockchip_rga *rga = ctx->rga;
ctx               162 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 4);
ctx               164 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
ctx               167 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
ctx               170 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
ctx               173 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &rga_ctrl_ops,
ctx               176 drivers/media/platform/rockchip/rga/rga.c 	if (ctx->ctrl_handler.error) {
ctx               177 drivers/media/platform/rockchip/rga/rga.c 		int err = ctx->ctrl_handler.error;
ctx               180 drivers/media/platform/rockchip/rga/rga.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               358 drivers/media/platform/rockchip/rga/rga.c struct rga_frame *rga_get_frame(struct rga_ctx *ctx, enum v4l2_buf_type type)
ctx               362 drivers/media/platform/rockchip/rga/rga.c 		return &ctx->in;
ctx               364 drivers/media/platform/rockchip/rga/rga.c 		return &ctx->out;
ctx               373 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = NULL;
ctx               376 drivers/media/platform/rockchip/rga/rga.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               377 drivers/media/platform/rockchip/rga/rga.c 	if (!ctx)
ctx               379 drivers/media/platform/rockchip/rga/rga.c 	ctx->rga = rga;
ctx               381 drivers/media/platform/rockchip/rga/rga.c 	ctx->in = def_frame;
ctx               382 drivers/media/platform/rockchip/rga/rga.c 	ctx->out = def_frame;
ctx               385 drivers/media/platform/rockchip/rga/rga.c 		kfree(ctx);
ctx               388 drivers/media/platform/rockchip/rga/rga.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(rga->m2m_dev, ctx, &queue_init);
ctx               389 drivers/media/platform/rockchip/rga/rga.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               390 drivers/media/platform/rockchip/rga/rga.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               392 drivers/media/platform/rockchip/rga/rga.c 		kfree(ctx);
ctx               395 drivers/media/platform/rockchip/rga/rga.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               396 drivers/media/platform/rockchip/rga/rga.c 	file->private_data = &ctx->fh;
ctx               397 drivers/media/platform/rockchip/rga/rga.c 	v4l2_fh_add(&ctx->fh);
ctx               399 drivers/media/platform/rockchip/rga/rga.c 	rga_setup_ctrls(ctx);
ctx               402 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx               404 drivers/media/platform/rockchip/rga/rga.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               412 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx =
ctx               414 drivers/media/platform/rockchip/rga/rga.c 	struct rockchip_rga *rga = ctx->rga;
ctx               418 drivers/media/platform/rockchip/rga/rga.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               420 drivers/media/platform/rockchip/rga/rga.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               421 drivers/media/platform/rockchip/rga/rga.c 	v4l2_fh_del(&ctx->fh);
ctx               422 drivers/media/platform/rockchip/rga/rga.c 	v4l2_fh_exit(&ctx->fh);
ctx               423 drivers/media/platform/rockchip/rga/rga.c 	kfree(ctx);
ctx               464 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = prv;
ctx               468 drivers/media/platform/rockchip/rga/rga.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               471 drivers/media/platform/rockchip/rga/rga.c 	frm = rga_get_frame(ctx, f->type);
ctx               521 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = prv;
ctx               522 drivers/media/platform/rockchip/rga/rga.c 	struct rockchip_rga *rga = ctx->rga;
ctx               534 drivers/media/platform/rockchip/rga/rga.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               539 drivers/media/platform/rockchip/rga/rga.c 	frm = rga_get_frame(ctx, f->type);
ctx               564 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = prv;
ctx               568 drivers/media/platform/rockchip/rga/rga.c 	f = rga_get_frame(ctx, s->type);
ctx               612 drivers/media/platform/rockchip/rga/rga.c 	struct rga_ctx *ctx = prv;
ctx               613 drivers/media/platform/rockchip/rga/rga.c 	struct rockchip_rga *rga = ctx->rga;
ctx               617 drivers/media/platform/rockchip/rga/rga.c 	f = rga_get_frame(ctx, s->type);
ctx                88 drivers/media/platform/rockchip/rga/rga.h struct rga_frame *rga_get_frame(struct rga_ctx *ctx, enum v4l2_buf_type type);
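
Across the rockchip/rga entries and the s5p-g2d entries that follow, per-direction state lives in the context and is selected by buffer type through a tiny helper (rga_get_frame() above, get_frame() below). A sketch of that helper's shape, with a hypothetical my_frame type and assumed ctx->in/ctx->out fields:

#include <linux/err.h>

static struct my_frame *my_get_frame(struct my_ctx *ctx,
				     enum v4l2_buf_type type)
{
	switch (type) {
	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
		return &ctx->in;	/* frames userspace feeds to the engine */
	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
		return &ctx->out;	/* frames the engine writes back */
	default:
		return ERR_PTR(-EINVAL);
	}
}

Callers are expected to test the result with IS_ERR(), so an ioctl on an unsupported queue type fails with -EINVAL instead of dereferencing a bad pointer.
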
ctx                82 drivers/media/platform/s5p-g2d/g2d.c static struct g2d_frame *get_frame(struct g2d_ctx *ctx,
ctx                87 drivers/media/platform/s5p-g2d/g2d.c 		return &ctx->in;
ctx                89 drivers/media/platform/s5p-g2d/g2d.c 		return &ctx->out;
ctx                99 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = vb2_get_drv_priv(vq);
ctx               100 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_frame *f = get_frame(ctx, vq->type);
ctx               116 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               117 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_frame *f = get_frame(ctx, vb->vb2_queue->type);
ctx               128 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               129 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               143 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = priv;
ctx               148 drivers/media/platform/s5p-g2d/g2d.c 	src_vq->drv_priv = ctx;
ctx               153 drivers/media/platform/s5p-g2d/g2d.c 	src_vq->lock = &ctx->dev->mutex;
ctx               154 drivers/media/platform/s5p-g2d/g2d.c 	src_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               162 drivers/media/platform/s5p-g2d/g2d.c 	dst_vq->drv_priv = ctx;
ctx               167 drivers/media/platform/s5p-g2d/g2d.c 	dst_vq->lock = &ctx->dev->mutex;
ctx               168 drivers/media/platform/s5p-g2d/g2d.c 	dst_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               175 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = container_of(ctrl->handler, struct g2d_ctx,
ctx               179 drivers/media/platform/s5p-g2d/g2d.c 	spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
ctx               183 drivers/media/platform/s5p-g2d/g2d.c 			ctx->rop = ROP4_INVERT;
ctx               185 drivers/media/platform/s5p-g2d/g2d.c 			ctx->rop = ROP4_COPY;
ctx               189 drivers/media/platform/s5p-g2d/g2d.c 		ctx->flip = ctx->ctrl_hflip->val | (ctx->ctrl_vflip->val << 1);
ctx               193 drivers/media/platform/s5p-g2d/g2d.c 	spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
ctx               201 drivers/media/platform/s5p-g2d/g2d.c static int g2d_setup_ctrls(struct g2d_ctx *ctx)
ctx               203 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_dev *dev = ctx->dev;
ctx               205 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 3);
ctx               207 drivers/media/platform/s5p-g2d/g2d.c 	ctx->ctrl_hflip = v4l2_ctrl_new_std(&ctx->ctrl_handler, &g2d_ctrl_ops,
ctx               210 drivers/media/platform/s5p-g2d/g2d.c 	ctx->ctrl_vflip = v4l2_ctrl_new_std(&ctx->ctrl_handler, &g2d_ctrl_ops,
ctx               214 drivers/media/platform/s5p-g2d/g2d.c 		&ctx->ctrl_handler,
ctx               221 drivers/media/platform/s5p-g2d/g2d.c 	if (ctx->ctrl_handler.error) {
ctx               222 drivers/media/platform/s5p-g2d/g2d.c 		int err = ctx->ctrl_handler.error;
ctx               224 drivers/media/platform/s5p-g2d/g2d.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               228 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_ctrl_cluster(2, &ctx->ctrl_hflip);
ctx               236 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = NULL;
ctx               239 drivers/media/platform/s5p-g2d/g2d.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               240 drivers/media/platform/s5p-g2d/g2d.c 	if (!ctx)
ctx               242 drivers/media/platform/s5p-g2d/g2d.c 	ctx->dev = dev;
ctx               244 drivers/media/platform/s5p-g2d/g2d.c 	ctx->in		= def_frame;
ctx               245 drivers/media/platform/s5p-g2d/g2d.c 	ctx->out	= def_frame;
ctx               248 drivers/media/platform/s5p-g2d/g2d.c 		kfree(ctx);
ctx               251 drivers/media/platform/s5p-g2d/g2d.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
ctx               252 drivers/media/platform/s5p-g2d/g2d.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               253 drivers/media/platform/s5p-g2d/g2d.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               255 drivers/media/platform/s5p-g2d/g2d.c 		kfree(ctx);
ctx               258 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               259 drivers/media/platform/s5p-g2d/g2d.c 	file->private_data = &ctx->fh;
ctx               260 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_fh_add(&ctx->fh);
ctx               262 drivers/media/platform/s5p-g2d/g2d.c 	g2d_setup_ctrls(ctx);
ctx               265 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx               267 drivers/media/platform/s5p-g2d/g2d.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               277 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = fh2ctx(file->private_data);
ctx               279 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               280 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_fh_del(&ctx->fh);
ctx               281 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_fh_exit(&ctx->fh);
ctx               282 drivers/media/platform/s5p-g2d/g2d.c 	kfree(ctx);
ctx               307 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               311 drivers/media/platform/s5p-g2d/g2d.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               314 drivers/media/platform/s5p-g2d/g2d.c 	frm = get_frame(ctx, f->type);
ctx               359 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               360 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_dev *dev = ctx->dev;
ctx               371 drivers/media/platform/s5p-g2d/g2d.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               376 drivers/media/platform/s5p-g2d/g2d.c 	frm = get_frame(ctx, f->type);
ctx               400 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               403 drivers/media/platform/s5p-g2d/g2d.c 	f = get_frame(ctx, s->type);
ctx               450 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               451 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_dev *dev = ctx->dev;
ctx               454 drivers/media/platform/s5p-g2d/g2d.c 	f = get_frame(ctx, s->type);
ctx               478 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               485 drivers/media/platform/s5p-g2d/g2d.c 	f = get_frame(ctx, s->type);
ctx               500 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = prv;
ctx               501 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_dev *dev = ctx->dev;
ctx               506 drivers/media/platform/s5p-g2d/g2d.c 	dev->curr = ctx;
ctx               508 drivers/media/platform/s5p-g2d/g2d.c 	src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               509 drivers/media/platform/s5p-g2d/g2d.c 	dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               516 drivers/media/platform/s5p-g2d/g2d.c 	g2d_set_src_size(dev, &ctx->in);
ctx               519 drivers/media/platform/s5p-g2d/g2d.c 	g2d_set_dst_size(dev, &ctx->out);
ctx               522 drivers/media/platform/s5p-g2d/g2d.c 	g2d_set_rop4(dev, ctx->rop);
ctx               523 drivers/media/platform/s5p-g2d/g2d.c 	g2d_set_flip(dev, ctx->flip);
ctx               525 drivers/media/platform/s5p-g2d/g2d.c 	if (ctx->in.c_width != ctx->out.c_width ||
ctx               526 drivers/media/platform/s5p-g2d/g2d.c 		ctx->in.c_height != ctx->out.c_height) {
ctx               530 drivers/media/platform/s5p-g2d/g2d.c 			g2d_set_v41_stretch(dev, &ctx->in, &ctx->out);
ctx               542 drivers/media/platform/s5p-g2d/g2d.c 	struct g2d_ctx *ctx = dev->curr;
ctx               548 drivers/media/platform/s5p-g2d/g2d.c 	BUG_ON(ctx == NULL);
ctx               550 drivers/media/platform/s5p-g2d/g2d.c 	src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               551 drivers/media/platform/s5p-g2d/g2d.c 	dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               564 drivers/media/platform/s5p-g2d/g2d.c 	v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);
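
The s5p-g2d entries add one refinement over the rga control code: the HFLIP and VFLIP controls are clustered (v4l2_ctrl_cluster(2, &ctx->ctrl_hflip) above), so a single s_ctrl invocation sees both values and can fold them into one flip word under the device spinlock. A hedged sketch of that arrangement; the my_* names are assumptions, and the two control pointers must be adjacent members of the context struct for the cluster call to work:

#include <media/v4l2-ctrls.h>

static int my_s_ctrl(struct v4l2_ctrl *ctrl)
{
	struct my_ctx *ctx = container_of(ctrl->handler, struct my_ctx, hdl);
	unsigned long flags;

	spin_lock_irqsave(&ctx->dev->ctrl_lock, flags);
	/* Clustered controls: both ->val fields are up to date here. */
	ctx->flip = ctx->hflip->val | (ctx->vflip->val << 1);
	spin_unlock_irqrestore(&ctx->dev->ctrl_lock, flags);
	return 0;
}

static const struct v4l2_ctrl_ops my_ctrl_ops = {
	.s_ctrl	= my_s_ctrl,
};

static int my_setup_ctrls(struct my_ctx *ctx)
{
	v4l2_ctrl_handler_init(&ctx->hdl, 2);
	ctx->hflip = v4l2_ctrl_new_std(&ctx->hdl, &my_ctrl_ops,
				       V4L2_CID_HFLIP, 0, 1, 1, 0);
	ctx->vflip = v4l2_ctrl_new_std(&ctx->hdl, &my_ctrl_ops,
				       V4L2_CID_VFLIP, 0, 1, 1, 0);
	if (ctx->hdl.error) {
		int err = ctx->hdl.error;

		v4l2_ctrl_handler_free(&ctx->hdl);
		return err;
	}
	/* ctx->hflip must be the first of the two adjacent pointers. */
	v4l2_ctrl_cluster(2, &ctx->hflip);
	return 0;
}
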
ctx               525 drivers/media/platform/s5p-jpeg/jpeg-core.c 					struct s5p_jpeg_ctx *ctx)
ctx               529 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->subsampling != V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY) {
ctx               536 drivers/media/platform/s5p-jpeg/jpeg-core.c 	switch (ctx->subsampling) {
ctx               587 drivers/media/platform/s5p-jpeg/jpeg-core.c static int s5p_jpeg_to_user_subsampling(struct s5p_jpeg_ctx *ctx)
ctx               589 drivers/media/platform/s5p-jpeg/jpeg-core.c 	switch (ctx->jpeg->variant->version) {
ctx               591 drivers/media/platform/s5p-jpeg/jpeg-core.c 		WARN_ON(ctx->subsampling > 3);
ctx               592 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->subsampling > 2)
ctx               594 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return ctx->subsampling;
ctx               597 drivers/media/platform/s5p-jpeg/jpeg-core.c 		WARN_ON(ctx->subsampling > 6);
ctx               598 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->subsampling > 3)
ctx               600 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return exynos3250_decoded_subsampling[ctx->subsampling];
ctx               602 drivers/media/platform/s5p-jpeg/jpeg-core.c 		WARN_ON(ctx->subsampling > 3);
ctx               603 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->subsampling > 2)
ctx               605 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return exynos4x12_decoded_subsampling[ctx->subsampling];
ctx               607 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return ctx->subsampling; /* parsed from header */
ctx               609 drivers/media/platform/s5p-jpeg/jpeg-core.c 		WARN_ON(ctx->subsampling > 3);
ctx               764 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_parse_decode_h_tbl(struct s5p_jpeg_ctx *ctx)
ctx               766 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx               767 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct vb2_v4l2_buffer *vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               774 drivers/media/platform/s5p-jpeg/jpeg-core.c 		(unsigned long)vb2_plane_vaddr(&vb->vb2_buf, 0) + ctx->out_q.sos + 2;
ctx               801 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_parse_huff_tbl(struct s5p_jpeg_ctx *ctx)
ctx               803 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx               804 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct vb2_v4l2_buffer *vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               809 drivers/media/platform/s5p-jpeg/jpeg-core.c 	for (j = 0; j < ctx->out_q.dht.n; ++j) {
ctx               810 drivers/media/platform/s5p-jpeg/jpeg-core.c 		jpeg_buffer.size = ctx->out_q.dht.len[j];
ctx               812 drivers/media/platform/s5p-jpeg/jpeg-core.c 				   ctx->out_q.dht.marker[j];
ctx               860 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_parse_decode_q_tbl(struct s5p_jpeg_ctx *ctx)
ctx               862 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx               863 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct vb2_v4l2_buffer *vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               867 drivers/media/platform/s5p-jpeg/jpeg-core.c 	jpeg_buffer.size = ctx->out_q.sof_len;
ctx               869 drivers/media/platform/s5p-jpeg/jpeg-core.c 		(unsigned long)vb2_plane_vaddr(&vb->vb2_buf, 0) + ctx->out_q.sof;
ctx               891 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_parse_q_tbl(struct s5p_jpeg_ctx *ctx)
ctx               893 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx               894 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct vb2_v4l2_buffer *vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               899 drivers/media/platform/s5p-jpeg/jpeg-core.c 	for (j = 0; j < ctx->out_q.dqt.n; ++j) {
ctx               900 drivers/media/platform/s5p-jpeg/jpeg-core.c 		jpeg_buffer.size = ctx->out_q.dqt.len[j];
ctx               902 drivers/media/platform/s5p-jpeg/jpeg-core.c 				   ctx->out_q.dqt.marker[j];
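
Each of the exynos4_jpeg_parse_* entries above begins the same way: it fetches the queued source buffer through the m2m core and maps plane 0 into the kernel so the JPEG markers can be walked in place. The access pattern, reduced to a hedged sketch with assumed my_* names:

/* Sketch: CPU view of the next queued source plane; not driver code. */
static void *my_next_src_vaddr(struct my_ctx *ctx, unsigned long *payload)
{
	struct vb2_v4l2_buffer *vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);

	*payload = vb2_get_plane_payload(&vb->vb2_buf, 0);	/* bytesused */
	return vb2_plane_vaddr(&vb->vb2_buf, 0);	/* NULL if not CPU-mappable */
}
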
ctx               940 drivers/media/platform/s5p-jpeg/jpeg-core.c static struct s5p_jpeg_fmt *s5p_jpeg_find_format(struct s5p_jpeg_ctx *ctx,
ctx               942 drivers/media/platform/s5p-jpeg/jpeg-core.c static int s5p_jpeg_controls_create(struct s5p_jpeg_ctx *ctx);
ctx               948 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx;
ctx               952 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               953 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (!ctx)
ctx               961 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_init(&ctx->fh, vfd);
ctx               963 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               964 drivers/media/platform/s5p-jpeg/jpeg-core.c 	file->private_data = &ctx->fh;
ctx               965 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_add(&ctx->fh);
ctx               967 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->jpeg = jpeg;
ctx               969 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->mode = S5P_JPEG_ENCODE;
ctx               970 drivers/media/platform/s5p-jpeg/jpeg-core.c 		out_fmt = s5p_jpeg_find_format(ctx, V4L2_PIX_FMT_RGB565,
ctx               972 drivers/media/platform/s5p-jpeg/jpeg-core.c 		cap_fmt = s5p_jpeg_find_format(ctx, V4L2_PIX_FMT_JPEG,
ctx               975 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->mode = S5P_JPEG_DECODE;
ctx               976 drivers/media/platform/s5p-jpeg/jpeg-core.c 		out_fmt = s5p_jpeg_find_format(ctx, V4L2_PIX_FMT_JPEG,
ctx               978 drivers/media/platform/s5p-jpeg/jpeg-core.c 		cap_fmt = s5p_jpeg_find_format(ctx, V4L2_PIX_FMT_YUYV,
ctx               980 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->scale_factor = EXYNOS3250_DEC_SCALE_FACTOR_8_8;
ctx               983 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(jpeg->m2m_dev, ctx, queue_init);
ctx               984 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               985 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               989 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->out_q.fmt = out_fmt;
ctx               990 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->cap_q.fmt = cap_fmt;
ctx               992 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ret = s5p_jpeg_controls_create(ctx);
ctx              1000 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_del(&ctx->fh);
ctx              1001 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_exit(&ctx->fh);
ctx              1004 drivers/media/platform/s5p-jpeg/jpeg-core.c 	kfree(ctx);
ctx              1011 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1014 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1015 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1016 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_del(&ctx->fh);
ctx              1017 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_fh_exit(&ctx->fh);
ctx              1018 drivers/media/platform/s5p-jpeg/jpeg-core.c 	kfree(ctx);
ctx              1072 drivers/media/platform/s5p-jpeg/jpeg-core.c static bool s5p_jpeg_subsampling_decode(struct s5p_jpeg_ctx *ctx,
ctx              1079 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = V4L2_JPEG_CHROMA_SUBSAMPLING_444;
ctx              1082 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = V4L2_JPEG_CHROMA_SUBSAMPLING_422;
ctx              1085 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = V4L2_JPEG_CHROMA_SUBSAMPLING_420;
ctx              1088 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY;
ctx              1095 drivers/media/platform/s5p-jpeg/jpeg-core.c 		version = ctx->jpeg->variant->version;
ctx              1101 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = V4L2_JPEG_CHROMA_SUBSAMPLING_411;
ctx              1112 drivers/media/platform/s5p-jpeg/jpeg-core.c 			       struct s5p_jpeg_ctx *ctx)
ctx              1221 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (notfound || !sos || !s5p_jpeg_subsampling_decode(ctx, subsampling))
ctx              1247 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1249 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              1261 drivers/media/platform/s5p-jpeg/jpeg-core.c 		 dev_name(ctx->jpeg->dev));
ctx              1265 drivers/media/platform/s5p-jpeg/jpeg-core.c static int enum_fmt(struct s5p_jpeg_ctx *ctx,
ctx              1270 drivers/media/platform/s5p-jpeg/jpeg-core.c 	unsigned int fmt_ver_flag = ctx->jpeg->variant->fmt_ver_flag;
ctx              1297 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1299 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE)
ctx              1300 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return enum_fmt(ctx, sjpeg_formats, SJPEG_NUM_FORMATS, f,
ctx              1303 drivers/media/platform/s5p-jpeg/jpeg-core.c 	return enum_fmt(ctx, sjpeg_formats, SJPEG_NUM_FORMATS, f,
ctx              1310 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1312 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE)
ctx              1313 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return enum_fmt(ctx, sjpeg_formats, SJPEG_NUM_FORMATS, f,
ctx              1316 drivers/media/platform/s5p-jpeg/jpeg-core.c 	return enum_fmt(ctx, sjpeg_formats, SJPEG_NUM_FORMATS, f,
ctx              1320 drivers/media/platform/s5p-jpeg/jpeg-core.c static struct s5p_jpeg_q_data *get_q_data(struct s5p_jpeg_ctx *ctx,
ctx              1324 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return &ctx->out_q;
ctx              1326 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return &ctx->cap_q;
ctx              1365 drivers/media/platform/s5p-jpeg/jpeg-core.c static struct s5p_jpeg_fmt *s5p_jpeg_find_format(struct s5p_jpeg_ctx *ctx,
ctx              1370 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE)
ctx              1384 drivers/media/platform/s5p-jpeg/jpeg-core.c 		    fmt->flags & ctx->jpeg->variant->fmt_ver_flag) {
ctx              1392 drivers/media/platform/s5p-jpeg/jpeg-core.c static void jpeg_bound_align_image(struct s5p_jpeg_ctx *ctx,
ctx              1406 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->jpeg->variant->hw3250_compat) {
ctx              1429 drivers/media/platform/s5p-jpeg/jpeg-core.c 			  struct s5p_jpeg_ctx *ctx, int q_type)
ctx              1442 drivers/media/platform/s5p-jpeg/jpeg-core.c 		jpeg_bound_align_image(ctx, &pix->width, S5P_JPEG_MIN_WIDTH,
ctx              1447 drivers/media/platform/s5p-jpeg/jpeg-core.c 		jpeg_bound_align_image(ctx, &pix->width, S5P_JPEG_MIN_WIDTH,
ctx              1476 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1481 drivers/media/platform/s5p-jpeg/jpeg-core.c 	fmt = s5p_jpeg_find_format(ctx, f->fmt.pix.pixelformat,
ctx              1484 drivers/media/platform/s5p-jpeg/jpeg-core.c 		v4l2_err(&ctx->jpeg->v4l2_dev,
ctx              1490 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (!ctx->jpeg->variant->hw_ex4_compat || ctx->mode != S5P_JPEG_DECODE)
ctx              1500 drivers/media/platform/s5p-jpeg/jpeg-core.c 	    (fmt->subsampling < ctx->subsampling)) {
ctx              1501 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ret = s5p_jpeg_adjust_fourcc_to_subsampling(ctx->subsampling,
ctx              1504 drivers/media/platform/s5p-jpeg/jpeg-core.c 							    ctx);
ctx              1508 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = s5p_jpeg_find_format(ctx, pix->pixelformat,
ctx              1518 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->subsampling == V4L2_JPEG_CHROMA_SUBSAMPLING_420 &&
ctx              1519 drivers/media/platform/s5p-jpeg/jpeg-core.c 	    (ctx->out_q.w & 1) &&
ctx              1524 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = s5p_jpeg_find_format(ctx, pix->pixelformat,
ctx              1529 drivers/media/platform/s5p-jpeg/jpeg-core.c 	return vidioc_try_fmt(f, fmt, ctx, FMT_TYPE_CAPTURE);
ctx              1535 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1538 drivers/media/platform/s5p-jpeg/jpeg-core.c 	fmt = s5p_jpeg_find_format(ctx, f->fmt.pix.pixelformat,
ctx              1541 drivers/media/platform/s5p-jpeg/jpeg-core.c 		v4l2_err(&ctx->jpeg->v4l2_dev,
ctx              1547 drivers/media/platform/s5p-jpeg/jpeg-core.c 	return vidioc_try_fmt(f, fmt, ctx, FMT_TYPE_OUTPUT);
ctx              1550 drivers/media/platform/s5p-jpeg/jpeg-core.c static int exynos4_jpeg_get_output_buffer_size(struct s5p_jpeg_ctx *ctx,
ctx              1570 drivers/media/platform/s5p-jpeg/jpeg-core.c 	jpeg_bound_align_image(ctx, &w, S5P_JPEG_MIN_WIDTH,
ctx              1575 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->jpeg->variant->version == SJPEG_EXYNOS4)
ctx              1581 drivers/media/platform/s5p-jpeg/jpeg-core.c static int exynos3250_jpeg_try_downscale(struct s5p_jpeg_ctx *ctx,
ctx              1706 drivers/media/platform/s5p-jpeg/jpeg-core.c static int exynos3250_jpeg_try_downscale(struct s5p_jpeg_ctx *ctx,
ctx              1711 drivers/media/platform/s5p-jpeg/jpeg-core.c 	w_ratio = ctx->out_q.w / r->width;
ctx              1712 drivers/media/platform/s5p-jpeg/jpeg-core.c 	h_ratio = ctx->out_q.h / r->height;
ctx              1721 drivers/media/platform/s5p-jpeg/jpeg-core.c 			ctx->scale_factor = cur_ratio;
ctx              1726 drivers/media/platform/s5p-jpeg/jpeg-core.c 	r->width = round_down(ctx->out_q.w / ctx->scale_factor, 2);
ctx              1727 drivers/media/platform/s5p-jpeg/jpeg-core.c 	r->height = round_down(ctx->out_q.h / ctx->scale_factor, 2);
ctx              1729 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.width = r->width;
ctx              1730 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.height = r->height;
ctx              1731 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.left = 0;
ctx              1732 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.top = 0;
ctx              1734 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_altered = true;
ctx              1752 drivers/media/platform/s5p-jpeg/jpeg-core.c static int exynos3250_jpeg_try_crop(struct s5p_jpeg_ctx *ctx,
ctx              1758 drivers/media/platform/s5p-jpeg/jpeg-core.c 	switch (ctx->cap_q.fmt->fourcc) {
ctx              1776 drivers/media/platform/s5p-jpeg/jpeg-core.c 	base_rect.width = ctx->out_q.w;
ctx              1777 drivers/media/platform/s5p-jpeg/jpeg-core.c 	base_rect.height = ctx->out_q.h;
ctx              1787 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.left = r->left;
ctx              1788 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.top = r->top;
ctx              1789 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.width = r->width;
ctx              1790 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_rect.height = r->height;
ctx              1792 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctx->crop_altered = true;
ctx              1804 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(priv);
ctx              1816 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.width = ctx->out_q.w;
ctx              1817 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.height = ctx->out_q.h;
ctx              1824 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.width = ctx->crop_rect.width;
ctx              1825 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.height = ctx->crop_rect.height;
ctx              1826 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.left = ctx->crop_rect.left;
ctx              1827 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s->r.top = ctx->crop_rect.top;
ctx              1841 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1849 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->mode != S5P_JPEG_DECODE)
ctx              1851 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->jpeg->variant->hw3250_compat)
ctx              1852 drivers/media/platform/s5p-jpeg/jpeg-core.c 			ret = exynos3250_jpeg_try_downscale(ctx, rect);
ctx              1854 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->mode != S5P_JPEG_ENCODE)
ctx              1856 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->jpeg->variant->hw3250_compat)
ctx              1857 drivers/media/platform/s5p-jpeg/jpeg-core.c 			ret = exynos3250_jpeg_try_crop(ctx, rect);
ctx              1865 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              1866 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              1872 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctrl->val = s5p_jpeg_to_user_subsampling(ctx);
ctx              1880 drivers/media/platform/s5p-jpeg/jpeg-core.c static int s5p_jpeg_adjust_subs_ctrl(struct s5p_jpeg_ctx *ctx, int *ctrl_val)
ctx              1882 drivers/media/platform/s5p-jpeg/jpeg-core.c 	switch (ctx->jpeg->variant->version) {
ctx              1891 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.fmt->fourcc == V4L2_PIX_FMT_RGB32)
ctx              1900 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.fmt->fourcc != V4L2_PIX_FMT_GREY &&
ctx              1911 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->out_q.fmt->subsampling > *ctrl_val)
ctx              1912 drivers/media/platform/s5p-jpeg/jpeg-core.c 		*ctrl_val = ctx->out_q.fmt->subsampling;
ctx              1919 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              1923 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_lock_irqsave(&ctx->jpeg->slock, flags);
ctx              1926 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ret = s5p_jpeg_adjust_subs_ctrl(ctx, &ctrl->val);
ctx              1928 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_unlock_irqrestore(&ctx->jpeg->slock, flags);
ctx              1934 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              1937 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_lock_irqsave(&ctx->jpeg->slock, flags);
ctx              1941 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->compr_quality = ctrl->val;
ctx              1944 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->restart_interval = ctrl->val;
ctx              1947 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->subsampling = ctrl->val;
ctx              1951 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_unlock_irqrestore(&ctx->jpeg->slock, flags);
ctx              1961 drivers/media/platform/s5p-jpeg/jpeg-core.c static int s5p_jpeg_controls_create(struct s5p_jpeg_ctx *ctx)
ctx              1967 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 3);
ctx              1969 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              1970 drivers/media/platform/s5p-jpeg/jpeg-core.c 		v4l2_ctrl_new_std(&ctx->ctrl_handler, &s5p_jpeg_ctrl_ops,
ctx              1974 drivers/media/platform/s5p-jpeg/jpeg-core.c 		v4l2_ctrl_new_std(&ctx->ctrl_handler, &s5p_jpeg_ctrl_ops,
ctx              1977 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->jpeg->variant->version == SJPEG_S5P)
ctx              1981 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ctrl = v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &s5p_jpeg_ctrl_ops,
ctx              1986 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->ctrl_handler.error) {
ctx              1987 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ret = ctx->ctrl_handler.error;
ctx              1991 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_DECODE)
ctx              1995 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ret = v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx              2002 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
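
Note: controls_create above follows the standard v4l2_ctrl handler lifecycle: init, register controls, check the handler's accumulated error, push defaults via setup, and free on failure. A sketch reusing my_ctx/my_ctrl_ops from the previous note (the ranges and defaults are illustrative):

	static int my_controls_create(struct my_ctx *ctx)
	{
		int ret;

		v4l2_ctrl_handler_init(&ctx->ctrl_handler, 2);
		v4l2_ctrl_new_std(&ctx->ctrl_handler, &my_ctrl_ops,
				  V4L2_CID_JPEG_COMPRESSION_QUALITY, 0, 100, 1, 75);
		v4l2_ctrl_new_std(&ctx->ctrl_handler, &my_ctrl_ops,
				  V4L2_CID_JPEG_RESTART_INTERVAL, 0, 0xffff, 1, 0);

		/* v4l2_ctrl_new_std() failures accumulate in handler->error */
		if (ctx->ctrl_handler.error) {
			ret = ctx->ctrl_handler.error;
			goto error;
		}

		/* run every s_ctrl once so ctx state matches the defaults */
		ret = v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
		if (ret < 0)
			goto error;
		return 0;

	error:
		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
		return ret;
	}
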
ctx              2044 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = priv;
ctx              2045 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2049 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_lock_irqsave(&ctx->jpeg->slock, flags);
ctx              2051 drivers/media/platform/s5p-jpeg/jpeg-core.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              2052 drivers/media/platform/s5p-jpeg/jpeg-core.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2058 drivers/media/platform/s5p-jpeg/jpeg-core.c 	s5p_jpeg_proc_mode(jpeg->regs, ctx->mode);
ctx              2059 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              2060 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.fmt->fourcc == V4L2_PIX_FMT_RGB565)
ctx              2066 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_subsampling_mode(jpeg->regs, ctx->subsampling);
ctx              2067 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_dri(jpeg->regs, ctx->restart_interval);
ctx              2068 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_x(jpeg->regs, ctx->out_q.w);
ctx              2069 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_y(jpeg->regs, ctx->out_q.h);
ctx              2074 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_enc_stream_int(jpeg->regs, ctx->cap_q.size);
ctx              2091 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_set_qtbl_lum(jpeg->regs, ctx->compr_quality);
ctx              2092 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_set_qtbl_chr(jpeg->regs, ctx->compr_quality);
ctx              2110 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->cap_q.fmt->fourcc == V4L2_PIX_FMT_YUYV)
ctx              2120 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_unlock_irqrestore(&ctx->jpeg->slock, flags);
ctx              2123 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_set_img_addr(struct s5p_jpeg_ctx *ctx)
ctx              2125 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2134 drivers/media/platform/s5p-jpeg/jpeg-core.c 	pix_size = ctx->cap_q.w * ctx->cap_q.h;
ctx              2136 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              2137 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              2138 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = ctx->out_q.fmt;
ctx              2139 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.w % 2 && fmt->h_align > 0)
ctx              2140 drivers/media/platform/s5p-jpeg/jpeg-core.c 			padding_bytes = ctx->out_q.h;
ctx              2142 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = ctx->cap_q.fmt;
ctx              2143 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2161 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos4_jpeg_set_jpeg_addr(struct s5p_jpeg_ctx *ctx)
ctx              2163 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2167 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE)
ctx              2168 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2170 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              2174 drivers/media/platform/s5p-jpeg/jpeg-core.c 	    ctx->mode == S5P_JPEG_DECODE)
ctx              2175 drivers/media/platform/s5p-jpeg/jpeg-core.c 		jpeg_addr += ctx->out_q.sos;
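
Note: set_jpeg_addr above selects the bitstream buffer by direction (destination when encoding, source when decoding) and, on the decode path, advances past the header to the recorded start-of-scan offset. A sketch assuming the videobuf2 dma-contig allocator and a hypothetical my_jpeg_ctx mirroring the fields used here:

	#include <media/v4l2-fh.h>
	#include <media/v4l2-mem2mem.h>
	#include <media/videobuf2-dma-contig.h>

	struct my_jpeg_ctx {
		enum { MY_JPEG_ENCODE, MY_JPEG_DECODE } mode;
		struct { unsigned int sos; } out_q;	/* start-of-scan offset */
		struct v4l2_fh fh;			/* fh.m2m_ctx holds the queues */
	};

	static dma_addr_t my_jpeg_dma_addr(struct my_jpeg_ctx *ctx)
	{
		struct vb2_v4l2_buffer *vb;
		dma_addr_t addr;

		if (ctx->mode == MY_JPEG_ENCODE)
			vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
		else
			vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);

		addr = vb2_dma_contig_plane_dma_addr(&vb->vb2_buf, 0);

		/* decode: the CPU already parsed the header, so point the IP
		 * straight at the entropy-coded scan data */
		if (ctx->mode == MY_JPEG_DECODE)
			addr += ctx->out_q.sos;

		return addr;
	}
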
ctx              2205 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = priv;
ctx              2206 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2212 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              2223 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_qtbl_lum(jpeg->regs, ctx->compr_quality);
ctx              2224 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_qtbl_chr(jpeg->regs, ctx->compr_quality);
ctx              2227 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->compr_quality);
ctx              2228 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_stream_size(jpeg->regs, ctx->cap_q.w,
ctx              2229 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->cap_q.h);
ctx              2231 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->jpeg->variant->version == SJPEG_EXYNOS4) {
ctx              2233 drivers/media/platform/s5p-jpeg/jpeg-core.c 						     ctx->subsampling);
ctx              2235 drivers/media/platform/s5p-jpeg/jpeg-core.c 						 ctx->out_q.fmt->fourcc);
ctx              2238 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->subsampling);
ctx              2240 drivers/media/platform/s5p-jpeg/jpeg-core.c 						    ctx->out_q.fmt->fourcc);
ctx              2242 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_img_addr(ctx);
ctx              2243 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_jpeg_addr(ctx);
ctx              2245 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->out_q.fmt->fourcc);
ctx              2250 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_img_addr(ctx);
ctx              2251 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos4_jpeg_set_jpeg_addr(ctx);
ctx              2254 drivers/media/platform/s5p-jpeg/jpeg-core.c 			exynos4_jpeg_parse_huff_tbl(ctx);
ctx              2255 drivers/media/platform/s5p-jpeg/jpeg-core.c 			exynos4_jpeg_parse_decode_h_tbl(ctx);
ctx              2257 drivers/media/platform/s5p-jpeg/jpeg-core.c 			exynos4_jpeg_parse_q_tbl(ctx);
ctx              2258 drivers/media/platform/s5p-jpeg/jpeg-core.c 			exynos4_jpeg_parse_decode_q_tbl(ctx);
ctx              2262 drivers/media/platform/s5p-jpeg/jpeg-core.c 			exynos4_jpeg_set_stream_size(jpeg->regs, ctx->cap_q.w,
ctx              2263 drivers/media/platform/s5p-jpeg/jpeg-core.c 					ctx->cap_q.h);
ctx              2265 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->subsampling);
ctx              2267 drivers/media/platform/s5p-jpeg/jpeg-core.c 						    ctx->cap_q.fmt->fourcc);
ctx              2268 drivers/media/platform/s5p-jpeg/jpeg-core.c 			bitstream_size = DIV_ROUND_UP(ctx->out_q.size, 16);
ctx              2271 drivers/media/platform/s5p-jpeg/jpeg-core.c 						 ctx->cap_q.fmt->fourcc);
ctx              2272 drivers/media/platform/s5p-jpeg/jpeg-core.c 			bitstream_size = DIV_ROUND_UP(ctx->out_q.size, 32);
ctx              2279 drivers/media/platform/s5p-jpeg/jpeg-core.c 	exynos4_jpeg_set_enc_dec_mode(jpeg->regs, ctx->mode);
ctx              2284 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos3250_jpeg_set_img_addr(struct s5p_jpeg_ctx *ctx)
ctx              2286 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2292 drivers/media/platform/s5p-jpeg/jpeg-core.c 	pix_size = ctx->cap_q.w * ctx->cap_q.h;
ctx              2294 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              2295 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              2296 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = ctx->out_q.fmt;
ctx              2298 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2299 drivers/media/platform/s5p-jpeg/jpeg-core.c 		fmt = ctx->cap_q.fmt;
ctx              2317 drivers/media/platform/s5p-jpeg/jpeg-core.c static void exynos3250_jpeg_set_jpeg_addr(struct s5p_jpeg_ctx *ctx)
ctx              2319 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2323 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE)
ctx              2324 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx              2326 drivers/media/platform/s5p-jpeg/jpeg-core.c 		vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              2334 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = priv;
ctx              2335 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg *jpeg = ctx->jpeg;
ctx              2338 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_lock_irqsave(&ctx->jpeg->slock, flags);
ctx              2344 drivers/media/platform/s5p-jpeg/jpeg-core.c 	exynos3250_jpeg_proc_mode(jpeg->regs, ctx->mode);
ctx              2346 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_ENCODE) {
ctx              2348 drivers/media/platform/s5p-jpeg/jpeg-core.c 					      ctx->out_q.fmt->fourcc);
ctx              2349 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_dri(jpeg->regs, ctx->restart_interval);
ctx              2355 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_set_qtbl_lum(jpeg->regs, ctx->compr_quality);
ctx              2356 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_set_qtbl_chr(jpeg->regs, ctx->compr_quality);
ctx              2381 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_x(jpeg->regs, ctx->crop_rect.width);
ctx              2382 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_y(jpeg->regs, ctx->crop_rect.height);
ctx              2383 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_stride(jpeg->regs, ctx->out_q.fmt->fourcc,
ctx              2384 drivers/media/platform/s5p-jpeg/jpeg-core.c 								ctx->out_q.w);
ctx              2385 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_offset(jpeg->regs, ctx->crop_rect.left,
ctx              2386 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->crop_rect.top);
ctx              2387 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_img_addr(ctx);
ctx              2388 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_jpeg_addr(ctx);
ctx              2389 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_subsampling_mode(jpeg->regs, ctx->subsampling);
ctx              2392 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_enc_stream_bound(jpeg->regs, ctx->cap_q.size);
ctx              2394 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.fmt->fourcc == V4L2_PIX_FMT_RGB565 ||
ctx              2395 drivers/media/platform/s5p-jpeg/jpeg-core.c 		    ctx->out_q.fmt->fourcc == V4L2_PIX_FMT_RGB565X ||
ctx              2396 drivers/media/platform/s5p-jpeg/jpeg-core.c 		    ctx->out_q.fmt->fourcc == V4L2_PIX_FMT_RGB32)
ctx              2399 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_img_addr(ctx);
ctx              2400 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_set_jpeg_addr(ctx);
ctx              2401 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_stride(jpeg->regs, ctx->cap_q.fmt->fourcc,
ctx              2402 drivers/media/platform/s5p-jpeg/jpeg-core.c 								ctx->cap_q.w);
ctx              2405 drivers/media/platform/s5p-jpeg/jpeg-core.c 							ctx->scale_factor);
ctx              2406 drivers/media/platform/s5p-jpeg/jpeg-core.c 		exynos3250_jpeg_dec_stream_size(jpeg->regs, ctx->out_q.size);
ctx              2408 drivers/media/platform/s5p-jpeg/jpeg-core.c 						ctx->cap_q.fmt->fourcc);
ctx              2414 drivers/media/platform/s5p-jpeg/jpeg-core.c 	exynos3250_jpeg_coef(jpeg->regs, ctx->mode);
ctx              2420 drivers/media/platform/s5p-jpeg/jpeg-core.c 	spin_unlock_irqrestore(&ctx->jpeg->slock, flags);
ctx              2425 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = priv;
ctx              2427 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_DECODE) {
ctx              2432 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->state == JPEGCTX_RESOLUTION_CHANGE)
ctx              2435 drivers/media/platform/s5p-jpeg/jpeg-core.c 		return ctx->hdr_parsed;
ctx              2466 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = vb2_get_drv_priv(vq);
ctx              2470 drivers/media/platform/s5p-jpeg/jpeg-core.c 	q_data = get_q_data(ctx, vq->type);
ctx              2479 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_DECODE)
ctx              2491 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              2494 drivers/media/platform/s5p-jpeg/jpeg-core.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              2509 drivers/media/platform/s5p-jpeg/jpeg-core.c static void s5p_jpeg_set_capture_queue_data(struct s5p_jpeg_ctx *ctx)
ctx              2511 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_q_data *q_data = &ctx->cap_q;
ctx              2513 drivers/media/platform/s5p-jpeg/jpeg-core.c 	q_data->w = ctx->out_q.w;
ctx              2514 drivers/media/platform/s5p-jpeg/jpeg-core.c 	q_data->h = ctx->out_q.h;
ctx              2525 drivers/media/platform/s5p-jpeg/jpeg-core.c 	jpeg_bound_align_image(ctx, &q_data->w, S5P_JPEG_MIN_WIDTH,
ctx              2536 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              2538 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->mode == S5P_JPEG_DECODE &&
ctx              2548 drivers/media/platform/s5p-jpeg/jpeg-core.c 		dst_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx              2550 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ori_w = ctx->out_q.w;
ctx              2551 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ori_h = ctx->out_q.h;
ctx              2553 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->hdr_parsed = s5p_jpeg_parse_hdr(&ctx->out_q,
ctx              2555 drivers/media/platform/s5p-jpeg/jpeg-core.c 		     min((unsigned long)ctx->out_q.size,
ctx              2556 drivers/media/platform/s5p-jpeg/jpeg-core.c 			 vb2_get_plane_payload(vb, 0)), ctx);
ctx              2557 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (!ctx->hdr_parsed) {
ctx              2567 drivers/media/platform/s5p-jpeg/jpeg-core.c 		if (ctx->out_q.w != ori_w || ctx->out_q.h != ori_h) {
ctx              2568 drivers/media/platform/s5p-jpeg/jpeg-core.c 			v4l2_event_queue_fh(&ctx->fh, &ev_src_ch);
ctx              2570 drivers/media/platform/s5p-jpeg/jpeg-core.c 				ctx->state = JPEGCTX_RESOLUTION_CHANGE;
ctx              2572 drivers/media/platform/s5p-jpeg/jpeg-core.c 				s5p_jpeg_set_capture_queue_data(ctx);
ctx              2576 drivers/media/platform/s5p-jpeg/jpeg-core.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
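
Note: buf_queue above re-parses the JPEG header on each queued output buffer and, when the parsed resolution differs from the configured one, raises the standard source-change event before updating (or deferring) the capture queue. The event itself is plain V4L2:

	#include <media/v4l2-event.h>

	/* queued with v4l2_event_queue_fh(&ctx->fh, &ev_src_ch); user space
	 * polling for exceptions learns it must reallocate capture buffers */
	static const struct v4l2_event ev_src_ch = {
		.type = V4L2_EVENT_SOURCE_CHANGE,
		.u.src_change.changes = V4L2_EVENT_SRC_CH_RESOLUTION,
	};
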
ctx              2581 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = vb2_get_drv_priv(q);
ctx              2584 drivers/media/platform/s5p-jpeg/jpeg-core.c 	ret = pm_runtime_get_sync(ctx->jpeg->dev);
ctx              2591 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = vb2_get_drv_priv(q);
ctx              2598 drivers/media/platform/s5p-jpeg/jpeg-core.c 	if (ctx->state == JPEGCTX_RESOLUTION_CHANGE &&
ctx              2600 drivers/media/platform/s5p-jpeg/jpeg-core.c 		s5p_jpeg_set_capture_queue_data(ctx);
ctx              2601 drivers/media/platform/s5p-jpeg/jpeg-core.c 		ctx->state = JPEGCTX_RUNNING;
ctx              2604 drivers/media/platform/s5p-jpeg/jpeg-core.c 	pm_runtime_put(ctx->jpeg->dev);
ctx              2620 drivers/media/platform/s5p-jpeg/jpeg-core.c 	struct s5p_jpeg_ctx *ctx = priv;
ctx              2625 drivers/media/platform/s5p-jpeg/jpeg-core.c 	src_vq->drv_priv = ctx;
ctx              2630 drivers/media/platform/s5p-jpeg/jpeg-core.c 	src_vq->lock = &ctx->jpeg->lock;
ctx              2631 drivers/media/platform/s5p-jpeg/jpeg-core.c 	src_vq->dev = ctx->jpeg->dev;
ctx              2639 drivers/media/platform/s5p-jpeg/jpeg-core.c 	dst_vq->drv_priv = ctx;
ctx              2644 drivers/media/platform/s5p-jpeg/jpeg-core.c 	dst_vq->lock = &ctx->jpeg->lock;
ctx              2645 drivers/media/platform/s5p-jpeg/jpeg-core.c 	dst_vq->dev = ctx->jpeg->dev;
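
Note: queue_init above configures both halves of the mem2mem pair identically except for direction, sharing one mutex and struct device so vb2 serializes them together. A condensed sketch of that shape; my_qops (queue_setup/buf_queue/...), struct my_buf (which embeds vb2_v4l2_buffer), and the ctx->lock/ctx->dev members are assumptions:

	static int my_queue_init(void *priv, struct vb2_queue *src_vq,
				 struct vb2_queue *dst_vq)
	{
		struct my_jpeg_ctx *ctx = priv;
		int ret;

		src_vq->type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
		src_vq->io_modes = VB2_MMAP | VB2_USERPTR;
		src_vq->drv_priv = ctx;
		src_vq->buf_struct_size = sizeof(struct my_buf);
		src_vq->ops = &my_qops;
		src_vq->mem_ops = &vb2_dma_contig_memops;
		src_vq->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
		src_vq->lock = &ctx->lock;	/* one mutex for both queues */
		src_vq->dev = ctx->dev;

		/* capture side: same configuration, opposite direction
		 * (copied before init, so only config fields carry over) */
		*dst_vq = *src_vq;
		dst_vq->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

		ret = vb2_queue_init(src_vq);
		if (ret)
			return ret;
		return vb2_queue_init(dst_vq);
	}
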
ctx                49 drivers/media/platform/s5p-mfc/s5p_mfc.c void clear_work_bit(struct s5p_mfc_ctx *ctx)
ctx                51 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                54 drivers/media/platform/s5p-mfc/s5p_mfc.c 	__clear_bit(ctx->num, &dev->ctx_work_bits);
ctx                59 drivers/media/platform/s5p-mfc/s5p_mfc.c void set_work_bit(struct s5p_mfc_ctx *ctx)
ctx                61 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                64 drivers/media/platform/s5p-mfc/s5p_mfc.c 	__set_bit(ctx->num, &dev->ctx_work_bits);
ctx                69 drivers/media/platform/s5p-mfc/s5p_mfc.c void clear_work_bit_irqsave(struct s5p_mfc_ctx *ctx)
ctx                71 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                75 drivers/media/platform/s5p-mfc/s5p_mfc.c 	__clear_bit(ctx->num, &dev->ctx_work_bits);
ctx                80 drivers/media/platform/s5p-mfc/s5p_mfc.c void set_work_bit_irqsave(struct s5p_mfc_ctx *ctx)
ctx                82 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                86 drivers/media/platform/s5p-mfc/s5p_mfc.c 	__set_bit(ctx->num, &dev->ctx_work_bits);
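
Note: the four work-bit helpers above share one shape: toggle this context's bit in dev->ctx_work_bits under the device condition lock, with _irqsave variants for callers that run with interrupts enabled. A hedged reconstruction (the spinlock member name is an assumption); the clear_work_bit*() helpers mirror it with __clear_bit():

	void my_set_work_bit_irqsave(struct my_mfc_ctx *ctx)
	{
		struct my_mfc_dev *dev = ctx->dev;
		unsigned long flags;

		spin_lock_irqsave(&dev->condlock, flags);
		/* non-atomic __set_bit() is fine: the lock serializes writers */
		__set_bit(ctx->num, &dev->ctx_work_bits);
		spin_unlock_irqrestore(&dev->condlock, flags);
	}
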
ctx                93 drivers/media/platform/s5p-mfc/s5p_mfc.c 	int ctx;
ctx                96 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx = dev->curr_ctx;
ctx                98 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx = (ctx + 1) % MFC_NUM_CONTEXTS;
ctx                99 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx == dev->curr_ctx) {
ctx               100 drivers/media/platform/s5p-mfc/s5p_mfc.c 			if (!test_bit(ctx, &dev->ctx_work_bits))
ctx               101 drivers/media/platform/s5p-mfc/s5p_mfc.c 				ctx = -EAGAIN;
ctx               104 drivers/media/platform/s5p-mfc/s5p_mfc.c 	} while (!test_bit(ctx, &dev->ctx_work_bits));
ctx               107 drivers/media/platform/s5p-mfc/s5p_mfc.c 	return ctx;
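
Note: reassembled, the get-new-context fragments above form a round-robin scan of the work-bit mask (locking elided here; the full function holds the device lock around the scan):

	static int my_get_new_ctx(struct my_mfc_dev *dev)
	{
		int ctx = dev->curr_ctx;

		do {
			ctx = (ctx + 1) % MFC_NUM_CONTEXTS;	/* wrap around */
			if (ctx == dev->curr_ctx) {
				/* a full lap is done; if even the current
				 * context has no pending work, give up */
				if (!test_bit(ctx, &dev->ctx_work_bits))
					ctx = -EAGAIN;
				break;
			}
		} while (!test_bit(ctx, &dev->ctx_work_bits));

		return ctx;
	}
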
ctx               111 drivers/media/platform/s5p-mfc/s5p_mfc.c static void wake_up_ctx(struct s5p_mfc_ctx *ctx, unsigned int reason,
ctx               114 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_cond = 1;
ctx               115 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_type = reason;
ctx               116 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_err = err;
ctx               117 drivers/media/platform/s5p-mfc/s5p_mfc.c 	wake_up(&ctx->queue);
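
Note: wake_up_ctx above publishes the interrupt reason and error, then wakes ctx->queue. Its blocking counterpart (s5p_mfc_wait_for_done_ctx, used elsewhere in this listing) pairs with it roughly as below; the timeout value and exact checks are assumptions:

	static int my_wait_for_done_ctx(struct my_mfc_ctx *ctx, unsigned int command)
	{
		/* sleeps until the ISR sets int_cond via wake_up_ctx() */
		if (!wait_event_timeout(ctx->queue, ctx->int_cond,
					msecs_to_jiffies(1000)))
			return -ETIMEDOUT;

		ctx->int_cond = 0;
		if (ctx->int_type != command || ctx->int_err)
			return -EIO;
		return 0;
	}
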
ctx               167 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx;
ctx               185 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx = dev->ctx[i];
ctx               186 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (!ctx)
ctx               188 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_ERROR;
ctx               189 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_cleanup_queue(&ctx->dst_queue, &ctx->vq_dst);
ctx               190 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_cleanup_queue(&ctx->src_queue, &ctx->vq_src);
ctx               191 drivers/media/platform/s5p-mfc/s5p_mfc.c 		clear_work_bit(ctx);
ctx               192 drivers/media/platform/s5p-mfc/s5p_mfc.c 		wake_up_ctx(ctx, S5P_MFC_R2H_CMD_ERR_RET, 0);
ctx               219 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_frame_all_extracted(struct s5p_mfc_ctx *ctx)
ctx               222 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               224 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->state = MFCINST_FINISHED;
ctx               225 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->sequence++;
ctx               226 drivers/media/platform/s5p-mfc/s5p_mfc.c 	while (!list_empty(&ctx->dst_queue)) {
ctx               227 drivers/media/platform/s5p-mfc/s5p_mfc.c 		dst_buf = list_entry(ctx->dst_queue.next,
ctx               235 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->dst_queue_cnt--;
ctx               236 drivers/media/platform/s5p-mfc/s5p_mfc.c 		dst_buf->b->sequence = (ctx->sequence++);
ctx               238 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (s5p_mfc_hw_call(dev->mfc_ops, get_pic_type_top, ctx) ==
ctx               239 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_hw_call(dev->mfc_ops, get_pic_type_bot, ctx))
ctx               245 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->dec_dst_flag &= ~(1 << dst_buf->b->vb2_buf.index);
ctx               250 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_frame_copy_time(struct s5p_mfc_ctx *ctx)
ctx               252 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               265 drivers/media/platform/s5p-mfc/s5p_mfc.c 	src_buf = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx               266 drivers/media/platform/s5p-mfc/s5p_mfc.c 	list_for_each_entry(dst_buf, &ctx->dst_queue, list) {
ctx               302 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_frame_new(struct s5p_mfc_ctx *ctx, unsigned int err)
ctx               304 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               312 drivers/media/platform/s5p-mfc/s5p_mfc.c 			get_disp_frame_type, ctx);
ctx               319 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (!ctx->after_packed_pb)
ctx               320 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->sequence++;
ctx               321 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->after_packed_pb = 0;
ctx               324 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->sequence++;
ctx               327 drivers/media/platform/s5p-mfc/s5p_mfc.c 	list_for_each_entry(dst_buf, &ctx->dst_queue, list) {
ctx               333 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->dst_queue_cnt--;
ctx               334 drivers/media/platform/s5p-mfc/s5p_mfc.c 			dst_buf->b->sequence = ctx->sequence;
ctx               336 drivers/media/platform/s5p-mfc/s5p_mfc.c 					get_pic_type_top, ctx) ==
ctx               338 drivers/media/platform/s5p-mfc/s5p_mfc.c 					get_pic_type_bot, ctx))
ctx               344 drivers/media/platform/s5p-mfc/s5p_mfc.c 						ctx->luma_size);
ctx               346 drivers/media/platform/s5p-mfc/s5p_mfc.c 						ctx->chroma_size);
ctx               348 drivers/media/platform/s5p-mfc/s5p_mfc.c 							&ctx->dec_dst_flag);
ctx               359 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_frame(struct s5p_mfc_ctx *ctx,
ctx               362 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               376 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (ctx->state == MFCINST_RES_CHANGE_INIT)
ctx               377 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_RES_CHANGE_FLUSH;
ctx               380 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_RES_CHANGE_INIT;
ctx               382 drivers/media/platform/s5p-mfc/s5p_mfc.c 		wake_up_ctx(ctx, reason, err);
ctx               388 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (ctx->dpb_flush_flag)
ctx               389 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->dpb_flush_flag = 0;
ctx               393 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->state == MFCINST_RES_CHANGE_FLUSH) {
ctx               400 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_handle_frame_all_extracted(ctx);
ctx               401 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->state = MFCINST_RES_CHANGE_END;
ctx               402 drivers/media/platform/s5p-mfc/s5p_mfc.c 			v4l2_event_queue_fh(&ctx->fh, &ev_src_ch);
ctx               406 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_handle_frame_all_extracted(ctx);
ctx               411 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_handle_frame_copy_time(ctx);
ctx               416 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_handle_frame_new(ctx, err);
ctx               422 drivers/media/platform/s5p-mfc/s5p_mfc.c 		&& !list_empty(&ctx->src_queue)) {
ctx               423 drivers/media/platform/s5p-mfc/s5p_mfc.c 		src_buf = list_entry(ctx->src_queue.next, struct s5p_mfc_buf,
ctx               425 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->consumed_stream += s5p_mfc_hw_call(dev->mfc_ops,
ctx               427 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->codec_mode != S5P_MFC_CODEC_H264_DEC &&
ctx               428 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->codec_mode != S5P_MFC_CODEC_VP8_DEC &&
ctx               429 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->consumed_stream + STUFF_BYTE <
ctx               433 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->after_packed_pb = 1;
ctx               436 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->consumed_stream = 0;
ctx               438 drivers/media/platform/s5p-mfc/s5p_mfc.c 				ctx->state = MFCINST_FINISHING;
ctx               440 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->src_queue_cnt--;
ctx               450 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if ((ctx->src_queue_cnt == 0 && ctx->state != MFCINST_FINISHING)
ctx               451 drivers/media/platform/s5p-mfc/s5p_mfc.c 				    || ctx->dst_queue_cnt < ctx->pb_count)
ctx               452 drivers/media/platform/s5p-mfc/s5p_mfc.c 		clear_work_bit(ctx);
ctx               454 drivers/media/platform/s5p-mfc/s5p_mfc.c 	wake_up_ctx(ctx, reason, err);
ctx               466 drivers/media/platform/s5p-mfc/s5p_mfc.c 		struct s5p_mfc_ctx *ctx, unsigned int reason, unsigned int err)
ctx               470 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (ctx) {
ctx               472 drivers/media/platform/s5p-mfc/s5p_mfc.c 		switch (ctx->state) {
ctx               481 drivers/media/platform/s5p-mfc/s5p_mfc.c 			clear_work_bit(ctx);
ctx               482 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->state = MFCINST_ERROR;
ctx               484 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_cleanup_queue(&ctx->dst_queue, &ctx->vq_dst);
ctx               486 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_cleanup_queue(&ctx->src_queue, &ctx->vq_src);
ctx               487 drivers/media/platform/s5p-mfc/s5p_mfc.c 			wake_up_ctx(ctx, reason, err);
ctx               490 drivers/media/platform/s5p-mfc/s5p_mfc.c 			clear_work_bit(ctx);
ctx               491 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->state = MFCINST_ERROR;
ctx               492 drivers/media/platform/s5p-mfc/s5p_mfc.c 			wake_up_ctx(ctx, reason, err);
ctx               503 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_seq_done(struct s5p_mfc_ctx *ctx,
ctx               508 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (!ctx)
ctx               510 drivers/media/platform/s5p-mfc/s5p_mfc.c 	dev = ctx->dev;
ctx               511 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (ctx->c_ops->post_seq_start) {
ctx               512 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->c_ops->post_seq_start(ctx))
ctx               515 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->img_width = s5p_mfc_hw_call(dev->mfc_ops, get_img_width,
ctx               517 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->img_height = s5p_mfc_hw_call(dev->mfc_ops, get_img_height,
ctx               520 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_hw_call(dev->mfc_ops, dec_calc_dpb_size, ctx);
ctx               522 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->pb_count = s5p_mfc_hw_call(dev->mfc_ops, get_dpb_count,
ctx               524 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->mv_count = s5p_mfc_hw_call(dev->mfc_ops, get_mv_count,
ctx               527 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->scratch_buf_size = s5p_mfc_hw_call(dev->mfc_ops,
ctx               529 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->img_width == 0 || ctx->img_height == 0)
ctx               530 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->state = MFCINST_ERROR;
ctx               532 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->state = MFCINST_HEAD_PARSED;
ctx               534 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if ((ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
ctx               535 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->codec_mode == S5P_MFC_CODEC_H264_MVC_DEC) &&
ctx               536 drivers/media/platform/s5p-mfc/s5p_mfc.c 				!list_empty(&ctx->src_queue)) {
ctx               538 drivers/media/platform/s5p-mfc/s5p_mfc.c 			src_buf = list_entry(ctx->src_queue.next,
ctx               543 drivers/media/platform/s5p-mfc/s5p_mfc.c 				ctx->head_processed = 0;
ctx               545 drivers/media/platform/s5p-mfc/s5p_mfc.c 				ctx->head_processed = 1;
ctx               547 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->head_processed = 1;
ctx               551 drivers/media/platform/s5p-mfc/s5p_mfc.c 	clear_work_bit(ctx);
ctx               555 drivers/media/platform/s5p-mfc/s5p_mfc.c 	wake_up_ctx(ctx, reason, err);
ctx               559 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_init_buffers(struct s5p_mfc_ctx *ctx,
ctx               565 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (!ctx)
ctx               567 drivers/media/platform/s5p-mfc/s5p_mfc.c 	dev = ctx->dev;
ctx               569 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_type = reason;
ctx               570 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_err = err;
ctx               571 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_cond = 1;
ctx               572 drivers/media/platform/s5p-mfc/s5p_mfc.c 	clear_work_bit(ctx);
ctx               574 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_RUNNING;
ctx               575 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (!ctx->dpb_flush_flag && ctx->head_processed) {
ctx               576 drivers/media/platform/s5p-mfc/s5p_mfc.c 			if (!list_empty(&ctx->src_queue)) {
ctx               577 drivers/media/platform/s5p-mfc/s5p_mfc.c 				src_buf = list_entry(ctx->src_queue.next,
ctx               580 drivers/media/platform/s5p-mfc/s5p_mfc.c 				ctx->src_queue_cnt--;
ctx               585 drivers/media/platform/s5p-mfc/s5p_mfc.c 			ctx->dpb_flush_flag = 0;
ctx               591 drivers/media/platform/s5p-mfc/s5p_mfc.c 		wake_up(&ctx->queue);
ctx               598 drivers/media/platform/s5p-mfc/s5p_mfc.c 		wake_up(&ctx->queue);
ctx               602 drivers/media/platform/s5p-mfc/s5p_mfc.c static void s5p_mfc_handle_stream_complete(struct s5p_mfc_ctx *ctx)
ctx               604 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               609 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->state = MFCINST_FINISHED;
ctx               611 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (!list_empty(&ctx->dst_queue)) {
ctx               612 drivers/media/platform/s5p-mfc/s5p_mfc.c 		mb_entry = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf,
ctx               615 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->dst_queue_cnt--;
ctx               620 drivers/media/platform/s5p-mfc/s5p_mfc.c 	clear_work_bit(ctx);
ctx               625 drivers/media/platform/s5p-mfc/s5p_mfc.c 	wake_up(&ctx->queue);
ctx               633 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx;
ctx               641 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx = dev->ctx[dev->curr_ctx];
ctx               649 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->state == MFCINST_RUNNING &&
ctx               655 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_handle_frame(ctx, reason, err);
ctx               657 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_handle_error(dev, ctx, reason, err);
ctx               664 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->c_ops->post_frame_start) {
ctx               665 drivers/media/platform/s5p-mfc/s5p_mfc.c 			if (ctx->c_ops->post_frame_start(ctx))
ctx               668 drivers/media/platform/s5p-mfc/s5p_mfc.c 			if (ctx->state == MFCINST_FINISHING &&
ctx               669 drivers/media/platform/s5p-mfc/s5p_mfc.c 						list_empty(&ctx->ref_queue)) {
ctx               671 drivers/media/platform/s5p-mfc/s5p_mfc.c 				s5p_mfc_handle_stream_complete(ctx);
ctx               677 drivers/media/platform/s5p-mfc/s5p_mfc.c 			wake_up_ctx(ctx, reason, err);
ctx               680 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_handle_frame(ctx, reason, err);
ctx               685 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_handle_seq_done(ctx, reason, err);
ctx               689 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->inst_no = s5p_mfc_hw_call(dev->mfc_ops, get_inst_no, dev);
ctx               690 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_GOT_INST;
ctx               694 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->inst_no = MFC_NO_INSTANCE_SET;
ctx               695 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_FREE;
ctx               702 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx)
ctx               703 drivers/media/platform/s5p-mfc/s5p_mfc.c 			clear_work_bit(ctx);
ctx               711 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_handle_init_buffers(ctx, reason, err);
ctx               716 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->int_type = reason;
ctx               717 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->int_err = err;
ctx               718 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_handle_stream_complete(ctx);
ctx               722 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->state = MFCINST_RUNNING;
ctx               734 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_type = reason;
ctx               735 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_err = err;
ctx               736 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->int_cond = 1;
ctx               741 drivers/media/platform/s5p-mfc/s5p_mfc.c 	clear_work_bit(ctx);
ctx               742 drivers/media/platform/s5p-mfc/s5p_mfc.c 	wake_up(&ctx->queue);
ctx               755 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx = NULL;
ctx               764 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               765 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (!ctx) {
ctx               769 drivers/media/platform/s5p-mfc/s5p_mfc.c 	init_waitqueue_head(&ctx->queue);
ctx               770 drivers/media/platform/s5p-mfc/s5p_mfc.c 	v4l2_fh_init(&ctx->fh, vdev);
ctx               771 drivers/media/platform/s5p-mfc/s5p_mfc.c 	file->private_data = &ctx->fh;
ctx               772 drivers/media/platform/s5p-mfc/s5p_mfc.c 	v4l2_fh_add(&ctx->fh);
ctx               773 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->dev = dev;
ctx               774 drivers/media/platform/s5p-mfc/s5p_mfc.c 	INIT_LIST_HEAD(&ctx->src_queue);
ctx               775 drivers/media/platform/s5p-mfc/s5p_mfc.c 	INIT_LIST_HEAD(&ctx->dst_queue);
ctx               776 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->src_queue_cnt = 0;
ctx               777 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->dst_queue_cnt = 0;
ctx               779 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->num = 0;
ctx               780 drivers/media/platform/s5p-mfc/s5p_mfc.c 	while (dev->ctx[ctx->num]) {
ctx               781 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->num++;
ctx               782 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->num >= MFC_NUM_CONTEXTS) {
ctx               789 drivers/media/platform/s5p-mfc/s5p_mfc.c 	clear_work_bit_irqsave(ctx);
ctx               790 drivers/media/platform/s5p-mfc/s5p_mfc.c 	dev->ctx[ctx->num] = ctx;
ctx               792 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->type = MFCINST_DECODER;
ctx               793 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->c_ops = get_dec_codec_ops();
ctx               794 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_dec_init(ctx);
ctx               796 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ret = s5p_mfc_dec_ctrls_setup(ctx);
ctx               802 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->type = MFCINST_ENCODER;
ctx               803 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->c_ops = get_enc_codec_ops();
ctx               805 drivers/media/platform/s5p-mfc/s5p_mfc.c 		INIT_LIST_HEAD(&ctx->ref_queue);
ctx               806 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->ref_queue_cnt = 0;
ctx               807 drivers/media/platform/s5p-mfc/s5p_mfc.c 		s5p_mfc_enc_init(ctx);
ctx               809 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ret = s5p_mfc_enc_ctrls_setup(ctx);
ctx               818 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               819 drivers/media/platform/s5p-mfc/s5p_mfc.c 	ctx->inst_no = MFC_NO_INSTANCE_SET;
ctx               843 drivers/media/platform/s5p-mfc/s5p_mfc.c 	q = &ctx->vq_dst;
ctx               845 drivers/media/platform/s5p-mfc/s5p_mfc.c 	q->drv_priv = &ctx->fh;
ctx               870 drivers/media/platform/s5p-mfc/s5p_mfc.c 	q = &ctx->vq_src;
ctx               872 drivers/media/platform/s5p-mfc/s5p_mfc.c 	q->drv_priv = &ctx->fh;
ctx               920 drivers/media/platform/s5p-mfc/s5p_mfc.c 	s5p_mfc_dec_ctrls_delete(ctx);
ctx               922 drivers/media/platform/s5p-mfc/s5p_mfc.c 	dev->ctx[ctx->num] = NULL;
ctx               924 drivers/media/platform/s5p-mfc/s5p_mfc.c 	v4l2_fh_del(&ctx->fh);
ctx               925 drivers/media/platform/s5p-mfc/s5p_mfc.c 	v4l2_fh_exit(&ctx->fh);
ctx               926 drivers/media/platform/s5p-mfc/s5p_mfc.c 	kfree(ctx);
ctx               937 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               938 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               944 drivers/media/platform/s5p-mfc/s5p_mfc.c 	vb2_queue_release(&ctx->vq_src);
ctx               945 drivers/media/platform/s5p-mfc/s5p_mfc.c 	vb2_queue_release(&ctx->vq_dst);
ctx               950 drivers/media/platform/s5p-mfc/s5p_mfc.c 		clear_work_bit_irqsave(ctx);
ctx               955 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (ctx->state != MFCINST_FREE && ctx->state != MFCINST_INIT) {
ctx               957 drivers/media/platform/s5p-mfc/s5p_mfc.c 			s5p_mfc_close_mfc_inst(dev, ctx);
ctx               960 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (dev->curr_ctx == ctx->num)
ctx               976 drivers/media/platform/s5p-mfc/s5p_mfc.c 		dev->ctx[ctx->num] = NULL;
ctx               977 drivers/media/platform/s5p-mfc/s5p_mfc.c 	s5p_mfc_dec_ctrls_delete(ctx);
ctx               978 drivers/media/platform/s5p-mfc/s5p_mfc.c 	v4l2_fh_del(&ctx->fh);
ctx               981 drivers/media/platform/s5p-mfc/s5p_mfc.c 		v4l2_fh_exit(&ctx->fh);
ctx               982 drivers/media/platform/s5p-mfc/s5p_mfc.c 	kfree(ctx);
ctx               994 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(file->private_data);
ctx               995 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1002 drivers/media/platform/s5p-mfc/s5p_mfc.c 	src_q = &ctx->vq_src;
ctx              1003 drivers/media/platform/s5p-mfc/s5p_mfc.c 	dst_q = &ctx->vq_dst;
ctx              1015 drivers/media/platform/s5p-mfc/s5p_mfc.c 	poll_wait(file, &ctx->fh.wait, wait);
ctx              1019 drivers/media/platform/s5p-mfc/s5p_mfc.c 	if (v4l2_event_pending(&ctx->fh))
ctx              1045 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1051 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ret = vb2_mmap(&ctx->vq_src, vma);
ctx              1055 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ret = vb2_mmap(&ctx->vq_dst, vma);
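
Note: the mmap fragments above dispatch on the mapping offset, the classic way for an m2m driver to let one video node map both queues: low offsets go to the source queue, offsets above a base constant to the destination queue. A sketch; MY_DST_QUEUE_OFF_BASE and the fh_to_my_ctx() container_of helper are illustrative stand-ins for the driver's real names:

	#define MY_DST_QUEUE_OFF_BASE	(1 << 30)	/* assumed split point */

	static int my_mmap(struct file *file, struct vm_area_struct *vma)
	{
		struct my_mfc_ctx *ctx = fh_to_my_ctx(file->private_data);
		unsigned long offset = vma->vm_pgoff << PAGE_SHIFT;

		if (offset < MY_DST_QUEUE_OFF_BASE)
			return vb2_mmap(&ctx->vq_src, vma);

		/* rebase so the capture queue sees offsets starting at zero */
		vma->vm_pgoff -= (MY_DST_QUEUE_OFF_BASE >> PAGE_SHIFT);
		return vb2_mmap(&ctx->vq_dst, vma);
	}
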
ctx              1421 drivers/media/platform/s5p-mfc/s5p_mfc.c 	struct s5p_mfc_ctx *ctx;
ctx              1433 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx = dev->ctx[i];
ctx              1434 drivers/media/platform/s5p-mfc/s5p_mfc.c 		if (!ctx)
ctx              1437 drivers/media/platform/s5p-mfc/s5p_mfc.c 		ctx->dev = NULL;
ctx                26 drivers/media/platform/s5p-mfc/s5p_mfc_cmd.h 	int (*open_inst_cmd)(struct s5p_mfc_ctx *ctx);
ctx                27 drivers/media/platform/s5p-mfc/s5p_mfc_cmd.h 	int (*close_inst_cmd)(struct s5p_mfc_ctx *ctx);
ctx                71 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c static int s5p_mfc_open_inst_cmd_v5(struct s5p_mfc_ctx *ctx)
ctx                73 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                78 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	mfc_debug(2, "Getting instance number (codec: %d)\n", ctx->codec_mode);
ctx                79 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	dev->curr_ctx = ctx->num;
ctx                81 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	switch (ctx->codec_mode) {
ctx               113 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	h2r_args.arg[2] = ctx->ctx.ofs;
ctx               114 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	h2r_args.arg[3] = ctx->ctx.size;
ctx               119 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 		ctx->state = MFCINST_ERROR;
ctx               124 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c static int s5p_mfc_close_inst_cmd_v5(struct s5p_mfc_ctx *ctx)
ctx               126 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               130 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	if (ctx->state == MFCINST_FREE) {
ctx               132 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 		ctx->state = MFCINST_ERROR;
ctx               136 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	mfc_debug(2, "Returning instance number %d\n", ctx->inst_no);
ctx               137 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	dev->curr_ctx = ctx->num;
ctx               139 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 	h2r_args.arg[0] = ctx->inst_no;
ctx               144 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v5.c 		ctx->state = MFCINST_ERROR;
ctx                67 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c static int s5p_mfc_open_inst_cmd_v6(struct s5p_mfc_ctx *ctx)
ctx                69 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                73 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	mfc_debug(2, "Requested codec mode: %d\n", ctx->codec_mode);
ctx                74 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	dev->curr_ctx = ctx->num;
ctx                75 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	switch (ctx->codec_mode) {
ctx               128 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	mfc_write(dev, ctx->ctx.dma, S5P_FIMV_CONTEXT_MEM_ADDR_V6);
ctx               129 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	mfc_write(dev, ctx->ctx.size, S5P_FIMV_CONTEXT_MEM_SIZE_V6);
ctx               137 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c static int s5p_mfc_close_inst_cmd_v6(struct s5p_mfc_ctx *ctx)
ctx               139 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               143 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	dev->curr_ctx = ctx->num;
ctx               144 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 	if (ctx->state != MFCINST_FREE) {
ctx               145 drivers/media/platform/s5p-mfc/s5p_mfc_cmd_v6.c 		mfc_write(dev, ctx->inst_no, S5P_FIMV_INSTANCE_ID_V6);
ctx               250 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	unsigned int	ctx;
ctx               326 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	struct s5p_mfc_ctx *ctx[MFC_NUM_CONTEXTS];
ctx               529 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	int (*pre_seq_start) (struct s5p_mfc_ctx *ctx);
ctx               530 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	int (*post_seq_start) (struct s5p_mfc_ctx *ctx);
ctx               532 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	int (*pre_frame_start) (struct s5p_mfc_ctx *ctx);
ctx               533 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	int (*post_frame_start) (struct s5p_mfc_ctx *ctx);
ctx               685 drivers/media/platform/s5p-mfc/s5p_mfc_common.h 	struct s5p_mfc_priv_buf ctx;
ctx               754 drivers/media/platform/s5p-mfc/s5p_mfc_common.h void clear_work_bit(struct s5p_mfc_ctx *ctx);
ctx               755 drivers/media/platform/s5p-mfc/s5p_mfc_common.h void set_work_bit(struct s5p_mfc_ctx *ctx);
ctx               756 drivers/media/platform/s5p-mfc/s5p_mfc_common.h void clear_work_bit_irqsave(struct s5p_mfc_ctx *ctx);
ctx               757 drivers/media/platform/s5p-mfc/s5p_mfc_common.h void set_work_bit_irqsave(struct s5p_mfc_ctx *ctx);
ctx               423 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c int s5p_mfc_open_mfc_inst(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx)
ctx               427 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	ret = s5p_mfc_hw_call(dev->mfc_ops, alloc_instance_buffer, ctx);
ctx               433 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	if (ctx->type == MFCINST_DECODER) {
ctx               435 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 					alloc_dec_temp_buffers, ctx);
ctx               442 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	set_work_bit_irqsave(ctx);
ctx               444 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	if (s5p_mfc_wait_for_done_ctx(ctx,
ctx               452 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	mfc_debug(2, "Got instance number: %d\n", ctx->inst_no);
ctx               456 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	if (ctx->type == MFCINST_DECODER)
ctx               457 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 		s5p_mfc_hw_call(dev->mfc_ops, release_dec_desc_buffer, ctx);
ctx               459 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	s5p_mfc_hw_call(dev->mfc_ops, release_instance_buffer, ctx);
ctx               464 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c void s5p_mfc_close_mfc_inst(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx)
ctx               466 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	ctx->state = MFCINST_RETURN_INST;
ctx               467 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	set_work_bit_irqsave(ctx);
ctx               470 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	if (s5p_mfc_wait_for_done_ctx(ctx,
ctx               475 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	s5p_mfc_hw_call(dev->mfc_ops, release_codec_buffers, ctx);
ctx               476 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	s5p_mfc_hw_call(dev->mfc_ops, release_instance_buffer, ctx);
ctx               477 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	if (ctx->type == MFCINST_DECODER)
ctx               478 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 		s5p_mfc_hw_call(dev->mfc_ops, release_dec_desc_buffer, ctx);
ctx               480 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	ctx->inst_no = MFC_NO_INSTANCE_SET;
ctx               481 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.c 	ctx->state = MFCINST_FREE;
ctx                26 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.h int s5p_mfc_open_mfc_inst(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx);
ctx                27 drivers/media/platform/s5p-mfc/s5p_mfc_ctrl.h void s5p_mfc_close_mfc_inst(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx);
ctx               212 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx)
ctx               215 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->src_queue_cnt >= 1 && ctx->state == MFCINST_GOT_INST)
ctx               218 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->src_queue_cnt >= 1 &&
ctx               219 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state == MFCINST_RUNNING &&
ctx               220 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->dst_queue_cnt >= ctx->pb_count)
ctx               223 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_FINISHING &&
ctx               224 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->dst_queue_cnt >= ctx->pb_count)
ctx               227 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->src_queue_cnt >= 1 &&
ctx               228 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state == MFCINST_HEAD_PARSED &&
ctx               229 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->capture_state == QUEUE_BUFS_MMAPED)
ctx               232 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if ((ctx->state == MFCINST_RES_CHANGE_INIT ||
ctx               233 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state == MFCINST_RES_CHANGE_FLUSH) &&
ctx               234 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_queue_cnt >= ctx->pb_count)
ctx               236 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_RES_CHANGE_END &&
ctx               237 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_queue_cnt >= 1)
ctx               303 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               309 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    (ctx->state == MFCINST_GOT_INST || ctx->state ==
ctx               313 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_wait_for_done_ctx(ctx, S5P_MFC_R2H_CMD_SEQ_DONE_RET,
ctx               317 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state >= MFCINST_HEAD_PARSED &&
ctx               318 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state < MFCINST_ABORT) {
ctx               324 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->width = ctx->buf_width;
ctx               325 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->height = ctx->buf_height;
ctx               330 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->pixelformat = ctx->dst_fmt->fourcc;
ctx               331 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[0].bytesperline = ctx->buf_width;
ctx               332 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[0].sizeimage = ctx->luma_size;
ctx               333 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[1].bytesperline = ctx->buf_width;
ctx               334 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[1].sizeimage = ctx->chroma_size;
ctx               342 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[0].bytesperline = ctx->dec_src_buf_size;
ctx               343 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->plane_fmt[0].sizeimage = ctx->dec_src_buf_size;
ctx               344 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->pixelformat = ctx->src_fmt->fourcc;
ctx               345 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		pix_mp->num_planes = ctx->src_fmt->num_planes;
ctx               395 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               405 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (vb2_is_streaming(&ctx->vq_src) || vb2_is_streaming(&ctx->vq_dst)) {
ctx               412 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_fmt = find_format(f, MFC_FMT_RAW);
ctx               417 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_fmt = find_format(f, MFC_FMT_DEC);
ctx               418 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->codec_mode = ctx->src_fmt->codec_mode;
ctx               419 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		mfc_debug(2, "The codec number is: %d\n", ctx->codec_mode);
ctx               423 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			pix_mp->plane_fmt[0].sizeimage = ctx->dec_src_buf_size =
ctx               426 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->dec_src_buf_size = buf_size->cpb;
ctx               428 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->dec_src_buf_size = pix_mp->plane_fmt[0].sizeimage;
ctx               430 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state = MFCINST_INIT;
ctx               444 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c static int reqbufs_output(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx,
ctx               453 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
ctx               456 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_bufs_cnt = 0;
ctx               457 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->output_state = QUEUE_FREE;
ctx               458 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	} else if (ctx->output_state == QUEUE_FREE) {
ctx               460 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		WARN_ON(ctx->src_bufs_cnt != 0);
ctx               461 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->state != MFCINST_INIT) {
ctx               469 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
ctx               473 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = s5p_mfc_open_mfc_inst(dev, ctx);
ctx               476 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			vb2_reqbufs(&ctx->vq_src, reqbufs);
ctx               480 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->output_state = QUEUE_BUFS_REQUESTED;
ctx               492 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c static int reqbufs_capture(struct s5p_mfc_dev *dev, struct s5p_mfc_ctx *ctx,
ctx               501 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx               504 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_hw_call(dev->mfc_ops, release_codec_buffers, ctx);
ctx               505 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_bufs_cnt = 0;
ctx               506 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	} else if (ctx->capture_state == QUEUE_FREE) {
ctx               507 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		WARN_ON(ctx->dst_bufs_cnt != 0);
ctx               510 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx               514 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->capture_state = QUEUE_BUFS_REQUESTED;
ctx               515 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->total_dpb_count = reqbufs->count;
ctx               517 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = s5p_mfc_hw_call(dev->mfc_ops, alloc_codec_buffers, ctx);
ctx               521 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx               523 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->capture_state = QUEUE_FREE;
ctx               527 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		WARN_ON(ctx->dst_bufs_cnt != ctx->total_dpb_count);
ctx               528 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->capture_state = QUEUE_BUFS_MMAPED;
ctx               530 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (s5p_mfc_ctx_ready(ctx))
ctx               531 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			set_work_bit_irqsave(ctx);
ctx               533 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_wait_for_done_ctx(ctx, S5P_MFC_R2H_CMD_INIT_BUFFERS_RET,
ctx               551 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               559 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return reqbufs_output(dev, ctx, reqbufs);
ctx               561 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return reqbufs_capture(dev, ctx, reqbufs);
ctx               572 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               580 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	mfc_debug(2, "State: %d, buf->type: %d\n", ctx->state, buf->type);
ctx               581 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_GOT_INST &&
ctx               583 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_querybuf(&ctx->vq_src, buf);
ctx               584 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	} else if (ctx->state == MFCINST_RUNNING &&
ctx               586 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_querybuf(&ctx->vq_dst, buf);
ctx               600 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               602 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_ERROR) {
ctx               607 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_qbuf(&ctx->vq_src, NULL, buf);
ctx               609 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_qbuf(&ctx->vq_dst, NULL, buf);
ctx               619 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               622 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_ERROR) {
ctx               629 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_dqbuf(&ctx->vq_src, buf, file->f_flags & O_NONBLOCK);
ctx               631 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_dqbuf(&ctx->vq_dst, buf, file->f_flags & O_NONBLOCK);
ctx               635 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->state == MFCINST_FINISHED &&
ctx               636 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		    (ctx->dst_bufs[buf->index].flags & MFC_BUF_FLAG_EOS))
ctx               637 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			v4l2_event_queue_fh(&ctx->fh, &ev);
ctx               648 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               651 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_expbuf(&ctx->vq_src, eb);
ctx               653 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_expbuf(&ctx->vq_dst, eb);
ctx               661 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               666 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_streamon(&ctx->vq_src, type);
ctx               668 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ret = vb2_streamon(&ctx->vq_dst, type);
ctx               677 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               680 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_streamoff(&ctx->vq_src, type);
ctx               682 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return vb2_streamoff(&ctx->vq_dst, type);
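
The vidioc_dqbuf entries above queue a V4L2_EVENT_EOS once the last decoded frame leaves the CAPTURE queue. A hedged sketch of that pattern, mirroring only the lines indexed here:

	static int vidioc_dqbuf(struct file *file, void *priv,
				struct v4l2_buffer *buf)
	{
		static const struct v4l2_event ev = {
			.type = V4L2_EVENT_EOS,
		};
		struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
		int ret;

		ret = vb2_dqbuf(&ctx->vq_dst, buf, file->f_flags & O_NONBLOCK);
		/* signal end-of-stream when the dequeued buffer carries the
		 * EOS flag set by the interrupt handler */
		if (ret == 0 && ctx->state == MFCINST_FINISHED &&
		    (ctx->dst_bufs[buf->index].flags & MFC_BUF_FLAG_EOS))
			v4l2_event_queue_fh(&ctx->fh, &ev);
		return ret;
	}
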
ctx               689 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               693 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->display_delay = ctrl->val;
ctx               696 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->display_delay_enable = ctrl->val;
ctx               699 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->loop_filter_mpeg4 = ctrl->val;
ctx               702 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->slice_interface = ctrl->val;
ctx               713 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx               714 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               718 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->state >= MFCINST_HEAD_PARSED &&
ctx               719 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		    ctx->state < MFCINST_ABORT) {
ctx               720 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctrl->val = ctx->pb_count;
ctx               722 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		} else if (ctx->state != MFCINST_INIT &&
ctx               723 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 				ctx->state != MFCINST_RES_CHANGE_END) {
ctx               728 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_wait_for_done_ctx(ctx,
ctx               730 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->state >= MFCINST_HEAD_PARSED &&
ctx               731 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		    ctx->state < MFCINST_ABORT) {
ctx               732 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctrl->val = ctx->pb_count;
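
The g_volatile_ctrl handler above serves V4L2_CID_MIN_BUFFERS_FOR_CAPTURE: if the stream header has not been parsed yet, it blocks until the hardware reports the DPB size. A sketch under those assumptions (the SEQ_DONE command choice follows the driver's naming):

	static int s5p_mfc_dec_g_v_ctrl(struct v4l2_ctrl *ctrl)
	{
		struct s5p_mfc_ctx *ctx = ctrl_to_ctx(ctrl);

		if (ctrl->id != V4L2_CID_MIN_BUFFERS_FOR_CAPTURE)
			return -EINVAL;
		if (ctx->state >= MFCINST_HEAD_PARSED &&
		    ctx->state < MFCINST_ABORT) {
			ctrl->val = ctx->pb_count;	/* DPB size from header */
			return 0;
		}
		/* header not parsed yet: wait for the firmware, then retry */
		s5p_mfc_wait_for_done_ctx(ctx,
				S5P_MFC_R2H_CMD_SEQ_DONE_RET, 0);
		ctrl->val = ctx->pb_count;
		return 0;
	}
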
ctx               752 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               753 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               760 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state != MFCINST_HEAD_PARSED &&
ctx               761 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state != MFCINST_RUNNING &&
ctx               762 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state != MFCINST_FINISHING &&
ctx               763 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	    ctx->state != MFCINST_FINISHED) {
ctx               767 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_H264) {
ctx               768 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		left = s5p_mfc_hw_call(dev->mfc_ops, get_crop_info_h, ctx);
ctx               771 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		top = s5p_mfc_hw_call(dev->mfc_ops, get_crop_info_v, ctx);
ctx               774 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		width = ctx->img_width - left - right;
ctx               775 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		height = ctx->img_height - top - bottom;
ctx               778 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			  ctx->buf_width, ctx->buf_height);
ctx               782 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		width = ctx->img_width;
ctx               783 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		height = ctx->img_height;
ctx               785 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			  s->r.width, s->r.height, ctx->buf_width,
ctx               786 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			  ctx->buf_height);
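
For H.264 the crop ops above return two offsets packed into one register word; a sketch of the unpacking in the g_selection path (the 16-bit split is an assumption here, the driver uses its own S5P_FIMV_* masks and shifts):

	u32 lr = s5p_mfc_hw_call(dev->mfc_ops, get_crop_info_h, ctx);
	u32 tb = s5p_mfc_hw_call(dev->mfc_ops, get_crop_info_v, ctx);
	u32 left = lr & 0xffff, right = lr >> 16;
	u32 top = tb & 0xffff, bottom = tb >> 16;

	/* visible rectangle = coded size minus the crop borders */
	s->r.left = left;
	s->r.top = top;
	s->r.width = ctx->img_width - left - right;
	s->r.height = ctx->img_height - top - bottom;
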
ctx               807 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx               808 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               817 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (!vb2_is_streaming(&ctx->vq_src))
ctx               821 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (list_empty(&ctx->src_queue)) {
ctx               823 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->state = MFCINST_FINISHING;
ctx               824 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			if (s5p_mfc_ctx_ready(ctx))
ctx               825 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 				set_work_bit_irqsave(ctx);
ctx               830 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			buf = list_entry(ctx->src_queue.prev,
ctx               833 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 				ctx->state = MFCINST_FINISHING;
ctx               888 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx               889 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               893 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_INIT &&
ctx               903 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	} else if (ctx->state == MFCINST_HEAD_PARSED &&
ctx               908 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (*buf_count < ctx->pb_count)
ctx               909 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			*buf_count = ctx->pb_count;
ctx               910 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (*buf_count > ctx->pb_count + MFC_MAX_EXTRA_DPB)
ctx               911 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			*buf_count = ctx->pb_count + MFC_MAX_EXTRA_DPB;
ctx               916 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 							ctx->state, vq->type);
ctx               921 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_HEAD_PARSED &&
ctx               923 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		psize[0] = ctx->luma_size;
ctx               924 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		psize[1] = ctx->chroma_size;
ctx               927 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			alloc_devs[0] = ctx->dev->mem_dev[BANK_L_CTX];
ctx               929 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			alloc_devs[0] = ctx->dev->mem_dev[BANK_R_CTX];
ctx               930 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		alloc_devs[1] = ctx->dev->mem_dev[BANK_L_CTX];
ctx               932 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		   ctx->state == MFCINST_INIT) {
ctx               933 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		psize[0] = ctx->dec_src_buf_size;
ctx               934 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		alloc_devs[0] = ctx->dev->mem_dev[BANK_L_CTX];
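
The two comparisons against ctx->pb_count in queue_setup above are the open-coded form of the kernel's clamp() helper; a one-line equivalent (a sketch, not the driver's code; the casts assume pb_count is a plain int):

	/* at least pb_count DPBs, at most pb_count + MFC_MAX_EXTRA_DPB */
	*buf_count = clamp(*buf_count, (unsigned int)ctx->pb_count,
			   (unsigned int)ctx->pb_count + MFC_MAX_EXTRA_DPB);
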
ctx               946 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx               950 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->capture_state == QUEUE_BUFS_MMAPED)
ctx               952 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		for (i = 0; i < ctx->dst_fmt->num_planes; i++) {
ctx               959 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (vb2_plane_size(vb, 0) < ctx->luma_size ||
ctx               960 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			vb2_plane_size(vb, 1) < ctx->chroma_size) {
ctx               965 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_bufs[i].b = vbuf;
ctx               966 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_bufs[i].cookie.raw.luma =
ctx               968 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_bufs[i].cookie.raw.chroma =
ctx               970 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_bufs_cnt++;
ctx               977 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (vb2_plane_size(vb, 0) < ctx->dec_src_buf_size) {
ctx               983 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_bufs[i].b = vbuf;
ctx               984 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_bufs[i].cookie.stream =
ctx               986 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_bufs_cnt++;
ctx               996 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(q->drv_priv);
ctx               997 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               999 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx              1000 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->state == MFCINST_FINISHING ||
ctx              1001 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state == MFCINST_FINISHED)
ctx              1002 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state = MFCINST_RUNNING;
ctx              1004 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (s5p_mfc_ctx_ready(ctx))
ctx              1005 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		set_work_bit_irqsave(ctx);
ctx              1013 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(q->drv_priv);
ctx              1014 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1018 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if ((ctx->state == MFCINST_FINISHING ||
ctx              1019 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state == MFCINST_RUNNING) &&
ctx              1020 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		dev->curr_ctx == ctx->num && dev->hw_lock) {
ctx              1021 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state = MFCINST_ABORT;
ctx              1023 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_wait_for_done_ctx(ctx,
ctx              1029 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_cleanup_queue(&ctx->dst_queue, &ctx->vq_dst);
ctx              1030 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		INIT_LIST_HEAD(&ctx->dst_queue);
ctx              1031 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_queue_cnt = 0;
ctx              1032 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dpb_flush_flag = 1;
ctx              1033 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dec_dst_flag = 0;
ctx              1034 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (IS_MFCV6_PLUS(dev) && (ctx->state == MFCINST_RUNNING)) {
ctx              1035 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->state = MFCINST_FLUSH;
ctx              1036 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			set_work_bit_irqsave(ctx);
ctx              1039 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			if (s5p_mfc_wait_for_done_ctx(ctx,
ctx              1045 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		s5p_mfc_cleanup_queue(&ctx->src_queue, &ctx->vq_src);
ctx              1046 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		INIT_LIST_HEAD(&ctx->src_queue);
ctx              1047 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_queue_cnt = 0;
ctx              1050 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->state = MFCINST_RUNNING;
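
On MFC v6+ the stop_streaming path above performs an explicit DPB-flush handshake with the firmware before the queues are torn down. A sketch of that sequence, ordered per the entries above (the try_run dispatch is assumed):

	ctx->state = MFCINST_FLUSH;
	set_work_bit_irqsave(ctx);
	s5p_mfc_clean_ctx_int_flags(ctx);
	s5p_mfc_hw_call(dev->mfc_ops, try_run, dev);
	/* wait until the firmware acknowledges the flush */
	if (s5p_mfc_wait_for_done_ctx(ctx,
			S5P_MFC_R2H_CMD_DPB_FLUSH_RET, 0))
		mfc_err("Err flushing buffers\n");
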
ctx              1058 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx              1059 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1064 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		mfc_buf = &ctx->src_bufs[vb->index];
ctx              1067 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		list_add_tail(&mfc_buf->list, &ctx->src_queue);
ctx              1068 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->src_queue_cnt++;
ctx              1071 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		mfc_buf = &ctx->dst_bufs[vb->index];
ctx              1075 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		set_bit(vb->index, &ctx->dec_dst_flag);
ctx              1076 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		list_add_tail(&mfc_buf->list, &ctx->dst_queue);
ctx              1077 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->dst_queue_cnt++;
ctx              1082 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (s5p_mfc_ctx_ready(ctx))
ctx              1083 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		set_work_bit_irqsave(ctx);
ctx              1115 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c int s5p_mfc_dec_ctrls_setup(struct s5p_mfc_ctx *ctx)
ctx              1120 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, NUM_CTRLS);
ctx              1121 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (ctx->ctrl_handler.error) {
ctx              1123 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		return ctx->ctrl_handler.error;
ctx              1140 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->ctrls[i] = v4l2_ctrl_new_custom(&ctx->ctrl_handler,
ctx              1143 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->ctrls[i] = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx              1149 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (ctx->ctrl_handler.error) {
ctx              1151 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			return ctx->ctrl_handler.error;
ctx              1153 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		if (controls[i].is_volatile && ctx->ctrls[i])
ctx              1154 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->ctrls[i]->flags |= V4L2_CTRL_FLAG_VOLATILE;
ctx              1159 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c void s5p_mfc_dec_ctrls_delete(struct s5p_mfc_ctx *ctx)
ctx              1163 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1165 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 		ctx->ctrls[i] = NULL;
ctx              1168 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c void s5p_mfc_dec_init(struct s5p_mfc_ctx *ctx)
ctx              1172 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	ctx->src_fmt = find_format(&f, MFC_FMT_DEC);
ctx              1173 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	if (IS_MFCV8_PLUS(ctx->dev))
ctx              1175 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	else if (IS_MFCV6_PLUS(ctx->dev))
ctx              1179 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 	ctx->dst_fmt = find_format(&f, MFC_FMT_RAW);
ctx              1181 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c 			ctx->src_fmt, ctx->dst_fmt);
ctx                16 drivers/media/platform/s5p-mfc/s5p_mfc_dec.h int s5p_mfc_dec_ctrls_setup(struct s5p_mfc_ctx *ctx);
ctx                17 drivers/media/platform/s5p-mfc/s5p_mfc_dec.h void s5p_mfc_dec_ctrls_delete(struct s5p_mfc_ctx *ctx);
ctx                18 drivers/media/platform/s5p-mfc/s5p_mfc_dec.h void s5p_mfc_dec_init(struct s5p_mfc_ctx *ctx);
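
s5p_mfc_dec_ctrls_setup() above walks a static controls table and registers each entry with the handler. A hedged sketch of the loop body; the table fields (minimum/maximum/step/default_value/is_volatile) and the IS_MFC51_PRIV() test are assumptions based on the entries above, and the v4l2_ctrl_config name/type fields are omitted for brevity:

	for (i = 0; i < NUM_CTRLS; i++) {
		if (IS_MFC51_PRIV(controls[i].id)) {
			/* driver-private control: needs a full config */
			struct v4l2_ctrl_config cfg = {
				.ops = &s5p_mfc_dec_ctrl_ops,
				.id = controls[i].id,
				.min = controls[i].minimum,
				.max = controls[i].maximum,
				.step = controls[i].step,
				.def = controls[i].default_value,
			};
			ctx->ctrls[i] = v4l2_ctrl_new_custom(
					&ctx->ctrl_handler, &cfg, NULL);
		} else {
			ctx->ctrls[i] = v4l2_ctrl_new_std(&ctx->ctrl_handler,
					&s5p_mfc_dec_ctrl_ops,
					controls[i].id,
					controls[i].minimum,
					controls[i].maximum,
					controls[i].step,
					controls[i].default_value);
		}
		if (ctx->ctrl_handler.error)
			return ctx->ctrl_handler.error;
		if (controls[i].is_volatile && ctx->ctrls[i])
			ctx->ctrls[i]->flags |= V4L2_CTRL_FLAG_VOLATILE;
	}
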
ctx              1088 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static int s5p_mfc_ctx_ready(struct s5p_mfc_ctx *ctx)
ctx              1091 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		  ctx->src_queue_cnt, ctx->dst_queue_cnt, ctx->state);
ctx              1093 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->state == MFCINST_GOT_INST && ctx->dst_queue_cnt >= 1)
ctx              1096 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if ((ctx->state == MFCINST_RUNNING ||
ctx              1097 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state == MFCINST_HEAD_PRODUCED) &&
ctx              1098 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_queue_cnt >= 1 && ctx->dst_queue_cnt >= 1)
ctx              1101 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->state == MFCINST_FINISHING &&
ctx              1102 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_queue_cnt >= 1)
ctx              1108 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static void cleanup_ref_queue(struct s5p_mfc_ctx *ctx)
ctx              1113 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	while (!list_empty(&ctx->ref_queue)) {
ctx              1114 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mb_entry = list_entry((&ctx->ref_queue)->next,
ctx              1117 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->ref_queue_cnt--;
ctx              1118 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		list_add_tail(&mb_entry->list, &ctx->src_queue);
ctx              1119 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_queue_cnt++;
ctx              1122 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		  ctx->src_queue_cnt, ctx->ref_queue_cnt);
ctx              1123 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	INIT_LIST_HEAD(&ctx->ref_queue);
ctx              1124 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	ctx->ref_queue_cnt = 0;
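
cleanup_ref_queue() above drains the reference queue entry by entry; list_splice_tail_init() would be the one-call equivalent (a sketch with identical ordering, assuming the same locking context):

	/* move every queued reference frame back onto the source queue */
	list_splice_tail_init(&ctx->ref_queue, &ctx->src_queue);
	ctx->src_queue_cnt += ctx->ref_queue_cnt;
	ctx->ref_queue_cnt = 0;
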
ctx              1127 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static int enc_pre_seq_start(struct s5p_mfc_ctx *ctx)
ctx              1129 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1134 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1137 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	s5p_mfc_hw_call(dev->mfc_ops, set_enc_stream_buffer, ctx, dst_addr,
ctx              1142 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static int enc_post_seq_start(struct s5p_mfc_ctx *ctx)
ctx              1144 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1145 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1150 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (!list_empty(&ctx->dst_queue)) {
ctx              1151 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			dst_mb = list_entry(ctx->dst_queue.next,
ctx              1154 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->dst_queue_cnt--;
ctx              1164 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state = MFCINST_RUNNING;
ctx              1165 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (s5p_mfc_ctx_ready(ctx))
ctx              1166 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			set_work_bit_irqsave(ctx);
ctx              1171 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->pb_count < enc_pb_count)
ctx              1172 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->pb_count = enc_pb_count;
ctx              1174 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->scratch_buf_size = s5p_mfc_hw_call(dev->mfc_ops,
ctx              1176 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->bank1.size += ctx->scratch_buf_size;
ctx              1178 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state = MFCINST_HEAD_PRODUCED;
ctx              1184 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static int enc_pre_frame_start(struct s5p_mfc_ctx *ctx)
ctx              1186 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1192 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1195 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	s5p_mfc_hw_call(dev->mfc_ops, set_enc_frame_buffer, ctx,
ctx              1198 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1201 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	s5p_mfc_hw_call(dev->mfc_ops, set_enc_stream_buffer, ctx, dst_addr,
ctx              1207 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static int enc_post_frame_start(struct s5p_mfc_ctx *ctx)
ctx              1209 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1223 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_hw_call(dev->mfc_ops, get_enc_frame_buffer, ctx,
ctx              1225 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		list_for_each_entry(mb_entry, &ctx->src_queue, list) {
ctx              1233 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->src_queue_cnt--;
ctx              1239 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		list_for_each_entry(mb_entry, &ctx->ref_queue, list) {
ctx              1247 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->ref_queue_cnt--;
ctx              1254 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if ((ctx->src_queue_cnt > 0) && (ctx->state == MFCINST_RUNNING)) {
ctx              1255 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mb_entry = list_entry(ctx->src_queue.next, struct s5p_mfc_buf,
ctx              1259 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->src_queue_cnt--;
ctx              1260 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			list_add_tail(&mb_entry->list, &ctx->ref_queue);
ctx              1261 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->ref_queue_cnt++;
ctx              1265 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		  ctx->src_queue_cnt, ctx->ref_queue_cnt);
ctx              1266 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if ((ctx->dst_queue_cnt > 0) && (strm_size > 0)) {
ctx              1267 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mb_entry = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf,
ctx              1270 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_queue_cnt--;
ctx              1285 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if ((ctx->src_queue_cnt == 0) || (ctx->dst_queue_cnt == 0))
ctx              1286 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		clear_work_bit(ctx);
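
enc_post_frame_start() above hands the finished stream buffer back to vb2 together with the number of bytes the encoder actually produced; a sketch of that final step (strm_size read from the hardware earlier in the function):

	mb_entry = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
	list_del(&mb_entry->list);
	ctx->dst_queue_cnt--;
	/* tell userspace how much of plane 0 holds valid bitstream */
	vb2_set_plane_payload(&mb_entry->b->vb2_buf, 0, strm_size);
	vb2_buffer_done(&mb_entry->b->vb2_buf, VB2_BUF_STATE_DONE);
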
ctx              1348 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1351 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	mfc_debug(2, "f->type = %d ctx->state = %d\n", f->type, ctx->state);
ctx              1357 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->pixelformat = ctx->dst_fmt->fourcc;
ctx              1358 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->num_planes = ctx->dst_fmt->num_planes;
ctx              1360 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].bytesperline = ctx->enc_dst_buf_size;
ctx              1361 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].sizeimage = ctx->enc_dst_buf_size;
ctx              1364 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->width = ctx->img_width;
ctx              1365 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->height = ctx->img_height;
ctx              1368 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->pixelformat = ctx->src_fmt->fourcc;
ctx              1369 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->num_planes = ctx->src_fmt->num_planes;
ctx              1371 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].bytesperline = ctx->buf_width;
ctx              1372 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].sizeimage = ctx->luma_size;
ctx              1373 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[1].bytesperline = ctx->buf_width;
ctx              1374 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[1].sizeimage = ctx->chroma_size;
ctx              1424 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1431 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->vq_src.streaming || ctx->vq_dst.streaming) {
ctx              1438 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_fmt = find_format(f, MFC_FMT_ENC);
ctx              1439 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state = MFCINST_INIT;
ctx              1440 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->codec_mode = ctx->dst_fmt->codec_mode;
ctx              1441 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->enc_dst_buf_size =	pix_fmt_mp->plane_fmt[0].sizeimage;
ctx              1443 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_bufs_cnt = 0;
ctx              1444 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->capture_state = QUEUE_FREE;
ctx              1445 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = s5p_mfc_open_mfc_inst(dev, ctx);
ctx              1448 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_fmt = find_format(f, MFC_FMT_RAW);
ctx              1449 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->img_width = pix_fmt_mp->width;
ctx              1450 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->img_height = pix_fmt_mp->height;
ctx              1451 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mfc_debug(2, "codec number: %d\n", ctx->src_fmt->codec_mode);
ctx              1454 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->img_width, ctx->img_height);
ctx              1456 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_hw_call(dev->mfc_ops, enc_calc_src_size, ctx);
ctx              1457 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].sizeimage = ctx->luma_size;
ctx              1458 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[0].bytesperline = ctx->buf_width;
ctx              1459 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[1].sizeimage = ctx->chroma_size;
ctx              1460 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		pix_fmt_mp->plane_fmt[1].bytesperline = ctx->buf_width;
ctx              1462 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_bufs_cnt = 0;
ctx              1463 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->output_state = QUEUE_FREE;
ctx              1477 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1487 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx              1489 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					ctx);
ctx              1490 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->capture_state = QUEUE_FREE;
ctx              1493 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->capture_state != QUEUE_FREE) {
ctx              1495 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 							ctx->capture_state);
ctx              1498 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx              1503 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->capture_state = QUEUE_BUFS_REQUESTED;
ctx              1505 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = s5p_mfc_hw_call(ctx->dev->mfc_ops,
ctx              1506 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				alloc_codec_buffers, ctx);
ctx              1510 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ret = vb2_reqbufs(&ctx->vq_dst, reqbufs);
ctx              1516 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
ctx              1518 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					ctx);
ctx              1519 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->output_state = QUEUE_FREE;
ctx              1522 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->output_state != QUEUE_FREE) {
ctx              1524 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 							ctx->output_state);
ctx              1530 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			if (ctx->pb_count &&
ctx              1531 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				(reqbufs->count < ctx->pb_count)) {
ctx              1532 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				reqbufs->count = ctx->pb_count;
ctx              1534 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 						ctx->pb_count);
ctx              1536 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->pb_count = reqbufs->count;
ctx              1540 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_reqbufs(&ctx->vq_src, reqbufs);
ctx              1545 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->output_state = QUEUE_BUFS_REQUESTED;
ctx              1556 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1564 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->state != MFCINST_GOT_INST) {
ctx              1565 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			mfc_err("invalid context state: %d\n", ctx->state);
ctx              1568 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_querybuf(&ctx->vq_dst, buf);
ctx              1575 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_querybuf(&ctx->vq_src, buf);
ctx              1590 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1592 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->state == MFCINST_ERROR) {
ctx              1597 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->state == MFCINST_FINISHING) {
ctx              1601 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_qbuf(&ctx->vq_src, NULL, buf);
ctx              1603 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_qbuf(&ctx->vq_dst, NULL, buf);
ctx              1614 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1617 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->state == MFCINST_ERROR) {
ctx              1622 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_dqbuf(&ctx->vq_src, buf, file->f_flags & O_NONBLOCK);
ctx              1624 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = vb2_dqbuf(&ctx->vq_dst, buf, file->f_flags & O_NONBLOCK);
ctx              1625 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ret == 0 && ctx->state == MFCINST_FINISHED
ctx              1626 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					&& list_empty(&ctx->vq_dst.done_list))
ctx              1627 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			v4l2_event_queue_fh(&ctx->fh, &ev);
ctx              1639 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1642 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_expbuf(&ctx->vq_src, eb);
ctx              1644 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_expbuf(&ctx->vq_dst, eb);
ctx              1652 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1655 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_streamon(&ctx->vq_src, type);
ctx              1657 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_streamon(&ctx->vq_dst, type);
ctx              1665 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              1668 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_streamoff(&ctx->vq_src, type);
ctx              1670 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return vb2_streamoff(&ctx->vq_dst, type);
ctx              1757 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c static void __enc_update_hevc_qp_ctrls_range(struct s5p_mfc_ctx *ctx,
ctx              1776 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		for (j = 0; j < ARRAY_SIZE(ctx->ctrls); j++) {
ctx              1777 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			if (ctx->ctrls[j]->id == __hevc_qp_ctrls[i]) {
ctx              1778 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctrl = ctx->ctrls[j];
ctx              1791 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              1792 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1793 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1830 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->force_frame_type = ctrl->val;
ctx              1833 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->force_frame_type =
ctx              2054 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		__enc_update_hevc_qp_ctrls_range(ctx, ctrl->val,
ctx              2059 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		__enc_update_hevc_qp_ctrls_range(ctx, p->codec.hevc.rc_min_qp,
ctx              2201 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = ctrl_to_ctx(ctrl);
ctx              2202 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2206 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->state >= MFCINST_HEAD_PARSED &&
ctx              2207 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		    ctx->state < MFCINST_ABORT) {
ctx              2208 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctrl->val = ctx->pb_count;
ctx              2210 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		} else if (ctx->state != MFCINST_INIT) {
ctx              2215 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_wait_for_done_ctx(ctx,
ctx              2217 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->state >= MFCINST_HEAD_PARSED &&
ctx              2218 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		    ctx->state < MFCINST_ABORT) {
ctx              2219 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctrl->val = ctx->pb_count;
ctx              2237 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              2240 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->enc_params.rc_framerate_num =
ctx              2242 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->enc_params.rc_framerate_denom =
ctx              2254 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              2258 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					ctx->enc_params.rc_framerate_num;
ctx              2260 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					ctx->enc_params.rc_framerate_denom;
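
The s_parm/g_parm pair above stores the OUTPUT frame interval inverted into the rate-control fields, since frame rate is the reciprocal of time-per-frame. A sketch of the set side:

	static int vidioc_s_parm(struct file *file, void *priv,
				 struct v4l2_streamparm *parm)
	{
		struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
		struct v4l2_fract *tpf = &parm->parm.output.timeperframe;

		/* rc framerate = denominator / numerator of timeperframe */
		ctx->enc_params.rc_framerate_num = tpf->denominator;
		ctx->enc_params.rc_framerate_denom = tpf->numerator;
		return 0;
	}
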
ctx              2271 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(priv);
ctx              2272 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2281 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (!ctx->vq_src.streaming)
ctx              2285 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (list_empty(&ctx->src_queue)) {
ctx              2287 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->state = MFCINST_FINISHING;
ctx              2288 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			if (s5p_mfc_ctx_ready(ctx))
ctx              2289 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				set_work_bit_irqsave(ctx);
ctx              2294 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			buf = list_entry(ctx->src_queue.prev,
ctx              2297 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->state = MFCINST_FINISHING;
ctx              2371 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx              2372 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2375 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->state != MFCINST_GOT_INST) {
ctx              2376 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			mfc_err("invalid state: %d\n", ctx->state);
ctx              2380 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->dst_fmt)
ctx              2381 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			*plane_count = ctx->dst_fmt->num_planes;
ctx              2388 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		psize[0] = ctx->enc_dst_buf_size;
ctx              2389 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		alloc_devs[0] = ctx->dev->mem_dev[BANK_L_CTX];
ctx              2391 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->src_fmt)
ctx              2392 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			*plane_count = ctx->src_fmt->num_planes;
ctx              2401 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		psize[0] = ctx->luma_size;
ctx              2402 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		psize[1] = ctx->chroma_size;
ctx              2405 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			alloc_devs[0] = ctx->dev->mem_dev[BANK_L_CTX];
ctx              2406 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			alloc_devs[1] = ctx->dev->mem_dev[BANK_L_CTX];
ctx              2408 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			alloc_devs[0] = ctx->dev->mem_dev[BANK_R_CTX];
ctx              2409 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			alloc_devs[1] = ctx->dev->mem_dev[BANK_R_CTX];
ctx              2422 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx              2427 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = check_vb_with_fmt(ctx->dst_fmt, vb);
ctx              2431 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_bufs[i].b = vbuf;
ctx              2432 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_bufs[i].cookie.stream =
ctx              2434 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_bufs_cnt++;
ctx              2436 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = check_vb_with_fmt(ctx->src_fmt, vb);
ctx              2440 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_bufs[i].b = vbuf;
ctx              2441 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_bufs[i].cookie.raw.luma =
ctx              2443 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_bufs[i].cookie.raw.chroma =
ctx              2445 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_bufs_cnt++;
ctx              2456 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx              2460 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = check_vb_with_fmt(ctx->dst_fmt, vb);
ctx              2464 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			vb2_plane_size(vb, 0), ctx->enc_dst_buf_size);
ctx              2465 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (vb2_plane_size(vb, 0) < ctx->enc_dst_buf_size) {
ctx              2470 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ret = check_vb_with_fmt(ctx->src_fmt, vb);
ctx              2474 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			vb2_plane_size(vb, 0), ctx->luma_size);
ctx              2476 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			vb2_plane_size(vb, 1), ctx->chroma_size);
ctx              2477 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (vb2_plane_size(vb, 0) < ctx->luma_size ||
ctx              2478 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		    vb2_plane_size(vb, 1) < ctx->chroma_size) {
ctx              2491 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(q->drv_priv);
ctx              2492 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2497 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if ((ctx->state == MFCINST_GOT_INST) &&
ctx              2498 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			(dev->curr_ctx == ctx->num) && dev->hw_lock) {
ctx              2499 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			s5p_mfc_wait_for_done_ctx(ctx,
ctx              2504 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->src_bufs_cnt < ctx->pb_count) {
ctx              2506 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					ctx->pb_count);
ctx              2512 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (s5p_mfc_ctx_ready(ctx))
ctx              2513 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		set_work_bit_irqsave(ctx);
ctx              2522 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(q->drv_priv);
ctx              2523 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2525 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if ((ctx->state == MFCINST_FINISHING ||
ctx              2526 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state == MFCINST_RUNNING) &&
ctx              2527 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		dev->curr_ctx == ctx->num && dev->hw_lock) {
ctx              2528 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->state = MFCINST_ABORT;
ctx              2529 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_wait_for_done_ctx(ctx, S5P_MFC_R2H_CMD_FRAME_DONE_RET,
ctx              2532 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	ctx->state = MFCINST_FINISHED;
ctx              2535 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_cleanup_queue(&ctx->dst_queue, &ctx->vq_dst);
ctx              2536 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		INIT_LIST_HEAD(&ctx->dst_queue);
ctx              2537 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_queue_cnt = 0;
ctx              2540 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		cleanup_ref_queue(ctx);
ctx              2541 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		s5p_mfc_cleanup_queue(&ctx->src_queue, &ctx->vq_src);
ctx              2542 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		INIT_LIST_HEAD(&ctx->src_queue);
ctx              2543 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_queue_cnt = 0;
ctx              2551 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_ctx *ctx = fh_to_ctx(vq->drv_priv);
ctx              2552 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2556 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->state == MFCINST_ERROR) {
ctx              2558 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		cleanup_ref_queue(ctx);
ctx              2562 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mfc_buf = &ctx->dst_bufs[vb->index];
ctx              2566 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		list_add_tail(&mfc_buf->list, &ctx->dst_queue);
ctx              2567 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->dst_queue_cnt++;
ctx              2570 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		mfc_buf = &ctx->src_bufs[vb->index];
ctx              2573 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		list_add_tail(&mfc_buf->list, &ctx->src_queue);
ctx              2574 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->src_queue_cnt++;
ctx              2579 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (s5p_mfc_ctx_ready(ctx))
ctx              2580 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		set_work_bit_irqsave(ctx);
ctx              2613 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c int s5p_mfc_enc_ctrls_setup(struct s5p_mfc_ctx *ctx)
ctx              2618 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, NUM_CTRLS);
ctx              2619 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	if (ctx->ctrl_handler.error) {
ctx              2621 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		return ctx->ctrl_handler.error;
ctx              2643 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->ctrls[i] = v4l2_ctrl_new_custom(&ctx->ctrl_handler,
ctx              2649 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->ctrls[i] = v4l2_ctrl_new_std_menu(
ctx              2650 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					&ctx->ctrl_handler,
ctx              2655 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 				ctx->ctrls[i] = v4l2_ctrl_new_std(
ctx              2656 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 					&ctx->ctrl_handler,
ctx              2663 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (ctx->ctrl_handler.error) {
ctx              2665 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			return ctx->ctrl_handler.error;
ctx              2667 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		if (controls[i].is_volatile && ctx->ctrls[i])
ctx              2668 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 			ctx->ctrls[i]->flags |= V4L2_CTRL_FLAG_VOLATILE;
ctx              2670 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx              2674 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c void s5p_mfc_enc_ctrls_delete(struct s5p_mfc_ctx *ctx)
ctx              2678 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              2680 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 		ctx->ctrls[i] = NULL;
ctx              2683 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c void s5p_mfc_enc_init(struct s5p_mfc_ctx *ctx)
ctx              2687 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	ctx->src_fmt = find_format(&f, MFC_FMT_RAW);
ctx              2689 drivers/media/platform/s5p-mfc/s5p_mfc_enc.c 	ctx->dst_fmt = find_format(&f, MFC_FMT_ENC);
ctx                16 drivers/media/platform/s5p-mfc/s5p_mfc_enc.h int s5p_mfc_enc_ctrls_setup(struct s5p_mfc_ctx *ctx);
ctx                17 drivers/media/platform/s5p-mfc/s5p_mfc_enc.h void s5p_mfc_enc_ctrls_delete(struct s5p_mfc_ctx *ctx);
ctx                18 drivers/media/platform/s5p-mfc/s5p_mfc_enc.h void s5p_mfc_enc_init(struct s5p_mfc_ctx *ctx);
ctx                51 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c int s5p_mfc_wait_for_done_ctx(struct s5p_mfc_ctx *ctx,
ctx                57 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 		ret = wait_event_interruptible_timeout(ctx->queue,
ctx                58 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 				(ctx->int_cond && (ctx->int_type == command
ctx                59 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 			|| ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)),
ctx                62 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 		ret = wait_event_timeout(ctx->queue,
ctx                63 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 				(ctx->int_cond && (ctx->int_type == command
ctx                64 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 			|| ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)),
ctx                69 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 							ctx->int_type, command);
ctx                76 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 							ctx->int_type, command);
ctx                77 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 	if (ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)
ctx                82 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c void s5p_mfc_clean_ctx_int_flags(struct s5p_mfc_ctx *ctx)
ctx                84 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 	ctx->int_cond = 0;
ctx                85 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 	ctx->int_type = 0;
ctx                86 drivers/media/platform/s5p-mfc/s5p_mfc_intr.c 	ctx->int_err = 0;
ctx                17 drivers/media/platform/s5p-mfc/s5p_mfc_intr.h int s5p_mfc_wait_for_done_ctx(struct s5p_mfc_ctx *ctx,
ctx                20 drivers/media/platform/s5p-mfc/s5p_mfc_intr.h void s5p_mfc_clean_ctx_int_flags(struct s5p_mfc_ctx *ctx);
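
The wait helper above is the driver's single rendezvous point with the firmware: the interrupt handler fills ctx->int_type/int_cond and wakes ctx->queue. A condensed sketch of the non-interruptible branch (the timeout constant is assumed; callers treat any non-zero return as failure):

	ret = wait_event_timeout(ctx->queue,
			ctx->int_cond && (ctx->int_type == command ||
				ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET),
			msecs_to_jiffies(MFC_INT_TIMEOUT));
	if (ret == 0) {
		mfc_err("Interrupt (ctx->int_type:%d, command:%d) timed out\n",
			ctx->int_type, command);
		return 1;
	}
	if (ctx->int_type == S5P_MFC_R2H_CMD_ERR_RET)
		return 1;	/* firmware replied with an error */
	return 0;
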
ctx                60 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		b->ctx = mem_ctx;
ctx                86 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	b->ctx = mem_ctx;
ctx               107 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 		struct device *mem_dev = dev->mem_dev[b->ctx];
ctx               119 drivers/media/platform/s5p-mfc/s5p_mfc_opr.c 	struct device *mem_dev = dev->mem_dev[b->ctx];
ctx               283 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	int (*alloc_dec_temp_buffers)(struct s5p_mfc_ctx *ctx);
ctx               284 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*release_dec_desc_buffer)(struct s5p_mfc_ctx *ctx);
ctx               285 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	int (*alloc_codec_buffers)(struct s5p_mfc_ctx *ctx);
ctx               286 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*release_codec_buffers)(struct s5p_mfc_ctx *ctx);
ctx               287 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	int (*alloc_instance_buffer)(struct s5p_mfc_ctx *ctx);
ctx               288 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*release_instance_buffer)(struct s5p_mfc_ctx *ctx);
ctx               291 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*dec_calc_dpb_size)(struct s5p_mfc_ctx *ctx);
ctx               292 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*enc_calc_src_size)(struct s5p_mfc_ctx *ctx);
ctx               293 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	int (*set_enc_stream_buffer)(struct s5p_mfc_ctx *ctx,
ctx               295 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*set_enc_frame_buffer)(struct s5p_mfc_ctx *ctx,
ctx               297 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	void (*get_enc_frame_buffer)(struct s5p_mfc_ctx *ctx,
ctx               306 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	int (*get_disp_frame_type)(struct s5p_mfc_ctx *ctx);
ctx               319 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	unsigned int (*get_pic_type_top)(struct s5p_mfc_ctx *ctx);
ctx               320 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	unsigned int (*get_pic_type_bot)(struct s5p_mfc_ctx *ctx);
ctx               321 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	unsigned int (*get_crop_info_h)(struct s5p_mfc_ctx *ctx);
ctx               322 drivers/media/platform/s5p-mfc/s5p_mfc_opr.h 	unsigned int (*get_crop_info_v)(struct s5p_mfc_ctx *ctx);
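
The function-pointer table above is how the core code stays agnostic of the hardware revision: each opr_v5/opr_v6 file fills one instance and probe selects it, so every s5p_mfc_hw_call() site seen earlier indirects through it. A sketch of the v5 wiring using the setters defined below (only a subset of fields shown):

	static struct s5p_mfc_hw_ops s5p_mfc_ops_v5 = {
		.alloc_dec_temp_buffers = s5p_mfc_alloc_dec_temp_buffers_v5,
		.release_dec_desc_buffer = s5p_mfc_release_dec_desc_buffer_v5,
		.alloc_codec_buffers = s5p_mfc_alloc_codec_buffers_v5,
		.release_codec_buffers = s5p_mfc_release_codec_buffers_v5,
		.alloc_instance_buffer = s5p_mfc_alloc_instance_buffer_v5,
		.release_instance_buffer = s5p_mfc_release_instance_buffer_v5,
		.dec_calc_dpb_size = s5p_mfc_dec_calc_dpb_size_v5,
		.write_info = s5p_mfc_write_info_v5,
		.read_info = s5p_mfc_read_info_v5,
	};
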
ctx                34 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx)
ctx                36 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                40 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->dsc.size = buf_size->dsc;
ctx                41 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ret =  s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->dsc);
ctx                47 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
ctx                48 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	memset(ctx->dsc.virt, 0, ctx->dsc.size);
ctx                55 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx)
ctx                57 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->dsc);
ctx                61 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
ctx                63 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                69 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->type == MFCINST_DECODER) {
ctx                71 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
ctx                72 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_debug(2, "Total bufs: %d\n", ctx->total_dpb_count);
ctx                73 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	} else if (ctx->type == MFCINST_ENCODER) {
ctx                74 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
ctx                75 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
ctx                78 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC) {
ctx                79 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			enc_ref_c_size = ALIGN(ctx->img_width,
ctx                81 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 						* ALIGN(ctx->img_height >> 1,
ctx                86 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			guard_width = ALIGN(ctx->img_width + 16,
ctx                88 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			guard_height = ALIGN((ctx->img_height >> 1) + 4,
ctx                99 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	switch (ctx->codec_mode) {
ctx               101 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size =
ctx               105 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = ctx->total_dpb_count * ctx->mv_size;
ctx               108 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size =
ctx               115 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = 0;
ctx               119 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size =
ctx               126 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = 0;
ctx               129 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size = 0;
ctx               130 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = 0;
ctx               133 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size =
ctx               139 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = 0;
ctx               142 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size = (enc_ref_y_size * 2) +
ctx               147 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = (enc_ref_y_size * 2) +
ctx               152 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size = (enc_ref_y_size * 2) +
ctx               156 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = (enc_ref_y_size * 2) +
ctx               160 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank1.size = (enc_ref_y_size * 2) +
ctx               163 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->bank2.size = (enc_ref_y_size * 2) +
ctx               170 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->bank1.size > 0) {
ctx               172 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ret = s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->bank1);
ctx               177 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		BUG_ON(ctx->bank1.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
ctx               180 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->bank2.size > 0) {
ctx               181 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ret = s5p_mfc_alloc_priv_buf(dev, BANK_R_CTX, &ctx->bank2);
ctx               184 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_release_priv_buf(ctx->dev, &ctx->bank1);
ctx               187 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		BUG_ON(ctx->bank2.dma & ((1 << MFC_BANK2_ALIGN_ORDER) - 1));
ctx               193 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_release_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
ctx               195 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->bank1);
ctx               196 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->bank2);
ctx               200 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_alloc_instance_buffer_v5(struct s5p_mfc_ctx *ctx)
ctx               202 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               206 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
ctx               207 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
ctx               208 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->ctx.size = buf_size->h264_ctx;
ctx               210 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->ctx.size = buf_size->non_h264_ctx;
ctx               212 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ret = s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->ctx);
ctx               217 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->ctx.ofs = OFFSETA(ctx->ctx.dma);
ctx               220 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	memset(ctx->ctx.virt, 0, ctx->ctx.size);
ctx               224 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->shm.size = buf_size->shm;
ctx               225 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ret = s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->shm);
ctx               228 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_release_priv_buf(dev, &ctx->ctx);
ctx               233 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->shm.ofs = ctx->shm.dma - dev->dma_base[BANK_L_CTX];
ctx               234 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	BUG_ON(ctx->shm.ofs & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
ctx               236 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	memset(ctx->shm.virt, 0, buf_size->shm);
ctx               242 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_release_instance_buffer_v5(struct s5p_mfc_ctx *ctx)
ctx               244 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->ctx);
ctx               245 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->shm);
ctx               260 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_write_info_v5(struct s5p_mfc_ctx *ctx, unsigned int data,
ctx               263 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	*(u32 *)(ctx->shm.virt + ofs) = data;
ctx               267 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static unsigned int s5p_mfc_read_info_v5(struct s5p_mfc_ctx *ctx,
ctx               271 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return *(u32 *)(ctx->shm.virt + ofs);
ctx               274 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_dec_calc_dpb_size_v5(struct s5p_mfc_ctx *ctx)
ctx               278 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN);
ctx               279 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx->buf_height = ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
ctx               282 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->img_width,	ctx->img_height, ctx->buf_width,
ctx               283 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->buf_height);
ctx               285 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC) {
ctx               286 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(ctx->buf_width * ctx->buf_height,
ctx               288 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size = ALIGN(ctx->buf_width *
ctx               289 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 				ALIGN((ctx->img_height >> 1),
ctx               292 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->mv_size = ALIGN(ctx->buf_width *
ctx               293 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 				ALIGN((ctx->buf_height >> 2),
ctx               298 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->img_width + 24, S5P_FIMV_NV12MT_HALIGN);
ctx               300 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->img_height + 16, S5P_FIMV_NV12MT_VALIGN);
ctx               301 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(guard_width * guard_height,
ctx               305 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->img_width + 16, S5P_FIMV_NV12MT_HALIGN);
ctx               307 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN((ctx->img_height >> 1) + 4,
ctx               309 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size = ALIGN(guard_width * guard_height,
ctx               312 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->mv_size = 0;
ctx               316 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_enc_calc_src_size_v5(struct s5p_mfc_ctx *ctx)
ctx               318 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M) {
ctx               319 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN);
ctx               321 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN)
ctx               322 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN(ctx->img_height, S5P_FIMV_NV12M_LVALIGN);
ctx               323 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN)
ctx               324 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12M_CVALIGN);
ctx               326 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(ctx->luma_size, S5P_FIMV_NV12M_SALIGN);
ctx               327 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size =
ctx               328 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->chroma_size, S5P_FIMV_NV12M_SALIGN);
ctx               329 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT) {
ctx               330 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN);
ctx               332 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
ctx               333 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
ctx               334 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size =
ctx               335 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
ctx               336 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12MT_VALIGN);
ctx               338 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->luma_size = ALIGN(ctx->luma_size, S5P_FIMV_NV12MT_SALIGN);
ctx               339 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->chroma_size =
ctx               340 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ALIGN(ctx->chroma_size, S5P_FIMV_NV12MT_SALIGN);
ctx               345 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_set_dec_desc_buffer(struct s5p_mfc_ctx *ctx)
ctx               347 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               350 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, OFFSETA(ctx->dsc.dma), S5P_FIMV_SI_CH0_DESC_ADR);
ctx               355 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_set_shared_buffer(struct s5p_mfc_ctx *ctx)
ctx               357 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               358 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->shm.ofs, S5P_FIMV_SI_CH0_HOST_WR_ADR);
ctx               362 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_dec_stream_buffer_v5(struct s5p_mfc_ctx *ctx,
ctx               366 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               369 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->dec_src_buf_size, S5P_FIMV_SI_CH0_CPB_SIZE);
ctx               371 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, start_num_byte, START_BYTE_NUM);
ctx               376 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_dec_frame_buffer_v5(struct s5p_mfc_ctx *ctx)
ctx               380 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               385 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr1 = ctx->bank1.dma;
ctx               386 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_size1 = ctx->bank1.size;
ctx               387 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr2 = ctx->bank2.dma;
ctx               388 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_size2 = ctx->bank2.size;
ctx               391 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->total_dpb_count | dpb,
ctx               393 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_shared_buffer(ctx);
ctx               394 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	switch (ctx->codec_mode) {
ctx               463 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ctx->codec_mode);
ctx               466 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	frame_size_lu = ctx->luma_size;
ctx               467 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	frame_size_ch = ctx->chroma_size;
ctx               468 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	frame_size_mv = ctx->mv_size;
ctx               471 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	for (i = 0; i < ctx->total_dpb_count; i++) {
ctx               474 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx->dst_bufs[i].cookie.raw.luma);
ctx               475 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_write(dev, OFFSETB(ctx->dst_bufs[i].cookie.raw.luma),
ctx               478 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx->dst_bufs[i].cookie.raw.chroma);
ctx               479 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_write(dev, OFFSETA(ctx->dst_bufs[i].cookie.raw.chroma),
ctx               481 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC) {
ctx               492 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			buf_size1,  buf_size2, ctx->total_dpb_count);
ctx               497 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, frame_size_lu, ALLOC_LUMA_DPB_SIZE);
ctx               498 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, frame_size_ch, ALLOC_CHROMA_DPB_SIZE);
ctx               499 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC)
ctx               500 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_write_info_v5(ctx, frame_size_mv, ALLOC_MV_SIZE);
ctx               502 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					<< S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
ctx               508 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_stream_buffer_v5(struct s5p_mfc_ctx *ctx,
ctx               511 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               518 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_set_enc_frame_buffer_v5(struct s5p_mfc_ctx *ctx,
ctx               521 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               527 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_get_enc_frame_buffer_v5(struct s5p_mfc_ctx *ctx,
ctx               530 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               539 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_ref_buffer_v5(struct s5p_mfc_ctx *ctx)
ctx               541 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               548 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr1 = ctx->bank1.dma;
ctx               549 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_size1 = ctx->bank1.size;
ctx               550 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_addr2 = ctx->bank2.dma;
ctx               551 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	buf_size2 = ctx->bank2.size;
ctx               552 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	enc_ref_y_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
ctx               553 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		* ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN);
ctx               555 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC) {
ctx               556 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		enc_ref_c_size = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN)
ctx               557 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			* ALIGN((ctx->img_height >> 1), S5P_FIMV_NV12MT_VALIGN);
ctx               560 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		guard_width = ALIGN(ctx->img_width + 16,
ctx               562 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		guard_height = ALIGN((ctx->img_height >> 1) + 4,
ctx               568 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	switch (ctx->codec_mode) {
ctx               668 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ctx->codec_mode);
ctx               674 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
ctx               676 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               677 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx               682 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->img_width, S5P_FIMV_ENC_HSIZE_PX);
ctx               684 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->img_height, S5P_FIMV_ENC_VSIZE_PX);
ctx               706 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
ctx               708 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
ctx               744 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
ctx               751 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
ctx               753 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, p->fixed_target_bit, RC_CONTROL_CONFIG);
ctx               757 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
ctx               759 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               760 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx               765 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_params(ctx);
ctx               785 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_write(dev, ctx->img_height >> 1, S5P_FIMV_ENC_VSIZE_PX);
ctx               864 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
ctx               868 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
ctx               871 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
ctx               875 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
ctx               878 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		shm = s5p_mfc_read_info_v5(ctx, SAMPLE_ASPECT_RATIO_IDC);
ctx               881 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_write_info_v5(ctx, shm, SAMPLE_ASPECT_RATIO_IDC);
ctx               884 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			shm = s5p_mfc_read_info_v5(ctx, EXTENDED_SAR);
ctx               888 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_write_info_v5(ctx, shm, EXTENDED_SAR);
ctx               892 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, H264_I_PERIOD);
ctx               901 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, H264_I_PERIOD);
ctx               903 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
ctx               910 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
ctx               914 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
ctx               916 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               917 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx               923 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_params(ctx);
ctx               943 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
ctx               947 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
ctx               956 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			shm = s5p_mfc_read_info_v5(ctx, RC_VOP_TIMING);
ctx               961 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_write_info_v5(ctx, shm, RC_VOP_TIMING);
ctx               982 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
ctx               989 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
ctx               993 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
ctx               995 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               996 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1001 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_params(ctx);
ctx              1004 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		shm = s5p_mfc_read_info_v5(ctx, P_B_FRAME_QP);
ctx              1007 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_write_info_v5(ctx, shm, P_B_FRAME_QP);
ctx              1031 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	shm = s5p_mfc_read_info_v5(ctx, EXT_ENC_CONTROL);
ctx              1038 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_write_info_v5(ctx, shm, EXT_ENC_CONTROL);
ctx              1043 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_init_decode_v5(struct s5p_mfc_ctx *ctx)
ctx              1045 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1047 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_shared_buffer(ctx);
ctx              1049 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_DEC)
ctx              1050 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_write(dev, ctx->loop_filter_mpeg4, S5P_FIMV_ENC_LF_CTRL);
ctx              1053 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ((ctx->slice_interface & S5P_FIMV_SLICE_INT_MASK) <<
ctx              1054 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		S5P_FIMV_SLICE_INT_SHIFT) | (ctx->display_delay_enable <<
ctx              1055 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		S5P_FIMV_DDELAY_ENA_SHIFT) | ((ctx->display_delay &
ctx              1060 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 				| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
ctx              1064 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
ctx              1066 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1079 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_decode_one_frame_v5(struct s5p_mfc_ctx *ctx,
ctx              1082 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1084 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	mfc_write(dev, ctx->dec_dst_flag, S5P_FIMV_SI_CH0_RELEASE_BUF);
ctx              1085 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_shared_buffer(ctx);
ctx              1086 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_flush(ctx, ctx->dpb_flush_flag);
ctx              1092 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
ctx              1096 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		S5P_FIMV_CH_SHIFT) | (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
ctx              1100 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		S5P_FIMV_CH_MASK) << S5P_FIMV_CH_SHIFT) | (ctx->inst_no),
ctx              1108 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_init_encode_v5(struct s5p_mfc_ctx *ctx)
ctx              1110 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1112 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
ctx              1113 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_enc_params_h264(ctx);
ctx              1114 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	else if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_ENC)
ctx              1115 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_enc_params_mpeg4(ctx);
ctx              1116 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	else if (ctx->codec_mode == S5P_MFC_CODEC_H263_ENC)
ctx              1117 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_enc_params_h263(ctx);
ctx              1120 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ctx->codec_mode);
ctx              1123 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_shared_buffer(ctx);
ctx              1125 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		(ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
ctx              1130 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_encode_one_frame_v5(struct s5p_mfc_ctx *ctx)
ctx              1132 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1135 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M)
ctx              1137 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT)
ctx              1139 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_shared_buffer(ctx);
ctx              1141 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->state == MFCINST_FINISHING)
ctx              1146 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 				| (ctx->inst_no), S5P_FIMV_SI_CH0_INST_ID);
ctx              1151 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_run_res_change(struct s5p_mfc_ctx *ctx)
ctx              1153 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1155 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_dec_stream_buffer_v5(ctx, 0, 0, 0);
ctx              1156 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1157 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_decode_one_frame_v5(ctx, MFC_DEC_RES_CHANGE);
ctx              1160 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx, int last_frame)
ctx              1162 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1165 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->state == MFCINST_FINISHING) {
ctx              1167 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_dec_stream_buffer_v5(ctx, 0, 0, 0);
ctx              1168 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		dev->curr_ctx = ctx->num;
ctx              1169 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_decode_one_frame_v5(ctx, last_frame);
ctx              1174 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (list_empty(&ctx->src_queue)) {
ctx              1179 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1181 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_dec_stream_buffer_v5(ctx,
ctx              1183 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->consumed_stream, temp_vb->b->vb2_buf.planes[0].bytesused);
ctx              1184 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1188 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->state = MFCINST_FINISHING;
ctx              1190 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_decode_one_frame_v5(ctx, last_frame);
ctx              1194 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
ctx              1196 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1202 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (list_empty(&ctx->src_queue) && ctx->state != MFCINST_FINISHING) {
ctx              1206 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (list_empty(&ctx->dst_queue)) {
ctx              1210 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (list_empty(&ctx->src_queue)) {
ctx              1212 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_enc_frame_buffer_v5(ctx, dev->dma_base[BANK_R_CTX],
ctx              1216 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf,
ctx              1221 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_set_enc_frame_buffer_v5(ctx,
ctx              1224 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ctx->state = MFCINST_FINISHING;
ctx              1230 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_set_enc_frame_buffer_v5(ctx, src_y_addr,
ctx              1233 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 				ctx->state = MFCINST_FINISHING;
ctx              1236 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1240 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_stream_buffer_v5(ctx, dst_addr, dst_size);
ctx              1241 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1243 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		  src_mb ? src_mb->b->vb2_buf.index : -1, ctx->state);
ctx              1244 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_encode_one_frame_v5(ctx);
ctx              1248 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
ctx              1250 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1255 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1256 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_dec_desc_buffer(ctx);
ctx              1259 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_dec_stream_buffer_v5(ctx,
ctx              1262 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1263 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_init_decode_v5(ctx);
ctx              1266 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
ctx              1268 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1273 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_ref_buffer_v5(ctx);
ctx              1274 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1277 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_enc_stream_buffer_v5(ctx, dst_addr, dst_size);
ctx              1278 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1279 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_init_encode_v5(ctx);
ctx              1282 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
ctx              1284 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1292 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
ctx              1296 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (list_empty(&ctx->src_queue)) {
ctx              1300 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1303 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_set_dec_stream_buffer_v5(ctx,
ctx              1306 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	dev->curr_ctx = ctx->num;
ctx              1307 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ret = s5p_mfc_set_dec_frame_buffer_v5(ctx);
ctx              1310 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		ctx->state = MFCINST_ERROR;
ctx              1318 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	struct s5p_mfc_ctx *ctx;
ctx              1343 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	ctx = dev->ctx[new_ctx];
ctx              1350 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	s5p_mfc_clean_ctx_int_flags(ctx);
ctx              1352 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	if (ctx->type == MFCINST_DECODER) {
ctx              1353 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		s5p_mfc_set_dec_desc_buffer(ctx);
ctx              1354 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		switch (ctx->state) {
ctx              1356 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_dec_frame(ctx, MFC_DEC_LAST_FRAME);
ctx              1359 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ret = s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
ctx              1363 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx);
ctx              1367 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx);
ctx              1370 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_init_dec(ctx);
ctx              1373 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ret = s5p_mfc_run_init_dec_buffers(ctx);
ctx              1377 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_res_change(ctx);
ctx              1380 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_dec_frame(ctx, MFC_DEC_FRAME);
ctx              1384 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ctx->capture_state = QUEUE_FREE;
ctx              1386 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_init_dec(ctx);
ctx              1391 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	} else if (ctx->type == MFCINST_ENCODER) {
ctx              1392 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		switch (ctx->state) {
ctx              1395 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			ret = s5p_mfc_run_enc_frame(ctx);
ctx              1399 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx);
ctx              1403 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 					ctx);
ctx              1406 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 			s5p_mfc_run_init_enc(ctx);
ctx              1412 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 		mfc_err("Invalid context type: %d\n", ctx->type);
ctx              1462 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static int s5p_mfc_get_disp_frame_type_v5(struct s5p_mfc_ctx *ctx)
ctx              1464 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return (s5p_mfc_read_info_v5(ctx, DISP_PIC_FRAME_TYPE) >>
ctx              1573 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static unsigned int s5p_mfc_get_pic_type_top_v5(struct s5p_mfc_ctx *ctx)
ctx              1575 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return s5p_mfc_read_info_v5(ctx, PIC_TIME_TOP);
ctx              1578 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static unsigned int s5p_mfc_get_pic_type_bot_v5(struct s5p_mfc_ctx *ctx)
ctx              1580 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return s5p_mfc_read_info_v5(ctx, PIC_TIME_BOT);
ctx              1583 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static unsigned int s5p_mfc_get_crop_info_h_v5(struct s5p_mfc_ctx *ctx)
ctx              1585 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return s5p_mfc_read_info_v5(ctx, CROP_INFO_H);
ctx              1588 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c static unsigned int s5p_mfc_get_crop_info_v_v5(struct s5p_mfc_ctx *ctx)
ctx              1590 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v5.c 	return s5p_mfc_read_info_v5(ctx, CROP_INFO_V);
ctx                46 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_alloc_dec_temp_buffers_v6(struct s5p_mfc_ctx *ctx)
ctx                54 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_release_dec_desc_buffer_v6(struct s5p_mfc_ctx *ctx)
ctx                60 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_alloc_codec_buffers_v6(struct s5p_mfc_ctx *ctx)
ctx                62 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx                67 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mb_width = MB_WIDTH(ctx->img_width);
ctx                68 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mb_height = MB_HEIGHT(ctx->img_height);
ctx                70 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->type == MFCINST_DECODER) {
ctx                72 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			  ctx->luma_size, ctx->chroma_size, ctx->mv_size);
ctx                73 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		mfc_debug(2, "Totals bufs: %d\n", ctx->total_dpb_count);
ctx                74 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->type == MFCINST_ENCODER) {
ctx                76 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->tmv_buffer_size = 0;
ctx                78 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->tmv_buffer_size = S5P_FIMV_NUM_TMV_BUFFERS_V6 *
ctx                82 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->tmv_buffer_size = S5P_FIMV_NUM_TMV_BUFFERS_V6 *
ctx                86 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			lcu_width = S5P_MFC_LCU_WIDTH(ctx->img_width);
ctx                87 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			lcu_height = S5P_MFC_LCU_HEIGHT(ctx->img_height);
ctx                88 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			if (ctx->codec_mode != S5P_FIMV_CODEC_HEVC_ENC) {
ctx                89 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->luma_dpb_size =
ctx                93 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->chroma_dpb_size =
ctx                98 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->luma_dpb_size =
ctx               102 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->chroma_dpb_size =
ctx               108 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->luma_dpb_size = ALIGN((mb_width * mb_height) *
ctx               111 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->chroma_dpb_size = ALIGN((mb_width * mb_height) *
ctx               116 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->me_buffer_size = ALIGN(S5P_FIMV_ME_BUFFER_SIZE_V8(
ctx               117 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 						ctx->img_width, ctx->img_height,
ctx               121 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->me_buffer_size = ALIGN(S5P_FIMV_ME_BUFFER_SIZE_V6(
ctx               122 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 						ctx->img_width, ctx->img_height,
ctx               127 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			  ctx->luma_dpb_size, ctx->chroma_dpb_size);
ctx               133 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	switch (ctx->codec_mode) {
ctx               139 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               144 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               148 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               150 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               151 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size +
ctx               152 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->mv_count * ctx->mv_size);
ctx               158 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               163 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               169 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               171 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size = ctx->scratch_buf_size;
ctx               178 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               183 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               185 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size = ctx->scratch_buf_size;
ctx               188 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size = 0;
ctx               189 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank2.size = 0;
ctx               195 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               199 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               201 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size = ctx->scratch_buf_size;
ctx               207 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               212 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               216 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               218 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size = ctx->scratch_buf_size;
ctx               222 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               223 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size +
ctx               224 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->mv_count * ctx->mv_size);
ctx               228 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               229 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size +
ctx               235 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->me_buffer_size =
ctx               238 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               243 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               247 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               249 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               250 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size + ctx->tmv_buffer_size +
ctx               251 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->pb_count * (ctx->luma_dpb_size +
ctx               252 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->chroma_dpb_size + ctx->me_buffer_size));
ctx               253 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank2.size = 0;
ctx               259 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->me_buffer_size =
ctx               263 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               267 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               269 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               270 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size + ctx->tmv_buffer_size +
ctx               271 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->pb_count * (ctx->luma_dpb_size +
ctx               272 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->chroma_dpb_size + ctx->me_buffer_size));
ctx               273 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank2.size = 0;
ctx               278 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->me_buffer_size =
ctx               282 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               287 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size =
ctx               291 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size,
ctx               293 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               294 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size + ctx->tmv_buffer_size +
ctx               295 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->pb_count * (ctx->luma_dpb_size +
ctx               296 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->chroma_dpb_size + ctx->me_buffer_size));
ctx               297 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank2.size = 0;
ctx               301 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->me_buffer_size =
ctx               303 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->scratch_buf_size = ALIGN(ctx->scratch_buf_size, 256);
ctx               304 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank1.size =
ctx               305 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->scratch_buf_size + ctx->tmv_buffer_size +
ctx               306 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			(ctx->pb_count * (ctx->luma_dpb_size +
ctx               307 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->chroma_dpb_size + ctx->me_buffer_size));
ctx               308 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->bank2.size = 0;
ctx               315 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->bank1.size > 0) {
ctx               316 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ret = s5p_mfc_alloc_generic_buf(dev, BANK_L_CTX, &ctx->bank1);
ctx               321 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		BUG_ON(ctx->bank1.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
ctx               327 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_release_codec_buffers_v6(struct s5p_mfc_ctx *ctx)
ctx               329 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_release_generic_buf(ctx->dev, &ctx->bank1);
ctx               333 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_alloc_instance_buffer_v6(struct s5p_mfc_ctx *ctx)
ctx               335 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               341 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	switch (ctx->codec_mode) {
ctx               345 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = buf_size->h264_dec_ctx;
ctx               354 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = buf_size->other_dec_ctx;
ctx               357 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = buf_size->h264_enc_ctx;
ctx               360 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = buf_size->hevc_enc_ctx;
ctx               365 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = buf_size->other_enc_ctx;
ctx               368 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->ctx.size = 0;
ctx               369 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		mfc_err("Codec type(%d) should be checked!\n", ctx->codec_mode);
ctx               373 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ret = s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->ctx);
ctx               379 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	memset(ctx->ctx.virt, 0, ctx->ctx.size);
ctx               388 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_release_instance_buffer_v6(struct s5p_mfc_ctx *ctx)
ctx               390 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_release_priv_buf(ctx->dev, &ctx->ctx);
ctx               436 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_dec_calc_dpb_size_v6(struct s5p_mfc_ctx *ctx)
ctx               438 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               439 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12MT_HALIGN_V6);
ctx               440 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->buf_height = ALIGN(ctx->img_height, S5P_FIMV_NV12MT_VALIGN_V6);
ctx               442 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			"buffer dimensions: %dx%d\n", ctx->img_width,
ctx               443 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->img_height, ctx->buf_width, ctx->buf_height);
ctx               445 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->luma_size = calc_plane(ctx->img_width, ctx->img_height);
ctx               446 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->chroma_size = calc_plane(ctx->img_width, (ctx->img_height >> 1));
ctx               447 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (IS_MFCV8_PLUS(ctx->dev)) {
ctx               449 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->luma_size += S5P_FIMV_D_ALIGN_PLANE_SIZE_V8;
ctx               450 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->chroma_size += S5P_FIMV_D_ALIGN_PLANE_SIZE_V8;
ctx               453 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
ctx               454 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode == S5P_MFC_CODEC_H264_MVC_DEC) {
ctx               456 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->mv_size = S5P_MFC_DEC_MV_SIZE_V10(ctx->img_width,
ctx               457 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx->img_height);
ctx               459 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->mv_size = S5P_MFC_DEC_MV_SIZE_V6(ctx->img_width,
ctx               460 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx->img_height);
ctx               462 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->codec_mode == S5P_MFC_CODEC_HEVC_DEC) {
ctx               463 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->mv_size = s5p_mfc_dec_hevc_mv_size(ctx->img_width,
ctx               464 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->img_height);
ctx               465 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->mv_size = ALIGN(ctx->mv_size, 32);
ctx               467 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->mv_size = 0;
ctx               471 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_enc_calc_src_size_v6(struct s5p_mfc_ctx *ctx)
ctx               475 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mb_width = MB_WIDTH(ctx->img_width);
ctx               476 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mb_height = MB_HEIGHT(ctx->img_height);
ctx               478 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->buf_width = ALIGN(ctx->img_width, S5P_FIMV_NV12M_HALIGN_V6);
ctx               479 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->luma_size = ALIGN((mb_width * mb_height) * 256, 256);
ctx               480 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->chroma_size = ALIGN((mb_width * mb_height) * 128, 256);
ctx               483 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (IS_MFCV7_PLUS(ctx->dev)) {
ctx               484 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->luma_size += MFC_LUMA_PAD_BYTES_V7;
ctx               485 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->chroma_size += MFC_CHROMA_PAD_BYTES_V7;
ctx               490 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_dec_stream_buffer_v6(struct s5p_mfc_ctx *ctx,
ctx               494 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               501 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->inst_no, buf_addr, strm_size, strm_size);
ctx               512 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_dec_frame_buffer_v6(struct s5p_mfc_ctx *ctx)
ctx               516 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               522 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 = ctx->bank1.dma;
ctx               523 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_size1 = ctx->bank1.size;
ctx               526 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mfc_debug(2, "Total DPB COUNT: %d\n", ctx->total_dpb_count);
ctx               527 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mfc_debug(2, "Setting display delay to %d\n", ctx->display_delay);
ctx               529 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->total_dpb_count, mfc_regs->d_num_dpb);
ctx               530 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->luma_size, mfc_regs->d_first_plane_dpb_size);
ctx               531 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->chroma_size, mfc_regs->d_second_plane_dpb_size);
ctx               534 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->scratch_buf_size, mfc_regs->d_scratch_buffer_size);
ctx               537 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_width,
ctx               539 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_width,
ctx               543 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 += ctx->scratch_buf_size;
ctx               544 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_size1 -= ctx->scratch_buf_size;
ctx               546 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_FIMV_CODEC_H264_DEC ||
ctx               547 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode == S5P_FIMV_CODEC_H264_MVC_DEC ||
ctx               548 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode == S5P_FIMV_CODEC_HEVC_DEC) {
ctx               549 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->mv_size, mfc_regs->d_mv_buffer_size);
ctx               550 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->mv_count, mfc_regs->d_num_mv);
ctx               553 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	frame_size = ctx->luma_size;
ctx               554 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	frame_size_ch = ctx->chroma_size;
ctx               555 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	frame_size_mv = ctx->mv_size;
ctx               559 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	for (i = 0; i < ctx->total_dpb_count; i++) {
ctx               562 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx->dst_bufs[i].cookie.raw.luma);
ctx               563 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->dst_bufs[i].cookie.raw.luma,
ctx               566 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx->dst_bufs[i].cookie.raw.chroma);
ctx               567 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->dst_bufs[i].cookie.raw.chroma,
ctx               570 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_DEC ||
ctx               571 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode == S5P_MFC_CODEC_H264_MVC_DEC ||
ctx               572 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode == S5P_MFC_CODEC_HEVC_DEC) {
ctx               573 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		for (i = 0; i < ctx->mv_count; i++) {
ctx               587 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_FIMV_CODEC_VP9_DEC) {
ctx               596 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1, buf_size1, ctx->total_dpb_count);
ctx               602 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
ctx               611 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_stream_buffer_v6(struct s5p_mfc_ctx *ctx,
ctx               614 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               626 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_set_enc_frame_buffer_v6(struct s5p_mfc_ctx *ctx,
ctx               629 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               639 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static void s5p_mfc_get_enc_frame_buffer_v6(struct s5p_mfc_ctx *ctx,
ctx               642 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               657 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_ref_buffer_v6(struct s5p_mfc_ctx *ctx)
ctx               659 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               666 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 = ctx->bank1.dma;
ctx               667 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_size1 = ctx->bank1.size;
ctx               673 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		for (i = 0; i < ctx->pb_count; i++) {
ctx               675 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->luma_dpb_size;
ctx               676 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_size1 -= ctx->luma_dpb_size;
ctx               678 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		for (i = 0; i < ctx->pb_count; i++) {
ctx               680 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->chroma_dpb_size;
ctx               681 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_size1 -= ctx->chroma_dpb_size;
ctx               683 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		for (i = 0; i < ctx->pb_count; i++) {
ctx               685 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->me_buffer_size;
ctx               686 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_size1 -= ctx->me_buffer_size;
ctx               689 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		for (i = 0; i < ctx->pb_count; i++) {
ctx               691 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->luma_dpb_size;
ctx               693 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->chroma_dpb_size;
ctx               695 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1 += ctx->me_buffer_size;
ctx               696 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_size1 -= (ctx->luma_dpb_size + ctx->chroma_dpb_size
ctx               697 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					+ ctx->me_buffer_size);
ctx               702 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->scratch_buf_size, mfc_regs->e_scratch_buffer_size);
ctx               703 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 += ctx->scratch_buf_size;
ctx               704 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_size1 -= ctx->scratch_buf_size;
ctx               707 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 += ctx->tmv_buffer_size >> 1;
ctx               709 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_addr1 += ctx->tmv_buffer_size >> 1;
ctx               710 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	buf_size1 -= ctx->tmv_buffer_size;
ctx               713 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			buf_addr1, buf_size1, ctx->pb_count);
ctx               719 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
ctx               728 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_slice_mode(struct s5p_mfc_ctx *ctx)
ctx               730 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               735 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->slice_mode, mfc_regs->e_mslice_mode);
ctx               736 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->slice_mode == V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_MAX_MB) {
ctx               737 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->slice_size.mb, mfc_regs->e_mslice_size_mb);
ctx               738 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->slice_mode ==
ctx               740 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->slice_size.bits, mfc_regs->e_mslice_size_bits);
ctx               749 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params(struct s5p_mfc_ctx *ctx)
ctx               751 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               753 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx               759 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->img_width, mfc_regs->e_frame_width); /* 16 align */
ctx               761 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->img_height, mfc_regs->e_frame_height); /* 16 align */
ctx               764 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->img_width, mfc_regs->e_cropped_frame_width);
ctx               766 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->img_height, mfc_regs->e_cropped_frame_height);
ctx               777 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx->slice_mode = p->slice_mode;
ctx               782 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->slice_size.mb = p->slice_mb;
ctx               786 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->slice_size.bits = p->slice_bit;
ctx               792 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_slice_mode(ctx);
ctx               809 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12M) {
ctx               816 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV21M) {
ctx               823 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->src_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16) {
ctx               914 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params_h264(struct s5p_mfc_ctx *ctx)
ctx               916 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx               918 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx               925 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_params(ctx);
ctx               996 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_height >> 1,
ctx               999 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_height >> 1,
ctx              1195 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params_mpeg4(struct s5p_mfc_ctx *ctx)
ctx              1197 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1199 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1205 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_params(ctx);
ctx              1277 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params_h263(struct s5p_mfc_ctx *ctx)
ctx              1279 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1281 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1287 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_params(ctx);
ctx              1347 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params_vp8(struct s5p_mfc_ctx *ctx)
ctx              1349 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1351 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1358 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_params(ctx);
ctx              1440 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_set_enc_params_hevc(struct s5p_mfc_ctx *ctx)
ctx              1442 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1444 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1451 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_params(ctx);
ctx              1461 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if ((ctx->img_width == 3840) && (ctx->img_height == 2160)) {
ctx              1597 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_init_decode_v6(struct s5p_mfc_ctx *ctx)
ctx              1599 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1605 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mfc_debug(2, "InstNo: %d/%d\n", ctx->inst_no,
ctx              1615 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->display_delay_enable) {
ctx              1617 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->display_delay, mfc_regs->d_display_delay);
ctx              1626 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_DEC) {
ctx              1628 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->loop_filter_mpeg4);
ctx              1629 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		reg |= (ctx->loop_filter_mpeg4 <<
ctx              1632 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV12MT_16X16)
ctx              1641 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->dst_fmt->fourcc == V4L2_PIX_FMT_NV21M)
ctx              1648 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->sei_fp_parse & 0x1, mfc_regs->d_sei_enable);
ctx              1650 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
ctx              1658 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline void s5p_mfc_set_flush(struct s5p_mfc_ctx *ctx, int flush)
ctx              1660 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1664 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		dev->curr_ctx = ctx->num;
ctx              1665 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->inst_no, mfc_regs->instance_id);
ctx              1672 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_decode_one_frame_v6(struct s5p_mfc_ctx *ctx,
ctx              1675 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1678 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->dec_dst_flag, mfc_regs->d_available_dpb_flag_lower);
ctx              1679 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->slice_interface & 0x1, mfc_regs->d_slice_if_enable);
ctx              1681 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
ctx              1702 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_init_encode_v6(struct s5p_mfc_ctx *ctx)
ctx              1704 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1707 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
ctx              1708 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_params_h264(ctx);
ctx              1709 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	else if (ctx->codec_mode == S5P_MFC_CODEC_MPEG4_ENC)
ctx              1710 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_params_mpeg4(ctx);
ctx              1711 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	else if (ctx->codec_mode == S5P_MFC_CODEC_H263_ENC)
ctx              1712 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_params_h263(ctx);
ctx              1713 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	else if (ctx->codec_mode == S5P_MFC_CODEC_VP8_ENC)
ctx              1714 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_params_vp8(ctx);
ctx              1715 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	else if (ctx->codec_mode == S5P_FIMV_CODEC_HEVC_ENC)
ctx              1716 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_params_hevc(ctx);
ctx              1719 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->codec_mode);
ctx              1725 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_width, mfc_regs->e_source_first_plane_stride);
ctx              1726 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		writel(ctx->img_width, mfc_regs->e_source_second_plane_stride);
ctx              1729 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
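
Note: s5p_mfc_init_encode_v6() above selects the per-codec parameter setup with an if/else chain on ctx->codec_mode. The same dispatch is often written as a lookup table of function pointers; the sketch below deliberately swaps in that table technique. The codec IDs and setter functions are hypothetical stand-ins, not the driver's real S5P_MFC_CODEC_* values.

#include <stdio.h>

/* Hypothetical codec IDs standing in for S5P_MFC_CODEC_*_ENC. */
enum codec { CODEC_H264, CODEC_MPEG4, CODEC_H263, CODEC_VP8, CODEC_MAX };

struct enc_ctx { enum codec codec_mode; };

static int set_params_h264(struct enc_ctx *c)  { (void)c; puts("h264 params");  return 0; }
static int set_params_mpeg4(struct enc_ctx *c) { (void)c; puts("mpeg4 params"); return 0; }
static int set_params_h263(struct enc_ctx *c)  { (void)c; puts("h263 params");  return 0; }
static int set_params_vp8(struct enc_ctx *c)   { (void)c; puts("vp8 params");   return 0; }

/* The table replaces the if/else ladder: one entry per codec mode. */
static int (*const set_params[CODEC_MAX])(struct enc_ctx *) = {
    [CODEC_H264]  = set_params_h264,
    [CODEC_MPEG4] = set_params_mpeg4,
    [CODEC_H263]  = set_params_h263,
    [CODEC_VP8]   = set_params_vp8,
};

static int init_encode(struct enc_ctx *c)
{
    if (c->codec_mode >= CODEC_MAX || !set_params[c->codec_mode]) {
        fprintf(stderr, "unknown codec: %d\n", c->codec_mode);
        return -1;                 /* mirrors the "invalid codec" path above */
    }
    return set_params[c->codec_mode](c);
}

int main(void)
{
    struct enc_ctx c = { .codec_mode = CODEC_VP8 };
    return init_encode(&c);
}
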
ctx              1736 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_h264_set_aso_slice_order_v6(struct s5p_mfc_ctx *ctx)
ctx              1738 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1740 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_enc_params *p = &ctx->enc_params;
ctx              1754 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_encode_one_frame_v6(struct s5p_mfc_ctx *ctx)
ctx              1756 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1764 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->codec_mode == S5P_MFC_CODEC_H264_ENC)
ctx              1765 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_h264_set_aso_slice_order_v6(ctx);
ctx              1767 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_slice_mode(ctx);
ctx              1769 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->state != MFCINST_FINISHING)
ctx              1774 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	writel(ctx->inst_no, mfc_regs->instance_id);
ctx              1782 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline void s5p_mfc_run_dec_last_frames(struct s5p_mfc_ctx *ctx)
ctx              1784 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1786 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_dec_stream_buffer_v6(ctx, 0, 0, 0);
ctx              1787 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1788 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_decode_one_frame_v6(ctx, MFC_DEC_LAST_FRAME);
ctx              1791 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline int s5p_mfc_run_dec_frame(struct s5p_mfc_ctx *ctx)
ctx              1793 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1797 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->state == MFCINST_FINISHING) {
ctx              1799 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_dec_stream_buffer_v6(ctx, 0, 0, 0);
ctx              1800 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		dev->curr_ctx = ctx->num;
ctx              1801 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_clean_ctx_int_flags(ctx);
ctx              1802 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_decode_one_frame_v6(ctx, last_frame);
ctx              1807 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (list_empty(&ctx->src_queue)) {
ctx              1812 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1814 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_dec_stream_buffer_v6(ctx,
ctx              1816 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->consumed_stream,
ctx              1819 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1823 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->state = MFCINST_FINISHING;
ctx              1825 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_decode_one_frame_v6(ctx, last_frame);
ctx              1830 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline int s5p_mfc_run_enc_frame(struct s5p_mfc_ctx *ctx)
ctx              1832 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1841 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (list_empty(&ctx->src_queue) && ctx->state != MFCINST_FINISHING) {
ctx              1846 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (list_empty(&ctx->dst_queue)) {
ctx              1851 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (list_empty(&ctx->src_queue)) {
ctx              1853 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		s5p_mfc_set_enc_frame_buffer_v6(ctx, 0, 0);
ctx              1856 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		src_mb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1859 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_set_enc_frame_buffer_v6(ctx, 0, 0);
ctx              1860 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->state = MFCINST_FINISHING;
ctx              1868 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_set_enc_frame_buffer_v6(ctx, src_y_addr, src_c_addr);
ctx              1870 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 				ctx->state = MFCINST_FINISHING;
ctx              1874 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1879 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);
ctx              1881 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1882 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_encode_one_frame_v6(ctx);
ctx              1887 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline void s5p_mfc_run_init_dec(struct s5p_mfc_ctx *ctx)
ctx              1889 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1894 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	temp_vb = list_entry(ctx->src_queue.next, struct s5p_mfc_buf, list);
ctx              1896 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_dec_stream_buffer_v6(ctx,
ctx              1899 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1900 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_init_decode_v6(ctx);
ctx              1903 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline void s5p_mfc_run_init_enc(struct s5p_mfc_ctx *ctx)
ctx              1905 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1910 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dst_mb = list_entry(ctx->dst_queue.next, struct s5p_mfc_buf, list);
ctx              1913 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_set_enc_stream_buffer_v6(ctx, dst_addr, dst_size);
ctx              1914 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1915 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_init_encode_v6(ctx);
ctx              1918 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline int s5p_mfc_run_init_dec_buffers(struct s5p_mfc_ctx *ctx)
ctx              1920 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1926 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->capture_state != QUEUE_BUFS_MMAPED) {
ctx              1933 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1934 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ret = s5p_mfc_set_dec_frame_buffer_v6(ctx);
ctx              1937 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->state = MFCINST_ERROR;
ctx              1942 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static inline int s5p_mfc_run_init_enc_buffers(struct s5p_mfc_ctx *ctx)
ctx              1944 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              1947 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	dev->curr_ctx = ctx->num;
ctx              1948 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ret = s5p_mfc_set_enc_ref_buffer_v6(ctx);
ctx              1951 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->state = MFCINST_ERROR;
ctx              1959 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_ctx *ctx;
ctx              1986 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	ctx = dev->ctx[new_ctx];
ctx              1987 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mfc_debug(1, "Setting new context to %p\n", ctx);
ctx              1990 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		ctx->dst_queue_cnt, ctx->pb_count, ctx->src_queue_cnt);
ctx              1991 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	mfc_debug(1, "ctx->state=%d\n", ctx->state);
ctx              1996 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	s5p_mfc_clean_ctx_int_flags(ctx);
ctx              1998 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	if (ctx->type == MFCINST_DECODER) {
ctx              1999 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		switch (ctx->state) {
ctx              2001 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_dec_last_frames(ctx);
ctx              2004 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ret = s5p_mfc_run_dec_frame(ctx);
ctx              2008 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx);
ctx              2012 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx);
ctx              2015 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_init_dec(ctx);
ctx              2018 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ret = s5p_mfc_run_init_dec_buffers(ctx);
ctx              2021 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_set_flush(ctx, ctx->dpb_flush_flag);
ctx              2024 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_dec_last_frames(ctx);
ctx              2027 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_dec_last_frames(ctx);
ctx              2031 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ctx->capture_state = QUEUE_FREE;
ctx              2033 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_init_dec(ctx);
ctx              2038 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	} else if (ctx->type == MFCINST_ENCODER) {
ctx              2039 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		switch (ctx->state) {
ctx              2042 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ret = s5p_mfc_run_enc_frame(ctx);
ctx              2046 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx);
ctx              2050 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 					ctx);
ctx              2053 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			s5p_mfc_run_init_enc(ctx);
ctx              2056 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 			ret = s5p_mfc_run_init_enc_buffers(ctx);
ctx              2062 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		mfc_err("invalid context type: %d\n", ctx->type);
ctx              2087 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c s5p_mfc_read_info_v6(struct s5p_mfc_ctx *ctx, unsigned long ofs)
ctx              2124 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static int s5p_mfc_get_disp_frame_type_v6(struct s5p_mfc_ctx *ctx)
ctx              2126 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	struct s5p_mfc_dev *dev = ctx->dev;
ctx              2202 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static unsigned int s5p_mfc_get_pic_type_top_v6(struct s5p_mfc_ctx *ctx)
ctx              2204 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	return s5p_mfc_read_info_v6(ctx,
ctx              2205 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		(__force unsigned long) ctx->dev->mfc_regs->d_ret_picture_tag_top);
ctx              2208 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static unsigned int s5p_mfc_get_pic_type_bot_v6(struct s5p_mfc_ctx *ctx)
ctx              2210 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	return s5p_mfc_read_info_v6(ctx,
ctx              2211 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		(__force unsigned long) ctx->dev->mfc_regs->d_ret_picture_tag_bot);
ctx              2214 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static unsigned int s5p_mfc_get_crop_info_h_v6(struct s5p_mfc_ctx *ctx)
ctx              2216 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	return s5p_mfc_read_info_v6(ctx,
ctx              2217 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		(__force unsigned long) ctx->dev->mfc_regs->d_display_crop_info1);
ctx              2220 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c static unsigned int s5p_mfc_get_crop_info_v_v6(struct s5p_mfc_ctx *ctx)
ctx              2222 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 	return s5p_mfc_read_info_v6(ctx,
ctx              2223 drivers/media/platform/s5p-mfc/s5p_mfc_opr_v6.c 		(__force unsigned long) ctx->dev->mfc_regs->d_display_crop_info2);
ctx               431 drivers/media/platform/sti/bdisp/bdisp-hw.c void bdisp_hw_free_nodes(struct bdisp_ctx *ctx)
ctx               433 drivers/media/platform/sti/bdisp/bdisp-hw.c 	if (ctx && ctx->node[0])
ctx               434 drivers/media/platform/sti/bdisp/bdisp-hw.c 		dma_free_attrs(ctx->bdisp_dev->dev,
ctx               436 drivers/media/platform/sti/bdisp/bdisp-hw.c 			       ctx->node[0], ctx->node_paddr[0],
ctx               449 drivers/media/platform/sti/bdisp/bdisp-hw.c int bdisp_hw_alloc_nodes(struct bdisp_ctx *ctx)
ctx               451 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct device *dev = ctx->bdisp_dev->dev;
ctx               467 drivers/media/platform/sti/bdisp/bdisp-hw.c 		ctx->node[i] = base;
ctx               468 drivers/media/platform/sti/bdisp/bdisp-hw.c 		ctx->node_paddr[i] = paddr;
ctx               469 drivers/media/platform/sti/bdisp/bdisp-hw.c 		dev_dbg(dev, "node[%d]=0x%p (paddr=%pad)\n", i, ctx->node[i],
ctx               628 drivers/media/platform/sti/bdisp/bdisp-hw.c static int bdisp_hw_get_hv_inc(struct bdisp_ctx *ctx, u16 *h_inc, u16 *v_inc)
ctx               632 drivers/media/platform/sti/bdisp/bdisp-hw.c 	src_w = ctx->src.crop.width;
ctx               633 drivers/media/platform/sti/bdisp/bdisp-hw.c 	src_h = ctx->src.crop.height;
ctx               634 drivers/media/platform/sti/bdisp/bdisp-hw.c 	dst_w = ctx->dst.crop.width;
ctx               635 drivers/media/platform/sti/bdisp/bdisp-hw.c 	dst_h = ctx->dst.crop.height;
ctx               639 drivers/media/platform/sti/bdisp/bdisp-hw.c 		dev_err(ctx->bdisp_dev->dev,
ctx               658 drivers/media/platform/sti/bdisp/bdisp-hw.c static int bdisp_hw_get_op_cfg(struct bdisp_ctx *ctx, struct bdisp_op_cfg *c)
ctx               660 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct device *dev = ctx->bdisp_dev->dev;
ctx               661 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_frame *src = &ctx->src;
ctx               662 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_frame *dst = &ctx->dst;
ctx               671 drivers/media/platform/sti/bdisp/bdisp-hw.c 	c->hflip = ctx->hflip;
ctx               672 drivers/media/platform/sti/bdisp/bdisp-hw.c 	c->vflip = ctx->vflip;
ctx               688 drivers/media/platform/sti/bdisp/bdisp-hw.c 	if (bdisp_hw_get_hv_inc(ctx, &c->h_inc, &c->v_inc)) {
ctx               757 drivers/media/platform/sti/bdisp/bdisp-hw.c static void bdisp_hw_build_node(struct bdisp_ctx *ctx,
ctx               762 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_frame *src = &ctx->src;
ctx               763 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_frame *dst = &ctx->dst;
ctx               772 drivers/media/platform/sti/bdisp/bdisp-hw.c 	dev_dbg(ctx->bdisp_dev->dev, "%s\n", __func__);
ctx               781 drivers/media/platform/sti/bdisp/bdisp-hw.c 	dst_x_offset = (src_x_offset * dst_width) / ctx->src.crop.width;
ctx               783 drivers/media/platform/sti/bdisp/bdisp-hw.c 	dst_rect.width = (src_rect.width * dst_width) / ctx->src.crop.width;
ctx               992 drivers/media/platform/sti/bdisp/bdisp-hw.c static int bdisp_hw_build_all_nodes(struct bdisp_ctx *ctx)
ctx               999 drivers/media/platform/sti/bdisp/bdisp-hw.c 		if (!ctx->node[i]) {
ctx              1000 drivers/media/platform/sti/bdisp/bdisp-hw.c 			dev_err(ctx->bdisp_dev->dev, "node %d is null\n", i);
ctx              1005 drivers/media/platform/sti/bdisp/bdisp-hw.c 	if (bdisp_hw_get_op_cfg(ctx, &cfg))
ctx              1011 drivers/media/platform/sti/bdisp/bdisp-hw.c 		bdisp_hw_build_node(ctx, &cfg, ctx->node[nid],
ctx              1015 drivers/media/platform/sti/bdisp/bdisp-hw.c 			ctx->node[nid - 1]->nip = ctx->node_paddr[nid];
ctx              1020 drivers/media/platform/sti/bdisp/bdisp-hw.c 			bdisp_hw_build_node(ctx, &cfg, ctx->node[nid],
ctx              1022 drivers/media/platform/sti/bdisp/bdisp-hw.c 			ctx->node[nid - 1]->nip = ctx->node_paddr[nid];
ctx              1028 drivers/media/platform/sti/bdisp/bdisp-hw.c 		if (src_x_offset >= ctx->src.crop.width)
ctx              1033 drivers/media/platform/sti/bdisp/bdisp-hw.c 	ctx->node[nid - 1]->nip = 0;
ctx              1047 drivers/media/platform/sti/bdisp/bdisp-hw.c static void bdisp_hw_save_request(struct bdisp_ctx *ctx)
ctx              1049 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_node **copy_node = ctx->bdisp_dev->dbg.copy_node;
ctx              1050 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_request *request = &ctx->bdisp_dev->dbg.copy_request;
ctx              1051 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_node **node = ctx->node;
ctx              1055 drivers/media/platform/sti/bdisp/bdisp-hw.c 	request->src = ctx->src;
ctx              1056 drivers/media/platform/sti/bdisp/bdisp-hw.c 	request->dst = ctx->dst;
ctx              1057 drivers/media/platform/sti/bdisp/bdisp-hw.c 	request->hflip = ctx->hflip;
ctx              1058 drivers/media/platform/sti/bdisp/bdisp-hw.c 	request->vflip = ctx->vflip;
ctx              1065 drivers/media/platform/sti/bdisp/bdisp-hw.c 			copy_node[i] = devm_kzalloc(ctx->bdisp_dev->dev,
ctx              1084 drivers/media/platform/sti/bdisp/bdisp-hw.c int bdisp_hw_update(struct bdisp_ctx *ctx)
ctx              1087 drivers/media/platform/sti/bdisp/bdisp-hw.c 	struct bdisp_dev *bdisp = ctx->bdisp_dev;
ctx              1094 drivers/media/platform/sti/bdisp/bdisp-hw.c 	ret = bdisp_hw_build_all_nodes(ctx);
ctx              1101 drivers/media/platform/sti/bdisp/bdisp-hw.c 	bdisp_hw_save_request(ctx);
ctx              1108 drivers/media/platform/sti/bdisp/bdisp-hw.c 	writel(ctx->node_paddr[0], bdisp->regs + BLT_AQ1_IP);
ctx              1112 drivers/media/platform/sti/bdisp/bdisp-hw.c 		if (!ctx->node[node_id]->nip)
ctx              1115 drivers/media/platform/sti/bdisp/bdisp-hw.c 	writel(ctx->node_paddr[node_id], bdisp->regs + BLT_AQ1_LNA);
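
Note: the bdisp_hw_build_all_nodes()/bdisp_hw_update() excerpts chain processing nodes through a "next instruction pointer" (nip) holding the DMA address of the following node, with 0 terminating the list; the hardware then walks the chain from the address programmed into BLT_AQ1_IP. Below is a simplified userspace model of that physically linked list. The addresses, the from_paddr() lookup, and the node layout are all simulated for illustration; the real struct bdisp_node carries full blit setup.

#include <stdio.h>
#include <stdint.h>

#define MAX_NODES 4

/* Simplified node: only the chaining field matters here. */
struct node {
    int      id;
    uint32_t nip;   /* "DMA" address of next node, 0 = end of chain */
};

static struct node nodes[MAX_NODES];
static uint32_t    paddr[MAX_NODES];   /* stands in for ctx->node_paddr[] */

/* Pretend address -> node lookup; a real device dereferences the bus
 * address directly. */
static struct node *from_paddr(uint32_t pa)
{
    for (int i = 0; i < MAX_NODES; i++)
        if (paddr[i] == pa)
            return &nodes[i];
    return NULL;
}

int main(void)
{
    /* Build: link node[i-1] to node[i] and terminate the last one with 0,
     * as in the excerpts above. */
    for (int i = 0; i < MAX_NODES; i++) {
        nodes[i].id = i;
        paddr[i] = 0x1000 + 0x100 * i;    /* fake DMA addresses */
        if (i > 0)
            nodes[i - 1].nip = paddr[i];
    }
    nodes[MAX_NODES - 1].nip = 0;

    /* "Hardware" walk, starting from the first node's address. */
    for (struct node *n = from_paddr(paddr[0]); n; n = from_paddr(n->nip))
        printf("exec node %d\n", n->id);
    return 0;
}
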
ctx               120 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static inline void bdisp_ctx_state_lock_set(u32 state, struct bdisp_ctx *ctx)
ctx               124 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_lock_irqsave(&ctx->bdisp_dev->slock, flags);
ctx               125 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->state |= state;
ctx               126 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags);
ctx               129 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static inline void bdisp_ctx_state_lock_clear(u32 state, struct bdisp_ctx *ctx)
ctx               133 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_lock_irqsave(&ctx->bdisp_dev->slock, flags);
ctx               134 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->state &= ~state;
ctx               135 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags);
ctx               138 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static inline bool bdisp_ctx_state_is_set(u32 mask, struct bdisp_ctx *ctx)
ctx               143 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_lock_irqsave(&ctx->bdisp_dev->slock, flags);
ctx               144 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = (ctx->state & mask) == mask;
ctx               145 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags);
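
Note: bdisp_ctx_state_lock_set/clear/is_set above guard a state bitmask with a spinlock so flag updates from the IRQ and ioctl paths cannot race. Below is a userspace analogue using a pthread mutex; it keeps the same set/clear/test-under-lock structure, but note the kernel code must use spin_lock_irqsave because the flags are also touched from interrupt context.

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define CTX_STOP_REQ (1u << 0)
#define CTX_ABORT    (1u << 1)

struct ctx {
    pthread_mutex_t lock;
    unsigned int    state;
};

static void state_set(struct ctx *c, unsigned int bits)
{
    pthread_mutex_lock(&c->lock);
    c->state |= bits;
    pthread_mutex_unlock(&c->lock);
}

static void state_clear(struct ctx *c, unsigned int bits)
{
    pthread_mutex_lock(&c->lock);
    c->state &= ~bits;
    pthread_mutex_unlock(&c->lock);
}

static bool state_is_set(struct ctx *c, unsigned int mask)
{
    bool ret;
    pthread_mutex_lock(&c->lock);
    ret = (c->state & mask) == mask;   /* every bit of mask must be set */
    pthread_mutex_unlock(&c->lock);
    return ret;
}

int main(void)
{
    struct ctx c = { .lock = PTHREAD_MUTEX_INITIALIZER, .state = 0 };

    state_set(&c, CTX_STOP_REQ);
    printf("stop requested: %d\n", state_is_set(&c, CTX_STOP_REQ));
    state_clear(&c, CTX_STOP_REQ);
    printf("stop requested: %d\n", state_is_set(&c, CTX_STOP_REQ));
    return 0;
}
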
ctx               164 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static struct bdisp_frame *ctx_get_frame(struct bdisp_ctx *ctx,
ctx               169 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		return &ctx->src;
ctx               171 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		return &ctx->dst;
ctx               173 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev,
ctx               181 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static void bdisp_job_finish(struct bdisp_ctx *ctx, int vb_state)
ctx               185 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (WARN(!ctx || !ctx->fh.m2m_ctx, "Null hardware context\n"))
ctx               188 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dev_dbg(ctx->bdisp_dev->dev, "%s\n", __func__);
ctx               190 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src_vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               191 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst_vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               203 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		v4l2_m2m_job_finish(ctx->bdisp_dev->m2m.m2m_dev,
ctx               204 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 				    ctx->fh.m2m_ctx);
ctx               208 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static int bdisp_ctx_stop_req(struct bdisp_ctx *ctx)
ctx               211 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_dev *bdisp = ctx->bdisp_dev;
ctx               214 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dev_dbg(ctx->bdisp_dev->dev, "%s\n", __func__);
ctx               219 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (!test_bit(ST_M2M_RUNNING, &bdisp->state) || (curr_ctx != ctx))
ctx               222 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_ctx_state_lock_set(BDISP_CTX_STOP_REQ, ctx);
ctx               225 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			!bdisp_ctx_state_is_set(BDISP_CTX_STOP_REQ, ctx),
ctx               229 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "%s IRQ timeout\n", __func__);
ctx               236 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static void __bdisp_job_abort(struct bdisp_ctx *ctx)
ctx               240 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = bdisp_ctx_stop_req(ctx);
ctx               241 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if ((ret == -ETIMEDOUT) || (ctx->state & BDISP_CTX_ABORT)) {
ctx               243 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 					   ctx);
ctx               244 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		bdisp_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx               253 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static int bdisp_get_addr(struct bdisp_ctx *ctx, struct vb2_buffer *vb,
ctx               272 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev, "ignoring some planes\n");
ctx               274 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dev_dbg(ctx->bdisp_dev->dev,
ctx               281 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static int bdisp_get_bufs(struct bdisp_ctx *ctx)
ctx               287 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src = &ctx->src;
ctx               288 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst = &ctx->dst;
ctx               290 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src_vb = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               291 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = bdisp_get_addr(ctx, &src_vb->vb2_buf, src, src->paddr);
ctx               295 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst_vb = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               296 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = bdisp_get_addr(ctx, &dst_vb->vb2_buf, dst, dst->paddr);
ctx               307 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = priv;
ctx               312 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (WARN(!ctx, "Null hardware context\n"))
ctx               315 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp = ctx->bdisp_dev;
ctx               319 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (bdisp->m2m.ctx != ctx) {
ctx               321 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			bdisp->m2m.ctx, ctx);
ctx               322 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->state |= BDISP_PARAMS;
ctx               323 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		bdisp->m2m.ctx = ctx;
ctx               326 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (ctx->state & BDISP_CTX_STOP_REQ) {
ctx               327 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->state &= ~BDISP_CTX_STOP_REQ;
ctx               328 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->state |= BDISP_CTX_ABORT;
ctx               333 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	err = bdisp_get_bufs(ctx);
ctx               347 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	err = bdisp_hw_update(ctx);
ctx               357 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->state &= ~BDISP_PARAMS;
ctx               360 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		bdisp_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx               368 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static int __bdisp_s_ctrl(struct bdisp_ctx *ctx, struct v4l2_ctrl *ctrl)
ctx               375 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->hflip = ctrl->val;
ctx               378 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->vflip = ctrl->val;
ctx               381 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "unknown control %d\n", ctrl->id);
ctx               385 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->state |= BDISP_PARAMS;
ctx               392 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = container_of(ctrl->handler, struct bdisp_ctx,
ctx               397 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_lock_irqsave(&ctx->bdisp_dev->slock, flags);
ctx               398 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = __bdisp_s_ctrl(ctx, ctrl);
ctx               399 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	spin_unlock_irqrestore(&ctx->bdisp_dev->slock, flags);
ctx               408 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static int bdisp_ctrls_create(struct bdisp_ctx *ctx)
ctx               410 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (ctx->ctrls_rdy)
ctx               413 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, BDISP_MAX_CTRL_NUM);
ctx               415 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->bdisp_ctrls.hflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               417 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->bdisp_ctrls.vflip = v4l2_ctrl_new_std(&ctx->ctrl_handler,
ctx               420 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (ctx->ctrl_handler.error) {
ctx               421 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		int err = ctx->ctrl_handler.error;
ctx               423 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               427 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->ctrls_rdy = true;
ctx               432 drivers/media/platform/sti/bdisp/bdisp-v4l2.c static void bdisp_ctrls_delete(struct bdisp_ctx *ctx)
ctx               434 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (ctx->ctrls_rdy) {
ctx               435 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               436 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ctx->ctrls_rdy = false;
ctx               444 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = vb2_get_drv_priv(vq);
ctx               445 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_frame *frame = ctx_get_frame(ctx, vq->type);
ctx               448 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
ctx               453 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid format\n");
ctx               468 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               469 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_frame *frame = ctx_get_frame(ctx, vb->vb2_queue->type);
ctx               472 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
ctx               485 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               489 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev, "0 data buffer, skip it\n");
ctx               494 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (ctx->fh.m2m_ctx)
ctx               495 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               500 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = q->drv_priv;
ctx               502 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	int ret = pm_runtime_get_sync(ctx->bdisp_dev->dev);
ctx               505 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "failed to set runtime PM\n");
ctx               508 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			while ((buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx               511 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			while ((buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx               523 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = q->drv_priv;
ctx               525 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	__bdisp_job_abort(ctx);
ctx               527 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	pm_runtime_put(ctx->bdisp_dev->dev);
ctx               543 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = priv;
ctx               549 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src_vq->drv_priv = ctx;
ctx               554 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src_vq->lock = &ctx->bdisp_dev->lock;
ctx               555 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	src_vq->dev = ctx->bdisp_dev->v4l2_dev.dev;
ctx               564 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst_vq->drv_priv = ctx;
ctx               569 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst_vq->lock = &ctx->bdisp_dev->lock;
ctx               570 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dst_vq->dev = ctx->bdisp_dev->v4l2_dev.dev;
ctx               578 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = NULL;
ctx               585 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               586 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (!ctx) {
ctx               590 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->bdisp_dev = bdisp;
ctx               592 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (bdisp_hw_alloc_nodes(ctx)) {
ctx               598 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_init(&ctx->fh, bdisp->m2m.vdev);
ctx               600 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ret = bdisp_ctrls_create(ctx);
ctx               607 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               608 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	file->private_data = &ctx->fh;
ctx               609 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_add(&ctx->fh);
ctx               612 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->src = bdisp_dflt_fmt;
ctx               613 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->dst = bdisp_dflt_fmt;
ctx               616 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(bdisp->m2m.m2m_dev, ctx,
ctx               618 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               620 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               627 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dev_dbg(bdisp->dev, "driver opened, ctx = 0x%p\n", ctx);
ctx               634 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_ctrls_delete(ctx);
ctx               635 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx               637 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx               638 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_hw_free_nodes(ctx);
ctx               640 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	kfree(ctx);
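
Note: the bdisp_open() excerpts end with a stack of error labels that undo each completed init step in reverse order (delete ctrls, del/exit the fh, free nodes, kfree the ctx). Below is a compact sketch of that goto-unwind idiom with hypothetical steps; step_b() is forced to fail so the partial unwind is visible.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical three-step setup; each step can fail and each has a
 * matching teardown, mirroring the error labels in bdisp_open(). */
static int step_a(void) { puts("a up");  return 0; }
static int step_b(void) { puts("b up");  return -1; }  /* force a failure */
static int step_c(void) { puts("c up");  return 0; }
static void undo_a(void) { puts("a down"); }
static void undo_b(void) { puts("b down"); }

static int open_dev(void)
{
    int ret;

    ret = step_a();
    if (ret)
        goto out;
    ret = step_b();
    if (ret)
        goto err_a;          /* only unwind what already succeeded */
    ret = step_c();
    if (ret)
        goto err_b;
    return 0;

err_b:
    undo_b();
err_a:
    undo_a();
out:
    return ret;
}

int main(void)
{
    return open_dev() ? EXIT_FAILURE : EXIT_SUCCESS;
}
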
ctx               649 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(file->private_data);
ctx               650 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_dev *bdisp = ctx->bdisp_dev;
ctx               656 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               658 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_ctrls_delete(ctx);
ctx               660 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx               661 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx               666 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_hw_free_nodes(ctx);
ctx               668 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	kfree(ctx);
ctx               687 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               688 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_dev *bdisp = ctx->bdisp_dev;
ctx               699 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               709 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev, "No YU12 on capture\n");
ctx               719 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               721 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_frame *frame  = ctx_get_frame(ctx, f->type);
ctx               724 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
ctx               743 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               750 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev, "Unknown format 0x%x\n",
ctx               758 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev, "No YU12 on capture\n");
ctx               778 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev,
ctx               793 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               802 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Cannot set format\n");
ctx               806 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               808 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "queue (%d) busy\n", f->type);
ctx               813 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			&ctx->src : &ctx->dst;
ctx               817 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Unknown format 0x%x\n",
ctx               838 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_ctx_state_lock_set(state, ctx);
ctx               847 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               849 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	frame = ctx_get_frame(ctx, s->type);
ctx               851 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
ctx               871 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			dev_err(ctx->bdisp_dev->dev, "Invalid target\n");
ctx               892 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 			dev_err(ctx->bdisp_dev->dev, "Invalid target\n");
ctx               898 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid type\n");
ctx               925 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx               938 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid type / target\n");
ctx               942 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	frame = ctx_get_frame(ctx, s->type);
ctx               944 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "Invalid frame (%p)\n", frame);
ctx               957 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev,
ctx               970 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev,
ctx               986 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_dbg(ctx->bdisp_dev->dev,
ctx               995 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_ctx_state_lock_set(BDISP_PARAMS, ctx);
ctx              1002 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx = fh_to_ctx(fh);
ctx              1005 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	    !bdisp_ctx_state_is_set(BDISP_SRC_FMT, ctx)) {
ctx              1006 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "src not defined\n");
ctx              1011 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	    !bdisp_ctx_state_is_set(BDISP_DST_FMT, ctx)) {
ctx              1012 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		dev_err(ctx->bdisp_dev->dev, "dst not defined\n");
ctx              1016 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	return v4l2_m2m_streamon(file, ctx->fh.m2m_ctx, type);
ctx              1094 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx;
ctx              1111 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx = v4l2_m2m_get_curr_priv(bdisp->m2m.m2m_dev);
ctx              1112 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (!ctx || !ctx->fh.m2m_ctx)
ctx              1117 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_job_finish(ctx, VB2_BUF_STATE_DONE);
ctx              1119 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	if (bdisp_ctx_state_is_set(BDISP_CTX_STOP_REQ, ctx)) {
ctx              1120 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		bdisp_ctx_state_lock_clear(BDISP_CTX_STOP_REQ, ctx);
ctx              1145 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx;
ctx              1147 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx = v4l2_m2m_get_curr_priv(bdisp->m2m.m2m_dev);
ctx              1149 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	dev_err(ctx->bdisp_dev->dev, "Device work timeout\n");
ctx              1157 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx              1190 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	struct bdisp_ctx *ctx;
ctx              1194 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	ctx = bdisp->m2m.ctx;
ctx              1195 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 	bdisp->m2m.ctx = NULL;
ctx              1199 drivers/media/platform/sti/bdisp/bdisp-v4l2.c 		bdisp_job_finish(ctx, VB2_BUF_STATE_ERROR);
ctx               139 drivers/media/platform/sti/bdisp/bdisp.h 	struct bdisp_ctx        *ctx;
ctx               203 drivers/media/platform/sti/bdisp/bdisp.h void bdisp_hw_free_nodes(struct bdisp_ctx *ctx);
ctx               204 drivers/media/platform/sti/bdisp/bdisp.h int bdisp_hw_alloc_nodes(struct bdisp_ctx *ctx);
ctx               209 drivers/media/platform/sti/bdisp/bdisp.h int bdisp_hw_update(struct bdisp_ctx *ctx);
ctx                55 drivers/media/platform/sti/delta/delta-debug.c void delta_trace_summary(struct delta_ctx *ctx)
ctx                57 drivers/media/platform/sti/delta/delta-debug.c 	struct delta_dev *delta = ctx->dev;
ctx                58 drivers/media/platform/sti/delta/delta-debug.c 	struct delta_streaminfo *s = &ctx->streaminfo;
ctx                61 drivers/media/platform/sti/delta/delta-debug.c 	if (!(ctx->flags & DELTA_FLAG_STREAMINFO))
ctx                65 drivers/media/platform/sti/delta/delta-debug.c 		ctx->name,
ctx                67 drivers/media/platform/sti/delta/delta-debug.c 		ctx->decoded_frames,
ctx                68 drivers/media/platform/sti/delta/delta-debug.c 		ctx->output_frames,
ctx                69 drivers/media/platform/sti/delta/delta-debug.c 		ctx->dropped_frames,
ctx                70 drivers/media/platform/sti/delta/delta-debug.c 		ctx->stream_errors,
ctx                71 drivers/media/platform/sti/delta/delta-debug.c 		ctx->decode_errors);
ctx                16 drivers/media/platform/sti/delta/delta-debug.h void delta_trace_summary(struct delta_ctx *ctx);
ctx                27 drivers/media/platform/sti/delta/delta-ipc.c #define to_pctx(ctx) container_of(ctx, struct delta_ctx, ipc_ctx)
ctx                36 drivers/media/platform/sti/delta/delta-ipc.c #define to_host_hdl(ctx) ((void *)ctx)
ctx                41 drivers/media/platform/sti/delta/delta-ipc.c static inline dma_addr_t to_paddr(struct delta_ipc_ctx *ctx, void *vaddr)
ctx                43 drivers/media/platform/sti/delta/delta-ipc.c 	return (ctx->ipc_buf->paddr + (vaddr - ctx->ipc_buf->vaddr));
ctx                46 drivers/media/platform/sti/delta/delta-ipc.c static inline bool is_valid_data(struct delta_ipc_ctx *ctx,
ctx                49 drivers/media/platform/sti/delta/delta-ipc.c 	return ((data >= ctx->ipc_buf->vaddr) &&
ctx                50 drivers/media/platform/sti/delta/delta-ipc.c 		((data + size) <= (ctx->ipc_buf->vaddr + ctx->ipc_buf->size)));
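
Note: delta-ipc.c recovers the outer delta_ctx from an embedded delta_ipc_ctx with container_of() (the to_pctx() macro above) and validates that shared-buffer pointers stay inside the ipc buffer (is_valid_data()). Both boil down to plain pointer arithmetic; here is a self-contained sketch using offsetof, with the struct names and sizes invented for the example.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Minimal container_of: recover the outer struct from a pointer to one
 * of its members, as the to_pctx() macro does above. */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct ipc_ctx { char buf[64]; };
struct outer_ctx {
    int            id;
    struct ipc_ctx ipc;    /* embedded member */
};

/* is_valid_data() analogue: [data, data + size) must lie inside buf. */
static bool is_valid_data(const struct ipc_ctx *c, const char *data,
                          size_t size)
{
    return data >= c->buf && data + size <= c->buf + sizeof(c->buf);
}

int main(void)
{
    struct outer_ctx o = { .id = 42 };
    struct ipc_ctx *ipc = &o.ipc;

    /* Walk back from the member to the enclosing context. */
    struct outer_ctx *back = container_of(ipc, struct outer_ctx, ipc);
    printf("recovered id: %d\n", back->id);

    printf("in-range:  %d\n", is_valid_data(ipc, ipc->buf + 8, 16));
    printf("overflow:  %d\n", is_valid_data(ipc, ipc->buf + 60, 16));
    return 0;
}
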
ctx               100 drivers/media/platform/sti/delta/delta-ipc.c static void build_msg_header(struct delta_ipc_ctx *ctx,
ctx               105 drivers/media/platform/sti/delta/delta-ipc.c 	header->host_hdl = to_host_hdl(ctx);
ctx               106 drivers/media/platform/sti/delta/delta-ipc.c 	header->copro_hdl = ctx->copro_hdl;
ctx               116 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ipc_ctx *ctx = &pctx->ipc_ctx;
ctx               118 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_buf *buf = &ctx->ipc_buf_struct;
ctx               154 drivers/media/platform/sti/delta/delta-ipc.c 			param->size, ctx->ipc_buf->size);
ctx               159 drivers/media/platform/sti/delta/delta-ipc.c 	init_completion(&ctx->done);
ctx               170 drivers/media/platform/sti/delta/delta-ipc.c 	ctx->ipc_buf = buf;
ctx               173 drivers/media/platform/sti/delta/delta-ipc.c 	build_msg_header(ctx, DELTA_IPC_OPEN, &msg.header);
ctx               176 drivers/media/platform/sti/delta/delta-ipc.c 	msg.ipc_buf_paddr = ctx->ipc_buf->paddr;
ctx               182 drivers/media/platform/sti/delta/delta-ipc.c 	memcpy(ctx->ipc_buf->vaddr, param->data, msg.param_size);
ctx               183 drivers/media/platform/sti/delta/delta-ipc.c 	msg.param_paddr = ctx->ipc_buf->paddr;
ctx               197 drivers/media/platform/sti/delta/delta-ipc.c 	    (&ctx->done, msecs_to_jiffies(IPC_TIMEOUT))) {
ctx               207 drivers/media/platform/sti/delta/delta-ipc.c 	if (ctx->cb_err) {
ctx               211 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->cb_err, name, param->size, param->data);
ctx               216 drivers/media/platform/sti/delta/delta-ipc.c 	*ipc_buf = ctx->ipc_buf;
ctx               217 drivers/media/platform/sti/delta/delta-ipc.c 	*hdl = (void *)ctx;
ctx               223 drivers/media/platform/sti/delta/delta-ipc.c 	hw_free(pctx, ctx->ipc_buf);
ctx               224 drivers/media/platform/sti/delta/delta-ipc.c 	ctx->ipc_buf = NULL;
ctx               231 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ipc_ctx *ctx = to_ctx(hdl);
ctx               232 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ctx *pctx = to_pctx(ctx);
ctx               259 drivers/media/platform/sti/delta/delta-ipc.c 	if (param->size > ctx->ipc_buf->size) {
ctx               263 drivers/media/platform/sti/delta/delta-ipc.c 			param->size, ctx->ipc_buf->size);
ctx               267 drivers/media/platform/sti/delta/delta-ipc.c 	if (!is_valid_data(ctx, param->data, param->size)) {
ctx               273 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr,
ctx               274 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr + ctx->ipc_buf->size - 1);
ctx               279 drivers/media/platform/sti/delta/delta-ipc.c 	build_msg_header(ctx, DELTA_IPC_SET_STREAM, &msg.header);
ctx               282 drivers/media/platform/sti/delta/delta-ipc.c 	msg.param_paddr = to_paddr(ctx, param->data);
ctx               297 drivers/media/platform/sti/delta/delta-ipc.c 	    (&ctx->done, msecs_to_jiffies(IPC_TIMEOUT))) {
ctx               307 drivers/media/platform/sti/delta/delta-ipc.c 	if (ctx->cb_err) {
ctx               311 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->cb_err, param->size, param->data);
ctx               322 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ipc_ctx *ctx = to_ctx(hdl);
ctx               323 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ctx *pctx = to_pctx(ctx);
ctx               357 drivers/media/platform/sti/delta/delta-ipc.c 	if (param->size + status->size > ctx->ipc_buf->size) {
ctx               363 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->size);
ctx               367 drivers/media/platform/sti/delta/delta-ipc.c 	if (!is_valid_data(ctx, param->data, param->size)) {
ctx               373 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr,
ctx               374 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr + ctx->ipc_buf->size - 1);
ctx               378 drivers/media/platform/sti/delta/delta-ipc.c 	if (!is_valid_data(ctx, status->data, status->size)) {
ctx               384 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr,
ctx               385 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->ipc_buf->vaddr + ctx->ipc_buf->size - 1);
ctx               390 drivers/media/platform/sti/delta/delta-ipc.c 	build_msg_header(ctx, DELTA_IPC_DECODE, &msg.header);
ctx               393 drivers/media/platform/sti/delta/delta-ipc.c 	msg.param_paddr = to_paddr(ctx, param->data);
ctx               396 drivers/media/platform/sti/delta/delta-ipc.c 	msg.status_paddr = to_paddr(ctx, status->data);
ctx               411 drivers/media/platform/sti/delta/delta-ipc.c 	    (&ctx->done, msecs_to_jiffies(IPC_TIMEOUT))) {
ctx               421 drivers/media/platform/sti/delta/delta-ipc.c 	if (ctx->cb_err) {
ctx               425 drivers/media/platform/sti/delta/delta-ipc.c 			ctx->cb_err, param->size, param->data);
ctx               435 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ipc_ctx *ctx = to_ctx(hdl);
ctx               436 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ctx *pctx = to_pctx(ctx);
ctx               449 drivers/media/platform/sti/delta/delta-ipc.c 	if (ctx->ipc_buf) {
ctx               450 drivers/media/platform/sti/delta/delta-ipc.c 		hw_free(pctx, ctx->ipc_buf);
ctx               451 drivers/media/platform/sti/delta/delta-ipc.c 		ctx->ipc_buf = NULL;
ctx               462 drivers/media/platform/sti/delta/delta-ipc.c 	build_msg_header(ctx, DELTA_IPC_CLOSE, &msg.header);
ctx               476 drivers/media/platform/sti/delta/delta-ipc.c 	    (&ctx->done, msecs_to_jiffies(IPC_TIMEOUT))) {
ctx               485 drivers/media/platform/sti/delta/delta-ipc.c 	if (ctx->cb_err) {
ctx               488 drivers/media/platform/sti/delta/delta-ipc.c 			pctx->name, ctx->cb_err);
ctx               496 drivers/media/platform/sti/delta/delta-ipc.c 	struct delta_ipc_ctx *ctx;
ctx               526 drivers/media/platform/sti/delta/delta-ipc.c 	ctx = msg_to_ctx(msg);
ctx               527 drivers/media/platform/sti/delta/delta-ipc.c 	if (!ctx) {
ctx               538 drivers/media/platform/sti/delta/delta-ipc.c 	if (!ctx->copro_hdl)
ctx               539 drivers/media/platform/sti/delta/delta-ipc.c 		ctx->copro_hdl = msg_to_copro_hdl(msg);
ctx               545 drivers/media/platform/sti/delta/delta-ipc.c 	ctx->cb_err = msg->err;
ctx               546 drivers/media/platform/sti/delta/delta-ipc.c 	complete(&ctx->done);
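
The delta-ipc.c fragments above all follow one handshake: build a message header, send it to the coprocessor, block on wait_for_completion_timeout(&ctx->done, ...), then inspect ctx->cb_err, which the receive callback fills in just before calling complete(&ctx->done). Below is a minimal self-contained sketch of that pattern; the names (my_ipc_ctx, my_ipc_request, my_ipc_ack, MY_IPC_TIMEOUT_MS) are hypothetical stand-ins, not the driver's own identifiers.

#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

#define MY_IPC_TIMEOUT_MS 1000

struct my_ipc_ctx {
	struct completion done;	/* init_completion() at setup time */
	int cb_err;		/* status reported by the coprocessor */
};

/* Caller side: send a request, then block until the callback fires. */
static int my_ipc_request(struct my_ipc_ctx *ctx)
{
	reinit_completion(&ctx->done);

	/* ... build the message header and send it to the firmware ... */

	if (!wait_for_completion_timeout(&ctx->done,
					 msecs_to_jiffies(MY_IPC_TIMEOUT_MS)))
		return -ETIMEDOUT;	/* no ack within the timeout */

	/* the callback stored the coprocessor status in cb_err */
	return ctx->cb_err ? -EIO : 0;
}

/* Callback side: record the status, then wake the waiting requester. */
static void my_ipc_ack(struct my_ipc_ctx *ctx, int err)
{
	ctx->cb_err = err;
	complete(&ctx->done);
}

Note the ordering in my_ipc_ack(): cb_err must be written before complete(), so the woken waiter always observes the final status.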
ctx                38 drivers/media/platform/sti/delta/delta-ipc.h int delta_ipc_open(struct delta_ctx *ctx, const char *name,
ctx                10 drivers/media/platform/sti/delta/delta-mem.c int hw_alloc(struct delta_ctx *ctx, u32 size, const char *name,
ctx                13 drivers/media/platform/sti/delta/delta-mem.c 	struct delta_dev *delta = ctx->dev;
ctx                23 drivers/media/platform/sti/delta/delta-mem.c 			ctx->name, name, size);
ctx                24 drivers/media/platform/sti/delta/delta-mem.c 		ctx->sys_errors++;
ctx                36 drivers/media/platform/sti/delta/delta-mem.c 		ctx->name, size, buf->vaddr, &buf->paddr, buf->name);
ctx                41 drivers/media/platform/sti/delta/delta-mem.c void hw_free(struct delta_ctx *ctx, struct delta_buf *buf)
ctx                43 drivers/media/platform/sti/delta/delta-mem.c 	struct delta_dev *delta = ctx->dev;
ctx                47 drivers/media/platform/sti/delta/delta-mem.c 		ctx->name, buf->size, buf->vaddr, &buf->paddr, buf->name);
ctx                10 drivers/media/platform/sti/delta/delta-mem.h int hw_alloc(struct delta_ctx *ctx, u32 size, const char *name,
ctx                12 drivers/media/platform/sti/delta/delta-mem.h void hw_free(struct delta_ctx *ctx, struct delta_buf *buf);
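
The delta-mem entries show a thin allocator pair, hw_alloc()/hw_free(), that fills a buffer descriptor with a kernel virtual address (vaddr), a device address (paddr), a size, and a debug name. A plausible minimal sketch is given below; it assumes a plain dma_alloc_coherent() backend, which may differ from the driver's actual allocation attributes, and my_buf/my_hw_alloc/my_hw_free are hypothetical names.

#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/types.h>
#include <linux/errno.h>

/* Hypothetical stand-in for the buffer descriptor the fragments use. */
struct my_buf {
	const char *name;
	u32 size;
	void *vaddr;
	dma_addr_t paddr;
};

static int my_hw_alloc(struct device *dev, u32 size, const char *name,
		       struct my_buf *buf)
{
	buf->vaddr = dma_alloc_coherent(dev, size, &buf->paddr, GFP_KERNEL);
	if (!buf->vaddr)
		return -ENOMEM;	/* callers above also bump a sys_errors counter */

	buf->size = size;
	buf->name = name;	/* kept only for the debug traces */
	return 0;
}

static void my_hw_free(struct device *dev, struct my_buf *buf)
{
	dma_free_coherent(dev, buf->size, buf->vaddr, buf->paddr);
}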
ctx                31 drivers/media/platform/sti/delta/delta-mjpeg-dec.c #define to_ctx(ctx) ((struct delta_mjpeg_ctx *)(ctx)->priv)
ctx               206 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               221 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 		 ipc_open_param_str(params, ctx->str, sizeof(ctx->str)));
ctx               232 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 			ipc_open_param_str(params, ctx->str, sizeof(ctx->str)));
ctx               236 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ctx->ipc_buf = ipc_buf;
ctx               237 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ctx->ipc_hdl = hdl;
ctx               245 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               247 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct jpeg_decode_params_t *params = ctx->ipc_buf->vaddr;
ctx               249 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	    ctx->ipc_buf->vaddr + sizeof(*params);
ctx               285 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 		 ipc_decode_param_str(params, ctx->str, sizeof(ctx->str)));
ctx               295 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ret = delta_ipc_decode(ctx->ipc_hdl, &ipc_param, &ipc_status);
ctx               299 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 			ipc_decode_param_str(params, ctx->str,
ctx               300 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 					     sizeof(ctx->str)));
ctx               310 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 			ipc_decode_param_str(params, ctx->str,
ctx               311 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 					     sizeof(ctx->str)));
ctx               318 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ctx->out_frame = frame;
ctx               325 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx;
ctx               327 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               328 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (!ctx)
ctx               330 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	pctx->priv = ctx;
ctx               337 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               339 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (ctx->ipc_hdl) {
ctx               340 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 		delta_ipc_close(ctx->ipc_hdl);
ctx               341 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 		ctx->ipc_hdl = NULL;
ctx               344 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	kfree(ctx);
ctx               352 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               354 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (!ctx->header)
ctx               358 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	streaminfo->width = ctx->header->frame_width;
ctx               359 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	streaminfo->height = ctx->header->frame_height;
ctx               375 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               379 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct mjpeg_header *header = &ctx->header_struct;
ctx               381 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (!ctx->header) {
ctx               398 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 		ctx->header = header;
ctx               402 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (!ctx->ipc_hdl) {
ctx               409 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 				      ctx->header, &data_offset);
ctx               432 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	struct delta_mjpeg_ctx *ctx = to_ctx(pctx);
ctx               434 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	if (!ctx->out_frame)
ctx               437 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	*frame = ctx->out_frame;
ctx               439 drivers/media/platform/sti/delta/delta-mjpeg-dec.c 	ctx->out_frame = NULL;
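
The delta-mjpeg-dec.c fragments illustrate the per-codec private-context idiom: the generic context carries an opaque priv pointer, a to_ctx() cast macro recovers the codec context, open() allocates it with kzalloc(), and close() tears down any live IPC handle before kfree(). A compact sketch of that lifecycle, with hypothetical names throughout:

#include <linux/slab.h>
#include <linux/errno.h>

struct my_parent_ctx { void *priv; };
struct my_codec_ctx { void *ipc_hdl; };

#define to_my_ctx(pctx) ((struct my_codec_ctx *)(pctx)->priv)

static int my_codec_open(struct my_parent_ctx *pctx)
{
	struct my_codec_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

	if (!ctx)
		return -ENOMEM;
	pctx->priv = ctx;	/* later ops recover it via to_my_ctx() */
	return 0;
}

static int my_codec_close(struct my_parent_ctx *pctx)
{
	struct my_codec_ctx *ctx = to_my_ctx(pctx);

	if (ctx->ipc_hdl) {
		/* close the firmware channel before freeing the context */
		ctx->ipc_hdl = NULL;
	}
	kfree(ctx);
	pctx->priv = NULL;
	return 0;
}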
ctx                61 drivers/media/platform/sti/delta/delta-v4l2.c static void dump_au(struct delta_ctx *ctx, struct delta_au *au)
ctx                63 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx                69 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, au->vbuf.vb2_buf.index, au->dts, au->size,
ctx                73 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, au->vbuf.vb2_buf.index, au->dts, au->size,
ctx                77 drivers/media/platform/sti/delta/delta-v4l2.c static void dump_frame(struct delta_ctx *ctx, struct delta_frame *frame)
ctx                79 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx                84 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->name, frame->index, frame->dts,
ctx                90 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_au_done(struct delta_ctx *ctx, struct delta_au *au, int err)
ctx                95 drivers/media/platform/sti/delta/delta-v4l2.c 	vbuf->sequence = ctx->au_num++;
ctx                99 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_frame_done(struct delta_ctx *ctx, struct delta_frame *frame,
ctx               104 drivers/media/platform/sti/delta/delta-v4l2.c 	dump_frame(ctx, frame);
ctx               110 drivers/media/platform/sti/delta/delta-v4l2.c 	vbuf->sequence = ctx->frame_num++;
ctx               114 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->output_frames++;
ctx               117 drivers/media/platform/sti/delta/delta-v4l2.c static void requeue_free_frames(struct delta_ctx *ctx)
ctx               124 drivers/media/platform/sti/delta/delta-v4l2.c 	for (i = 0; i < ctx->nb_of_frames; i++) {
ctx               125 drivers/media/platform/sti/delta/delta-v4l2.c 		frame = ctx->frames[i];
ctx               128 drivers/media/platform/sti/delta/delta-v4l2.c 			v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               134 drivers/media/platform/sti/delta/delta-v4l2.c static int delta_recycle(struct delta_ctx *ctx, struct delta_frame *frame)
ctx               136 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx               139 drivers/media/platform/sti/delta/delta-v4l2.c 	call_dec_op(dec, recycle, ctx, frame);
ctx               148 drivers/media/platform/sti/delta/delta-v4l2.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               159 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_push_dts(struct delta_ctx *ctx, u64 val)
ctx               174 drivers/media/platform/sti/delta/delta-v4l2.c 	list_add_tail(&dts->list, &ctx->dts);
ctx               177 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_pop_dts(struct delta_ctx *ctx, u64 *val)
ctx               179 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               186 drivers/media/platform/sti/delta/delta-v4l2.c 	if (list_empty(&ctx->dts)) {
ctx               188 drivers/media/platform/sti/delta/delta-v4l2.c 			 ctx->name);
ctx               193 drivers/media/platform/sti/delta/delta-v4l2.c 	dts = list_first_entry(&ctx->dts, struct delta_dts, list);
ctx               201 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_flush_dts(struct delta_ctx *ctx)
ctx               212 drivers/media/platform/sti/delta/delta-v4l2.c 	list_for_each_entry_safe(dts, next, &ctx->dts, list)
ctx               216 drivers/media/platform/sti/delta/delta-v4l2.c 	INIT_LIST_HEAD(&ctx->dts);
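
The delta_push_dts/delta_pop_dts/delta_flush_dts fragments above amount to a small timestamp FIFO built on list_head: push appends with list_add_tail(), pop takes list_first_entry() (guarding list_empty()), and flush walks the list with list_for_each_entry_safe(). A self-contained sketch of that FIFO, under hypothetical names:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_dts {
	struct list_head list;
	u64 val;
};

/* Push one decode timestamp on the tail of the FIFO. */
static void my_push_dts(struct list_head *fifo, u64 val)
{
	struct my_dts *dts = kzalloc(sizeof(*dts), GFP_KERNEL);

	if (!dts)
		return;
	dts->val = val;
	list_add_tail(&dts->list, fifo);
}

/* Pop the oldest timestamp; report 0 if the FIFO ran dry. */
static void my_pop_dts(struct list_head *fifo, u64 *val)
{
	struct my_dts *dts;

	if (list_empty(fifo)) {
		*val = 0;
		return;
	}
	dts = list_first_entry(fifo, struct my_dts, list);
	*val = dts->val;
	list_del(&dts->list);
	kfree(dts);
}

/* Drop everything, e.g. when streaming stops. */
static void my_flush_dts(struct list_head *fifo)
{
	struct my_dts *dts, *next;

	list_for_each_entry_safe(dts, next, fifo, list) {
		list_del(&dts->list);
		kfree(dts);
	}
}

A FIFO fits here because decode timestamps are consumed in the same order the access units were queued.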
ctx               242 drivers/media/platform/sti/delta/delta-v4l2.c static void set_default_params(struct delta_ctx *ctx)
ctx               244 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_frameinfo *frameinfo = &ctx->frameinfo;
ctx               245 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx               274 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->max_au_size = estimated_au_size(streaminfo->width,
ctx               278 drivers/media/platform/sti/delta/delta-v4l2.c static const struct delta_dec *delta_find_decoder(struct delta_ctx *ctx,
ctx               282 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               342 drivers/media/platform/sti/delta/delta-v4l2.c static int delta_open_decoder(struct delta_ctx *ctx, u32 streamformat,
ctx               345 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               349 drivers/media/platform/sti/delta/delta-v4l2.c 	dec = delta_find_decoder(ctx, streamformat, ctx->frameinfo.pixelformat);
ctx               352 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, (char *)&streamformat, (char *)&pixelformat);
ctx               357 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->name, (char *)&streamformat, (char *)&pixelformat);
ctx               360 drivers/media/platform/sti/delta/delta-v4l2.c 	snprintf(ctx->name, sizeof(ctx->name), "[%3d:%4.4s]",
ctx               364 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, open, ctx);
ctx               367 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, ret);
ctx               371 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s %s decoder opened\n", ctx->name, dec->name);
ctx               385 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               386 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               399 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               400 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               413 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               414 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               427 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               428 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               430 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx               433 drivers/media/platform/sti/delta/delta-v4l2.c 	if (!(ctx->flags & DELTA_FLAG_STREAMINFO))
ctx               436 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name,
ctx               444 drivers/media/platform/sti/delta/delta-v4l2.c 	pix->sizeimage = ctx->max_au_size;
ctx               455 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               456 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               458 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_frameinfo *frameinfo = &ctx->frameinfo;
ctx               459 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx               462 drivers/media/platform/sti/delta/delta-v4l2.c 	if (!(ctx->flags & DELTA_FLAG_FRAMEINFO))
ctx               465 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name,
ctx               476 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->flags & DELTA_FLAG_STREAMINFO) {
ctx               494 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               495 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               502 drivers/media/platform/sti/delta/delta-v4l2.c 	dec = delta_find_decoder(ctx, streamformat, ctx->frameinfo.pixelformat);
ctx               506 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, (char *)&pix->pixelformat);
ctx               526 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, width, height,
ctx               533 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, pix->sizeimage, au_size);
ctx               548 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               549 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               555 drivers/media/platform/sti/delta/delta-v4l2.c 	dec = delta_find_decoder(ctx, ctx->streaminfo.streamformat,
ctx               560 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, (char *)&pixelformat);
ctx               577 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, width, height, pix->width, pix->height);
ctx               585 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, width, height, pix->width, pix->height);
ctx               608 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               609 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               618 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, (char *)&pix->pixelformat);
ctx               622 drivers/media/platform/sti/delta/delta-v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               625 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx               629 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->max_au_size = pix->sizeimage;
ctx               630 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.width = pix->width;
ctx               631 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.height = pix->height;
ctx               632 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.streamformat = pix->pixelformat;
ctx               633 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.colorspace = pix->colorspace;
ctx               634 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.xfer_func = pix->xfer_func;
ctx               635 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.ycbcr_enc = pix->ycbcr_enc;
ctx               636 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->streaminfo.quantization = pix->quantization;
ctx               637 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->flags |= DELTA_FLAG_STREAMINFO;
ctx               644 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx               645 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               646 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx               653 drivers/media/platform/sti/delta/delta-v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               656 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx               660 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->state < DELTA_STATE_READY) {
ctx               671 drivers/media/platform/sti/delta/delta-v4l2.c 				ctx->name, (char *)&pix->pixelformat);
ctx               691 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, set_frameinfo, ctx, &frameinfo);
ctx               696 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, get_frameinfo, ctx, &frameinfo);
ctx               700 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->flags |= DELTA_FLAG_FRAMEINFO;
ctx               701 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->frameinfo = frameinfo;
ctx               704 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->name,
ctx               724 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(fh);
ctx               725 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_frameinfo *frameinfo = &ctx->frameinfo;
ctx               731 drivers/media/platform/sti/delta/delta-v4l2.c 	if ((ctx->flags & DELTA_FLAG_FRAMEINFO) &&
ctx               763 drivers/media/platform/sti/delta/delta-v4l2.c static void delta_complete_eos(struct delta_ctx *ctx,
ctx               766 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               782 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_frame_done(ctx, frame, 0);
ctx               785 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_event_queue_fh(&ctx->fh, &ev);
ctx               787 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s EOS completed\n", ctx->name);
ctx               806 drivers/media/platform/sti/delta/delta-v4l2.c static int delta_decoder_stop_cmd(struct delta_ctx *ctx, void *fh)
ctx               808 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx               809 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               813 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s EOS received\n", ctx->name);
ctx               815 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->state != DELTA_STATE_READY)
ctx               819 drivers/media/platform/sti/delta/delta-v4l2.c 	call_dec_op(dec, drain, ctx);
ctx               824 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = call_dec_op(dec, get_frame, ctx, &frame);
ctx               831 drivers/media/platform/sti/delta/delta-v4l2.c 				ctx->name, frame->index);
ctx               834 drivers/media/platform/sti/delta/delta-v4l2.c 			delta_pop_dts(ctx, &frame->dts);
ctx               837 drivers/media/platform/sti/delta/delta-v4l2.c 			delta_frame_done(ctx, frame, 0);
ctx               842 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = delta_get_free_frame(ctx, &frame);
ctx               847 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_complete_eos(ctx, frame);
ctx               849 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->state = DELTA_STATE_EOS;
ctx               860 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->state = DELTA_STATE_WF_EOS;
ctx               861 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s EOS delayed\n", ctx->name);
ctx               869 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(fh);
ctx               876 drivers/media/platform/sti/delta/delta-v4l2.c 	return delta_decoder_stop_cmd(ctx, fh);
ctx               924 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = container_of(work, struct delta_ctx, run_work);
ctx               925 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx               926 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx               934 drivers/media/platform/sti/delta/delta-v4l2.c 		dev_err(delta->dev, "%s no decoder opened yet\n", ctx->name);
ctx               939 drivers/media/platform/sti/delta/delta-v4l2.c 	mutex_lock(&ctx->lock);
ctx               941 drivers/media/platform/sti/delta/delta-v4l2.c 	vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               943 drivers/media/platform/sti/delta/delta-v4l2.c 		dev_err(delta->dev, "%s no buffer to decode\n", ctx->name);
ctx               944 drivers/media/platform/sti/delta/delta-v4l2.c 		mutex_unlock(&ctx->lock);
ctx               952 drivers/media/platform/sti/delta/delta-v4l2.c 	dump_au(ctx, au);
ctx               956 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = delta_get_sync(ctx);
ctx               962 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, decode, ctx, au);
ctx               977 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, ret);
ctx               981 drivers/media/platform/sti/delta/delta-v4l2.c 			delta_put_autosuspend(ctx);
ctx               988 drivers/media/platform/sti/delta/delta-v4l2.c 		delta_put_autosuspend(ctx);
ctx               992 drivers/media/platform/sti/delta/delta-v4l2.c 		delta_push_dts(ctx, au->dts);
ctx               996 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = call_dec_op(dec, get_frame, ctx, &frame);
ctx              1003 drivers/media/platform/sti/delta/delta-v4l2.c 				ctx->name, ret);
ctx              1009 drivers/media/platform/sti/delta/delta-v4l2.c 				ctx->name);
ctx              1015 drivers/media/platform/sti/delta/delta-v4l2.c 		delta_pop_dts(ctx, &frame->dts);
ctx              1018 drivers/media/platform/sti/delta/delta-v4l2.c 		delta_frame_done(ctx, frame, 0);
ctx              1022 drivers/media/platform/sti/delta/delta-v4l2.c 	requeue_free_frames(ctx);
ctx              1023 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_au_done(ctx, au, (discard ? -ENODATA : 0));
ctx              1024 drivers/media/platform/sti/delta/delta-v4l2.c 	mutex_unlock(&ctx->lock);
ctx              1025 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_m2m_job_finish(delta->m2m_dev, ctx->fh.m2m_ctx);
ctx              1029 drivers/media/platform/sti/delta/delta-v4l2.c 	requeue_free_frames(ctx);
ctx              1030 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_au_done(ctx, au, ret);
ctx              1031 drivers/media/platform/sti/delta/delta-v4l2.c 	mutex_unlock(&ctx->lock);
ctx              1032 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_m2m_job_finish(delta->m2m_dev, ctx->fh.m2m_ctx);
ctx              1037 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = priv;
ctx              1038 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1040 drivers/media/platform/sti/delta/delta-v4l2.c 	queue_work(delta->work_queue, &ctx->run_work);
ctx              1045 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = priv;
ctx              1046 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1048 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s aborting job\n", ctx->name);
ctx              1050 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->aborting = true;
ctx              1055 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = priv;
ctx              1056 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1057 drivers/media/platform/sti/delta/delta-v4l2.c 	int src_bufs = v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx);
ctx              1061 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx              1065 drivers/media/platform/sti/delta/delta-v4l2.c 	if (!v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx)) {
ctx              1067 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx              1071 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->aborting) {
ctx              1072 drivers/media/platform/sti/delta/delta-v4l2.c 		dev_dbg(delta->dev, "%s job not ready: aborting\n", ctx->name);
ctx              1076 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s job ready\n", ctx->name);
ctx              1098 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1099 drivers/media/platform/sti/delta/delta-v4l2.c 	unsigned int size = ctx->max_au_size;
ctx              1118 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1119 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1130 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, vb->index, au->vaddr, &au->paddr);
ctx              1139 drivers/media/platform/sti/delta/delta-v4l2.c static int delta_setup_frame(struct delta_ctx *ctx,
ctx              1142 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1143 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx              1148 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, frame->index, DELTA_MAX_FRAMES);
ctx              1152 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->nb_of_frames >= DELTA_MAX_FRAMES) {
ctx              1155 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, ctx->nb_of_frames, DELTA_MAX_FRAMES);
ctx              1159 drivers/media/platform/sti/delta/delta-v4l2.c 	if (frame->index != ctx->nb_of_frames) {
ctx              1162 drivers/media/platform/sti/delta/delta-v4l2.c 			 ctx->name, ctx->nb_of_frames, frame->index);
ctx              1166 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->frames[ctx->nb_of_frames] = frame;
ctx              1167 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->nb_of_frames++;
ctx              1170 drivers/media/platform/sti/delta/delta-v4l2.c 	return call_dec_op(dec, setup_frame, ctx, frame);
ctx              1178 drivers/media/platform/sti/delta/delta-v4l2.c int delta_get_frameinfo_default(struct delta_ctx *ctx,
ctx              1181 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx              1219 drivers/media/platform/sti/delta/delta-v4l2.c static void dump_frames_status(struct delta_ctx *ctx)
ctx              1221 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1227 drivers/media/platform/sti/delta/delta-v4l2.c 		 "%s dumping frames status...\n", ctx->name);
ctx              1229 drivers/media/platform/sti/delta/delta-v4l2.c 	for (i = 0; i < ctx->nb_of_frames; i++) {
ctx              1230 drivers/media/platform/sti/delta/delta-v4l2.c 		frame = ctx->frames[i];
ctx              1233 drivers/media/platform/sti/delta/delta-v4l2.c 			 ctx->name, frame->index,
ctx              1239 drivers/media/platform/sti/delta/delta-v4l2.c int delta_get_free_frame(struct delta_ctx *ctx,
ctx              1242 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1248 drivers/media/platform/sti/delta/delta-v4l2.c 	vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1251 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx              1260 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, frame->index);
ctx              1261 drivers/media/platform/sti/delta/delta-v4l2.c 		dump_frames_status(ctx);
ctx              1266 drivers/media/platform/sti/delta/delta-v4l2.c 		"%s get free frame[%d]\n", ctx->name, frame->index);
ctx              1272 drivers/media/platform/sti/delta/delta-v4l2.c int delta_get_sync(struct delta_ctx *ctx)
ctx              1274 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1288 drivers/media/platform/sti/delta/delta-v4l2.c void delta_put_autosuspend(struct delta_ctx *ctx)
ctx              1290 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1298 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1301 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1307 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1308 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1309 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx              1313 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx              1314 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_frameinfo *frameinfo = &ctx->frameinfo;
ctx              1318 drivers/media/platform/sti/delta/delta-v4l2.c 	if ((ctx->state != DELTA_STATE_WF_FORMAT) &&
ctx              1319 drivers/media/platform/sti/delta/delta-v4l2.c 	    (ctx->state != DELTA_STATE_WF_STREAMINFO))
ctx              1322 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->state == DELTA_STATE_WF_FORMAT) {
ctx              1324 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = delta_open_decoder(ctx,
ctx              1325 drivers/media/platform/sti/delta/delta-v4l2.c 					 ctx->streaminfo.streamformat,
ctx              1326 drivers/media/platform/sti/delta/delta-v4l2.c 					 ctx->frameinfo.pixelformat, &dec);
ctx              1329 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->dec = dec;
ctx              1330 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->state = DELTA_STATE_WF_STREAMINFO;
ctx              1338 drivers/media/platform/sti/delta/delta-v4l2.c 	vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1341 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name);
ctx              1349 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_push_dts(ctx, au->dts);
ctx              1352 drivers/media/platform/sti/delta/delta-v4l2.c 	dump_au(ctx, au);
ctx              1355 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, decode, ctx, au);
ctx              1358 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, ret);
ctx              1362 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, get_streaminfo, ctx, streaminfo);
ctx              1366 drivers/media/platform/sti/delta/delta-v4l2.c 				    ctx->name);
ctx              1369 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->flags |= DELTA_FLAG_STREAMINFO;
ctx              1371 drivers/media/platform/sti/delta/delta-v4l2.c 	ret = call_dec_op(dec, get_frameinfo, ctx, frameinfo);
ctx              1374 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->flags |= DELTA_FLAG_FRAMEINFO;
ctx              1376 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->state = DELTA_STATE_READY;
ctx              1378 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s %s => %s\n", ctx->name,
ctx              1382 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_au_done(ctx, au, ret);
ctx              1393 drivers/media/platform/sti/delta/delta-v4l2.c 	while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1400 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1403 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_flush_dts(ctx);
ctx              1406 drivers/media/platform/sti/delta/delta-v4l2.c 	while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1409 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->au_num = 0;
ctx              1411 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->aborting = false;
ctx              1420 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1421 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1422 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_streaminfo *streaminfo = &ctx->streaminfo;
ctx              1423 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_frameinfo *frameinfo = &ctx->frameinfo;
ctx              1435 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, *num_buffers, DELTA_MIN_FRAME_USER);
ctx              1444 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, *num_buffers, DELTA_MAX_FRAMES);
ctx              1456 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->nb_of_frames = 0;
ctx              1464 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1465 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1474 drivers/media/platform/sti/delta/delta-v4l2.c 		frame->info = ctx->frameinfo;
ctx              1476 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = delta_setup_frame(ctx, frame);
ctx              1480 drivers/media/platform/sti/delta/delta-v4l2.c 				ctx->name, ret);
ctx              1486 drivers/media/platform/sti/delta/delta-v4l2.c 			ctx->name, vb->index, frame->vaddr,
ctx              1510 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1514 drivers/media/platform/sti/delta/delta-v4l2.c 	if (ctx->state == DELTA_STATE_WF_EOS) {
ctx              1516 drivers/media/platform/sti/delta/delta-v4l2.c 		delta_complete_eos(ctx, frame);
ctx              1518 drivers/media/platform/sti/delta/delta-v4l2.c 		ctx->state = DELTA_STATE_EOS;
ctx              1525 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_recycle(ctx, frame);
ctx              1530 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = vb2_get_drv_priv(q);
ctx              1533 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx              1536 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_flush_dts(ctx);
ctx              1538 drivers/media/platform/sti/delta/delta-v4l2.c 	call_dec_op(dec, flush, ctx);
ctx              1544 drivers/media/platform/sti/delta/delta-v4l2.c 	for (i = 0; i < ctx->nb_of_frames; i++) {
ctx              1545 drivers/media/platform/sti/delta/delta-v4l2.c 		frame = ctx->frames[i];
ctx              1553 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->frame_num = 0;
ctx              1555 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->aborting = false;
ctx              1587 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = priv;
ctx              1588 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1595 drivers/media/platform/sti/delta/delta-v4l2.c 	q->drv_priv = ctx;
ctx              1612 drivers/media/platform/sti/delta/delta-v4l2.c 	q->drv_priv = ctx;
ctx              1628 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = NULL;
ctx              1633 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1634 drivers/media/platform/sti/delta/delta-v4l2.c 	if (!ctx) {
ctx              1638 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->dev = delta;
ctx              1640 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              1641 drivers/media/platform/sti/delta/delta-v4l2.c 	file->private_data = &ctx->fh;
ctx              1642 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_add(&ctx->fh);
ctx              1644 drivers/media/platform/sti/delta/delta-v4l2.c 	INIT_WORK(&ctx->run_work, delta_run_work);
ctx              1645 drivers/media/platform/sti/delta/delta-v4l2.c 	mutex_init(&ctx->lock);
ctx              1647 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(delta->m2m_dev, ctx,
ctx              1649 drivers/media/platform/sti/delta/delta-v4l2.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1650 drivers/media/platform/sti/delta/delta-v4l2.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1660 drivers/media/platform/sti/delta/delta-v4l2.c 	ctx->state = DELTA_STATE_WF_FORMAT;
ctx              1662 drivers/media/platform/sti/delta/delta-v4l2.c 	INIT_LIST_HEAD(&ctx->dts);
ctx              1666 drivers/media/platform/sti/delta/delta-v4l2.c 	snprintf(ctx->name, sizeof(ctx->name), "[%3d:----]",
ctx              1670 drivers/media/platform/sti/delta/delta-v4l2.c 	set_default_params(ctx);
ctx              1684 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s decoder instance created\n", ctx->name);
ctx              1689 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx              1690 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx              1691 drivers/media/platform/sti/delta/delta-v4l2.c 	kfree(ctx);
ctx              1700 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_ctx *ctx = to_ctx(file->private_data);
ctx              1701 drivers/media/platform/sti/delta/delta-v4l2.c 	struct delta_dev *delta = ctx->dev;
ctx              1702 drivers/media/platform/sti/delta/delta-v4l2.c 	const struct delta_dec *dec = ctx->dec;
ctx              1707 drivers/media/platform/sti/delta/delta-v4l2.c 	call_dec_op(dec, close, ctx);
ctx              1713 drivers/media/platform/sti/delta/delta-v4l2.c 	delta_trace_summary(ctx);
ctx              1715 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1717 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx              1718 drivers/media/platform/sti/delta/delta-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx              1728 drivers/media/platform/sti/delta/delta-v4l2.c 	dev_dbg(delta->dev, "%s decoder instance released\n", ctx->name);
ctx              1730 drivers/media/platform/sti/delta/delta-v4l2.c 	kfree(ctx);
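
The tail of delta-v4l2.c shows the standard V4L2 mem2mem open/release choreography: embed a v4l2_fh in the instance context, register it, create the m2m context with v4l2_m2m_ctx_init(), and unwind in reverse order on failure or release. The sketch below mirrors only the open path; my_m2m_ctx, my_queue_init and my_open are hypothetical names layered on the real v4l2 helpers.

#include <linux/fs.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-mem2mem.h>

/* Per-instance context; v4l2_fh must be embedded so file->private_data
 * can be cast back to the context (the fh_to_ctx/to_ctx idiom above). */
struct my_m2m_ctx {
	struct v4l2_fh fh;
};

static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq)
{
	/* configure both vb2 queues; drv_priv is the instance context */
	return 0;
}

static int my_open(struct file *file, struct v4l2_m2m_dev *m2m_dev)
{
	struct my_m2m_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	int ret;

	if (!ctx)
		return -ENOMEM;

	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(m2m_dev, ctx, my_queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		ret = PTR_ERR(ctx->fh.m2m_ctx);
		v4l2_fh_del(&ctx->fh);	/* unwind in reverse order */
		v4l2_fh_exit(&ctx->fh);
		kfree(ctx);
		return ret;
	}
	return 0;
}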
ctx               261 drivers/media/platform/sti/delta/delta.h 	int (*open)(struct delta_ctx *ctx);
ctx               262 drivers/media/platform/sti/delta/delta.h 	int (*close)(struct delta_ctx *ctx);
ctx               277 drivers/media/platform/sti/delta/delta.h 	int (*setup_frame)(struct delta_ctx *ctx, struct delta_frame *frame);
ctx               292 drivers/media/platform/sti/delta/delta.h 	int (*get_streaminfo)(struct delta_ctx *ctx,
ctx               302 drivers/media/platform/sti/delta/delta.h 	int (*get_frameinfo)(struct delta_ctx *ctx,
ctx               314 drivers/media/platform/sti/delta/delta.h 	int (*set_frameinfo)(struct delta_ctx *ctx,
ctx               333 drivers/media/platform/sti/delta/delta.h 	int (*decode)(struct delta_ctx *ctx, struct delta_au *au);
ctx               352 drivers/media/platform/sti/delta/delta.h 	int (*get_frame)(struct delta_ctx *ctx, struct delta_frame **frame);
ctx               370 drivers/media/platform/sti/delta/delta.h 	int (*recycle)(struct delta_ctx *ctx, struct delta_frame *frame);
ctx               382 drivers/media/platform/sti/delta/delta.h 	int (*flush)(struct delta_ctx *ctx);
ctx               394 drivers/media/platform/sti/delta/delta.h 	int (*drain)(struct delta_ctx *ctx);
ctx               555 drivers/media/platform/sti/delta/delta.h int delta_get_frameinfo_default(struct delta_ctx *ctx,
ctx               560 drivers/media/platform/sti/delta/delta.h int delta_get_free_frame(struct delta_ctx *ctx,
ctx               563 drivers/media/platform/sti/delta/delta.h int delta_get_sync(struct delta_ctx *ctx);
ctx               564 drivers/media/platform/sti/delta/delta.h void delta_put_autosuspend(struct delta_ctx *ctx);
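
The delta.h fragments outline a decoder-ops vtable: each codec supplies a struct of function pointers (open, close, decode, get_frame, recycle, flush, drain, ...) and the core invokes them through a call_dec_op() wrapper. The guard macro below is my assumption about what that wrapper does, namely skipping optional hooks a decoder leaves NULL; names are hypothetical.

struct my_dec_ctx;
struct my_frame;

struct my_dec_ops {
	int (*open)(struct my_dec_ctx *ctx);
	int (*close)(struct my_dec_ctx *ctx);
	int (*get_frame)(struct my_dec_ctx *ctx, struct my_frame **frame);
	int (*recycle)(struct my_dec_ctx *ctx, struct my_frame *frame);
	int (*flush)(struct my_dec_ctx *ctx);
	int (*drain)(struct my_dec_ctx *ctx);
};

/* Call an op only if this decoder implements it; 0 otherwise. */
#define call_my_op(ops, op, ...) \
	((ops)->op ? (ops)->op(__VA_ARGS__) : 0)

This keeps the v4l2 core codec-agnostic: adding a new format means registering another ops struct, not touching the buffer-handling paths.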
ctx                13 drivers/media/platform/sti/hva/hva-debugfs.c static void format_ctx(struct seq_file *s, struct hva_ctx *ctx)
ctx                15 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_streaminfo *stream = &ctx->streaminfo;
ctx                16 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_frameinfo *frame = &ctx->frameinfo;
ctx                17 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_controls *ctrls = &ctx->ctrls;
ctx                18 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_ctx_dbg *dbg = &ctx->dbg;
ctx                21 drivers/media/platform/sti/hva/hva-debugfs.c 	seq_printf(s, "|-%s\n  |\n", ctx->name);
ctx                24 drivers/media/platform/sti/hva/hva-debugfs.c 		   ctx->flags & HVA_FLAG_FRAMEINFO ? "" : "default ");
ctx                34 drivers/media/platform/sti/hva/hva-debugfs.c 		   ctx->flags & HVA_FLAG_STREAMINFO ? "" : "default ");
ctx                83 drivers/media/platform/sti/hva/hva-debugfs.c 	if (ctx->sys_errors || ctx->encode_errors || ctx->frame_errors) {
ctx                88 drivers/media/platform/sti/hva/hva-debugfs.c 			      ctx->sys_errors,
ctx                89 drivers/media/platform/sti/hva/hva-debugfs.c 			      ctx->encode_errors,
ctx                90 drivers/media/platform/sti/hva/hva-debugfs.c 			      ctx->frame_errors);
ctx               119 drivers/media/platform/sti/hva/hva-debugfs.c void hva_dbg_perf_begin(struct hva_ctx *ctx)
ctx               124 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_ctx_dbg *dbg = &ctx->dbg;
ctx               175 drivers/media/platform/sti/hva/hva-debugfs.c void hva_dbg_perf_end(struct hva_ctx *ctx, struct hva_stream *stream)
ctx               177 drivers/media/platform/sti/hva/hva-debugfs.c 	struct device *dev = ctx_to_dev(ctx);
ctx               182 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_ctx_dbg *dbg = &ctx->dbg;
ctx               196 drivers/media/platform/sti/hva/hva-debugfs.c 		ctx->name,
ctx               219 drivers/media/platform/sti/hva/hva-debugfs.c static void hva_dbg_perf_compute(struct hva_ctx *ctx)
ctx               222 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_ctx_dbg *dbg = &ctx->dbg;
ctx               373 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_ctx *ctx = s->private;
ctx               375 drivers/media/platform/sti/hva/hva-debugfs.c 	seq_printf(s, "[running encoding %d]\n", ctx->id);
ctx               377 drivers/media/platform/sti/hva/hva-debugfs.c 	hva_dbg_perf_compute(ctx);
ctx               378 drivers/media/platform/sti/hva/hva-debugfs.c 	format_ctx(s, ctx);
ctx               383 drivers/media/platform/sti/hva/hva-debugfs.c DEFINE_SHOW_ATTRIBUTE(ctx);
ctx               385 drivers/media/platform/sti/hva/hva-debugfs.c void hva_dbg_ctx_create(struct hva_ctx *ctx)
ctx               387 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_dev *hva = ctx->hva_dev;
ctx               390 drivers/media/platform/sti/hva/hva-debugfs.c 	ctx->dbg.min_duration = UINT_MAX;
ctx               391 drivers/media/platform/sti/hva/hva-debugfs.c 	ctx->dbg.min_period = UINT_MAX;
ctx               392 drivers/media/platform/sti/hva/hva-debugfs.c 	ctx->dbg.min_bitrate = UINT_MAX;
ctx               396 drivers/media/platform/sti/hva/hva-debugfs.c 	ctx->dbg.debugfs_entry = debugfs_create_file(name, 0444,
ctx               398 drivers/media/platform/sti/hva/hva-debugfs.c 						     ctx, &ctx_fops);
ctx               401 drivers/media/platform/sti/hva/hva-debugfs.c void hva_dbg_ctx_remove(struct hva_ctx *ctx)
ctx               403 drivers/media/platform/sti/hva/hva-debugfs.c 	struct hva_dev *hva = ctx->hva_dev;
ctx               405 drivers/media/platform/sti/hva/hva-debugfs.c 	if (ctx->flags & HVA_FLAG_STREAMINFO)
ctx               407 drivers/media/platform/sti/hva/hva-debugfs.c 		memcpy(&hva->dbg.last_ctx, ctx, sizeof(*ctx));
ctx               409 drivers/media/platform/sti/hva/hva-debugfs.c 	debugfs_remove(ctx->dbg.debugfs_entry);
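
The hva-debugfs.c fragments use the DEFINE_SHOW_ATTRIBUTE() helper: a single seq_file show routine plus the macro yields the file_operations passed to debugfs_create_file(), with the context pointer coming back as s->private. A minimal sketch with hypothetical names (my_enc_ctx, my_enc_show, and the my_enc_fops the macro generates):

#include <linux/debugfs.h>
#include <linux/seq_file.h>

struct my_enc_ctx {
	int id;
	struct dentry *debugfs_entry;
};

/* DEFINE_SHOW_ATTRIBUTE(my_enc) generates my_enc_fops from this show
 * routine; s->private is the data handed to debugfs_create_file(). */
static int my_enc_show(struct seq_file *s, void *unused)
{
	struct my_enc_ctx *ctx = s->private;

	seq_printf(s, "[running encoding %d]\n", ctx->id);
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(my_enc);

static void my_enc_dbg_create(struct my_enc_ctx *ctx, struct dentry *root)
{
	ctx->debugfs_entry = debugfs_create_file("ctx", 0444, root,
						 ctx, &my_enc_fops);
}

static void my_enc_dbg_remove(struct my_enc_ctx *ctx)
{
	debugfs_remove(ctx->debugfs_entry);
}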
ctx               592 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_h264_ctx *ctx = (struct hva_h264_ctx *)pctx->priv;
ctx               593 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_buffer *seq_info = ctx->seq_info;
ctx               594 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_buffer *fwd_ref_frame = ctx->ref_frame;
ctx               595 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_buffer *loc_rec_frame = ctx->rec_frame;
ctx               779 drivers/media/platform/sti/hva/hva-h264.c 	td->addr_param_out = (u32)ctx->task->paddr +
ctx               890 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_h264_ctx *ctx;
ctx               911 drivers/media/platform/sti/hva/hva-h264.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               912 drivers/media/platform/sti/hva/hva-h264.c 	if (!ctx) {
ctx               923 drivers/media/platform/sti/hva/hva-h264.c 			    &ctx->seq_info);
ctx               935 drivers/media/platform/sti/hva/hva-h264.c 			    &ctx->ref_frame);
ctx               946 drivers/media/platform/sti/hva/hva-h264.c 			    &ctx->rec_frame);
ctx               958 drivers/media/platform/sti/hva/hva-h264.c 			    &ctx->task);
ctx               966 drivers/media/platform/sti/hva/hva-h264.c 	pctx->priv = (void *)ctx;
ctx               971 drivers/media/platform/sti/hva/hva-h264.c 	hva_mem_free(pctx, ctx->rec_frame);
ctx               973 drivers/media/platform/sti/hva/hva-h264.c 	hva_mem_free(pctx, ctx->ref_frame);
ctx               975 drivers/media/platform/sti/hva/hva-h264.c 	hva_mem_free(pctx, ctx->seq_info);
ctx               977 drivers/media/platform/sti/hva/hva-h264.c 	devm_kfree(dev, ctx);
ctx               985 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_h264_ctx *ctx = (struct hva_h264_ctx *)pctx->priv;
ctx               988 drivers/media/platform/sti/hva/hva-h264.c 	if (ctx->seq_info)
ctx               989 drivers/media/platform/sti/hva/hva-h264.c 		hva_mem_free(pctx, ctx->seq_info);
ctx               991 drivers/media/platform/sti/hva/hva-h264.c 	if (ctx->ref_frame)
ctx               992 drivers/media/platform/sti/hva/hva-h264.c 		hva_mem_free(pctx, ctx->ref_frame);
ctx               994 drivers/media/platform/sti/hva/hva-h264.c 	if (ctx->rec_frame)
ctx               995 drivers/media/platform/sti/hva/hva-h264.c 		hva_mem_free(pctx, ctx->rec_frame);
ctx               997 drivers/media/platform/sti/hva/hva-h264.c 	if (ctx->task)
ctx               998 drivers/media/platform/sti/hva/hva-h264.c 		hva_mem_free(pctx, ctx->task);
ctx              1000 drivers/media/platform/sti/hva/hva-h264.c 	devm_kfree(dev, ctx);
ctx              1008 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_h264_ctx *ctx = (struct hva_h264_ctx *)pctx->priv;
ctx              1009 drivers/media/platform/sti/hva/hva-h264.c 	struct hva_h264_task *task = (struct hva_h264_task *)ctx->task->vaddr;
ctx              1017 drivers/media/platform/sti/hva/hva-h264.c 	ret = hva_hw_execute_task(pctx, H264_ENC, ctx->task);
ctx              1033 drivers/media/platform/sti/hva/hva-h264.c 	swap(ctx->ref_frame, ctx->rec_frame);
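
The closing hva-h264.c fragment, swap(ctx->ref_frame, ctx->rec_frame), is a two-buffer ping-pong: after a successful encode, the freshly reconstructed frame becomes the reference for the next picture and the old reference is reused as the next reconstruction target, with no copying. A tiny sketch of the idea, with hypothetical types:

#include <linux/minmax.h>	/* kernel swap() macro */

struct my_buf;

struct my_h264_ctx {
	struct my_buf *ref_frame;	/* reference for the next frame */
	struct my_buf *rec_frame;	/* reconstruction target of this frame */
};

/* Rotate the pair once per successfully encoded frame. */
static void my_rotate_frames(struct my_h264_ctx *ctx)
{
	swap(ctx->ref_frame, ctx->rec_frame);
}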
ctx               121 drivers/media/platform/sti/hva/hva-hw.c 	struct hva_ctx *ctx = NULL;
ctx               133 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__, ctx_id);
ctx               134 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               138 drivers/media/platform/sti/hva/hva-hw.c 	ctx = hva->instances[ctx_id];
ctx               139 drivers/media/platform/sti/hva/hva-hw.c 	if (!ctx)
ctx               145 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               146 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = false;
ctx               150 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               151 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = false;
ctx               155 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               156 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = false;
ctx               160 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               161 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               165 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               166 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               170 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               171 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               175 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               176 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               180 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               181 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               185 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               186 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               190 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, __func__);
ctx               191 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               225 drivers/media/platform/sti/hva/hva-hw.c 	struct hva_ctx *ctx;
ctx               241 drivers/media/platform/sti/hva/hva-hw.c 	ctx = hva->instances[ctx_id];
ctx               242 drivers/media/platform/sti/hva/hva-hw.c 	if (!ctx)
ctx               247 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, hva->lmi_err_reg);
ctx               248 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               253 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, hva->emi_err_reg);
ctx               254 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               259 drivers/media/platform/sti/hva/hva-hw.c 			ctx->name, hva->hec_mif_err_reg);
ctx               260 drivers/media/platform/sti/hva/hva-hw.c 		ctx->hw_err = true;
ctx               455 drivers/media/platform/sti/hva/hva-hw.c int hva_hw_execute_task(struct hva_ctx *ctx, enum hva_hw_cmd_type cmd,
ctx               458 drivers/media/platform/sti/hva/hva-hw.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               460 drivers/media/platform/sti/hva/hva-hw.c 	u8 client_id = ctx->id;
ctx               471 drivers/media/platform/sti/hva/hva-hw.c 		dev_err(dev, "%s     failed to get pm_runtime\n", ctx->name);
ctx               472 drivers/media/platform/sti/hva/hva-hw.c 		ctx->sys_errors++;
ctx               483 drivers/media/platform/sti/hva/hva-hw.c 		dev_dbg(dev, "%s     unknown command 0x%x\n", ctx->name, cmd);
ctx               484 drivers/media/platform/sti/hva/hva-hw.c 		ctx->encode_errors++;
ctx               490 drivers/media/platform/sti/hva/hva-hw.c 	dev_dbg(dev, "%s     %s: write configuration registers\n", ctx->name,
ctx               507 drivers/media/platform/sti/hva/hva-hw.c 		ctx->name, __func__, cmd + (client_id << 8), &task->paddr);
ctx               513 drivers/media/platform/sti/hva/hva-hw.c 		dev_err(dev, "%s     %s: time out on completion\n", ctx->name,
ctx               515 drivers/media/platform/sti/hva/hva-hw.c 		ctx->encode_errors++;
ctx               521 drivers/media/platform/sti/hva/hva-hw.c 	ret = ctx->hw_err ? -EFAULT : 0;
ctx               523 drivers/media/platform/sti/hva/hva-hw.c 	ctx->encode_errors += ctx->hw_err ? 1 : 0;
ctx               535 drivers/media/platform/sti/hva/hva-hw.c 		dev_dbg(dev, "%s     unknown command 0x%x\n", ctx->name, cmd);
ctx                39 drivers/media/platform/sti/hva/hva-hw.h int hva_hw_execute_task(struct hva_ctx *ctx, enum hva_hw_cmd_type cmd,
ctx                11 drivers/media/platform/sti/hva/hva-mem.c int hva_mem_alloc(struct hva_ctx *ctx, u32 size, const char *name,
ctx                14 drivers/media/platform/sti/hva/hva-mem.c 	struct device *dev = ctx_to_dev(ctx);
ctx                21 drivers/media/platform/sti/hva/hva-mem.c 		ctx->sys_errors++;
ctx                29 drivers/media/platform/sti/hva/hva-mem.c 			ctx->name, __func__, name, size);
ctx                30 drivers/media/platform/sti/hva/hva-mem.c 		ctx->sys_errors++;
ctx                42 drivers/media/platform/sti/hva/hva-mem.c 		ctx->name, size, b->vaddr, &b->paddr, b->name);
ctx                50 drivers/media/platform/sti/hva/hva-mem.c void hva_mem_free(struct hva_ctx *ctx, struct hva_buffer *buf)
ctx                52 drivers/media/platform/sti/hva/hva-mem.c 	struct device *dev = ctx_to_dev(ctx);
ctx                56 drivers/media/platform/sti/hva/hva-mem.c 		ctx->name, buf->size, buf->vaddr, &buf->paddr, buf->name);
ctx                26 drivers/media/platform/sti/hva/hva-mem.h int hva_mem_alloc(struct hva_ctx *ctx,
ctx                31 drivers/media/platform/sti/hva/hva-mem.h void hva_mem_free(struct hva_ctx *ctx,
ctx                91 drivers/media/platform/sti/hva/hva-v4l2.c static void set_default_params(struct hva_ctx *ctx)
ctx                93 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_frameinfo *frameinfo = &ctx->frameinfo;
ctx                94 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_streaminfo *streaminfo = &ctx->streaminfo;
ctx               111 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx               112 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->xfer_func = V4L2_XFER_FUNC_DEFAULT;
ctx               113 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->ycbcr_enc = V4L2_YCBCR_ENC_DEFAULT;
ctx               114 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->quantization = V4L2_QUANTIZATION_DEFAULT;
ctx               116 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->max_stream_size = estimated_stream_size(streaminfo->width,
ctx               120 drivers/media/platform/sti/hva/hva-v4l2.c static const struct hva_enc *hva_find_encoder(struct hva_ctx *ctx,
ctx               124 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               189 drivers/media/platform/sti/hva/hva-v4l2.c static int hva_open_encoder(struct hva_ctx *ctx, u32 streamformat,
ctx               192 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               193 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               198 drivers/media/platform/sti/hva/hva-v4l2.c 	enc = (struct hva_enc *)hva_find_encoder(ctx, pixelformat,
ctx               202 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, (char *)&pixelformat, (char *)&streamformat);
ctx               207 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->name, (char *)&pixelformat, (char *)&streamformat);
ctx               210 drivers/media/platform/sti/hva/hva-v4l2.c 	snprintf(ctx->name, sizeof(ctx->name), "[%3d:%4.4s]",
ctx               214 drivers/media/platform/sti/hva/hva-v4l2.c 	ret = enc->open(ctx);
ctx               217 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, ret);
ctx               221 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s %s encoder opened\n", ctx->name, enc->name);
ctx               228 drivers/media/platform/sti/hva/hva-v4l2.c static void hva_dbg_summary(struct hva_ctx *ctx)
ctx               230 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               231 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_streaminfo *stream = &ctx->streaminfo;
ctx               232 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_frameinfo *frame = &ctx->frameinfo;
ctx               234 drivers/media/platform/sti/hva/hva-v4l2.c 	if (!(ctx->flags & HVA_FLAG_STREAMINFO))
ctx               238 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->name,
ctx               244 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->encoded_frames,
ctx               245 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->sys_errors,
ctx               246 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->encode_errors,
ctx               247 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->frame_errors);
ctx               257 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               258 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               271 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               272 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               285 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               286 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               298 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               299 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_streaminfo *streaminfo = &ctx->streaminfo;
ctx               304 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx               305 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.xfer_func = ctx->xfer_func;
ctx               306 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
ctx               307 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.quantization = ctx->quantization;
ctx               310 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.sizeimage = ctx->max_stream_size;
ctx               317 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               318 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_frameinfo *frameinfo = &ctx->frameinfo;
ctx               323 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx               324 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.xfer_func = ctx->xfer_func;
ctx               325 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
ctx               326 drivers/media/platform/sti/hva/hva-v4l2.c 	f->fmt.pix.quantization = ctx->quantization;
ctx               338 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               339 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               346 drivers/media/platform/sti/hva/hva-v4l2.c 	enc = hva_find_encoder(ctx, ctx->frameinfo.pixelformat, streamformat);
ctx               350 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, (char *)&pix->pixelformat);
ctx               356 drivers/media/platform/sti/hva/hva-v4l2.c 	if (ctx->flags & HVA_FLAG_FRAMEINFO) {
ctx               361 drivers/media/platform/sti/hva/hva-v4l2.c 		pix->width = ctx->frameinfo.width;
ctx               362 drivers/media/platform/sti/hva/hva-v4l2.c 		pix->height = ctx->frameinfo.height;
ctx               366 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, width, height,
ctx               381 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, width, height,
ctx               390 drivers/media/platform/sti/hva/hva-v4l2.c 	pix->colorspace = ctx->colorspace;
ctx               391 drivers/media/platform/sti/hva/hva-v4l2.c 	pix->xfer_func = ctx->xfer_func;
ctx               392 drivers/media/platform/sti/hva/hva-v4l2.c 	pix->ycbcr_enc = ctx->ycbcr_enc;
ctx               393 drivers/media/platform/sti/hva/hva-v4l2.c 	pix->quantization = ctx->quantization;
ctx               402 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               403 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               409 drivers/media/platform/sti/hva/hva-v4l2.c 	enc = hva_find_encoder(ctx, pixelformat, ctx->streaminfo.streamformat);
ctx               413 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, (char *)&pixelformat);
ctx               431 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, width, height, pix->width, pix->height);
ctx               452 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               453 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               460 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, (char *)&f->fmt.pix.pixelformat);
ctx               464 drivers/media/platform/sti/hva/hva-v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               467 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name);
ctx               471 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->max_stream_size = f->fmt.pix.sizeimage;
ctx               472 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->streaminfo.width = f->fmt.pix.width;
ctx               473 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->streaminfo.height = f->fmt.pix.height;
ctx               474 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->streaminfo.streamformat = f->fmt.pix.pixelformat;
ctx               475 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->flags |= HVA_FLAG_STREAMINFO;
ctx               482 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               483 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               491 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, (char *)&pix->pixelformat);
ctx               495 drivers/media/platform/sti/hva/hva-v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               497 drivers/media/platform/sti/hva/hva-v4l2.c 		dev_dbg(dev, "%s V4L2 S_FMT (OUTPUT): queue busy\n", ctx->name);
ctx               501 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->colorspace = pix->colorspace;
ctx               502 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->xfer_func = pix->xfer_func;
ctx               503 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->ycbcr_enc = pix->ycbcr_enc;
ctx               504 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->quantization = pix->quantization;
ctx               506 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.aligned_width = ALIGN(pix->width, HVA_WIDTH_ALIGNMENT);
ctx               507 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.aligned_height = ALIGN(pix->height,
ctx               509 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.size = pix->sizeimage;
ctx               510 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.pixelformat = pix->pixelformat;
ctx               511 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.width = pix->width;
ctx               512 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->frameinfo.height = pix->height;
ctx               513 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->flags |= HVA_FLAG_FRAMEINFO;
ctx               520 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               521 drivers/media/platform/sti/hva/hva-v4l2.c 	struct v4l2_fract *time_per_frame = &ctx->ctrls.time_per_frame;
ctx               536 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               537 drivers/media/platform/sti/hva/hva-v4l2.c 	struct v4l2_fract *time_per_frame = &ctx->ctrls.time_per_frame;
ctx               556 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx               557 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               571 drivers/media/platform/sti/hva/hva-v4l2.c 		vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, buf->type);
ctx               575 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, buf->index, vq->num_buffers);
ctx               584 drivers/media/platform/sti/hva/hva-v4l2.c 	return v4l2_m2m_qbuf(file, ctx->fh.m2m_ctx, buf);
ctx               618 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = container_of(ctrl->handler, struct hva_ctx,
ctx               620 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               622 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s S_CTRL: id = %d, val = %d\n", ctx->name,
ctx               627 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.bitrate_mode = ctrl->val;
ctx               630 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.gop_size = ctrl->val;
ctx               633 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.bitrate = ctrl->val;
ctx               636 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.aspect = ctrl->val;
ctx               639 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.profile = ctrl->val;
ctx               640 drivers/media/platform/sti/hva/hva-v4l2.c 		snprintf(ctx->streaminfo.profile,
ctx               641 drivers/media/platform/sti/hva/hva-v4l2.c 			 sizeof(ctx->streaminfo.profile),
ctx               646 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.level = ctrl->val;
ctx               647 drivers/media/platform/sti/hva/hva-v4l2.c 		snprintf(ctx->streaminfo.level,
ctx               648 drivers/media/platform/sti/hva/hva-v4l2.c 			 sizeof(ctx->streaminfo.level),
ctx               653 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.entropy_mode = ctrl->val;
ctx               656 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.cpb_size = ctrl->val;
ctx               659 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.dct8x8 = ctrl->val;
ctx               662 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.qpmin = ctrl->val;
ctx               665 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.qpmax = ctrl->val;
ctx               668 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.vui_sar = ctrl->val;
ctx               671 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.vui_sar_idc = ctrl->val;
ctx               674 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.sei_fp = ctrl->val;
ctx               677 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->ctrls.sei_fp_type = ctrl->val;
ctx               681 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, ctrl->id);
ctx               693 drivers/media/platform/sti/hva/hva-v4l2.c static int hva_ctrls_setup(struct hva_ctx *ctx)
ctx               695 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               700 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 15);
ctx               702 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               708 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               712 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               717 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               727 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               733 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               739 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               745 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               749 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               753 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               757 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               761 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               766 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               772 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               777 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_new_std_menu(&ctx->ctrl_handler, &hva_ctrl_ops,
ctx               783 drivers/media/platform/sti/hva/hva-v4l2.c 	if (ctx->ctrl_handler.error) {
ctx               784 drivers/media/platform/sti/hva/hva-v4l2.c 		int err = ctx->ctrl_handler.error;
ctx               787 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, err);
ctx               788 drivers/media/platform/sti/hva/hva-v4l2.c 		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               792 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
ctx               795 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->ctrls.time_per_frame.numerator = HVA_DEFAULT_FRAME_NUM;
ctx               796 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->ctrls.time_per_frame.denominator = HVA_DEFAULT_FRAME_DEN;
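Aside: the hva_ctrls_setup() entries above all follow the stock v4l2_ctrl_handler flow — init the handler, register controls, check the accumulated error, then run setup once to push defaults through s_ctrl. A minimal sketch under that assumption; my_ctx and my_ctrl_ops are illustrative stand-ins, not driver symbols:

#include <media/v4l2-ctrls.h>

struct my_ctx {					/* illustrative context */
	struct v4l2_ctrl_handler ctrl_handler;
};

static const struct v4l2_ctrl_ops my_ctrl_ops;	/* assumed to provide .s_ctrl */

static int my_ctrls_setup(struct my_ctx *ctx)
{
	v4l2_ctrl_handler_init(&ctx->ctrl_handler, 2);

	v4l2_ctrl_new_std(&ctx->ctrl_handler, &my_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_BITRATE,
			  32000, 60000000, 32000, 20000000);
	v4l2_ctrl_new_std(&ctx->ctrl_handler, &my_ctrl_ops,
			  V4L2_CID_MPEG_VIDEO_GOP_SIZE, 1, 60, 1, 16);

	/* Registration failures accumulate in handler.error. */
	if (ctx->ctrl_handler.error) {
		int err = ctx->ctrl_handler.error;

		v4l2_ctrl_handler_free(&ctx->ctrl_handler);
		return err;
	}

	/* Pushes every default value through .s_ctrl once. */
	return v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
}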
ctx               807 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = container_of(work, struct hva_ctx, run_work);
ctx               809 drivers/media/platform/sti/hva/hva-v4l2.c 	const struct hva_enc *enc = ctx->enc;
ctx               815 drivers/media/platform/sti/hva/hva-v4l2.c 	mutex_lock(&ctx->lock);
ctx               818 drivers/media/platform/sti/hva/hva-v4l2.c 	hva_dbg_perf_begin(ctx);
ctx               821 drivers/media/platform/sti/hva/hva-v4l2.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               822 drivers/media/platform/sti/hva/hva-v4l2.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               826 drivers/media/platform/sti/hva/hva-v4l2.c 	frame->vbuf.sequence = ctx->frame_num++;
ctx               828 drivers/media/platform/sti/hva/hva-v4l2.c 	ret = enc->encode(ctx, frame, stream);
ctx               838 drivers/media/platform/sti/hva/hva-v4l2.c 		dst_buf->sequence = ctx->stream_num - 1;
ctx               840 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->encoded_frames++;
ctx               843 drivers/media/platform/sti/hva/hva-v4l2.c 		hva_dbg_perf_end(ctx, stream);
ctx               850 drivers/media/platform/sti/hva/hva-v4l2.c 	mutex_unlock(&ctx->lock);
ctx               852 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_m2m_job_finish(ctx->hva_dev->m2m_dev, ctx->fh.m2m_ctx);
ctx               857 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = priv;
ctx               858 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               860 drivers/media/platform/sti/hva/hva-v4l2.c 	queue_work(hva->work_queue, &ctx->run_work);
ctx               865 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = priv;
ctx               866 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               868 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s aborting job\n", ctx->name);
ctx               870 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->aborting = true;
ctx               875 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = priv;
ctx               876 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               878 drivers/media/platform/sti/hva/hva-v4l2.c 	if (!v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx)) {
ctx               880 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name);
ctx               884 drivers/media/platform/sti/hva/hva-v4l2.c 	if (!v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx)) {
ctx               886 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name);
ctx               890 drivers/media/platform/sti/hva/hva-v4l2.c 	if (ctx->aborting) {
ctx               891 drivers/media/platform/sti/hva/hva-v4l2.c 		dev_dbg(dev, "%s job not ready: aborting\n", ctx->name);
ctx               913 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
ctx               914 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               917 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s %s queue setup: num_buffers %d\n", ctx->name,
ctx               921 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->frameinfo.size : ctx->max_stream_size;
ctx               935 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               936 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx               947 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, vb->index, vbuf->field);
ctx               956 drivers/media/platform/sti/hva/hva-v4l2.c 			frame->info = ctx->frameinfo;
ctx               961 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, vb->index,
ctx               977 drivers/media/platform/sti/hva/hva-v4l2.c 				ctx->name, vb->index,
ctx               987 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               990 drivers/media/platform/sti/hva/hva-v4l2.c 	if (ctx->fh.m2m_ctx)
ctx               991 drivers/media/platform/sti/hva/hva-v4l2.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               996 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
ctx               997 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx               998 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx              1004 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s %s start streaming\n", ctx->name,
ctx              1009 drivers/media/platform/sti/hva/hva-v4l2.c 		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->cap_q_ctx.q))
ctx              1012 drivers/media/platform/sti/hva/hva-v4l2.c 		if (!vb2_start_streaming_called(&ctx->fh.m2m_ctx->out_q_ctx.q))
ctx              1019 drivers/media/platform/sti/hva/hva-v4l2.c 			hva->instances[i] = ctx;
ctx              1021 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->id = i;
ctx              1028 drivers/media/platform/sti/hva/hva-v4l2.c 		dev_err(dev, "%s maximum instances reached\n", ctx->name);
ctx              1035 drivers/media/platform/sti/hva/hva-v4l2.c 	if (!ctx->enc) {
ctx              1036 drivers/media/platform/sti/hva/hva-v4l2.c 		ret = hva_open_encoder(ctx,
ctx              1037 drivers/media/platform/sti/hva/hva-v4l2.c 				       ctx->streaminfo.streamformat,
ctx              1038 drivers/media/platform/sti/hva/hva-v4l2.c 				       ctx->frameinfo.pixelformat,
ctx              1039 drivers/media/platform/sti/hva/hva-v4l2.c 				       &ctx->enc);
ctx              1047 drivers/media/platform/sti/hva/hva-v4l2.c 	hva->instances[ctx->id] = NULL;
ctx              1052 drivers/media/platform/sti/hva/hva-v4l2.c 		while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1056 drivers/media/platform/sti/hva/hva-v4l2.c 		while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx              1060 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->sys_errors++;
ctx              1067 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1068 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx              1069 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx              1070 drivers/media/platform/sti/hva/hva-v4l2.c 	const struct hva_enc *enc = ctx->enc;
ctx              1073 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_dbg(dev, "%s %s stop streaming\n", ctx->name,
ctx              1078 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->frame_num = 0;
ctx              1079 drivers/media/platform/sti/hva/hva-v4l2.c 		while ((vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx              1083 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->stream_num = 0;
ctx              1084 drivers/media/platform/sti/hva/hva-v4l2.c 		while ((vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx              1089 drivers/media/platform/sti/hva/hva-v4l2.c 	     vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q)) ||
ctx              1091 drivers/media/platform/sti/hva/hva-v4l2.c 	     vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q))) {
ctx              1093 drivers/media/platform/sti/hva/hva-v4l2.c 			ctx->name, to_type_str(vq->type),
ctx              1094 drivers/media/platform/sti/hva/hva-v4l2.c 			vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q),
ctx              1095 drivers/media/platform/sti/hva/hva-v4l2.c 			vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q));
ctx              1101 drivers/media/platform/sti/hva/hva-v4l2.c 		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
ctx              1102 drivers/media/platform/sti/hva/hva-v4l2.c 		enc->close(ctx);
ctx              1103 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->enc = NULL;
ctx              1106 drivers/media/platform/sti/hva/hva-v4l2.c 		hva->instances[ctx->id] = NULL;
ctx              1110 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->aborting = false;
ctx              1128 drivers/media/platform/sti/hva/hva-v4l2.c static int queue_init(struct hva_ctx *ctx, struct vb2_queue *vq)
ctx              1131 drivers/media/platform/sti/hva/hva-v4l2.c 	vq->drv_priv = ctx;
ctx              1135 drivers/media/platform/sti/hva/hva-v4l2.c 	vq->lock = &ctx->hva_dev->lock;
ctx              1143 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = priv;
ctx              1149 drivers/media/platform/sti/hva/hva-v4l2.c 	src_vq->dev = ctx->hva_dev->dev;
ctx              1151 drivers/media/platform/sti/hva/hva-v4l2.c 	ret = queue_init(ctx, src_vq);
ctx              1158 drivers/media/platform/sti/hva/hva-v4l2.c 	dst_vq->dev = ctx->hva_dev->dev;
ctx              1160 drivers/media/platform/sti/hva/hva-v4l2.c 	return queue_init(ctx, dst_vq);
ctx              1167 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx;
ctx              1170 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1171 drivers/media/platform/sti/hva/hva-v4l2.c 	if (!ctx) {
ctx              1175 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->hva_dev = hva;
ctx              1177 drivers/media/platform/sti/hva/hva-v4l2.c 	INIT_WORK(&ctx->run_work, hva_run_work);
ctx              1178 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              1179 drivers/media/platform/sti/hva/hva-v4l2.c 	file->private_data = &ctx->fh;
ctx              1180 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_add(&ctx->fh);
ctx              1182 drivers/media/platform/sti/hva/hva-v4l2.c 	ret = hva_ctrls_setup(ctx);
ctx              1186 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->sys_errors++;
ctx              1189 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx              1191 drivers/media/platform/sti/hva/hva-v4l2.c 	mutex_init(&ctx->lock);
ctx              1193 drivers/media/platform/sti/hva/hva-v4l2.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(hva->m2m_dev, ctx,
ctx              1195 drivers/media/platform/sti/hva/hva-v4l2.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1196 drivers/media/platform/sti/hva/hva-v4l2.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1199 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->sys_errors++;
ctx              1206 drivers/media/platform/sti/hva/hva-v4l2.c 	snprintf(ctx->name, sizeof(ctx->name), "[%3d:----]",
ctx              1211 drivers/media/platform/sti/hva/hva-v4l2.c 	set_default_params(ctx);
ctx              1214 drivers/media/platform/sti/hva/hva-v4l2.c 	hva_dbg_ctx_create(ctx);
ctx              1217 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_info(dev, "%s encoder instance created\n", ctx->name);
ctx              1222 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1224 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx              1225 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx              1226 drivers/media/platform/sti/hva/hva-v4l2.c 	kfree(ctx);
ctx              1233 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_ctx *ctx = fh_to_ctx(file->private_data);
ctx              1234 drivers/media/platform/sti/hva/hva-v4l2.c 	struct hva_dev *hva = ctx_to_hdev(ctx);
ctx              1235 drivers/media/platform/sti/hva/hva-v4l2.c 	struct device *dev = ctx_to_dev(ctx);
ctx              1236 drivers/media/platform/sti/hva/hva-v4l2.c 	const struct hva_enc *enc = ctx->enc;
ctx              1239 drivers/media/platform/sti/hva/hva-v4l2.c 		dev_dbg(dev, "%s %s encoder closed\n", ctx->name, enc->name);
ctx              1240 drivers/media/platform/sti/hva/hva-v4l2.c 		enc->close(ctx);
ctx              1241 drivers/media/platform/sti/hva/hva-v4l2.c 		ctx->enc = NULL;
ctx              1244 drivers/media/platform/sti/hva/hva-v4l2.c 		hva->instances[ctx->id] = NULL;
ctx              1249 drivers/media/platform/sti/hva/hva-v4l2.c 	hva_dbg_summary(ctx);
ctx              1251 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1253 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1255 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_del(&ctx->fh);
ctx              1256 drivers/media/platform/sti/hva/hva-v4l2.c 	v4l2_fh_exit(&ctx->fh);
ctx              1259 drivers/media/platform/sti/hva/hva-v4l2.c 	hva_dbg_ctx_remove(ctx);
ctx              1262 drivers/media/platform/sti/hva/hva-v4l2.c 	dev_info(dev, "%s encoder instance released\n", ctx->name);
ctx              1264 drivers/media/platform/sti/hva/hva-v4l2.c 	kfree(ctx);
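Aside: the hva_open()/hva_release() entries above pair v4l2_fh and mem2mem context setup with a mirror-image teardown. A hedged sketch of that pairing; my_open(), my_release(), my_ctx and my_queue_init() are illustrative, not driver symbols:

#include <linux/err.h>
#include <linux/fs.h>
#include <linux/slab.h>
#include <media/v4l2-dev.h>
#include <media/v4l2-fh.h>
#include <media/v4l2-mem2mem.h>

struct my_ctx {				/* illustrative context */
	struct v4l2_fh fh;
};

static int my_queue_init(void *priv, struct vb2_queue *src_vq,
			 struct vb2_queue *dst_vq)
{
	return 0;			/* vb2 queue setup elided */
}

static int my_open(struct file *file, struct v4l2_m2m_dev *m2m_dev,
		   struct my_ctx *ctx)
{
	v4l2_fh_init(&ctx->fh, video_devdata(file));
	file->private_data = &ctx->fh;
	v4l2_fh_add(&ctx->fh);

	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(m2m_dev, ctx, my_queue_init);
	if (IS_ERR(ctx->fh.m2m_ctx)) {
		int ret = PTR_ERR(ctx->fh.m2m_ctx);

		/* Unwind in reverse order on failure. */
		v4l2_fh_del(&ctx->fh);
		v4l2_fh_exit(&ctx->fh);
		return ret;
	}
	return 0;
}

static void my_release(struct my_ctx *ctx)
{
	/* Mirror image of my_open(), as in the indexed release path. */
	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
	v4l2_fh_del(&ctx->fh);
	v4l2_fh_exit(&ctx->fh);
	kfree(ctx);
}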
ctx               394 drivers/media/platform/sti/hva/hva.h 	int		(*open)(struct hva_ctx *ctx);
ctx               395 drivers/media/platform/sti/hva/hva.h 	int		(*close)(struct hva_ctx *ctx);
ctx               396 drivers/media/platform/sti/hva/hva.h 	int		(*encode)(struct hva_ctx *ctx, struct hva_frame *frame,
ctx               403 drivers/media/platform/sti/hva/hva.h void hva_dbg_ctx_create(struct hva_ctx *ctx);
ctx               404 drivers/media/platform/sti/hva/hva.h void hva_dbg_ctx_remove(struct hva_ctx *ctx);
ctx               405 drivers/media/platform/sti/hva/hva.h void hva_dbg_perf_begin(struct hva_ctx *ctx);
ctx               406 drivers/media/platform/sti/hva/hva.h void hva_dbg_perf_end(struct hva_ctx *ctx, struct hva_stream *stream);
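Aside: the hva.h ops table above is the whole contract between the V4L2 glue and an encoder backend — open, close, and encode-one-frame. A hedged sketch of a backend filling it in; the my_h264_* names are illustrative (the real driver additionally keys each hva_enc to the pixelformat/streamformat pair that hva_find_encoder() matches on):

/* assumed include: the driver's own hva.h */

static int my_h264_open(struct hva_ctx *ctx)
{
	return 0;			/* allocate per-instance HW state */
}

static int my_h264_close(struct hva_ctx *ctx)
{
	return 0;			/* release per-instance HW state */
}

static int my_h264_encode(struct hva_ctx *ctx, struct hva_frame *frame,
			  struct hva_stream *stream)
{
	return 0;			/* push one frame through the IP */
}

static const struct hva_enc my_h264_enc = {
	.name	= "my H264 encoder",	/* illustrative */
	.open	= my_h264_open,
	.close	= my_h264_close,
	.encode	= my_h264_encode,
};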
ctx                64 drivers/media/platform/ti-vpe/cal.c #define ctx_dbg(level, ctx, fmt, arg...)	\
ctx                65 drivers/media/platform/ti-vpe/cal.c 		v4l2_dbg(level, debug, &ctx->v4l2_dev, fmt, ##arg)
ctx                66 drivers/media/platform/ti-vpe/cal.c #define ctx_info(ctx, fmt, arg...)	\
ctx                67 drivers/media/platform/ti-vpe/cal.c 		v4l2_info(&ctx->v4l2_dev, fmt, ##arg)
ctx                68 drivers/media/platform/ti-vpe/cal.c #define ctx_err(ctx, fmt, arg...)	\
ctx                69 drivers/media/platform/ti-vpe/cal.c 		v4l2_err(&ctx->v4l2_dev, fmt, ##arg)
ctx               255 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx		*ctx[CAL_NUM_CONTEXT];
ctx               311 drivers/media/platform/ti-vpe/cal.c static const struct cal_fmt *find_format_by_pix(struct cal_ctx *ctx,
ctx               317 drivers/media/platform/ti-vpe/cal.c 	for (k = 0; k < ctx->num_active_fmt; k++) {
ctx               318 drivers/media/platform/ti-vpe/cal.c 		fmt = ctx->active_fmt[k];
ctx               326 drivers/media/platform/ti-vpe/cal.c static const struct cal_fmt *find_format_by_code(struct cal_ctx *ctx,
ctx               332 drivers/media/platform/ti-vpe/cal.c 	for (k = 0; k < ctx->num_active_fmt; k++) {
ctx               333 drivers/media/platform/ti-vpe/cal.c 		fmt = ctx->active_fmt[k];
ctx               386 drivers/media/platform/ti-vpe/cal.c static void camerarx_phy_enable(struct cal_ctx *ctx)
ctx               390 drivers/media/platform/ti-vpe/cal.c 	if (!ctx->dev->cm->base) {
ctx               391 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "cm not mapped\n");
ctx               395 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev->cm, CM_CTRL_CORE_CAMERRX_CONTROL);
ctx               396 drivers/media/platform/ti-vpe/cal.c 	if (ctx->csi2_port == 1) {
ctx               402 drivers/media/platform/ti-vpe/cal.c 	} else if (ctx->csi2_port == 2) {
ctx               409 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev->cm, CM_CTRL_CORE_CAMERRX_CONTROL, val);
ctx               412 drivers/media/platform/ti-vpe/cal.c static void camerarx_phy_disable(struct cal_ctx *ctx)
ctx               416 drivers/media/platform/ti-vpe/cal.c 	if (!ctx->dev->cm->base) {
ctx               417 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "cm not mapped\n");
ctx               421 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev->cm, CM_CTRL_CORE_CAMERRX_CONTROL);
ctx               422 drivers/media/platform/ti-vpe/cal.c 	if (ctx->csi2_port == 1)
ctx               424 drivers/media/platform/ti-vpe/cal.c 	else if (ctx->csi2_port == 2)
ctx               426 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev->cm, CM_CTRL_CORE_CAMERRX_CONTROL, val);
ctx               492 drivers/media/platform/ti-vpe/cal.c 	if (dev->ctx[0]) {
ctx               494 drivers/media/platform/ti-vpe/cal.c 			 &dev->ctx[0]->cc->res->start);
ctx               496 drivers/media/platform/ti-vpe/cal.c 			       (__force const void *)dev->ctx[0]->cc->base,
ctx               497 drivers/media/platform/ti-vpe/cal.c 			       resource_size(dev->ctx[0]->cc->res),
ctx               501 drivers/media/platform/ti-vpe/cal.c 	if (dev->ctx[1]) {
ctx               503 drivers/media/platform/ti-vpe/cal.c 			 &dev->ctx[1]->cc->res->start);
ctx               505 drivers/media/platform/ti-vpe/cal.c 			       (__force const void *)dev->ctx[1]->cc->base,
ctx               506 drivers/media/platform/ti-vpe/cal.c 			       resource_size(dev->ctx[1]->cc->res),
ctx               520 drivers/media/platform/ti-vpe/cal.c static void enable_irqs(struct cal_ctx *ctx)
ctx               523 drivers/media/platform/ti-vpe/cal.c 	reg_write_field(ctx->dev,
ctx               526 drivers/media/platform/ti-vpe/cal.c 			CAL_HL_IRQ_MASK(ctx->csi2_port));
ctx               528 drivers/media/platform/ti-vpe/cal.c 	reg_write_field(ctx->dev,
ctx               531 drivers/media/platform/ti-vpe/cal.c 			CAL_HL_IRQ_MASK(ctx->csi2_port));
ctx               533 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_VC_IRQENABLE(1), 0xFF000000);
ctx               536 drivers/media/platform/ti-vpe/cal.c static void disable_irqs(struct cal_ctx *ctx)
ctx               542 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, CAL_HL_IRQ_CLEAR, CAL_HL_IRQ_MASK(ctx->csi2_port));
ctx               543 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_HL_IRQENABLE_CLR(2), val);
ctx               546 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, CAL_HL_IRQ_CLEAR, CAL_HL_IRQ_MASK(ctx->csi2_port));
ctx               547 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_HL_IRQENABLE_CLR(3), val);
ctx               549 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_VC_IRQENABLE(1), 0);
ctx               552 drivers/media/platform/ti-vpe/cal.c static void csi2_init(struct cal_ctx *ctx)
ctx               557 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_CSI2_TIMING(ctx->csi2_port));
ctx               565 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_TIMING(ctx->csi2_port), val);
ctx               566 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_CSI2_TIMING(%d) = 0x%08x\n", ctx->csi2_port,
ctx               567 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_CSI2_TIMING(ctx->csi2_port)));
ctx               569 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port));
ctx               574 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port), val);
ctx               576 drivers/media/platform/ti-vpe/cal.c 		if (reg_read_field(ctx->dev,
ctx               577 drivers/media/platform/ti-vpe/cal.c 				   CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port),
ctx               583 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_CSI2_COMPLEXIO_CFG(%d) = 0x%08x\n", ctx->csi2_port,
ctx               584 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port)));
ctx               586 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_CTRL);
ctx               593 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CTRL, val);
ctx               594 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_CTRL = 0x%08x\n", reg_read(ctx->dev, CAL_CTRL));
ctx               597 drivers/media/platform/ti-vpe/cal.c static void csi2_lane_config(struct cal_ctx *ctx)
ctx               599 drivers/media/platform/ti-vpe/cal.c 	u32 val = reg_read(ctx->dev, CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port));
ctx               603 drivers/media/platform/ti-vpe/cal.c 		&ctx->endpoint.bus.mipi_csi2;
ctx               620 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_COMPLEXIO_CFG(ctx->csi2_port), val);
ctx               621 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_CSI2_COMPLEXIO_CFG(%d) = 0x%08x\n",
ctx               622 drivers/media/platform/ti-vpe/cal.c 		ctx->csi2_port, val);
ctx               625 drivers/media/platform/ti-vpe/cal.c static void csi2_ppi_enable(struct cal_ctx *ctx)
ctx               627 drivers/media/platform/ti-vpe/cal.c 	reg_write_field(ctx->dev, CAL_CSI2_PPI_CTRL(ctx->csi2_port),
ctx               631 drivers/media/platform/ti-vpe/cal.c static void csi2_ppi_disable(struct cal_ctx *ctx)
ctx               633 drivers/media/platform/ti-vpe/cal.c 	reg_write_field(ctx->dev, CAL_CSI2_PPI_CTRL(ctx->csi2_port),
ctx               637 drivers/media/platform/ti-vpe/cal.c static void csi2_ctx_config(struct cal_ctx *ctx)
ctx               641 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_CSI2_CTX0(ctx->csi2_port));
ctx               642 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, ctx->csi2_port, CAL_CSI2_CTX_CPORT_MASK);
ctx               653 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, ctx->virtual_channel, CAL_CSI2_CTX_VC_MASK);
ctx               659 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_CSI2_CTX0(ctx->csi2_port), val);
ctx               660 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_CSI2_CTX0(%d) = 0x%08x\n", ctx->csi2_port,
ctx               661 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_CSI2_CTX0(ctx->csi2_port)));
ctx               664 drivers/media/platform/ti-vpe/cal.c static void pix_proc_config(struct cal_ctx *ctx)
ctx               668 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_PIX_PROC(ctx->csi2_port));
ctx               673 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, ctx->csi2_port, CAL_PIX_PROC_CPORT_MASK);
ctx               675 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_PIX_PROC(ctx->csi2_port), val);
ctx               676 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_PIX_PROC(%d) = 0x%08x\n", ctx->csi2_port,
ctx               677 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_PIX_PROC(ctx->csi2_port)));
ctx               680 drivers/media/platform/ti-vpe/cal.c static void cal_wr_dma_config(struct cal_ctx *ctx,
ctx               685 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_WR_DMA_CTRL(ctx->csi2_port));
ctx               686 drivers/media/platform/ti-vpe/cal.c 	set_field(&val, ctx->csi2_port, CAL_WR_DMA_CTRL_CPORT_MASK);
ctx               694 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_WR_DMA_CTRL(ctx->csi2_port), val);
ctx               695 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_WR_DMA_CTRL(%d) = 0x%08x\n", ctx->csi2_port,
ctx               696 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_WR_DMA_CTRL(ctx->csi2_port)));
ctx               702 drivers/media/platform/ti-vpe/cal.c 	reg_write_field(ctx->dev,
ctx               703 drivers/media/platform/ti-vpe/cal.c 			CAL_WR_DMA_OFST(ctx->csi2_port),
ctx               706 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_WR_DMA_OFST(%d) = 0x%08x\n", ctx->csi2_port,
ctx               707 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_WR_DMA_OFST(ctx->csi2_port)));
ctx               709 drivers/media/platform/ti-vpe/cal.c 	val = reg_read(ctx->dev, CAL_WR_DMA_XSIZE(ctx->csi2_port));
ctx               718 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_WR_DMA_XSIZE(ctx->csi2_port), val);
ctx               719 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "CAL_WR_DMA_XSIZE(%d) = 0x%08x\n", ctx->csi2_port,
ctx               720 drivers/media/platform/ti-vpe/cal.c 		reg_read(ctx->dev, CAL_WR_DMA_XSIZE(ctx->csi2_port)));
ctx               723 drivers/media/platform/ti-vpe/cal.c static void cal_wr_dma_addr(struct cal_ctx *ctx, unsigned int dmaaddr)
ctx               725 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->dev, CAL_WR_DMA_ADDR(ctx->csi2_port), dmaaddr);
ctx               736 drivers/media/platform/ti-vpe/cal.c static void csi2_phy_config(struct cal_ctx *ctx)
ctx               745 drivers/media/platform/ti-vpe/cal.c 	ddrclkperiod_us = ctx->external_rate / 2000000;
ctx               747 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "ddrclkperiod_us: %d\n", ddrclkperiod_us);
ctx               751 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "ths_term: %d (0x%02x)\n", ths_term, ths_term);
ctx               769 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "ths_settle: %d (0x%02x)\n", ths_settle, ths_settle);
ctx               771 drivers/media/platform/ti-vpe/cal.c 	reg0 = reg_read(ctx->cc, CAL_CSI2_PHY_REG0);
ctx               777 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "CSI2_%d_REG0 = 0x%08x\n", (ctx->csi2_port - 1), reg0);
ctx               778 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->cc, CAL_CSI2_PHY_REG0, reg0);
ctx               780 drivers/media/platform/ti-vpe/cal.c 	reg1 = reg_read(ctx->cc, CAL_CSI2_PHY_REG1);
ctx               786 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "CSI2_%d_REG1 = 0x%08x\n", (ctx->csi2_port - 1), reg1);
ctx               787 drivers/media/platform/ti-vpe/cal.c 	reg_write(ctx->cc, CAL_CSI2_PHY_REG1, reg1);
ctx               790 drivers/media/platform/ti-vpe/cal.c static int cal_get_external_info(struct cal_ctx *ctx)
ctx               794 drivers/media/platform/ti-vpe/cal.c 	if (!ctx->sensor)
ctx               797 drivers/media/platform/ti-vpe/cal.c 	ctrl = v4l2_ctrl_find(ctx->sensor->ctrl_handler, V4L2_CID_PIXEL_RATE);
ctx               799 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "no pixel rate control in subdev: %s\n",
ctx               800 drivers/media/platform/ti-vpe/cal.c 			ctx->sensor->name);
ctx               804 drivers/media/platform/ti-vpe/cal.c 	ctx->external_rate = v4l2_ctrl_g_ctrl_int64(ctrl);
ctx               805 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "sensor Pixel Rate: %d\n", ctx->external_rate);
ctx               810 drivers/media/platform/ti-vpe/cal.c static inline void cal_schedule_next_buffer(struct cal_ctx *ctx)
ctx               812 drivers/media/platform/ti-vpe/cal.c 	struct cal_dmaqueue *dma_q = &ctx->vidq;
ctx               817 drivers/media/platform/ti-vpe/cal.c 	ctx->next_frm = buf;
ctx               821 drivers/media/platform/ti-vpe/cal.c 	cal_wr_dma_addr(ctx, addr);
ctx               824 drivers/media/platform/ti-vpe/cal.c static inline void cal_process_buffer_complete(struct cal_ctx *ctx)
ctx               826 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm->vb.vb2_buf.timestamp = ktime_get_ns();
ctx               827 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm->vb.field = ctx->m_fmt.field;
ctx               828 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm->vb.sequence = ctx->sequence++;
ctx               830 drivers/media/platform/ti-vpe/cal.c 	vb2_buffer_done(&ctx->cur_frm->vb.vb2_buf, VB2_BUF_STATE_DONE);
ctx               831 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm = ctx->next_frm;
ctx               842 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx;
ctx               854 drivers/media/platform/ti-vpe/cal.c 			ctx = dev->ctx[0];
ctx               856 drivers/media/platform/ti-vpe/cal.c 			if (ctx->cur_frm != ctx->next_frm)
ctx               857 drivers/media/platform/ti-vpe/cal.c 				cal_process_buffer_complete(ctx);
ctx               861 drivers/media/platform/ti-vpe/cal.c 			ctx = dev->ctx[1];
ctx               863 drivers/media/platform/ti-vpe/cal.c 			if (ctx->cur_frm != ctx->next_frm)
ctx               864 drivers/media/platform/ti-vpe/cal.c 				cal_process_buffer_complete(ctx);
ctx               876 drivers/media/platform/ti-vpe/cal.c 			ctx = dev->ctx[0];
ctx               877 drivers/media/platform/ti-vpe/cal.c 			dma_q = &ctx->vidq;
ctx               879 drivers/media/platform/ti-vpe/cal.c 			spin_lock(&ctx->slock);
ctx               881 drivers/media/platform/ti-vpe/cal.c 			    ctx->cur_frm == ctx->next_frm)
ctx               882 drivers/media/platform/ti-vpe/cal.c 				cal_schedule_next_buffer(ctx);
ctx               883 drivers/media/platform/ti-vpe/cal.c 			spin_unlock(&ctx->slock);
ctx               887 drivers/media/platform/ti-vpe/cal.c 			ctx = dev->ctx[1];
ctx               888 drivers/media/platform/ti-vpe/cal.c 			dma_q = &ctx->vidq;
ctx               890 drivers/media/platform/ti-vpe/cal.c 			spin_lock(&ctx->slock);
ctx               892 drivers/media/platform/ti-vpe/cal.c 			    ctx->cur_frm == ctx->next_frm)
ctx               893 drivers/media/platform/ti-vpe/cal.c 				cal_schedule_next_buffer(ctx);
ctx               894 drivers/media/platform/ti-vpe/cal.c 			spin_unlock(&ctx->slock);
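Aside: the cal_irq() entries above implement a cur_frm/next_frm ping-pong per port — complete the frame the write DMA just finished, then point the DMA at the next queued buffer under the context spinlock. A hedged sketch that collapses the two IRQ events (write-DMA end, frame start) into one illustrative helper assumed to live in cal.c:

#include <linux/list.h>

static void my_irq_one_port(struct cal_ctx *ctx)
{
	struct cal_dmaqueue *dma_q = &ctx->vidq;

	/* Write-DMA end: a frame finished, hand it to userspace. */
	if (ctx->cur_frm != ctx->next_frm)
		cal_process_buffer_complete(ctx);

	/* Frame start: if a buffer is queued and the DMA address has
	 * not already been reprogrammed, make it the next target.
	 */
	spin_lock(&ctx->slock);
	if (!list_empty(&dma_q->active) && ctx->cur_frm == ctx->next_frm)
		cal_schedule_next_buffer(ctx);
	spin_unlock(&ctx->slock);
}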
ctx               907 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx               913 drivers/media/platform/ti-vpe/cal.c 		 "platform:%s", ctx->v4l2_dev.name);
ctx               920 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx               923 drivers/media/platform/ti-vpe/cal.c 	if (f->index >= ctx->num_active_fmt)
ctx               926 drivers/media/platform/ti-vpe/cal.c 	fmt = ctx->active_fmt[f->index];
ctx               933 drivers/media/platform/ti-vpe/cal.c static int __subdev_get_format(struct cal_ctx *ctx,
ctx               943 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_subdev_call(ctx->sensor, pad, get_fmt, NULL, &sd_fmt);
ctx               949 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "%s %dx%d code:%04X\n", __func__,
ctx               955 drivers/media/platform/ti-vpe/cal.c static int __subdev_set_format(struct cal_ctx *ctx,
ctx               966 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_subdev_call(ctx->sensor, pad, set_fmt, NULL, &sd_fmt);
ctx               970 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "%s %dx%d code:%04X\n", __func__,
ctx               976 drivers/media/platform/ti-vpe/cal.c static int cal_calc_format_size(struct cal_ctx *ctx,
ctx               981 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "No cal_fmt provided!\n");
ctx               992 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "%s: fourcc: %s size: %dx%d bpl:%d img_size:%d\n",
ctx              1003 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1005 drivers/media/platform/ti-vpe/cal.c 	*f = ctx->v_fmt;
ctx              1013 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1018 drivers/media/platform/ti-vpe/cal.c 	fmt = find_format_by_pix(ctx, f->fmt.pix.pixelformat);
ctx              1020 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "Fourcc format (0x%08x) not found.\n",
ctx              1024 drivers/media/platform/ti-vpe/cal.c 		fmt = ctx->active_fmt[0];
ctx              1028 drivers/media/platform/ti-vpe/cal.c 	f->fmt.pix.field = ctx->v_fmt.fmt.pix.field;
ctx              1037 drivers/media/platform/ti-vpe/cal.c 		ret = v4l2_subdev_call(ctx->sensor, pad, enum_frame_size,
ctx              1057 drivers/media/platform/ti-vpe/cal.c 		f->fmt.pix.width = ctx->v_fmt.fmt.pix.width;
ctx              1058 drivers/media/platform/ti-vpe/cal.c 		f->fmt.pix.height =  ctx->v_fmt.fmt.pix.height;
ctx              1065 drivers/media/platform/ti-vpe/cal.c 	f->fmt.pix.colorspace = ctx->v_fmt.fmt.pix.colorspace;
ctx              1066 drivers/media/platform/ti-vpe/cal.c 	return cal_calc_format_size(ctx, fmt, f);
ctx              1072 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1073 drivers/media/platform/ti-vpe/cal.c 	struct vb2_queue *q = &ctx->vb_vidq;
ctx              1079 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "%s device busy\n", __func__);
ctx              1087 drivers/media/platform/ti-vpe/cal.c 	fmt = find_format_by_pix(ctx, f->fmt.pix.pixelformat);
ctx              1091 drivers/media/platform/ti-vpe/cal.c 	ret = __subdev_set_format(ctx, &mbus_fmt);
ctx              1097 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx,
ctx              1103 drivers/media/platform/ti-vpe/cal.c 	v4l2_fill_pix_format(&ctx->v_fmt.fmt.pix, &mbus_fmt);
ctx              1104 drivers/media/platform/ti-vpe/cal.c 	ctx->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ctx              1105 drivers/media/platform/ti-vpe/cal.c 	ctx->v_fmt.fmt.pix.pixelformat  = fmt->fourcc;
ctx              1106 drivers/media/platform/ti-vpe/cal.c 	cal_calc_format_size(ctx, fmt, &ctx->v_fmt);
ctx              1107 drivers/media/platform/ti-vpe/cal.c 	ctx->fmt = fmt;
ctx              1108 drivers/media/platform/ti-vpe/cal.c 	ctx->m_fmt = mbus_fmt;
ctx              1109 drivers/media/platform/ti-vpe/cal.c 	*f = ctx->v_fmt;
ctx              1117 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1123 drivers/media/platform/ti-vpe/cal.c 	fmt = find_format_by_pix(ctx, fsize->pixel_format);
ctx              1125 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "Invalid pixel code: %x\n",
ctx              1134 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_subdev_call(ctx->sensor, pad, enum_frame_size, NULL, &fse);
ctx              1138 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "%s: index: %d code: %x W:[%d,%d] H:[%d,%d]\n",
ctx              1162 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1164 drivers/media/platform/ti-vpe/cal.c 	*i = ctx->input;
ctx              1170 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1175 drivers/media/platform/ti-vpe/cal.c 	ctx->input = i;
ctx              1183 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = video_drvdata(file);
ctx              1193 drivers/media/platform/ti-vpe/cal.c 	fmt = find_format_by_pix(ctx, fival->pixel_format);
ctx              1198 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_subdev_call(ctx->sensor, pad, enum_frame_interval,
ctx              1215 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1216 drivers/media/platform/ti-vpe/cal.c 	unsigned size = ctx->v_fmt.fmt.pix.sizeimage;
ctx              1230 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "nbuffers=%d, size=%d\n", *nbuffers, sizes[0]);
ctx              1237 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1242 drivers/media/platform/ti-vpe/cal.c 	if (WARN_ON(!ctx->fmt))
ctx              1245 drivers/media/platform/ti-vpe/cal.c 	size = ctx->v_fmt.fmt.pix.sizeimage;
ctx              1247 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx,
ctx              1259 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1262 drivers/media/platform/ti-vpe/cal.c 	struct cal_dmaqueue *vidq = &ctx->vidq;
ctx              1266 drivers/media/platform/ti-vpe/cal.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx              1268 drivers/media/platform/ti-vpe/cal.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx              1273 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1274 drivers/media/platform/ti-vpe/cal.c 	struct cal_dmaqueue *dma_q = &ctx->vidq;
ctx              1280 drivers/media/platform/ti-vpe/cal.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx              1282 drivers/media/platform/ti-vpe/cal.c 		spin_unlock_irqrestore(&ctx->slock, flags);
ctx              1283 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "buffer queue is empty\n");
ctx              1288 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm = buf;
ctx              1289 drivers/media/platform/ti-vpe/cal.c 	ctx->next_frm = buf;
ctx              1291 drivers/media/platform/ti-vpe/cal.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx              1293 drivers/media/platform/ti-vpe/cal.c 	addr = vb2_dma_contig_plane_dma_addr(&ctx->cur_frm->vb.vb2_buf, 0);
ctx              1294 drivers/media/platform/ti-vpe/cal.c 	ctx->sequence = 0;
ctx              1296 drivers/media/platform/ti-vpe/cal.c 	ret = cal_get_external_info(ctx);
ctx              1300 drivers/media/platform/ti-vpe/cal.c 	cal_runtime_get(ctx->dev);
ctx              1302 drivers/media/platform/ti-vpe/cal.c 	enable_irqs(ctx);
ctx              1303 drivers/media/platform/ti-vpe/cal.c 	camerarx_phy_enable(ctx);
ctx              1304 drivers/media/platform/ti-vpe/cal.c 	csi2_init(ctx);
ctx              1305 drivers/media/platform/ti-vpe/cal.c 	csi2_phy_config(ctx);
ctx              1306 drivers/media/platform/ti-vpe/cal.c 	csi2_lane_config(ctx);
ctx              1307 drivers/media/platform/ti-vpe/cal.c 	csi2_ctx_config(ctx);
ctx              1308 drivers/media/platform/ti-vpe/cal.c 	pix_proc_config(ctx);
ctx              1309 drivers/media/platform/ti-vpe/cal.c 	cal_wr_dma_config(ctx, ctx->v_fmt.fmt.pix.bytesperline);
ctx              1310 drivers/media/platform/ti-vpe/cal.c 	cal_wr_dma_addr(ctx, addr);
ctx              1311 drivers/media/platform/ti-vpe/cal.c 	csi2_ppi_enable(ctx);
ctx              1313 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_subdev_call(ctx->sensor, video, s_stream, 1);
ctx              1315 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "stream on failed in subdev\n");
ctx              1316 drivers/media/platform/ti-vpe/cal.c 		cal_runtime_put(ctx->dev);
ctx              1321 drivers/media/platform/ti-vpe/cal.c 		cal_quickdump_regs(ctx->dev);
ctx              1335 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1336 drivers/media/platform/ti-vpe/cal.c 	struct cal_dmaqueue *dma_q = &ctx->vidq;
ctx              1340 drivers/media/platform/ti-vpe/cal.c 	if (v4l2_subdev_call(ctx->sensor, video, s_stream, 0))
ctx              1341 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "stream off failed in subdev\n");
ctx              1343 drivers/media/platform/ti-vpe/cal.c 	csi2_ppi_disable(ctx);
ctx              1344 drivers/media/platform/ti-vpe/cal.c 	disable_irqs(ctx);
ctx              1347 drivers/media/platform/ti-vpe/cal.c 	spin_lock_irqsave(&ctx->slock, flags);
ctx              1353 drivers/media/platform/ti-vpe/cal.c 	if (ctx->cur_frm == ctx->next_frm) {
ctx              1354 drivers/media/platform/ti-vpe/cal.c 		vb2_buffer_done(&ctx->cur_frm->vb.vb2_buf, VB2_BUF_STATE_ERROR);
ctx              1356 drivers/media/platform/ti-vpe/cal.c 		vb2_buffer_done(&ctx->cur_frm->vb.vb2_buf, VB2_BUF_STATE_ERROR);
ctx              1357 drivers/media/platform/ti-vpe/cal.c 		vb2_buffer_done(&ctx->next_frm->vb.vb2_buf,
ctx              1360 drivers/media/platform/ti-vpe/cal.c 	ctx->cur_frm = NULL;
ctx              1361 drivers/media/platform/ti-vpe/cal.c 	ctx->next_frm = NULL;
ctx              1362 drivers/media/platform/ti-vpe/cal.c 	spin_unlock_irqrestore(&ctx->slock, flags);
ctx              1364 drivers/media/platform/ti-vpe/cal.c 	cal_runtime_put(ctx->dev);
ctx              1425 drivers/media/platform/ti-vpe/cal.c static int cal_complete_ctx(struct cal_ctx *ctx);
ctx              1431 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = notifier_to_ctx(notifier);
ctx              1436 drivers/media/platform/ti-vpe/cal.c 	if (ctx->sensor) {
ctx              1437 drivers/media/platform/ti-vpe/cal.c 		ctx_info(ctx, "Rejecting subdev %s (Already set!!)",
ctx              1442 drivers/media/platform/ti-vpe/cal.c 	ctx->sensor = subdev;
ctx              1443 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "Using sensor %s for capture\n", subdev->name);
ctx              1446 drivers/media/platform/ti-vpe/cal.c 	ctx->num_active_fmt = 0;
ctx              1457 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(2, ctx,
ctx              1465 drivers/media/platform/ti-vpe/cal.c 				ctx->active_fmt[i] = fmt;
ctx              1466 drivers/media/platform/ti-vpe/cal.c 				ctx_dbg(2, ctx,
ctx              1470 drivers/media/platform/ti-vpe/cal.c 				ctx->num_active_fmt = ++i;
ctx              1476 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "No suitable format reported by subdev %s\n",
ctx              1481 drivers/media/platform/ti-vpe/cal.c 	cal_complete_ctx(ctx);
ctx              1488 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx = notifier_to_ctx(notifier);
ctx              1493 drivers/media/platform/ti-vpe/cal.c 	ret = __subdev_get_format(ctx, &mbus_fmt);
ctx              1497 drivers/media/platform/ti-vpe/cal.c 	fmt = find_format_by_code(ctx, mbus_fmt.code);
ctx              1499 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "mbus code format (0x%08x) not found.\n",
ctx              1505 drivers/media/platform/ti-vpe/cal.c 	v4l2_fill_pix_format(&ctx->v_fmt.fmt.pix, &mbus_fmt);
ctx              1506 drivers/media/platform/ti-vpe/cal.c 	ctx->v_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ctx              1507 drivers/media/platform/ti-vpe/cal.c 	ctx->v_fmt.fmt.pix.pixelformat  = fmt->fourcc;
ctx              1508 drivers/media/platform/ti-vpe/cal.c 	cal_calc_format_size(ctx, fmt, &ctx->v_fmt);
ctx              1509 drivers/media/platform/ti-vpe/cal.c 	ctx->fmt = fmt;
ctx              1510 drivers/media/platform/ti-vpe/cal.c 	ctx->m_fmt = mbus_fmt;
ctx              1520 drivers/media/platform/ti-vpe/cal.c static int cal_complete_ctx(struct cal_ctx *ctx)
ctx              1526 drivers/media/platform/ti-vpe/cal.c 	ctx->timeperframe = tpf_default;
ctx              1527 drivers/media/platform/ti-vpe/cal.c 	ctx->external_rate = 192000000;
ctx              1530 drivers/media/platform/ti-vpe/cal.c 	spin_lock_init(&ctx->slock);
ctx              1531 drivers/media/platform/ti-vpe/cal.c 	mutex_init(&ctx->mutex);
ctx              1534 drivers/media/platform/ti-vpe/cal.c 	q = &ctx->vb_vidq;
ctx              1537 drivers/media/platform/ti-vpe/cal.c 	q->drv_priv = ctx;
ctx              1542 drivers/media/platform/ti-vpe/cal.c 	q->lock = &ctx->mutex;
ctx              1544 drivers/media/platform/ti-vpe/cal.c 	q->dev = ctx->v4l2_dev.dev;
ctx              1551 drivers/media/platform/ti-vpe/cal.c 	INIT_LIST_HEAD(&ctx->vidq.active);
ctx              1553 drivers/media/platform/ti-vpe/cal.c 	vfd = &ctx->vdev;
ctx              1555 drivers/media/platform/ti-vpe/cal.c 	vfd->v4l2_dev = &ctx->v4l2_dev;
ctx              1562 drivers/media/platform/ti-vpe/cal.c 	vfd->lock = &ctx->mutex;
ctx              1563 drivers/media/platform/ti-vpe/cal.c 	video_set_drvdata(vfd, ctx);
ctx              1569 drivers/media/platform/ti-vpe/cal.c 	v4l2_info(&ctx->v4l2_dev, "V4L2 device registered as %s\n",
ctx              1638 drivers/media/platform/ti-vpe/cal.c static int of_cal_create_instance(struct cal_ctx *ctx, int inst)
ctx              1640 drivers/media/platform/ti-vpe/cal.c 	struct platform_device *pdev = ctx->dev->pdev;
ctx              1649 drivers/media/platform/ti-vpe/cal.c 	endpoint = &ctx->endpoint;
ctx              1656 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "Scanning Port node for csi2 port: %d\n", inst);
ctx              1660 drivers/media/platform/ti-vpe/cal.c 			ctx_dbg(1, ctx, "No port node found for csi2 port:%d\n",
ctx              1667 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "port:%d inst:%d <reg>:%d\n",
ctx              1676 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(1, ctx, "No port node matches csi2 port:%d\n",
ctx              1681 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "Scanning sub-device for csi2 port: %d\n",
ctx              1686 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "can't get next endpoint\n");
ctx              1692 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "can't get remote parent\n");
ctx              1699 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "Port:%d sub-device %pOFn is not a CSI2 device\n",
ctx              1705 drivers/media/platform/ti-vpe/cal.c 	ctx->virtual_channel = endpoint->base.id;
ctx              1707 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "Port:%d v4l2-endpoint: CSI2\n", inst);
ctx              1708 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "Virtual Channel=%d\n", ctx->virtual_channel);
ctx              1709 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "flags=0x%08x\n", endpoint->bus.mipi_csi2.flags);
ctx              1710 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "clock_lane=%d\n", endpoint->bus.mipi_csi2.clock_lane);
ctx              1711 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "num_data_lanes=%d\n",
ctx              1713 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "data_lanes= <\n");
ctx              1715 drivers/media/platform/ti-vpe/cal.c 		ctx_dbg(3, ctx, "\t%d\n",
ctx              1717 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(3, ctx, "\t>\n");
ctx              1719 drivers/media/platform/ti-vpe/cal.c 	ctx_dbg(1, ctx, "Port: %d found sub-device %pOFn\n",
ctx              1722 drivers/media/platform/ti-vpe/cal.c 	v4l2_async_notifier_init(&ctx->notifier);
ctx              1731 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_async_notifier_add_subdev(&ctx->notifier, asd);
ctx              1733 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "Error adding asd\n");
ctx              1738 drivers/media/platform/ti-vpe/cal.c 	ctx->notifier.ops = &cal_async_ops;
ctx              1739 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_async_notifier_register(&ctx->v4l2_dev,
ctx              1740 drivers/media/platform/ti-vpe/cal.c 					   &ctx->notifier);
ctx              1742 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "Error registering async notifier\n");
ctx              1743 drivers/media/platform/ti-vpe/cal.c 		v4l2_async_notifier_cleanup(&ctx->notifier);
ctx              1764 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx;
ctx              1768 drivers/media/platform/ti-vpe/cal.c 	ctx = devm_kzalloc(&dev->pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              1769 drivers/media/platform/ti-vpe/cal.c 	if (!ctx)
ctx              1773 drivers/media/platform/ti-vpe/cal.c 	ctx->dev = dev;
ctx              1775 drivers/media/platform/ti-vpe/cal.c 	snprintf(ctx->v4l2_dev.name, sizeof(ctx->v4l2_dev.name),
ctx              1777 drivers/media/platform/ti-vpe/cal.c 	ret = v4l2_device_register(&dev->pdev->dev, &ctx->v4l2_dev);
ctx              1781 drivers/media/platform/ti-vpe/cal.c 	hdl = &ctx->ctrl_handler;
ctx              1784 drivers/media/platform/ti-vpe/cal.c 		ctx_err(ctx, "Failed to init ctrl handler\n");
ctx              1787 drivers/media/platform/ti-vpe/cal.c 	ctx->v4l2_dev.ctrl_handler = hdl;
ctx              1790 drivers/media/platform/ti-vpe/cal.c 	ctx->cc = dev->cc[inst];
ctx              1793 drivers/media/platform/ti-vpe/cal.c 	ctx->csi2_port = inst + 1;
ctx              1795 drivers/media/platform/ti-vpe/cal.c 	ret = of_cal_create_instance(ctx, inst);
ctx              1800 drivers/media/platform/ti-vpe/cal.c 	return ctx;
ctx              1805 drivers/media/platform/ti-vpe/cal.c 	v4l2_device_unregister(&ctx->v4l2_dev);
ctx              1813 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx;
ctx              1859 drivers/media/platform/ti-vpe/cal.c 	dev->ctx[0] = NULL;
ctx              1860 drivers/media/platform/ti-vpe/cal.c 	dev->ctx[1] = NULL;
ctx              1862 drivers/media/platform/ti-vpe/cal.c 	dev->ctx[0] = cal_create_instance(dev, 0);
ctx              1863 drivers/media/platform/ti-vpe/cal.c 	dev->ctx[1] = cal_create_instance(dev, 1);
ctx              1864 drivers/media/platform/ti-vpe/cal.c 	if (!dev->ctx[0] && !dev->ctx[1]) {
ctx              1885 drivers/media/platform/ti-vpe/cal.c 		ctx = dev->ctx[i];
ctx              1886 drivers/media/platform/ti-vpe/cal.c 		if (ctx) {
ctx              1887 drivers/media/platform/ti-vpe/cal.c 			v4l2_async_notifier_unregister(&ctx->notifier);
ctx              1888 drivers/media/platform/ti-vpe/cal.c 			v4l2_async_notifier_cleanup(&ctx->notifier);
ctx              1889 drivers/media/platform/ti-vpe/cal.c 			v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1890 drivers/media/platform/ti-vpe/cal.c 			v4l2_device_unregister(&ctx->v4l2_dev);
ctx              1901 drivers/media/platform/ti-vpe/cal.c 	struct cal_ctx *ctx;
ctx              1909 drivers/media/platform/ti-vpe/cal.c 		ctx = dev->ctx[i];
ctx              1910 drivers/media/platform/ti-vpe/cal.c 		if (ctx) {
ctx              1911 drivers/media/platform/ti-vpe/cal.c 			ctx_dbg(1, ctx, "unregistering %s\n",
ctx              1912 drivers/media/platform/ti-vpe/cal.c 				video_device_node_name(&ctx->vdev));
ctx              1913 drivers/media/platform/ti-vpe/cal.c 			camerarx_phy_disable(ctx);
ctx              1914 drivers/media/platform/ti-vpe/cal.c 			v4l2_async_notifier_unregister(&ctx->notifier);
ctx              1915 drivers/media/platform/ti-vpe/cal.c 			v4l2_async_notifier_cleanup(&ctx->notifier);
ctx              1916 drivers/media/platform/ti-vpe/cal.c 			v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx              1917 drivers/media/platform/ti-vpe/cal.c 			v4l2_device_unregister(&ctx->v4l2_dev);
ctx              1918 drivers/media/platform/ti-vpe/cal.c 			video_unregister_device(&ctx->vdev);
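
The cal.c entries above trace one per-CSI2-port context lifecycle: cal_create_instance() registers a private v4l2_device and control handler, of_cal_create_instance() parses the port's DT endpoint and registers an async notifier for the remote sensor, cal_complete_ctx() finishes the vb2 queue and video_device once the subdev binds, and the remove path unwinds in reverse. A minimal, hedged sketch of that bring-up/unwind order follows; the struct is a trimmed, hypothetical subset of the real struct cal_ctx, not the driver's code.

/*
 * Hedged sketch (not the driver's actual code) of the per-port context
 * bring-up order visible in the cal.c entries above. Error unwinding
 * mirrors the cleanup calls the listing shows on the remove path.
 */
#include <media/v4l2-device.h>
#include <media/v4l2-ctrls.h>
#include <media/v4l2-async.h>

struct cal_ctx_sketch {			/* trimmed, hypothetical subset */
	struct v4l2_device v4l2_dev;
	struct v4l2_ctrl_handler ctrl_handler;
	struct v4l2_async_notifier notifier;
};

static int cal_ctx_bring_up(struct cal_ctx_sketch *ctx, struct device *dev)
{
	int ret;

	ret = v4l2_device_register(dev, &ctx->v4l2_dev);
	if (ret)
		return ret;

	/* second argument is only a control-count hint (illustrative) */
	ret = v4l2_ctrl_handler_init(&ctx->ctrl_handler, 11);
	if (ret)
		goto unreg_dev;
	ctx->v4l2_dev.ctrl_handler = &ctx->ctrl_handler;

	v4l2_async_notifier_init(&ctx->notifier);
	/* ...v4l2_async_notifier_add_subdev() for the remote endpoint... */
	ret = v4l2_async_notifier_register(&ctx->v4l2_dev, &ctx->notifier);
	if (ret)
		goto free_hdl;
	return 0;

free_hdl:
	v4l2_async_notifier_cleanup(&ctx->notifier);
	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
unreg_dev:
	v4l2_device_unregister(&ctx->v4l2_dev);
	return ret;
}
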
ctx               422 drivers/media/platform/ti-vpe/vpe.c static struct vpe_q_data *get_q_data(struct vpe_ctx *ctx,
ctx               428 drivers/media/platform/ti-vpe/vpe.c 		return &ctx->q_data[Q_DATA_SRC];
ctx               431 drivers/media/platform/ti-vpe/vpe.c 		return &ctx->q_data[Q_DATA_DST];
ctx               507 drivers/media/platform/ti-vpe/vpe.c #define GET_OFFSET_TOP(ctx, obj, reg)	\
ctx               508 drivers/media/platform/ti-vpe/vpe.c 	((obj)->res->start - ctx->dev->res->start + reg)
ctx               510 drivers/media/platform/ti-vpe/vpe.c #define VPE_SET_MMR_ADB_HDR(ctx, hdr, regs, offset_a)	\
ctx               511 drivers/media/platform/ti-vpe/vpe.c 	VPDMA_SET_MMR_ADB_HDR(ctx->mmr_adb, vpe_mmr_adb, hdr, regs, offset_a)
ctx               515 drivers/media/platform/ti-vpe/vpe.c static void init_adb_hdrs(struct vpe_ctx *ctx)
ctx               517 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, out_fmt_hdr, out_fmt_reg, VPE_CLK_FORMAT_SELECT);
ctx               518 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, us1_hdr, us1_regs, VPE_US1_R0);
ctx               519 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, us2_hdr, us2_regs, VPE_US2_R0);
ctx               520 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, us3_hdr, us3_regs, VPE_US3_R0);
ctx               521 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, dei_hdr, dei_regs, VPE_DEI_FRAME_SIZE);
ctx               522 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, sc_hdr0, sc_regs0,
ctx               523 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC0));
ctx               524 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, sc_hdr8, sc_regs8,
ctx               525 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC8));
ctx               526 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, sc_hdr17, sc_regs17,
ctx               527 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC17));
ctx               528 drivers/media/platform/ti-vpe/vpe.c 	VPE_SET_MMR_ADB_HDR(ctx, csc_hdr, csc_regs,
ctx               529 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->csc, CSC_CSC00));
ctx               539 drivers/media/platform/ti-vpe/vpe.c static int realloc_mv_buffers(struct vpe_ctx *ctx, size_t size)
ctx               541 drivers/media/platform/ti-vpe/vpe.c 	struct device *dev = ctx->dev->v4l2_dev.dev;
ctx               543 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->mv_buf_size == size)
ctx               546 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->mv_buf[0])
ctx               547 drivers/media/platform/ti-vpe/vpe.c 		dma_free_coherent(dev, ctx->mv_buf_size, ctx->mv_buf[0],
ctx               548 drivers/media/platform/ti-vpe/vpe.c 			ctx->mv_buf_dma[0]);
ctx               550 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->mv_buf[1])
ctx               551 drivers/media/platform/ti-vpe/vpe.c 		dma_free_coherent(dev, ctx->mv_buf_size, ctx->mv_buf[1],
ctx               552 drivers/media/platform/ti-vpe/vpe.c 			ctx->mv_buf_dma[1]);
ctx               557 drivers/media/platform/ti-vpe/vpe.c 	ctx->mv_buf[0] = dma_alloc_coherent(dev, size, &ctx->mv_buf_dma[0],
ctx               559 drivers/media/platform/ti-vpe/vpe.c 	if (!ctx->mv_buf[0]) {
ctx               560 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev, "failed to allocate motion vector buffer\n");
ctx               564 drivers/media/platform/ti-vpe/vpe.c 	ctx->mv_buf[1] = dma_alloc_coherent(dev, size, &ctx->mv_buf_dma[1],
ctx               566 drivers/media/platform/ti-vpe/vpe.c 	if (!ctx->mv_buf[1]) {
ctx               567 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev, "failed to allocate motion vector buffer\n");
ctx               568 drivers/media/platform/ti-vpe/vpe.c 		dma_free_coherent(dev, size, ctx->mv_buf[0],
ctx               569 drivers/media/platform/ti-vpe/vpe.c 			ctx->mv_buf_dma[0]);
ctx               574 drivers/media/platform/ti-vpe/vpe.c 	ctx->mv_buf_size = size;
ctx               575 drivers/media/platform/ti-vpe/vpe.c 	ctx->src_mv_buf_selector = 0;
ctx               580 drivers/media/platform/ti-vpe/vpe.c static void free_mv_buffers(struct vpe_ctx *ctx)
ctx               582 drivers/media/platform/ti-vpe/vpe.c 	realloc_mv_buffers(ctx, 0);
ctx               590 drivers/media/platform/ti-vpe/vpe.c static void free_vbs(struct vpe_ctx *ctx)
ctx               592 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_dev *dev = ctx->dev;
ctx               595 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->src_vbs[2] == NULL)
ctx               599 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->src_vbs[2]) {
ctx               600 drivers/media/platform/ti-vpe/vpe.c 		v4l2_m2m_buf_done(ctx->src_vbs[2], VB2_BUF_STATE_DONE);
ctx               601 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[1] && (ctx->src_vbs[1] != ctx->src_vbs[2]))
ctx               602 drivers/media/platform/ti-vpe/vpe.c 			v4l2_m2m_buf_done(ctx->src_vbs[1], VB2_BUF_STATE_DONE);
ctx               603 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[2] = NULL;
ctx               604 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[1] = NULL;
ctx               647 drivers/media/platform/ti-vpe/vpe.c static void set_us_coefficients(struct vpe_ctx *ctx)
ctx               649 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               650 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *s_q_data = &ctx->q_data[Q_DATA_SRC];
ctx               669 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               675 drivers/media/platform/ti-vpe/vpe.c static void set_cfg_modes(struct vpe_ctx *ctx)
ctx               677 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_fmt *fmt = ctx->q_data[Q_DATA_SRC].fmt;
ctx               678 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               696 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               699 drivers/media/platform/ti-vpe/vpe.c static void set_line_modes(struct vpe_ctx *ctx)
ctx               701 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_fmt *fmt = ctx->q_data[Q_DATA_SRC].fmt;
ctx               708 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_line_mode(ctx->dev->vpdma, line_mode, VPE_CHAN_CHROMA1_IN);
ctx               709 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_line_mode(ctx->dev->vpdma, line_mode, VPE_CHAN_CHROMA2_IN);
ctx               710 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_line_mode(ctx->dev->vpdma, line_mode, VPE_CHAN_CHROMA3_IN);
ctx               713 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               715 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               717 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               721 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               723 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               725 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               729 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_frame_start_event(ctx->dev->vpdma, VPDMA_FSEVENT_CHANNEL_ACTIVE,
ctx               737 drivers/media/platform/ti-vpe/vpe.c static void set_src_registers(struct vpe_ctx *ctx)
ctx               739 drivers/media/platform/ti-vpe/vpe.c 	set_us_coefficients(ctx);
ctx               746 drivers/media/platform/ti-vpe/vpe.c static void set_dst_registers(struct vpe_ctx *ctx)
ctx               748 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               749 drivers/media/platform/ti-vpe/vpe.c 	enum v4l2_colorspace clrspc = ctx->q_data[Q_DATA_DST].colorspace;
ctx               750 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_fmt *fmt = ctx->q_data[Q_DATA_DST].fmt;
ctx               755 drivers/media/platform/ti-vpe/vpe.c 		vpdma_set_bg_color(ctx->dev->vpdma,
ctx               771 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               777 drivers/media/platform/ti-vpe/vpe.c static void set_dei_regs(struct vpe_ctx *ctx)
ctx               779 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               780 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *s_q_data = &ctx->q_data[Q_DATA_SRC];
ctx               793 drivers/media/platform/ti-vpe/vpe.c 	if (!(s_q_data->flags & Q_IS_INTERLACED) || !ctx->deinterlacing) {
ctx               806 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               809 drivers/media/platform/ti-vpe/vpe.c static void set_dei_shadow_registers(struct vpe_ctx *ctx)
ctx               811 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               822 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               825 drivers/media/platform/ti-vpe/vpe.c static void config_edi_input_mode(struct vpe_ctx *ctx, int mode)
ctx               827 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               839 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx               846 drivers/media/platform/ti-vpe/vpe.c static int set_srcdst_params(struct vpe_ctx *ctx)
ctx               848 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *s_q_data =  &ctx->q_data[Q_DATA_SRC];
ctx               849 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *d_q_data =  &ctx->q_data[Q_DATA_DST];
ctx               850 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_mmr_adb *mmr_adb = ctx->mmr_adb.addr;
ctx               858 drivers/media/platform/ti-vpe/vpe.c 	ctx->sequence = 0;
ctx               859 drivers/media/platform/ti-vpe/vpe.c 	ctx->field = V4L2_FIELD_TOP;
ctx               878 drivers/media/platform/ti-vpe/vpe.c 		ctx->deinterlacing = true;
ctx               881 drivers/media/platform/ti-vpe/vpe.c 		ctx->deinterlacing = false;
ctx               885 drivers/media/platform/ti-vpe/vpe.c 	free_vbs(ctx);
ctx               886 drivers/media/platform/ti-vpe/vpe.c 	ctx->src_vbs[2] = ctx->src_vbs[1] = ctx->src_vbs[0] = NULL;
ctx               888 drivers/media/platform/ti-vpe/vpe.c 	ret = realloc_mv_buffers(ctx, mv_buf_size);
ctx               892 drivers/media/platform/ti-vpe/vpe.c 	set_cfg_modes(ctx);
ctx               893 drivers/media/platform/ti-vpe/vpe.c 	set_dei_regs(ctx);
ctx               895 drivers/media/platform/ti-vpe/vpe.c 	csc_set_coeff(ctx->dev->csc, &mmr_adb->csc_regs[0],
ctx               898 drivers/media/platform/ti-vpe/vpe.c 	sc_set_hs_coeffs(ctx->dev->sc, ctx->sc_coeff_h.addr, src_w, dst_w);
ctx               899 drivers/media/platform/ti-vpe/vpe.c 	sc_set_vs_coeffs(ctx->dev->sc, ctx->sc_coeff_v.addr, src_h, dst_h);
ctx               901 drivers/media/platform/ti-vpe/vpe.c 	sc_config_scaler(ctx->dev->sc, &mmr_adb->sc_regs0[0],
ctx               925 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = priv;
ctx               932 drivers/media/platform/ti-vpe/vpe.c 	if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) <= 0 ||
ctx               933 drivers/media/platform/ti-vpe/vpe.c 		v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx) <= 0)
ctx               941 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = priv;
ctx               944 drivers/media/platform/ti-vpe/vpe.c 	ctx->aborting = 1;
ctx              1010 drivers/media/platform/ti-vpe/vpe.c static void add_out_dtd(struct vpe_ctx *ctx, int port)
ctx              1012 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *q_data = &ctx->q_data[Q_DATA_DST];
ctx              1014 drivers/media/platform/ti-vpe/vpe.c 	struct vb2_buffer *vb = &ctx->dst_vb->vb2_buf;
ctx              1017 drivers/media/platform/ti-vpe/vpe.c 	int mv_buf_selector = !ctx->src_mv_buf_selector;
ctx              1025 drivers/media/platform/ti-vpe/vpe.c 		dma_addr = ctx->mv_buf_dma[mv_buf_selector];
ctx              1026 drivers/media/platform/ti-vpe/vpe.c 		q_data = &ctx->q_data[Q_DATA_SRC];
ctx              1048 drivers/media/platform/ti-vpe/vpe.c 			vpe_err(ctx->dev,
ctx              1063 drivers/media/platform/ti-vpe/vpe.c 	vpdma_set_max_size(ctx->dev->vpdma, VPDMA_MAX_SIZE1,
ctx              1066 drivers/media/platform/ti-vpe/vpe.c 	vpdma_add_out_dtd(&ctx->desc_list, q_data->width,
ctx              1072 drivers/media/platform/ti-vpe/vpe.c static void add_in_dtd(struct vpe_ctx *ctx, int port)
ctx              1074 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *q_data = &ctx->q_data[Q_DATA_SRC];
ctx              1076 drivers/media/platform/ti-vpe/vpe.c 	struct vb2_buffer *vb = &ctx->src_vbs[p_data->vb_index]->vb2_buf;
ctx              1080 drivers/media/platform/ti-vpe/vpe.c 	int mv_buf_selector = ctx->src_mv_buf_selector;
ctx              1090 drivers/media/platform/ti-vpe/vpe.c 		dma_addr = ctx->mv_buf_dma[mv_buf_selector];
ctx              1112 drivers/media/platform/ti-vpe/vpe.c 			vpe_err(ctx->dev,
ctx              1127 drivers/media/platform/ti-vpe/vpe.c 			field = (p_data->vb_index + (ctx->sequence % 2)) % 2;
ctx              1155 drivers/media/platform/ti-vpe/vpe.c 	vpdma_add_in_dtd(&ctx->desc_list, q_data->width, stride,
ctx              1164 drivers/media/platform/ti-vpe/vpe.c static void enable_irqs(struct vpe_ctx *ctx)
ctx              1166 drivers/media/platform/ti-vpe/vpe.c 	write_reg(ctx->dev, VPE_INT0_ENABLE0_SET, VPE_INT0_LIST0_COMPLETE);
ctx              1167 drivers/media/platform/ti-vpe/vpe.c 	write_reg(ctx->dev, VPE_INT0_ENABLE1_SET, VPE_DEI_ERROR_INT |
ctx              1170 drivers/media/platform/ti-vpe/vpe.c 	vpdma_enable_list_complete_irq(ctx->dev->vpdma, 0, 0, true);
ctx              1173 drivers/media/platform/ti-vpe/vpe.c static void disable_irqs(struct vpe_ctx *ctx)
ctx              1175 drivers/media/platform/ti-vpe/vpe.c 	write_reg(ctx->dev, VPE_INT0_ENABLE0_CLR, 0xffffffff);
ctx              1176 drivers/media/platform/ti-vpe/vpe.c 	write_reg(ctx->dev, VPE_INT0_ENABLE1_CLR, 0xffffffff);
ctx              1178 drivers/media/platform/ti-vpe/vpe.c 	vpdma_enable_list_complete_irq(ctx->dev->vpdma, 0, 0, false);
ctx              1188 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = priv;
ctx              1189 drivers/media/platform/ti-vpe/vpe.c 	struct sc_data *sc = ctx->dev->sc;
ctx              1190 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *d_q_data = &ctx->q_data[Q_DATA_DST];
ctx              1191 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *s_q_data = &ctx->q_data[Q_DATA_SRC];
ctx              1193 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing && s_q_data->flags & Q_DATA_INTERLACED_SEQ_TB &&
ctx              1194 drivers/media/platform/ti-vpe/vpe.c 		ctx->sequence % 2 == 0) {
ctx              1200 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[0] = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx              1201 drivers/media/platform/ti-vpe/vpe.c 		WARN_ON(ctx->src_vbs[0] == NULL);
ctx              1203 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[0] = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1204 drivers/media/platform/ti-vpe/vpe.c 		WARN_ON(ctx->src_vbs[0] == NULL);
ctx              1207 drivers/media/platform/ti-vpe/vpe.c 	ctx->dst_vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1208 drivers/media/platform/ti-vpe/vpe.c 	WARN_ON(ctx->dst_vb == NULL);
ctx              1210 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing) {
ctx              1212 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[2] == NULL) {
ctx              1213 drivers/media/platform/ti-vpe/vpe.c 			ctx->src_vbs[2] = ctx->src_vbs[0];
ctx              1214 drivers/media/platform/ti-vpe/vpe.c 			WARN_ON(ctx->src_vbs[2] == NULL);
ctx              1215 drivers/media/platform/ti-vpe/vpe.c 			ctx->src_vbs[1] = ctx->src_vbs[0];
ctx              1216 drivers/media/platform/ti-vpe/vpe.c 			WARN_ON(ctx->src_vbs[1] == NULL);
ctx              1223 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->sequence == 2)
ctx              1224 drivers/media/platform/ti-vpe/vpe.c 			config_edi_input_mode(ctx, 0x3); /* EDI (Y + UV) */
ctx              1228 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->dev->loaded_mmrs != ctx->mmr_adb.dma_addr || ctx->load_mmrs) {
ctx              1229 drivers/media/platform/ti-vpe/vpe.c 		vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->mmr_adb);
ctx              1230 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_cfd_adb(&ctx->desc_list, CFD_MMR_CLIENT, &ctx->mmr_adb);
ctx              1232 drivers/media/platform/ti-vpe/vpe.c 		set_line_modes(ctx);
ctx              1234 drivers/media/platform/ti-vpe/vpe.c 		ctx->dev->loaded_mmrs = ctx->mmr_adb.dma_addr;
ctx              1235 drivers/media/platform/ti-vpe/vpe.c 		ctx->load_mmrs = false;
ctx              1238 drivers/media/platform/ti-vpe/vpe.c 	if (sc->loaded_coeff_h != ctx->sc_coeff_h.dma_addr ||
ctx              1240 drivers/media/platform/ti-vpe/vpe.c 		vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->sc_coeff_h);
ctx              1241 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_cfd_block(&ctx->desc_list, CFD_SC_CLIENT,
ctx              1242 drivers/media/platform/ti-vpe/vpe.c 			&ctx->sc_coeff_h, 0);
ctx              1244 drivers/media/platform/ti-vpe/vpe.c 		sc->loaded_coeff_h = ctx->sc_coeff_h.dma_addr;
ctx              1248 drivers/media/platform/ti-vpe/vpe.c 	if (sc->loaded_coeff_v != ctx->sc_coeff_v.dma_addr ||
ctx              1250 drivers/media/platform/ti-vpe/vpe.c 		vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->sc_coeff_v);
ctx              1251 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_cfd_block(&ctx->desc_list, CFD_SC_CLIENT,
ctx              1252 drivers/media/platform/ti-vpe/vpe.c 			&ctx->sc_coeff_v, SC_COEF_SRAM_SIZE >> 4);
ctx              1254 drivers/media/platform/ti-vpe/vpe.c 		sc->loaded_coeff_v = ctx->sc_coeff_v.dma_addr;
ctx              1259 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing)
ctx              1260 drivers/media/platform/ti-vpe/vpe.c 		add_out_dtd(ctx, VPE_PORT_MV_OUT);
ctx              1263 drivers/media/platform/ti-vpe/vpe.c 		add_out_dtd(ctx, VPE_PORT_RGB_OUT);
ctx              1265 drivers/media/platform/ti-vpe/vpe.c 		add_out_dtd(ctx, VPE_PORT_LUMA_OUT);
ctx              1267 drivers/media/platform/ti-vpe/vpe.c 			add_out_dtd(ctx, VPE_PORT_CHROMA_OUT);
ctx              1271 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing) {
ctx              1272 drivers/media/platform/ti-vpe/vpe.c 		add_in_dtd(ctx, VPE_PORT_LUMA3_IN);
ctx              1273 drivers/media/platform/ti-vpe/vpe.c 		add_in_dtd(ctx, VPE_PORT_CHROMA3_IN);
ctx              1275 drivers/media/platform/ti-vpe/vpe.c 		add_in_dtd(ctx, VPE_PORT_LUMA2_IN);
ctx              1276 drivers/media/platform/ti-vpe/vpe.c 		add_in_dtd(ctx, VPE_PORT_CHROMA2_IN);
ctx              1279 drivers/media/platform/ti-vpe/vpe.c 	add_in_dtd(ctx, VPE_PORT_LUMA1_IN);
ctx              1280 drivers/media/platform/ti-vpe/vpe.c 	add_in_dtd(ctx, VPE_PORT_CHROMA1_IN);
ctx              1282 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing)
ctx              1283 drivers/media/platform/ti-vpe/vpe.c 		add_in_dtd(ctx, VPE_PORT_MV_IN);
ctx              1286 drivers/media/platform/ti-vpe/vpe.c 	vpdma_add_sync_on_channel_ctd(&ctx->desc_list, VPE_CHAN_LUMA1_IN);
ctx              1287 drivers/media/platform/ti-vpe/vpe.c 	vpdma_add_sync_on_channel_ctd(&ctx->desc_list, VPE_CHAN_CHROMA1_IN);
ctx              1289 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing) {
ctx              1290 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1292 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1295 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1297 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1300 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list, VPE_CHAN_MV_IN);
ctx              1305 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1308 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1311 drivers/media/platform/ti-vpe/vpe.c 			vpdma_add_sync_on_channel_ctd(&ctx->desc_list,
ctx              1315 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing)
ctx              1316 drivers/media/platform/ti-vpe/vpe.c 		vpdma_add_sync_on_channel_ctd(&ctx->desc_list, VPE_CHAN_MV_OUT);
ctx              1318 drivers/media/platform/ti-vpe/vpe.c 	enable_irqs(ctx);
ctx              1320 drivers/media/platform/ti-vpe/vpe.c 	vpdma_map_desc_buf(ctx->dev->vpdma, &ctx->desc_list.buf);
ctx              1321 drivers/media/platform/ti-vpe/vpe.c 	vpdma_submit_descs(ctx->dev->vpdma, &ctx->desc_list, 0);
ctx              1324 drivers/media/platform/ti-vpe/vpe.c static void dei_error(struct vpe_ctx *ctx)
ctx              1326 drivers/media/platform/ti-vpe/vpe.c 	dev_warn(ctx->dev->v4l2_dev.dev,
ctx              1330 drivers/media/platform/ti-vpe/vpe.c static void ds1_uv_error(struct vpe_ctx *ctx)
ctx              1332 drivers/media/platform/ti-vpe/vpe.c 	dev_warn(ctx->dev->v4l2_dev.dev,
ctx              1339 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx;
ctx              1358 drivers/media/platform/ti-vpe/vpe.c 	ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
ctx              1359 drivers/media/platform/ti-vpe/vpe.c 	if (!ctx) {
ctx              1367 drivers/media/platform/ti-vpe/vpe.c 			dei_error(ctx);
ctx              1371 drivers/media/platform/ti-vpe/vpe.c 			ds1_uv_error(ctx);
ctx              1377 drivers/media/platform/ti-vpe/vpe.c 			vpdma_clear_list_stat(ctx->dev->vpdma, 0, 0);
ctx              1395 drivers/media/platform/ti-vpe/vpe.c 	disable_irqs(ctx);
ctx              1397 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->desc_list.buf);
ctx              1398 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->mmr_adb);
ctx              1399 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_h);
ctx              1400 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_v);
ctx              1402 drivers/media/platform/ti-vpe/vpe.c 	vpdma_reset_desc_list(&ctx->desc_list);
ctx              1405 drivers/media/platform/ti-vpe/vpe.c 	ctx->src_mv_buf_selector = !ctx->src_mv_buf_selector;
ctx              1407 drivers/media/platform/ti-vpe/vpe.c 	s_vb = ctx->src_vbs[0];
ctx              1408 drivers/media/platform/ti-vpe/vpe.c 	d_vb = ctx->dst_vb;
ctx              1416 drivers/media/platform/ti-vpe/vpe.c 	d_vb->sequence = ctx->sequence;
ctx              1417 drivers/media/platform/ti-vpe/vpe.c 	s_vb->sequence = ctx->sequence;
ctx              1419 drivers/media/platform/ti-vpe/vpe.c 	d_q_data = &ctx->q_data[Q_DATA_DST];
ctx              1421 drivers/media/platform/ti-vpe/vpe.c 		d_vb->field = ctx->field;
ctx              1422 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->field == V4L2_FIELD_BOTTOM) {
ctx              1423 drivers/media/platform/ti-vpe/vpe.c 			ctx->sequence++;
ctx              1424 drivers/media/platform/ti-vpe/vpe.c 			ctx->field = V4L2_FIELD_TOP;
ctx              1426 drivers/media/platform/ti-vpe/vpe.c 			WARN_ON(ctx->field != V4L2_FIELD_TOP);
ctx              1427 drivers/media/platform/ti-vpe/vpe.c 			ctx->field = V4L2_FIELD_BOTTOM;
ctx              1431 drivers/media/platform/ti-vpe/vpe.c 		ctx->sequence++;
ctx              1434 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing) {
ctx              1443 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[2] != ctx->src_vbs[1])
ctx              1444 drivers/media/platform/ti-vpe/vpe.c 			s_vb = ctx->src_vbs[2];
ctx              1458 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing) {
ctx              1459 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[2] = ctx->src_vbs[1];
ctx              1460 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[1] = ctx->src_vbs[0];
ctx              1468 drivers/media/platform/ti-vpe/vpe.c 	ctx->src_vbs[0] = NULL;
ctx              1469 drivers/media/platform/ti-vpe/vpe.c 	ctx->dst_vb = NULL;
ctx              1471 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->aborting)
ctx              1474 drivers/media/platform/ti-vpe/vpe.c 	ctx->bufs_completed++;
ctx              1475 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->bufs_completed < ctx->bufs_per_job && job_ready(ctx)) {
ctx              1476 drivers/media/platform/ti-vpe/vpe.c 		device_run(ctx);
ctx              1481 drivers/media/platform/ti-vpe/vpe.c 	vpe_dbg(ctx->dev, "finishing transaction\n");
ctx              1482 drivers/media/platform/ti-vpe/vpe.c 	ctx->bufs_completed = 0;
ctx              1483 drivers/media/platform/ti-vpe/vpe.c 	v4l2_m2m_job_finish(dev->m2m_dev, ctx->fh.m2m_ctx);
ctx              1536 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              1541 drivers/media/platform/ti-vpe/vpe.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx              1545 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, f->type);
ctx              1558 drivers/media/platform/ti-vpe/vpe.c 		s_q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
ctx              1573 drivers/media/platform/ti-vpe/vpe.c static int __vpe_try_fmt(struct vpe_ctx *ctx, struct v4l2_format *f,
ctx              1583 drivers/media/platform/ti-vpe/vpe.c 		vpe_dbg(ctx->dev, "Fourcc format (0x%08x) invalid.\n",
ctx              1700 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              1704 drivers/media/platform/ti-vpe/vpe.c 		return __vpe_try_fmt(ctx, f, fmt, VPE_FMT_TYPE_OUTPUT);
ctx              1706 drivers/media/platform/ti-vpe/vpe.c 		return __vpe_try_fmt(ctx, f, fmt, VPE_FMT_TYPE_CAPTURE);
ctx              1709 drivers/media/platform/ti-vpe/vpe.c static int __vpe_s_fmt(struct vpe_ctx *ctx, struct v4l2_format *f)
ctx              1717 drivers/media/platform/ti-vpe/vpe.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx              1722 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev, "queue busy\n");
ctx              1726 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, f->type);
ctx              1760 drivers/media/platform/ti-vpe/vpe.c 	vpe_dbg(ctx->dev, "Setting format for type %d, wxh: %dx%d, fmt: %d bpl_y %d",
ctx              1764 drivers/media/platform/ti-vpe/vpe.c 		vpe_dbg(ctx->dev, " bpl_uv %d\n",
ctx              1773 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              1779 drivers/media/platform/ti-vpe/vpe.c 	ret = __vpe_s_fmt(ctx, f);
ctx              1784 drivers/media/platform/ti-vpe/vpe.c 		set_src_registers(ctx);
ctx              1786 drivers/media/platform/ti-vpe/vpe.c 		set_dst_registers(ctx);
ctx              1788 drivers/media/platform/ti-vpe/vpe.c 	return set_srcdst_params(ctx);
ctx              1791 drivers/media/platform/ti-vpe/vpe.c static int __vpe_try_selection(struct vpe_ctx *ctx, struct v4l2_selection *s)
ctx              1800 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, s->type);
ctx              1839 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev, "negative values for top and left\n");
ctx              1858 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              1866 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, s->type);
ctx              1919 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              1924 drivers/media/platform/ti-vpe/vpe.c 	ret = __vpe_try_selection(ctx, &sel);
ctx              1928 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, sel.type);
ctx              1936 drivers/media/platform/ti-vpe/vpe.c 		vpe_dbg(ctx->dev,
ctx              1943 drivers/media/platform/ti-vpe/vpe.c 	return set_srcdst_params(ctx);
ctx              1954 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx =
ctx              1959 drivers/media/platform/ti-vpe/vpe.c 		ctx->bufs_per_job = ctrl->val;
ctx              1963 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev, "Invalid control\n");
ctx              2010 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = vb2_get_drv_priv(vq);
ctx              2013 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, vq->type);
ctx              2020 drivers/media/platform/ti-vpe/vpe.c 	vpe_dbg(ctx->dev, "get %d buffer(s) of size %d", *nbuffers,
ctx              2023 drivers/media/platform/ti-vpe/vpe.c 		vpe_dbg(ctx->dev, " and %d\n", sizes[VPE_CHROMA]);
ctx              2031 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              2035 drivers/media/platform/ti-vpe/vpe.c 	vpe_dbg(ctx->dev, "type: %d\n", vb->vb2_queue->type);
ctx              2037 drivers/media/platform/ti-vpe/vpe.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              2053 drivers/media/platform/ti-vpe/vpe.c 			vpe_err(ctx->dev,
ctx              2070 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              2072 drivers/media/platform/ti-vpe/vpe.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              2075 drivers/media/platform/ti-vpe/vpe.c static int check_srcdst_sizes(struct vpe_ctx *ctx)
ctx              2077 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *s_q_data =  &ctx->q_data[Q_DATA_SRC];
ctx              2078 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_q_data *d_q_data =  &ctx->q_data[Q_DATA_DST];
ctx              2096 drivers/media/platform/ti-vpe/vpe.c static void vpe_return_all_buffers(struct vpe_ctx *ctx,  struct vb2_queue *q,
ctx              2104 drivers/media/platform/ti-vpe/vpe.c 			vb = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              2106 drivers/media/platform/ti-vpe/vpe.c 			vb = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              2109 drivers/media/platform/ti-vpe/vpe.c 		spin_lock_irqsave(&ctx->dev->lock, flags);
ctx              2111 drivers/media/platform/ti-vpe/vpe.c 		spin_unlock_irqrestore(&ctx->dev->lock, flags);
ctx              2120 drivers/media/platform/ti-vpe/vpe.c 		spin_lock_irqsave(&ctx->dev->lock, flags);
ctx              2122 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[2])
ctx              2123 drivers/media/platform/ti-vpe/vpe.c 			v4l2_m2m_buf_done(ctx->src_vbs[2], state);
ctx              2125 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[1] && (ctx->src_vbs[1] != ctx->src_vbs[2]))
ctx              2126 drivers/media/platform/ti-vpe/vpe.c 			v4l2_m2m_buf_done(ctx->src_vbs[1], state);
ctx              2128 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->src_vbs[0] &&
ctx              2129 drivers/media/platform/ti-vpe/vpe.c 		    (ctx->src_vbs[0] != ctx->src_vbs[1]) &&
ctx              2130 drivers/media/platform/ti-vpe/vpe.c 		    (ctx->src_vbs[0] != ctx->src_vbs[2]))
ctx              2131 drivers/media/platform/ti-vpe/vpe.c 			v4l2_m2m_buf_done(ctx->src_vbs[0], state);
ctx              2133 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[2] = NULL;
ctx              2134 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[1] = NULL;
ctx              2135 drivers/media/platform/ti-vpe/vpe.c 		ctx->src_vbs[0] = NULL;
ctx              2137 drivers/media/platform/ti-vpe/vpe.c 		spin_unlock_irqrestore(&ctx->dev->lock, flags);
ctx              2139 drivers/media/platform/ti-vpe/vpe.c 		if (ctx->dst_vb) {
ctx              2140 drivers/media/platform/ti-vpe/vpe.c 			spin_lock_irqsave(&ctx->dev->lock, flags);
ctx              2142 drivers/media/platform/ti-vpe/vpe.c 			v4l2_m2m_buf_done(ctx->dst_vb, state);
ctx              2143 drivers/media/platform/ti-vpe/vpe.c 			ctx->dst_vb = NULL;
ctx              2144 drivers/media/platform/ti-vpe/vpe.c 			spin_unlock_irqrestore(&ctx->dev->lock, flags);
ctx              2151 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = vb2_get_drv_priv(q);
ctx              2154 drivers/media/platform/ti-vpe/vpe.c 	if (check_srcdst_sizes(ctx)) {
ctx              2155 drivers/media/platform/ti-vpe/vpe.c 		vpe_err(ctx->dev,
ctx              2158 drivers/media/platform/ti-vpe/vpe.c 		vpe_return_all_buffers(ctx, q, VB2_BUF_STATE_QUEUED);
ctx              2162 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->deinterlacing)
ctx              2163 drivers/media/platform/ti-vpe/vpe.c 		config_edi_input_mode(ctx, 0x0);
ctx              2165 drivers/media/platform/ti-vpe/vpe.c 	if (ctx->sequence != 0)
ctx              2166 drivers/media/platform/ti-vpe/vpe.c 		set_srcdst_params(ctx);
ctx              2173 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = vb2_get_drv_priv(q);
ctx              2175 drivers/media/platform/ti-vpe/vpe.c 	vpe_dump_regs(ctx->dev);
ctx              2176 drivers/media/platform/ti-vpe/vpe.c 	vpdma_dump_regs(ctx->dev->vpdma);
ctx              2178 drivers/media/platform/ti-vpe/vpe.c 	vpe_return_all_buffers(ctx, q, VB2_BUF_STATE_ERROR);
ctx              2194 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = priv;
ctx              2195 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_dev *dev = ctx->dev;
ctx              2201 drivers/media/platform/ti-vpe/vpe.c 	src_vq->drv_priv = ctx;
ctx              2216 drivers/media/platform/ti-vpe/vpe.c 	dst_vq->drv_priv = ctx;
ctx              2246 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx;
ctx              2251 drivers/media/platform/ti-vpe/vpe.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2252 drivers/media/platform/ti-vpe/vpe.c 	if (!ctx)
ctx              2255 drivers/media/platform/ti-vpe/vpe.c 	ctx->dev = dev;
ctx              2262 drivers/media/platform/ti-vpe/vpe.c 	ret = vpdma_create_desc_list(&ctx->desc_list, VPE_DESC_LIST_SIZE,
ctx              2267 drivers/media/platform/ti-vpe/vpe.c 	ret = vpdma_alloc_desc_buf(&ctx->mmr_adb, sizeof(struct vpe_mmr_adb));
ctx              2271 drivers/media/platform/ti-vpe/vpe.c 	ret = vpdma_alloc_desc_buf(&ctx->sc_coeff_h, SC_COEF_SRAM_SIZE);
ctx              2275 drivers/media/platform/ti-vpe/vpe.c 	ret = vpdma_alloc_desc_buf(&ctx->sc_coeff_v, SC_COEF_SRAM_SIZE);
ctx              2279 drivers/media/platform/ti-vpe/vpe.c 	init_adb_hdrs(ctx);
ctx              2281 drivers/media/platform/ti-vpe/vpe.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              2282 drivers/media/platform/ti-vpe/vpe.c 	file->private_data = &ctx->fh;
ctx              2284 drivers/media/platform/ti-vpe/vpe.c 	hdl = &ctx->hdl;
ctx              2291 drivers/media/platform/ti-vpe/vpe.c 	ctx->fh.ctrl_handler = hdl;
ctx              2294 drivers/media/platform/ti-vpe/vpe.c 	s_q_data = &ctx->q_data[Q_DATA_SRC];
ctx              2311 drivers/media/platform/ti-vpe/vpe.c 	ctx->q_data[Q_DATA_DST] = *s_q_data;
ctx              2313 drivers/media/platform/ti-vpe/vpe.c 	set_dei_shadow_registers(ctx);
ctx              2314 drivers/media/platform/ti-vpe/vpe.c 	set_src_registers(ctx);
ctx              2315 drivers/media/platform/ti-vpe/vpe.c 	set_dst_registers(ctx);
ctx              2316 drivers/media/platform/ti-vpe/vpe.c 	ret = set_srcdst_params(ctx);
ctx              2320 drivers/media/platform/ti-vpe/vpe.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
ctx              2322 drivers/media/platform/ti-vpe/vpe.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              2323 drivers/media/platform/ti-vpe/vpe.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx              2327 drivers/media/platform/ti-vpe/vpe.c 	v4l2_fh_add(&ctx->fh);
ctx              2337 drivers/media/platform/ti-vpe/vpe.c 	ctx->bufs_per_job = VPE_DEF_BUFS_PER_JOB;
ctx              2339 drivers/media/platform/ti-vpe/vpe.c 	ctx->load_mmrs = true;
ctx              2342 drivers/media/platform/ti-vpe/vpe.c 		ctx, ctx->fh.m2m_ctx);
ctx              2349 drivers/media/platform/ti-vpe/vpe.c 	v4l2_fh_exit(&ctx->fh);
ctx              2350 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->sc_coeff_v);
ctx              2352 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->sc_coeff_h);
ctx              2354 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->mmr_adb);
ctx              2356 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_list(&ctx->desc_list);
ctx              2360 drivers/media/platform/ti-vpe/vpe.c 	kfree(ctx);
ctx              2367 drivers/media/platform/ti-vpe/vpe.c 	struct vpe_ctx *ctx = file2ctx(file);
ctx              2369 drivers/media/platform/ti-vpe/vpe.c 	vpe_dbg(dev, "releasing instance %p\n", ctx);
ctx              2372 drivers/media/platform/ti-vpe/vpe.c 	free_mv_buffers(ctx);
ctx              2374 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->desc_list.buf);
ctx              2375 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->mmr_adb);
ctx              2376 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_h);
ctx              2377 drivers/media/platform/ti-vpe/vpe.c 	vpdma_unmap_desc_buf(dev->vpdma, &ctx->sc_coeff_v);
ctx              2379 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_list(&ctx->desc_list);
ctx              2380 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->mmr_adb);
ctx              2382 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->sc_coeff_v);
ctx              2383 drivers/media/platform/ti-vpe/vpe.c 	vpdma_free_desc_buf(&ctx->sc_coeff_h);
ctx              2385 drivers/media/platform/ti-vpe/vpe.c 	v4l2_fh_del(&ctx->fh);
ctx              2386 drivers/media/platform/ti-vpe/vpe.c 	v4l2_fh_exit(&ctx->fh);
ctx              2387 drivers/media/platform/ti-vpe/vpe.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx              2388 drivers/media/platform/ti-vpe/vpe.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              2390 drivers/media/platform/ti-vpe/vpe.c 	kfree(ctx);
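
A pattern worth noting in the vpe.c entries above: every set_*() helper writes ctx->mmr_adb.addr (a DMA-coherent shadow of the MMR block) and only sets ctx->load_mmrs, while device_run() pushes the block through VPDMA just when the hardware holds stale or foreign state, i.e. when loaded_mmrs differs or the dirty flag is set. A hedged sketch of that pattern, with illustrative stand-in names rather than the driver's full types:

/*
 * Hedged sketch of the shadow-MMR ("load_mmrs") pattern the vpe.c
 * entries follow: register writes land in a DMA-coherent shadow block
 * and merely mark it dirty; the run path appends the block to the
 * VPDMA descriptor list only when a reload is actually needed.
 */
#include <linux/types.h>

struct shadow_buf {
	u32		*addr;		/* CPU view of the register block */
	dma_addr_t	dma_addr;	/* address the VPDMA engine fetches */
};

struct vpe_like_ctx {
	struct shadow_buf mmr_adb;
	bool		load_mmrs;	/* shadow modified since last load */
	dma_addr_t	*loaded_mmrs;	/* device-wide "last block loaded";
					 * the real driver keeps this on
					 * ctx->dev, a pointer keeps the
					 * sketch self-contained */
};

static void set_shadow_reg(struct vpe_like_ctx *ctx, unsigned int idx, u32 val)
{
	ctx->mmr_adb.addr[idx] = val;	/* only the shadow copy changes */
	ctx->load_mmrs = true;		/* defer the HW update to run time */
}

static void maybe_reload_mmrs(struct vpe_like_ctx *ctx)
{
	/*
	 * Reload if another context's block is resident or this one is
	 * dirty; mirrors the check in vpe.c's device_run().
	 */
	if (*ctx->loaded_mmrs != ctx->mmr_adb.dma_addr || ctx->load_mmrs) {
		/* ...vpdma_map_desc_buf() + vpdma_add_cfd_adb() here... */
		*ctx->loaded_mmrs = ctx->mmr_adb.dma_addr;
		ctx->load_mmrs = false;
	}
}
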
ctx               153 drivers/media/platform/vicodec/vicodec-core.c static struct vicodec_q_data *get_q_data(struct vicodec_ctx *ctx,
ctx               159 drivers/media/platform/vicodec/vicodec-core.c 		return &ctx->q_data[V4L2_M2M_SRC];
ctx               162 drivers/media/platform/vicodec/vicodec-core.c 		return &ctx->q_data[V4L2_M2M_DST];
ctx               246 drivers/media/platform/vicodec/vicodec-core.c static void update_state_from_header(struct vicodec_ctx *ctx)
ctx               248 drivers/media/platform/vicodec/vicodec-core.c 	const struct fwht_cframe_hdr *p_hdr = &ctx->state.header;
ctx               250 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.visible_width = ntohl(p_hdr->width);
ctx               251 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.visible_height = ntohl(p_hdr->height);
ctx               252 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.colorspace = ntohl(p_hdr->colorspace);
ctx               253 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.xfer_func = ntohl(p_hdr->xfer_func);
ctx               254 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.ycbcr_enc = ntohl(p_hdr->ycbcr_enc);
ctx               255 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.quantization = ntohl(p_hdr->quantization);
ctx               258 drivers/media/platform/vicodec/vicodec-core.c static int device_process(struct vicodec_ctx *ctx,
ctx               262 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_dev *dev = ctx->dev;
ctx               263 drivers/media/platform/vicodec/vicodec-core.c 	struct v4l2_fwht_state *state = &ctx->state;
ctx               267 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc || ctx->is_stateless)
ctx               272 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless) {
ctx               275 drivers/media/platform/vicodec/vicodec-core.c 		ret = v4l2_ctrl_request_setup(src_req, &ctx->hdl);
ctx               278 drivers/media/platform/vicodec/vicodec-core.c 		update_state_from_header(ctx);
ctx               280 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.header.size =
ctx               286 drivers/media/platform/vicodec/vicodec-core.c 		if (!(ntohl(ctx->state.header.flags) & FWHT_FL_I_FRAME)) {
ctx               290 drivers/media/platform/vicodec/vicodec-core.c 				v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               294 drivers/media/platform/vicodec/vicodec-core.c 							 ctx->state.ref_frame_ts, 0);
ctx               301 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.ref_frame.buf =
ctx               304 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.ref_frame.buf = NULL;
ctx               314 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc) {
ctx               318 drivers/media/platform/vicodec/vicodec-core.c 		q_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               326 drivers/media/platform/vicodec/vicodec-core.c 		unsigned int comp_frame_size = ntohl(ctx->state.header.size);
ctx               328 drivers/media/platform/vicodec/vicodec-core.c 		q_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               329 drivers/media/platform/vicodec/vicodec-core.c 		if (comp_frame_size > ctx->comp_max_size)
ctx               335 drivers/media/platform/vicodec/vicodec-core.c 		if (!ctx->is_stateless)
ctx               336 drivers/media/platform/vicodec/vicodec-core.c 			copy_cap_to_ref(p_dst, ctx->state.info, &ctx->state);
ctx               339 drivers/media/platform/vicodec/vicodec-core.c 		if (ntohl(ctx->state.header.flags) & FWHT_FL_I_FRAME)
ctx               350 drivers/media/platform/vicodec/vicodec-core.c static enum vb2_buffer_state get_next_header(struct vicodec_ctx *ctx,
ctx               358 drivers/media/platform/vicodec/vicodec-core.c 	u8 *header = (u8 *)&ctx->state.header;
ctx               362 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx->header_size) {
ctx               367 drivers/media/platform/vicodec/vicodec-core.c 			p = memchr(p, magic[ctx->comp_magic_cnt],
ctx               370 drivers/media/platform/vicodec/vicodec-core.c 				ctx->comp_magic_cnt = 0;
ctx               374 drivers/media/platform/vicodec/vicodec-core.c 			copy = sizeof(magic) - ctx->comp_magic_cnt;
ctx               378 drivers/media/platform/vicodec/vicodec-core.c 			memcpy(header + ctx->comp_magic_cnt, p, copy);
ctx               379 drivers/media/platform/vicodec/vicodec-core.c 			ctx->comp_magic_cnt += copy;
ctx               380 drivers/media/platform/vicodec/vicodec-core.c 			if (!memcmp(header, magic, ctx->comp_magic_cnt)) {
ctx               385 drivers/media/platform/vicodec/vicodec-core.c 			ctx->comp_magic_cnt = 0;
ctx               387 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->comp_magic_cnt < sizeof(magic)) {
ctx               391 drivers/media/platform/vicodec/vicodec-core.c 		ctx->header_size = sizeof(magic);
ctx               394 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->header_size < sizeof(struct fwht_cframe_hdr)) {
ctx               395 drivers/media/platform/vicodec/vicodec-core.c 		u32 copy = sizeof(struct fwht_cframe_hdr) - ctx->header_size;
ctx               400 drivers/media/platform/vicodec/vicodec-core.c 		memcpy(header + ctx->header_size, p, copy);
ctx               402 drivers/media/platform/vicodec/vicodec-core.c 		ctx->header_size += copy;
ctx               411 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = priv;
ctx               412 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_dev *dev = ctx->dev;
ctx               418 drivers/media/platform/vicodec/vicodec-core.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               419 drivers/media/platform/vicodec/vicodec-core.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               422 drivers/media/platform/vicodec/vicodec-core.c 	q_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               423 drivers/media/platform/vicodec/vicodec-core.c 	q_dst = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               426 drivers/media/platform/vicodec/vicodec-core.c 	if (device_process(ctx, src_buf, dst_buf))
ctx               433 drivers/media/platform/vicodec/vicodec-core.c 	spin_lock(ctx->lock);
ctx               434 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx->comp_has_next_frame && src_buf == ctx->last_src_buf) {
ctx               436 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_event_queue_fh(&ctx->fh, &vicodec_eos_event);
ctx               437 drivers/media/platform/vicodec/vicodec-core.c 		ctx->is_draining = false;
ctx               438 drivers/media/platform/vicodec/vicodec-core.c 		ctx->has_stopped = true;
ctx               440 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc || ctx->is_stateless) {
ctx               442 drivers/media/platform/vicodec/vicodec-core.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               444 drivers/media/platform/vicodec/vicodec-core.c 	} else if (vb2_get_plane_payload(&src_buf->vb2_buf, 0) == ctx->cur_buf_offset) {
ctx               446 drivers/media/platform/vicodec/vicodec-core.c 		src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               448 drivers/media/platform/vicodec/vicodec-core.c 		ctx->cur_buf_offset = 0;
ctx               449 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_has_next_frame = false;
ctx               453 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_size = 0;
ctx               454 drivers/media/platform/vicodec/vicodec-core.c 	ctx->header_size = 0;
ctx               455 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_magic_cnt = 0;
ctx               456 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_has_frame = false;
ctx               457 drivers/media/platform/vicodec/vicodec-core.c 	spin_unlock(ctx->lock);
ctx               458 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless && src_req)
ctx               459 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_ctrl_request_complete(src_req, &ctx->hdl);
ctx               461 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc)
ctx               462 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_m2m_job_finish(dev->stateful_enc.m2m_dev, ctx->fh.m2m_ctx);
ctx               463 drivers/media/platform/vicodec/vicodec-core.c 	else if (ctx->is_stateless)
ctx               465 drivers/media/platform/vicodec/vicodec-core.c 				    ctx->fh.m2m_ctx);
ctx               467 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_m2m_job_finish(dev->stateful_dec.m2m_dev, ctx->fh.m2m_ctx);
ctx               470 drivers/media/platform/vicodec/vicodec-core.c static void job_remove_src_buf(struct vicodec_ctx *ctx, u32 state)
ctx               475 drivers/media/platform/vicodec/vicodec-core.c 	q_src = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               476 drivers/media/platform/vicodec/vicodec-core.c 	spin_lock(ctx->lock);
ctx               477 drivers/media/platform/vicodec/vicodec-core.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               480 drivers/media/platform/vicodec/vicodec-core.c 	ctx->cur_buf_offset = 0;
ctx               481 drivers/media/platform/vicodec/vicodec-core.c 	spin_unlock(ctx->lock);
ctx               523 drivers/media/platform/vicodec/vicodec-core.c static void update_capture_data_from_header(struct vicodec_ctx *ctx)
ctx               525 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_dst = get_q_data(ctx,
ctx               527 drivers/media/platform/vicodec/vicodec-core.c 	const struct fwht_cframe_hdr *p_hdr = &ctx->state.header;
ctx               537 drivers/media/platform/vicodec/vicodec-core.c 	WARN_ON(ctx->is_stateless);
ctx               548 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.colorspace = ntohl(p_hdr->colorspace);
ctx               550 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.xfer_func = ntohl(p_hdr->xfer_func);
ctx               551 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.ycbcr_enc = ntohl(p_hdr->ycbcr_enc);
ctx               552 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.quantization = ntohl(p_hdr->quantization);
ctx               557 drivers/media/platform/vicodec/vicodec-core.c 			    struct vicodec_ctx *ctx)
ctx               559 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_dst = get_q_data(ctx,
ctx               565 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_m2m_buf_copy_metadata(src_buf, dst_buf, !ctx->is_enc);
ctx               575 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = priv;
ctx               581 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_dst = get_q_data(ctx,
ctx               589 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->has_stopped)
ctx               591 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->source_changed)
ctx               593 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless || ctx->is_enc || ctx->comp_has_frame)
ctx               597 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_has_next_frame = false;
ctx               598 drivers/media/platform/vicodec/vicodec-core.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               603 drivers/media/platform/vicodec/vicodec-core.c 	p = p_src + ctx->cur_buf_offset;
ctx               607 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->header_size < sizeof(struct fwht_cframe_hdr)) {
ctx               608 drivers/media/platform/vicodec/vicodec-core.c 		state = get_next_header(ctx, &p, p_src + sz - p);
ctx               609 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->header_size < sizeof(struct fwht_cframe_hdr)) {
ctx               610 drivers/media/platform/vicodec/vicodec-core.c 			if (ctx->is_draining && src_buf == ctx->last_src_buf)
ctx               612 drivers/media/platform/vicodec/vicodec-core.c 			job_remove_src_buf(ctx, state);
ctx               617 drivers/media/platform/vicodec/vicodec-core.c 	comp_frame_size = ntohl(ctx->state.header.size);
ctx               626 drivers/media/platform/vicodec/vicodec-core.c 	max_to_copy = min(comp_frame_size, ctx->comp_max_size);
ctx               628 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->comp_size < max_to_copy) {
ctx               629 drivers/media/platform/vicodec/vicodec-core.c 		u32 copy = max_to_copy - ctx->comp_size;
ctx               634 drivers/media/platform/vicodec/vicodec-core.c 		memcpy(ctx->state.compressed_frame + ctx->comp_size,
ctx               637 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_size += copy;
ctx               638 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->comp_size < max_to_copy) {
ctx               639 drivers/media/platform/vicodec/vicodec-core.c 			if (ctx->is_draining && src_buf == ctx->last_src_buf)
ctx               641 drivers/media/platform/vicodec/vicodec-core.c 			job_remove_src_buf(ctx, state);
ctx               645 drivers/media/platform/vicodec/vicodec-core.c 	ctx->cur_buf_offset = p - p_src;
ctx               646 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->comp_size == comp_frame_size)
ctx               647 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_has_frame = true;
ctx               648 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_has_next_frame = false;
ctx               649 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->comp_has_frame && sz - ctx->cur_buf_offset >=
ctx               653 drivers/media/platform/vicodec/vicodec-core.c 		u32 remaining = sz - ctx->cur_buf_offset - sizeof(*p_hdr);
ctx               656 drivers/media/platform/vicodec/vicodec-core.c 			ctx->comp_has_next_frame = remaining >= frame_size;
ctx               662 drivers/media/platform/vicodec/vicodec-core.c 	if (!is_header_valid(&ctx->state.header) && ctx->comp_has_frame)
ctx               664 drivers/media/platform/vicodec/vicodec-core.c 	flags = ntohl(ctx->state.header.flags);
ctx               668 drivers/media/platform/vicodec/vicodec-core.c 	if (ntohl(ctx->state.header.width) != q_dst->visible_width ||
ctx               669 drivers/media/platform/vicodec/vicodec-core.c 	    ntohl(ctx->state.header.height) != q_dst->visible_height ||
ctx               679 drivers/media/platform/vicodec/vicodec-core.c 			v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               681 drivers/media/platform/vicodec/vicodec-core.c 		update_capture_data_from_header(ctx);
ctx               682 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_event_queue_fh(&ctx->fh, &rs_event);
ctx               683 drivers/media/platform/vicodec/vicodec-core.c 		set_last_buffer(dst_buf, src_buf, ctx);
ctx               684 drivers/media/platform/vicodec/vicodec-core.c 		ctx->source_changed = true;
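job_ready() above is the stateful decoder's gather loop: it copies bytes from the current source buffer into state.compressed_frame, advances cur_buf_offset, and only reports a complete frame once comp_size reaches the size announced in the header (it also peeks ahead for a following header to set comp_has_next_frame). A hedged userspace model of just the accumulation step, with made-up names and sizes:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical accumulator modelling ctx->comp_size/comp_has_frame. */
struct accum {
	uint8_t  frame[1 << 16];  /* stand-in for state.compressed_frame */
	uint32_t comp_size;       /* bytes gathered so far */
	uint32_t frame_size;      /* from the parsed header */
	bool     has_frame;
};

/* Feed one source buffer; returns the bytes consumed from it. */
static size_t feed(struct accum *a, const uint8_t *p, size_t avail)
{
	uint32_t want = a->frame_size - a->comp_size;
	size_t copy = avail < want ? avail : want;

	memcpy(a->frame + a->comp_size, p, copy);
	a->comp_size += copy;
	a->has_frame = (a->comp_size == a->frame_size);
	return copy;  /* caller advances its cur_buf_offset by this much */
}

int main(void)
{
	struct accum a = { .frame_size = 10 };
	uint8_t buf[6] = { 0 };

	feed(&a, buf, sizeof(buf));  /* partial: frame not yet complete */
	feed(&a, buf, sizeof(buf));  /* completes the frame             */
	printf("has_frame=%d size=%u\n", a.has_frame, a.comp_size);
	return 0;
}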
ctx               714 drivers/media/platform/vicodec/vicodec-core.c static int enum_fmt(struct v4l2_fmtdesc *f, struct vicodec_ctx *ctx,
ctx               717 drivers/media/platform/vicodec/vicodec-core.c 	bool is_uncomp = (ctx->is_enc && is_out) || (!ctx->is_enc && !is_out);
ctx               726 drivers/media/platform/vicodec/vicodec-core.c 					get_q_data(ctx, f->type)->info;
ctx               728 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc ||
ctx               729 drivers/media/platform/vicodec/vicodec-core.c 		    !vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q))
ctx               743 drivers/media/platform/vicodec/vicodec-core.c 		f->pixelformat = ctx->is_stateless ?
ctx               745 drivers/media/platform/vicodec/vicodec-core.c 		if (!ctx->is_enc && !ctx->is_stateless)
ctx               755 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx               757 drivers/media/platform/vicodec/vicodec-core.c 	return enum_fmt(f, ctx, false);
ctx               763 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx               765 drivers/media/platform/vicodec/vicodec-core.c 	return enum_fmt(f, ctx, true);
ctx               768 drivers/media/platform/vicodec/vicodec-core.c static int vidioc_g_fmt(struct vicodec_ctx *ctx, struct v4l2_format *f)
ctx               776 drivers/media/platform/vicodec/vicodec-core.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               780 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, f->type);
ctx               796 drivers/media/platform/vicodec/vicodec-core.c 		pix->colorspace = ctx->state.colorspace;
ctx               797 drivers/media/platform/vicodec/vicodec-core.c 		pix->xfer_func = ctx->state.xfer_func;
ctx               798 drivers/media/platform/vicodec/vicodec-core.c 		pix->ycbcr_enc = ctx->state.ycbcr_enc;
ctx               799 drivers/media/platform/vicodec/vicodec-core.c 		pix->quantization = ctx->state.quantization;
ctx               815 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->colorspace = ctx->state.colorspace;
ctx               816 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->xfer_func = ctx->state.xfer_func;
ctx               817 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->ycbcr_enc = ctx->state.ycbcr_enc;
ctx               818 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->quantization = ctx->state.quantization;
ctx               841 drivers/media/platform/vicodec/vicodec-core.c static int vidioc_try_fmt(struct vicodec_ctx *ctx, struct v4l2_format *f)
ctx               846 drivers/media/platform/vicodec/vicodec-core.c 	const struct v4l2_fwht_pixfmt_info *info = ctx->is_stateless ?
ctx               907 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx               916 drivers/media/platform/vicodec/vicodec-core.c 		pix->pixelformat = ctx->is_enc ? V4L2_PIX_FMT_FWHT :
ctx               918 drivers/media/platform/vicodec/vicodec-core.c 		pix->colorspace = ctx->state.colorspace;
ctx               919 drivers/media/platform/vicodec/vicodec-core.c 		pix->xfer_func = ctx->state.xfer_func;
ctx               920 drivers/media/platform/vicodec/vicodec-core.c 		pix->ycbcr_enc = ctx->state.ycbcr_enc;
ctx               921 drivers/media/platform/vicodec/vicodec-core.c 		pix->quantization = ctx->state.quantization;
ctx               927 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->pixelformat = ctx->is_enc ? V4L2_PIX_FMT_FWHT :
ctx               929 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->colorspace = ctx->state.colorspace;
ctx               930 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->xfer_func = ctx->state.xfer_func;
ctx               931 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->ycbcr_enc = ctx->state.ycbcr_enc;
ctx               932 drivers/media/platform/vicodec/vicodec-core.c 		pix_mp->quantization = ctx->state.quantization;
ctx               938 drivers/media/platform/vicodec/vicodec-core.c 	return vidioc_try_fmt(ctx, f);
ctx               944 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx               953 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc)
ctx               955 drivers/media/platform/vicodec/vicodec-core.c 		else if (ctx->is_stateless)
ctx               966 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc)
ctx               968 drivers/media/platform/vicodec/vicodec-core.c 		else if (ctx->is_stateless)
ctx               979 drivers/media/platform/vicodec/vicodec-core.c 	return vidioc_try_fmt(ctx, f);
ctx               982 drivers/media/platform/vicodec/vicodec-core.c static int vidioc_s_fmt(struct vicodec_ctx *ctx, struct v4l2_format *f)
ctx               990 drivers/media/platform/vicodec/vicodec-core.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               994 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, f->type);
ctx              1002 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc && V4L2_TYPE_IS_OUTPUT(f->type))
ctx              1025 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc && V4L2_TYPE_IS_OUTPUT(f->type))
ctx              1049 drivers/media/platform/vicodec/vicodec-core.c 	dprintk(ctx->dev,
ctx              1072 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              1081 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, f->type);
ctx              1082 drivers/media/platform/vicodec/vicodec-core.c 	q_data_cap = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx              1088 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc) {
ctx              1089 drivers/media/platform/vicodec/vicodec-core.c 		struct vb2_queue *vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx              1090 drivers/media/platform/vicodec/vicodec-core.c 		struct vb2_queue *vq_cap = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx              1092 drivers/media/platform/vicodec/vicodec-core.c 		const struct v4l2_fwht_pixfmt_info *info = ctx->is_stateless ?
ctx              1107 drivers/media/platform/vicodec/vicodec-core.c 		if (!ctx->is_stateless)
ctx              1116 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc) {
ctx              1127 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.colorspace = pix->colorspace;
ctx              1128 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.xfer_func = pix->xfer_func;
ctx              1129 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.ycbcr_enc = pix->ycbcr_enc;
ctx              1130 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.quantization = pix->quantization;
ctx              1134 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.colorspace = pix_mp->colorspace;
ctx              1135 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.xfer_func = pix_mp->xfer_func;
ctx              1136 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.ycbcr_enc = pix_mp->ycbcr_enc;
ctx              1137 drivers/media/platform/vicodec/vicodec-core.c 			ctx->state.quantization = pix_mp->quantization;
ctx              1149 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              1152 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, s->type);
ctx              1159 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc && s->type == V4L2_BUF_TYPE_VIDEO_OUTPUT) {
ctx              1175 drivers/media/platform/vicodec/vicodec-core.c 	} else if (!ctx->is_enc && s->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
ctx              1198 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              1204 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, s->type);
ctx              1208 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx->is_enc || s->target != V4L2_SEL_TGT_CROP)
ctx              1222 drivers/media/platform/vicodec/vicodec-core.c static int vicodec_mark_last_buf(struct vicodec_ctx *ctx)
ctx              1227 drivers/media/platform/vicodec/vicodec-core.c 	spin_lock(ctx->lock);
ctx              1228 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_draining) {
ctx              1232 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->has_stopped)
ctx              1235 drivers/media/platform/vicodec/vicodec-core.c 	ctx->last_src_buf = v4l2_m2m_last_src_buf(ctx->fh.m2m_ctx);
ctx              1236 drivers/media/platform/vicodec/vicodec-core.c 	ctx->is_draining = true;
ctx              1237 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->last_src_buf)
ctx              1240 drivers/media/platform/vicodec/vicodec-core.c 	next_dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1242 drivers/media/platform/vicodec/vicodec-core.c 		ctx->next_is_last = true;
ctx              1248 drivers/media/platform/vicodec/vicodec-core.c 	ctx->is_draining = false;
ctx              1249 drivers/media/platform/vicodec/vicodec-core.c 	ctx->has_stopped = true;
ctx              1250 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_event_queue_fh(&ctx->fh, &vicodec_eos_event);
ctx              1253 drivers/media/platform/vicodec/vicodec-core.c 	spin_unlock(ctx->lock);
ctx              1260 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              1267 drivers/media/platform/vicodec/vicodec-core.c 	if (!vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q) ||
ctx              1268 drivers/media/platform/vicodec/vicodec-core.c 	    !vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q))
ctx              1272 drivers/media/platform/vicodec/vicodec-core.c 		return vicodec_mark_last_buf(ctx);
ctx              1274 drivers/media/platform/vicodec/vicodec-core.c 	spin_lock(ctx->lock);
ctx              1275 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_draining) {
ctx              1277 drivers/media/platform/vicodec/vicodec-core.c 	} else if (ctx->has_stopped) {
ctx              1278 drivers/media/platform/vicodec/vicodec-core.c 		ctx->has_stopped = false;
ctx              1279 drivers/media/platform/vicodec/vicodec-core.c 		vb2_clear_last_buffer_dequeued(&ctx->fh.m2m_ctx->cap_q_ctx.q);
ctx              1281 drivers/media/platform/vicodec/vicodec-core.c 	spin_unlock(ctx->lock);
ctx              1288 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              1295 drivers/media/platform/vicodec/vicodec-core.c 	if (!vb2_is_streaming(&ctx->fh.m2m_ctx->cap_q_ctx.q) ||
ctx              1296 drivers/media/platform/vicodec/vicodec-core.c 	    !vb2_is_streaming(&ctx->fh.m2m_ctx->out_q_ctx.q))
ctx              1300 drivers/media/platform/vicodec/vicodec-core.c 		return vicodec_mark_last_buf(ctx);
ctx              1302 drivers/media/platform/vicodec/vicodec-core.c 	spin_lock(ctx->lock);
ctx              1303 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_draining) {
ctx              1305 drivers/media/platform/vicodec/vicodec-core.c 	} else if (ctx->has_stopped) {
ctx              1306 drivers/media/platform/vicodec/vicodec-core.c 		ctx->has_stopped = false;
ctx              1307 drivers/media/platform/vicodec/vicodec-core.c 		vb2_clear_last_buffer_dequeued(&ctx->fh.m2m_ctx->cap_q_ctx.q);
ctx              1309 drivers/media/platform/vicodec/vicodec-core.c 	spin_unlock(ctx->lock);
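The encoder_cmd and decoder_cmd fragments above are deliberately symmetric: a STOP command while both queues stream kicks off a drain via vicodec_mark_last_buf(), and a START after a completed stop clears has_stopped plus the capture queue's last-buffer-dequeued flag. A small hedged model of those two flags, with the locking and V4L2 plumbing omitted:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical model of the drain flags used by the cmd handlers. */
struct drain_state {
	bool is_draining;   /* STOP seen, frames still in flight */
	bool has_stopped;   /* drain complete, EOS delivered     */
};

enum cmd { CMD_STOP, CMD_START };

static void handle_cmd(struct drain_state *s, enum cmd c)
{
	switch (c) {
	case CMD_STOP:
		if (!s->is_draining && !s->has_stopped)
			s->is_draining = true;  /* mark last src buf, drain */
		break;
	case CMD_START:
		if (s->is_draining) {
			/* drain still in progress: nothing to undo yet */
		} else if (s->has_stopped) {
			s->has_stopped = false; /* and clear LAST on cap queue */
		}
		break;
	}
}

int main(void)
{
	struct drain_state s = { 0 };

	handle_cmd(&s, CMD_STOP);
	printf("draining=%d\n", s.is_draining);
	return 0;
}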
ctx              1345 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = container_of(fh, struct vicodec_ctx, fh);
ctx              1349 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_enc)
ctx              1353 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_stateless)
ctx              1415 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1416 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_data = get_q_data(ctx, vq->type);
ctx              1439 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1442 drivers/media/platform/vicodec/vicodec-core.c 	dprintk(ctx->dev, "type: %d\n", vb->vb2_queue->type);
ctx              1444 drivers/media/platform/vicodec/vicodec-core.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1449 drivers/media/platform/vicodec/vicodec-core.c 			dprintk(ctx->dev, "%s field isn't supported\n",
ctx              1456 drivers/media/platform/vicodec/vicodec-core.c 		dprintk(ctx->dev,
ctx              1469 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1473 drivers/media/platform/vicodec/vicodec-core.c 	struct vb2_queue *vq_out = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx              1475 drivers/media/platform/vicodec/vicodec-core.c 	struct vb2_queue *vq_cap = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx              1485 drivers/media/platform/vicodec/vicodec-core.c 		    ctx->next_is_last) {
ctx              1492 drivers/media/platform/vicodec/vicodec-core.c 			vbuf->sequence = get_q_data(ctx, vb->vb2_queue->type)->sequence++;
ctx              1494 drivers/media/platform/vicodec/vicodec-core.c 			ctx->is_draining = false;
ctx              1495 drivers/media/platform/vicodec/vicodec-core.c 			ctx->has_stopped = true;
ctx              1496 drivers/media/platform/vicodec/vicodec-core.c 			ctx->next_is_last = false;
ctx              1497 drivers/media/platform/vicodec/vicodec-core.c 			v4l2_event_queue_fh(&ctx->fh, &vicodec_eos_event);
ctx              1503 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->first_source_change_sent) {
ctx              1504 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1513 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1521 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless || ctx->is_enc ||
ctx              1523 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1529 drivers/media/platform/vicodec/vicodec-core.c 			get_next_header(ctx, &p, p_src + sz - p);
ctx              1531 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->header_size < sizeof(struct fwht_cframe_hdr)) {
ctx              1535 drivers/media/platform/vicodec/vicodec-core.c 		header_valid = is_header_valid(&ctx->state.header);
ctx              1545 drivers/media/platform/vicodec/vicodec-core.c 			ctx->header_size = 0;
ctx              1546 drivers/media/platform/vicodec/vicodec-core.c 			ctx->comp_magic_cnt = 0;
ctx              1551 drivers/media/platform/vicodec/vicodec-core.c 	ctx->cur_buf_offset = p - p_src;
ctx              1552 drivers/media/platform/vicodec/vicodec-core.c 	update_capture_data_from_header(ctx);
ctx              1553 drivers/media/platform/vicodec/vicodec-core.c 	ctx->first_source_change_sent = true;
ctx              1554 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_event_queue_fh(&ctx->fh, &rs_event);
ctx              1555 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
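buf_queue() above defers queueing of the first compressed OUTPUT buffers until a full, valid header has been found, using get_next_header() with comp_magic_cnt tracking a partial magic match across buffer boundaries. A userspace sketch of that resumable scan; the magic bytes here are invented, not the real FWHT ones:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical 4-byte magic; the real FWHT magic differs. */
static const uint8_t MAGIC[4] = { 'O', 'O', 'O', 'O' };

/*
 * Byte-wise matcher in the spirit of comp_magic_cnt: the count survives
 * across buffers, so a magic split over two buffers is still found.
 */
static int find_magic(unsigned *cnt, const uint8_t **p, const uint8_t *end)
{
	while (*p < end && *cnt < sizeof(MAGIC)) {
		if (**p == MAGIC[*cnt])
			(*cnt)++;
		else
			*cnt = (**p == MAGIC[0]); /* restart, maybe mid-match */
		(*p)++;
	}
	return *cnt == sizeof(MAGIC);
}

int main(void)
{
	const uint8_t stream[] = "xxOOOOyy";
	const uint8_t *p = stream;
	unsigned cnt = 0;

	printf("found=%d at offset %zu\n",
	       find_magic(&cnt, &p, stream + sizeof(stream)),
	       (size_t)(p - stream));
	return 0;
}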
ctx              1560 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(q);
ctx              1565 drivers/media/platform/vicodec/vicodec-core.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1567 drivers/media/platform/vicodec/vicodec-core.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1571 drivers/media/platform/vicodec/vicodec-core.c 					   &ctx->hdl);
ctx              1572 drivers/media/platform/vicodec/vicodec-core.c 		spin_lock(ctx->lock);
ctx              1574 drivers/media/platform/vicodec/vicodec-core.c 		spin_unlock(ctx->lock);
ctx              1600 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(q);
ctx              1601 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_data = get_q_data(ctx, q->type);
ctx              1602 drivers/media/platform/vicodec/vicodec-core.c 	struct v4l2_fwht_state *state = &ctx->state;
ctx              1613 drivers/media/platform/vicodec/vicodec-core.c 		ctx->last_src_buf = NULL;
ctx              1617 drivers/media/platform/vicodec/vicodec-core.c 	if ((V4L2_TYPE_IS_OUTPUT(q->type) && !ctx->is_enc) ||
ctx              1618 drivers/media/platform/vicodec/vicodec-core.c 	    (!V4L2_TYPE_IS_OUTPUT(q->type) && ctx->is_enc))
ctx              1627 drivers/media/platform/vicodec/vicodec-core.c 	ctx->comp_max_size = total_planes_size;
ctx              1636 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless) {
ctx              1644 drivers/media/platform/vicodec/vicodec-core.c 	new_comp_frame = kvmalloc(ctx->comp_max_size, GFP_KERNEL);
ctx              1657 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->comp_size > ctx->comp_max_size)
ctx              1658 drivers/media/platform/vicodec/vicodec-core.c 			ctx->comp_size = ctx->comp_max_size;
ctx              1661 drivers/media/platform/vicodec/vicodec-core.c 		       state->compressed_frame, ctx->comp_size);
ctx              1688 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(q);
ctx              1693 drivers/media/platform/vicodec/vicodec-core.c 		if (ctx->is_draining) {
ctx              1696 drivers/media/platform/vicodec/vicodec-core.c 			spin_lock(ctx->lock);
ctx              1697 drivers/media/platform/vicodec/vicodec-core.c 			ctx->last_src_buf = NULL;
ctx              1698 drivers/media/platform/vicodec/vicodec-core.c 			next_dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1700 drivers/media/platform/vicodec/vicodec-core.c 				ctx->next_is_last = true;
ctx              1704 drivers/media/platform/vicodec/vicodec-core.c 				ctx->is_draining = false;
ctx              1705 drivers/media/platform/vicodec/vicodec-core.c 				ctx->has_stopped = true;
ctx              1706 drivers/media/platform/vicodec/vicodec-core.c 				v4l2_event_queue_fh(&ctx->fh, &vicodec_eos_event);
ctx              1708 drivers/media/platform/vicodec/vicodec-core.c 			spin_unlock(ctx->lock);
ctx              1711 drivers/media/platform/vicodec/vicodec-core.c 		ctx->is_draining = false;
ctx              1712 drivers/media/platform/vicodec/vicodec-core.c 		ctx->has_stopped = false;
ctx              1713 drivers/media/platform/vicodec/vicodec-core.c 		ctx->next_is_last = false;
ctx              1715 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx->is_enc && V4L2_TYPE_IS_OUTPUT(q->type))
ctx              1716 drivers/media/platform/vicodec/vicodec-core.c 		ctx->first_source_change_sent = false;
ctx              1718 drivers/media/platform/vicodec/vicodec-core.c 	if ((!V4L2_TYPE_IS_OUTPUT(q->type) && !ctx->is_enc) ||
ctx              1719 drivers/media/platform/vicodec/vicodec-core.c 	    (V4L2_TYPE_IS_OUTPUT(q->type) && ctx->is_enc)) {
ctx              1720 drivers/media/platform/vicodec/vicodec-core.c 		if (!ctx->is_stateless)
ctx              1721 drivers/media/platform/vicodec/vicodec-core.c 			kvfree(ctx->state.ref_frame.buf);
ctx              1722 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.ref_frame.buf = NULL;
ctx              1723 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.ref_frame.luma = NULL;
ctx              1724 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_max_size = 0;
ctx              1725 drivers/media/platform/vicodec/vicodec-core.c 		ctx->source_changed = false;
ctx              1727 drivers/media/platform/vicodec/vicodec-core.c 	if (V4L2_TYPE_IS_OUTPUT(q->type) && !ctx->is_enc) {
ctx              1728 drivers/media/platform/vicodec/vicodec-core.c 		ctx->cur_buf_offset = 0;
ctx              1729 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_size = 0;
ctx              1730 drivers/media/platform/vicodec/vicodec-core.c 		ctx->header_size = 0;
ctx              1731 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_magic_cnt = 0;
ctx              1732 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_has_frame = 0;
ctx              1733 drivers/media/platform/vicodec/vicodec-core.c 		ctx->comp_has_next_frame = 0;
ctx              1739 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1741 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->hdl);
ctx              1760 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = priv;
ctx              1767 drivers/media/platform/vicodec/vicodec-core.c 	src_vq->drv_priv = ctx;
ctx              1772 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc)
ctx              1773 drivers/media/platform/vicodec/vicodec-core.c 		src_vq->lock = &ctx->dev->stateful_enc.mutex;
ctx              1774 drivers/media/platform/vicodec/vicodec-core.c 	else if (ctx->is_stateless)
ctx              1775 drivers/media/platform/vicodec/vicodec-core.c 		src_vq->lock = &ctx->dev->stateless_dec.mutex;
ctx              1777 drivers/media/platform/vicodec/vicodec-core.c 		src_vq->lock = &ctx->dev->stateful_dec.mutex;
ctx              1778 drivers/media/platform/vicodec/vicodec-core.c 	src_vq->supports_requests = ctx->is_stateless;
ctx              1779 drivers/media/platform/vicodec/vicodec-core.c 	src_vq->requires_requests = ctx->is_stateless;
ctx              1788 drivers/media/platform/vicodec/vicodec-core.c 	dst_vq->drv_priv = ctx;
ctx              1800 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = container_of(ctrl->handler,
ctx              1803 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_q_data *q_dst = get_q_data(ctx,
ctx              1827 drivers/media/platform/vicodec/vicodec-core.c static void update_header_from_stateless_params(struct vicodec_ctx *ctx,
ctx              1830 drivers/media/platform/vicodec/vicodec-core.c 	struct fwht_cframe_hdr *p_hdr = &ctx->state.header;
ctx              1846 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = container_of(ctrl->handler,
ctx              1852 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.gop_size = ctrl->val;
ctx              1855 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.i_frame_qp = ctrl->val;
ctx              1858 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.p_frame_qp = ctrl->val;
ctx              1862 drivers/media/platform/vicodec/vicodec-core.c 		update_header_from_stateless_params(ctx, params);
ctx              1863 drivers/media/platform/vicodec/vicodec-core.c 		ctx->state.ref_frame_ts = params->backward_ref_ts;
ctx              1888 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = NULL;
ctx              1896 drivers/media/platform/vicodec/vicodec-core.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1897 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx) {
ctx              1903 drivers/media/platform/vicodec/vicodec-core.c 		ctx->is_enc = true;
ctx              1905 drivers/media/platform/vicodec/vicodec-core.c 		ctx->is_stateless = true;
ctx              1907 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              1908 drivers/media/platform/vicodec/vicodec-core.c 	file->private_data = &ctx->fh;
ctx              1909 drivers/media/platform/vicodec/vicodec-core.c 	ctx->dev = dev;
ctx              1910 drivers/media/platform/vicodec/vicodec-core.c 	hdl = &ctx->hdl;
ctx              1918 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc)
ctx              1921 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_stateless)
ctx              1926 drivers/media/platform/vicodec/vicodec-core.c 		kfree(ctx);
ctx              1929 drivers/media/platform/vicodec/vicodec-core.c 	ctx->fh.ctrl_handler = hdl;
ctx              1932 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc)
ctx              1933 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].info = info;
ctx              1934 drivers/media/platform/vicodec/vicodec-core.c 	else if (ctx->is_stateless)
ctx              1935 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].info = &pixfmt_stateless_fwht;
ctx              1937 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].info = &pixfmt_fwht;
ctx              1938 drivers/media/platform/vicodec/vicodec-core.c 	ctx->q_data[V4L2_M2M_SRC].coded_width = 1280;
ctx              1939 drivers/media/platform/vicodec/vicodec-core.c 	ctx->q_data[V4L2_M2M_SRC].coded_height = 720;
ctx              1940 drivers/media/platform/vicodec/vicodec-core.c 	ctx->q_data[V4L2_M2M_SRC].visible_width = 1280;
ctx              1941 drivers/media/platform/vicodec/vicodec-core.c 	ctx->q_data[V4L2_M2M_SRC].visible_height = 720;
ctx              1945 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc)
ctx              1946 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].sizeimage = raw_size;
ctx              1947 drivers/media/platform/vicodec/vicodec-core.c 	else if (ctx->is_stateless)
ctx              1948 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].sizeimage = comp_size;
ctx              1950 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_SRC].sizeimage =
ctx              1952 drivers/media/platform/vicodec/vicodec-core.c 	ctx->q_data[V4L2_M2M_DST] = ctx->q_data[V4L2_M2M_SRC];
ctx              1953 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc) {
ctx              1954 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_DST].info = &pixfmt_fwht;
ctx              1955 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_DST].sizeimage =
ctx              1958 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_DST].info = info;
ctx              1959 drivers/media/platform/vicodec/vicodec-core.c 		ctx->q_data[V4L2_M2M_DST].sizeimage = raw_size;
ctx              1962 drivers/media/platform/vicodec/vicodec-core.c 	ctx->state.colorspace = V4L2_COLORSPACE_REC709;
ctx              1964 drivers/media/platform/vicodec/vicodec-core.c 	if (ctx->is_enc) {
ctx              1965 drivers/media/platform/vicodec/vicodec-core.c 		ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->stateful_enc.m2m_dev,
ctx              1966 drivers/media/platform/vicodec/vicodec-core.c 						    ctx, &queue_init);
ctx              1967 drivers/media/platform/vicodec/vicodec-core.c 		ctx->lock = &dev->stateful_enc.lock;
ctx              1968 drivers/media/platform/vicodec/vicodec-core.c 	} else if (ctx->is_stateless) {
ctx              1969 drivers/media/platform/vicodec/vicodec-core.c 		ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->stateless_dec.m2m_dev,
ctx              1970 drivers/media/platform/vicodec/vicodec-core.c 						    ctx, &queue_init);
ctx              1971 drivers/media/platform/vicodec/vicodec-core.c 		ctx->lock = &dev->stateless_dec.lock;
ctx              1973 drivers/media/platform/vicodec/vicodec-core.c 		ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->stateful_dec.m2m_dev,
ctx              1974 drivers/media/platform/vicodec/vicodec-core.c 						    ctx, &queue_init);
ctx              1975 drivers/media/platform/vicodec/vicodec-core.c 		ctx->lock = &dev->stateful_dec.lock;
ctx              1978 drivers/media/platform/vicodec/vicodec-core.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1979 drivers/media/platform/vicodec/vicodec-core.c 		rc = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1982 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_fh_exit(&ctx->fh);
ctx              1983 drivers/media/platform/vicodec/vicodec-core.c 		kfree(ctx);
ctx              1987 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_fh_add(&ctx->fh);
ctx              1997 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = file2ctx(file);
ctx              2000 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              2002 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_fh_del(&ctx->fh);
ctx              2003 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_fh_exit(&ctx->fh);
ctx              2004 drivers/media/platform/vicodec/vicodec-core.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx              2005 drivers/media/platform/vicodec/vicodec-core.c 	kvfree(ctx->state.compressed_frame);
ctx              2006 drivers/media/platform/vicodec/vicodec-core.c 	kfree(ctx);
ctx              2015 drivers/media/platform/vicodec/vicodec-core.c 	struct vicodec_ctx *ctx = NULL;
ctx              2024 drivers/media/platform/vicodec/vicodec-core.c 			ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              2030 drivers/media/platform/vicodec/vicodec-core.c 	if (!ctx) {
ctx              2037 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_info(&ctx->dev->v4l2_dev,
ctx              2041 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_info(&ctx->dev->v4l2_dev,
ctx              2046 drivers/media/platform/vicodec/vicodec-core.c 	parent_hdl = &ctx->hdl;
ctx              2050 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_info(&ctx->dev->v4l2_dev, "Missing codec control\n");
ctx              2056 drivers/media/platform/vicodec/vicodec-core.c 		v4l2_info(&ctx->dev->v4l2_dev,
ctx               241 drivers/media/platform/vim2m.c static struct vim2m_q_data *get_q_data(struct vim2m_ctx *ctx,
ctx               246 drivers/media/platform/vim2m.c 		return &ctx->q_data[V4L2_M2M_SRC];
ctx               248 drivers/media/platform/vim2m.c 		return &ctx->q_data[V4L2_M2M_DST];
ctx               445 drivers/media/platform/vim2m.c static int device_process(struct vim2m_ctx *ctx,
ctx               449 drivers/media/platform/vim2m.c 	struct vim2m_dev *dev = ctx->dev;
ctx               456 drivers/media/platform/vim2m.c 	q_data_in = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               462 drivers/media/platform/vim2m.c 	q_data_out = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               482 drivers/media/platform/vim2m.c 	if (ctx->mode & MEM2MEM_VFLIP) {
ctx               502 drivers/media/platform/vim2m.c 			if (ctx->mode & MEM2MEM_HFLIP)
ctx               506 drivers/media/platform/vim2m.c 				  ctx->mode & MEM2MEM_HFLIP);
ctx               525 drivers/media/platform/vim2m.c 		if (ctx->mode & MEM2MEM_HFLIP)
ctx               537 drivers/media/platform/vim2m.c 			if (ctx->mode & MEM2MEM_HFLIP)
ctx               544 drivers/media/platform/vim2m.c 					ctx->mode & MEM2MEM_HFLIP);
ctx               554 drivers/media/platform/vim2m.c 			if (ctx->mode & MEM2MEM_HFLIP)
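device_process() in vim2m.c, sampled above, is essentially a line copier whose source and destination indexing is reversed when MEM2MEM_HFLIP or MEM2MEM_VFLIP is set. The runnable sketch below keeps only that flip arithmetic, using 1-byte pixels instead of the driver's per-format pixel copy:

#include <stdint.h>
#include <stdio.h>

/*
 * Userspace sketch of the vim2m idea: copy a WxH image of 1-byte
 * pixels, optionally mirrored. The real driver walks multi-byte
 * pixels and plane strides; this keeps only the flip arithmetic.
 */
#define HFLIP 1
#define VFLIP 2

static void process(const uint8_t *src, uint8_t *dst, int w, int h, int mode)
{
	for (int y = 0; y < h; y++) {
		const uint8_t *s = src + y * w;
		uint8_t *d = dst + ((mode & VFLIP) ? (h - 1 - y) : y) * w;

		for (int x = 0; x < w; x++)
			d[(mode & HFLIP) ? (w - 1 - x) : x] = s[x];
	}
}

int main(void)
{
	uint8_t src[4] = { 1, 2, 3, 4 }, dst[4]; /* 2x2 image */

	process(src, dst, 2, 2, HFLIP | VFLIP);
	printf("%d %d %d %d\n", dst[0], dst[1], dst[2], dst[3]); /* 4 3 2 1 */
	return 0;
}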
ctx               573 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = priv;
ctx               575 drivers/media/platform/vim2m.c 	if (v4l2_m2m_num_src_bufs_ready(ctx->fh.m2m_ctx) < ctx->translen
ctx               576 drivers/media/platform/vim2m.c 	    || v4l2_m2m_num_dst_bufs_ready(ctx->fh.m2m_ctx) < ctx->translen) {
ctx               577 drivers/media/platform/vim2m.c 		dprintk(ctx->dev, 1, "Not enough buffers available\n");
ctx               586 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = priv;
ctx               589 drivers/media/platform/vim2m.c 	ctx->aborting = 1;
ctx               600 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = priv;
ctx               603 drivers/media/platform/vim2m.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               604 drivers/media/platform/vim2m.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               608 drivers/media/platform/vim2m.c 				&ctx->hdl);
ctx               610 drivers/media/platform/vim2m.c 	device_process(ctx, src_buf, dst_buf);
ctx               614 drivers/media/platform/vim2m.c 				   &ctx->hdl);
ctx               617 drivers/media/platform/vim2m.c 	schedule_delayed_work(&ctx->work_run, msecs_to_jiffies(ctx->transtime));
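device_run() above completes the m2m job asynchronously: the actual buffer return happens in device_work(), scheduled here with a configurable delay to emulate hardware latency. Below is a stand-alone, module-shaped sketch of that same delayed-work pattern; the vim2m specifics are stripped and the "demo" names are invented.

/* Hedged sketch of the delayed-work pattern, as a minimal module. */
#include <linux/module.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>

static struct delayed_work demo_work;

static void demo_work_fn(struct work_struct *w)
{
	pr_info("demo: delayed work ran\n");  /* vim2m finishes the job here */
}

static int __init demo_init(void)
{
	INIT_DELAYED_WORK(&demo_work, demo_work_fn);
	schedule_delayed_work(&demo_work, msecs_to_jiffies(40));
	return 0;
}

static void __exit demo_exit(void)
{
	cancel_delayed_work_sync(&demo_work); /* as stop_streaming does */
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");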
ctx               733 drivers/media/platform/vim2m.c static int vidioc_g_fmt(struct vim2m_ctx *ctx, struct v4l2_format *f)
ctx               738 drivers/media/platform/vim2m.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               742 drivers/media/platform/vim2m.c 	q_data = get_q_data(ctx, f->type);
ctx               752 drivers/media/platform/vim2m.c 	f->fmt.pix.colorspace	= ctx->colorspace;
ctx               753 drivers/media/platform/vim2m.c 	f->fmt.pix.xfer_func	= ctx->xfer_func;
ctx               754 drivers/media/platform/vim2m.c 	f->fmt.pix.ycbcr_enc	= ctx->ycbcr_enc;
ctx               755 drivers/media/platform/vim2m.c 	f->fmt.pix.quantization	= ctx->quant;
ctx               803 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = file2ctx(file);
ctx               811 drivers/media/platform/vim2m.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               816 drivers/media/platform/vim2m.c 	f->fmt.pix.colorspace = ctx->colorspace;
ctx               817 drivers/media/platform/vim2m.c 	f->fmt.pix.xfer_func = ctx->xfer_func;
ctx               818 drivers/media/platform/vim2m.c 	f->fmt.pix.ycbcr_enc = ctx->ycbcr_enc;
ctx               819 drivers/media/platform/vim2m.c 	f->fmt.pix.quantization = ctx->quant;
ctx               828 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = file2ctx(file);
ctx               836 drivers/media/platform/vim2m.c 		v4l2_err(&ctx->dev->v4l2_dev,
ctx               847 drivers/media/platform/vim2m.c static int vidioc_s_fmt(struct vim2m_ctx *ctx, struct v4l2_format *f)
ctx               852 drivers/media/platform/vim2m.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               856 drivers/media/platform/vim2m.c 	q_data = get_q_data(ctx, f->type);
ctx               861 drivers/media/platform/vim2m.c 		v4l2_err(&ctx->dev->v4l2_dev, "%s queue busy\n", __func__);
ctx               871 drivers/media/platform/vim2m.c 	dprintk(ctx->dev, 1,
ctx               898 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = file2ctx(file);
ctx               907 drivers/media/platform/vim2m.c 		ctx->colorspace = f->fmt.pix.colorspace;
ctx               908 drivers/media/platform/vim2m.c 		ctx->xfer_func = f->fmt.pix.xfer_func;
ctx               909 drivers/media/platform/vim2m.c 		ctx->ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx               910 drivers/media/platform/vim2m.c 		ctx->quant = f->fmt.pix.quantization;
ctx               917 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx =
ctx               923 drivers/media/platform/vim2m.c 			ctx->mode |= MEM2MEM_HFLIP;
ctx               925 drivers/media/platform/vim2m.c 			ctx->mode &= ~MEM2MEM_HFLIP;
ctx               930 drivers/media/platform/vim2m.c 			ctx->mode |= MEM2MEM_VFLIP;
ctx               932 drivers/media/platform/vim2m.c 			ctx->mode &= ~MEM2MEM_VFLIP;
ctx               936 drivers/media/platform/vim2m.c 		ctx->transtime = ctrl->val;
ctx               937 drivers/media/platform/vim2m.c 		if (ctx->transtime < 1)
ctx               938 drivers/media/platform/vim2m.c 			ctx->transtime = 1;
ctx               942 drivers/media/platform/vim2m.c 		ctx->translen = ctrl->val;
ctx               946 drivers/media/platform/vim2m.c 		v4l2_err(&ctx->dev->v4l2_dev, "Invalid control\n");
ctx               996 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(vq);
ctx              1000 drivers/media/platform/vim2m.c 	q_data = get_q_data(ctx, vq->type);
ctx              1016 drivers/media/platform/vim2m.c 	dprintk(ctx->dev, 1, "%s: get %d buffer(s) of size %d each.\n",
ctx              1025 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1030 drivers/media/platform/vim2m.c 		dprintk(ctx->dev, 1, "%s field isn't supported\n", __func__);
ctx              1039 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1042 drivers/media/platform/vim2m.c 	dprintk(ctx->dev, 2, "type: %s\n", type_name(vb->vb2_queue->type));
ctx              1044 drivers/media/platform/vim2m.c 	q_data = get_q_data(ctx, vb->vb2_queue->type);
ctx              1048 drivers/media/platform/vim2m.c 		dprintk(ctx->dev, 1,
ctx              1063 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1065 drivers/media/platform/vim2m.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx              1070 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(q);
ctx              1071 drivers/media/platform/vim2m.c 	struct vim2m_q_data *q_data = get_q_data(ctx, q->type);
ctx              1077 drivers/media/platform/vim2m.c 		ctx->aborting = 0;
ctx              1085 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(q);
ctx              1089 drivers/media/platform/vim2m.c 	cancel_delayed_work_sync(&ctx->work_run);
ctx              1093 drivers/media/platform/vim2m.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx              1095 drivers/media/platform/vim2m.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx              1099 drivers/media/platform/vim2m.c 					   &ctx->hdl);
ctx              1100 drivers/media/platform/vim2m.c 		spin_lock_irqsave(&ctx->irqlock, flags);
ctx              1102 drivers/media/platform/vim2m.c 		spin_unlock_irqrestore(&ctx->irqlock, flags);
ctx              1108 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx              1110 drivers/media/platform/vim2m.c 	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->hdl);
ctx              1128 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = priv;
ctx              1133 drivers/media/platform/vim2m.c 	src_vq->drv_priv = ctx;
ctx              1138 drivers/media/platform/vim2m.c 	src_vq->lock = &ctx->vb_mutex;
ctx              1147 drivers/media/platform/vim2m.c 	dst_vq->drv_priv = ctx;
ctx              1152 drivers/media/platform/vim2m.c 	dst_vq->lock = &ctx->vb_mutex;
ctx              1184 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = NULL;
ctx              1190 drivers/media/platform/vim2m.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              1191 drivers/media/platform/vim2m.c 	if (!ctx) {
ctx              1196 drivers/media/platform/vim2m.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx              1197 drivers/media/platform/vim2m.c 	file->private_data = &ctx->fh;
ctx              1198 drivers/media/platform/vim2m.c 	ctx->dev = dev;
ctx              1199 drivers/media/platform/vim2m.c 	hdl = &ctx->hdl;
ctx              1210 drivers/media/platform/vim2m.c 		kfree(ctx);
ctx              1213 drivers/media/platform/vim2m.c 	ctx->fh.ctrl_handler = hdl;
ctx              1216 drivers/media/platform/vim2m.c 	ctx->q_data[V4L2_M2M_SRC].fmt = &formats[0];
ctx              1217 drivers/media/platform/vim2m.c 	ctx->q_data[V4L2_M2M_SRC].width = 640;
ctx              1218 drivers/media/platform/vim2m.c 	ctx->q_data[V4L2_M2M_SRC].height = 480;
ctx              1219 drivers/media/platform/vim2m.c 	ctx->q_data[V4L2_M2M_SRC].sizeimage =
ctx              1220 drivers/media/platform/vim2m.c 		ctx->q_data[V4L2_M2M_SRC].width *
ctx              1221 drivers/media/platform/vim2m.c 		ctx->q_data[V4L2_M2M_SRC].height *
ctx              1222 drivers/media/platform/vim2m.c 		(ctx->q_data[V4L2_M2M_SRC].fmt->depth >> 3);
ctx              1223 drivers/media/platform/vim2m.c 	ctx->q_data[V4L2_M2M_DST] = ctx->q_data[V4L2_M2M_SRC];
ctx              1224 drivers/media/platform/vim2m.c 	ctx->colorspace = V4L2_COLORSPACE_REC709;
ctx              1226 drivers/media/platform/vim2m.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx, &queue_init);
ctx              1228 drivers/media/platform/vim2m.c 	mutex_init(&ctx->vb_mutex);
ctx              1229 drivers/media/platform/vim2m.c 	spin_lock_init(&ctx->irqlock);
ctx              1230 drivers/media/platform/vim2m.c 	INIT_DELAYED_WORK(&ctx->work_run, device_work);
ctx              1232 drivers/media/platform/vim2m.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx              1233 drivers/media/platform/vim2m.c 		rc = PTR_ERR(ctx->fh.m2m_ctx);
ctx              1236 drivers/media/platform/vim2m.c 		v4l2_fh_exit(&ctx->fh);
ctx              1237 drivers/media/platform/vim2m.c 		kfree(ctx);
ctx              1241 drivers/media/platform/vim2m.c 	v4l2_fh_add(&ctx->fh);
ctx              1245 drivers/media/platform/vim2m.c 		ctx, ctx->fh.m2m_ctx);
ctx              1255 drivers/media/platform/vim2m.c 	struct vim2m_ctx *ctx = file2ctx(file);
ctx              1257 drivers/media/platform/vim2m.c 	dprintk(dev, 1, "Releasing instance %p\n", ctx);
ctx              1259 drivers/media/platform/vim2m.c 	v4l2_fh_del(&ctx->fh);
ctx              1260 drivers/media/platform/vim2m.c 	v4l2_fh_exit(&ctx->fh);
ctx              1261 drivers/media/platform/vim2m.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx              1263 drivers/media/platform/vim2m.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx              1265 drivers/media/platform/vim2m.c 	kfree(ctx);
ctx               178 drivers/message/fusion/lsi/mpi_lan.h #define SET_LAN_BUCKET_CONTEXT(x, ctx)                                      \
ctx               180 drivers/message/fusion/lsi/mpi_lan.h                             (((ctx) << LAN_REPLY_BUCKET_CONTEXT_SHIFT) &    \
ctx               186 drivers/message/fusion/lsi/mpi_lan.h #define SET_LAN_BUFFER_CONTEXT(x, ctx)                                      \
ctx               188 drivers/message/fusion/lsi/mpi_lan.h                             (((ctx) << LAN_REPLY_BUFFER_CONTEXT_SHIFT) &    \
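The two mpi_lan.h macros above pack a driver-chosen context number into a bitfield of the message context word: shift into position, then mask so the value cannot spill into adjacent fields. A runnable userspace demo of the same idiom; CTX_SHIFT/CTX_MASK are invented values, and this variant clears the field first rather than OR-ing blindly:

#include <stdint.h>
#include <stdio.h>

#define CTX_SHIFT 16           /* made-up field position */
#define CTX_MASK  0x3FFF0000u  /* made-up field width    */

#define SET_CTX(x, ctx) \
	((x) = ((x) & ~CTX_MASK) | (((uint32_t)(ctx) << CTX_SHIFT) & CTX_MASK))
#define GET_CTX(x) (((x) & CTX_MASK) >> CTX_SHIFT)

int main(void)
{
	uint32_t reply = 0x000000FFu;  /* unrelated low-order field */

	SET_CTX(reply, 42);
	printf("reply=0x%08X ctx=%u\n", reply, GET_CTX(reply));
	return 0;
}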
ctx               575 drivers/message/fusion/mptlan.c 	u32 ctx;
ctx               577 drivers/message/fusion/mptlan.c 	ctx = GET_LAN_BUFFER_CONTEXT(tmsg);
ctx               578 drivers/message/fusion/mptlan.c 	sent = priv->SendCtl[ctx].skb;
ctx               587 drivers/message/fusion/mptlan.c 	priv->SendCtl[ctx].skb = NULL;
ctx               588 drivers/message/fusion/mptlan.c 	pci_unmap_single(mpt_dev->pcidev, priv->SendCtl[ctx].dma,
ctx               589 drivers/message/fusion/mptlan.c 			 priv->SendCtl[ctx].len, PCI_DMA_TODEVICE);
ctx               593 drivers/message/fusion/mptlan.c 	priv->mpt_txfidx[++priv->mpt_txfidx_tail] = ctx;
ctx               610 drivers/message/fusion/mptlan.c 	u32 ctx;
ctx               644 drivers/message/fusion/mptlan.c 		ctx = GET_LAN_BUFFER_CONTEXT(le32_to_cpu(*pContext));
ctx               646 drivers/message/fusion/mptlan.c 		sent = priv->SendCtl[ctx].skb;
ctx               653 drivers/message/fusion/mptlan.c 		priv->SendCtl[ctx].skb = NULL;
ctx               654 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->SendCtl[ctx].dma,
ctx               655 drivers/message/fusion/mptlan.c 				 priv->SendCtl[ctx].len, PCI_DMA_TODEVICE);
ctx               658 drivers/message/fusion/mptlan.c 		priv->mpt_txfidx[++priv->mpt_txfidx_tail] = ctx;
ctx               686 drivers/message/fusion/mptlan.c 	int ctx;
ctx               712 drivers/message/fusion/mptlan.c 	ctx = priv->mpt_txfidx[priv->mpt_txfidx_tail--];
ctx               729 drivers/message/fusion/mptlan.c 	priv->SendCtl[ctx].skb = skb;
ctx               730 drivers/message/fusion/mptlan.c 	priv->SendCtl[ctx].dma = dma;
ctx               731 drivers/message/fusion/mptlan.c 	priv->SendCtl[ctx].len = skb->len;
ctx               748 drivers/message/fusion/mptlan.c 	pTrans->TransactionContext[0] = cpu_to_le32(ctx);
ctx               856 drivers/message/fusion/mptlan.c 	u32 ctx, len;
ctx               858 drivers/message/fusion/mptlan.c 	ctx = GET_LAN_BUCKET_CONTEXT(tmsg);
ctx               859 drivers/message/fusion/mptlan.c 	skb = priv->RcvCtl[ctx].skb;
ctx               874 drivers/message/fusion/mptlan.c 		pci_dma_sync_single_for_cpu(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
ctx               875 drivers/message/fusion/mptlan.c 					    priv->RcvCtl[ctx].len, PCI_DMA_FROMDEVICE);
ctx               879 drivers/message/fusion/mptlan.c 		pci_dma_sync_single_for_device(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
ctx               880 drivers/message/fusion/mptlan.c 					       priv->RcvCtl[ctx].len, PCI_DMA_FROMDEVICE);
ctx               886 drivers/message/fusion/mptlan.c 	priv->RcvCtl[ctx].skb = NULL;
ctx               888 drivers/message/fusion/mptlan.c 	pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
ctx               889 drivers/message/fusion/mptlan.c 			 priv->RcvCtl[ctx].len, PCI_DMA_FROMDEVICE);
ctx               893 drivers/message/fusion/mptlan.c 	priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx               911 drivers/message/fusion/mptlan.c 	u32 ctx;
ctx               922 drivers/message/fusion/mptlan.c 		ctx = le32_to_cpu(pRecvRep->BucketContext[i]);
ctx               924 drivers/message/fusion/mptlan.c 		skb = priv->RcvCtl[ctx].skb;
ctx               932 drivers/message/fusion/mptlan.c 		priv->RcvCtl[ctx].skb = NULL;
ctx               933 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
ctx               934 drivers/message/fusion/mptlan.c 				 priv->RcvCtl[ctx].len, PCI_DMA_FROMDEVICE);
ctx               937 drivers/message/fusion/mptlan.c 		priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx               966 drivers/message/fusion/mptlan.c 	u32 len, ctx, offset;
ctx               989 drivers/message/fusion/mptlan.c 	ctx    = le32_to_cpu(pRecvRep->BucketContext[0]);
ctx               991 drivers/message/fusion/mptlan.c 	skb    = priv->RcvCtl[ctx].skb;
ctx              1023 drivers/message/fusion/mptlan.c 			ctx = le32_to_cpu(pRecvRep->BucketContext[i]);
ctx              1024 drivers/message/fusion/mptlan.c 			old_skb = priv->RcvCtl[ctx].skb;
ctx              1026 drivers/message/fusion/mptlan.c 			l = priv->RcvCtl[ctx].len;
ctx              1035 drivers/message/fusion/mptlan.c 						    priv->RcvCtl[ctx].dma,
ctx              1036 drivers/message/fusion/mptlan.c 						    priv->RcvCtl[ctx].len,
ctx              1041 drivers/message/fusion/mptlan.c 						       priv->RcvCtl[ctx].dma,
ctx              1042 drivers/message/fusion/mptlan.c 						       priv->RcvCtl[ctx].len,
ctx              1045 drivers/message/fusion/mptlan.c 			priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx              1063 drivers/message/fusion/mptlan.c 					    priv->RcvCtl[ctx].dma,
ctx              1064 drivers/message/fusion/mptlan.c 					    priv->RcvCtl[ctx].len,
ctx              1070 drivers/message/fusion/mptlan.c 					       priv->RcvCtl[ctx].dma,
ctx              1071 drivers/message/fusion/mptlan.c 					       priv->RcvCtl[ctx].len,
ctx              1075 drivers/message/fusion/mptlan.c 		priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx              1081 drivers/message/fusion/mptlan.c 		priv->RcvCtl[ctx].skb = NULL;
ctx              1083 drivers/message/fusion/mptlan.c 		pci_unmap_single(mpt_dev->pcidev, priv->RcvCtl[ctx].dma,
ctx              1084 drivers/message/fusion/mptlan.c 				 priv->RcvCtl[ctx].len, PCI_DMA_FROMDEVICE);
ctx              1085 drivers/message/fusion/mptlan.c 		priv->RcvCtl[ctx].dma = 0;
ctx              1087 drivers/message/fusion/mptlan.c 		priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx              1190 drivers/message/fusion/mptlan.c 			int ctx;
ctx              1201 drivers/message/fusion/mptlan.c 			ctx = priv->mpt_rxfidx[priv->mpt_rxfidx_tail--];
ctx              1203 drivers/message/fusion/mptlan.c 			skb = priv->RcvCtl[ctx].skb;
ctx              1204 drivers/message/fusion/mptlan.c 			if (skb && (priv->RcvCtl[ctx].len != len)) {
ctx              1206 drivers/message/fusion/mptlan.c 						 priv->RcvCtl[ctx].dma,
ctx              1207 drivers/message/fusion/mptlan.c 						 priv->RcvCtl[ctx].len,
ctx              1209 drivers/message/fusion/mptlan.c 				dev_kfree_skb(priv->RcvCtl[ctx].skb);
ctx              1210 drivers/message/fusion/mptlan.c 				skb = priv->RcvCtl[ctx].skb = NULL;
ctx              1219 drivers/message/fusion/mptlan.c 					priv->mpt_rxfidx[++priv->mpt_rxfidx_tail] = ctx;
ctx              1227 drivers/message/fusion/mptlan.c 				priv->RcvCtl[ctx].skb = skb;
ctx              1228 drivers/message/fusion/mptlan.c 				priv->RcvCtl[ctx].dma = dma;
ctx              1229 drivers/message/fusion/mptlan.c 				priv->RcvCtl[ctx].len = len;
ctx              1237 drivers/message/fusion/mptlan.c 			pTrans->TransactionContext[0] = cpu_to_le32(ctx);
ctx              1245 drivers/message/fusion/mptlan.c 			pSimple->Address.Low = cpu_to_le32((u32) priv->RcvCtl[ctx].dma);
ctx              1247 drivers/message/fusion/mptlan.c 				pSimple->Address.High = cpu_to_le32((u32) ((u64) priv->RcvCtl[ctx].dma >> 32));
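Throughout mptlan.c the ctx values are indices into the SendCtl/RcvCtl arrays, and mpt_txfidx/mpt_rxfidx act as LIFO free lists: a completion pushes an index back with fidx[++tail] = ctx, the send path pops with fidx[tail--]. A tiny userspace model of that free-index stack (the real driver does this under a spinlock and checks for exhaustion):

#include <stdio.h>

#define NCTX 8  /* illustrative; the driver sizes this per bucket count */

static int fidx[NCTX];
static int tail = -1;  /* -1 means no free context indices */

static void push_ctx(int ctx) { fidx[++tail] = ctx; }
static int  pop_ctx(void)     { return fidx[tail--]; }

int main(void)
{
	for (int i = 0; i < NCTX; i++)  /* all contexts start free */
		push_ctx(i);

	int ctx = pop_ctx();            /* claim one for a send */
	printf("claimed ctx %d, %d left\n", ctx, tail + 1);
	push_ctx(ctx);                  /* completion returns it */
	return 0;
}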
ctx                50 drivers/misc/cxl/api.c void cxl_release_mapping(struct cxl_context *ctx)
ctx                52 drivers/misc/cxl/api.c 	if (ctx->kernelapi && ctx->mapping)
ctx               103 drivers/misc/cxl/api.c 	struct cxl_context  *ctx;
ctx               110 drivers/misc/cxl/api.c 	ctx = cxl_context_alloc();
ctx               111 drivers/misc/cxl/api.c 	if (!ctx)
ctx               114 drivers/misc/cxl/api.c 	ctx->kernelapi = true;
ctx               117 drivers/misc/cxl/api.c 	rc = cxl_context_init(ctx, afu, false);
ctx               121 drivers/misc/cxl/api.c 	return ctx;
ctx               124 drivers/misc/cxl/api.c 	kfree(ctx);
ctx               135 drivers/misc/cxl/api.c int cxl_release_context(struct cxl_context *ctx)
ctx               137 drivers/misc/cxl/api.c 	if (ctx->status >= STARTED)
ctx               140 drivers/misc/cxl/api.c 	cxl_context_free(ctx);
ctx               146 drivers/misc/cxl/api.c static irq_hw_number_t cxl_find_afu_irq(struct cxl_context *ctx, int num)
ctx               152 drivers/misc/cxl/api.c 		range = ctx->irqs.range[r];
ctx               154 drivers/misc/cxl/api.c 			return ctx->irqs.offset[r] + num;
ctx               162 drivers/misc/cxl/api.c int cxl_set_priv(struct cxl_context *ctx, void *priv)
ctx               164 drivers/misc/cxl/api.c 	if (!ctx)
ctx               167 drivers/misc/cxl/api.c 	ctx->priv = priv;
ctx               173 drivers/misc/cxl/api.c void *cxl_get_priv(struct cxl_context *ctx)
ctx               175 drivers/misc/cxl/api.c 	if (!ctx)
ctx               178 drivers/misc/cxl/api.c 	return ctx->priv;
ctx               182 drivers/misc/cxl/api.c int cxl_allocate_afu_irqs(struct cxl_context *ctx, int num)
ctx               188 drivers/misc/cxl/api.c 		num = ctx->afu->pp_irqs;
ctx               189 drivers/misc/cxl/api.c 	res = afu_allocate_irqs(ctx, num);
ctx               197 drivers/misc/cxl/api.c 		hwirq = cxl_find_afu_irq(ctx, 0);
ctx               199 drivers/misc/cxl/api.c 			cxl_map_irq(ctx->afu->adapter, hwirq, cxl_ops->psl_interrupt, ctx, "psl");
ctx               202 drivers/misc/cxl/api.c 	if (ctx->status == STARTED) {
ctx               204 drivers/misc/cxl/api.c 			cxl_ops->update_ivtes(ctx);
ctx               212 drivers/misc/cxl/api.c void cxl_free_afu_irqs(struct cxl_context *ctx)
ctx               218 drivers/misc/cxl/api.c 		hwirq = cxl_find_afu_irq(ctx, 0);
ctx               222 drivers/misc/cxl/api.c 				cxl_unmap_irq(virq, ctx);
ctx               225 drivers/misc/cxl/api.c 	afu_irq_name_free(ctx);
ctx               226 drivers/misc/cxl/api.c 	cxl_ops->release_irq_ranges(&ctx->irqs, ctx->afu->adapter);
ctx               230 drivers/misc/cxl/api.c int cxl_map_afu_irq(struct cxl_context *ctx, int num,
ctx               238 drivers/misc/cxl/api.c 	hwirq = cxl_find_afu_irq(ctx, num);
ctx               242 drivers/misc/cxl/api.c 	return cxl_map_irq(ctx->afu->adapter, hwirq, handler, cookie, name);
ctx               246 drivers/misc/cxl/api.c void cxl_unmap_afu_irq(struct cxl_context *ctx, int num, void *cookie)
ctx               251 drivers/misc/cxl/api.c 	hwirq = cxl_find_afu_irq(ctx, num);
ctx               265 drivers/misc/cxl/api.c int cxl_start_context(struct cxl_context *ctx, u64 wed,
ctx               271 drivers/misc/cxl/api.c 	pr_devel("%s: pe: %i\n", __func__, ctx->pe);
ctx               273 drivers/misc/cxl/api.c 	mutex_lock(&ctx->status_mutex);
ctx               274 drivers/misc/cxl/api.c 	if (ctx->status == STARTED)
ctx               281 drivers/misc/cxl/api.c 	rc = cxl_adapter_context_get(ctx->afu->adapter);
ctx               286 drivers/misc/cxl/api.c 		ctx->pid = get_task_pid(task, PIDTYPE_PID);
ctx               290 drivers/misc/cxl/api.c 		ctx->mm = get_task_mm(current);
ctx               293 drivers/misc/cxl/api.c 		cxl_context_mm_count_get(ctx);
ctx               295 drivers/misc/cxl/api.c 		if (ctx->mm) {
ctx               297 drivers/misc/cxl/api.c 			mmput(ctx->mm);
ctx               299 drivers/misc/cxl/api.c 			mm_context_add_copro(ctx->mm);
ctx               312 drivers/misc/cxl/api.c 	if ((rc = cxl_ops->attach_process(ctx, kernel, wed, 0))) {
ctx               313 drivers/misc/cxl/api.c 		put_pid(ctx->pid);
ctx               314 drivers/misc/cxl/api.c 		ctx->pid = NULL;
ctx               315 drivers/misc/cxl/api.c 		cxl_adapter_context_put(ctx->afu->adapter);
ctx               318 drivers/misc/cxl/api.c 			cxl_context_mm_count_put(ctx);
ctx               319 drivers/misc/cxl/api.c 			if (ctx->mm)
ctx               320 drivers/misc/cxl/api.c 				mm_context_remove_copro(ctx->mm);
ctx               325 drivers/misc/cxl/api.c 	ctx->status = STARTED;
ctx               327 drivers/misc/cxl/api.c 	mutex_unlock(&ctx->status_mutex);
ctx               332 drivers/misc/cxl/api.c int cxl_process_element(struct cxl_context *ctx)
ctx               334 drivers/misc/cxl/api.c 	return ctx->external_pe;
ctx               339 drivers/misc/cxl/api.c int cxl_stop_context(struct cxl_context *ctx)
ctx               341 drivers/misc/cxl/api.c 	return __detach_context(ctx);
ctx               345 drivers/misc/cxl/api.c void cxl_set_master(struct cxl_context *ctx)
ctx               347 drivers/misc/cxl/api.c 	ctx->master = true;
ctx               387 drivers/misc/cxl/api.c struct file *cxl_get_fd(struct cxl_context *ctx, struct file_operations *fops,
ctx               395 drivers/misc/cxl/api.c 	if (ctx->mapping)
ctx               420 drivers/misc/cxl/api.c 	name = kasprintf(GFP_KERNEL, "cxl:%d", ctx->pe);
ctx               421 drivers/misc/cxl/api.c 	file = cxl_getfile(name, fops, ctx, flags);
ctx               426 drivers/misc/cxl/api.c 	cxl_context_set_mapping(ctx, file->f_mapping);
ctx               442 drivers/misc/cxl/api.c void cxl_set_driver_ops(struct cxl_context *ctx,
ctx               446 drivers/misc/cxl/api.c 	atomic_set(&ctx->afu_driver_events, 0);
ctx               447 drivers/misc/cxl/api.c 	ctx->afu_driver_ops = ops;
ctx               451 drivers/misc/cxl/api.c void cxl_context_events_pending(struct cxl_context *ctx,
ctx               454 drivers/misc/cxl/api.c 	atomic_add(new_events, &ctx->afu_driver_events);
ctx               455 drivers/misc/cxl/api.c 	wake_up_all(&ctx->wq);
ctx               459 drivers/misc/cxl/api.c int cxl_start_work(struct cxl_context *ctx,
ctx               466 drivers/misc/cxl/api.c 		work->num_interrupts = ctx->afu->pp_irqs;
ctx               467 drivers/misc/cxl/api.c 	else if ((work->num_interrupts < ctx->afu->pp_irqs) ||
ctx               468 drivers/misc/cxl/api.c 		 (work->num_interrupts > ctx->afu->irqs_max)) {
ctx               472 drivers/misc/cxl/api.c 	rc = afu_register_irqs(ctx, work->num_interrupts);
ctx               476 drivers/misc/cxl/api.c 	rc = cxl_start_context(ctx, work->work_element_descriptor, current);
ctx               478 drivers/misc/cxl/api.c 		afu_release_irqs(ctx, ctx);
ctx               486 drivers/misc/cxl/api.c void __iomem *cxl_psa_map(struct cxl_context *ctx)
ctx               488 drivers/misc/cxl/api.c 	if (ctx->status != STARTED)
ctx               492 drivers/misc/cxl/api.c 		__func__, ctx->psn_phys, ctx->psn_size);
ctx               493 drivers/misc/cxl/api.c 	return ioremap(ctx->psn_phys, ctx->psn_size);
ctx               503 drivers/misc/cxl/api.c int cxl_afu_reset(struct cxl_context *ctx)
ctx               505 drivers/misc/cxl/api.c 	struct cxl_afu *afu = ctx->afu;
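
The api.c entries above make up the in-kernel ("kernel API") path for driving an AFU: allocate a context, back it with AFU interrupts, then start it against a work element descriptor. A minimal sketch of how a client driver might string these exported helpers together, assuming the prototypes from include/misc/cxl.h; the WED value, device, and handler are placeholders:

#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/pci.h>
#include <misc/cxl.h>

static irqreturn_t my_afu_irq(int irq, void *cookie)
{
	/* cookie is whatever was handed to cxl_map_afu_irq() below */
	return IRQ_HANDLED;
}

static int my_driver_attach(struct pci_dev *pdev, u64 wed)
{
	struct cxl_context *ctx;
	int rc, virq;

	ctx = cxl_dev_context_init(pdev);	/* default context of this AFU */
	if (IS_ERR(ctx))
		return PTR_ERR(ctx);

	rc = cxl_allocate_afu_irqs(ctx, 1);	/* backs cxl_find_afu_irq() */
	if (rc)
		goto out_release;

	/* returns the Linux virq on success, 0 on failure */
	virq = cxl_map_afu_irq(ctx, 1, my_afu_irq, ctx, "my-afu-irq");
	if (virq <= 0) {
		rc = -ENOMEM;
		goto out_free_irqs;
	}

	/* NULL task => kernel context; cf. the ctx->pid/ctx->mm handling above */
	rc = cxl_start_context(ctx, wed, NULL);
	if (rc)
		goto out_unmap;
	return 0;

out_unmap:
	cxl_unmap_afu_irq(ctx, 1, ctx);
out_free_irqs:
	cxl_free_afu_irqs(ctx);
out_release:
	cxl_release_context(ctx);
	return rc;
}

Teardown mirrors setup: cxl_stop_context() detaches the process element, after which the interrupts and the context itself can be released.
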
ctx                35 drivers/misc/cxl/context.c int cxl_context_init(struct cxl_context *ctx, struct cxl_afu *afu, bool master)
ctx                39 drivers/misc/cxl/context.c 	ctx->afu = afu;
ctx                40 drivers/misc/cxl/context.c 	ctx->master = master;
ctx                41 drivers/misc/cxl/context.c 	ctx->pid = NULL; /* Set in start work ioctl */
ctx                42 drivers/misc/cxl/context.c 	mutex_init(&ctx->mapping_lock);
ctx                43 drivers/misc/cxl/context.c 	ctx->mapping = NULL;
ctx                44 drivers/misc/cxl/context.c 	ctx->tidr = 0;
ctx                45 drivers/misc/cxl/context.c 	ctx->assign_tidr = false;
ctx                48 drivers/misc/cxl/context.c 		spin_lock_init(&ctx->sste_lock);
ctx                57 drivers/misc/cxl/context.c 		i = cxl_alloc_sst(ctx);
ctx                62 drivers/misc/cxl/context.c 	INIT_WORK(&ctx->fault_work, cxl_handle_fault);
ctx                64 drivers/misc/cxl/context.c 	init_waitqueue_head(&ctx->wq);
ctx                65 drivers/misc/cxl/context.c 	spin_lock_init(&ctx->lock);
ctx                67 drivers/misc/cxl/context.c 	ctx->irq_bitmap = NULL;
ctx                68 drivers/misc/cxl/context.c 	ctx->pending_irq = false;
ctx                69 drivers/misc/cxl/context.c 	ctx->pending_fault = false;
ctx                70 drivers/misc/cxl/context.c 	ctx->pending_afu_err = false;
ctx                72 drivers/misc/cxl/context.c 	INIT_LIST_HEAD(&ctx->irq_names);
ctx                82 drivers/misc/cxl/context.c 		ctx->irqs.range[i] = 0;
ctx                84 drivers/misc/cxl/context.c 	mutex_init(&ctx->status_mutex);
ctx                86 drivers/misc/cxl/context.c 	ctx->status = OPENED;
ctx                94 drivers/misc/cxl/context.c 	i = idr_alloc(&ctx->afu->contexts_idr, ctx, 0,
ctx                95 drivers/misc/cxl/context.c 		      ctx->afu->num_procs, GFP_NOWAIT);
ctx               101 drivers/misc/cxl/context.c 	ctx->pe = i;
ctx               103 drivers/misc/cxl/context.c 		ctx->elem = &ctx->afu->native->spa[i];
ctx               104 drivers/misc/cxl/context.c 		ctx->external_pe = ctx->pe;
ctx               106 drivers/misc/cxl/context.c 		ctx->external_pe = -1; /* assigned when attaching */
ctx               108 drivers/misc/cxl/context.c 	ctx->pe_inserted = false;
ctx               118 drivers/misc/cxl/context.c void cxl_context_set_mapping(struct cxl_context *ctx,
ctx               121 drivers/misc/cxl/context.c 	mutex_lock(&ctx->mapping_lock);
ctx               122 drivers/misc/cxl/context.c 	ctx->mapping = mapping;
ctx               123 drivers/misc/cxl/context.c 	mutex_unlock(&ctx->mapping_lock);
ctx               129 drivers/misc/cxl/context.c 	struct cxl_context *ctx = vma->vm_file->private_data;
ctx               136 drivers/misc/cxl/context.c 			__func__, ctx->pe, vmf->address, offset);
ctx               138 drivers/misc/cxl/context.c 	if (ctx->afu->current_mode == CXL_MODE_DEDICATED) {
ctx               139 drivers/misc/cxl/context.c 		area = ctx->afu->psn_phys;
ctx               140 drivers/misc/cxl/context.c 		if (offset >= ctx->afu->adapter->ps_size)
ctx               143 drivers/misc/cxl/context.c 		area = ctx->psn_phys;
ctx               144 drivers/misc/cxl/context.c 		if (offset >= ctx->psn_size)
ctx               148 drivers/misc/cxl/context.c 	mutex_lock(&ctx->status_mutex);
ctx               150 drivers/misc/cxl/context.c 	if (ctx->status != STARTED) {
ctx               151 drivers/misc/cxl/context.c 		mutex_unlock(&ctx->status_mutex);
ctx               153 drivers/misc/cxl/context.c 		if (ctx->mmio_err_ff) {
ctx               154 drivers/misc/cxl/context.c 			if (!ctx->ff_page) {
ctx               155 drivers/misc/cxl/context.c 				ctx->ff_page = alloc_page(GFP_USER);
ctx               156 drivers/misc/cxl/context.c 				if (!ctx->ff_page)
ctx               158 drivers/misc/cxl/context.c 				memset(page_address(ctx->ff_page), 0xff, PAGE_SIZE);
ctx               160 drivers/misc/cxl/context.c 			get_page(ctx->ff_page);
ctx               161 drivers/misc/cxl/context.c 			vmf->page = ctx->ff_page;
ctx               170 drivers/misc/cxl/context.c 	mutex_unlock(&ctx->status_mutex);
ctx               182 drivers/misc/cxl/context.c int cxl_context_iomap(struct cxl_context *ctx, struct vm_area_struct *vma)
ctx               187 drivers/misc/cxl/context.c 	if (ctx->afu->current_mode == CXL_MODE_DEDICATED) {
ctx               188 drivers/misc/cxl/context.c 		if (start + len > ctx->afu->adapter->ps_size)
ctx               196 drivers/misc/cxl/context.c 			if (ctx->master && !ctx->afu->psa) {
ctx               202 drivers/misc/cxl/context.c 			if (!ctx->afu->enabled)
ctx               206 drivers/misc/cxl/context.c 		if (start + len > ctx->psn_size)
ctx               210 drivers/misc/cxl/context.c 		if ((ctx->master && !ctx->afu->psa) || (!ctx->afu->pp_psa)) {
ctx               216 drivers/misc/cxl/context.c 		if (!ctx->afu->enabled)
ctx               221 drivers/misc/cxl/context.c 		 ctx->psn_phys, ctx->pe , ctx->master);
ctx               234 drivers/misc/cxl/context.c int __detach_context(struct cxl_context *ctx)
ctx               238 drivers/misc/cxl/context.c 	mutex_lock(&ctx->status_mutex);
ctx               239 drivers/misc/cxl/context.c 	status = ctx->status;
ctx               240 drivers/misc/cxl/context.c 	ctx->status = CLOSED;
ctx               241 drivers/misc/cxl/context.c 	mutex_unlock(&ctx->status_mutex);
ctx               248 drivers/misc/cxl/context.c 	WARN_ON(cxl_ops->detach_process(ctx) &&
ctx               249 drivers/misc/cxl/context.c 		cxl_ops->link_ok(ctx->afu->adapter, ctx->afu));
ctx               250 drivers/misc/cxl/context.c 	flush_work(&ctx->fault_work); /* Only needed for dedicated process */
ctx               257 drivers/misc/cxl/context.c 		cxl_ops->irq_wait(ctx);
ctx               260 drivers/misc/cxl/context.c 	put_pid(ctx->pid);
ctx               265 drivers/misc/cxl/context.c 	cxl_adapter_context_put(ctx->afu->adapter);
ctx               268 drivers/misc/cxl/context.c 	cxl_context_mm_count_put(ctx);
ctx               269 drivers/misc/cxl/context.c 	if (ctx->mm)
ctx               270 drivers/misc/cxl/context.c 		mm_context_remove_copro(ctx->mm);
ctx               271 drivers/misc/cxl/context.c 	ctx->mm = NULL;
ctx               282 drivers/misc/cxl/context.c void cxl_context_detach(struct cxl_context *ctx)
ctx               286 drivers/misc/cxl/context.c 	rc = __detach_context(ctx);
ctx               290 drivers/misc/cxl/context.c 	afu_release_irqs(ctx, ctx);
ctx               291 drivers/misc/cxl/context.c 	wake_up_all(&ctx->wq);
ctx               299 drivers/misc/cxl/context.c 	struct cxl_context *ctx;
ctx               303 drivers/misc/cxl/context.c 	idr_for_each_entry(&afu->contexts_idr, ctx, tmp) {
ctx               308 drivers/misc/cxl/context.c 		cxl_context_detach(ctx);
ctx               316 drivers/misc/cxl/context.c 		mutex_lock(&ctx->mapping_lock);
ctx               317 drivers/misc/cxl/context.c 		if (ctx->mapping)
ctx               318 drivers/misc/cxl/context.c 			unmap_mapping_range(ctx->mapping, 0, 0, 1);
ctx               319 drivers/misc/cxl/context.c 		mutex_unlock(&ctx->mapping_lock);
ctx               326 drivers/misc/cxl/context.c 	struct cxl_context *ctx = container_of(rcu, struct cxl_context, rcu);
ctx               329 drivers/misc/cxl/context.c 		free_page((u64)ctx->sstp);
ctx               330 drivers/misc/cxl/context.c 	if (ctx->ff_page)
ctx               331 drivers/misc/cxl/context.c 		__free_page(ctx->ff_page);
ctx               332 drivers/misc/cxl/context.c 	ctx->sstp = NULL;
ctx               334 drivers/misc/cxl/context.c 	kfree(ctx->irq_bitmap);
ctx               337 drivers/misc/cxl/context.c 	cxl_afu_put(ctx->afu);
ctx               339 drivers/misc/cxl/context.c 	kfree(ctx);
ctx               342 drivers/misc/cxl/context.c void cxl_context_free(struct cxl_context *ctx)
ctx               344 drivers/misc/cxl/context.c 	if (ctx->kernelapi && ctx->mapping)
ctx               345 drivers/misc/cxl/context.c 		cxl_release_mapping(ctx);
ctx               346 drivers/misc/cxl/context.c 	mutex_lock(&ctx->afu->contexts_lock);
ctx               347 drivers/misc/cxl/context.c 	idr_remove(&ctx->afu->contexts_idr, ctx->pe);
ctx               348 drivers/misc/cxl/context.c 	mutex_unlock(&ctx->afu->contexts_lock);
ctx               349 drivers/misc/cxl/context.c 	call_rcu(&ctx->rcu, reclaim_ctx);
ctx               352 drivers/misc/cxl/context.c void cxl_context_mm_count_get(struct cxl_context *ctx)
ctx               354 drivers/misc/cxl/context.c 	if (ctx->mm)
ctx               355 drivers/misc/cxl/context.c 		atomic_inc(&ctx->mm->mm_count);
ctx               358 drivers/misc/cxl/context.c void cxl_context_mm_count_put(struct cxl_context *ctx)
ctx               360 drivers/misc/cxl/context.c 	if (ctx->mm)
ctx               361 drivers/misc/cxl/context.c 		mmdrop(ctx->mm);
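
The context.c entries show the lifecycle machinery: a process element number handed out from an idr, state transitions guarded by status_mutex, and a free path deferred through RCU (reclaim_ctx). A stripped-down sketch of that allocate/lookup/free pattern, with illustrative names only:

#include <linux/idr.h>
#include <linux/rcupdate.h>
#include <linux/slab.h>

struct demo_ctx {
	int pe;			/* small integer id, like ctx->pe */
	struct rcu_head rcu;
};

static DEFINE_IDR(demo_idr);
static DEFINE_MUTEX(demo_lock);

static struct demo_ctx *demo_ctx_alloc(int max_procs)
{
	struct demo_ctx *c = kzalloc(sizeof(*c), GFP_KERNEL);

	if (!c)
		return NULL;
	mutex_lock(&demo_lock);
	c->pe = idr_alloc(&demo_idr, c, 0, max_procs, GFP_KERNEL);
	mutex_unlock(&demo_lock);
	if (c->pe < 0) {
		kfree(c);
		return NULL;
	}
	return c;
}

static void demo_reclaim(struct rcu_head *rcu)
{
	kfree(container_of(rcu, struct demo_ctx, rcu));
}

static void demo_ctx_free(struct demo_ctx *c)
{
	mutex_lock(&demo_lock);
	idr_remove(&demo_idr, c->pe);	/* no new lookups can find c */
	mutex_unlock(&demo_lock);
	call_rcu(&c->rcu, demo_reclaim);	/* existing readers may still hold c */
}
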
ctx               632 drivers/misc/cxl/cxl.h 	irqreturn_t (*handle_interrupt)(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info);
ctx               635 drivers/misc/cxl/cxl.h 	int (*attach_afu_directed)(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               636 drivers/misc/cxl/cxl.h 	int (*attach_dedicated_process)(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               637 drivers/misc/cxl/cxl.h 	void (*update_dedicated_ivtes)(struct cxl_context *ctx);
ctx               640 drivers/misc/cxl/cxl.h 	void (*psl_irq_dump_registers)(struct cxl_context *ctx);
ctx               876 drivers/misc/cxl/cxl.h void cxl_context_free(struct cxl_context *ctx);
ctx               877 drivers/misc/cxl/cxl.h void cxl_context_detach(struct cxl_context *ctx);
ctx               896 drivers/misc/cxl/cxl.h int afu_register_irqs(struct cxl_context *ctx, u32 count);
ctx               897 drivers/misc/cxl/cxl.h void afu_release_irqs(struct cxl_context *ctx, void *cookie);
ctx               898 drivers/misc/cxl/cxl.h void afu_irq_name_free(struct cxl_context *ctx);
ctx               900 drivers/misc/cxl/cxl.h int cxl_attach_afu_directed_psl9(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               901 drivers/misc/cxl/cxl.h int cxl_attach_afu_directed_psl8(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               904 drivers/misc/cxl/cxl.h int cxl_attach_dedicated_process_psl9(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               905 drivers/misc/cxl/cxl.h int cxl_attach_dedicated_process_psl8(struct cxl_context *ctx, u64 wed, u64 amr);
ctx               906 drivers/misc/cxl/cxl.h void cxl_update_dedicated_ivtes_psl9(struct cxl_context *ctx);
ctx               907 drivers/misc/cxl/cxl.h void cxl_update_dedicated_ivtes_psl8(struct cxl_context *ctx);
ctx               969 drivers/misc/cxl/cxl.h void cxl_prefault(struct cxl_context *ctx, u64 wed);
ctx               973 drivers/misc/cxl/cxl.h int cxl_alloc_sst(struct cxl_context *ctx);
ctx               979 drivers/misc/cxl/cxl.h int cxl_context_init(struct cxl_context *ctx, struct cxl_afu *afu, bool master);
ctx               980 drivers/misc/cxl/cxl.h void cxl_context_set_mapping(struct cxl_context *ctx,
ctx               982 drivers/misc/cxl/cxl.h void cxl_context_free(struct cxl_context *ctx);
ctx               983 drivers/misc/cxl/cxl.h int cxl_context_iomap(struct cxl_context *ctx, struct vm_area_struct *vma);
ctx               987 drivers/misc/cxl/cxl.h int __detach_context(struct cxl_context *ctx);
ctx              1007 drivers/misc/cxl/cxl.h void cxl_assign_psn_space(struct cxl_context *ctx);
ctx              1010 drivers/misc/cxl/cxl.h irqreturn_t cxl_irq_psl9(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info);
ctx              1011 drivers/misc/cxl/cxl.h irqreturn_t cxl_irq_psl8(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info);
ctx              1028 drivers/misc/cxl/cxl.h void cxl_native_irq_dump_regs_psl9(struct cxl_context *ctx);
ctx              1029 drivers/misc/cxl/cxl.h void cxl_native_irq_dump_regs_psl8(struct cxl_context *ctx);
ctx              1034 drivers/misc/cxl/cxl.h void cxl_release_mapping(struct cxl_context *ctx);
ctx              1038 drivers/misc/cxl/cxl.h int afu_allocate_irqs(struct cxl_context *ctx, u32 count);
ctx              1074 drivers/misc/cxl/cxl.h 	irqreturn_t (*handle_psl_slice_error)(struct cxl_context *ctx,
ctx              1077 drivers/misc/cxl/cxl.h 	int (*ack_irq)(struct cxl_context *ctx, u64 tfc, u64 psl_reset_mask);
ctx              1078 drivers/misc/cxl/cxl.h 	void (*irq_wait)(struct cxl_context *ctx);
ctx              1079 drivers/misc/cxl/cxl.h 	int (*attach_process)(struct cxl_context *ctx, bool kernel,
ctx              1081 drivers/misc/cxl/cxl.h 	int (*detach_process)(struct cxl_context *ctx);
ctx              1082 drivers/misc/cxl/cxl.h 	void (*update_ivtes)(struct cxl_context *ctx);
ctx              1127 drivers/misc/cxl/cxl.h void cxl_context_mm_count_get(struct cxl_context *ctx);
ctx              1130 drivers/misc/cxl/cxl.h void cxl_context_mm_count_put(struct cxl_context *ctx);
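
The cxl.h entries are mostly prototypes plus tables of function pointers (the backend and service-layer ops), with PSL8/PSL9 variants of each hook. The shape is the usual kernel ops-table dispatch: pick the table once at probe time and keep every caller backend-agnostic. A toy sketch with illustrative names:

#include <linux/types.h>

struct demo_ctx;	/* opaque to callers, like struct cxl_context here */

struct demo_backend_ops {
	int (*attach_process)(struct demo_ctx *ctx, bool kernel, u64 wed, u64 amr);
	int (*detach_process)(struct demo_ctx *ctx);
};

static int demo_native_attach(struct demo_ctx *ctx, bool kernel, u64 wed, u64 amr)
{
	return 0;	/* stub */
}

static int demo_native_detach(struct demo_ctx *ctx)
{
	return 0;	/* stub */
}

static const struct demo_backend_ops demo_native_ops = {
	.attach_process	= demo_native_attach,
	.detach_process	= demo_native_detach,
};

/* chosen once at probe time (native vs. guest, PSL8 vs. PSL9) */
static const struct demo_backend_ops *demo_ops = &demo_native_ops;

static int demo_start(struct demo_ctx *ctx, u64 wed)
{
	/* callers never branch on the backend; the table does */
	return demo_ops->attach_process(ctx, true, wed, 0);
}
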
ctx                32 drivers/misc/cxl/fault.c static struct cxl_sste *find_free_sste(struct cxl_context *ctx,
ctx                36 drivers/misc/cxl/fault.c 	unsigned int mask = (ctx->sst_size >> 7) - 1; /* SSTP0[SegTableSize] */
ctx                45 drivers/misc/cxl/fault.c 	primary = ctx->sstp + (hash << 3);
ctx                57 drivers/misc/cxl/fault.c 	ret = primary + ctx->sst_lru;
ctx                58 drivers/misc/cxl/fault.c 	ctx->sst_lru = (ctx->sst_lru + 1) & 0x7;
ctx                63 drivers/misc/cxl/fault.c static void cxl_load_segment(struct cxl_context *ctx, struct copro_slb *slb)
ctx                69 drivers/misc/cxl/fault.c 	spin_lock_irqsave(&ctx->sste_lock, flags);
ctx                70 drivers/misc/cxl/fault.c 	sste = find_free_sste(ctx, slb);
ctx                75 drivers/misc/cxl/fault.c 			sste - ctx->sstp, slb->vsid, slb->esid);
ctx                76 drivers/misc/cxl/fault.c 	trace_cxl_ste_write(ctx, sste - ctx->sstp, slb->esid, slb->vsid);
ctx                81 drivers/misc/cxl/fault.c 	spin_unlock_irqrestore(&ctx->sste_lock, flags);
ctx                84 drivers/misc/cxl/fault.c static int cxl_fault_segment(struct cxl_context *ctx, struct mm_struct *mm,
ctx                91 drivers/misc/cxl/fault.c 		cxl_load_segment(ctx, &slb);
ctx                97 drivers/misc/cxl/fault.c static void cxl_ack_ae(struct cxl_context *ctx)
ctx               101 drivers/misc/cxl/fault.c 	cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_AE, 0);
ctx               103 drivers/misc/cxl/fault.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               104 drivers/misc/cxl/fault.c 	ctx->pending_fault = true;
ctx               105 drivers/misc/cxl/fault.c 	ctx->fault_addr = ctx->dar;
ctx               106 drivers/misc/cxl/fault.c 	ctx->fault_dsisr = ctx->dsisr;
ctx               107 drivers/misc/cxl/fault.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               109 drivers/misc/cxl/fault.c 	wake_up_all(&ctx->wq);
ctx               112 drivers/misc/cxl/fault.c static int cxl_handle_segment_miss(struct cxl_context *ctx,
ctx               117 drivers/misc/cxl/fault.c 	pr_devel("CXL interrupt: Segment fault pe: %i ea: %#llx\n", ctx->pe, ea);
ctx               118 drivers/misc/cxl/fault.c 	trace_cxl_ste_miss(ctx, ea);
ctx               120 drivers/misc/cxl/fault.c 	if ((rc = cxl_fault_segment(ctx, mm, ea)))
ctx               121 drivers/misc/cxl/fault.c 		cxl_ack_ae(ctx);
ctx               125 drivers/misc/cxl/fault.c 		cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_R, 0);
ctx               180 drivers/misc/cxl/fault.c static void cxl_handle_page_fault(struct cxl_context *ctx,
ctx               184 drivers/misc/cxl/fault.c 	trace_cxl_pte_miss(ctx, dsisr, dar);
ctx               187 drivers/misc/cxl/fault.c 		cxl_ack_ae(ctx);
ctx               189 drivers/misc/cxl/fault.c 		pr_devel("Page fault successfully handled for pe: %i!\n", ctx->pe);
ctx               190 drivers/misc/cxl/fault.c 		cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_R, 0);
ctx               198 drivers/misc/cxl/fault.c static struct mm_struct *get_mem_context(struct cxl_context *ctx)
ctx               200 drivers/misc/cxl/fault.c 	if (ctx->mm == NULL)
ctx               203 drivers/misc/cxl/fault.c 	if (!atomic_inc_not_zero(&ctx->mm->mm_users))
ctx               206 drivers/misc/cxl/fault.c 	return ctx->mm;
ctx               209 drivers/misc/cxl/fault.c static bool cxl_is_segment_miss(struct cxl_context *ctx, u64 dsisr)
ctx               217 drivers/misc/cxl/fault.c static bool cxl_is_page_fault(struct cxl_context *ctx, u64 dsisr)
ctx               230 drivers/misc/cxl/fault.c 	struct cxl_context *ctx =
ctx               232 drivers/misc/cxl/fault.c 	u64 dsisr = ctx->dsisr;
ctx               233 drivers/misc/cxl/fault.c 	u64 dar = ctx->dar;
ctx               237 drivers/misc/cxl/fault.c 		if (cxl_p2n_read(ctx->afu, CXL_PSL_DSISR_An) != dsisr ||
ctx               238 drivers/misc/cxl/fault.c 		    cxl_p2n_read(ctx->afu, CXL_PSL_DAR_An) != dar ||
ctx               239 drivers/misc/cxl/fault.c 		    cxl_p2n_read(ctx->afu, CXL_PSL_PEHandle_An) != ctx->pe) {
ctx               244 drivers/misc/cxl/fault.c 			dev_notice(&ctx->afu->dev, "cxl_handle_fault: Translation fault regs changed\n");
ctx               250 drivers/misc/cxl/fault.c 	if (ctx->status == CLOSED) {
ctx               251 drivers/misc/cxl/fault.c 		cxl_ack_ae(ctx);
ctx               256 drivers/misc/cxl/fault.c 		"DSISR: %#llx DAR: %#llx\n", ctx->pe, dsisr, dar);
ctx               258 drivers/misc/cxl/fault.c 	if (!ctx->kernel) {
ctx               260 drivers/misc/cxl/fault.c 		mm = get_mem_context(ctx);
ctx               263 drivers/misc/cxl/fault.c 				 __func__, ctx->pe, pid_nr(ctx->pid));
ctx               264 drivers/misc/cxl/fault.c 			cxl_ack_ae(ctx);
ctx               268 drivers/misc/cxl/fault.c 				 ctx->pe, pid_nr(ctx->pid));
ctx               272 drivers/misc/cxl/fault.c 	if (cxl_is_segment_miss(ctx, dsisr))
ctx               273 drivers/misc/cxl/fault.c 		cxl_handle_segment_miss(ctx, mm, dar);
ctx               274 drivers/misc/cxl/fault.c 	else if (cxl_is_page_fault(ctx, dsisr))
ctx               275 drivers/misc/cxl/fault.c 		cxl_handle_page_fault(ctx, mm, dsisr, dar);
ctx               283 drivers/misc/cxl/fault.c static void cxl_prefault_one(struct cxl_context *ctx, u64 ea)
ctx               287 drivers/misc/cxl/fault.c 	mm = get_mem_context(ctx);
ctx               290 drivers/misc/cxl/fault.c 			 pid_nr(ctx->pid));
ctx               294 drivers/misc/cxl/fault.c 	cxl_fault_segment(ctx, mm, ea);
ctx               309 drivers/misc/cxl/fault.c static void cxl_prefault_vma(struct cxl_context *ctx)
ctx               317 drivers/misc/cxl/fault.c 	mm = get_mem_context(ctx);
ctx               320 drivers/misc/cxl/fault.c 			 pid_nr(ctx->pid));
ctx               335 drivers/misc/cxl/fault.c 			cxl_load_segment(ctx, &slb);
ctx               344 drivers/misc/cxl/fault.c void cxl_prefault(struct cxl_context *ctx, u64 wed)
ctx               346 drivers/misc/cxl/fault.c 	switch (ctx->afu->prefault_mode) {
ctx               348 drivers/misc/cxl/fault.c 		cxl_prefault_one(ctx, wed);
ctx               351 drivers/misc/cxl/fault.c 		cxl_prefault_vma(ctx);
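
The fault worker in fault.c runs long after attach, so it must re-take a usable reference on the task's address space before walking it: get_mem_context() succeeds only while mm_users is still nonzero, because the context itself pins just mm_count. A sketch of that upgrade, assuming the same grabbed-but-not-gotten mm arrangement (names illustrative):

#include <linux/mm.h>
#include <linux/sched/mm.h>
#include <linux/types.h>

static struct mm_struct *demo_get_mm(struct mm_struct *mm)
{
	if (!mm)
		return NULL;
	/* equivalent to mmget_not_zero(): fails once exit_mmap() has run */
	if (!atomic_inc_not_zero(&mm->mm_users))
		return NULL;
	return mm;
}

static void demo_handle_fault(struct mm_struct *ctx_mm, u64 dar)
{
	struct mm_struct *mm = demo_get_mm(ctx_mm);

	if (!mm)
		return;		/* cf. cxl_ack_ae(): fail the translation instead */
	/* ... resolve dar against mm, e.g. via copro_handle_mm_fault() ... */
	mmput(mm);		/* drop the mm_users reference when done */
}
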
ctx                47 drivers/misc/cxl/file.c 	struct cxl_context *ctx;
ctx                82 drivers/misc/cxl/file.c 	if (!(ctx = cxl_context_alloc())) {
ctx                87 drivers/misc/cxl/file.c 	rc = cxl_context_init(ctx, afu, master);
ctx                91 drivers/misc/cxl/file.c 	cxl_context_set_mapping(ctx, inode->i_mapping);
ctx                93 drivers/misc/cxl/file.c 	pr_devel("afu_open pe: %i\n", ctx->pe);
ctx                94 drivers/misc/cxl/file.c 	file->private_data = ctx;
ctx               119 drivers/misc/cxl/file.c 	struct cxl_context *ctx = file->private_data;
ctx               122 drivers/misc/cxl/file.c 		 __func__, ctx->pe);
ctx               123 drivers/misc/cxl/file.c 	cxl_context_detach(ctx);
ctx               130 drivers/misc/cxl/file.c 	if (!ctx->kernelapi) {
ctx               131 drivers/misc/cxl/file.c 		mutex_lock(&ctx->mapping_lock);
ctx               132 drivers/misc/cxl/file.c 		ctx->mapping = NULL;
ctx               133 drivers/misc/cxl/file.c 		mutex_unlock(&ctx->mapping_lock);
ctx               142 drivers/misc/cxl/file.c 	cxl_context_free(ctx);
ctx               147 drivers/misc/cxl/file.c static long afu_ioctl_start_work(struct cxl_context *ctx,
ctx               154 drivers/misc/cxl/file.c 	pr_devel("%s: pe: %i\n", __func__, ctx->pe);
ctx               161 drivers/misc/cxl/file.c 	mutex_lock(&ctx->status_mutex);
ctx               162 drivers/misc/cxl/file.c 	if (ctx->status != OPENED) {
ctx               179 drivers/misc/cxl/file.c 		work.num_interrupts = ctx->afu->pp_irqs;
ctx               180 drivers/misc/cxl/file.c 	else if ((work.num_interrupts < ctx->afu->pp_irqs) ||
ctx               181 drivers/misc/cxl/file.c 		 (work.num_interrupts > ctx->afu->irqs_max)) {
ctx               186 drivers/misc/cxl/file.c 	if ((rc = afu_register_irqs(ctx, work.num_interrupts)))
ctx               193 drivers/misc/cxl/file.c 		ctx->assign_tidr = true;
ctx               195 drivers/misc/cxl/file.c 	ctx->mmio_err_ff = !!(work.flags & CXL_START_WORK_ERR_FF);
ctx               201 drivers/misc/cxl/file.c 	rc = cxl_adapter_context_get(ctx->afu->adapter);
ctx               203 drivers/misc/cxl/file.c 		afu_release_irqs(ctx, ctx);
ctx               216 drivers/misc/cxl/file.c 	ctx->pid = get_task_pid(current, PIDTYPE_PID);
ctx               219 drivers/misc/cxl/file.c 	ctx->mm = get_task_mm(current);
ctx               222 drivers/misc/cxl/file.c 	cxl_context_mm_count_get(ctx);
ctx               224 drivers/misc/cxl/file.c 	if (ctx->mm) {
ctx               226 drivers/misc/cxl/file.c 		mmput(ctx->mm);
ctx               228 drivers/misc/cxl/file.c 		mm_context_add_copro(ctx->mm);
ctx               251 drivers/misc/cxl/file.c 	trace_cxl_attach(ctx, work.work_element_descriptor, work.num_interrupts, amr);
ctx               253 drivers/misc/cxl/file.c 	if ((rc = cxl_ops->attach_process(ctx, false, work.work_element_descriptor,
ctx               255 drivers/misc/cxl/file.c 		afu_release_irqs(ctx, ctx);
ctx               256 drivers/misc/cxl/file.c 		cxl_adapter_context_put(ctx->afu->adapter);
ctx               257 drivers/misc/cxl/file.c 		put_pid(ctx->pid);
ctx               258 drivers/misc/cxl/file.c 		ctx->pid = NULL;
ctx               260 drivers/misc/cxl/file.c 		cxl_context_mm_count_put(ctx);
ctx               261 drivers/misc/cxl/file.c 		if (ctx->mm)
ctx               262 drivers/misc/cxl/file.c 			mm_context_remove_copro(ctx->mm);
ctx               268 drivers/misc/cxl/file.c 		work.tid = ctx->tidr;
ctx               273 drivers/misc/cxl/file.c 	ctx->status = STARTED;
ctx               276 drivers/misc/cxl/file.c 	mutex_unlock(&ctx->status_mutex);
ctx               280 drivers/misc/cxl/file.c static long afu_ioctl_process_element(struct cxl_context *ctx,
ctx               283 drivers/misc/cxl/file.c 	pr_devel("%s: pe: %i\n", __func__, ctx->pe);
ctx               285 drivers/misc/cxl/file.c 	if (copy_to_user(upe, &ctx->external_pe, sizeof(__u32)))
ctx               291 drivers/misc/cxl/file.c static long afu_ioctl_get_afu_id(struct cxl_context *ctx,
ctx               296 drivers/misc/cxl/file.c 	afuid.card_id = ctx->afu->adapter->adapter_num;
ctx               297 drivers/misc/cxl/file.c 	afuid.afu_offset = ctx->afu->slice;
ctx               298 drivers/misc/cxl/file.c 	afuid.afu_mode = ctx->afu->current_mode;
ctx               301 drivers/misc/cxl/file.c 	if (ctx->afu->current_mode == CXL_MODE_DIRECTED && !ctx->master)
ctx               312 drivers/misc/cxl/file.c 	struct cxl_context *ctx = file->private_data;
ctx               314 drivers/misc/cxl/file.c 	if (ctx->status == CLOSED)
ctx               317 drivers/misc/cxl/file.c 	if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               323 drivers/misc/cxl/file.c 		return afu_ioctl_start_work(ctx, (struct cxl_ioctl_start_work __user *)arg);
ctx               325 drivers/misc/cxl/file.c 		return afu_ioctl_process_element(ctx, (__u32 __user *)arg);
ctx               327 drivers/misc/cxl/file.c 		return afu_ioctl_get_afu_id(ctx, (struct cxl_afu_id __user *)
ctx               341 drivers/misc/cxl/file.c 	struct cxl_context *ctx = file->private_data;
ctx               344 drivers/misc/cxl/file.c 	if (ctx->status != STARTED)
ctx               347 drivers/misc/cxl/file.c 	if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               350 drivers/misc/cxl/file.c 	return cxl_context_iomap(ctx, vm);
ctx               353 drivers/misc/cxl/file.c static inline bool ctx_event_pending(struct cxl_context *ctx)
ctx               355 drivers/misc/cxl/file.c 	if (ctx->pending_irq || ctx->pending_fault || ctx->pending_afu_err)
ctx               358 drivers/misc/cxl/file.c 	if (ctx->afu_driver_ops && atomic_read(&ctx->afu_driver_events))
ctx               366 drivers/misc/cxl/file.c 	struct cxl_context *ctx = file->private_data;
ctx               371 drivers/misc/cxl/file.c 	poll_wait(file, &ctx->wq, poll);
ctx               373 drivers/misc/cxl/file.c 	pr_devel("afu_poll wait done pe: %i\n", ctx->pe);
ctx               375 drivers/misc/cxl/file.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               376 drivers/misc/cxl/file.c 	if (ctx_event_pending(ctx))
ctx               378 drivers/misc/cxl/file.c 	else if (ctx->status == CLOSED)
ctx               382 drivers/misc/cxl/file.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               384 drivers/misc/cxl/file.c 	pr_devel("afu_poll pe: %i returning %#x\n", ctx->pe, mask);
ctx               389 drivers/misc/cxl/file.c static ssize_t afu_driver_event_copy(struct cxl_context *ctx,
ctx               396 drivers/misc/cxl/file.c 		ctx->afu_driver_ops->event_delivered(ctx, pl, -EINVAL);
ctx               403 drivers/misc/cxl/file.c 		ctx->afu_driver_ops->event_delivered(ctx, pl, -EINVAL);
ctx               409 drivers/misc/cxl/file.c 		ctx->afu_driver_ops->event_delivered(ctx, pl, -EFAULT);
ctx               416 drivers/misc/cxl/file.c 		ctx->afu_driver_ops->event_delivered(ctx, pl, -EFAULT);
ctx               420 drivers/misc/cxl/file.c 	ctx->afu_driver_ops->event_delivered(ctx, pl, 0); /* Success */
ctx               427 drivers/misc/cxl/file.c 	struct cxl_context *ctx = file->private_data;
ctx               434 drivers/misc/cxl/file.c 	if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               440 drivers/misc/cxl/file.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               443 drivers/misc/cxl/file.c 		prepare_to_wait(&ctx->wq, &wait, TASK_INTERRUPTIBLE);
ctx               444 drivers/misc/cxl/file.c 		if (ctx_event_pending(ctx) || (ctx->status == CLOSED))
ctx               447 drivers/misc/cxl/file.c 		if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu)) {
ctx               462 drivers/misc/cxl/file.c 		spin_unlock_irqrestore(&ctx->lock, flags);
ctx               466 drivers/misc/cxl/file.c 		spin_lock_irqsave(&ctx->lock, flags);
ctx               469 drivers/misc/cxl/file.c 	finish_wait(&ctx->wq, &wait);
ctx               472 drivers/misc/cxl/file.c 	event.header.process_element = ctx->pe;
ctx               474 drivers/misc/cxl/file.c 	if (ctx->afu_driver_ops && atomic_read(&ctx->afu_driver_events)) {
ctx               476 drivers/misc/cxl/file.c 		pl = ctx->afu_driver_ops->fetch_event(ctx);
ctx               477 drivers/misc/cxl/file.c 		atomic_dec(&ctx->afu_driver_events);
ctx               479 drivers/misc/cxl/file.c 	} else if (ctx->pending_irq) {
ctx               483 drivers/misc/cxl/file.c 		event.irq.irq = find_first_bit(ctx->irq_bitmap, ctx->irq_count) + 1;
ctx               484 drivers/misc/cxl/file.c 		clear_bit(event.irq.irq - 1, ctx->irq_bitmap);
ctx               485 drivers/misc/cxl/file.c 		if (bitmap_empty(ctx->irq_bitmap, ctx->irq_count))
ctx               486 drivers/misc/cxl/file.c 			ctx->pending_irq = false;
ctx               487 drivers/misc/cxl/file.c 	} else if (ctx->pending_fault) {
ctx               491 drivers/misc/cxl/file.c 		event.fault.addr = ctx->fault_addr;
ctx               492 drivers/misc/cxl/file.c 		event.fault.dsisr = ctx->fault_dsisr;
ctx               493 drivers/misc/cxl/file.c 		ctx->pending_fault = false;
ctx               494 drivers/misc/cxl/file.c 	} else if (ctx->pending_afu_err) {
ctx               498 drivers/misc/cxl/file.c 		event.afu_error.error = ctx->afu_err;
ctx               499 drivers/misc/cxl/file.c 		ctx->pending_afu_err = false;
ctx               500 drivers/misc/cxl/file.c 	} else if (ctx->status == CLOSED) {
ctx               502 drivers/misc/cxl/file.c 		spin_unlock_irqrestore(&ctx->lock, flags);
ctx               507 drivers/misc/cxl/file.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               510 drivers/misc/cxl/file.c 		return afu_driver_event_copy(ctx, buf, &event, pl);
ctx               517 drivers/misc/cxl/file.c 	finish_wait(&ctx->wq, &wait);
ctx               518 drivers/misc/cxl/file.c 	spin_unlock_irqrestore(&ctx->lock, flags);
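
file.c implements the userspace contract: CXL_IOCTL_START_WORK attaches the context, then poll()/read() deliver struct cxl_event records drained from the pending-irq/fault/error state seen above. A hedged userspace sketch of that flow; the device path and WED are placeholders, and the authoritative definitions live in the uapi header misc/cxl.h and Documentation/powerpc/cxl.rst:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <misc/cxl.h>

int main(void)
{
	struct cxl_ioctl_start_work work;
	struct cxl_event event;
	int fd = open("/dev/cxl/afu0.0s", O_RDWR);	/* placeholder AFU node */

	if (fd < 0)
		return 1;

	memset(&work, 0, sizeof(work));
	work.work_element_descriptor = 0;	/* placeholder WED */
	/* flags/num_interrupts left 0 => kernel applies the pp_irqs default */
	if (ioctl(fd, CXL_IOCTL_START_WORK, &work) < 0)
		return 1;

	/* afu_read() returns one struct cxl_event per call */
	while (read(fd, &event, sizeof(event)) > 0) {
		if (event.header.type == CXL_EVENT_AFU_INTERRUPT)
			printf("AFU interrupt %u\n", event.irq.irq);
	}
	close(fd);
	return 0;
}
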
ctx                55 drivers/misc/cxl/guest.c static irqreturn_t guest_handle_psl_slice_error(struct cxl_context *ctx, u64 dsisr,
ctx                59 drivers/misc/cxl/guest.c 	dev_crit(&ctx->afu->dev, "PSL ERROR STATUS: 0x%.16llx\n", errstat);
ctx                61 drivers/misc/cxl/guest.c 	return cxl_ops->ack_irq(ctx, 0, errstat);
ctx               150 drivers/misc/cxl/guest.c static int guest_get_irq_info(struct cxl_context *ctx, struct cxl_irq_info *info)
ctx               152 drivers/misc/cxl/guest.c 	return cxl_h_collect_int_info(ctx->afu->guest->handle, ctx->process_token, info);
ctx               157 drivers/misc/cxl/guest.c 	struct cxl_context *ctx = data;
ctx               161 drivers/misc/cxl/guest.c 	pr_devel("%d: received PSL interrupt %i\n", ctx->pe, irq);
ctx               162 drivers/misc/cxl/guest.c 	rc = guest_get_irq_info(ctx, &irq_info);
ctx               168 drivers/misc/cxl/guest.c 	rc = cxl_irq_psl8(irq, ctx, &irq_info);
ctx               373 drivers/misc/cxl/guest.c static int guest_ack_irq(struct cxl_context *ctx, u64 tfc, u64 psl_reset_mask)
ctx               375 drivers/misc/cxl/guest.c 	return cxl_h_control_faults(ctx->afu->guest->handle, ctx->process_token,
ctx               379 drivers/misc/cxl/guest.c static void disable_afu_irqs(struct cxl_context *ctx)
ctx               385 drivers/misc/cxl/guest.c 	pr_devel("Disabling AFU(%d) interrupts\n", ctx->afu->slice);
ctx               387 drivers/misc/cxl/guest.c 		hwirq = ctx->irqs.offset[r];
ctx               388 drivers/misc/cxl/guest.c 		for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) {
ctx               395 drivers/misc/cxl/guest.c static void enable_afu_irqs(struct cxl_context *ctx)
ctx               401 drivers/misc/cxl/guest.c 	pr_devel("Enabling AFU(%d) interrupts\n", ctx->afu->slice);
ctx               403 drivers/misc/cxl/guest.c 		hwirq = ctx->irqs.offset[r];
ctx               404 drivers/misc/cxl/guest.c 		for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) {
ctx               515 drivers/misc/cxl/guest.c static int attach_afu_directed(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               518 drivers/misc/cxl/guest.c 	struct cxl *adapter = ctx->afu->adapter;
ctx               531 drivers/misc/cxl/guest.c 	if (ctx->kernel) {
ctx               555 drivers/misc/cxl/guest.c 	cxl_prefault(ctx, wed);
ctx               557 drivers/misc/cxl/guest.c 	elem->common.u.psl8.sstp0  = cpu_to_be64(ctx->sstp0);
ctx               558 drivers/misc/cxl/guest.c 	elem->common.u.psl8.sstp1  = cpu_to_be64(ctx->sstp1);
ctx               564 drivers/misc/cxl/guest.c 	if (ctx->irqs.range[0] == 0) {
ctx               565 drivers/misc/cxl/guest.c 		rc = afu_register_irqs(ctx, 0);
ctx               571 drivers/misc/cxl/guest.c 		for (i = 0; i < ctx->irqs.range[r]; i++) {
ctx               573 drivers/misc/cxl/guest.c 				elem->pslVirtualIsn = cpu_to_be32(ctx->irqs.offset[0]);
ctx               575 drivers/misc/cxl/guest.c 				idx = ctx->irqs.offset[r] + i - adapter->guest->irq_base_offset;
ctx               583 drivers/misc/cxl/guest.c 	disable_afu_irqs(ctx);
ctx               585 drivers/misc/cxl/guest.c 	rc = cxl_h_attach_process(ctx->afu->guest->handle, elem,
ctx               586 drivers/misc/cxl/guest.c 				&ctx->process_token, &mmio_addr, &mmio_size);
ctx               588 drivers/misc/cxl/guest.c 		if (ctx->master || !ctx->afu->pp_psa) {
ctx               589 drivers/misc/cxl/guest.c 			ctx->psn_phys = ctx->afu->psn_phys;
ctx               590 drivers/misc/cxl/guest.c 			ctx->psn_size = ctx->afu->adapter->ps_size;
ctx               592 drivers/misc/cxl/guest.c 			ctx->psn_phys = mmio_addr;
ctx               593 drivers/misc/cxl/guest.c 			ctx->psn_size = mmio_size;
ctx               595 drivers/misc/cxl/guest.c 		if (ctx->afu->pp_psa && mmio_size &&
ctx               596 drivers/misc/cxl/guest.c 			ctx->afu->pp_size == 0) {
ctx               605 drivers/misc/cxl/guest.c 			ctx->afu->pp_size = mmio_size;
ctx               608 drivers/misc/cxl/guest.c 		ctx->external_pe = ctx->process_token & 0xFFFFFFFF;
ctx               610 drivers/misc/cxl/guest.c 			ctx->pe, ctx->external_pe, ctx->psn_size);
ctx               611 drivers/misc/cxl/guest.c 		ctx->pe_inserted = true;
ctx               612 drivers/misc/cxl/guest.c 		enable_afu_irqs(ctx);
ctx               620 drivers/misc/cxl/guest.c static int guest_attach_process(struct cxl_context *ctx, bool kernel, u64 wed, u64 amr)
ctx               624 drivers/misc/cxl/guest.c 	ctx->kernel = kernel;
ctx               625 drivers/misc/cxl/guest.c 	if (ctx->afu->current_mode == CXL_MODE_DIRECTED)
ctx               626 drivers/misc/cxl/guest.c 		return attach_afu_directed(ctx, wed, amr);
ctx               633 drivers/misc/cxl/guest.c static int detach_afu_directed(struct cxl_context *ctx)
ctx               635 drivers/misc/cxl/guest.c 	if (!ctx->pe_inserted)
ctx               637 drivers/misc/cxl/guest.c 	if (cxl_h_detach_process(ctx->afu->guest->handle, ctx->process_token))
ctx               642 drivers/misc/cxl/guest.c static int guest_detach_process(struct cxl_context *ctx)
ctx               645 drivers/misc/cxl/guest.c 	trace_cxl_detach(ctx);
ctx               647 drivers/misc/cxl/guest.c 	if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               650 drivers/misc/cxl/guest.c 	if (ctx->afu->current_mode == CXL_MODE_DIRECTED)
ctx               651 drivers/misc/cxl/guest.c 		return detach_afu_directed(ctx);
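
In guest.c the process element is built in memory and handed to the hypervisor via cxl_h_attach_process(), so every field is stored explicitly big-endian and the returned process_token carries the external PE in its low 32 bits. A minimal sketch of that packing discipline (illustrative struct, not the real element layout):

#include <linux/types.h>
#include <asm/byteorder.h>

struct demo_element {		/* not the real element layout */
	__be64 wed;
	__be64 amr;
	__be32 pid;
} __packed;

static void demo_fill_element(struct demo_element *elem, u64 wed, u64 amr, u32 pid)
{
	elem->wed = cpu_to_be64(wed);	/* byte order fixed regardless of host */
	elem->amr = cpu_to_be64(amr);
	elem->pid = cpu_to_be32(pid);
}

static u32 demo_external_pe(u64 process_token)
{
	return process_token & 0xFFFFFFFF;	/* cf. attach_afu_directed() above */
}
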
ctx                25 drivers/misc/cxl/irq.c static irqreturn_t schedule_cxl_fault(struct cxl_context *ctx, u64 dsisr, u64 dar)
ctx                27 drivers/misc/cxl/irq.c 	ctx->dsisr = dsisr;
ctx                28 drivers/misc/cxl/irq.c 	ctx->dar = dar;
ctx                29 drivers/misc/cxl/irq.c 	schedule_work(&ctx->fault_work);
ctx                33 drivers/misc/cxl/irq.c irqreturn_t cxl_irq_psl9(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info)
ctx                40 drivers/misc/cxl/irq.c 	trace_cxl_psl9_irq(ctx, irq, dsisr, dar);
ctx                42 drivers/misc/cxl/irq.c 	pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar);
ctx                45 drivers/misc/cxl/irq.c 		pr_devel("CXL interrupt: Scheduling translation fault handling for later (pe: %i)\n", ctx->pe);
ctx                46 drivers/misc/cxl/irq.c 		return schedule_cxl_fault(ctx, dsisr, dar);
ctx                50 drivers/misc/cxl/irq.c 		return cxl_ops->handle_psl_slice_error(ctx, dsisr,
ctx                55 drivers/misc/cxl/irq.c 		if (ctx->pending_afu_err) {
ctx                63 drivers/misc/cxl/irq.c 			dev_err_ratelimited(&ctx->afu->dev, "CXL AFU Error undelivered to pe %i: 0x%016llx\n",
ctx                64 drivers/misc/cxl/irq.c 					    ctx->pe, irq_info->afu_err);
ctx                66 drivers/misc/cxl/irq.c 			spin_lock(&ctx->lock);
ctx                67 drivers/misc/cxl/irq.c 			ctx->afu_err = irq_info->afu_err;
ctx                68 drivers/misc/cxl/irq.c 			ctx->pending_afu_err = 1;
ctx                69 drivers/misc/cxl/irq.c 			spin_unlock(&ctx->lock);
ctx                71 drivers/misc/cxl/irq.c 			wake_up_all(&ctx->wq);
ctx                74 drivers/misc/cxl/irq.c 		cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_A, 0);
ctx                84 drivers/misc/cxl/irq.c irqreturn_t cxl_irq_psl8(int irq, struct cxl_context *ctx, struct cxl_irq_info *irq_info)
ctx                91 drivers/misc/cxl/irq.c 	trace_cxl_psl_irq(ctx, irq, dsisr, dar);
ctx                93 drivers/misc/cxl/irq.c 	pr_devel("CXL interrupt %i for afu pe: %i DSISR: %#llx DAR: %#llx\n", irq, ctx->pe, dsisr, dar);
ctx               106 drivers/misc/cxl/irq.c 		pr_devel("Scheduling segment miss handling for later pe: %i\n", ctx->pe);
ctx               107 drivers/misc/cxl/irq.c 		return schedule_cxl_fault(ctx, dsisr, dar);
ctx               127 drivers/misc/cxl/irq.c 		pr_devel("Scheduling page fault handling for later pe: %i\n", ctx->pe);
ctx               128 drivers/misc/cxl/irq.c 		return schedule_cxl_fault(ctx, dsisr, dar);
ctx               135 drivers/misc/cxl/irq.c 		return cxl_ops->handle_psl_slice_error(ctx, dsisr,
ctx               140 drivers/misc/cxl/irq.c 		if (ctx->pending_afu_err) {
ctx               148 drivers/misc/cxl/irq.c 			dev_err_ratelimited(&ctx->afu->dev, "CXL AFU Error "
ctx               150 drivers/misc/cxl/irq.c 					    ctx->pe, irq_info->afu_err);
ctx               152 drivers/misc/cxl/irq.c 			spin_lock(&ctx->lock);
ctx               153 drivers/misc/cxl/irq.c 			ctx->afu_err = irq_info->afu_err;
ctx               154 drivers/misc/cxl/irq.c 			ctx->pending_afu_err = true;
ctx               155 drivers/misc/cxl/irq.c 			spin_unlock(&ctx->lock);
ctx               157 drivers/misc/cxl/irq.c 			wake_up_all(&ctx->wq);
ctx               160 drivers/misc/cxl/irq.c 		cxl_ops->ack_irq(ctx, CXL_PSL_TFC_An_A, 0);
ctx               172 drivers/misc/cxl/irq.c 	struct cxl_context *ctx = data;
ctx               191 drivers/misc/cxl/irq.c 		irq_off = hwirq - ctx->irqs.offset[r];
ctx               192 drivers/misc/cxl/irq.c 		range = ctx->irqs.range[r];
ctx               201 drivers/misc/cxl/irq.c 		     ctx->pe, irq, hwirq);
ctx               205 drivers/misc/cxl/irq.c 	trace_cxl_afu_irq(ctx, afu_irq, irq, hwirq);
ctx               207 drivers/misc/cxl/irq.c 	       afu_irq, ctx->pe, irq, hwirq);
ctx               209 drivers/misc/cxl/irq.c 	if (unlikely(!ctx->irq_bitmap)) {
ctx               213 drivers/misc/cxl/irq.c 	spin_lock(&ctx->lock);
ctx               214 drivers/misc/cxl/irq.c 	set_bit(afu_irq - 1, ctx->irq_bitmap);
ctx               215 drivers/misc/cxl/irq.c 	ctx->pending_irq = true;
ctx               216 drivers/misc/cxl/irq.c 	spin_unlock(&ctx->lock);
ctx               218 drivers/misc/cxl/irq.c 	wake_up_all(&ctx->wq);
ctx               280 drivers/misc/cxl/irq.c void afu_irq_name_free(struct cxl_context *ctx)
ctx               284 drivers/misc/cxl/irq.c 	list_for_each_entry_safe(irq_name, tmp, &ctx->irq_names, list) {
ctx               291 drivers/misc/cxl/irq.c int afu_allocate_irqs(struct cxl_context *ctx, u32 count)
ctx               310 drivers/misc/cxl/irq.c 	if ((rc = cxl_ops->alloc_irq_ranges(&ctx->irqs, ctx->afu->adapter,
ctx               316 drivers/misc/cxl/irq.c 		ctx->irqs.offset[0] = ctx->afu->native->psl_hwirq;
ctx               317 drivers/misc/cxl/irq.c 		ctx->irqs.range[0] = 1;
ctx               320 drivers/misc/cxl/irq.c 	ctx->irq_count = count;
ctx               321 drivers/misc/cxl/irq.c 	ctx->irq_bitmap = kcalloc(BITS_TO_LONGS(count),
ctx               322 drivers/misc/cxl/irq.c 				  sizeof(*ctx->irq_bitmap), GFP_KERNEL);
ctx               323 drivers/misc/cxl/irq.c 	if (!ctx->irq_bitmap)
ctx               331 drivers/misc/cxl/irq.c 		for (i = 0; i < ctx->irqs.range[r]; i++) {
ctx               337 drivers/misc/cxl/irq.c 						   dev_name(&ctx->afu->dev),
ctx               338 drivers/misc/cxl/irq.c 						   ctx->pe, j);
ctx               344 drivers/misc/cxl/irq.c 			list_add_tail(&irq_name->list, &ctx->irq_names);
ctx               351 drivers/misc/cxl/irq.c 	cxl_ops->release_irq_ranges(&ctx->irqs, ctx->afu->adapter);
ctx               352 drivers/misc/cxl/irq.c 	afu_irq_name_free(ctx);
ctx               356 drivers/misc/cxl/irq.c static void afu_register_hwirqs(struct cxl_context *ctx)
ctx               364 drivers/misc/cxl/irq.c 	irq_name = list_first_entry(&ctx->irq_names, struct cxl_irq_name, list);
ctx               366 drivers/misc/cxl/irq.c 		hwirq = ctx->irqs.offset[r];
ctx               367 drivers/misc/cxl/irq.c 		for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) {
ctx               382 drivers/misc/cxl/irq.c 			cxl_map_irq(ctx->afu->adapter, hwirq, handler, ctx,
ctx               389 drivers/misc/cxl/irq.c int afu_register_irqs(struct cxl_context *ctx, u32 count)
ctx               393 drivers/misc/cxl/irq.c 	rc = afu_allocate_irqs(ctx, count);
ctx               397 drivers/misc/cxl/irq.c 	afu_register_hwirqs(ctx);
ctx               401 drivers/misc/cxl/irq.c void afu_release_irqs(struct cxl_context *ctx, void *cookie)
ctx               408 drivers/misc/cxl/irq.c 		hwirq = ctx->irqs.offset[r];
ctx               409 drivers/misc/cxl/irq.c 		for (i = 0; i < ctx->irqs.range[r]; hwirq++, i++) {
ctx               416 drivers/misc/cxl/irq.c 	afu_irq_name_free(ctx);
ctx               417 drivers/misc/cxl/irq.c 	cxl_ops->release_irq_ranges(&ctx->irqs, ctx->afu->adapter);
ctx               419 drivers/misc/cxl/irq.c 	ctx->irq_count = 0;
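
irq.c multiplexes all AFU interrupts of a context through one handler that translates the hardware irq back to a 1-based AFU irq number and records it in ctx->irq_bitmap for afu_read() to drain. A sketch of that bitmap protocol, with illustrative names:

#include <linux/bitmap.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct demo_irqs {
	unsigned long *bitmap;
	unsigned int count;
	spinlock_t lock;
};

static int demo_irqs_init(struct demo_irqs *d, unsigned int count)
{
	d->count = count;
	d->bitmap = kcalloc(BITS_TO_LONGS(count), sizeof(*d->bitmap), GFP_KERNEL);
	if (!d->bitmap)
		return -ENOMEM;
	spin_lock_init(&d->lock);
	return 0;
}

/* hard-irq side: mark AFU interrupt N pending (1-based, like event.irq.irq) */
static void demo_irq_pend(struct demo_irqs *d, unsigned int afu_irq)
{
	spin_lock(&d->lock);
	set_bit(afu_irq - 1, d->bitmap);
	spin_unlock(&d->lock);
}

/* reader side: pop the lowest pending interrupt, 0 if none */
static unsigned int demo_irq_pop(struct demo_irqs *d)
{
	unsigned int irq = 0, bit;
	unsigned long flags;

	spin_lock_irqsave(&d->lock, flags);
	bit = find_first_bit(d->bitmap, d->count);
	if (bit < d->count) {
		clear_bit(bit, d->bitmap);
		irq = bit + 1;
	}
	spin_unlock_irqrestore(&d->lock, flags);
	return irq;
}
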
ctx                57 drivers/misc/cxl/main.c static inline void _cxl_slbia(struct cxl_context *ctx, struct mm_struct *mm)
ctx                61 drivers/misc/cxl/main.c 	if (ctx->mm != mm)
ctx                65 drivers/misc/cxl/main.c 		 ctx->afu->adapter->adapter_num, ctx->afu->slice, ctx->pe);
ctx                67 drivers/misc/cxl/main.c 	spin_lock_irqsave(&ctx->sste_lock, flags);
ctx                68 drivers/misc/cxl/main.c 	trace_cxl_slbia(ctx);
ctx                69 drivers/misc/cxl/main.c 	memset(ctx->sstp, 0, ctx->sst_size);
ctx                70 drivers/misc/cxl/main.c 	spin_unlock_irqrestore(&ctx->sste_lock, flags);
ctx                72 drivers/misc/cxl/main.c 	cxl_afu_slbia(ctx->afu);
ctx                79 drivers/misc/cxl/main.c 	struct cxl_context *ctx;
ctx                93 drivers/misc/cxl/main.c 			idr_for_each_entry(&afu->contexts_idr, ctx, id)
ctx                94 drivers/misc/cxl/main.c 				_cxl_slbia(ctx, mm);
ctx               107 drivers/misc/cxl/main.c int cxl_alloc_sst(struct cxl_context *ctx)
ctx               115 drivers/misc/cxl/main.c 	ctx->sst_size = PAGE_SIZE;
ctx               116 drivers/misc/cxl/main.c 	ctx->sst_lru = 0;
ctx               117 drivers/misc/cxl/main.c 	ctx->sstp = (struct cxl_sste *)get_zeroed_page(GFP_KERNEL);
ctx               118 drivers/misc/cxl/main.c 	if (!ctx->sstp) {
ctx               122 drivers/misc/cxl/main.c 	pr_devel("SSTP allocated at 0x%p\n", ctx->sstp);
ctx               124 drivers/misc/cxl/main.c 	vsid  = get_kernel_vsid((u64)ctx->sstp, mmu_kernel_ssize) << 12;
ctx               129 drivers/misc/cxl/main.c 	size = (((u64)ctx->sst_size >> 8) - 1) << CXL_SSTP0_An_SegTableSize_SHIFT;
ctx               143 drivers/misc/cxl/main.c 	sstp1 |= (u64)ctx->sstp & ea_mask;
ctx               147 drivers/misc/cxl/main.c 			(u64)ctx->sstp, (u64)ctx->sstp & ESID_MASK, mmu_kernel_ssize, vsid, sstp0, sstp1);
ctx               150 drivers/misc/cxl/main.c 	ctx->sstp0 = sstp0;
ctx               151 drivers/misc/cxl/main.c 	ctx->sstp1 = sstp1;
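
main.c's cxl_alloc_sst() backs the segment table with a single zeroed page and then encodes its address and size into the SSTP0/SSTP1 register images. The allocation half is the standard page-granular pattern; a sketch (the register encoding itself is hardware-specific and omitted):

#include <linux/gfp.h>
#include <linux/mm.h>

static void *demo_alloc_table(void)
{
	/* one zeroed page, so the hardware sees an all-invalid table */
	return (void *)get_zeroed_page(GFP_KERNEL);
}

static void demo_free_table(void *table)
{
	free_page((unsigned long)table);
}
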
ctx               408 drivers/misc/cxl/native.c static void slb_invalid(struct cxl_context *ctx)
ctx               410 drivers/misc/cxl/native.c 	struct cxl *adapter = ctx->afu->adapter;
ctx               413 drivers/misc/cxl/native.c 	WARN_ON(!mutex_is_locked(&ctx->afu->native->spa_mutex));
ctx               416 drivers/misc/cxl/native.c 			((u64)be32_to_cpu(ctx->elem->common.pid) << 32) |
ctx               417 drivers/misc/cxl/native.c 			be32_to_cpu(ctx->elem->lpid));
ctx               430 drivers/misc/cxl/native.c static int do_process_element_cmd(struct cxl_context *ctx,
ctx               437 drivers/misc/cxl/native.c 	trace_cxl_llcmd(ctx, cmd);
ctx               439 drivers/misc/cxl/native.c 	WARN_ON(!ctx->afu->enabled);
ctx               441 drivers/misc/cxl/native.c 	ctx->elem->software_state = cpu_to_be32(pe_state);
ctx               443 drivers/misc/cxl/native.c 	*(ctx->afu->native->sw_command_status) = cpu_to_be64(cmd | 0 | ctx->pe);
ctx               445 drivers/misc/cxl/native.c 	cxl_p1n_write(ctx->afu, CXL_PSL_LLCMD_An, cmd | ctx->pe);
ctx               448 drivers/misc/cxl/native.c 			dev_warn(&ctx->afu->dev, "WARNING: Process Element Command timed out!\n");
ctx               452 drivers/misc/cxl/native.c 		if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu)) {
ctx               453 drivers/misc/cxl/native.c 			dev_warn(&ctx->afu->dev, "WARNING: Device link down, aborting Process Element Command!\n");
ctx               457 drivers/misc/cxl/native.c 		state = be64_to_cpup(ctx->afu->native->sw_command_status);
ctx               464 drivers/misc/cxl/native.c 		    (cmd | (cmd >> 16) | ctx->pe))
ctx               477 drivers/misc/cxl/native.c 	trace_cxl_llcmd_done(ctx, cmd, rc);
ctx               481 drivers/misc/cxl/native.c static int add_process_element(struct cxl_context *ctx)
ctx               485 drivers/misc/cxl/native.c 	mutex_lock(&ctx->afu->native->spa_mutex);
ctx               486 drivers/misc/cxl/native.c 	pr_devel("%s Adding pe: %i started\n", __func__, ctx->pe);
ctx               487 drivers/misc/cxl/native.c 	if (!(rc = do_process_element_cmd(ctx, CXL_SPA_SW_CMD_ADD, CXL_PE_SOFTWARE_STATE_V)))
ctx               488 drivers/misc/cxl/native.c 		ctx->pe_inserted = true;
ctx               489 drivers/misc/cxl/native.c 	pr_devel("%s Adding pe: %i finished\n", __func__, ctx->pe);
ctx               490 drivers/misc/cxl/native.c 	mutex_unlock(&ctx->afu->native->spa_mutex);
ctx               494 drivers/misc/cxl/native.c static int terminate_process_element(struct cxl_context *ctx)
ctx               499 drivers/misc/cxl/native.c 	if (!(ctx->elem->software_state & cpu_to_be32(CXL_PE_SOFTWARE_STATE_V)))
ctx               502 drivers/misc/cxl/native.c 	mutex_lock(&ctx->afu->native->spa_mutex);
ctx               503 drivers/misc/cxl/native.c 	pr_devel("%s Terminate pe: %i started\n", __func__, ctx->pe);
ctx               508 drivers/misc/cxl/native.c 	if (cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               509 drivers/misc/cxl/native.c 		rc = do_process_element_cmd(ctx, CXL_SPA_SW_CMD_TERMINATE,
ctx               511 drivers/misc/cxl/native.c 	ctx->elem->software_state = 0;	/* Remove Valid bit */
ctx               512 drivers/misc/cxl/native.c 	pr_devel("%s Terminate pe: %i finished\n", __func__, ctx->pe);
ctx               513 drivers/misc/cxl/native.c 	mutex_unlock(&ctx->afu->native->spa_mutex);
ctx               517 drivers/misc/cxl/native.c static int remove_process_element(struct cxl_context *ctx)
ctx               521 drivers/misc/cxl/native.c 	mutex_lock(&ctx->afu->native->spa_mutex);
ctx               522 drivers/misc/cxl/native.c 	pr_devel("%s Remove pe: %i started\n", __func__, ctx->pe);
ctx               527 drivers/misc/cxl/native.c 	if (cxl_ops->link_ok(ctx->afu->adapter, ctx->afu))
ctx               528 drivers/misc/cxl/native.c 		rc = do_process_element_cmd(ctx, CXL_SPA_SW_CMD_REMOVE, 0);
ctx               531 drivers/misc/cxl/native.c 		ctx->pe_inserted = false;
ctx               533 drivers/misc/cxl/native.c 		slb_invalid(ctx);
ctx               534 drivers/misc/cxl/native.c 	pr_devel("%s Remove pe: %i finished\n", __func__, ctx->pe);
ctx               535 drivers/misc/cxl/native.c 	mutex_unlock(&ctx->afu->native->spa_mutex);
ctx               540 drivers/misc/cxl/native.c void cxl_assign_psn_space(struct cxl_context *ctx)
ctx               542 drivers/misc/cxl/native.c 	if (!ctx->afu->pp_size || ctx->master) {
ctx               543 drivers/misc/cxl/native.c 		ctx->psn_phys = ctx->afu->psn_phys;
ctx               544 drivers/misc/cxl/native.c 		ctx->psn_size = ctx->afu->adapter->ps_size;
ctx               546 drivers/misc/cxl/native.c 		ctx->psn_phys = ctx->afu->psn_phys +
ctx               547 drivers/misc/cxl/native.c 			(ctx->afu->native->pp_offset + ctx->afu->pp_size * ctx->pe);
ctx               548 drivers/misc/cxl/native.c 		ctx->psn_size = ctx->afu->pp_size;
ctx               627 drivers/misc/cxl/native.c static u64 calculate_sr(struct cxl_context *ctx)
ctx               629 drivers/misc/cxl/native.c 	return cxl_calculate_sr(ctx->master, ctx->kernel, false,
ctx               633 drivers/misc/cxl/native.c static void update_ivtes_directed(struct cxl_context *ctx)
ctx               635 drivers/misc/cxl/native.c 	bool need_update = (ctx->status == STARTED);
ctx               639 drivers/misc/cxl/native.c 		WARN_ON(terminate_process_element(ctx));
ctx               640 drivers/misc/cxl/native.c 		WARN_ON(remove_process_element(ctx));
ctx               644 drivers/misc/cxl/native.c 		ctx->elem->ivte_offsets[r] = cpu_to_be16(ctx->irqs.offset[r]);
ctx               645 drivers/misc/cxl/native.c 		ctx->elem->ivte_ranges[r] = cpu_to_be16(ctx->irqs.range[r]);
ctx               659 drivers/misc/cxl/native.c 		WARN_ON(add_process_element(ctx));
ctx               662 drivers/misc/cxl/native.c static int process_element_entry_psl9(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               667 drivers/misc/cxl/native.c 	cxl_assign_psn_space(ctx);
ctx               669 drivers/misc/cxl/native.c 	ctx->elem->ctxtime = 0; /* disable */
ctx               670 drivers/misc/cxl/native.c 	ctx->elem->lpid = cpu_to_be32(mfspr(SPRN_LPID));
ctx               671 drivers/misc/cxl/native.c 	ctx->elem->haurp = 0; /* disable */
ctx               673 drivers/misc/cxl/native.c 	if (ctx->kernel)
ctx               676 drivers/misc/cxl/native.c 		if (ctx->mm == NULL) {
ctx               678 drivers/misc/cxl/native.c 				__func__, ctx->pe, pid_nr(ctx->pid));
ctx               681 drivers/misc/cxl/native.c 		pid = ctx->mm->context.id;
ctx               685 drivers/misc/cxl/native.c 	if (!(ctx->tidr) && (ctx->assign_tidr)) {
ctx               689 drivers/misc/cxl/native.c 		ctx->tidr = current->thread.tidr;
ctx               690 drivers/misc/cxl/native.c 		pr_devel("%s: current tidr: %d\n", __func__, ctx->tidr);
ctx               693 drivers/misc/cxl/native.c 	ctx->elem->common.tid = cpu_to_be32(ctx->tidr);
ctx               694 drivers/misc/cxl/native.c 	ctx->elem->common.pid = cpu_to_be32(pid);
ctx               696 drivers/misc/cxl/native.c 	ctx->elem->sr = cpu_to_be64(calculate_sr(ctx));
ctx               698 drivers/misc/cxl/native.c 	ctx->elem->common.csrp = 0; /* disable */
ctx               700 drivers/misc/cxl/native.c 	cxl_prefault(ctx, wed);
ctx               706 drivers/misc/cxl/native.c 	if (ctx->irqs.range[0] == 0) {
ctx               707 drivers/misc/cxl/native.c 		ctx->irqs.offset[0] = ctx->afu->native->psl_hwirq;
ctx               708 drivers/misc/cxl/native.c 		ctx->irqs.range[0] = 1;
ctx               711 drivers/misc/cxl/native.c 	ctx->elem->common.amr = cpu_to_be64(amr);
ctx               712 drivers/misc/cxl/native.c 	ctx->elem->common.wed = cpu_to_be64(wed);
ctx               717 drivers/misc/cxl/native.c int cxl_attach_afu_directed_psl9(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               722 drivers/misc/cxl/native.c 	result = process_element_entry_psl9(ctx, wed, amr);
ctx               726 drivers/misc/cxl/native.c 	update_ivtes_directed(ctx);
ctx               729 drivers/misc/cxl/native.c 	result = cxl_ops->afu_check_and_enable(ctx->afu);
ctx               733 drivers/misc/cxl/native.c 	return add_process_element(ctx);
ctx               736 drivers/misc/cxl/native.c int cxl_attach_afu_directed_psl8(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               741 drivers/misc/cxl/native.c 	cxl_assign_psn_space(ctx);
ctx               743 drivers/misc/cxl/native.c 	ctx->elem->ctxtime = 0; /* disable */
ctx               744 drivers/misc/cxl/native.c 	ctx->elem->lpid = cpu_to_be32(mfspr(SPRN_LPID));
ctx               745 drivers/misc/cxl/native.c 	ctx->elem->haurp = 0; /* disable */
ctx               746 drivers/misc/cxl/native.c 	ctx->elem->u.sdr = cpu_to_be64(mfspr(SPRN_SDR1));
ctx               749 drivers/misc/cxl/native.c 	if (ctx->kernel)
ctx               751 drivers/misc/cxl/native.c 	ctx->elem->common.tid = 0;
ctx               752 drivers/misc/cxl/native.c 	ctx->elem->common.pid = cpu_to_be32(pid);
ctx               754 drivers/misc/cxl/native.c 	ctx->elem->sr = cpu_to_be64(calculate_sr(ctx));
ctx               756 drivers/misc/cxl/native.c 	ctx->elem->common.csrp = 0; /* disable */
ctx               757 drivers/misc/cxl/native.c 	ctx->elem->common.u.psl8.aurp0 = 0; /* disable */
ctx               758 drivers/misc/cxl/native.c 	ctx->elem->common.u.psl8.aurp1 = 0; /* disable */
ctx               760 drivers/misc/cxl/native.c 	cxl_prefault(ctx, wed);
ctx               762 drivers/misc/cxl/native.c 	ctx->elem->common.u.psl8.sstp0 = cpu_to_be64(ctx->sstp0);
ctx               763 drivers/misc/cxl/native.c 	ctx->elem->common.u.psl8.sstp1 = cpu_to_be64(ctx->sstp1);
ctx               769 drivers/misc/cxl/native.c 	if (ctx->irqs.range[0] == 0) {
ctx               770 drivers/misc/cxl/native.c 		ctx->irqs.offset[0] = ctx->afu->native->psl_hwirq;
ctx               771 drivers/misc/cxl/native.c 		ctx->irqs.range[0] = 1;
ctx               774 drivers/misc/cxl/native.c 	update_ivtes_directed(ctx);
ctx               776 drivers/misc/cxl/native.c 	ctx->elem->common.amr = cpu_to_be64(amr);
ctx               777 drivers/misc/cxl/native.c 	ctx->elem->common.wed = cpu_to_be64(wed);
ctx               780 drivers/misc/cxl/native.c 	if ((result = cxl_ops->afu_check_and_enable(ctx->afu)))
ctx               783 drivers/misc/cxl/native.c 	return add_process_element(ctx);
ctx               878 drivers/misc/cxl/native.c void cxl_update_dedicated_ivtes_psl9(struct cxl_context *ctx)
ctx               883 drivers/misc/cxl/native.c 		ctx->elem->ivte_offsets[r] = cpu_to_be16(ctx->irqs.offset[r]);
ctx               884 drivers/misc/cxl/native.c 		ctx->elem->ivte_ranges[r] = cpu_to_be16(ctx->irqs.range[r]);
ctx               888 drivers/misc/cxl/native.c void cxl_update_dedicated_ivtes_psl8(struct cxl_context *ctx)
ctx               890 drivers/misc/cxl/native.c 	struct cxl_afu *afu = ctx->afu;
ctx               893 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.offset[0] & 0xffff) << 48) |
ctx               894 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.offset[1] & 0xffff) << 32) |
ctx               895 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.offset[2] & 0xffff) << 16) |
ctx               896 drivers/misc/cxl/native.c 			((u64)ctx->irqs.offset[3] & 0xffff));
ctx               898 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.range[0] & 0xffff) << 48) |
ctx               899 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.range[1] & 0xffff) << 32) |
ctx               900 drivers/misc/cxl/native.c 		       (((u64)ctx->irqs.range[2] & 0xffff) << 16) |
ctx               901 drivers/misc/cxl/native.c 			((u64)ctx->irqs.range[3] & 0xffff));
ctx               904 drivers/misc/cxl/native.c int cxl_attach_dedicated_process_psl9(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               906 drivers/misc/cxl/native.c 	struct cxl_afu *afu = ctx->afu;
ctx               910 drivers/misc/cxl/native.c 	result = process_element_entry_psl9(ctx, wed, amr);
ctx               914 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->update_dedicated_ivtes)
ctx               915 drivers/misc/cxl/native.c 		afu->adapter->native->sl_ops->update_dedicated_ivtes(ctx);
ctx               917 drivers/misc/cxl/native.c 	ctx->elem->software_state = cpu_to_be32(CXL_PE_SOFTWARE_STATE_V);
ctx               932 drivers/misc/cxl/native.c int cxl_attach_dedicated_process_psl8(struct cxl_context *ctx, u64 wed, u64 amr)
ctx               934 drivers/misc/cxl/native.c 	struct cxl_afu *afu = ctx->afu;
ctx               939 drivers/misc/cxl/native.c 	if (ctx->kernel)
ctx               943 drivers/misc/cxl/native.c 	cxl_p1n_write(afu, CXL_PSL_SR_An, calculate_sr(ctx));
ctx               945 drivers/misc/cxl/native.c 	if ((rc = cxl_write_sstp(afu, ctx->sstp0, ctx->sstp1)))
ctx               948 drivers/misc/cxl/native.c 	cxl_prefault(ctx, wed);
ctx               950 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->update_dedicated_ivtes)
ctx               951 drivers/misc/cxl/native.c 		afu->adapter->native->sl_ops->update_dedicated_ivtes(ctx);
ctx               956 drivers/misc/cxl/native.c 	cxl_assign_psn_space(ctx);
ctx              1008 drivers/misc/cxl/native.c static int native_attach_process(struct cxl_context *ctx, bool kernel,
ctx              1011 drivers/misc/cxl/native.c 	if (!cxl_ops->link_ok(ctx->afu->adapter, ctx->afu)) {
ctx              1016 drivers/misc/cxl/native.c 	ctx->kernel = kernel;
ctx              1017 drivers/misc/cxl/native.c 	if ((ctx->afu->current_mode == CXL_MODE_DIRECTED) &&
ctx              1018 drivers/misc/cxl/native.c 	    (ctx->afu->adapter->native->sl_ops->attach_afu_directed))
ctx              1019 drivers/misc/cxl/native.c 		return ctx->afu->adapter->native->sl_ops->attach_afu_directed(ctx, wed, amr);
ctx              1021 drivers/misc/cxl/native.c 	if ((ctx->afu->current_mode == CXL_MODE_DEDICATED) &&
ctx              1022 drivers/misc/cxl/native.c 	    (ctx->afu->adapter->native->sl_ops->attach_dedicated_process))
ctx              1023 drivers/misc/cxl/native.c 		return ctx->afu->adapter->native->sl_ops->attach_dedicated_process(ctx, wed, amr);
ctx              1028 drivers/misc/cxl/native.c static inline int detach_process_native_dedicated(struct cxl_context *ctx)
ctx              1046 drivers/misc/cxl/native.c 	cxl_ops->afu_reset(ctx->afu);
ctx              1047 drivers/misc/cxl/native.c 	cxl_afu_disable(ctx->afu);
ctx              1048 drivers/misc/cxl/native.c 	cxl_psl_purge(ctx->afu);
ctx              1052 drivers/misc/cxl/native.c static void native_update_ivtes(struct cxl_context *ctx)
ctx              1054 drivers/misc/cxl/native.c 	if (ctx->afu->current_mode == CXL_MODE_DIRECTED)
ctx              1055 drivers/misc/cxl/native.c 		return update_ivtes_directed(ctx);
ctx              1056 drivers/misc/cxl/native.c 	if ((ctx->afu->current_mode == CXL_MODE_DEDICATED) &&
ctx              1057 drivers/misc/cxl/native.c 	    (ctx->afu->adapter->native->sl_ops->update_dedicated_ivtes))
ctx              1058 drivers/misc/cxl/native.c 		return ctx->afu->adapter->native->sl_ops->update_dedicated_ivtes(ctx);
ctx              1062 drivers/misc/cxl/native.c static inline int detach_process_native_afu_directed(struct cxl_context *ctx)
ctx              1064 drivers/misc/cxl/native.c 	if (!ctx->pe_inserted)
ctx              1066 drivers/misc/cxl/native.c 	if (terminate_process_element(ctx))
ctx              1068 drivers/misc/cxl/native.c 	if (remove_process_element(ctx))
ctx              1074 drivers/misc/cxl/native.c static int native_detach_process(struct cxl_context *ctx)
ctx              1076 drivers/misc/cxl/native.c 	trace_cxl_detach(ctx);
ctx              1078 drivers/misc/cxl/native.c 	if (ctx->afu->current_mode == CXL_MODE_DEDICATED)
ctx              1079 drivers/misc/cxl/native.c 		return detach_process_native_dedicated(ctx);
ctx              1081 drivers/misc/cxl/native.c 	return detach_process_native_afu_directed(ctx);
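
native_attach_process() and native_detach_process() above never hard-code a PSL revision: they dispatch through the adapter's sl_ops function-pointer table, selecting a handler by the AFU's current mode and calling it only if the backend supplies one. A hedged sketch of that ops-table pattern, with invented names throughout:

#include <stdio.h>

/* Function-pointer ops table in the style of the cxl sl_ops
 * dispatch above. All names here are illustrative, not the
 * driver's. */
enum mode { MODE_DIRECTED, MODE_DEDICATED };

struct backend_ops {
	int (*attach_directed)(int pe);
	int (*attach_dedicated)(int pe);
};

static int psl9_attach_dedicated(int pe)
{
	printf("psl9: attach dedicated PE %d\n", pe);
	return 0;
}

static const struct backend_ops psl9_ops = {
	.attach_dedicated = psl9_attach_dedicated,
	/* .attach_directed left NULL: not supported by this backend */
};

static int attach(const struct backend_ops *ops, enum mode m, int pe)
{
	if (m == MODE_DIRECTED && ops->attach_directed)
		return ops->attach_directed(pe);
	if (m == MODE_DEDICATED && ops->attach_dedicated)
		return ops->attach_dedicated(pe);
	return -1;	/* no handler for this mode */
}

int main(void)
{
	return attach(&psl9_ops, MODE_DEDICATED, 7);
}
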
ctx              1103 drivers/misc/cxl/native.c void cxl_native_irq_dump_regs_psl9(struct cxl_context *ctx)
ctx              1107 drivers/misc/cxl/native.c 	fir1 = cxl_p1_read(ctx->afu->adapter, CXL_PSL9_FIR1);
ctx              1109 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "PSL_FIR1: 0x%016llx\n", fir1);
ctx              1110 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->register_serr_irq) {
ctx              1111 drivers/misc/cxl/native.c 		serr = cxl_p1n_read(ctx->afu, CXL_PSL_SERR_An);
ctx              1112 drivers/misc/cxl/native.c 		cxl_afu_decode_psl_serr(ctx->afu, serr);
ctx              1116 drivers/misc/cxl/native.c void cxl_native_irq_dump_regs_psl8(struct cxl_context *ctx)
ctx              1120 drivers/misc/cxl/native.c 	fir1 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR1);
ctx              1121 drivers/misc/cxl/native.c 	fir2 = cxl_p1_read(ctx->afu->adapter, CXL_PSL_FIR2);
ctx              1122 drivers/misc/cxl/native.c 	fir_slice = cxl_p1n_read(ctx->afu, CXL_PSL_FIR_SLICE_An);
ctx              1123 drivers/misc/cxl/native.c 	afu_debug = cxl_p1n_read(ctx->afu, CXL_AFU_DEBUG_An);
ctx              1125 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "PSL_FIR1: 0x%016llx\n", fir1);
ctx              1126 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "PSL_FIR2: 0x%016llx\n", fir2);
ctx              1127 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->register_serr_irq) {
ctx              1128 drivers/misc/cxl/native.c 		serr = cxl_p1n_read(ctx->afu, CXL_PSL_SERR_An);
ctx              1129 drivers/misc/cxl/native.c 		cxl_afu_decode_psl_serr(ctx->afu, serr);
ctx              1131 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "PSL_FIR_SLICE_An: 0x%016llx\n", fir_slice);
ctx              1132 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "CXL_PSL_AFU_DEBUG_An: 0x%016llx\n", afu_debug);
ctx              1135 drivers/misc/cxl/native.c static irqreturn_t native_handle_psl_slice_error(struct cxl_context *ctx,
ctx              1139 drivers/misc/cxl/native.c 	dev_crit(&ctx->afu->dev, "PSL ERROR STATUS: 0x%016llx\n", errstat);
ctx              1141 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->psl_irq_dump_registers)
ctx              1142 drivers/misc/cxl/native.c 		ctx->afu->adapter->native->sl_ops->psl_irq_dump_registers(ctx);
ctx              1144 drivers/misc/cxl/native.c 	if (ctx->afu->adapter->native->sl_ops->debugfs_stop_trace) {
ctx              1145 drivers/misc/cxl/native.c 		dev_crit(&ctx->afu->dev, "STOPPING CXL TRACE\n");
ctx              1146 drivers/misc/cxl/native.c 		ctx->afu->adapter->native->sl_ops->debugfs_stop_trace(ctx->afu->adapter);
ctx              1149 drivers/misc/cxl/native.c 	return cxl_ops->ack_irq(ctx, 0, errstat);
ctx              1176 drivers/misc/cxl/native.c 	struct cxl_context *ctx;
ctx              1198 drivers/misc/cxl/native.c 	ctx = idr_find(&afu->contexts_idr, ph);
ctx              1199 drivers/misc/cxl/native.c 	if (ctx) {
ctx              1201 drivers/misc/cxl/native.c 			ret = afu->adapter->native->sl_ops->handle_interrupt(irq, ctx, &irq_info);
ctx              1216 drivers/misc/cxl/native.c static void native_irq_wait(struct cxl_context *ctx)
ctx              1227 drivers/misc/cxl/native.c 		ph = cxl_p2n_read(ctx->afu, CXL_PSL_PEHandle_An) & 0xffff;
ctx              1228 drivers/misc/cxl/native.c 		if (ph != ctx->pe)
ctx              1230 drivers/misc/cxl/native.c 		dsisr = cxl_p2n_read(ctx->afu, CXL_PSL_DSISR_An);
ctx              1244 drivers/misc/cxl/native.c 	dev_warn(&ctx->afu->dev, "WARNING: waiting on DSI for PE %i"
ctx              1451 drivers/misc/cxl/native.c static int native_ack_irq(struct cxl_context *ctx, u64 tfc, u64 psl_reset_mask)
ctx              1453 drivers/misc/cxl/native.c 	trace_cxl_psl_irq_ack(ctx, tfc);
ctx              1455 drivers/misc/cxl/native.c 		cxl_p2n_write(ctx->afu, CXL_PSL_TFC_An, tfc);
ctx              1457 drivers/misc/cxl/native.c 		recover_psl_err(ctx->afu, psl_reset_mask);
ctx              1973 drivers/misc/cxl/pci.c 	struct cxl_context *ctx;
ctx              2009 drivers/misc/cxl/pci.c 			ctx = cxl_get_context(afu_dev);
ctx              2011 drivers/misc/cxl/pci.c 			if (ctx && cxl_release_context(ctx))
ctx              2014 drivers/misc/cxl/pci.c 			ctx = cxl_dev_context_init(afu_dev);
ctx              2015 drivers/misc/cxl/pci.c 			if (IS_ERR(ctx))
ctx              2018 drivers/misc/cxl/pci.c 			afu_dev->dev.archdata.cxl_ctx = ctx;
ctx                64 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx),
ctx                66 drivers/misc/cxl/trace.h 	TP_ARGS(ctx),
ctx                75 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx                76 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx                77 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx                89 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 wed, s16 num_interrupts, u64 amr),
ctx                91 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, wed, num_interrupts, amr),
ctx               104 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               105 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               106 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               107 drivers/misc/cxl/trace.h 		__entry->pid = pid_nr(ctx->pid);
ctx               125 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx),
ctx               126 drivers/misc/cxl/trace.h 	TP_ARGS(ctx)
ctx               130 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, int afu_irq, int virq, irq_hw_number_t hwirq),
ctx               132 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, afu_irq, virq, hwirq),
ctx               144 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               145 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               146 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               163 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, int irq, u64 dsisr, u64 dar),
ctx               165 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, irq, dsisr, dar),
ctx               177 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               178 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               179 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               197 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, int irq, u64 dsisr, u64 dar),
ctx               199 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, irq, dsisr, dar),
ctx               211 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               212 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               213 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               230 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 tfc),
ctx               232 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, tfc),
ctx               242 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               243 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               244 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               257 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 dar),
ctx               259 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, dar),
ctx               269 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               270 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               271 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               284 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, unsigned int idx, u64 e, u64 v),
ctx               286 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, idx, e, v),
ctx               298 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               299 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               300 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               317 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 dsisr, u64 dar),
ctx               319 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, dsisr, dar),
ctx               330 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               331 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               332 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               347 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 cmd),
ctx               349 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, cmd),
ctx               359 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               360 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               361 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               374 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx, u64 cmd, int rc),
ctx               376 drivers/misc/cxl/trace.h 	TP_ARGS(ctx, cmd, rc),
ctx               387 drivers/misc/cxl/trace.h 		__entry->card = ctx->afu->adapter->adapter_num;
ctx               388 drivers/misc/cxl/trace.h 		__entry->afu = ctx->afu->slice;
ctx               389 drivers/misc/cxl/trace.h 		__entry->pe = ctx->pe;
ctx               488 drivers/misc/cxl/trace.h 	TP_PROTO(struct cxl_context *ctx),
ctx               489 drivers/misc/cxl/trace.h 	TP_ARGS(ctx)
ctx                32 drivers/misc/cxl/vphb.c 	struct cxl_context *ctx;
ctx                48 drivers/misc/cxl/vphb.c 	ctx = cxl_dev_context_init(dev);
ctx                49 drivers/misc/cxl/vphb.c 	if (IS_ERR(ctx))
ctx                51 drivers/misc/cxl/vphb.c 	dev->dev.archdata.cxl_ctx = ctx;
ctx                58 drivers/misc/cxl/vphb.c 	struct cxl_context *ctx = cxl_get_context(dev);
ctx                60 drivers/misc/cxl/vphb.c 	if (ctx) {
ctx                61 drivers/misc/cxl/vphb.c 		if (ctx->status == STARTED) {
ctx                66 drivers/misc/cxl/vphb.c 		cxl_release_context(ctx);
ctx                95 drivers/misc/fastrpc.c 	u64 ctx;		/* invoke caller context */
ctx               103 drivers/misc/fastrpc.c 	u64 ctx;		/* invoke caller context */
ctx               317 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx;
ctx               322 drivers/misc/fastrpc.c 	ctx = container_of(ref, struct fastrpc_invoke_ctx, refcount);
ctx               323 drivers/misc/fastrpc.c 	cctx = ctx->cctx;
ctx               325 drivers/misc/fastrpc.c 	for (i = 0; i < ctx->nscalars; i++)
ctx               326 drivers/misc/fastrpc.c 		fastrpc_map_put(ctx->maps[i]);
ctx               328 drivers/misc/fastrpc.c 	if (ctx->buf)
ctx               329 drivers/misc/fastrpc.c 		fastrpc_buf_free(ctx->buf);
ctx               332 drivers/misc/fastrpc.c 	idr_remove(&cctx->ctx_idr, ctx->ctxid >> 4);
ctx               335 drivers/misc/fastrpc.c 	kfree(ctx->maps);
ctx               336 drivers/misc/fastrpc.c 	kfree(ctx->olaps);
ctx               337 drivers/misc/fastrpc.c 	kfree(ctx);
ctx               342 drivers/misc/fastrpc.c static void fastrpc_context_get(struct fastrpc_invoke_ctx *ctx)
ctx               344 drivers/misc/fastrpc.c 	kref_get(&ctx->refcount);
ctx               347 drivers/misc/fastrpc.c static void fastrpc_context_put(struct fastrpc_invoke_ctx *ctx)
ctx               349 drivers/misc/fastrpc.c 	kref_put(&ctx->refcount, fastrpc_context_free);
ctx               354 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx =
ctx               357 drivers/misc/fastrpc.c 	fastrpc_context_put(ctx);
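
fastrpc_context_get()/fastrpc_context_put() above are thin wrappers over kref_get()/kref_put(), with fastrpc_context_free() run by the final put. A minimal userspace analogue of that lifetime pattern, using C11 atomics in place of kref (names illustrative):

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

/* Minimal analogue of the kref pattern used by
 * fastrpc_context_get()/fastrpc_context_put(): the last put runs
 * the release function. */
struct obj {
	atomic_int refcount;
	int id;
};

static void obj_release(struct obj *o)
{
	printf("freeing obj %d\n", o->id);
	free(o);
}

static void obj_get(struct obj *o)
{
	atomic_fetch_add(&o->refcount, 1);
}

static void obj_put(struct obj *o)
{
	/* fetch_sub returns the old value; 1 means this was the
	 * last reference, so release now. */
	if (atomic_fetch_sub(&o->refcount, 1) == 1)
		obj_release(o);
}

int main(void)
{
	struct obj *o = malloc(sizeof(*o));

	atomic_init(&o->refcount, 1);	/* like kref_init() */
	o->id = 42;

	obj_get(o);	/* extra reference, e.g. for an in-flight message */
	obj_put(o);	/* back down to 1 */
	obj_put(o);	/* last put: obj_release() runs */
	return 0;
}
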
ctx               373 drivers/misc/fastrpc.c static void fastrpc_get_buff_overlaps(struct fastrpc_invoke_ctx *ctx)
ctx               378 drivers/misc/fastrpc.c 	for (i = 0; i < ctx->nbufs; ++i) {
ctx               379 drivers/misc/fastrpc.c 		ctx->olaps[i].start = ctx->args[i].ptr;
ctx               380 drivers/misc/fastrpc.c 		ctx->olaps[i].end = ctx->olaps[i].start + ctx->args[i].length;
ctx               381 drivers/misc/fastrpc.c 		ctx->olaps[i].raix = i;
ctx               384 drivers/misc/fastrpc.c 	sort(ctx->olaps, ctx->nbufs, sizeof(*ctx->olaps), olaps_cmp, NULL);
ctx               386 drivers/misc/fastrpc.c 	for (i = 0; i < ctx->nbufs; ++i) {
ctx               388 drivers/misc/fastrpc.c 		if (ctx->olaps[i].start < max_end) {
ctx               389 drivers/misc/fastrpc.c 			ctx->olaps[i].mstart = max_end;
ctx               390 drivers/misc/fastrpc.c 			ctx->olaps[i].mend = ctx->olaps[i].end;
ctx               391 drivers/misc/fastrpc.c 			ctx->olaps[i].offset = max_end - ctx->olaps[i].start;
ctx               393 drivers/misc/fastrpc.c 			if (ctx->olaps[i].end > max_end) {
ctx               394 drivers/misc/fastrpc.c 				max_end = ctx->olaps[i].end;
ctx               396 drivers/misc/fastrpc.c 				ctx->olaps[i].mend = 0;
ctx               397 drivers/misc/fastrpc.c 				ctx->olaps[i].mstart = 0;
ctx               401 drivers/misc/fastrpc.c 			ctx->olaps[i].mend = ctx->olaps[i].end;
ctx               402 drivers/misc/fastrpc.c 			ctx->olaps[i].mstart = ctx->olaps[i].start;
ctx               403 drivers/misc/fastrpc.c 			ctx->olaps[i].offset = 0;
ctx               404 drivers/misc/fastrpc.c 			max_end = ctx->olaps[i].end;
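
fastrpc_get_buff_overlaps() above implements a classic interval-overlap trim: sort buffers by start address, then walk them while tracking the highest end seen so far; a buffer starting inside an earlier one only "materializes" its non-overlapping tail, and a fully contained buffer materializes nothing. A self-contained sketch of that walk follows; olap_cmp() stands in for the driver's olaps_cmp(), which is not shown here.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct olap {
	uint64_t start, end;	/* caller-supplied range */
	uint64_t mstart, mend;	/* range actually materialized */
	uint64_t offset;	/* bytes already covered by earlier bufs */
};

static int olap_cmp(const void *a, const void *b)
{
	const struct olap *x = a, *y = b;

	return (x->start > y->start) - (x->start < y->start);
}

static void trim_overlaps(struct olap *o, int n)
{
	uint64_t max_end = 0;

	qsort(o, n, sizeof(*o), olap_cmp);
	for (int i = 0; i < n; i++) {
		if (o[i].start < max_end) {
			o[i].mstart = max_end;
			o[i].mend = o[i].end;
			o[i].offset = max_end - o[i].start;
			if (o[i].end > max_end)
				max_end = o[i].end;
			else
				o[i].mstart = o[i].mend = 0; /* contained */
		} else {
			o[i].mstart = o[i].start;
			o[i].mend = o[i].end;
			o[i].offset = 0;
			max_end = o[i].end;
		}
	}
}

int main(void)
{
	struct olap o[3] = {
		{ .start = 0,  .end = 100 },
		{ .start = 50, .end = 80 },	/* fully inside the first */
		{ .start = 90, .end = 120 },	/* tail [100,120) survives */
	};

	trim_overlaps(o, 3);
	for (int i = 0; i < 3; i++)
		printf("[%llu,%llu) -> m[%llu,%llu) off=%llu\n",
		       (unsigned long long)o[i].start,
		       (unsigned long long)o[i].end,
		       (unsigned long long)o[i].mstart,
		       (unsigned long long)o[i].mend,
		       (unsigned long long)o[i].offset);
	return 0;
}
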
ctx               414 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx = NULL;
ctx               418 drivers/misc/fastrpc.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               419 drivers/misc/fastrpc.c 	if (!ctx)
ctx               422 drivers/misc/fastrpc.c 	INIT_LIST_HEAD(&ctx->node);
ctx               423 drivers/misc/fastrpc.c 	ctx->fl = user;
ctx               424 drivers/misc/fastrpc.c 	ctx->nscalars = REMOTE_SCALARS_LENGTH(sc);
ctx               425 drivers/misc/fastrpc.c 	ctx->nbufs = REMOTE_SCALARS_INBUFS(sc) +
ctx               428 drivers/misc/fastrpc.c 	if (ctx->nscalars) {
ctx               429 drivers/misc/fastrpc.c 		ctx->maps = kcalloc(ctx->nscalars,
ctx               430 drivers/misc/fastrpc.c 				    sizeof(*ctx->maps), GFP_KERNEL);
ctx               431 drivers/misc/fastrpc.c 		if (!ctx->maps) {
ctx               432 drivers/misc/fastrpc.c 			kfree(ctx);
ctx               435 drivers/misc/fastrpc.c 		ctx->olaps = kcalloc(ctx->nscalars,
ctx               436 drivers/misc/fastrpc.c 				    sizeof(*ctx->olaps), GFP_KERNEL);
ctx               437 drivers/misc/fastrpc.c 		if (!ctx->olaps) {
ctx               438 drivers/misc/fastrpc.c 			kfree(ctx->maps);
ctx               439 drivers/misc/fastrpc.c 			kfree(ctx);
ctx               442 drivers/misc/fastrpc.c 		ctx->args = args;
ctx               443 drivers/misc/fastrpc.c 		fastrpc_get_buff_overlaps(ctx);
ctx               449 drivers/misc/fastrpc.c 	ctx->sc = sc;
ctx               450 drivers/misc/fastrpc.c 	ctx->retval = -1;
ctx               451 drivers/misc/fastrpc.c 	ctx->pid = current->pid;
ctx               452 drivers/misc/fastrpc.c 	ctx->tgid = user->tgid;
ctx               453 drivers/misc/fastrpc.c 	ctx->cctx = cctx;
ctx               454 drivers/misc/fastrpc.c 	init_completion(&ctx->work);
ctx               455 drivers/misc/fastrpc.c 	INIT_WORK(&ctx->put_work, fastrpc_context_put_wq);
ctx               458 drivers/misc/fastrpc.c 	list_add_tail(&ctx->node, &user->pending);
ctx               462 drivers/misc/fastrpc.c 	ret = idr_alloc_cyclic(&cctx->ctx_idr, ctx, 1,
ctx               468 drivers/misc/fastrpc.c 	ctx->ctxid = ret << 4;
ctx               471 drivers/misc/fastrpc.c 	kref_init(&ctx->refcount);
ctx               473 drivers/misc/fastrpc.c 	return ctx;
ctx               476 drivers/misc/fastrpc.c 	list_del(&ctx->node);
ctx               479 drivers/misc/fastrpc.c 	kfree(ctx->maps);
ctx               480 drivers/misc/fastrpc.c 	kfree(ctx->olaps);
ctx               481 drivers/misc/fastrpc.c 	kfree(ctx);
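
The allocation path above stores the context in an IDR and records "ctx->ctxid = ret << 4", leaving the low nibble free: the send path later ORs in the process-domain bits (msg->ctx = ctx->ctxid | fl->pd), and the response handler masks and shifts to recover the IDR slot ((rsp->ctx & FASTRPC_CTXID_MASK) >> 4). A sketch of that round trip under stated assumptions; CURRENT only shows the FASTRPC_CTXID_MASK name, so the mask value below is illustrative.

#include <stdint.h>
#include <stdio.h>

#define CTXID_MASK 0xFF0ULL	/* assumed width, not the driver's value */

static uint64_t encode(uint32_t idr_slot, uint32_t pd)
{
	/* slot shifted left 4, low nibble carries the PD bits */
	return ((uint64_t)idr_slot << 4) | (pd & 0xf);
}

static uint32_t decode_slot(uint64_t wire)
{
	return (wire & CTXID_MASK) >> 4;
}

int main(void)
{
	uint64_t wire = encode(0x2a, 1);

	printf("wire=0x%llx slot=%u\n",
	       (unsigned long long)wire, decode_slot(wire));
	return 0;
}
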
ctx               680 drivers/misc/fastrpc.c static int fastrpc_get_meta_size(struct fastrpc_invoke_ctx *ctx)
ctx               686 drivers/misc/fastrpc.c 		sizeof(struct fastrpc_phy_page)) * ctx->nscalars +
ctx               693 drivers/misc/fastrpc.c static u64 fastrpc_get_payload_size(struct fastrpc_invoke_ctx *ctx, int metalen)
ctx               699 drivers/misc/fastrpc.c 	for (i = 0; i < ctx->nscalars; i++) {
ctx               700 drivers/misc/fastrpc.c 		if (ctx->args[i].fd == 0 || ctx->args[i].fd == -1) {
ctx               702 drivers/misc/fastrpc.c 			if (ctx->olaps[i].offset == 0)
ctx               705 drivers/misc/fastrpc.c 			size += (ctx->olaps[i].mend - ctx->olaps[i].mstart);
ctx               712 drivers/misc/fastrpc.c static int fastrpc_create_maps(struct fastrpc_invoke_ctx *ctx)
ctx               714 drivers/misc/fastrpc.c 	struct device *dev = ctx->fl->sctx->dev;
ctx               717 drivers/misc/fastrpc.c 	for (i = 0; i < ctx->nscalars; ++i) {
ctx               719 drivers/misc/fastrpc.c 		if (ctx->args[i].reserved)
ctx               722 drivers/misc/fastrpc.c 		if (ctx->args[i].fd == 0 || ctx->args[i].fd == -1 ||
ctx               723 drivers/misc/fastrpc.c 		    ctx->args[i].length == 0)
ctx               726 drivers/misc/fastrpc.c 		err = fastrpc_map_create(ctx->fl, ctx->args[i].fd,
ctx               727 drivers/misc/fastrpc.c 					 ctx->args[i].length, &ctx->maps[i]);
ctx               737 drivers/misc/fastrpc.c static int fastrpc_get_args(u32 kernel, struct fastrpc_invoke_ctx *ctx)
ctx               739 drivers/misc/fastrpc.c 	struct device *dev = ctx->fl->sctx->dev;
ctx               749 drivers/misc/fastrpc.c 	inbufs = REMOTE_SCALARS_INBUFS(ctx->sc);
ctx               750 drivers/misc/fastrpc.c 	metalen = fastrpc_get_meta_size(ctx);
ctx               751 drivers/misc/fastrpc.c 	pkt_size = fastrpc_get_payload_size(ctx, metalen);
ctx               753 drivers/misc/fastrpc.c 	err = fastrpc_create_maps(ctx);
ctx               757 drivers/misc/fastrpc.c 	ctx->msg_sz = pkt_size;
ctx               759 drivers/misc/fastrpc.c 	err = fastrpc_buf_alloc(ctx->fl, dev, pkt_size, &ctx->buf);
ctx               763 drivers/misc/fastrpc.c 	rpra = ctx->buf->virt;
ctx               764 drivers/misc/fastrpc.c 	list = ctx->buf->virt + ctx->nscalars * sizeof(*rpra);
ctx               765 drivers/misc/fastrpc.c 	pages = ctx->buf->virt + ctx->nscalars * (sizeof(*list) +
ctx               767 drivers/misc/fastrpc.c 	args = (uintptr_t)ctx->buf->virt + metalen;
ctx               769 drivers/misc/fastrpc.c 	ctx->rpra = rpra;
ctx               771 drivers/misc/fastrpc.c 	for (oix = 0; oix < ctx->nbufs; ++oix) {
ctx               774 drivers/misc/fastrpc.c 		i = ctx->olaps[oix].raix;
ctx               775 drivers/misc/fastrpc.c 		len = ctx->args[i].length;
ctx               785 drivers/misc/fastrpc.c 		if (ctx->maps[i]) {
ctx               788 drivers/misc/fastrpc.c 			rpra[i].pv = (u64) ctx->args[i].ptr;
ctx               789 drivers/misc/fastrpc.c 			pages[i].addr = ctx->maps[i]->phys;
ctx               791 drivers/misc/fastrpc.c 			vma = find_vma(current->mm, ctx->args[i].ptr);
ctx               793 drivers/misc/fastrpc.c 				pages[i].addr += ctx->args[i].ptr -
ctx               796 drivers/misc/fastrpc.c 			pg_start = (ctx->args[i].ptr & PAGE_MASK) >> PAGE_SHIFT;
ctx               797 drivers/misc/fastrpc.c 			pg_end = ((ctx->args[i].ptr + len - 1) & PAGE_MASK) >>
ctx               803 drivers/misc/fastrpc.c 			if (ctx->olaps[oix].offset == 0) {
ctx               808 drivers/misc/fastrpc.c 			mlen = ctx->olaps[oix].mend - ctx->olaps[oix].mstart;
ctx               813 drivers/misc/fastrpc.c 			rpra[i].pv = args - ctx->olaps[oix].offset;
ctx               814 drivers/misc/fastrpc.c 			pages[i].addr = ctx->buf->phys -
ctx               815 drivers/misc/fastrpc.c 					ctx->olaps[oix].offset +
ctx               826 drivers/misc/fastrpc.c 		if (i < inbufs && !ctx->maps[i]) {
ctx               828 drivers/misc/fastrpc.c 			void *src = (void *)(uintptr_t)ctx->args[i].ptr;
ctx               842 drivers/misc/fastrpc.c 	for (i = ctx->nbufs; i < ctx->nscalars; ++i) {
ctx               843 drivers/misc/fastrpc.c 		rpra[i].pv = (u64) ctx->args[i].ptr;
ctx               844 drivers/misc/fastrpc.c 		rpra[i].len = ctx->args[i].length;
ctx               845 drivers/misc/fastrpc.c 		list[i].num = ctx->args[i].length ? 1 : 0;
ctx               847 drivers/misc/fastrpc.c 		pages[i].addr = ctx->maps[i]->phys;
ctx               848 drivers/misc/fastrpc.c 		pages[i].size = ctx->maps[i]->size;
ctx               858 drivers/misc/fastrpc.c static int fastrpc_put_args(struct fastrpc_invoke_ctx *ctx,
ctx               861 drivers/misc/fastrpc.c 	struct fastrpc_remote_arg *rpra = ctx->rpra;
ctx               864 drivers/misc/fastrpc.c 	inbufs = REMOTE_SCALARS_INBUFS(ctx->sc);
ctx               866 drivers/misc/fastrpc.c 	for (i = inbufs; i < ctx->nbufs; ++i) {
ctx               868 drivers/misc/fastrpc.c 		void *dst = (void *)(uintptr_t)ctx->args[i].ptr;
ctx               883 drivers/misc/fastrpc.c 			       struct fastrpc_invoke_ctx *ctx,
ctx               887 drivers/misc/fastrpc.c 	struct fastrpc_user *fl = ctx->fl;
ctx               888 drivers/misc/fastrpc.c 	struct fastrpc_msg *msg = &ctx->msg;
ctx               897 drivers/misc/fastrpc.c 	msg->ctx = ctx->ctxid | fl->pd;
ctx               899 drivers/misc/fastrpc.c 	msg->sc = ctx->sc;
ctx               900 drivers/misc/fastrpc.c 	msg->addr = ctx->buf ? ctx->buf->phys : 0;
ctx               901 drivers/misc/fastrpc.c 	msg->size = roundup(ctx->msg_sz, PAGE_SIZE);
ctx               902 drivers/misc/fastrpc.c 	fastrpc_context_get(ctx);
ctx               911 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx = NULL;
ctx               920 drivers/misc/fastrpc.c 	ctx = fastrpc_context_alloc(fl, kernel, sc, args);
ctx               921 drivers/misc/fastrpc.c 	if (IS_ERR(ctx))
ctx               922 drivers/misc/fastrpc.c 		return PTR_ERR(ctx);
ctx               924 drivers/misc/fastrpc.c 	if (ctx->nscalars) {
ctx               925 drivers/misc/fastrpc.c 		err = fastrpc_get_args(kernel, ctx);
ctx               933 drivers/misc/fastrpc.c 	err = fastrpc_invoke_send(fl->sctx, ctx, kernel, handle);
ctx               938 drivers/misc/fastrpc.c 	err = wait_for_completion_interruptible(&ctx->work);
ctx               943 drivers/misc/fastrpc.c 	err = ctx->retval;
ctx               947 drivers/misc/fastrpc.c 	if (ctx->nscalars) {
ctx               951 drivers/misc/fastrpc.c 		err = fastrpc_put_args(ctx, kernel);
ctx               959 drivers/misc/fastrpc.c 	list_del(&ctx->node);
ctx               961 drivers/misc/fastrpc.c 	fastrpc_context_put(ctx);
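
The invoke path above is a blocking RPC round trip: allocate a context, marshal arguments, send, then sleep on wait_for_completion_interruptible(&ctx->work) until the response handler looks the context up by ctxid and calls complete(). A userspace analogue of that completion handshake, built from a pthread mutex and condition variable (all names illustrative):

#include <pthread.h>
#include <stdio.h>

struct invoke_ctx {
	pthread_mutex_t lock;
	pthread_cond_t cond;
	int done;
	int retval;
};

static void *response_handler(void *arg)
{
	struct invoke_ctx *ctx = arg;

	pthread_mutex_lock(&ctx->lock);
	ctx->retval = 0;	/* remote call succeeded */
	ctx->done = 1;		/* like complete(&ctx->work) */
	pthread_cond_signal(&ctx->cond);
	pthread_mutex_unlock(&ctx->lock);
	return NULL;
}

int main(void)
{
	struct invoke_ctx ctx = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.cond = PTHREAD_COND_INITIALIZER,
	};
	pthread_t t;

	pthread_create(&t, NULL, response_handler, &ctx);

	/* like wait_for_completion_interruptible(&ctx->work) */
	pthread_mutex_lock(&ctx.lock);
	while (!ctx.done)
		pthread_cond_wait(&ctx.cond, &ctx.lock);
	pthread_mutex_unlock(&ctx.lock);

	printf("invoke returned %d\n", ctx.retval);
	pthread_join(t, NULL);
	return 0;
}
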
ctx              1132 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx, *n;
ctx              1145 drivers/misc/fastrpc.c 	list_for_each_entry_safe(ctx, n, &fl->pending, node) {
ctx              1146 drivers/misc/fastrpc.c 		list_del(&ctx->node);
ctx              1147 drivers/misc/fastrpc.c 		fastrpc_context_put(ctx);
ctx              1455 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx;
ctx              1458 drivers/misc/fastrpc.c 	list_for_each_entry(ctx, &user->pending, node)
ctx              1459 drivers/misc/fastrpc.c 		complete(&ctx->work);
ctx              1486 drivers/misc/fastrpc.c 	struct fastrpc_invoke_ctx *ctx;
ctx              1493 drivers/misc/fastrpc.c 	ctxid = ((rsp->ctx & FASTRPC_CTXID_MASK) >> 4);
ctx              1496 drivers/misc/fastrpc.c 	ctx = idr_find(&cctx->ctx_idr, ctxid);
ctx              1499 drivers/misc/fastrpc.c 	if (!ctx) {
ctx              1504 drivers/misc/fastrpc.c 	ctx->retval = rsp->retval;
ctx              1505 drivers/misc/fastrpc.c 	complete(&ctx->work);
ctx              1512 drivers/misc/fastrpc.c 	schedule_work(&ctx->put_work);
ctx               227 drivers/misc/habanalabs/command_buffer.c 					&handle, hpriv->ctx->asid);
ctx                16 drivers/misc/habanalabs/command_submission.c 		struct hl_ctx *ctx, u64 timeout_us, u64 seq);
ctx                85 drivers/misc/habanalabs/command_submission.c 	parser.ctx_id = job->cs->ctx->asid;
ctx               163 drivers/misc/habanalabs/command_submission.c 	struct hl_device *hdev = cs->ctx->hdev;
ctx               240 drivers/misc/habanalabs/command_submission.c 	hl_ctx_put(cs->ctx);
ctx               271 drivers/misc/habanalabs/command_submission.c 	hdev = cs->ctx->hdev;
ctx               272 drivers/misc/habanalabs/command_submission.c 	ctx_asid = cs->ctx->asid;
ctx               284 drivers/misc/habanalabs/command_submission.c static int allocate_cs(struct hl_device *hdev, struct hl_ctx *ctx,
ctx               296 drivers/misc/habanalabs/command_submission.c 	cs->ctx = ctx;
ctx               314 drivers/misc/habanalabs/command_submission.c 	spin_lock(&ctx->cs_lock);
ctx               316 drivers/misc/habanalabs/command_submission.c 	fence->cs_seq = ctx->cs_sequence;
ctx               317 drivers/misc/habanalabs/command_submission.c 	other = ctx->cs_pending[fence->cs_seq & (HL_MAX_PENDING_CS - 1)];
ctx               319 drivers/misc/habanalabs/command_submission.c 		spin_unlock(&ctx->cs_lock);
ctx               327 drivers/misc/habanalabs/command_submission.c 			ctx->asid, ctx->cs_sequence);
ctx               331 drivers/misc/habanalabs/command_submission.c 	ctx->cs_pending[fence->cs_seq & (HL_MAX_PENDING_CS - 1)] =
ctx               333 drivers/misc/habanalabs/command_submission.c 	ctx->cs_sequence++;
ctx               339 drivers/misc/habanalabs/command_submission.c 	spin_unlock(&ctx->cs_lock);
ctx               373 drivers/misc/habanalabs/command_submission.c 					cs->ctx->asid, cs->sequence);
ctx               384 drivers/misc/habanalabs/command_submission.c 	struct hl_device *hdev = cs->ctx->hdev;
ctx               502 drivers/misc/habanalabs/command_submission.c 	hl_ctx_get(hdev, hpriv->ctx);
ctx               504 drivers/misc/habanalabs/command_submission.c 	rc = allocate_cs(hdev, hpriv->ctx, &cs);
ctx               506 drivers/misc/habanalabs/command_submission.c 		hl_ctx_put(hpriv->ctx);
ctx               568 drivers/misc/habanalabs/command_submission.c 				cs->ctx->asid, cs->sequence, job->id, rc);
ctx               576 drivers/misc/habanalabs/command_submission.c 			cs->ctx->asid, cs->sequence);
ctx               585 drivers/misc/habanalabs/command_submission.c 			cs->ctx->asid, cs->sequence, rc);
ctx               614 drivers/misc/habanalabs/command_submission.c 	struct hl_ctx *ctx = hpriv->ctx;
ctx               629 drivers/misc/habanalabs/command_submission.c 	do_ctx_switch = atomic_cmpxchg(&ctx->thread_ctx_switch_token, 1, 0);
ctx               640 drivers/misc/habanalabs/command_submission.c 			rc = hdev->asic_funcs->context_switch(hdev, ctx->asid);
ctx               644 drivers/misc/habanalabs/command_submission.c 					ctx->asid, rc);
ctx               677 drivers/misc/habanalabs/command_submission.c 				ctx->asid, rc);
ctx               683 drivers/misc/habanalabs/command_submission.c 			ret = _hl_cs_wait_ioctl(hdev, ctx,
ctx               689 drivers/misc/habanalabs/command_submission.c 					ctx->asid, ret);
ctx               695 drivers/misc/habanalabs/command_submission.c 		ctx->thread_ctx_switch_wait_token = 1;
ctx               696 drivers/misc/habanalabs/command_submission.c 	} else if (!ctx->thread_ctx_switch_wait_token) {
ctx               700 drivers/misc/habanalabs/command_submission.c 			&ctx->thread_ctx_switch_wait_token, tmp, (tmp == 1),
ctx               716 drivers/misc/habanalabs/command_submission.c 			ctx->asid);
ctx               737 drivers/misc/habanalabs/command_submission.c 		struct hl_ctx *ctx, u64 timeout_us, u64 seq)
ctx               748 drivers/misc/habanalabs/command_submission.c 	hl_ctx_get(hdev, ctx);
ctx               750 drivers/misc/habanalabs/command_submission.c 	fence = hl_ctx_get_fence(ctx, seq);
ctx               763 drivers/misc/habanalabs/command_submission.c 	hl_ctx_put(ctx);
ctx               775 drivers/misc/habanalabs/command_submission.c 	rc = _hl_cs_wait_ioctl(hdev, hpriv->ctx, args->in.timeout_us, seq);
ctx                12 drivers/misc/habanalabs/context.c static void hl_ctx_fini(struct hl_ctx *ctx)
ctx                14 drivers/misc/habanalabs/context.c 	struct hl_device *hdev = ctx->hdev;
ctx                26 drivers/misc/habanalabs/context.c 		dma_fence_put(ctx->cs_pending[i]);
ctx                28 drivers/misc/habanalabs/context.c 	if (ctx->asid != HL_KERNEL_ASID_ID) {
ctx                35 drivers/misc/habanalabs/context.c 		if ((hdev->in_debug) && (hdev->compute_ctx == ctx))
ctx                38 drivers/misc/habanalabs/context.c 		hl_vm_ctx_fini(ctx);
ctx                39 drivers/misc/habanalabs/context.c 		hl_asid_free(hdev, ctx->asid);
ctx                41 drivers/misc/habanalabs/context.c 		hl_mmu_ctx_fini(ctx);
ctx                47 drivers/misc/habanalabs/context.c 	struct hl_ctx *ctx;
ctx                49 drivers/misc/habanalabs/context.c 	ctx = container_of(ref, struct hl_ctx, refcount);
ctx                51 drivers/misc/habanalabs/context.c 	hl_ctx_fini(ctx);
ctx                53 drivers/misc/habanalabs/context.c 	if (ctx->hpriv)
ctx                54 drivers/misc/habanalabs/context.c 		hl_hpriv_put(ctx->hpriv);
ctx                56 drivers/misc/habanalabs/context.c 	kfree(ctx);
ctx                62 drivers/misc/habanalabs/context.c 	struct hl_ctx *ctx;
ctx                65 drivers/misc/habanalabs/context.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                66 drivers/misc/habanalabs/context.c 	if (!ctx) {
ctx                72 drivers/misc/habanalabs/context.c 	rc = idr_alloc(&mgr->ctx_handles, ctx, 1, 0, GFP_KERNEL);
ctx                80 drivers/misc/habanalabs/context.c 	ctx->handle = rc;
ctx                82 drivers/misc/habanalabs/context.c 	rc = hl_ctx_init(hdev, ctx, false);
ctx                87 drivers/misc/habanalabs/context.c 	ctx->hpriv = hpriv;
ctx                90 drivers/misc/habanalabs/context.c 	hpriv->ctx = ctx;
ctx                93 drivers/misc/habanalabs/context.c 	hdev->compute_ctx = ctx;
ctx                99 drivers/misc/habanalabs/context.c 	idr_remove(&mgr->ctx_handles, ctx->handle);
ctx               102 drivers/misc/habanalabs/context.c 	kfree(ctx);
ctx               107 drivers/misc/habanalabs/context.c void hl_ctx_free(struct hl_device *hdev, struct hl_ctx *ctx)
ctx               109 drivers/misc/habanalabs/context.c 	if (kref_put(&ctx->refcount, hl_ctx_do_release) == 1)
ctx               114 drivers/misc/habanalabs/context.c 		ctx->asid);
ctx               117 drivers/misc/habanalabs/context.c int hl_ctx_init(struct hl_device *hdev, struct hl_ctx *ctx, bool is_kernel_ctx)
ctx               121 drivers/misc/habanalabs/context.c 	ctx->hdev = hdev;
ctx               123 drivers/misc/habanalabs/context.c 	kref_init(&ctx->refcount);
ctx               125 drivers/misc/habanalabs/context.c 	ctx->cs_sequence = 1;
ctx               126 drivers/misc/habanalabs/context.c 	spin_lock_init(&ctx->cs_lock);
ctx               127 drivers/misc/habanalabs/context.c 	atomic_set(&ctx->thread_ctx_switch_token, 1);
ctx               128 drivers/misc/habanalabs/context.c 	ctx->thread_ctx_switch_wait_token = 0;
ctx               131 drivers/misc/habanalabs/context.c 		ctx->asid = HL_KERNEL_ASID_ID; /* Kernel driver gets ASID 0 */
ctx               132 drivers/misc/habanalabs/context.c 		rc = hl_mmu_ctx_init(ctx);
ctx               138 drivers/misc/habanalabs/context.c 		ctx->asid = hl_asid_alloc(hdev);
ctx               139 drivers/misc/habanalabs/context.c 		if (!ctx->asid) {
ctx               144 drivers/misc/habanalabs/context.c 		rc = hl_vm_ctx_init(ctx);
ctx               155 drivers/misc/habanalabs/context.c 	if (ctx->asid != HL_KERNEL_ASID_ID)
ctx               156 drivers/misc/habanalabs/context.c 		hl_asid_free(hdev, ctx->asid);
ctx               161 drivers/misc/habanalabs/context.c void hl_ctx_get(struct hl_device *hdev, struct hl_ctx *ctx)
ctx               163 drivers/misc/habanalabs/context.c 	kref_get(&ctx->refcount);
ctx               166 drivers/misc/habanalabs/context.c int hl_ctx_put(struct hl_ctx *ctx)
ctx               168 drivers/misc/habanalabs/context.c 	return kref_put(&ctx->refcount, hl_ctx_do_release);
ctx               171 drivers/misc/habanalabs/context.c struct dma_fence *hl_ctx_get_fence(struct hl_ctx *ctx, u64 seq)
ctx               173 drivers/misc/habanalabs/context.c 	struct hl_device *hdev = ctx->hdev;
ctx               176 drivers/misc/habanalabs/context.c 	spin_lock(&ctx->cs_lock);
ctx               178 drivers/misc/habanalabs/context.c 	if (seq >= ctx->cs_sequence) {
ctx               181 drivers/misc/habanalabs/context.c 			seq, ctx->cs_sequence);
ctx               182 drivers/misc/habanalabs/context.c 		spin_unlock(&ctx->cs_lock);
ctx               187 drivers/misc/habanalabs/context.c 	if (seq + HL_MAX_PENDING_CS < ctx->cs_sequence) {
ctx               190 drivers/misc/habanalabs/context.c 			seq, ctx->cs_sequence);
ctx               191 drivers/misc/habanalabs/context.c 		spin_unlock(&ctx->cs_lock);
ctx               196 drivers/misc/habanalabs/context.c 			ctx->cs_pending[seq & (HL_MAX_PENDING_CS - 1)]);
ctx               197 drivers/misc/habanalabs/context.c 	spin_unlock(&ctx->cs_lock);
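
hl_ctx_get_fence() above treats cs_pending as a sliding window over a monotonically growing sequence counter: a seq at or beyond cs_sequence has not been submitted yet, a seq more than HL_MAX_PENDING_CS behind has had its slot recycled, and anything in between indexes the ring with seq & (HL_MAX_PENDING_CS - 1), which requires the ring size to be a power of two. A minimal sketch of that window, with a string standing in for the dma_fence pointer:

#include <stdint.h>
#include <stdio.h>

#define MAX_PENDING 64		/* must be a power of two */

struct fence_ring {
	const char *slot[MAX_PENDING];	/* stand-in for dma_fence ptrs */
	uint64_t next_seq;		/* like ctx->cs_sequence */
};

static const char *ring_get(struct fence_ring *r, uint64_t seq)
{
	if (seq >= r->next_seq)
		return NULL;	/* not submitted yet */
	if (seq + MAX_PENDING < r->next_seq)
		return NULL;	/* slot already recycled: too old */
	return r->slot[seq & (MAX_PENDING - 1)];
}

int main(void)
{
	struct fence_ring r = { .next_seq = 1 };

	r.slot[r.next_seq & (MAX_PENDING - 1)] = "fence-1";
	r.next_seq++;

	printf("seq 1 -> %s\n", ring_get(&r, 1));
	printf("seq 9 -> %s\n", ring_get(&r, 9) ? ring_get(&r, 9) : "(null)");
	return 0;
}
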
ctx               227 drivers/misc/habanalabs/context.c 	struct hl_ctx *ctx;
ctx               233 drivers/misc/habanalabs/context.c 	idr_for_each_entry(idp, ctx, id)
ctx               234 drivers/misc/habanalabs/context.c 		hl_ctx_free(hdev, ctx);
ctx               145 drivers/misc/habanalabs/debugfs.c 			cs->sequence, cs->ctx->asid,
ctx               177 drivers/misc/habanalabs/debugfs.c 				job->id, job->cs->sequence, job->cs->ctx->asid,
ctx               228 drivers/misc/habanalabs/debugfs.c 	struct hl_ctx *ctx;
ctx               243 drivers/misc/habanalabs/debugfs.c 	list_for_each_entry(ctx, &dev_entry->ctx_mem_hash_list, debugfs_list) {
ctx               247 drivers/misc/habanalabs/debugfs.c 		seq_printf(s, "ctx asid: %u\n", ctx->asid);
ctx               252 drivers/misc/habanalabs/debugfs.c 		mutex_lock(&ctx->mem_hash_lock);
ctx               253 drivers/misc/habanalabs/debugfs.c 		hash_for_each(ctx->mem_hash, i, hnode, node) {
ctx               269 drivers/misc/habanalabs/debugfs.c 		mutex_unlock(&ctx->mem_hash_lock);
ctx               271 drivers/misc/habanalabs/debugfs.c 		vm = &ctx->hdev->vm;
ctx               278 drivers/misc/habanalabs/debugfs.c 			if (phys_pg_pack->asid != ctx->asid)
ctx               304 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop0_addr(struct hl_ctx *ctx)
ctx               306 drivers/misc/habanalabs/debugfs.c 	return ctx->hdev->asic_prop.mmu_pgt_addr +
ctx               307 drivers/misc/habanalabs/debugfs.c 			(ctx->asid * ctx->hdev->asic_prop.mmu_hop_table_size);
ctx               310 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop0_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               313 drivers/misc/habanalabs/debugfs.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               317 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop1_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               320 drivers/misc/habanalabs/debugfs.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               324 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop2_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               327 drivers/misc/habanalabs/debugfs.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               331 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop3_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               334 drivers/misc/habanalabs/debugfs.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               338 drivers/misc/habanalabs/debugfs.c static inline u64 get_hop4_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               341 drivers/misc/habanalabs/debugfs.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               358 drivers/misc/habanalabs/debugfs.c 	struct hl_ctx *ctx;
ctx               371 drivers/misc/habanalabs/debugfs.c 		ctx = hdev->kernel_ctx;
ctx               373 drivers/misc/habanalabs/debugfs.c 		ctx = hdev->compute_ctx;
ctx               375 drivers/misc/habanalabs/debugfs.c 	if (!ctx) {
ctx               380 drivers/misc/habanalabs/debugfs.c 	mutex_lock(&ctx->mmu_lock);
ctx               384 drivers/misc/habanalabs/debugfs.c 	hop0_addr = get_hop0_addr(ctx);
ctx               385 drivers/misc/habanalabs/debugfs.c 	hop0_pte_addr = get_hop0_pte_addr(ctx, hop0_addr, virt_addr);
ctx               392 drivers/misc/habanalabs/debugfs.c 	hop1_pte_addr = get_hop1_pte_addr(ctx, hop1_addr, virt_addr);
ctx               399 drivers/misc/habanalabs/debugfs.c 	hop2_pte_addr = get_hop2_pte_addr(ctx, hop2_addr, virt_addr);
ctx               406 drivers/misc/habanalabs/debugfs.c 	hop3_pte_addr = get_hop3_pte_addr(ctx, hop3_addr, virt_addr);
ctx               415 drivers/misc/habanalabs/debugfs.c 		hop4_pte_addr = get_hop4_pte_addr(ctx, hop4_addr, virt_addr);
ctx               455 drivers/misc/habanalabs/debugfs.c 	mutex_unlock(&ctx->mmu_lock);
ctx               536 drivers/misc/habanalabs/debugfs.c 	struct hl_ctx *ctx = hdev->compute_ctx;
ctx               541 drivers/misc/habanalabs/debugfs.c 	if (!ctx) {
ctx               546 drivers/misc/habanalabs/debugfs.c 	mutex_lock(&ctx->mmu_lock);
ctx               549 drivers/misc/habanalabs/debugfs.c 	hop_addr = get_hop0_addr(ctx);
ctx               550 drivers/misc/habanalabs/debugfs.c 	hop_pte_addr = get_hop0_pte_addr(ctx, hop_addr, virt_addr);
ctx               557 drivers/misc/habanalabs/debugfs.c 	hop_pte_addr = get_hop1_pte_addr(ctx, hop_addr, virt_addr);
ctx               564 drivers/misc/habanalabs/debugfs.c 	hop_pte_addr = get_hop2_pte_addr(ctx, hop_addr, virt_addr);
ctx               571 drivers/misc/habanalabs/debugfs.c 	hop_pte_addr = get_hop3_pte_addr(ctx, hop_addr, virt_addr);
ctx               579 drivers/misc/habanalabs/debugfs.c 		hop_pte_addr = get_hop4_pte_addr(ctx, hop_addr, virt_addr);
ctx               597 drivers/misc/habanalabs/debugfs.c 	mutex_unlock(&ctx->mmu_lock);
ctx              1092 drivers/misc/habanalabs/debugfs.c 	struct hl_dbg_device_entry *dev_entry = &cs->ctx->hdev->hl_debugfs;
ctx              1101 drivers/misc/habanalabs/debugfs.c 	struct hl_dbg_device_entry *dev_entry = &cs->ctx->hdev->hl_debugfs;
ctx              1145 drivers/misc/habanalabs/debugfs.c void hl_debugfs_add_ctx_mem_hash(struct hl_device *hdev, struct hl_ctx *ctx)
ctx              1150 drivers/misc/habanalabs/debugfs.c 	list_add(&ctx->debugfs_list, &dev_entry->ctx_mem_hash_list);
ctx              1154 drivers/misc/habanalabs/debugfs.c void hl_debugfs_remove_ctx_mem_hash(struct hl_device *hdev, struct hl_ctx *ctx)
ctx              1159 drivers/misc/habanalabs/debugfs.c 	list_del(&ctx->debugfs_list);
ctx                45 drivers/misc/habanalabs/device.c 	struct hl_ctx *ctx;
ctx                50 drivers/misc/habanalabs/device.c 	ctx = hpriv->ctx;
ctx                73 drivers/misc/habanalabs/habanalabs.h 	struct hl_ctx		*ctx;
ctx               728 drivers/misc/habanalabs/habanalabs.h 	struct hl_ctx		*ctx;
ctx               929 drivers/misc/habanalabs/habanalabs.h 	struct hl_ctx		*ctx;
ctx              1461 drivers/misc/habanalabs/habanalabs.h void hl_ctx_free(struct hl_device *hdev, struct hl_ctx *ctx);
ctx              1462 drivers/misc/habanalabs/habanalabs.h int hl_ctx_init(struct hl_device *hdev, struct hl_ctx *ctx, bool is_kernel_ctx);
ctx              1464 drivers/misc/habanalabs/habanalabs.h void hl_ctx_get(struct hl_device *hdev,	struct hl_ctx *ctx);
ctx              1465 drivers/misc/habanalabs/habanalabs.h int hl_ctx_put(struct hl_ctx *ctx);
ctx              1466 drivers/misc/habanalabs/habanalabs.h struct dma_fence *hl_ctx_get_fence(struct hl_ctx *ctx, u64 seq);
ctx              1508 drivers/misc/habanalabs/habanalabs.h int hl_vm_ctx_init(struct hl_ctx *ctx);
ctx              1509 drivers/misc/habanalabs/habanalabs.h void hl_vm_ctx_fini(struct hl_ctx *ctx);
ctx              1525 drivers/misc/habanalabs/habanalabs.h int hl_mmu_ctx_init(struct hl_ctx *ctx);
ctx              1526 drivers/misc/habanalabs/habanalabs.h void hl_mmu_ctx_fini(struct hl_ctx *ctx);
ctx              1527 drivers/misc/habanalabs/habanalabs.h int hl_mmu_map(struct hl_ctx *ctx, u64 virt_addr, u64 phys_addr, u32 page_size);
ctx              1528 drivers/misc/habanalabs/habanalabs.h int hl_mmu_unmap(struct hl_ctx *ctx, u64 virt_addr, u32 page_size);
ctx              1529 drivers/misc/habanalabs/habanalabs.h void hl_mmu_swap_out(struct hl_ctx *ctx);
ctx              1530 drivers/misc/habanalabs/habanalabs.h void hl_mmu_swap_in(struct hl_ctx *ctx);
ctx              1587 drivers/misc/habanalabs/habanalabs.h void hl_debugfs_add_ctx_mem_hash(struct hl_device *hdev, struct hl_ctx *ctx);
ctx              1588 drivers/misc/habanalabs/habanalabs.h void hl_debugfs_remove_ctx_mem_hash(struct hl_device *hdev, struct hl_ctx *ctx);
ctx              1653 drivers/misc/habanalabs/habanalabs.h 					struct hl_ctx *ctx)
ctx              1658 drivers/misc/habanalabs/habanalabs.h 					struct hl_ctx *ctx)
ctx               109 drivers/misc/habanalabs/habanalabs_ioctl.c 	if (hpriv->ctx)
ctx               111 drivers/misc/habanalabs/habanalabs_ioctl.c 			atomic64_read(&hpriv->ctx->dram_phys_mem);
ctx                39 drivers/misc/habanalabs/hw_queue.c 	struct hl_device *hdev = cs->ctx->hdev;
ctx               232 drivers/misc/habanalabs/hw_queue.c 	struct hl_device *hdev = job->cs->ctx->hdev;
ctx               290 drivers/misc/habanalabs/hw_queue.c 	struct hl_device *hdev = job->cs->ctx->hdev;
ctx               318 drivers/misc/habanalabs/hw_queue.c 	struct hl_device *hdev = cs->ctx->hdev;
ctx                53 drivers/misc/habanalabs/memory.c static int alloc_device_memory(struct hl_ctx *ctx, struct hl_mem_in *args,
ctx                56 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx                89 drivers/misc/habanalabs/memory.c 	phys_pg_pack->asid = ctx->asid;
ctx               137 drivers/misc/habanalabs/memory.c 	atomic64_add(phys_pg_pack->total_size, &ctx->dram_phys_mem);
ctx               304 drivers/misc/habanalabs/memory.c static int free_device_memory(struct hl_ctx *ctx, u32 handle)
ctx               306 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx               328 drivers/misc/habanalabs/memory.c 		atomic64_sub(phys_pg_pack->total_size, &ctx->dram_phys_mem);
ctx               645 drivers/misc/habanalabs/memory.c static int init_phys_pg_pack_from_userptr(struct hl_ctx *ctx,
ctx               663 drivers/misc/habanalabs/memory.c 	phys_pg_pack->asid = ctx->asid;
ctx               745 drivers/misc/habanalabs/memory.c static int map_phys_page_pack(struct hl_ctx *ctx, u64 vaddr,
ctx               748 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx               756 drivers/misc/habanalabs/memory.c 		rc = hl_mmu_map(ctx, next_vaddr, paddr, page_size);
ctx               774 drivers/misc/habanalabs/memory.c 		if (hl_mmu_unmap(ctx, next_vaddr, page_size))
ctx               786 drivers/misc/habanalabs/memory.c static int get_paddr_from_handle(struct hl_ctx *ctx, struct hl_mem_in *args,
ctx               789 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx               824 drivers/misc/habanalabs/memory.c static int map_device_va(struct hl_ctx *ctx, struct hl_mem_in *args,
ctx               827 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx               848 drivers/misc/habanalabs/memory.c 		rc = init_phys_pg_pack_from_userptr(ctx, userptr,
ctx               886 drivers/misc/habanalabs/memory.c 			phys_pg_pack->asid != ctx->asid) {
ctx               901 drivers/misc/habanalabs/memory.c 			is_userptr ? &ctx->host_va_range : &ctx->dram_va_range,
ctx               910 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->mmu_lock);
ctx               912 drivers/misc/habanalabs/memory.c 	rc = map_phys_page_pack(ctx, ret_vaddr, phys_pg_pack);
ctx               914 drivers/misc/habanalabs/memory.c 		mutex_unlock(&ctx->mmu_lock);
ctx               922 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->mmu_lock);
ctx               929 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->mem_hash_lock);
ctx               930 drivers/misc/habanalabs/memory.c 	hash_add(ctx->mem_hash, &hnode->node, ret_vaddr);
ctx               931 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->mem_hash_lock);
ctx               942 drivers/misc/habanalabs/memory.c 			is_userptr ? &ctx->host_va_range : &ctx->dram_va_range,
ctx               974 drivers/misc/habanalabs/memory.c static int unmap_device_va(struct hl_ctx *ctx, u64 vaddr, bool ctx_free)
ctx               976 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx               988 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->mem_hash_lock);
ctx               989 drivers/misc/habanalabs/memory.c 	hash_for_each_possible(ctx->mem_hash, hnode, node, (unsigned long)vaddr)
ctx               994 drivers/misc/habanalabs/memory.c 		mutex_unlock(&ctx->mem_hash_lock);
ctx              1002 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->mem_hash_lock);
ctx              1008 drivers/misc/habanalabs/memory.c 		va_range = &ctx->host_va_range;
ctx              1010 drivers/misc/habanalabs/memory.c 		rc = init_phys_pg_pack_from_userptr(ctx, userptr,
ctx              1020 drivers/misc/habanalabs/memory.c 		va_range = &ctx->dram_va_range;
ctx              1041 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->mmu_lock);
ctx              1044 drivers/misc/habanalabs/memory.c 		if (hl_mmu_unmap(ctx, next_vaddr, page_size))
ctx              1057 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->mmu_lock);
ctx              1086 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->mem_hash_lock);
ctx              1087 drivers/misc/habanalabs/memory.c 	hash_add(ctx->mem_hash, &hnode->node, vaddr);
ctx              1088 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->mem_hash_lock);
ctx              1096 drivers/misc/habanalabs/memory.c 	struct hl_ctx *ctx = hpriv->ctx;
ctx              1114 drivers/misc/habanalabs/memory.c 		rc = alloc_device_memory(ctx, &args->in, &handle);
ctx              1121 drivers/misc/habanalabs/memory.c 		rc = free_device_memory(ctx, args->in.free.handle);
ctx              1129 drivers/misc/habanalabs/memory.c 			rc = get_paddr_from_handle(ctx, &args->in,
ctx              1155 drivers/misc/habanalabs/memory.c 	struct hl_ctx *ctx = hpriv->ctx;
ctx              1184 drivers/misc/habanalabs/memory.c 		rc = alloc_device_memory(ctx, &args->in, &handle);
ctx              1191 drivers/misc/habanalabs/memory.c 		rc = free_device_memory(ctx, args->in.free.handle);
ctx              1195 drivers/misc/habanalabs/memory.c 		rc = map_device_va(ctx, &args->in, &device_addr);
ctx              1202 drivers/misc/habanalabs/memory.c 		rc = unmap_device_va(ctx, args->in.unmap.device_virt_addr,
ctx              1466 drivers/misc/habanalabs/memory.c static int hl_vm_ctx_init_with_ranges(struct hl_ctx *ctx, u64 host_range_start,
ctx              1470 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx              1473 drivers/misc/habanalabs/memory.c 	rc = hl_mmu_ctx_init(ctx);
ctx              1475 drivers/misc/habanalabs/memory.c 		dev_err(hdev->dev, "failed to init context %d\n", ctx->asid);
ctx              1479 drivers/misc/habanalabs/memory.c 	mutex_init(&ctx->mem_hash_lock);
ctx              1480 drivers/misc/habanalabs/memory.c 	hash_init(ctx->mem_hash);
ctx              1482 drivers/misc/habanalabs/memory.c 	mutex_init(&ctx->host_va_range.lock);
ctx              1484 drivers/misc/habanalabs/memory.c 	rc = hl_va_range_init(hdev, &ctx->host_va_range, host_range_start,
ctx              1491 drivers/misc/habanalabs/memory.c 	mutex_init(&ctx->dram_va_range.lock);
ctx              1493 drivers/misc/habanalabs/memory.c 	rc = hl_va_range_init(hdev, &ctx->dram_va_range, dram_range_start,
ctx              1500 drivers/misc/habanalabs/memory.c 	hl_debugfs_add_ctx_mem_hash(hdev, ctx);
ctx              1505 drivers/misc/habanalabs/memory.c 	mutex_destroy(&ctx->dram_va_range.lock);
ctx              1507 drivers/misc/habanalabs/memory.c 	mutex_lock(&ctx->host_va_range.lock);
ctx              1508 drivers/misc/habanalabs/memory.c 	clear_va_list_locked(hdev, &ctx->host_va_range.list);
ctx              1509 drivers/misc/habanalabs/memory.c 	mutex_unlock(&ctx->host_va_range.lock);
ctx              1511 drivers/misc/habanalabs/memory.c 	mutex_destroy(&ctx->host_va_range.lock);
ctx              1512 drivers/misc/habanalabs/memory.c 	mutex_destroy(&ctx->mem_hash_lock);
ctx              1513 drivers/misc/habanalabs/memory.c 	hl_mmu_ctx_fini(ctx);
ctx              1518 drivers/misc/habanalabs/memory.c int hl_vm_ctx_init(struct hl_ctx *ctx)
ctx              1520 drivers/misc/habanalabs/memory.c 	struct asic_fixed_properties *prop = &ctx->hdev->asic_prop;
ctx              1524 drivers/misc/habanalabs/memory.c 	atomic64_set(&ctx->dram_phys_mem, 0);
ctx              1533 drivers/misc/habanalabs/memory.c 	if (ctx->hdev->mmu_enable) {
ctx              1545 drivers/misc/habanalabs/memory.c 	return hl_vm_ctx_init_with_ranges(ctx, host_range_start, host_range_end,
ctx              1615 drivers/misc/habanalabs/memory.c void hl_vm_ctx_fini(struct hl_ctx *ctx)
ctx              1617 drivers/misc/habanalabs/memory.c 	struct hl_device *hdev = ctx->hdev;
ctx              1624 drivers/misc/habanalabs/memory.c 	hl_debugfs_remove_ctx_mem_hash(hdev, ctx);
ctx              1626 drivers/misc/habanalabs/memory.c 	if (!hash_empty(ctx->mem_hash))
ctx              1629 drivers/misc/habanalabs/memory.c 	hash_for_each_safe(ctx->mem_hash, i, tmp_node, hnode, node) {
ctx              1632 drivers/misc/habanalabs/memory.c 			hnode->vaddr, ctx->asid);
ctx              1633 drivers/misc/habanalabs/memory.c 		unmap_device_va(ctx, hnode->vaddr, true);
ctx              1638 drivers/misc/habanalabs/memory.c 		if (phys_pg_list->asid == ctx->asid) {
ctx              1641 drivers/misc/habanalabs/memory.c 				phys_pg_list, ctx->asid);
ctx              1649 drivers/misc/habanalabs/memory.c 	hl_va_range_fini(hdev, &ctx->dram_va_range);
ctx              1650 drivers/misc/habanalabs/memory.c 	hl_va_range_fini(hdev, &ctx->host_va_range);
ctx              1652 drivers/misc/habanalabs/memory.c 	mutex_destroy(&ctx->mem_hash_lock);
ctx              1653 drivers/misc/habanalabs/memory.c 	hl_mmu_ctx_fini(ctx);
ctx                14 drivers/misc/habanalabs/mmu.c static inline u64 get_phys_addr(struct hl_ctx *ctx, u64 shadow_addr);
ctx                16 drivers/misc/habanalabs/mmu.c static struct pgt_info *get_pgt_info(struct hl_ctx *ctx, u64 hop_addr)
ctx                20 drivers/misc/habanalabs/mmu.c 	hash_for_each_possible(ctx->mmu_shadow_hash, pgt_info, node,
ctx                28 drivers/misc/habanalabs/mmu.c static void free_hop(struct hl_ctx *ctx, u64 hop_addr)
ctx                30 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx                31 drivers/misc/habanalabs/mmu.c 	struct pgt_info *pgt_info = get_pgt_info(ctx, hop_addr);
ctx                40 drivers/misc/habanalabs/mmu.c static u64 alloc_hop(struct hl_ctx *ctx)
ctx                42 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx                65 drivers/misc/habanalabs/mmu.c 	pgt_info->ctx = ctx;
ctx                67 drivers/misc/habanalabs/mmu.c 	hash_add(ctx->mmu_shadow_hash, &pgt_info->node, shadow_addr);
ctx                79 drivers/misc/habanalabs/mmu.c static inline u64 get_phys_hop0_addr(struct hl_ctx *ctx)
ctx                81 drivers/misc/habanalabs/mmu.c 	return ctx->hdev->asic_prop.mmu_pgt_addr +
ctx                82 drivers/misc/habanalabs/mmu.c 			(ctx->asid * ctx->hdev->asic_prop.mmu_hop_table_size);
ctx                85 drivers/misc/habanalabs/mmu.c static inline u64 get_hop0_addr(struct hl_ctx *ctx)
ctx                87 drivers/misc/habanalabs/mmu.c 	return (u64) (uintptr_t) ctx->hdev->mmu_shadow_hop0 +
ctx                88 drivers/misc/habanalabs/mmu.c 			(ctx->asid * ctx->hdev->asic_prop.mmu_hop_table_size);
ctx                91 drivers/misc/habanalabs/mmu.c static inline void flush(struct hl_ctx *ctx)
ctx                95 drivers/misc/habanalabs/mmu.c 	ctx->hdev->asic_funcs->read_pte(ctx->hdev, get_phys_hop0_addr(ctx));
ctx                99 drivers/misc/habanalabs/mmu.c static inline void write_pte(struct hl_ctx *ctx, u64 shadow_pte_addr, u64 val)
ctx               108 drivers/misc/habanalabs/mmu.c 	u64 phys_val = get_phys_addr(ctx, val & PTE_PHYS_ADDR_MASK) |
ctx               111 drivers/misc/habanalabs/mmu.c 	ctx->hdev->asic_funcs->write_pte(ctx->hdev,
ctx               112 drivers/misc/habanalabs/mmu.c 					get_phys_addr(ctx, shadow_pte_addr),
ctx               119 drivers/misc/habanalabs/mmu.c static inline void write_final_pte(struct hl_ctx *ctx, u64 shadow_pte_addr,
ctx               122 drivers/misc/habanalabs/mmu.c 	ctx->hdev->asic_funcs->write_pte(ctx->hdev,
ctx               123 drivers/misc/habanalabs/mmu.c 					get_phys_addr(ctx, shadow_pte_addr),
ctx               129 drivers/misc/habanalabs/mmu.c static inline void clear_pte(struct hl_ctx *ctx, u64 pte_addr)
ctx               132 drivers/misc/habanalabs/mmu.c 	write_final_pte(ctx, pte_addr, 0);
ctx               135 drivers/misc/habanalabs/mmu.c static inline void get_pte(struct hl_ctx *ctx, u64 hop_addr)
ctx               137 drivers/misc/habanalabs/mmu.c 	get_pgt_info(ctx, hop_addr)->num_of_ptes++;
ctx               149 drivers/misc/habanalabs/mmu.c static inline int put_pte(struct hl_ctx *ctx, u64 hop_addr)
ctx               151 drivers/misc/habanalabs/mmu.c 	struct pgt_info *pgt_info = get_pgt_info(ctx, hop_addr);
ctx               162 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, hop_addr);
ctx               167 drivers/misc/habanalabs/mmu.c static inline u64 get_hopN_pte_addr(struct hl_ctx *ctx, u64 hop_addr,
ctx               170 drivers/misc/habanalabs/mmu.c 	return hop_addr + ctx->hdev->asic_prop.mmu_pte_size *
ctx               174 drivers/misc/habanalabs/mmu.c static inline u64 get_hop0_pte_addr(struct hl_ctx *ctx, u64 hop_addr, u64 vaddr)
ctx               176 drivers/misc/habanalabs/mmu.c 	return get_hopN_pte_addr(ctx, hop_addr, vaddr, HOP0_MASK, HOP0_SHIFT);
ctx               179 drivers/misc/habanalabs/mmu.c static inline u64 get_hop1_pte_addr(struct hl_ctx *ctx, u64 hop_addr, u64 vaddr)
ctx               181 drivers/misc/habanalabs/mmu.c 	return get_hopN_pte_addr(ctx, hop_addr, vaddr, HOP1_MASK, HOP1_SHIFT);
ctx               184 drivers/misc/habanalabs/mmu.c static inline u64 get_hop2_pte_addr(struct hl_ctx *ctx, u64 hop_addr, u64 vaddr)
ctx               186 drivers/misc/habanalabs/mmu.c 	return get_hopN_pte_addr(ctx, hop_addr, vaddr, HOP2_MASK, HOP2_SHIFT);
ctx               189 drivers/misc/habanalabs/mmu.c static inline u64 get_hop3_pte_addr(struct hl_ctx *ctx, u64 hop_addr, u64 vaddr)
ctx               191 drivers/misc/habanalabs/mmu.c 	return get_hopN_pte_addr(ctx, hop_addr, vaddr, HOP3_MASK, HOP3_SHIFT);
ctx               194 drivers/misc/habanalabs/mmu.c static inline u64 get_hop4_pte_addr(struct hl_ctx *ctx, u64 hop_addr, u64 vaddr)
ctx               196 drivers/misc/habanalabs/mmu.c 	return get_hopN_pte_addr(ctx, hop_addr, vaddr, HOP4_MASK, HOP4_SHIFT);
ctx               199 drivers/misc/habanalabs/mmu.c static inline u64 get_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte)
ctx               207 drivers/misc/habanalabs/mmu.c static inline u64 get_alloc_next_hop_addr(struct hl_ctx *ctx, u64 curr_pte,
ctx               210 drivers/misc/habanalabs/mmu.c 	u64 hop_addr = get_next_hop_addr(ctx, curr_pte);
ctx               213 drivers/misc/habanalabs/mmu.c 		hop_addr = alloc_hop(ctx);
ctx               221 drivers/misc/habanalabs/mmu.c static inline u64 get_phys_addr(struct hl_ctx *ctx, u64 shadow_addr)
ctx               223 drivers/misc/habanalabs/mmu.c 	u64 page_mask = (ctx->hdev->asic_prop.mmu_hop_table_size - 1);
ctx               228 drivers/misc/habanalabs/mmu.c 	if (shadow_hop_addr != get_hop0_addr(ctx))
ctx               229 drivers/misc/habanalabs/mmu.c 		phys_hop_addr = get_pgt_info(ctx, shadow_hop_addr)->phys_addr;
ctx               231 drivers/misc/habanalabs/mmu.c 		phys_hop_addr = get_phys_hop0_addr(ctx);
ctx               236 drivers/misc/habanalabs/mmu.c static int dram_default_mapping_init(struct hl_ctx *ctx)
ctx               238 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               246 drivers/misc/habanalabs/mmu.c 			(ctx->asid == HL_KERNEL_ASID_ID))
ctx               256 drivers/misc/habanalabs/mmu.c 	ctx->dram_default_hops = kzalloc(HL_PTE_SIZE * total_hops,  GFP_KERNEL);
ctx               257 drivers/misc/habanalabs/mmu.c 	if (!ctx->dram_default_hops)
ctx               260 drivers/misc/habanalabs/mmu.c 	hop0_addr = get_hop0_addr(ctx);
ctx               262 drivers/misc/habanalabs/mmu.c 	hop1_addr = alloc_hop(ctx);
ctx               269 drivers/misc/habanalabs/mmu.c 	ctx->dram_default_hops[total_hops - 1] = hop1_addr;
ctx               271 drivers/misc/habanalabs/mmu.c 	hop2_addr = alloc_hop(ctx);
ctx               278 drivers/misc/habanalabs/mmu.c 	ctx->dram_default_hops[total_hops - 2] = hop2_addr;
ctx               281 drivers/misc/habanalabs/mmu.c 		ctx->dram_default_hops[i] = alloc_hop(ctx);
ctx               282 drivers/misc/habanalabs/mmu.c 		if (ctx->dram_default_hops[i] == ULLONG_MAX) {
ctx               292 drivers/misc/habanalabs/mmu.c 	write_pte(ctx, hop0_addr, pte_val);
ctx               295 drivers/misc/habanalabs/mmu.c 	write_pte(ctx, hop1_addr, pte_val);
ctx               296 drivers/misc/habanalabs/mmu.c 	get_pte(ctx, hop1_addr);
ctx               300 drivers/misc/habanalabs/mmu.c 		pte_val = (ctx->dram_default_hops[i] & PTE_PHYS_ADDR_MASK) |
ctx               302 drivers/misc/habanalabs/mmu.c 		write_pte(ctx, hop2_pte_addr, pte_val);
ctx               303 drivers/misc/habanalabs/mmu.c 		get_pte(ctx, hop2_addr);
ctx               311 drivers/misc/habanalabs/mmu.c 		hop3_pte_addr = ctx->dram_default_hops[i];
ctx               313 drivers/misc/habanalabs/mmu.c 			write_final_pte(ctx, hop3_pte_addr, pte_val);
ctx               314 drivers/misc/habanalabs/mmu.c 			get_pte(ctx, ctx->dram_default_hops[i]);
ctx               319 drivers/misc/habanalabs/mmu.c 	flush(ctx);
ctx               325 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, ctx->dram_default_hops[i]);
ctx               327 drivers/misc/habanalabs/mmu.c 	free_hop(ctx, hop2_addr);
ctx               329 drivers/misc/habanalabs/mmu.c 	free_hop(ctx, hop1_addr);
ctx               331 drivers/misc/habanalabs/mmu.c 	kfree(ctx->dram_default_hops);
ctx               336 drivers/misc/habanalabs/mmu.c static void dram_default_mapping_fini(struct hl_ctx *ctx)
ctx               338 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               346 drivers/misc/habanalabs/mmu.c 			(ctx->asid == HL_KERNEL_ASID_ID))
ctx               353 drivers/misc/habanalabs/mmu.c 	hop0_addr = get_hop0_addr(ctx);
ctx               356 drivers/misc/habanalabs/mmu.c 	hop1_addr = ctx->dram_default_hops[total_hops - 1];
ctx               357 drivers/misc/habanalabs/mmu.c 	hop2_addr = ctx->dram_default_hops[total_hops - 2];
ctx               360 drivers/misc/habanalabs/mmu.c 		hop3_pte_addr = ctx->dram_default_hops[i];
ctx               362 drivers/misc/habanalabs/mmu.c 			clear_pte(ctx, hop3_pte_addr);
ctx               363 drivers/misc/habanalabs/mmu.c 			put_pte(ctx, ctx->dram_default_hops[i]);
ctx               371 drivers/misc/habanalabs/mmu.c 		clear_pte(ctx, hop2_pte_addr);
ctx               372 drivers/misc/habanalabs/mmu.c 		put_pte(ctx, hop2_addr);
ctx               376 drivers/misc/habanalabs/mmu.c 	clear_pte(ctx, hop1_addr);
ctx               377 drivers/misc/habanalabs/mmu.c 	put_pte(ctx, hop1_addr);
ctx               378 drivers/misc/habanalabs/mmu.c 	clear_pte(ctx, hop0_addr);
ctx               380 drivers/misc/habanalabs/mmu.c 	kfree(ctx->dram_default_hops);
ctx               382 drivers/misc/habanalabs/mmu.c 	flush(ctx);
ctx               467 drivers/misc/habanalabs/mmu.c int hl_mmu_ctx_init(struct hl_ctx *ctx)
ctx               469 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               474 drivers/misc/habanalabs/mmu.c 	mutex_init(&ctx->mmu_lock);
ctx               475 drivers/misc/habanalabs/mmu.c 	hash_init(ctx->mmu_phys_hash);
ctx               476 drivers/misc/habanalabs/mmu.c 	hash_init(ctx->mmu_shadow_hash);
ctx               478 drivers/misc/habanalabs/mmu.c 	return dram_default_mapping_init(ctx);
ctx               491 drivers/misc/habanalabs/mmu.c void hl_mmu_ctx_fini(struct hl_ctx *ctx)
ctx               493 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               501 drivers/misc/habanalabs/mmu.c 	dram_default_mapping_fini(ctx);
ctx               503 drivers/misc/habanalabs/mmu.c 	if (!hash_empty(ctx->mmu_shadow_hash))
ctx               506 drivers/misc/habanalabs/mmu.c 	hash_for_each_safe(ctx->mmu_shadow_hash, i, tmp, pgt_info, node) {
ctx               509 drivers/misc/habanalabs/mmu.c 			pgt_info->phys_addr, ctx->asid, pgt_info->num_of_ptes);
ctx               510 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, pgt_info->shadow_addr);
ctx               513 drivers/misc/habanalabs/mmu.c 	mutex_destroy(&ctx->mmu_lock);
ctx               516 drivers/misc/habanalabs/mmu.c static int _hl_mmu_unmap(struct hl_ctx *ctx, u64 virt_addr)
ctx               518 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               532 drivers/misc/habanalabs/mmu.c 	hop0_addr = get_hop0_addr(ctx);
ctx               533 drivers/misc/habanalabs/mmu.c 	hop0_pte_addr = get_hop0_pte_addr(ctx, hop0_addr, virt_addr);
ctx               537 drivers/misc/habanalabs/mmu.c 	hop1_addr = get_next_hop_addr(ctx, curr_pte);
ctx               542 drivers/misc/habanalabs/mmu.c 	hop1_pte_addr = get_hop1_pte_addr(ctx, hop1_addr, virt_addr);
ctx               546 drivers/misc/habanalabs/mmu.c 	hop2_addr = get_next_hop_addr(ctx, curr_pte);
ctx               551 drivers/misc/habanalabs/mmu.c 	hop2_pte_addr = get_hop2_pte_addr(ctx, hop2_addr, virt_addr);
ctx               555 drivers/misc/habanalabs/mmu.c 	hop3_addr = get_next_hop_addr(ctx, curr_pte);
ctx               560 drivers/misc/habanalabs/mmu.c 	hop3_pte_addr = get_hop3_pte_addr(ctx, hop3_addr, virt_addr);
ctx               573 drivers/misc/habanalabs/mmu.c 		hop4_addr = get_next_hop_addr(ctx, curr_pte);
ctx               578 drivers/misc/habanalabs/mmu.c 		hop4_pte_addr = get_hop4_pte_addr(ctx, hop4_addr, virt_addr);
ctx               603 drivers/misc/habanalabs/mmu.c 		write_final_pte(ctx, hop3_pte_addr, default_pte);
ctx               604 drivers/misc/habanalabs/mmu.c 		put_pte(ctx, hop3_addr);
ctx               610 drivers/misc/habanalabs/mmu.c 			clear_pte(ctx, hop4_pte_addr);
ctx               612 drivers/misc/habanalabs/mmu.c 			clear_pte(ctx, hop3_pte_addr);
ctx               614 drivers/misc/habanalabs/mmu.c 		if (hop4_addr && !put_pte(ctx, hop4_addr))
ctx               620 drivers/misc/habanalabs/mmu.c 		clear_pte(ctx, hop3_pte_addr);
ctx               622 drivers/misc/habanalabs/mmu.c 		if (put_pte(ctx, hop3_addr))
ctx               625 drivers/misc/habanalabs/mmu.c 		clear_pte(ctx, hop2_pte_addr);
ctx               627 drivers/misc/habanalabs/mmu.c 		if (put_pte(ctx, hop2_addr))
ctx               630 drivers/misc/habanalabs/mmu.c 		clear_pte(ctx, hop1_pte_addr);
ctx               632 drivers/misc/habanalabs/mmu.c 		if (put_pte(ctx, hop1_addr))
ctx               635 drivers/misc/habanalabs/mmu.c 		clear_pte(ctx, hop0_pte_addr);
ctx               639 drivers/misc/habanalabs/mmu.c 	flush(ctx);
ctx               667 drivers/misc/habanalabs/mmu.c int hl_mmu_unmap(struct hl_ctx *ctx, u64 virt_addr, u32 page_size)
ctx               669 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               697 drivers/misc/habanalabs/mmu.c 		rc = _hl_mmu_unmap(ctx, real_virt_addr);
ctx               707 drivers/misc/habanalabs/mmu.c static int _hl_mmu_map(struct hl_ctx *ctx, u64 virt_addr, u64 phys_addr,
ctx               710 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               740 drivers/misc/habanalabs/mmu.c 	hop0_addr = get_hop0_addr(ctx);
ctx               741 drivers/misc/habanalabs/mmu.c 	hop0_pte_addr = get_hop0_pte_addr(ctx, hop0_addr, virt_addr);
ctx               744 drivers/misc/habanalabs/mmu.c 	hop1_addr = get_alloc_next_hop_addr(ctx, curr_pte, &hop1_new);
ctx               748 drivers/misc/habanalabs/mmu.c 	hop1_pte_addr = get_hop1_pte_addr(ctx, hop1_addr, virt_addr);
ctx               751 drivers/misc/habanalabs/mmu.c 	hop2_addr = get_alloc_next_hop_addr(ctx, curr_pte, &hop2_new);
ctx               755 drivers/misc/habanalabs/mmu.c 	hop2_pte_addr = get_hop2_pte_addr(ctx, hop2_addr, virt_addr);
ctx               758 drivers/misc/habanalabs/mmu.c 	hop3_addr = get_alloc_next_hop_addr(ctx, curr_pte, &hop3_new);
ctx               762 drivers/misc/habanalabs/mmu.c 	hop3_pte_addr = get_hop3_pte_addr(ctx, hop3_addr, virt_addr);
ctx               766 drivers/misc/habanalabs/mmu.c 		hop4_addr = get_alloc_next_hop_addr(ctx, curr_pte, &hop4_new);
ctx               770 drivers/misc/habanalabs/mmu.c 		hop4_pte_addr = get_hop4_pte_addr(ctx, hop4_addr, virt_addr);
ctx               820 drivers/misc/habanalabs/mmu.c 		write_final_pte(ctx, hop3_pte_addr, curr_pte);
ctx               822 drivers/misc/habanalabs/mmu.c 		write_final_pte(ctx, hop4_pte_addr, curr_pte);
ctx               827 drivers/misc/habanalabs/mmu.c 		write_pte(ctx, hop0_pte_addr, curr_pte);
ctx               832 drivers/misc/habanalabs/mmu.c 		write_pte(ctx, hop1_pte_addr, curr_pte);
ctx               833 drivers/misc/habanalabs/mmu.c 		get_pte(ctx, hop1_addr);
ctx               838 drivers/misc/habanalabs/mmu.c 		write_pte(ctx, hop2_pte_addr, curr_pte);
ctx               839 drivers/misc/habanalabs/mmu.c 		get_pte(ctx, hop2_addr);
ctx               846 drivers/misc/habanalabs/mmu.c 			write_pte(ctx, hop3_pte_addr, curr_pte);
ctx               847 drivers/misc/habanalabs/mmu.c 			get_pte(ctx, hop3_addr);
ctx               850 drivers/misc/habanalabs/mmu.c 		get_pte(ctx, hop4_addr);
ctx               852 drivers/misc/habanalabs/mmu.c 		get_pte(ctx, hop3_addr);
ctx               855 drivers/misc/habanalabs/mmu.c 	flush(ctx);
ctx               861 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, hop4_addr);
ctx               863 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, hop3_addr);
ctx               865 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, hop2_addr);
ctx               867 drivers/misc/habanalabs/mmu.c 		free_hop(ctx, hop1_addr);
ctx               890 drivers/misc/habanalabs/mmu.c int hl_mmu_map(struct hl_ctx *ctx, u64 virt_addr, u64 phys_addr, u32 page_size)
ctx               892 drivers/misc/habanalabs/mmu.c 	struct hl_device *hdev = ctx->hdev;
ctx               925 drivers/misc/habanalabs/mmu.c 		rc = _hl_mmu_map(ctx, real_virt_addr, real_phys_addr,
ctx               940 drivers/misc/habanalabs/mmu.c 		if (_hl_mmu_unmap(ctx, real_virt_addr))
ctx               956 drivers/misc/habanalabs/mmu.c void hl_mmu_swap_out(struct hl_ctx *ctx)
ctx               967 drivers/misc/habanalabs/mmu.c void hl_mmu_swap_in(struct hl_ctx *ctx)
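
The habanalabs MMU lines above all revolve around one idea: each hop (one level of the multi-level page table) carries a count of live PTEs, get_pte() takes a reference, put_pte() drops one, and the hop table is freed the moment its last PTE goes away. A minimal host-side sketch of that refcounting, using hypothetical names (hop_table, hop_alloc, hop_get, hop_put) rather than the driver's API:

	#include <stdlib.h>

	struct hop_table {
		unsigned int num_of_ptes;	/* live PTEs routed through this hop */
		unsigned long long *ptes;	/* backing storage for the entries */
	};

	static struct hop_table *hop_alloc(size_t nptes)
	{
		struct hop_table *hop = calloc(1, sizeof(*hop));

		if (!hop)
			return NULL;
		hop->ptes = calloc(nptes, sizeof(*hop->ptes));
		if (!hop->ptes) {
			free(hop);
			return NULL;
		}
		return hop;
	}

	static void hop_get(struct hop_table *hop)
	{
		hop->num_of_ptes++;		/* a new PTE now points through us */
	}

	/* Returns 1 when the hop was freed, mirroring put_pte()'s contract. */
	static int hop_put(struct hop_table *hop)
	{
		if (--hop->num_of_ptes)
			return 0;
		free(hop->ptes);		/* last PTE gone: release the table */
		free(hop);
		return 1;
	}

put_pte() in the driver returns whether the hop was actually freed so callers such as _hl_mmu_unmap() know whether to keep clearing entries one level up; hop_put() mirrors that contract.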
ctx               301 drivers/misc/mic/vop/vop_main.c 				     const char *name, bool ctx)
ctx               352 drivers/misc/mic/vop/vop_main.c 	vq = vop_new_virtqueue(index, le16_to_cpu(config.num), dev, ctx,
ctx               386 drivers/misc/mic/vop/vop_main.c 			const char * const names[], const bool *ctx,
ctx               407 drivers/misc/mic/vop/vop_main.c 				     ctx ? ctx[i] : false);
ctx                19 drivers/misc/ocxl/afu_irq.c int ocxl_irq_offset_to_id(struct ocxl_context *ctx, u64 offset)
ctx                21 drivers/misc/ocxl/afu_irq.c 	return (offset - ctx->afu->irq_base_offset) >> PAGE_SHIFT;
ctx                24 drivers/misc/ocxl/afu_irq.c u64 ocxl_irq_id_to_offset(struct ocxl_context *ctx, int irq_id)
ctx                26 drivers/misc/ocxl/afu_irq.c 	return ctx->afu->irq_base_offset + (irq_id << PAGE_SHIFT);
ctx                29 drivers/misc/ocxl/afu_irq.c int ocxl_irq_set_handler(struct ocxl_context *ctx, int irq_id,
ctx                37 drivers/misc/ocxl/afu_irq.c 	mutex_lock(&ctx->irq_lock);
ctx                38 drivers/misc/ocxl/afu_irq.c 	irq = idr_find(&ctx->irq_idr, irq_id);
ctx                52 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
ctx                69 drivers/misc/ocxl/afu_irq.c static int setup_afu_irq(struct ocxl_context *ctx, struct afu_irq *irq)
ctx               104 drivers/misc/ocxl/afu_irq.c int ocxl_afu_irq_alloc(struct ocxl_context *ctx, int *irq_id)
ctx               118 drivers/misc/ocxl/afu_irq.c 	mutex_lock(&ctx->irq_lock);
ctx               120 drivers/misc/ocxl/afu_irq.c 	irq->id = idr_alloc(&ctx->irq_idr, irq, 0, MAX_IRQ_PER_CONTEXT,
ctx               127 drivers/misc/ocxl/afu_irq.c 	rc = ocxl_link_irq_alloc(ctx->afu->fn->link, &irq->hw_irq,
ctx               132 drivers/misc/ocxl/afu_irq.c 	rc = setup_afu_irq(ctx, irq);
ctx               136 drivers/misc/ocxl/afu_irq.c 	trace_ocxl_afu_irq_alloc(ctx->pasid, irq->id, irq->virq, irq->hw_irq);
ctx               137 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
ctx               144 drivers/misc/ocxl/afu_irq.c 	ocxl_link_free_irq(ctx->afu->fn->link, irq->hw_irq);
ctx               146 drivers/misc/ocxl/afu_irq.c 	idr_remove(&ctx->irq_idr, irq->id);
ctx               148 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
ctx               154 drivers/misc/ocxl/afu_irq.c static void afu_irq_free(struct afu_irq *irq, struct ocxl_context *ctx)
ctx               156 drivers/misc/ocxl/afu_irq.c 	trace_ocxl_afu_irq_free(ctx->pasid, irq->id);
ctx               157 drivers/misc/ocxl/afu_irq.c 	if (ctx->mapping)
ctx               158 drivers/misc/ocxl/afu_irq.c 		unmap_mapping_range(ctx->mapping,
ctx               159 drivers/misc/ocxl/afu_irq.c 				ocxl_irq_id_to_offset(ctx, irq->id),
ctx               164 drivers/misc/ocxl/afu_irq.c 	ocxl_link_free_irq(ctx->afu->fn->link, irq->hw_irq);
ctx               168 drivers/misc/ocxl/afu_irq.c int ocxl_afu_irq_free(struct ocxl_context *ctx, int irq_id)
ctx               172 drivers/misc/ocxl/afu_irq.c 	mutex_lock(&ctx->irq_lock);
ctx               174 drivers/misc/ocxl/afu_irq.c 	irq = idr_find(&ctx->irq_idr, irq_id);
ctx               176 drivers/misc/ocxl/afu_irq.c 		mutex_unlock(&ctx->irq_lock);
ctx               179 drivers/misc/ocxl/afu_irq.c 	idr_remove(&ctx->irq_idr, irq->id);
ctx               180 drivers/misc/ocxl/afu_irq.c 	afu_irq_free(irq, ctx);
ctx               181 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
ctx               186 drivers/misc/ocxl/afu_irq.c void ocxl_afu_irq_free_all(struct ocxl_context *ctx)
ctx               191 drivers/misc/ocxl/afu_irq.c 	mutex_lock(&ctx->irq_lock);
ctx               192 drivers/misc/ocxl/afu_irq.c 	idr_for_each_entry(&ctx->irq_idr, irq, id)
ctx               193 drivers/misc/ocxl/afu_irq.c 		afu_irq_free(irq, ctx);
ctx               194 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
ctx               197 drivers/misc/ocxl/afu_irq.c u64 ocxl_afu_irq_get_addr(struct ocxl_context *ctx, int irq_id)
ctx               202 drivers/misc/ocxl/afu_irq.c 	mutex_lock(&ctx->irq_lock);
ctx               203 drivers/misc/ocxl/afu_irq.c 	irq = idr_find(&ctx->irq_idr, irq_id);
ctx               206 drivers/misc/ocxl/afu_irq.c 	mutex_unlock(&ctx->irq_lock);
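
ocxl_irq_id_to_offset() and ocxl_irq_offset_to_id() above are pure arithmetic: every allocated IRQ id owns one page-sized slot past the AFU's irq_base_offset, which is what lets userspace mmap a trigger page per interrupt. A self-contained sketch of that round-trip (SKETCH_PAGE_SHIFT and the base value here are illustrative, not taken from the driver):

	#include <assert.h>
	#include <stdint.h>

	#define SKETCH_PAGE_SHIFT 12	/* 4 KiB pages, the common case */

	static uint64_t irq_id_to_offset(uint64_t irq_base, int irq_id)
	{
		return irq_base + ((uint64_t)irq_id << SKETCH_PAGE_SHIFT);
	}

	static int irq_offset_to_id(uint64_t irq_base, uint64_t offset)
	{
		return (int)((offset - irq_base) >> SKETCH_PAGE_SHIFT);
	}

	int main(void)
	{
		uint64_t base = 1ULL << 20;	/* hypothetical per-AFU IRQ window */

		/* the two transforms must round-trip for every id */
		for (int id = 0; id < 8; id++)
			assert(irq_offset_to_id(base, irq_id_to_offset(base, id)) == id);
		return 0;
	}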
ctx                11 drivers/misc/ocxl/context.c 	struct ocxl_context *ctx;
ctx                13 drivers/misc/ocxl/context.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                14 drivers/misc/ocxl/context.c 	if (!ctx)
ctx                17 drivers/misc/ocxl/context.c 	ctx->afu = afu;
ctx                19 drivers/misc/ocxl/context.c 	pasid = idr_alloc(&afu->contexts_idr, ctx, afu->pasid_base,
ctx                23 drivers/misc/ocxl/context.c 		kfree(ctx);
ctx                29 drivers/misc/ocxl/context.c 	ctx->pasid = pasid;
ctx                30 drivers/misc/ocxl/context.c 	ctx->status = OPENED;
ctx                31 drivers/misc/ocxl/context.c 	mutex_init(&ctx->status_mutex);
ctx                32 drivers/misc/ocxl/context.c 	ctx->mapping = mapping;
ctx                33 drivers/misc/ocxl/context.c 	mutex_init(&ctx->mapping_lock);
ctx                34 drivers/misc/ocxl/context.c 	init_waitqueue_head(&ctx->events_wq);
ctx                35 drivers/misc/ocxl/context.c 	mutex_init(&ctx->xsl_error_lock);
ctx                36 drivers/misc/ocxl/context.c 	mutex_init(&ctx->irq_lock);
ctx                37 drivers/misc/ocxl/context.c 	idr_init(&ctx->irq_idr);
ctx                38 drivers/misc/ocxl/context.c 	ctx->tidr = 0;
ctx                45 drivers/misc/ocxl/context.c 	*context = ctx;
ctx                58 drivers/misc/ocxl/context.c 	struct ocxl_context *ctx = (struct ocxl_context *) data;
ctx                60 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->xsl_error_lock);
ctx                61 drivers/misc/ocxl/context.c 	ctx->xsl_error.addr = addr;
ctx                62 drivers/misc/ocxl/context.c 	ctx->xsl_error.dsisr = dsisr;
ctx                63 drivers/misc/ocxl/context.c 	ctx->xsl_error.count++;
ctx                64 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->xsl_error_lock);
ctx                66 drivers/misc/ocxl/context.c 	wake_up_all(&ctx->events_wq);
ctx                69 drivers/misc/ocxl/context.c int ocxl_context_attach(struct ocxl_context *ctx, u64 amr, struct mm_struct *mm)
ctx                75 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->status_mutex);
ctx                76 drivers/misc/ocxl/context.c 	if (ctx->status != OPENED) {
ctx                84 drivers/misc/ocxl/context.c 	rc = ocxl_link_add_pe(ctx->afu->fn->link, ctx->pasid, pidr, ctx->tidr,
ctx                85 drivers/misc/ocxl/context.c 			      amr, mm, xsl_fault_error, ctx);
ctx                89 drivers/misc/ocxl/context.c 	ctx->status = ATTACHED;
ctx                91 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->status_mutex);
ctx                97 drivers/misc/ocxl/context.c 		u64 offset, struct ocxl_context *ctx)
ctx               100 drivers/misc/ocxl/context.c 	int irq_id = ocxl_irq_offset_to_id(ctx, offset);
ctx               102 drivers/misc/ocxl/context.c 	trigger_addr = ocxl_afu_irq_get_addr(ctx, irq_id);
ctx               110 drivers/misc/ocxl/context.c 		u64 offset, struct ocxl_context *ctx)
ctx               116 drivers/misc/ocxl/context.c 	if (offset >= ctx->afu->config.pp_mmio_stride)
ctx               119 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->status_mutex);
ctx               120 drivers/misc/ocxl/context.c 	if (ctx->status != ATTACHED) {
ctx               121 drivers/misc/ocxl/context.c 		mutex_unlock(&ctx->status_mutex);
ctx               127 drivers/misc/ocxl/context.c 	pasid_off = ctx->pasid - ctx->afu->pasid_base;
ctx               128 drivers/misc/ocxl/context.c 	pp_mmio_addr = ctx->afu->pp_mmio_start +
ctx               129 drivers/misc/ocxl/context.c 		pasid_off * ctx->afu->config.pp_mmio_stride +
ctx               133 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->status_mutex);
ctx               140 drivers/misc/ocxl/context.c 	struct ocxl_context *ctx = vma->vm_file->private_data;
ctx               146 drivers/misc/ocxl/context.c 		ctx->pasid, vmf->address, offset);
ctx               148 drivers/misc/ocxl/context.c 	if (offset < ctx->afu->irq_base_offset)
ctx               149 drivers/misc/ocxl/context.c 		ret = map_pp_mmio(vma, vmf->address, offset, ctx);
ctx               151 drivers/misc/ocxl/context.c 		ret = map_afu_irq(vma, vmf->address, offset, ctx);
ctx               159 drivers/misc/ocxl/context.c static int check_mmap_afu_irq(struct ocxl_context *ctx,
ctx               162 drivers/misc/ocxl/context.c 	int irq_id = ocxl_irq_offset_to_id(ctx, vma->vm_pgoff << PAGE_SHIFT);
ctx               169 drivers/misc/ocxl/context.c 	if (!ocxl_afu_irq_get_addr(ctx, irq_id))
ctx               185 drivers/misc/ocxl/context.c static int check_mmap_mmio(struct ocxl_context *ctx,
ctx               189 drivers/misc/ocxl/context.c 		(ctx->afu->config.pp_mmio_stride >> PAGE_SHIFT))
ctx               194 drivers/misc/ocxl/context.c int ocxl_context_mmap(struct ocxl_context *ctx, struct vm_area_struct *vma)
ctx               198 drivers/misc/ocxl/context.c 	if ((vma->vm_pgoff << PAGE_SHIFT) < ctx->afu->irq_base_offset)
ctx               199 drivers/misc/ocxl/context.c 		rc = check_mmap_mmio(ctx, vma);
ctx               201 drivers/misc/ocxl/context.c 		rc = check_mmap_afu_irq(ctx, vma);
ctx               211 drivers/misc/ocxl/context.c int ocxl_context_detach(struct ocxl_context *ctx)
ctx               218 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->status_mutex);
ctx               219 drivers/misc/ocxl/context.c 	status = ctx->status;
ctx               220 drivers/misc/ocxl/context.c 	ctx->status = CLOSED;
ctx               221 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->status_mutex);
ctx               225 drivers/misc/ocxl/context.c 	dev = to_pci_dev(ctx->afu->fn->dev.parent);
ctx               226 drivers/misc/ocxl/context.c 	afu_control_pos = ctx->afu->config.dvsec_afu_control_pos;
ctx               228 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->afu->afu_control_lock);
ctx               229 drivers/misc/ocxl/context.c 	rc = ocxl_config_terminate_pasid(dev, afu_control_pos, ctx->pasid);
ctx               230 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->afu->afu_control_lock);
ctx               231 drivers/misc/ocxl/context.c 	trace_ocxl_terminate_pasid(ctx->pasid, rc);
ctx               249 drivers/misc/ocxl/context.c 	rc = ocxl_link_remove_pe(ctx->afu->fn->link, ctx->pasid);
ctx               260 drivers/misc/ocxl/context.c 	struct ocxl_context *ctx;
ctx               264 drivers/misc/ocxl/context.c 	idr_for_each_entry(&afu->contexts_idr, ctx, tmp) {
ctx               265 drivers/misc/ocxl/context.c 		ocxl_context_detach(ctx);
ctx               273 drivers/misc/ocxl/context.c 		mutex_lock(&ctx->mapping_lock);
ctx               274 drivers/misc/ocxl/context.c 		if (ctx->mapping)
ctx               275 drivers/misc/ocxl/context.c 			unmap_mapping_range(ctx->mapping, 0, 0, 1);
ctx               276 drivers/misc/ocxl/context.c 		mutex_unlock(&ctx->mapping_lock);
ctx               281 drivers/misc/ocxl/context.c void ocxl_context_free(struct ocxl_context *ctx)
ctx               283 drivers/misc/ocxl/context.c 	mutex_lock(&ctx->afu->contexts_lock);
ctx               284 drivers/misc/ocxl/context.c 	ctx->afu->pasid_count--;
ctx               285 drivers/misc/ocxl/context.c 	idr_remove(&ctx->afu->contexts_idr, ctx->pasid);
ctx               286 drivers/misc/ocxl/context.c 	mutex_unlock(&ctx->afu->contexts_lock);
ctx               288 drivers/misc/ocxl/context.c 	ocxl_afu_irq_free_all(ctx);
ctx               289 drivers/misc/ocxl/context.c 	idr_destroy(&ctx->irq_idr);
ctx               291 drivers/misc/ocxl/context.c 	ocxl_afu_put(ctx->afu);
ctx               292 drivers/misc/ocxl/context.c 	kfree(ctx);
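
The context code above funnels a small lifecycle (OPENED -> ATTACHED -> CLOSED) through status_mutex: attach refuses anything but OPENED, and detach snapshots the old state before forcing CLOSED so teardown can be skipped for contexts that never attached. A pthread sketch of that guard, with illustrative names:

	#include <errno.h>
	#include <pthread.h>

	enum ctx_status { OPENED, ATTACHED, CLOSED };

	struct sketch_ctx {
		pthread_mutex_t status_mutex;
		enum ctx_status status;
	};

	static int sketch_attach(struct sketch_ctx *ctx)
	{
		int rc = 0;

		pthread_mutex_lock(&ctx->status_mutex);
		if (ctx->status != OPENED)	/* double attach, or already closed */
			rc = -EIO;
		else
			ctx->status = ATTACHED;
		pthread_mutex_unlock(&ctx->status_mutex);
		return rc;
	}

	static enum ctx_status sketch_detach(struct sketch_ctx *ctx)
	{
		enum ctx_status old;

		pthread_mutex_lock(&ctx->status_mutex);
		old = ctx->status;	/* caller skips HW teardown if never ATTACHED */
		ctx->status = CLOSED;
		pthread_mutex_unlock(&ctx->status_mutex);
		return old;
	}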
ctx                53 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx;
ctx                62 drivers/misc/ocxl/file.c 	rc = ocxl_context_alloc(&ctx, info->afu, inode->i_mapping);
ctx                68 drivers/misc/ocxl/file.c 	file->private_data = ctx;
ctx                72 drivers/misc/ocxl/file.c static long afu_ioctl_attach(struct ocxl_context *ctx,
ctx                79 drivers/misc/ocxl/file.c 	pr_debug("%s for context %d\n", __func__, ctx->pasid);
ctx                89 drivers/misc/ocxl/file.c 	rc = ocxl_context_attach(ctx, amr, current->mm);
ctx                93 drivers/misc/ocxl/file.c static long afu_ioctl_get_metadata(struct ocxl_context *ctx,
ctx               102 drivers/misc/ocxl/file.c 	arg.afu_version_major = ctx->afu->config.version_major;
ctx               103 drivers/misc/ocxl/file.c 	arg.afu_version_minor = ctx->afu->config.version_minor;
ctx               104 drivers/misc/ocxl/file.c 	arg.pasid = ctx->pasid;
ctx               105 drivers/misc/ocxl/file.c 	arg.pp_mmio_size = ctx->afu->config.pp_mmio_stride;
ctx               106 drivers/misc/ocxl/file.c 	arg.global_mmio_size = ctx->afu->config.global_mmio_size;
ctx               115 drivers/misc/ocxl/file.c static long afu_ioctl_enable_p9_wait(struct ocxl_context *ctx,
ctx               126 drivers/misc/ocxl/file.c 		mutex_lock(&ctx->status_mutex);
ctx               127 drivers/misc/ocxl/file.c 		if (!ctx->tidr) {
ctx               129 drivers/misc/ocxl/file.c 				mutex_unlock(&ctx->status_mutex);
ctx               133 drivers/misc/ocxl/file.c 			ctx->tidr = current->thread.tidr;
ctx               136 drivers/misc/ocxl/file.c 		status = ctx->status;
ctx               137 drivers/misc/ocxl/file.c 		mutex_unlock(&ctx->status_mutex);
ctx               140 drivers/misc/ocxl/file.c 			int rc = ocxl_link_update_pe(ctx->afu->fn->link,
ctx               141 drivers/misc/ocxl/file.c 				ctx->pasid, ctx->tidr);
ctx               147 drivers/misc/ocxl/file.c 		arg.thread_id = ctx->tidr;
ctx               159 drivers/misc/ocxl/file.c static long afu_ioctl_get_features(struct ocxl_context *ctx,
ctx               204 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx = file->private_data;
ctx               212 drivers/misc/ocxl/file.c 	pr_debug("%s for context %d, command %s\n", __func__, ctx->pasid,
ctx               215 drivers/misc/ocxl/file.c 	mutex_lock(&ctx->status_mutex);
ctx               216 drivers/misc/ocxl/file.c 	closed = (ctx->status == CLOSED);
ctx               217 drivers/misc/ocxl/file.c 	mutex_unlock(&ctx->status_mutex);
ctx               224 drivers/misc/ocxl/file.c 		rc = afu_ioctl_attach(ctx,
ctx               229 drivers/misc/ocxl/file.c 		rc = ocxl_afu_irq_alloc(ctx, &irq_id);
ctx               231 drivers/misc/ocxl/file.c 			irq_offset = ocxl_irq_id_to_offset(ctx, irq_id);
ctx               235 drivers/misc/ocxl/file.c 				ocxl_afu_irq_free(ctx, irq_id);
ctx               246 drivers/misc/ocxl/file.c 		irq_id = ocxl_irq_offset_to_id(ctx, irq_offset);
ctx               247 drivers/misc/ocxl/file.c 		rc = ocxl_afu_irq_free(ctx, irq_id);
ctx               257 drivers/misc/ocxl/file.c 		irq_id = ocxl_irq_offset_to_id(ctx, irq_fd.irq_offset);
ctx               261 drivers/misc/ocxl/file.c 		rc = ocxl_irq_set_handler(ctx, irq_id, irq_handler, irq_free, ev_ctx);
ctx               265 drivers/misc/ocxl/file.c 		rc = afu_ioctl_get_metadata(ctx,
ctx               271 drivers/misc/ocxl/file.c 		rc = afu_ioctl_enable_p9_wait(ctx,
ctx               277 drivers/misc/ocxl/file.c 		rc = afu_ioctl_get_features(ctx,
ctx               295 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx = file->private_data;
ctx               297 drivers/misc/ocxl/file.c 	pr_debug("%s for context %d\n", __func__, ctx->pasid);
ctx               298 drivers/misc/ocxl/file.c 	return ocxl_context_mmap(ctx, vma);
ctx               301 drivers/misc/ocxl/file.c static bool has_xsl_error(struct ocxl_context *ctx)
ctx               305 drivers/misc/ocxl/file.c 	mutex_lock(&ctx->xsl_error_lock);
ctx               306 drivers/misc/ocxl/file.c 	ret = !!ctx->xsl_error.addr;
ctx               307 drivers/misc/ocxl/file.c 	mutex_unlock(&ctx->xsl_error_lock);
ctx               317 drivers/misc/ocxl/file.c static bool afu_events_pending(struct ocxl_context *ctx)
ctx               319 drivers/misc/ocxl/file.c 	if (has_xsl_error(ctx))
ctx               326 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx = file->private_data;
ctx               330 drivers/misc/ocxl/file.c 	pr_debug("%s for context %d\n", __func__, ctx->pasid);
ctx               332 drivers/misc/ocxl/file.c 	poll_wait(file, &ctx->events_wq, wait);
ctx               334 drivers/misc/ocxl/file.c 	mutex_lock(&ctx->status_mutex);
ctx               335 drivers/misc/ocxl/file.c 	closed = (ctx->status == CLOSED);
ctx               336 drivers/misc/ocxl/file.c 	mutex_unlock(&ctx->status_mutex);
ctx               338 drivers/misc/ocxl/file.c 	if (afu_events_pending(ctx))
ctx               354 drivers/misc/ocxl/file.c static ssize_t append_xsl_error(struct ocxl_context *ctx,
ctx               362 drivers/misc/ocxl/file.c 	mutex_lock(&ctx->xsl_error_lock);
ctx               363 drivers/misc/ocxl/file.c 	if (!ctx->xsl_error.addr) {
ctx               364 drivers/misc/ocxl/file.c 		mutex_unlock(&ctx->xsl_error_lock);
ctx               368 drivers/misc/ocxl/file.c 	body.addr = ctx->xsl_error.addr;
ctx               369 drivers/misc/ocxl/file.c 	body.dsisr = ctx->xsl_error.dsisr;
ctx               370 drivers/misc/ocxl/file.c 	body.count = ctx->xsl_error.count;
ctx               372 drivers/misc/ocxl/file.c 	ctx->xsl_error.addr = 0;
ctx               373 drivers/misc/ocxl/file.c 	ctx->xsl_error.dsisr = 0;
ctx               374 drivers/misc/ocxl/file.c 	ctx->xsl_error.count = 0;
ctx               376 drivers/misc/ocxl/file.c 	mutex_unlock(&ctx->xsl_error_lock);
ctx               398 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx = file->private_data;
ctx               415 drivers/misc/ocxl/file.c 		prepare_to_wait(&ctx->events_wq, &event_wait,
ctx               418 drivers/misc/ocxl/file.c 		if (afu_events_pending(ctx))
ctx               421 drivers/misc/ocxl/file.c 		if (ctx->status == CLOSED)
ctx               425 drivers/misc/ocxl/file.c 			finish_wait(&ctx->events_wq, &event_wait);
ctx               430 drivers/misc/ocxl/file.c 			finish_wait(&ctx->events_wq, &event_wait);
ctx               437 drivers/misc/ocxl/file.c 	finish_wait(&ctx->events_wq, &event_wait);
ctx               439 drivers/misc/ocxl/file.c 	if (has_xsl_error(ctx)) {
ctx               440 drivers/misc/ocxl/file.c 		used = append_xsl_error(ctx, &header, buf + sizeof(header));
ctx               445 drivers/misc/ocxl/file.c 	if (!afu_events_pending(ctx))
ctx               459 drivers/misc/ocxl/file.c 	struct ocxl_context *ctx = file->private_data;
ctx               463 drivers/misc/ocxl/file.c 	rc = ocxl_context_detach(ctx);
ctx               464 drivers/misc/ocxl/file.c 	mutex_lock(&ctx->mapping_lock);
ctx               465 drivers/misc/ocxl/file.c 	ctx->mapping = NULL;
ctx               466 drivers/misc/ocxl/file.c 	mutex_unlock(&ctx->mapping_lock);
ctx               467 drivers/misc/ocxl/file.c 	wake_up_all(&ctx->events_wq);
ctx               469 drivers/misc/ocxl/file.c 		ocxl_context_free(ctx);
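
afu_read() and afu_poll() above use the kernel wait-queue idiom: register on events_wq, recheck afu_events_pending() and the CLOSED status, sleep, repeat. The userspace shape of the same loop is a condition variable; in this sketch, pending and closed stand in for those two checks and post_event() plays the role of wake_up_all():

	#include <pthread.h>
	#include <stdbool.h>

	struct waiter {
		pthread_mutex_t lock;
		pthread_cond_t wq;	/* the events_wq equivalent */
		bool closed;
		int pending;
	};

	/* Blocks until an event is pending or the context is closed. */
	static int wait_for_event(struct waiter *w)
	{
		int got = 0;

		pthread_mutex_lock(&w->lock);
		while (!w->pending && !w->closed)
			pthread_cond_wait(&w->wq, &w->lock);	/* sleep, then recheck */
		if (w->pending)
			got = w->pending--;
		pthread_mutex_unlock(&w->lock);
		return got;	/* 0 means woken only by close, akin to the -EIO path */
	}

	/* Producer side: post an event and wake all sleepers, like wake_up_all(). */
	static void post_event(struct waiter *w)
	{
		pthread_mutex_lock(&w->lock);
		w->pending++;
		pthread_cond_broadcast(&w->wq);
		pthread_mutex_unlock(&w->lock);
	}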
ctx               133 drivers/misc/ocxl/ocxl_internal.h int ocxl_context_mmap(struct ocxl_context *ctx,
ctx               140 drivers/misc/ocxl/ocxl_internal.h int ocxl_irq_offset_to_id(struct ocxl_context *ctx, u64 offset);
ctx               141 drivers/misc/ocxl/ocxl_internal.h u64 ocxl_irq_id_to_offset(struct ocxl_context *ctx, int irq_id);
ctx               142 drivers/misc/ocxl/ocxl_internal.h void ocxl_afu_irq_free_all(struct ocxl_context *ctx);
ctx              1462 drivers/mmc/core/block.c 		mmc_put_card(mq->card, &mq->ctx);
ctx              1965 drivers/mmc/core/block.c 		mmc_put_card(mq->card, &mq->ctx);
ctx               763 drivers/mmc/core/core.c static inline bool mmc_ctx_matches(struct mmc_host *host, struct mmc_ctx *ctx,
ctx               766 drivers/mmc/core/core.c 	return host->claimer == ctx ||
ctx               767 drivers/mmc/core/core.c 	       (!ctx && task && host->claimer->task == task);
ctx               771 drivers/mmc/core/core.c 				       struct mmc_ctx *ctx,
ctx               775 drivers/mmc/core/core.c 		if (ctx)
ctx               776 drivers/mmc/core/core.c 			host->claimer = ctx;
ctx               796 drivers/mmc/core/core.c int __mmc_claim_host(struct mmc_host *host, struct mmc_ctx *ctx,
ctx               799 drivers/mmc/core/core.c 	struct task_struct *task = ctx ? NULL : current;
ctx               812 drivers/mmc/core/core.c 		if (stop || !host->claimed || mmc_ctx_matches(host, ctx, task))
ctx               821 drivers/mmc/core/core.c 		mmc_ctx_set_claimer(host, ctx, task);
ctx               873 drivers/mmc/core/core.c void mmc_get_card(struct mmc_card *card, struct mmc_ctx *ctx)
ctx               876 drivers/mmc/core/core.c 	__mmc_claim_host(card->host, ctx, NULL);
ctx               884 drivers/mmc/core/core.c void mmc_put_card(struct mmc_card *card, struct mmc_ctx *ctx)
ctx               888 drivers/mmc/core/core.c 	WARN_ON(ctx && host->claimer != ctx);
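
__mmc_claim_host() above makes host claiming reentrant: a claim succeeds immediately when the stored claimer matches, either by mmc_ctx pointer or, when no context is passed, by the claiming task (that is mmc_ctx_matches()). A condensed pthread rendition of that ownership test and the wait loop around it (all names here are hypothetical):

	#include <pthread.h>
	#include <stdbool.h>

	struct sketch_host {
		pthread_mutex_t lock;
		pthread_cond_t cond;
		int claim_cnt;		/* 0 = unclaimed; >0 = recursion depth */
		void *claimer_ctx;	/* non-NULL when claimed by a context */
		pthread_t claimer_task;	/* valid when claimed without a context */
	};

	static bool claimer_matches(struct sketch_host *h, void *ctx)
	{
		if (h->claimer_ctx)
			return h->claimer_ctx == ctx;
		return !ctx && pthread_equal(h->claimer_task, pthread_self());
	}

	static void sketch_claim(struct sketch_host *h, void *ctx)
	{
		pthread_mutex_lock(&h->lock);
		while (h->claim_cnt && !claimer_matches(h, ctx))
			pthread_cond_wait(&h->cond, &h->lock);	/* someone else owns it */
		if (!h->claim_cnt) {		/* first claim records the owner */
			h->claimer_ctx = ctx;
			h->claimer_task = pthread_self();
		}
		h->claim_cnt++;			/* nested claims just count up */
		pthread_mutex_unlock(&h->lock);
	}

	static void sketch_release(struct sketch_host *h)
	{
		pthread_mutex_lock(&h->lock);
		if (--h->claim_cnt == 0) {
			h->claimer_ctx = NULL;
			pthread_cond_broadcast(&h->cond);	/* wake other claimers */
		}
		pthread_mutex_unlock(&h->lock);
	}

mmc_get_card()/mmc_put_card() in the block and queue lines above are thin wrappers around exactly this claim/release pair, passing &mq->ctx as the owning context.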
ctx               122 drivers/mmc/core/core.h int __mmc_claim_host(struct mmc_host *host, struct mmc_ctx *ctx,
ctx               125 drivers/mmc/core/core.h void mmc_get_card(struct mmc_card *card, struct mmc_ctx *ctx);
ctx               126 drivers/mmc/core/core.h void mmc_put_card(struct mmc_card *card, struct mmc_ctx *ctx);
ctx               142 drivers/mmc/core/queue.c 	mmc_get_card(mq->card, &mq->ctx);
ctx               157 drivers/mmc/core/queue.c 	mmc_put_card(mq->card, &mq->ctx);
ctx               304 drivers/mmc/core/queue.c 		mmc_get_card(card, &mq->ctx);
ctx               337 drivers/mmc/core/queue.c 			mmc_put_card(card, &mq->ctx);
ctx                76 drivers/mmc/core/queue.h 	struct mmc_ctx		ctx;
ctx                33 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx                36 drivers/mmc/core/slot-gpio.c 	mmc_detect_change(host, msecs_to_jiffies(ctx->cd_debounce_delay_ms));
ctx                43 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = devm_kzalloc(host->parent,
ctx                44 drivers/mmc/core/slot-gpio.c 					    sizeof(*ctx), GFP_KERNEL);
ctx                46 drivers/mmc/core/slot-gpio.c 	if (ctx) {
ctx                47 drivers/mmc/core/slot-gpio.c 		ctx->cd_debounce_delay_ms = 200;
ctx                48 drivers/mmc/core/slot-gpio.c 		ctx->cd_label = devm_kasprintf(host->parent, GFP_KERNEL,
ctx                50 drivers/mmc/core/slot-gpio.c 		if (!ctx->cd_label)
ctx                52 drivers/mmc/core/slot-gpio.c 		ctx->ro_label = devm_kasprintf(host->parent, GFP_KERNEL,
ctx                54 drivers/mmc/core/slot-gpio.c 		if (!ctx->ro_label)
ctx                56 drivers/mmc/core/slot-gpio.c 		host->slot.handler_priv = ctx;
ctx                60 drivers/mmc/core/slot-gpio.c 	return ctx ? 0 : -ENOMEM;
ctx                65 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx                67 drivers/mmc/core/slot-gpio.c 	if (!ctx || !ctx->ro_gpio)
ctx                70 drivers/mmc/core/slot-gpio.c 	return gpiod_get_value_cansleep(ctx->ro_gpio);
ctx                76 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx                79 drivers/mmc/core/slot-gpio.c 	if (!ctx || !ctx->cd_gpio)
ctx                82 drivers/mmc/core/slot-gpio.c 	cansleep = gpiod_cansleep(ctx->cd_gpio);
ctx                83 drivers/mmc/core/slot-gpio.c 	if (ctx->override_cd_active_level) {
ctx                85 drivers/mmc/core/slot-gpio.c 				gpiod_get_raw_value_cansleep(ctx->cd_gpio) :
ctx                86 drivers/mmc/core/slot-gpio.c 				gpiod_get_raw_value(ctx->cd_gpio);
ctx                91 drivers/mmc/core/slot-gpio.c 		gpiod_get_value_cansleep(ctx->cd_gpio) :
ctx                92 drivers/mmc/core/slot-gpio.c 		gpiod_get_value(ctx->cd_gpio);
ctx                98 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               102 drivers/mmc/core/slot-gpio.c 	if (host->slot.cd_irq >= 0 || !ctx || !ctx->cd_gpio)
ctx               110 drivers/mmc/core/slot-gpio.c 		irq = gpiod_to_irq(ctx->cd_gpio);
ctx               113 drivers/mmc/core/slot-gpio.c 		if (!ctx->cd_gpio_isr)
ctx               114 drivers/mmc/core/slot-gpio.c 			ctx->cd_gpio_isr = mmc_gpio_cd_irqt;
ctx               116 drivers/mmc/core/slot-gpio.c 			NULL, ctx->cd_gpio_isr,
ctx               118 drivers/mmc/core/slot-gpio.c 			ctx->cd_label, host);
ctx               157 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               159 drivers/mmc/core/slot-gpio.c 	WARN_ON(ctx->cd_gpio_isr);
ctx               160 drivers/mmc/core/slot-gpio.c 	ctx->cd_gpio_isr = isr;
ctx               183 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               194 drivers/mmc/core/slot-gpio.c 			ctx->cd_debounce_delay_ms = debounce / 1000;
ctx               200 drivers/mmc/core/slot-gpio.c 	ctx->override_cd_active_level = override_active_level;
ctx               201 drivers/mmc/core/slot-gpio.c 	ctx->cd_gpio = desc;
ctx               209 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               211 drivers/mmc/core/slot-gpio.c 	return ctx->cd_gpio ? true : false;
ctx               230 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               250 drivers/mmc/core/slot-gpio.c 	ctx->ro_gpio = desc;
ctx               258 drivers/mmc/core/slot-gpio.c 	struct mmc_gpio *ctx = host->slot.handler_priv;
ctx               260 drivers/mmc/core/slot-gpio.c 	return ctx->ro_gpio ? true : false;
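
slot-gpio debounces card-detect by delaying the detect work by cd_debounce_delay_ms (defaulting to 200 ms, or the GPIO-reported value divided by 1000 to convert µs to ms). The driver defers through the MMC core rather than polling, but the underlying idea, accept a level only once it has been stable for the debounce window, can be sketched directly (read_cd() is a stand-in for the GPIO read):

	#include <stdbool.h>
	#include <time.h>

	static void sleep_ms(long ms)
	{
		struct timespec ts = { ms / 1000, (ms % 1000) * 1000000L };

		nanosleep(&ts, NULL);
	}

	/* Returns the card-detect level once it has held steady for debounce_ms. */
	static bool debounced_cd(bool (*read_cd)(void), long debounce_ms)
	{
		bool level = read_cd();

		for (;;) {
			sleep_ms(debounce_ms);
			bool again = read_cd();

			if (again == level)	/* stable across the whole window */
				return level;
			level = again;		/* changed: restart the window */
		}
	}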
ctx               175 drivers/mtd/nand/raw/ams-delta.c 			ams_delta_write_buf(priv, &instr->ctx.cmd.opcode, 1);
ctx               181 drivers/mtd/nand/raw/ams-delta.c 			ams_delta_write_buf(priv, instr->ctx.addr.addrs,
ctx               182 drivers/mtd/nand/raw/ams-delta.c 					    instr->ctx.addr.naddrs);
ctx               187 drivers/mtd/nand/raw/ams-delta.c 			ams_delta_read_buf(priv, instr->ctx.data.buf.in,
ctx               188 drivers/mtd/nand/raw/ams-delta.c 					   instr->ctx.data.len);
ctx               192 drivers/mtd/nand/raw/ams-delta.c 			ams_delta_write_buf(priv, instr->ctx.data.buf.out,
ctx               193 drivers/mtd/nand/raw/ams-delta.c 					    instr->ctx.data.len);
ctx               199 drivers/mtd/nand/raw/ams-delta.c 						instr->ctx.waitrdy.timeout_ms) :
ctx               201 drivers/mtd/nand/raw/ams-delta.c 						instr->ctx.waitrdy.timeout_ms);
ctx               171 drivers/mtd/nand/raw/au1550nd.c 	struct au1550nd_ctx *ctx = container_of(this, struct au1550nd_ctx,
ctx               177 drivers/mtd/nand/raw/au1550nd.c 		this->legacy.IO_ADDR_W = ctx->base + MEM_STNAND_CMD;
ctx               181 drivers/mtd/nand/raw/au1550nd.c 		this->legacy.IO_ADDR_W = ctx->base + MEM_STNAND_DATA;
ctx               185 drivers/mtd/nand/raw/au1550nd.c 		this->legacy.IO_ADDR_W = ctx->base + MEM_STNAND_ADDR;
ctx               189 drivers/mtd/nand/raw/au1550nd.c 		this->legacy.IO_ADDR_W = ctx->base + MEM_STNAND_DATA;
ctx               197 drivers/mtd/nand/raw/au1550nd.c 		alchemy_wrsmem((1 << (4 + ctx->cs)), AU1000_MEM_STNDCTL);
ctx               243 drivers/mtd/nand/raw/au1550nd.c 	struct au1550nd_ctx *ctx = container_of(this, struct au1550nd_ctx,
ctx               267 drivers/mtd/nand/raw/au1550nd.c 		ctx->write_byte(this, readcmd);
ctx               269 drivers/mtd/nand/raw/au1550nd.c 	ctx->write_byte(this, command);
ctx               283 drivers/mtd/nand/raw/au1550nd.c 			ctx->write_byte(this, column);
ctx               286 drivers/mtd/nand/raw/au1550nd.c 			ctx->write_byte(this, (u8)(page_addr & 0xff));
ctx               304 drivers/mtd/nand/raw/au1550nd.c 			ctx->write_byte(this, (u8)(page_addr >> 8));
ctx               307 drivers/mtd/nand/raw/au1550nd.c 				ctx->write_byte(this,
ctx               379 drivers/mtd/nand/raw/au1550nd.c 	struct au1550nd_ctx *ctx;
ctx               391 drivers/mtd/nand/raw/au1550nd.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               392 drivers/mtd/nand/raw/au1550nd.c 	if (!ctx)
ctx               407 drivers/mtd/nand/raw/au1550nd.c 	ctx->base = ioremap_nocache(r->start, 0x1000);
ctx               408 drivers/mtd/nand/raw/au1550nd.c 	if (!ctx->base) {
ctx               414 drivers/mtd/nand/raw/au1550nd.c 	this = &ctx->chip;
ctx               425 drivers/mtd/nand/raw/au1550nd.c 	ctx->cs = cs;
ctx               440 drivers/mtd/nand/raw/au1550nd.c 	ctx->write_byte = (pd->devwidth) ? au_write_byte16 : au_write_byte;
ctx               452 drivers/mtd/nand/raw/au1550nd.c 	platform_set_drvdata(pdev, ctx);
ctx               457 drivers/mtd/nand/raw/au1550nd.c 	iounmap(ctx->base);
ctx               461 drivers/mtd/nand/raw/au1550nd.c 	kfree(ctx);
ctx               467 drivers/mtd/nand/raw/au1550nd.c 	struct au1550nd_ctx *ctx = platform_get_drvdata(pdev);
ctx               470 drivers/mtd/nand/raw/au1550nd.c 	nand_release(&ctx->chip);
ctx               471 drivers/mtd/nand/raw/au1550nd.c 	iounmap(ctx->base);
ctx               473 drivers/mtd/nand/raw/au1550nd.c 	kfree(ctx);
ctx              1095 drivers/mtd/nand/raw/denali.c 				 &instr->ctx.cmd.opcode, 1);
ctx              1099 drivers/mtd/nand/raw/denali.c 				 instr->ctx.addr.addrs,
ctx              1100 drivers/mtd/nand/raw/denali.c 				 instr->ctx.addr.naddrs);
ctx              1104 drivers/mtd/nand/raw/denali.c 			       instr->ctx.data.buf.in,
ctx              1105 drivers/mtd/nand/raw/denali.c 			       instr->ctx.data.len,
ctx              1106 drivers/mtd/nand/raw/denali.c 			       !instr->ctx.data.force_8bit &&
ctx              1111 drivers/mtd/nand/raw/denali.c 				instr->ctx.data.buf.out,
ctx              1112 drivers/mtd/nand/raw/denali.c 				instr->ctx.data.len,
ctx              1113 drivers/mtd/nand/raw/denali.c 				!instr->ctx.data.force_8bit &&
ctx               620 drivers/mtd/nand/raw/fsmc_nand.c 			writeb_relaxed(instr->ctx.cmd.opcode, host->cmd_va);
ctx               624 drivers/mtd/nand/raw/fsmc_nand.c 			for (i = 0; i < instr->ctx.addr.naddrs; i++)
ctx               625 drivers/mtd/nand/raw/fsmc_nand.c 				writeb_relaxed(instr->ctx.addr.addrs[i],
ctx               631 drivers/mtd/nand/raw/fsmc_nand.c 				fsmc_read_buf_dma(host, instr->ctx.data.buf.in,
ctx               632 drivers/mtd/nand/raw/fsmc_nand.c 						  instr->ctx.data.len);
ctx               634 drivers/mtd/nand/raw/fsmc_nand.c 				fsmc_read_buf(host, instr->ctx.data.buf.in,
ctx               635 drivers/mtd/nand/raw/fsmc_nand.c 					      instr->ctx.data.len);
ctx               641 drivers/mtd/nand/raw/fsmc_nand.c 						   instr->ctx.data.buf.out,
ctx               642 drivers/mtd/nand/raw/fsmc_nand.c 						   instr->ctx.data.len);
ctx               644 drivers/mtd/nand/raw/fsmc_nand.c 				fsmc_write_buf(host, instr->ctx.data.buf.out,
ctx               645 drivers/mtd/nand/raw/fsmc_nand.c 					       instr->ctx.data.len);
ctx               650 drivers/mtd/nand/raw/fsmc_nand.c 						instr->ctx.waitrdy.timeout_ms);
ctx              2441 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			cmd = instr->ctx.cmd.opcode;
ctx              2455 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			desc = gpmi_chain_command(this, cmd, instr->ctx.addr.addrs,
ctx              2456 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 						  instr->ctx.addr.naddrs);
ctx              2459 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			buf_write = instr->ctx.data.buf.out;
ctx              2460 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			buf_len = instr->ctx.data.len;
ctx              2467 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			if (!instr->ctx.data.len)
ctx              2469 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			buf_read = instr->ctx.data.buf.in;
ctx              2470 drivers/mtd/nand/raw/gpmi-nand/gpmi-nand.c 			buf_len = instr->ctx.data.len;
ctx              1659 drivers/mtd/nand/raw/marvell_nand.c 					NDCB0_CMD1(instr->ctx.cmd.opcode);
ctx              1662 drivers/mtd/nand/raw/marvell_nand.c 					NDCB0_CMD2(instr->ctx.cmd.opcode) |
ctx              1672 drivers/mtd/nand/raw/marvell_nand.c 			addrs = &instr->ctx.addr.addrs[offset];
ctx              1718 drivers/mtd/nand/raw/marvell_nand.c 			nfc_op->rdy_timeout_ms = instr->ctx.waitrdy.timeout_ms;
ctx              1737 drivers/mtd/nand/raw/marvell_nand.c 	if (instr->ctx.data.force_8bit)
ctx              1741 drivers/mtd/nand/raw/marvell_nand.c 		u8 *in = instr->ctx.data.buf.in + offset;
ctx              1745 drivers/mtd/nand/raw/marvell_nand.c 		const u8 *out = instr->ctx.data.buf.out + offset;
ctx              1750 drivers/mtd/nand/raw/marvell_nand.c 	if (instr->ctx.data.force_8bit)
ctx               846 drivers/mtd/nand/raw/meson_nand.c 	if (meson_nfc_is_buffer_dma_safe(instr->ctx.data.buf.in))
ctx               847 drivers/mtd/nand/raw/meson_nand.c 		return instr->ctx.data.buf.in;
ctx               849 drivers/mtd/nand/raw/meson_nand.c 	return kzalloc(instr->ctx.data.len, GFP_KERNEL);
ctx               860 drivers/mtd/nand/raw/meson_nand.c 	if (buf == instr->ctx.data.buf.in)
ctx               863 drivers/mtd/nand/raw/meson_nand.c 	memcpy(instr->ctx.data.buf.in, buf, instr->ctx.data.len);
ctx               873 drivers/mtd/nand/raw/meson_nand.c 	if (meson_nfc_is_buffer_dma_safe(instr->ctx.data.buf.out))
ctx               874 drivers/mtd/nand/raw/meson_nand.c 		return (void *)instr->ctx.data.buf.out;
ctx               876 drivers/mtd/nand/raw/meson_nand.c 	return kmemdup(instr->ctx.data.buf.out,
ctx               877 drivers/mtd/nand/raw/meson_nand.c 		       instr->ctx.data.len, GFP_KERNEL);
ctx               888 drivers/mtd/nand/raw/meson_nand.c 	if (buf != instr->ctx.data.buf.out)
ctx               911 drivers/mtd/nand/raw/meson_nand.c 			cmd |= instr->ctx.cmd.opcode & 0xff;
ctx               917 drivers/mtd/nand/raw/meson_nand.c 			for (i = 0; i < instr->ctx.addr.naddrs; i++) {
ctx               919 drivers/mtd/nand/raw/meson_nand.c 				cmd |= instr->ctx.addr.addrs[i] & 0xff;
ctx               929 drivers/mtd/nand/raw/meson_nand.c 			meson_nfc_read_buf(nand, buf, instr->ctx.data.len);
ctx               937 drivers/mtd/nand/raw/meson_nand.c 			meson_nfc_write_buf(nand, buf, instr->ctx.data.len);
ctx               942 drivers/mtd/nand/raw/meson_nand.c 			meson_nfc_queue_rb(nfc, instr->ctx.waitrdy.timeout_ms);
ctx               409 drivers/mtd/nand/raw/mxic_nand.c 						 &instr->ctx.cmd.opcode,
ctx               415 drivers/mtd/nand/raw/mxic_nand.c 			       OP_ADDR_BYTES(instr->ctx.addr.naddrs),
ctx               418 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.addr.addrs, NULL,
ctx               419 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.addr.naddrs);
ctx               427 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.data.buf.in,
ctx               428 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.data.len);
ctx               432 drivers/mtd/nand/raw/mxic_nand.c 			writel(instr->ctx.data.len,
ctx               437 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.data.buf.out, NULL,
ctx               438 drivers/mtd/nand/raw/mxic_nand.c 						 instr->ctx.data.len);
ctx              1035 drivers/mtd/nand/raw/nand_base.c 		instrs[0].ctx.cmd.opcode = NAND_CMD_READOOB;
ctx              1038 drivers/mtd/nand/raw/nand_base.c 		instrs[0].ctx.cmd.opcode = NAND_CMD_READ1;
ctx              1049 drivers/mtd/nand/raw/nand_base.c 		instrs[1].ctx.addr.naddrs++;
ctx              1086 drivers/mtd/nand/raw/nand_base.c 		instrs[1].ctx.addr.naddrs++;
ctx              1232 drivers/mtd/nand/raw/nand_base.c 		instrs[3].ctx.data.force_8bit = force_8bit;
ctx              1316 drivers/mtd/nand/raw/nand_base.c 	instrs[2].ctx.addr.naddrs = naddrs;
ctx              1333 drivers/mtd/nand/raw/nand_base.c 			instrs[0].ctx.cmd.opcode = NAND_CMD_READOOB;
ctx              1336 drivers/mtd/nand/raw/nand_base.c 			instrs[0].ctx.cmd.opcode = NAND_CMD_READ1;
ctx              1532 drivers/mtd/nand/raw/nand_base.c 		instrs[2].ctx.data.force_8bit = force_8bit;
ctx              1693 drivers/mtd/nand/raw/nand_base.c 			instrs[1].ctx.addr.naddrs++;
ctx              1885 drivers/mtd/nand/raw/nand_base.c 		instrs[0].ctx.data.force_8bit = force_8bit;
ctx              1929 drivers/mtd/nand/raw/nand_base.c 		instrs[0].ctx.data.force_8bit = force_8bit;
ctx              1990 drivers/mtd/nand/raw/nand_base.c 		if (!pat->ctx.addr.maxcycles)
ctx              1993 drivers/mtd/nand/raw/nand_base.c 		if (instr->ctx.addr.naddrs - *start_offset >
ctx              1994 drivers/mtd/nand/raw/nand_base.c 		    pat->ctx.addr.maxcycles) {
ctx              1995 drivers/mtd/nand/raw/nand_base.c 			*start_offset += pat->ctx.addr.maxcycles;
ctx              2002 drivers/mtd/nand/raw/nand_base.c 		if (!pat->ctx.data.maxlen)
ctx              2005 drivers/mtd/nand/raw/nand_base.c 		if (instr->ctx.data.len - *start_offset >
ctx              2006 drivers/mtd/nand/raw/nand_base.c 		    pat->ctx.data.maxlen) {
ctx              2007 drivers/mtd/nand/raw/nand_base.c 			*start_offset += pat->ctx.data.maxlen;
ctx              2032 drivers/mtd/nand/raw/nand_base.c 			 struct nand_op_parser_ctx *ctx)
ctx              2034 drivers/mtd/nand/raw/nand_base.c 	unsigned int instr_offset = ctx->subop.first_instr_start_off;
ctx              2035 drivers/mtd/nand/raw/nand_base.c 	const struct nand_op_instr *end = ctx->instrs + ctx->ninstrs;
ctx              2036 drivers/mtd/nand/raw/nand_base.c 	const struct nand_op_instr *instr = ctx->subop.instrs;
ctx              2097 drivers/mtd/nand/raw/nand_base.c 	ctx->subop.ninstrs = ninstrs;
ctx              2098 drivers/mtd/nand/raw/nand_base.c 	ctx->subop.last_instr_end_off = instr_offset;
ctx              2104 drivers/mtd/nand/raw/nand_base.c static void nand_op_parser_trace(const struct nand_op_parser_ctx *ctx)
ctx              2112 drivers/mtd/nand/raw/nand_base.c 	for (i = 0; i < ctx->ninstrs; i++) {
ctx              2113 drivers/mtd/nand/raw/nand_base.c 		instr = &ctx->instrs[i];
ctx              2115 drivers/mtd/nand/raw/nand_base.c 		if (instr == &ctx->subop.instrs[0])
ctx              2120 drivers/mtd/nand/raw/nand_base.c 		if (instr == &ctx->subop.instrs[ctx->subop.ninstrs - 1])
ctx              2125 drivers/mtd/nand/raw/nand_base.c static void nand_op_parser_trace(const struct nand_op_parser_ctx *ctx)
ctx              2173 drivers/mtd/nand/raw/nand_base.c 	struct nand_op_parser_ctx ctx = {
ctx              2180 drivers/mtd/nand/raw/nand_base.c 	while (ctx.subop.instrs < op->instrs + op->ninstrs) {
ctx              2186 drivers/mtd/nand/raw/nand_base.c 			struct nand_op_parser_ctx test_ctx = ctx;
ctx              2205 drivers/mtd/nand/raw/nand_base.c 		ctx = best_ctx;
ctx              2206 drivers/mtd/nand/raw/nand_base.c 		nand_op_parser_trace(&ctx);
ctx              2210 drivers/mtd/nand/raw/nand_base.c 			ret = pattern->exec(chip, &ctx.subop);
ctx              2219 drivers/mtd/nand/raw/nand_base.c 		ctx.subop.instrs = ctx.subop.instrs + ctx.subop.ninstrs;
ctx              2220 drivers/mtd/nand/raw/nand_base.c 		if (ctx.subop.last_instr_end_off)
ctx              2221 drivers/mtd/nand/raw/nand_base.c 			ctx.subop.instrs -= 1;
ctx              2223 drivers/mtd/nand/raw/nand_base.c 		ctx.subop.first_instr_start_off = ctx.subop.last_instr_end_off;
ctx              2299 drivers/mtd/nand/raw/nand_base.c 		end_off = subop->instrs[instr_idx].ctx.addr.naddrs;
ctx              2353 drivers/mtd/nand/raw/nand_base.c 		end_off = subop->instrs[instr_idx].ctx.data.len;
ctx              2157 drivers/mtd/nand/raw/nandsim.c 			ns_nand_write_byte(chip, instr->ctx.cmd.opcode);
ctx              2161 drivers/mtd/nand/raw/nandsim.c 			for (i = 0; i < instr->ctx.addr.naddrs; i++)
ctx              2162 drivers/mtd/nand/raw/nandsim.c 				ns_nand_write_byte(chip, instr->ctx.addr.addrs[i]);
ctx              2165 drivers/mtd/nand/raw/nandsim.c 			ns_nand_read_buf(chip, instr->ctx.data.buf.in, instr->ctx.data.len);
ctx              2168 drivers/mtd/nand/raw/nandsim.c 			ns_nand_write_buf(chip, instr->ctx.data.buf.out, instr->ctx.data.len);
ctx              1380 drivers/mtd/nand/raw/stm32_fmc2_nand.c 			writeb_relaxed(instr->ctx.cmd.opcode,
ctx              1385 drivers/mtd/nand/raw/stm32_fmc2_nand.c 			for (i = 0; i < instr->ctx.addr.naddrs; i++)
ctx              1386 drivers/mtd/nand/raw/stm32_fmc2_nand.c 				writeb_relaxed(instr->ctx.addr.addrs[i],
ctx              1391 drivers/mtd/nand/raw/stm32_fmc2_nand.c 			stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
ctx              1392 drivers/mtd/nand/raw/stm32_fmc2_nand.c 					     instr->ctx.data.len,
ctx              1393 drivers/mtd/nand/raw/stm32_fmc2_nand.c 					     instr->ctx.data.force_8bit);
ctx              1397 drivers/mtd/nand/raw/stm32_fmc2_nand.c 			stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
ctx              1398 drivers/mtd/nand/raw/stm32_fmc2_nand.c 					      instr->ctx.data.len,
ctx              1399 drivers/mtd/nand/raw/stm32_fmc2_nand.c 					      instr->ctx.data.force_8bit);
ctx              1404 drivers/mtd/nand/raw/stm32_fmc2_nand.c 						 instr->ctx.waitrdy.timeout_ms);
ctx              1790 drivers/mtd/nand/raw/sunxi_nand.c 				extcmd |= instr->ctx.cmd.opcode;
ctx              1793 drivers/mtd/nand/raw/sunxi_nand.c 				       NFC_CMD(instr->ctx.cmd.opcode);
ctx              1801 drivers/mtd/nand/raw/sunxi_nand.c 				u32 addr = instr->ctx.addr.addrs[j + start];
ctx              1821 drivers/mtd/nand/raw/sunxi_nand.c 					    instr->ctx.data.buf.out + start,
ctx              1824 drivers/mtd/nand/raw/sunxi_nand.c 				inbuf = instr->ctx.data.buf.in + start;
ctx              1871 drivers/mtd/nand/raw/sunxi_nand.c 				 subop->instrs[0].ctx.waitrdy.timeout_ms);
ctx               369 drivers/mtd/nand/raw/tegra_nand.c 				writel_relaxed(instr->ctx.cmd.opcode,
ctx               373 drivers/mtd/nand/raw/tegra_nand.c 				writel_relaxed(instr->ctx.cmd.opcode,
ctx               382 drivers/mtd/nand/raw/tegra_nand.c 			addrs = &instr->ctx.addr.addrs[offset];
ctx               411 drivers/mtd/nand/raw/tegra_nand.c 			memcpy(&reg, instr->ctx.data.buf.out + offset, size);
ctx               435 drivers/mtd/nand/raw/tegra_nand.c 		memcpy(instr_data_in->ctx.data.buf.in + offset, &reg, size);
ctx               382 drivers/mtd/nand/raw/vf610_nfc.c 		cmd2 |= instr->ctx.cmd.opcode << CMD_BYTE1_SHIFT;
ctx               393 drivers/mtd/nand/raw/vf610_nfc.c 			u8 val = instr->ctx.addr.addrs[i];
ctx               408 drivers/mtd/nand/raw/vf610_nfc.c 		force8bit = instr->ctx.data.force_8bit;
ctx               415 drivers/mtd/nand/raw/vf610_nfc.c 				     instr->ctx.data.buf.out + offset,
ctx               423 drivers/mtd/nand/raw/vf610_nfc.c 		cmd1 |= instr->ctx.cmd.opcode << CMD_BYTE2_SHIFT;
ctx               438 drivers/mtd/nand/raw/vf610_nfc.c 		force8bit = instr->ctx.data.force_8bit;
ctx               455 drivers/mtd/nand/raw/vf610_nfc.c 		vf610_nfc_rd_from_sram(instr->ctx.data.buf.in + offset,
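
The raw-NAND entries above (nand_base, nandsim, stm32_fmc2, sunxi, tegra, vf610) all implement the same ->exec_op() contract: each struct nand_op_instr carries its payload in the instr->ctx union (cmd, addr, data or waitrdy), selected by instr->type. A minimal dispatcher over that union might look like the sketch below; the hw_*() helpers are hypothetical stand-ins for controller register accesses, not a real API.

#include <linux/errno.h>
#include <linux/mtd/rawnand.h>

/* Hypothetical controller accessors -- placeholders only. */
static void hw_write_cmd(struct nand_chip *chip, u8 opcode);
static void hw_write_addr(struct nand_chip *chip, u8 addr);
static void hw_read_buf(struct nand_chip *chip, void *buf, unsigned int len);
static void hw_write_buf(struct nand_chip *chip, const void *buf,
			 unsigned int len);
static int hw_wait_rdy(struct nand_chip *chip, unsigned int timeout_ms);

static int sketch_exec_subop(struct nand_chip *chip,
			     const struct nand_subop *subop)
{
	unsigned int i, j;

	for (i = 0; i < subop->ninstrs; i++) {
		const struct nand_op_instr *instr = &subop->instrs[i];

		switch (instr->type) {
		case NAND_OP_CMD_INSTR:
			hw_write_cmd(chip, instr->ctx.cmd.opcode);
			break;
		case NAND_OP_ADDR_INSTR:
			/* One cycle per address byte, as in nandsim above. */
			for (j = 0; j < instr->ctx.addr.naddrs; j++)
				hw_write_addr(chip, instr->ctx.addr.addrs[j]);
			break;
		case NAND_OP_DATA_IN_INSTR:
			hw_read_buf(chip, instr->ctx.data.buf.in,
				    instr->ctx.data.len);
			break;
		case NAND_OP_DATA_OUT_INSTR:
			hw_write_buf(chip, instr->ctx.data.buf.out,
				     instr->ctx.data.len);
			break;
		case NAND_OP_WAITRDY_INSTR:
			if (hw_wait_rdy(chip, instr->ctx.waitrdy.timeout_ms))
				return -ETIMEDOUT;
			break;
		}
	}
	return 0;
}
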
ctx               112 drivers/net/caif/caif_virtio.c 	struct cfv_napi_context ctx;
ctx               257 drivers/net/caif/caif_virtio.c 	struct vringh_kiov *riov = &cfv->ctx.riov;
ctx               267 drivers/net/caif/caif_virtio.c 			if (cfv->ctx.head != USHRT_MAX) {
ctx               269 drivers/net/caif/caif_virtio.c 						     cfv->ctx.head,
ctx               271 drivers/net/caif/caif_virtio.c 				cfv->ctx.head = USHRT_MAX;
ctx               278 drivers/net/caif/caif_virtio.c 				&cfv->ctx.head,
ctx               715 drivers/net/caif/caif_virtio.c 	vringh_kiov_init(&cfv->ctx.riov, NULL, 0);
ctx               716 drivers/net/caif/caif_virtio.c 	cfv->ctx.head = USHRT_MAX;
ctx               758 drivers/net/caif/caif_virtio.c 	vringh_kiov_cleanup(&cfv->ctx.riov);
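
The caif_virtio lines show the lifecycle of the embedded vringh context: vringh_kiov_init() at setup, a USHRT_MAX sentinel in ctx.head meaning "no descriptor in flight", completion of the previous head before fetching the next, and vringh_kiov_cleanup() at teardown. A condensed sketch of that receive step, assuming an already-initialised host vring vrh and illustrative sketch_* names:

#include <linux/vringh.h>

struct sketch_napi_ctx {
	struct vringh_kiov riov;
	unsigned short head;	/* USHRT_MAX = no descriptor in flight */
};

static void sketch_rx_init(struct sketch_napi_ctx *ctx)
{
	vringh_kiov_init(&ctx->riov, NULL, 0);
	ctx->head = USHRT_MAX;
}

static void sketch_rx_one(struct vringh *vrh, struct sketch_napi_ctx *ctx)
{
	/* Retire the buffer from the previous pass first. */
	if (ctx->head != USHRT_MAX) {
		vringh_complete_kern(vrh, ctx->head, 0);
		ctx->head = USHRT_MAX;
	}

	/* >0: got a descriptor; 0: ring empty; <0: error. */
	if (vringh_getdesc_kern(vrh, &ctx->riov, NULL, &ctx->head,
				GFP_ATOMIC) <= 0)
		return;

	/* ... copy the frame out of ctx->riov here ... */
}
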
ctx               178 drivers/net/can/usb/mcba_usb.c 	struct mcba_usb_ctx *ctx = NULL;
ctx               182 drivers/net/can/usb/mcba_usb.c 			ctx = &priv->tx_context[i];
ctx               183 drivers/net/can/usb/mcba_usb.c 			ctx->ndx = i;
ctx               186 drivers/net/can/usb/mcba_usb.c 				ctx->can = true;
ctx               187 drivers/net/can/usb/mcba_usb.c 				ctx->dlc = cf->can_dlc;
ctx               189 drivers/net/can/usb/mcba_usb.c 				ctx->can = false;
ctx               190 drivers/net/can/usb/mcba_usb.c 				ctx->dlc = 0;
ctx               202 drivers/net/can/usb/mcba_usb.c 	return ctx;
ctx               208 drivers/net/can/usb/mcba_usb.c static inline void mcba_usb_free_ctx(struct mcba_usb_ctx *ctx)
ctx               211 drivers/net/can/usb/mcba_usb.c 	atomic_inc(&ctx->priv->free_ctx_cnt);
ctx               213 drivers/net/can/usb/mcba_usb.c 	ctx->ndx = MCBA_CTX_FREE;
ctx               216 drivers/net/can/usb/mcba_usb.c 	netif_wake_queue(ctx->priv->netdev);
ctx               221 drivers/net/can/usb/mcba_usb.c 	struct mcba_usb_ctx *ctx = urb->context;
ctx               224 drivers/net/can/usb/mcba_usb.c 	WARN_ON(!ctx);
ctx               226 drivers/net/can/usb/mcba_usb.c 	netdev = ctx->priv->netdev;
ctx               232 drivers/net/can/usb/mcba_usb.c 	if (ctx->can) {
ctx               237 drivers/net/can/usb/mcba_usb.c 		netdev->stats.tx_bytes += ctx->dlc;
ctx               240 drivers/net/can/usb/mcba_usb.c 		can_get_echo_skb(netdev, ctx->ndx);
ctx               247 drivers/net/can/usb/mcba_usb.c 	mcba_usb_free_ctx(ctx);
ctx               253 drivers/net/can/usb/mcba_usb.c 				 struct mcba_usb_ctx *ctx)
ctx               276 drivers/net/can/usb/mcba_usb.c 			  ctx);
ctx               314 drivers/net/can/usb/mcba_usb.c 	struct mcba_usb_ctx *ctx = NULL;
ctx               325 drivers/net/can/usb/mcba_usb.c 	ctx = mcba_usb_get_free_ctx(priv, cf);
ctx               326 drivers/net/can/usb/mcba_usb.c 	if (!ctx)
ctx               329 drivers/net/can/usb/mcba_usb.c 	can_put_echo_skb(skb, priv->netdev, ctx->ndx);
ctx               360 drivers/net/can/usb/mcba_usb.c 	err = mcba_usb_xmit(priv, (struct mcba_usb_msg *)&usb_msg, ctx);
ctx               367 drivers/net/can/usb/mcba_usb.c 	can_free_echo_skb(priv->netdev, ctx->ndx);
ctx               368 drivers/net/can/usb/mcba_usb.c 	mcba_usb_free_ctx(ctx);
ctx               379 drivers/net/can/usb/mcba_usb.c 	struct mcba_usb_ctx *ctx = NULL;
ctx               382 drivers/net/can/usb/mcba_usb.c 	ctx = mcba_usb_get_free_ctx(priv, NULL);
ctx               383 drivers/net/can/usb/mcba_usb.c 	if (!ctx) {
ctx               391 drivers/net/can/usb/mcba_usb.c 	err = mcba_usb_xmit(priv, usb_msg, ctx);
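
The mcba_usb lines sketch a fixed-size TX URB context pool: a slot is claimed by writing its own index into ->ndx, released by restoring a FREE sentinel, and an atomic counter drives queue wake-up. A generic version with made-up sketch_* names, under the same assumption the driver appears to make (the claim path is serialised by the netdev TX queue, so no lock is taken):

#include <linux/atomic.h>
#include <linux/kernel.h>

#define SKETCH_CTX_FREE		U32_MAX
#define SKETCH_MAX_TX_URBS	20

struct sketch_tx_ctx {
	u32 ndx;		/* own slot index, or SKETCH_CTX_FREE */
};

struct sketch_priv {
	struct sketch_tx_ctx tx_context[SKETCH_MAX_TX_URBS];
	atomic_t free_ctx_cnt;
};

static struct sketch_tx_ctx *sketch_get_free_ctx(struct sketch_priv *priv)
{
	int i;

	for (i = 0; i < SKETCH_MAX_TX_URBS; i++) {
		if (priv->tx_context[i].ndx == SKETCH_CTX_FREE) {
			priv->tx_context[i].ndx = i;
			atomic_dec(&priv->free_ctx_cnt);
			return &priv->tx_context[i];
		}
	}
	return NULL;	/* pool exhausted: caller stops the queue */
}

static void sketch_free_ctx(struct sketch_priv *priv,
			    struct sketch_tx_ctx *ctx)
{
	ctx->ndx = SKETCH_CTX_FREE;
	atomic_inc(&priv->free_ctx_cnt);
	/* caller then wakes the netdev queue, as in mcba_usb_free_ctx() */
}
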
ctx               385 drivers/net/can/usb/ucan.c 				 struct ucan_urb_context *ctx)
ctx               397 drivers/net/can/usb/ucan.c 	if (ctx->allocated) {
ctx               398 drivers/net/can/usb/ucan.c 		ctx->allocated = false;
ctx               561 drivers/net/dsa/lan9303-core.c 			   int portmap, void *ctx);
ctx               563 drivers/net/dsa/lan9303-core.c static void lan9303_alr_loop(struct lan9303 *chip, alr_loop_cb_t *cb, void *ctx)
ctx               585 drivers/net/dsa/lan9303-core.c 		cb(chip, dat0, dat1, portmap, ctx);
ctx               610 drivers/net/dsa/lan9303-core.c 					 u32 dat1, int portmap, void *ctx)
ctx               612 drivers/net/dsa/lan9303-core.c 	struct del_port_learned_ctx *del_ctx = ctx;
ctx               630 drivers/net/dsa/lan9303-core.c 				      u32 dat1, int portmap, void *ctx)
ctx               632 drivers/net/dsa/lan9303-core.c 	struct port_fdb_dump_ctx *dump_ctx = ctx;
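
The lan9303-core entries show the iterator-plus-cookie idiom: lan9303_alr_loop() walks the ALR table and invokes a caller-supplied callback with an opaque void *ctx, which each callback casts back to its own context struct (del_port_learned_ctx, port_fdb_dump_ctx above). A sketch of one such callback, with an illustrative sketch_count_ctx:

#include <linux/types.h>

struct sketch_count_ctx {
	int entries;
};

/* Matches the alr_loop_cb_t shape above; counts visited ALR entries. */
static void sketch_count_cb(struct lan9303 *chip, u32 dat0, u32 dat1,
			    int portmap, void *ctx)
{
	struct sketch_count_ctx *count = ctx;

	count->entries++;
}

/* Usage (inside lan9303-core.c, where the loop helper is visible):
 *	struct sketch_count_ctx count = { 0 };
 *
 *	lan9303_alr_loop(chip, sketch_count_cb, &count);
 */
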
ctx                30 drivers/net/dsa/lan9303_mdio.c static int lan9303_mdio_write(void *ctx, uint32_t reg, uint32_t val)
ctx                32 drivers/net/dsa/lan9303_mdio.c 	struct lan9303_mdio *sw_dev = (struct lan9303_mdio *)ctx;
ctx                48 drivers/net/dsa/lan9303_mdio.c static int lan9303_mdio_read(void *ctx, uint32_t reg, uint32_t *val)
ctx                50 drivers/net/dsa/lan9303_mdio.c 	struct lan9303_mdio *sw_dev = (struct lan9303_mdio *)ctx;
ctx               208 drivers/net/dsa/qca8k.c qca8k_regmap_read(void *ctx, uint32_t reg, uint32_t *val)
ctx               210 drivers/net/dsa/qca8k.c 	struct qca8k_priv *priv = (struct qca8k_priv *)ctx;
ctx               218 drivers/net/dsa/qca8k.c qca8k_regmap_write(void *ctx, uint32_t reg, uint32_t val)
ctx               220 drivers/net/dsa/qca8k.c 	struct qca8k_priv *priv = (struct qca8k_priv *)ctx;
ctx               304 drivers/net/dsa/realtek-smi-core.c static int realtek_smi_write(void *ctx, u32 reg, u32 val)
ctx               306 drivers/net/dsa/realtek-smi-core.c 	struct realtek_smi *smi = ctx;
ctx               311 drivers/net/dsa/realtek-smi-core.c static int realtek_smi_read(void *ctx, u32 reg, u32 *val)
ctx               313 drivers/net/dsa/realtek-smi-core.c 	struct realtek_smi *smi = ctx;
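
lan9303_mdio, qca8k and realtek-smi all feed regmap the same bus-less callback pattern: reg_read()/reg_write() receive the void *ctx that was registered with the map and immediately cast it back to the driver's private struct. A sketch against the real struct regmap_config hooks, where sketch_dev and sketch_hw_read() are assumptions:

#include <linux/regmap.h>

struct sketch_dev {
	struct device *dev;
};

/* Assumed hardware accessor. */
static unsigned int sketch_hw_read(struct sketch_dev *sw, unsigned int reg);

static int sketch_reg_read(void *ctx, unsigned int reg, unsigned int *val)
{
	struct sketch_dev *sw = ctx;	/* recover the typed context */

	*val = sketch_hw_read(sw, reg);
	return 0;
}

static const struct regmap_config sketch_regmap_config = {
	.reg_bits = 32,
	.val_bits = 32,
	.reg_read = sketch_reg_read,
};

/* Registration hands the context over once:
 *	map = devm_regmap_init(sw->dev, NULL, sw, &sketch_regmap_config);
 */
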
ctx                74 drivers/net/dsa/sja1105/sja1105.h 	int (*ptp_cmd)(const void *ctx, const void *data);
ctx                75 drivers/net/dsa/sja1105/sja1105.h 	int (*reset_cmd)(const void *ctx, const void *data);
ctx                76 drivers/net/dsa/sja1105/sja1105.h 	int (*setup_rgmii_delay)(const void *ctx, int port);
ctx               170 drivers/net/dsa/sja1105/sja1105.h int sja1105pqrs_setup_rgmii_delay(const void *ctx, int port);
ctx               425 drivers/net/dsa/sja1105/sja1105_clocking.c int sja1105pqrs_setup_rgmii_delay(const void *ctx, int port)
ctx               427 drivers/net/dsa/sja1105/sja1105_clocking.c 	const struct sja1105_private *priv = ctx;
ctx                81 drivers/net/dsa/sja1105/sja1105_ptp.c int sja1105et_ptp_cmd(const void *ctx, const void *data)
ctx                84 drivers/net/dsa/sja1105/sja1105_ptp.c 	const struct sja1105_private *priv = ctx;
ctx                98 drivers/net/dsa/sja1105/sja1105_ptp.c int sja1105pqrs_ptp_cmd(const void *ctx, const void *data)
ctx               101 drivers/net/dsa/sja1105/sja1105_ptp.c 	const struct sja1105_private *priv = ctx;
ctx                15 drivers/net/dsa/sja1105/sja1105_ptp.h int sja1105et_ptp_cmd(const void *ctx, const void *data);
ctx                17 drivers/net/dsa/sja1105/sja1105_ptp.h int sja1105pqrs_ptp_cmd(const void *ctx, const void *data);
ctx               219 drivers/net/dsa/sja1105/sja1105_spi.c static int sja1105et_reset_cmd(const void *ctx, const void *data)
ctx               221 drivers/net/dsa/sja1105/sja1105_spi.c 	const struct sja1105_private *priv = ctx;
ctx               248 drivers/net/dsa/sja1105/sja1105_spi.c static int sja1105pqrs_reset_cmd(const void *ctx, const void *data)
ctx               250 drivers/net/dsa/sja1105/sja1105_spi.c 	const struct sja1105_private *priv = ctx;
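
The sja1105 entries form a per-revision ops table: sja1105.h declares ptp_cmd/reset_cmd/setup_rgmii_delay slots taking const void *ctx, and each E/T versus P/Q/R/S implementation recasts ctx to the private struct. A stripped-down sketch of the dispatch, with illustrative sketch_* names:

struct sketch_info {
	int (*reset_cmd)(const void *ctx, const void *data);
};

struct sketch_priv {
	const struct sketch_info *info;
};

static int sketch_et_reset_cmd(const void *ctx, const void *data)
{
	const struct sketch_priv *priv = ctx;	/* recover typed context */

	/* ... write priv's E/T-specific reset word derived from data ... */
	return 0;
}

static const struct sketch_info sketch_et_info = {
	.reset_cmd = sketch_et_reset_cmd,
};

/* Call sites stay revision-agnostic:
 *	priv->info->reset_cmd(priv, &reset_data);
 */
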
ctx               332 drivers/net/ethernet/amazon/ena/ena_com.c 			      struct ena_com_create_io_ctx *ctx,
ctx               350 drivers/net/ethernet/amazon/ena/ena_com.c 		set_dev_node(ena_dev->dmadev, ctx->numa_node);
ctx               381 drivers/net/ethernet/amazon/ena/ena_com.c 		set_dev_node(ena_dev->dmadev, ctx->numa_node);
ctx               418 drivers/net/ethernet/amazon/ena/ena_com.c 			      struct ena_com_create_io_ctx *ctx,
ctx               435 drivers/net/ethernet/amazon/ena/ena_com.c 	set_dev_node(ena_dev->dmadev, ctx->numa_node);
ctx              1819 drivers/net/ethernet/amazon/ena/ena_com.c 			    struct ena_com_create_io_ctx *ctx)
ctx              1825 drivers/net/ethernet/amazon/ena/ena_com.c 	if (ctx->qid >= ENA_TOTAL_NUM_QUEUES) {
ctx              1827 drivers/net/ethernet/amazon/ena/ena_com.c 		       ctx->qid, ENA_TOTAL_NUM_QUEUES);
ctx              1831 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq = &ena_dev->io_sq_queues[ctx->qid];
ctx              1832 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq = &ena_dev->io_cq_queues[ctx->qid];
ctx              1838 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq->q_depth = ctx->queue_size;
ctx              1839 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq->direction = ctx->direction;
ctx              1840 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq->qid = ctx->qid;
ctx              1842 drivers/net/ethernet/amazon/ena/ena_com.c 	io_cq->msix_vector = ctx->msix_vector;
ctx              1844 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq->q_depth = ctx->queue_size;
ctx              1845 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq->direction = ctx->direction;
ctx              1846 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq->qid = ctx->qid;
ctx              1848 drivers/net/ethernet/amazon/ena/ena_com.c 	io_sq->mem_queue_type = ctx->mem_queue_type;
ctx              1850 drivers/net/ethernet/amazon/ena/ena_com.c 	if (ctx->direction == ENA_COM_IO_QUEUE_DIRECTION_TX)
ctx              1855 drivers/net/ethernet/amazon/ena/ena_com.c 	ret = ena_com_init_io_sq(ena_dev, ctx, io_sq);
ctx              1858 drivers/net/ethernet/amazon/ena/ena_com.c 	ret = ena_com_init_io_cq(ena_dev, ctx, io_cq);
ctx              2133 drivers/net/ethernet/amazon/ena/ena_com.c 			     struct ena_com_stats_ctx *ctx,
ctx              2136 drivers/net/ethernet/amazon/ena/ena_com.c 	struct ena_admin_aq_get_stats_cmd *get_cmd = &ctx->get_cmd;
ctx              2137 drivers/net/ethernet/amazon/ena/ena_com.c 	struct ena_admin_acq_get_stats_resp *get_resp = &ctx->get_resp;
ctx              2162 drivers/net/ethernet/amazon/ena/ena_com.c 	struct ena_com_stats_ctx ctx;
ctx              2165 drivers/net/ethernet/amazon/ena/ena_com.c 	memset(&ctx, 0x0, sizeof(ctx));
ctx              2166 drivers/net/ethernet/amazon/ena/ena_com.c 	ret = ena_get_dev_stats(ena_dev, &ctx, ENA_ADMIN_GET_STATS_TYPE_BASIC);
ctx              2168 drivers/net/ethernet/amazon/ena/ena_com.c 		memcpy(stats, &ctx.get_resp.basic_stats,
ctx              2169 drivers/net/ethernet/amazon/ena/ena_com.c 		       sizeof(ctx.get_resp.basic_stats));
ctx               453 drivers/net/ethernet/amazon/ena/ena_com.h 			    struct ena_com_create_io_ctx *ctx);
ctx              1630 drivers/net/ethernet/amazon/ena/ena_netdev.c 	struct ena_com_create_io_ctx ctx;
ctx              1643 drivers/net/ethernet/amazon/ena/ena_netdev.c 	memset(&ctx, 0x0, sizeof(ctx));
ctx              1645 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.direction = ENA_COM_IO_QUEUE_DIRECTION_TX;
ctx              1646 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.qid = ena_qid;
ctx              1647 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.mem_queue_type = ena_dev->tx_mem_queue_type;
ctx              1648 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.msix_vector = msix_vector;
ctx              1649 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.queue_size = tx_ring->ring_size;
ctx              1650 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.numa_node = cpu_to_node(tx_ring->cpu);
ctx              1652 drivers/net/ethernet/amazon/ena/ena_netdev.c 	rc = ena_com_create_io_queue(ena_dev, &ctx);
ctx              1671 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ena_com_update_numa_node(tx_ring->ena_com_io_cq, ctx.numa_node);
ctx              1698 drivers/net/ethernet/amazon/ena/ena_netdev.c 	struct ena_com_create_io_ctx ctx;
ctx              1710 drivers/net/ethernet/amazon/ena/ena_netdev.c 	memset(&ctx, 0x0, sizeof(ctx));
ctx              1712 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.qid = ena_qid;
ctx              1713 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.direction = ENA_COM_IO_QUEUE_DIRECTION_RX;
ctx              1714 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.mem_queue_type = ENA_ADMIN_PLACEMENT_POLICY_HOST;
ctx              1715 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.msix_vector = msix_vector;
ctx              1716 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.queue_size = rx_ring->ring_size;
ctx              1717 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx.numa_node = cpu_to_node(rx_ring->cpu);
ctx              1719 drivers/net/ethernet/amazon/ena/ena_netdev.c 	rc = ena_com_create_io_queue(ena_dev, &ctx);
ctx              1738 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ena_com_update_numa_node(rx_ring->ena_com_io_cq, ctx.numa_node);
ctx              3356 drivers/net/ethernet/amazon/ena/ena_netdev.c static int ena_calc_queue_size(struct ena_calc_queue_size_ctx *ctx)
ctx              3358 drivers/net/ethernet/amazon/ena/ena_netdev.c 	struct ena_admin_feature_llq_desc *llq = &ctx->get_feat_ctx->llq;
ctx              3359 drivers/net/ethernet/amazon/ena/ena_netdev.c 	struct ena_com_dev *ena_dev = ctx->ena_dev;
ctx              3365 drivers/net/ethernet/amazon/ena/ena_netdev.c 	if (ctx->ena_dev->supported_features & BIT(ENA_ADMIN_MAX_QUEUES_EXT)) {
ctx              3367 drivers/net/ethernet/amazon/ena/ena_netdev.c 			&ctx->get_feat_ctx->max_queue_ext.max_queue_ext;
ctx              3379 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ctx->max_tx_sgl_size = min_t(u16, ENA_PKT_MAX_BUFS,
ctx              3381 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ctx->max_rx_sgl_size = min_t(u16, ENA_PKT_MAX_BUFS,
ctx              3385 drivers/net/ethernet/amazon/ena/ena_netdev.c 			&ctx->get_feat_ctx->max_queues;
ctx              3397 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ctx->max_tx_sgl_size = min_t(u16, ENA_PKT_MAX_BUFS,
ctx              3399 drivers/net/ethernet/amazon/ena/ena_netdev.c 		ctx->max_rx_sgl_size = min_t(u16, ENA_PKT_MAX_BUFS,
ctx              3414 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx->max_tx_queue_size = max_tx_queue_size;
ctx              3415 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx->max_rx_queue_size = max_rx_queue_size;
ctx              3416 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx->tx_queue_size = tx_queue_size;
ctx              3417 drivers/net/ethernet/amazon/ena/ena_netdev.c 	ctx->rx_queue_size = rx_queue_size;
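
The ena_netdev TX and RX paths above fill a stack struct ena_com_create_io_ctx field by field and submit it in a single ena_com_create_io_queue() call. Condensed into a helper for the TX direction; the parameter names are illustrative, and "ena_com.h" is the driver-local header providing the types:

#include <linux/string.h>
#include "ena_com.h"

static int sketch_create_tx_queue(struct ena_com_dev *ena_dev, u16 qid,
				  u32 msix_vector, u16 ring_size, int node)
{
	struct ena_com_create_io_ctx ctx;

	memset(&ctx, 0x0, sizeof(ctx));

	ctx.direction = ENA_COM_IO_QUEUE_DIRECTION_TX;
	ctx.qid = qid;
	ctx.mem_queue_type = ena_dev->tx_mem_queue_type;
	ctx.msix_vector = msix_vector;
	ctx.queue_size = ring_size;
	ctx.numa_node = node;	/* lets ena_com allocate NUMA-local rings */

	return ena_com_create_io_queue(ena_dev, &ctx);
}
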
ctx              6435 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (bp->hwrm_spec_code < 0x10902 || BNXT_VF(bp) || bp->ctx)
ctx              6443 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		struct bnxt_ctx_mem_info *ctx;
ctx              6446 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              6447 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		if (!ctx) {
ctx              6453 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			kfree(ctx);
ctx              6458 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			ctx->tqm_mem[i] = ctx_pg;
ctx              6460 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		bp->ctx = ctx;
ctx              6461 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->qp_max_entries = le32_to_cpu(resp->qp_max_entries);
ctx              6462 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->qp_min_qp1_entries = le16_to_cpu(resp->qp_min_qp1_entries);
ctx              6463 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->qp_max_l2_entries = le16_to_cpu(resp->qp_max_l2_entries);
ctx              6464 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->qp_entry_size = le16_to_cpu(resp->qp_entry_size);
ctx              6465 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->srq_max_l2_entries = le16_to_cpu(resp->srq_max_l2_entries);
ctx              6466 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->srq_max_entries = le32_to_cpu(resp->srq_max_entries);
ctx              6467 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->srq_entry_size = le16_to_cpu(resp->srq_entry_size);
ctx              6468 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->cq_max_l2_entries = le16_to_cpu(resp->cq_max_l2_entries);
ctx              6469 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->cq_max_entries = le32_to_cpu(resp->cq_max_entries);
ctx              6470 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->cq_entry_size = le16_to_cpu(resp->cq_entry_size);
ctx              6471 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->vnic_max_vnic_entries =
ctx              6473 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->vnic_max_ring_table_entries =
ctx              6475 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->vnic_entry_size = le16_to_cpu(resp->vnic_entry_size);
ctx              6476 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->stat_max_entries = le32_to_cpu(resp->stat_max_entries);
ctx              6477 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->stat_entry_size = le16_to_cpu(resp->stat_entry_size);
ctx              6478 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tqm_entry_size = le16_to_cpu(resp->tqm_entry_size);
ctx              6479 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tqm_min_entries_per_ring =
ctx              6481 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tqm_max_entries_per_ring =
ctx              6483 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tqm_entries_multiple = resp->tqm_entries_multiple;
ctx              6484 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		if (!ctx->tqm_entries_multiple)
ctx              6485 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			ctx->tqm_entries_multiple = 1;
ctx              6486 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->mrav_max_entries = le32_to_cpu(resp->mrav_max_entries);
ctx              6487 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->mrav_entry_size = le16_to_cpu(resp->mrav_entry_size);
ctx              6488 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->mrav_num_entries_units =
ctx              6490 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tim_entry_size = le16_to_cpu(resp->tim_entry_size);
ctx              6491 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tim_max_entries = le32_to_cpu(resp->tim_max_entries);
ctx              6532 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	struct bnxt_ctx_mem_info *ctx = bp->ctx;
ctx              6541 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (!ctx)
ctx              6548 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->qp_mem;
ctx              6550 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.qp_num_qp1_entries = cpu_to_le16(ctx->qp_min_qp1_entries);
ctx              6551 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.qp_num_l2_entries = cpu_to_le16(ctx->qp_max_l2_entries);
ctx              6552 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.qp_entry_size = cpu_to_le16(ctx->qp_entry_size);
ctx              6558 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->srq_mem;
ctx              6560 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.srq_num_l2_entries = cpu_to_le16(ctx->srq_max_l2_entries);
ctx              6561 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.srq_entry_size = cpu_to_le16(ctx->srq_entry_size);
ctx              6567 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->cq_mem;
ctx              6569 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.cq_num_l2_entries = cpu_to_le16(ctx->cq_max_l2_entries);
ctx              6570 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.cq_entry_size = cpu_to_le16(ctx->cq_entry_size);
ctx              6575 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->vnic_mem;
ctx              6577 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			cpu_to_le16(ctx->vnic_max_vnic_entries);
ctx              6579 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			cpu_to_le16(ctx->vnic_max_ring_table_entries);
ctx              6580 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.vnic_entry_size = cpu_to_le16(ctx->vnic_entry_size);
ctx              6586 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->stat_mem;
ctx              6587 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.stat_num_entries = cpu_to_le32(ctx->stat_max_entries);
ctx              6588 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.stat_entry_size = cpu_to_le16(ctx->stat_entry_size);
ctx              6594 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->mrav_mem;
ctx              6596 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		if (ctx->mrav_num_entries_units)
ctx              6599 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.mrav_entry_size = cpu_to_le16(ctx->mrav_entry_size);
ctx              6605 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = &ctx->tim_mem;
ctx              6607 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.tim_entry_size = cpu_to_le16(ctx->tim_entry_size);
ctx              6620 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.tqm_entry_size = cpu_to_le16(ctx->tqm_entry_size);
ctx              6621 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = ctx->tqm_mem[i];
ctx              6734 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	struct bnxt_ctx_mem_info *ctx = bp->ctx;
ctx              6737 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (!ctx)
ctx              6740 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (ctx->tqm_mem[0]) {
ctx              6742 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			bnxt_free_ctx_pg_tbls(bp, ctx->tqm_mem[i]);
ctx              6743 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		kfree(ctx->tqm_mem[0]);
ctx              6744 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx->tqm_mem[0] = NULL;
ctx              6747 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->tim_mem);
ctx              6748 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->mrav_mem);
ctx              6749 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->stat_mem);
ctx              6750 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->vnic_mem);
ctx              6751 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->cq_mem);
ctx              6752 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->srq_mem);
ctx              6753 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bnxt_free_ctx_pg_tbls(bp, &ctx->qp_mem);
ctx              6754 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx->flags &= ~BNXT_CTX_FLAG_INITED;
ctx              6760 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	struct bnxt_ctx_mem_info *ctx;
ctx              6774 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx = bp->ctx;
ctx              6775 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (!ctx || (ctx->flags & BNXT_CTX_FLAG_INITED))
ctx              6784 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->qp_mem;
ctx              6785 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->qp_min_qp1_entries + ctx->qp_max_l2_entries +
ctx              6787 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->qp_entry_size * ctx_pg->entries;
ctx              6792 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->srq_mem;
ctx              6793 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->srq_max_l2_entries + extra_srqs;
ctx              6794 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->srq_entry_size * ctx_pg->entries;
ctx              6799 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->cq_mem;
ctx              6800 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->cq_max_l2_entries + extra_qps * 2;
ctx              6801 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->cq_entry_size * ctx_pg->entries;
ctx              6806 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->vnic_mem;
ctx              6807 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->vnic_max_vnic_entries +
ctx              6808 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			  ctx->vnic_max_ring_table_entries;
ctx              6809 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->vnic_entry_size * ctx_pg->entries;
ctx              6814 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->stat_mem;
ctx              6815 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->stat_max_entries;
ctx              6816 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->stat_entry_size * ctx_pg->entries;
ctx              6825 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->mrav_mem;
ctx              6832 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->mrav_entry_size * ctx_pg->entries;
ctx              6837 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	if (ctx->mrav_num_entries_units)
ctx              6839 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			((num_mr / ctx->mrav_num_entries_units) << 16) |
ctx              6840 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			 (num_ah / ctx->mrav_num_entries_units);
ctx              6842 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg = &ctx->tim_mem;
ctx              6843 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx_pg->entries = ctx->qp_mem.entries;
ctx              6844 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	mem_size = ctx->tim_entry_size * ctx_pg->entries;
ctx              6851 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	entries = ctx->qp_max_l2_entries + extra_qps;
ctx              6852 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	entries = roundup(entries, ctx->tqm_entries_multiple);
ctx              6853 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	entries = clamp_t(u32, entries, ctx->tqm_min_entries_per_ring,
ctx              6854 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			  ctx->tqm_max_entries_per_ring);
ctx              6856 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		ctx_pg = ctx->tqm_mem[i];
ctx              6858 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		mem_size = ctx->tqm_entry_size * entries;
ctx              6871 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	ctx->flags |= BNXT_CTX_FLAG_INITED;
ctx              8775 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			kfree(bp->ctx);
ctx              8776 drivers/net/ethernet/broadcom/bnxt/bnxt.c 			bp->ctx = NULL;
ctx              10073 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	kfree(bp->ctx);
ctx              10074 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bp->ctx = NULL;
ctx              11396 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	kfree(bp->ctx);
ctx              11397 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bp->ctx = NULL;
ctx              11909 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	kfree(bp->ctx);
ctx              11910 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	bp->ctx = NULL;
ctx              1763 drivers/net/ethernet/broadcom/bnxt/bnxt.h 	struct bnxt_ctx_mem_info	*ctx;
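
One detail worth noting in the bnxt teardown paths above: every kfree(bp->ctx) is immediately followed by bp->ctx = NULL, because the backing-store qcaps probe (first bnxt.c line of this cluster) returns early when bp->ctx is already set. A sketch of the idiom:

#include <linux/slab.h>

static void sketch_free_ctx_mem(struct bnxt *bp)
{
	kfree(bp->ctx);
	bp->ctx = NULL;	/* allow the qcaps path to re-probe and realloc */
}
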
ctx               358 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 				 struct devlink_param_gset_ctx *ctx)
ctx               365 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 	rc = bnxt_hwrm_nvm_req(bp, id, &req, sizeof(req), &ctx->val);
ctx               368 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 			ctx->val.vbool = !ctx->val.vbool;
ctx               374 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 				 struct devlink_param_gset_ctx *ctx)
ctx               382 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 		ctx->val.vbool = !ctx->val.vbool;
ctx               384 drivers/net/ethernet/broadcom/bnxt/bnxt_devlink.c 	return bnxt_hwrm_nvm_req(bp, id, &req, sizeof(req), &ctx->val);
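
The bnxt_devlink get/set callbacks above follow the standard devlink parameter contract: get() fills ctx->val for the caller, set() consumes it, with the vbool member matching a DEVLINK_PARAM_TYPE_BOOL parameter (mlx4, further down, does the same for its internal-error-reset knob). A minimal pair, backed by an assumed module-level flag:

#include <net/devlink.h>

static bool sketch_feature_on;	/* assumed backing store */

static int sketch_param_get(struct devlink *devlink, u32 id,
			    struct devlink_param_gset_ctx *ctx)
{
	ctx->val.vbool = sketch_feature_on;
	return 0;
}

static int sketch_param_set(struct devlink *devlink, u32 id,
			    struct devlink_param_gset_ctx *ctx)
{
	sketch_feature_on = ctx->val.vbool;
	return 0;
}
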
ctx               831 drivers/net/ethernet/broadcom/cnic.c 		if (cp->ctx_arr[i].ctx) {
ctx               833 drivers/net/ethernet/broadcom/cnic.c 					  cp->ctx_arr[i].ctx,
ctx               835 drivers/net/ethernet/broadcom/cnic.c 			cp->ctx_arr[i].ctx = NULL;
ctx               946 drivers/net/ethernet/broadcom/cnic.c 			cp->ctx_arr[i].ctx =
ctx               951 drivers/net/ethernet/broadcom/cnic.c 			if (cp->ctx_arr[i].ctx == NULL)
ctx              1219 drivers/net/ethernet/broadcom/cnic.c 		cp->ctx_arr[i].ctx =
ctx              1223 drivers/net/ethernet/broadcom/cnic.c 		if (cp->ctx_arr[i].ctx == NULL)
ctx              1380 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              1383 drivers/net/ethernet/broadcom/cnic.c 	map = ctx->kwqe_data_mapping;
ctx              1386 drivers/net/ethernet/broadcom/cnic.c 	return ctx->kwqe_data;
ctx              1591 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              1593 drivers/net/ethernet/broadcom/cnic.c 	if (ctx->ulp_proto_id == CNIC_ULP_ISCSI) {
ctx              1594 drivers/net/ethernet/broadcom/cnic.c 		struct cnic_iscsi *iscsi = ctx->proto.iscsi;
ctx              1599 drivers/net/ethernet/broadcom/cnic.c 		cnic_free_id(&cp->cid_tbl, ctx->cid);
ctx              1601 drivers/net/ethernet/broadcom/cnic.c 		cnic_free_id(&cp->fcoe_cid_tbl, ctx->cid);
ctx              1604 drivers/net/ethernet/broadcom/cnic.c 	ctx->cid = 0;
ctx              1612 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              1613 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_iscsi *iscsi = ctx->proto.iscsi;
ctx              1615 drivers/net/ethernet/broadcom/cnic.c 	if (ctx->ulp_proto_id == CNIC_ULP_FCOE) {
ctx              1621 drivers/net/ethernet/broadcom/cnic.c 		ctx->cid = cid;
ctx              1631 drivers/net/ethernet/broadcom/cnic.c 	ctx->cid = cid;
ctx              1664 drivers/net/ethernet/broadcom/cnic.c 	void *ctx;
ctx              1675 drivers/net/ethernet/broadcom/cnic.c 	ctx = cp->ctx_arr[blk].ctx + align_off +
ctx              1678 drivers/net/ethernet/broadcom/cnic.c 		memset(ctx, 0, BNX2X_CONTEXT_MEM_SIZE);
ctx              1682 drivers/net/ethernet/broadcom/cnic.c 	return ctx;
ctx              1695 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[req1->iscsi_conn_id];
ctx              1696 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_iscsi *iscsi = ctx->proto.iscsi;
ctx              1697 drivers/net/ethernet/broadcom/cnic.c 	u32 cid = ctx->cid;
ctx              1704 drivers/net/ethernet/broadcom/cnic.c 	ctx->ctx_flags = 0;
ctx              1879 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx;
ctx              1907 drivers/net/ethernet/broadcom/cnic.c 	ctx = &cp->ctx_tbl[l5_cid];
ctx              1908 drivers/net/ethernet/broadcom/cnic.c 	if (test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags)) {
ctx              1969 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              1974 drivers/net/ethernet/broadcom/cnic.c 	init_waitqueue_head(&ctx->waitq);
ctx              1975 drivers/net/ethernet/broadcom/cnic.c 	ctx->wait_cond = 0;
ctx              1977 drivers/net/ethernet/broadcom/cnic.c 	hw_cid = BNX2X_HW_CID(bp, ctx->cid);
ctx              1983 drivers/net/ethernet/broadcom/cnic.c 		wait_event_timeout(ctx->waitq, ctx->wait_cond, CNIC_RAMROD_TMO);
ctx              1984 drivers/net/ethernet/broadcom/cnic.c 		if (unlikely(test_bit(CTX_FL_CID_ERROR, &ctx->ctx_flags)))
ctx              1997 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              2002 drivers/net/ethernet/broadcom/cnic.c 	if (!test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags))
ctx              2005 drivers/net/ethernet/broadcom/cnic.c 	if (!time_after(jiffies, ctx->timestamp + (2 * HZ))) {
ctx              2006 drivers/net/ethernet/broadcom/cnic.c 		unsigned long delta = ctx->timestamp + (2 * HZ) - jiffies;
ctx              2011 drivers/net/ethernet/broadcom/cnic.c 		set_bit(CTX_FL_DELETE_WAIT, &ctx->ctx_flags);
ctx              2023 drivers/net/ethernet/broadcom/cnic.c 		clear_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags);
ctx              2132 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              2197 drivers/net/ethernet/broadcom/cnic.c 		set_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags);
ctx              2349 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx;
ctx              2373 drivers/net/ethernet/broadcom/cnic.c 	ctx = &cp->ctx_tbl[l5_cid];
ctx              2374 drivers/net/ethernet/broadcom/cnic.c 	if (test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags))
ctx              2382 drivers/net/ethernet/broadcom/cnic.c 	cid = ctx->cid;
ctx              2414 drivers/net/ethernet/broadcom/cnic.c 		set_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags);
ctx              2499 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx;
ctx              2511 drivers/net/ethernet/broadcom/cnic.c 	ctx = &cp->ctx_tbl[l5_cid];
ctx              2513 drivers/net/ethernet/broadcom/cnic.c 	init_waitqueue_head(&ctx->waitq);
ctx              2514 drivers/net/ethernet/broadcom/cnic.c 	ctx->wait_cond = 0;
ctx              2522 drivers/net/ethernet/broadcom/cnic.c 		wait_event_timeout(ctx->waitq, ctx->wait_cond, CNIC_RAMROD_TMO);
ctx              2523 drivers/net/ethernet/broadcom/cnic.c 		if (ctx->wait_cond)
ctx              2527 drivers/net/ethernet/broadcom/cnic.c 	set_bit(CTX_FL_DELETE_WAIT, &ctx->ctx_flags);
ctx              2545 drivers/net/ethernet/broadcom/cnic.c 		struct cnic_context *ctx = &cp->ctx_tbl[i];
ctx              2548 drivers/net/ethernet/broadcom/cnic.c 		while (test_bit(CTX_FL_DELETE_WAIT, &ctx->ctx_flags))
ctx              2552 drivers/net/ethernet/broadcom/cnic.c 			if (!test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags))
ctx              2557 drivers/net/ethernet/broadcom/cnic.c 		if (test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags))
ctx              2559 drivers/net/ethernet/broadcom/cnic.c 				   ctx->cid);
ctx              3299 drivers/net/ethernet/broadcom/cnic.c 			struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              3302 drivers/net/ethernet/broadcom/cnic.c 				set_bit(CTX_FL_CID_ERROR, &ctx->ctx_flags);
ctx              3308 drivers/net/ethernet/broadcom/cnic.c 			ctx->wait_cond = 1;
ctx              3309 drivers/net/ethernet/broadcom/cnic.c 			wake_up(&ctx->waitq);
ctx              3592 drivers/net/ethernet/broadcom/cnic.c 		struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              3594 drivers/net/ethernet/broadcom/cnic.c 		if (test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags))
ctx              3979 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[l5_cid];
ctx              3981 drivers/net/ethernet/broadcom/cnic.c 	ctx->timestamp = jiffies;
ctx              3982 drivers/net/ethernet/broadcom/cnic.c 	ctx->wait_cond = 1;
ctx              3983 drivers/net/ethernet/broadcom/cnic.c 	wake_up(&ctx->waitq);
ctx              4178 drivers/net/ethernet/broadcom/cnic.c 	struct cnic_context *ctx = &cp->ctx_tbl[csk->l5_cid];
ctx              4209 drivers/net/ethernet/broadcom/cnic.c 		ctx->timestamp = jiffies;
ctx              4288 drivers/net/ethernet/broadcom/cnic.c 		struct cnic_context *ctx = &cp->ctx_tbl[i];
ctx              4291 drivers/net/ethernet/broadcom/cnic.c 		if (!test_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags) ||
ctx              4292 drivers/net/ethernet/broadcom/cnic.c 		    !test_bit(CTX_FL_DELETE_WAIT, &ctx->ctx_flags))
ctx              4295 drivers/net/ethernet/broadcom/cnic.c 		if (!time_after(jiffies, ctx->timestamp + (2 * HZ))) {
ctx              4300 drivers/net/ethernet/broadcom/cnic.c 		if (!test_and_clear_bit(CTX_FL_DELETE_WAIT, &ctx->ctx_flags))
ctx              4307 drivers/net/ethernet/broadcom/cnic.c 			if (ctx->ulp_proto_id == CNIC_ULP_ISCSI)
ctx              4310 drivers/net/ethernet/broadcom/cnic.c 			clear_bit(CTX_FL_OFFLD_START, &ctx->ctx_flags);
ctx              4396 drivers/net/ethernet/broadcom/cnic.c 		memset(cp->ctx_arr[i].ctx, 0, CNIC_PAGE_SIZE);
ctx              4853 drivers/net/ethernet/broadcom/cnic.c 		struct cnic_ctx *ctx = &cp->ctx_arr[i];
ctx              4854 drivers/net/ethernet/broadcom/cnic.c 		dma_addr_t map = ctx->mapping;
ctx              5658 drivers/net/ethernet/broadcom/cnic.c 		void *ctx;
ctx              5668 drivers/net/ethernet/broadcom/cnic.c 		ctx = cp->ulp_handle[if_type];
ctx              5673 drivers/net/ethernet/broadcom/cnic.c 		ulp_ops->indicate_netevent(ctx, event, vlan_id);
ctx               125 drivers/net/ethernet/broadcom/cnic.h 	void		*ctx;
ctx              2513 drivers/net/ethernet/broadcom/cnic_defs.h 	struct fcoe_abts_info ctx;
ctx              2522 drivers/net/ethernet/broadcom/cnic_defs.h 	struct fcoe_cleanup_info ctx;
ctx              2540 drivers/net/ethernet/broadcom/cnic_defs.h 	struct fcoe_fw_tx_seq_ctx ctx;
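
The cnic entries repeat one synchronisation pattern: the submitter arms a waitqueue and clears wait_cond in the per-connection context, fires the ramrod, and sleeps in wait_event_timeout(); the completion interrupt sets wait_cond and wakes the queue (see the kcqe paths above). A generic sketch, with sketch_fw_submit() standing in for the actual submission:

#include <linux/errno.h>
#include <linux/jiffies.h>
#include <linux/wait.h>

struct sketch_conn_ctx {
	wait_queue_head_t waitq;
	int wait_cond;
};

static void sketch_fw_submit(struct sketch_conn_ctx *ctx);	/* assumed */

static int sketch_submit_and_wait(struct sketch_conn_ctx *ctx)
{
	init_waitqueue_head(&ctx->waitq);
	ctx->wait_cond = 0;

	sketch_fw_submit(ctx);

	/* wait_event_timeout() returns 0 if the condition stayed false. */
	if (!wait_event_timeout(ctx->waitq, ctx->wait_cond, 2 * HZ))
		return -ETIMEDOUT;

	return 0;
}

/* Completion side (interrupt context):
 *	ctx->wait_cond = 1;
 *	wake_up(&ctx->waitq);
 */
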
ctx              1464 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	struct oct_vf_stats_ctx *ctx = arg;
ctx              1466 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	memcpy(ctx->stats, cmd->data, sizeof(struct oct_vf_stats));
ctx              1467 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	atomic_set(&ctx->status, 1);
ctx              1475 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	struct oct_vf_stats_ctx ctx;
ctx              1493 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	ctx.stats = stats;
ctx              1494 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	atomic_set(&ctx.status, 0);
ctx              1495 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	mbox_cmd.fn_arg = (void *)&ctx;
ctx              1501 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	} while ((atomic_read(&ctx.status) == 0) && (count++ < timeout));
ctx              1503 drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c 	ret = atomic_read(&ctx.status);
ctx               505 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	void *ctx = p->t3c_tid.ctx;
ctx               513 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	return ctx;
ctx               536 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		      void *ctx, unsigned int tid)
ctx               541 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	t->tid_tab[tid].ctx = ctx;
ctx               572 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		td->tid_release_list = p->ctx;
ctx               581 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			p->ctx = (void *)td->tid_release_list;
ctx               587 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		p->ctx = NULL;
ctx               610 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	p->ctx = (void *)td->tid_release_list;
ctx               613 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (!p->ctx || td->release_list_incomplete)
ctx               627 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c void cxgb3_remove_tid(struct t3cdev *tdev, void *ctx, unsigned int tid)
ctx               633 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		(void)cmpxchg(&t->tid_tab[tid].ctx, ctx, NULL);
ctx               641 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			t->tid_tab[tid].ctx = NULL;
ctx               651 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		     void *ctx)
ctx               664 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		p->t3c_tid.ctx = ctx;
ctx               675 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		     void *ctx)
ctx               686 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		p->t3c_tid.ctx = ctx;
ctx               746 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client &&
ctx               751 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 								    ctx);
ctx               766 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               769 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 							     t3c_tid->ctx);
ctx               784 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               787 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		    (dev, skb, t3c_tid->ctx);
ctx               811 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               814 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		    (dev, skb, t3c_tid->ctx);
ctx               851 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               854 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		    (dev, skb, t3c_tid->ctx);
ctx               905 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               908 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		    (dev, skb, t3c_tid->ctx);
ctx               951 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 	if (t3c_tid && t3c_tid->ctx && t3c_tid->client->handlers &&
ctx               954 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 							  t3c_tid->ctx);
ctx               963 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		       void *ctx)
ctx               967 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		cxgb_neigh_update((struct neighbour *)ctx);
ctx               971 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		struct netevent_redirect *nr = ctx;
ctx              1141 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 		if (te && te->ctx && te->client && te->client->redirect) {
ctx              1142 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c 			update_tcb = te->client->redirect(te->ctx, old, new, e);
ctx                70 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 				      struct sk_buff *skb, void *ctx);
ctx                87 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 	int (*redirect)(void *ctx, struct dst_entry *old,
ctx                97 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 		     void *ctx);
ctx                99 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 		     void *ctx);
ctx               103 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 		      void *ctx, unsigned int tid);
ctx               105 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h void cxgb3_remove_tid(struct t3cdev *dev, void *ctx, unsigned int tid);
ctx               109 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.h 	void *ctx;
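
The cxgb3 offload entries all route through one table: each TID slot pairs an opaque ctx with the owning client, and every CPL handler checks t3c_tid->ctx plus the client's handler slot before dispatching. A condensed sketch of that guard-then-dispatch step, with sketch_* types in place of the driver's t3c_tid_entry:

#include <linux/errno.h>
#include <linux/skbuff.h>

struct t3cdev;	/* only used by reference here */

typedef int (*sketch_cpl_handler)(struct t3cdev *dev, struct sk_buff *skb,
				  void *ctx);

struct sketch_client {
	sketch_cpl_handler *handlers;	/* indexed by CPL opcode */
};

struct sketch_tid_entry {
	void *ctx;			/* ULP connection state */
	struct sketch_client *client;
};

static int sketch_dispatch_cpl(struct t3cdev *dev, struct sk_buff *skb,
			       struct sketch_tid_entry *te, u8 opcode)
{
	if (te && te->ctx && te->client && te->client->handlers &&
	    te->client->handlers[opcode])
		return te->client->handlers[opcode](dev, skb, te->ctx);

	return -EINVAL;		/* no owner registered for this TID */
}
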
ctx              1263 drivers/net/ethernet/chelsio/cxgb4/cxgb4.h 	struct filter_ctx *ctx; /* Caller's completion hook */
ctx              1093 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				 struct filter_ctx *ctx)
ctx              1118 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	f->ctx = ctx;
ctx              1252 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		       struct filter_ctx *ctx)
ctx              1263 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			return cxgb4_set_hash_filter(dev, fs, ctx);
ctx              1430 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	f->ctx = ctx;
ctx              1451 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				 struct filter_ctx *ctx)
ctx              1485 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	f->ctx = ctx;
ctx              1518 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		       struct filter_ctx *ctx)
ctx              1528 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			return cxgb4_del_hash_filter(dev, filter_id, ctx);
ctx              1545 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		f->ctx = ctx;
ctx              1556 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	if (ctx) {
ctx              1557 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		ctx->result = 0;
ctx              1558 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		complete(&ctx->completion);
ctx              1566 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct filter_ctx ctx;
ctx              1569 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	init_completion(&ctx.completion);
ctx              1571 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = __cxgb4_set_filter(dev, filter_id, fs, &ctx);
ctx              1576 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = wait_for_completion_timeout(&ctx.completion, 10 * HZ);
ctx              1580 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = ctx.result;
ctx              1588 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct filter_ctx ctx;
ctx              1594 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	init_completion(&ctx.completion);
ctx              1596 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = __cxgb4_del_filter(dev, filter_id, fs, &ctx);
ctx              1601 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = wait_for_completion_timeout(&ctx.completion, 10 * HZ);
ctx              1605 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ret = ctx.result;
ctx              1676 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct filter_ctx *ctx = NULL;
ctx              1688 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ctx = f->ctx;
ctx              1689 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	f->ctx = NULL;
ctx              1693 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	if (ctx) {
ctx              1694 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		ctx->result = 0;
ctx              1695 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		complete(&ctx->completion);
ctx              1705 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct filter_ctx *ctx = NULL;
ctx              1717 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	ctx = f->ctx;
ctx              1718 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	f->ctx = NULL;
ctx              1727 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		if (ctx) {
ctx              1728 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			ctx->tid = f->tid;
ctx              1729 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			ctx->result = 0;
ctx              1735 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			if (ctx) {
ctx              1736 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				ctx->result = -EINVAL;
ctx              1737 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				complete(&ctx->completion);
ctx              1748 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		if (ctx) {
ctx              1750 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				ctx->result = -ENOSPC;
ctx              1752 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				ctx->result = -EINVAL;
ctx              1758 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	if (ctx)
ctx              1759 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		complete(&ctx->completion);
ctx              1785 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		struct filter_ctx *ctx;
ctx              1790 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		ctx = f->ctx;
ctx              1791 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		f->ctx = NULL;
ctx              1798 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			if (ctx)
ctx              1799 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				ctx->result = 0;
ctx              1809 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				if (ctx) {
ctx              1810 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 					ctx->result = 0;
ctx              1811 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 					ctx->tid = idx;
ctx              1815 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				if (ctx)
ctx              1816 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 					ctx->result = err;
ctx              1825 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			if (ctx)
ctx              1826 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 				ctx->result = -EINVAL;
ctx              1828 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 		if (ctx)
ctx              1829 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 			complete(&ctx->completion);
ctx               642 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	struct filter_ctx ctx;
ctx               675 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	init_completion(&ctx.completion);
ctx               676 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	ret = __cxgb4_set_filter(dev, fidx, fs, &ctx);
ctx               684 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	ret = wait_for_completion_timeout(&ctx.completion, 10 * HZ);
ctx               690 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	ret = ctx.result;
ctx               696 drivers/net/ethernet/chelsio/cxgb4/cxgb4_tc_flower.c 	ch_flower->filter_id = ctx.tid;
ctx               218 drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.h 		       struct filter_ctx *ctx);
ctx               221 drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.h 		       struct filter_ctx *ctx);
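
The cxgb4 filter code above is the async-plus-completion pattern in full: __cxgb4_set_filter() takes an optional struct filter_ctx, the firmware-reply handlers fill ctx->result (and ctx->tid) before calling complete(), and the synchronous wrappers just stack a ctx and block on it. Condensed from those lines:

#include <linux/completion.h>
#include <linux/errno.h>

static int sketch_set_filter_sync(struct net_device *dev, int filter_id,
				  struct ch_filter_specification *fs)
{
	struct filter_ctx ctx;
	int ret;

	init_completion(&ctx.completion);

	ret = __cxgb4_set_filter(dev, filter_id, fs, &ctx);
	if (ret)
		return ret;

	/* The reply handler completes ctx.completion with ctx.result set. */
	if (!wait_for_completion_timeout(&ctx.completion, 10 * HZ))
		return -ETIMEDOUT;

	return ctx.result;
}
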
ctx              2032 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c static void cdan_cb(struct dpaa2_io_notification_ctx *ctx)
ctx              2036 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	ch = container_of(ctx, struct dpaa2_eth_channel, nctx);
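
The two dpaa2-eth lines are the container_of() half of the callback-context story: instead of stashing a back-pointer, the notification context is embedded inside the channel struct, so the callback recomputes its owner from the member address. A self-contained sketch with illustrative types:

#include <linux/kernel.h>

struct sketch_notif_ctx {
	void (*cb)(struct sketch_notif_ctx *ctx);
};

struct sketch_channel {
	int id;
	struct sketch_notif_ctx nctx;	/* embedded, not a pointer */
};

static void sketch_cdan_cb(struct sketch_notif_ctx *ctx)
{
	struct sketch_channel *ch =
		container_of(ctx, struct sketch_channel, nctx);

	/* ch is the channel whose nctx was registered for this callback. */
	(void)ch;
}
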
ctx               689 drivers/net/ethernet/huawei/hinic/hinic_port.c 	u32 ctx = 0;
ctx               703 drivers/net/ethernet/huawei/hinic/hinic_port.c 	ctx |=  HINIC_RSS_TYPE_SET(1, VALID) |
ctx               721 drivers/net/ethernet/huawei/hinic/hinic_port.c 	ctx_tbl->ctx = cpu_to_be32(ctx);
ctx               275 drivers/net/ethernet/huawei/hinic/hinic_port.h 	u32 ctx;
ctx               105 drivers/net/ethernet/intel/e1000e/ptp.c 					 void *ctx)
ctx               107 drivers/net/ethernet/intel/e1000e/ptp.c 	struct e1000_adapter *adapter = (struct e1000_adapter *)ctx;
ctx              3180 drivers/net/ethernet/intel/ice/ice_lib.c static void ice_vsi_update_q_map(struct ice_vsi *vsi, struct ice_vsi_ctx *ctx)
ctx              3182 drivers/net/ethernet/intel/ice/ice_lib.c 	vsi->info.mapping_flags = ctx->info.mapping_flags;
ctx              3183 drivers/net/ethernet/intel/ice/ice_lib.c 	memcpy(&vsi->info.q_mapping, &ctx->info.q_mapping,
ctx              3185 drivers/net/ethernet/intel/ice/ice_lib.c 	memcpy(&vsi->info.tc_mapping, ctx->info.tc_mapping,
ctx              3199 drivers/net/ethernet/intel/ice/ice_lib.c 	struct ice_vsi_ctx *ctx;
ctx              3216 drivers/net/ethernet/intel/ice/ice_lib.c 	ctx = devm_kzalloc(&pf->pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              3217 drivers/net/ethernet/intel/ice/ice_lib.c 	if (!ctx)
ctx              3220 drivers/net/ethernet/intel/ice/ice_lib.c 	ctx->vf_num = 0;
ctx              3221 drivers/net/ethernet/intel/ice/ice_lib.c 	ctx->info = vsi->info;
ctx              3223 drivers/net/ethernet/intel/ice/ice_lib.c 	ice_vsi_setup_q_map(vsi, ctx);
ctx              3226 drivers/net/ethernet/intel/ice/ice_lib.c 	ctx->info.valid_sections = cpu_to_le16(ICE_AQ_VSI_PROP_RXQ_MAP_VALID);
ctx              3227 drivers/net/ethernet/intel/ice/ice_lib.c 	status = ice_update_vsi(&pf->hw, vsi->idx, ctx, NULL);
ctx              3244 drivers/net/ethernet/intel/ice/ice_lib.c 	ice_vsi_update_q_map(vsi, ctx);
ctx              3249 drivers/net/ethernet/intel/ice/ice_lib.c 	devm_kfree(&pf->pdev->dev, ctx);
ctx              3079 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	struct ice_vsi_ctx *ctx;
ctx              3102 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	ctx = devm_kzalloc(&pf->pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              3103 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	if (!ctx)
ctx              3106 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	ctx->info.valid_sections = cpu_to_le16(ICE_AQ_VSI_PROP_SECURITY_VALID);
ctx              3109 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 		ctx->info.sec_flags |= ICE_AQ_VSI_SEC_FLAG_ENA_MAC_ANTI_SPOOF;
ctx              3110 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 		ctx->info.sw_flags2 |= ICE_AQ_VSI_SW_FLAG_RX_PRUNE_EN_M;
ctx              3113 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	status = ice_update_vsi(&pf->hw, vsi->idx, ctx, NULL);
ctx              3122 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	vsi->info.sec_flags = ctx->info.sec_flags;
ctx              3123 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	vsi->info.sw_flags2 = ctx->info.sw_flags2;
ctx              3125 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	devm_kfree(&pf->pdev->dev, ctx);
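
Both ice call sites above use the same scratch-context dance: allocate an ice_vsi_ctx, mark only the sections being changed via valid_sections, push it with ice_update_vsi(), then mirror the accepted fields back into the cached vsi->info before freeing. A condensed sketch of the security-flags variant; the error codes chosen here are illustrative:

static int sketch_vsi_set_antispoof(struct ice_pf *pf, struct ice_vsi *vsi)
{
	struct ice_vsi_ctx *ctx;
	int err = 0;

	ctx = devm_kzalloc(&pf->pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	/* Only the security section is valid in this update. */
	ctx->info.valid_sections =
		cpu_to_le16(ICE_AQ_VSI_PROP_SECURITY_VALID);
	ctx->info.sec_flags |= ICE_AQ_VSI_SEC_FLAG_ENA_MAC_ANTI_SPOOF;

	if (ice_update_vsi(&pf->hw, vsi->idx, ctx, NULL)) {
		err = -EIO;
	} else {
		/* Keep the cached copy in sync with what firmware took. */
		vsi->info.sec_flags = ctx->info.sec_flags;
	}

	devm_kfree(&pf->pdev->dev, ctx);
	return err;
}
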
ctx               984 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c static void mvpp2_rss_port_c2_enable(struct mvpp2_port *port, u32 ctx)
ctx               994 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	qh = (ctx >> 3) & MVPP22_CLS_C2_ATTR0_QHIGH_MASK;
ctx               995 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	ql = ctx & MVPP22_CLS_C2_ATTR0_QLOW_MASK;
ctx              1083 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	int index, ctx;
ctx              1124 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 		if (act->queue.ctx)
ctx              1139 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 		if (act->queue.ctx) {
ctx              1141 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 			ctx = mvpp22_rss_ctx(port, act->queue.ctx);
ctx              1142 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 			if (ctx < 0)
ctx              1145 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 			qh = (ctx >> 3) & MVPP22_CLS_C2_ATTR0_QHIGH_MASK;
ctx              1146 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 			ql = ctx & MVPP22_CLS_C2_ATTR0_QLOW_MASK;
ctx              1316 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	if (act->queue.ctx && act->queue.index)
ctx              1483 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	u32 ctx;
ctx              1486 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	for (ctx = 0; ctx < MVPP22_N_RSS_TABLES; ctx++) {
ctx              1487 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 		if (!priv->rss_tables[ctx])
ctx              1491 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	if (ctx == MVPP22_N_RSS_TABLES)
ctx              1494 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	priv->rss_tables[ctx] = kzalloc(sizeof(*priv->rss_tables[ctx]),
ctx              1496 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	if (!priv->rss_tables[ctx])
ctx              1499 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	*rss_ctx = ctx;
ctx              1504 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	mvpp2_write(priv, MVPP22_RSS_INDEX, MVPP22_RSS_INDEX_TABLE(ctx));
ctx              1507 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	mvpp2_write(priv, MVPP22_RSS_INDEX, MVPP22_RSS_INDEX_QUEUE(ctx));
ctx              1508 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c 	mvpp2_write(priv, MVPP22_RXQ2RSS_TABLE, MVPP22_RSS_TABLE_POINTER(ctx));
ctx               460 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 	void *ctx, *mask;
ctx               548 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 	ctx = aq->res->base + 128;
ctx               572 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 			memcpy(ctx, &req->rq, sizeof(struct nix_rq_ctx_s));
ctx               574 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 			memcpy(ctx, &req->sq, sizeof(struct nix_sq_ctx_s));
ctx               576 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 			memcpy(ctx, &req->cq, sizeof(struct nix_cq_ctx_s));
ctx               578 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 			memcpy(ctx, &req->rss, sizeof(struct nix_rsse_s));
ctx               580 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 			memcpy(ctx, &req->mce, sizeof(struct nix_rx_mce_s));
ctx               645 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 				memcpy(&rsp->rq, ctx,
ctx               648 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 				memcpy(&rsp->sq, ctx,
ctx               651 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 				memcpy(&rsp->cq, ctx,
ctx               654 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 				memcpy(&rsp->rss, ctx,
ctx               657 drivers/net/ethernet/marvell/octeontx2/af/rvu_nix.c 				memcpy(&rsp->mce, ctx,
ctx                65 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 	void *ctx, *mask;
ctx               100 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 	ctx = aq->res->base + 128;
ctx               110 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 			memcpy(ctx, &req->aura, sizeof(struct npa_aura_s));
ctx               114 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 			memcpy(ctx, &req->pool, sizeof(struct npa_pool_s));
ctx               126 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 			memcpy(ctx, &req->aura, sizeof(struct npa_aura_s));
ctx               128 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 			memcpy(ctx, &req->pool, sizeof(struct npa_pool_s));
ctx               188 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 				memcpy(&rsp->aura, ctx,
ctx               191 drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c 				memcpy(&rsp->pool, ctx,
ctx                62 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_eq_tasklet *ctx = (struct mlx4_eq_tasklet *)data;
ctx                65 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx                66 drivers/net/ethernet/mellanox/mlx4/cq.c 	list_splice_tail_init(&ctx->list, &ctx->process_list);
ctx                67 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx                69 drivers/net/ethernet/mellanox/mlx4/cq.c 	list_for_each_entry_safe(mcq, temp, &ctx->process_list, tasklet_ctx.list) {
ctx                78 drivers/net/ethernet/mellanox/mlx4/cq.c 	if (!list_empty(&ctx->process_list))
ctx                79 drivers/net/ethernet/mellanox/mlx4/cq.c 		tasklet_schedule(&ctx->task);
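The mlx4/cq.c entries show the tasklet's splice-then-process discipline: take the IRQ-safe lock only long enough to move the shared pending list onto a private process list, run the completions lock-free with a _safe iterator, and reschedule the tasklet if new work arrived in the meantime. A userspace analogue of the same idea, assuming a pthread mutex in place of spin_lock_irqsave() (struct work_item and the list handling are illustrative):

    /*
     * Splice-then-process: steal the whole shared list under the lock,
     * then process privately so producers are blocked only for the splice.
     */
    #include <pthread.h>
    #include <stddef.h>

    struct work_item { struct work_item *next; void (*fn)(struct work_item *); };

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static struct work_item *pending;       /* producers push here */

    static void process_all(void)
    {
        struct work_item *list, *next;

        pthread_mutex_lock(&lock);
        list = pending;                     /* splice: take the whole list */
        pending = NULL;
        pthread_mutex_unlock(&lock);

        for (; list; list = next) {         /* safe walk: item may free itself */
            next = list->next;
            list->fn(list);
        }
        /* the driver also reschedules itself here if 'pending' refilled */
    }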
ctx               186 drivers/net/ethernet/mellanox/mlx4/en_main.c static void *mlx4_en_get_netdev(struct mlx4_dev *dev, void *ctx, u8 port)
ctx               188 drivers/net/ethernet/mellanox/mlx4/en_main.c 	struct mlx4_en_dev *endev = ctx;
ctx               251 drivers/net/ethernet/mellanox/mlx4/en_main.c static void mlx4_en_activate(struct mlx4_dev *dev, void *ctx)
ctx               254 drivers/net/ethernet/mellanox/mlx4/en_main.c 	struct mlx4_en_dev *mdev = ctx;
ctx               371 drivers/net/ethernet/mellanox/mlx4/eq.c 	struct mlx4_slave_state *ctx = NULL;
ctx               387 drivers/net/ethernet/mellanox/mlx4/eq.c 	ctx = &priv->mfunc.master.slave_state[slave];
ctx               388 drivers/net/ethernet/mellanox/mlx4/eq.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               426 drivers/net/ethernet/mellanox/mlx4/eq.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               227 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	struct mlx4_set_vport_context *ctx;
ctx               233 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	ctx = mailbox->buf;
ctx               244 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 		out_param[i].bw_share = be32_to_cpu(ctx->qos_p_up[i].bw_share);
ctx               246 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 			be32_to_cpu(ctx->qos_p_up[i].max_avg_bw);
ctx               248 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 			!!(be32_to_cpu(ctx->qos_p_up[i].enable) & 31);
ctx               264 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	struct mlx4_set_vport_context *ctx;
ctx               270 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 	ctx = mailbox->buf;
ctx               273 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 		ctx->qos_p_up[i].bw_share = cpu_to_be32(in_param[i].bw_share);
ctx               274 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 		ctx->qos_p_up[i].max_avg_bw =
ctx               276 drivers/net/ethernet/mellanox/mlx4/fw_qos.c 		ctx->qos_p_up[i].enable =
ctx               182 drivers/net/ethernet/mellanox/mlx4/main.c 				       struct devlink_param_gset_ctx *ctx)
ctx               184 drivers/net/ethernet/mellanox/mlx4/main.c 	ctx->val.vbool = !!mlx4_internal_err_reset;
ctx               189 drivers/net/ethernet/mellanox/mlx4/main.c 				       struct devlink_param_gset_ctx *ctx)
ctx               191 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_internal_err_reset = ctx->val.vbool;
ctx               196 drivers/net/ethernet/mellanox/mlx4/main.c 					    struct devlink_param_gset_ctx *ctx)
ctx               201 drivers/net/ethernet/mellanox/mlx4/main.c 	ctx->val.vbool = dev->persist->crdump.snapshot_enable;
ctx               206 drivers/net/ethernet/mellanox/mlx4/main.c 					    struct devlink_param_gset_ctx *ctx)
ctx               211 drivers/net/ethernet/mellanox/mlx4/main.c 	dev->persist->crdump.snapshot_enable = ctx->val.vbool;
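The mlx4/main.c entries illustrate the devlink runtime-parameter convention that recurs below for mlxsw, nfp and qed: the get callback fills ctx->val with the driver's current state, and the set callback copies ctx->val back into the driver. A minimal sketch of one such pair (my_feature_enabled and the function names are illustrative; only the devlink_param_gset_ctx usage mirrors the entries above):

    /* Sketch of a devlink bool runtime parameter, kernel style. */
    #include <net/devlink.h>

    static bool my_feature_enabled;

    static int my_param_get(struct devlink *devlink, u32 id,
                            struct devlink_param_gset_ctx *ctx)
    {
        ctx->val.vbool = my_feature_enabled;    /* report current state */
        return 0;
    }

    static int my_param_set(struct devlink *devlink, u32 id,
                            struct devlink_param_gset_ctx *ctx)
    {
        my_feature_enabled = ctx->val.vbool;    /* apply requested state */
        return 0;
    }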
ctx              5265 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c static void update_qos_vpp(struct mlx4_update_qp_context *ctx,
ctx              5268 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	ctx->qp_mask |= cpu_to_be64(1ULL << MLX4_UPD_QP_MASK_QOS_VPP);
ctx              5269 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c 	ctx->qp_context.qos_vport = work->qos_vport;
ctx              1754 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 			     struct mlx5_async_ctx *ctx)
ctx              1756 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	ctx->dev = dev;
ctx              1758 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	atomic_set(&ctx->num_inflight, 1);
ctx              1759 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	init_waitqueue_head(&ctx->wait);
ctx              1771 drivers/net/ethernet/mellanox/mlx5/core/cmd.c void mlx5_cmd_cleanup_async_ctx(struct mlx5_async_ctx *ctx)
ctx              1773 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	atomic_dec(&ctx->num_inflight);
ctx              1774 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	wait_event(ctx->wait, atomic_read(&ctx->num_inflight) == 0);
ctx              1781 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	struct mlx5_async_ctx *ctx = work->ctx;
ctx              1784 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	if (atomic_dec_and_test(&ctx->num_inflight))
ctx              1785 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		wake_up(&ctx->wait);
ctx              1788 drivers/net/ethernet/mellanox/mlx5/core/cmd.c int mlx5_cmd_exec_cb(struct mlx5_async_ctx *ctx, void *in, int in_size,
ctx              1794 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	work->ctx = ctx;
ctx              1796 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	if (WARN_ON(!atomic_inc_not_zero(&ctx->num_inflight)))
ctx              1798 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	ret = cmd_exec(ctx->dev, in, in_size, out, out_size,
ctx              1800 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 	if (ret && atomic_dec_and_test(&ctx->num_inflight))
ctx              1801 drivers/net/ethernet/mellanox/mlx5/core/cmd.c 		wake_up(&ctx->wait);
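Taken together, the mlx5/core/cmd.c entries spell out the async-context lifetime protocol: num_inflight starts at 1 (the context's own reference), each submitted command takes a reference with atomic_inc_not_zero(), every completion (or failed submit) drops one and wakes the waiter, and cleanup drops the initial reference and sleeps until the count reaches zero. A compact restatement of that counting discipline, assuming C11 atomics in place of the kernel primitives (function names are illustrative):

    #include <stdatomic.h>
    #include <stdbool.h>

    struct async_ctx { atomic_int num_inflight; };

    static void ctx_init(struct async_ctx *c)
    {
        atomic_store(&c->num_inflight, 1);      /* context's own reference */
    }

    static bool ctx_submit(struct async_ctx *c)
    {
        /* inc_not_zero: refuse new work once cleanup dropped to zero */
        int v = atomic_load(&c->num_inflight);
        while (v != 0)
            if (atomic_compare_exchange_weak(&c->num_inflight, &v, v + 1))
                return true;
        return false;
    }

    static void ctx_complete(struct async_ctx *c)
    {
        if (atomic_fetch_sub(&c->num_inflight, 1) == 1) {
            /* count hit zero: the kernel does wake_up(&ctx->wait) here */
        }
    }

    static void ctx_cleanup(struct async_ctx *c)
    {
        ctx_complete(c);                        /* drop the initial reference */
        while (atomic_load(&c->num_inflight) != 0)
            ;       /* the kernel sleeps on a waitqueue instead of spinning */
    }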
ctx                50 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	struct mlx5_eq_tasklet *ctx = (struct mlx5_eq_tasklet *)data;
ctx                54 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx                55 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	list_splice_tail_init(&ctx->list, &ctx->process_list);
ctx                56 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx                58 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	list_for_each_entry_safe(mcq, temp, &ctx->process_list,
ctx                67 drivers/net/ethernet/mellanox/mlx5/core/cq.c 	if (!list_empty(&ctx->process_list))
ctx                68 drivers/net/ethernet/mellanox/mlx5/core/cq.c 		tasklet_schedule(&ctx->task);
ctx               206 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	struct mlx5_qp_context *ctx;
ctx               225 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	ctx = (struct mlx5_qp_context *)MLX5_ADDR_OF(query_qp_out, out, qpc);
ctx               232 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = (unsigned long)mlx5_qp_state_str(be32_to_cpu(ctx->flags) >> 28);
ctx               236 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = (unsigned long)mlx5_qp_type_str((be32_to_cpu(ctx->flags) >> 16) & 0xff);
ctx               240 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		switch (ctx->mtu_msgmax >> 5) {
ctx               261 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = 1 << ((ctx->rq_size_stride >> 3) & 0xf);
ctx               264 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = 1 << ((ctx->rq_size_stride & 7) + 4);
ctx               267 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		no_sq = be16_to_cpu(ctx->sq_crq_size) >> 15;
ctx               269 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 			param = 1 << (be16_to_cpu(ctx->sq_crq_size) >> 11);
ctx               274 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = (be32_to_cpu(ctx->log_pg_sz_remote_qpn) >> 24) & 0x1f;
ctx               278 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = be32_to_cpu(ctx->log_pg_sz_remote_qpn) & 0xffffff;
ctx               302 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	void *ctx;
ctx               315 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	ctx = MLX5_ADDR_OF(query_eq_out, out, eq_context_entry);
ctx               319 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = 1 << MLX5_GET(eqc, ctx, log_eq_size);
ctx               322 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = MLX5_GET(eqc, ctx, intr);
ctx               325 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = MLX5_GET(eqc, ctx, log_page_size) + 12;
ctx               339 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	void *ctx;
ctx               352 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 	ctx = MLX5_ADDR_OF(query_cq_out, out, cq_context);
ctx               359 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = 1 << MLX5_GET(cqc, ctx, log_cq_size);
ctx               362 drivers/net/ethernet/mellanox/mlx5/core/debugfs.c 		param = MLX5_GET(cqc, ctx, log_page_size);
ctx               152 drivers/net/ethernet/mellanox/mlx5/core/devlink.c 				    struct devlink_param_gset_ctx *ctx)
ctx               157 drivers/net/ethernet/mellanox/mlx5/core/devlink.c 	if (!strcmp(ctx->val.vstr, "smfs"))
ctx               167 drivers/net/ethernet/mellanox/mlx5/core/devlink.c 				    struct devlink_param_gset_ctx *ctx)
ctx               172 drivers/net/ethernet/mellanox/mlx5/core/devlink.c 		strcpy(ctx->val.vstr, "smfs");
ctx               174 drivers/net/ethernet/mellanox/mlx5/core/devlink.c 		strcpy(ctx->val.vstr, "dmfs");
ctx               203 drivers/net/ethernet/mellanox/mlx5/core/en/health.c 		return err_ctx->recover(err_ctx->ctx);
ctx                37 drivers/net/ethernet/mellanox/mlx5/core/en/health.h 	int (*recover)(void *ctx);
ctx                38 drivers/net/ethernet/mellanox/mlx5/core/en/health.h 	void *ctx;
ctx                56 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c static int mlx5e_rx_reporter_err_icosq_cqe_recover(void *ctx)
ctx                65 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	icosq = ctx;
ctx               111 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	err_ctx.ctx = icosq;
ctx               137 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c static int mlx5e_rx_reporter_err_rq_cqe_recover(void *ctx)
ctx               145 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	rq = ctx;
ctx               180 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	err_ctx.ctx = rq;
ctx               187 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c static int mlx5e_rx_reporter_timeout_recover(void *ctx)
ctx               194 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	rq = ctx;
ctx               211 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	err_ctx.ctx = rq;
ctx               221 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c 	return err_ctx->recover(err_ctx->ctx);
ctx                34 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c static int mlx5e_tx_reporter_err_cqe_recover(void *ctx)
ctx                42 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c 	sq = ctx;
ctx                91 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c 	err_ctx.ctx = sq;
ctx                98 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c static int mlx5e_tx_reporter_timeout_recover(void *ctx)
ctx               104 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c 	sq = ctx;
ctx               119 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c 	err_ctx.ctx = sq;
ctx               134 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c 	return err_ctx->recover(err_ctx->ctx);
ctx                12 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.c 	tisc = MLX5_ADDR_OF(create_tis_in, in, ctx);
ctx                25 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c fill_static_params_ctx(void *ctx, struct mlx5e_ktls_offload_context_tx *priv_tx)
ctx                35 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	gcm_iv      = MLX5_ADDR_OF(tls_static_params, ctx, gcm_iv);
ctx                36 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	initial_rn  = MLX5_ADDR_OF(tls_static_params, ctx, initial_record_number);
ctx                43 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_static_params, ctx, tls_version, tls_version);
ctx                44 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_static_params, ctx, const_1, 1);
ctx                45 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_static_params, ctx, const_2, 2);
ctx                46 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_static_params, ctx, encryption_standard,
ctx                48 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_static_params, ctx, dek_index, priv_tx->key_id);
ctx                76 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c fill_progress_params_ctx(void *ctx, struct mlx5e_ktls_offload_context_tx *priv_tx)
ctx                78 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_progress_params, ctx, tisn, priv_tx->tisn);
ctx                79 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_progress_params, ctx, record_tracker_state,
ctx                81 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	MLX5_SET(tls_progress_params, ctx, auth_state,
ctx               162 drivers/net/ethernet/mellanox/mlx5/core/en_common.c 		MLX5_SET(modify_tir_in, in, ctx.self_lb_block,
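The ktls_tx.c entries are wall-to-wall MLX5_SET()/MLX5_ADDR_OF(): mlx5 describes firmware command layouts as named fields, and these macros resolve (layout, buffer, field) to the right offset at compile time, so callers never hand-maintain byte offsets. A toy reimplementation of the idea, restricted to byte-aligned u32 fields to show what the macros buy (the real macros work on big-endian bit offsets generated from the firmware interface definition; all names here are illustrative):

    #include <stdint.h>
    #include <string.h>

    /* a "layout": field name -> byte offset, normally autogenerated */
    #define tls_params_tisn_OFF 0
    #define tls_params_dek_OFF  4

    #define MY_SET(layout, buf, field, val) do {                          \
        uint32_t __v = (val);                                             \
        memcpy((uint8_t *)(buf) + layout##_##field##_OFF, &__v, 4);       \
    } while (0)

    #define MY_ADDR_OF(layout, buf, field)                                \
        ((void *)((uint8_t *)(buf) + layout##_##field##_OFF))

    static void fill(void *ctx, uint32_t tisn, uint32_t dek)
    {
        MY_SET(tls_params, ctx, tisn, tisn);    /* named, not offset 0 */
        MY_SET(tls_params, ctx, dek, dek);      /* named, not offset 4 */
    }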
ctx               691 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
ctx               729 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx               757 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx               786 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx              1220 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	sqc = MLX5_ADDR_OF(create_sq_in, in, ctx);
ctx              1263 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	sqc = MLX5_ADDR_OF(modify_sq_in, in, ctx);
ctx              2566 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	rqtc = MLX5_ADDR_OF(modify_rqt_in, in, ctx);
ctx              2744 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	void *tirc = MLX5_ADDR_OF(modify_tir_in, in, ctx);
ctx              2792 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	tirc = MLX5_ADDR_OF(modify_tir_in, in, ctx);
ctx              3205 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 	void *tisc = MLX5_ADDR_OF(create_tis_in, in, ctx);
ctx              3247 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 			tisc = MLX5_ADDR_OF(create_tis_in, in, ctx);
ctx              3331 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 		tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx              3346 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 		tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx              3389 drivers/net/ethernet/mellanox/mlx5/core/en_main.c 		tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx               464 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c 	tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx               546 drivers/net/ethernet/mellanox/mlx5/core/en_tc.c 		tirc = MLX5_ADDR_OF(create_tir_in, in, ctx);
ctx              1591 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c 	u32 ctx[MLX5_ST_SZ_DW(scheduling_context)] = {};
ctx              1595 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c 	MLX5_SET(scheduling_context, ctx, max_average_bw, rate_mbps);
ctx              1599 drivers/net/ethernet/mellanox/mlx5/core/eswitch.c 						  ctx,
ctx                12 drivers/net/ethernet/mellanox/mlx5/core/events.c 	void           *ctx;
ctx               141 drivers/net/ethernet/mellanox/mlx5/core/events.c 	struct mlx5_events   *events   = event_nb->ctx;
ctx               153 drivers/net/ethernet/mellanox/mlx5/core/events.c 	struct mlx5_events   *events   = event_nb->ctx;
ctx               215 drivers/net/ethernet/mellanox/mlx5/core/events.c 	struct mlx5_events   *events   = event_nb->ctx;
ctx               306 drivers/net/ethernet/mellanox/mlx5/core/events.c 	struct mlx5_events      *events   = event_nb->ctx;
ctx               329 drivers/net/ethernet/mellanox/mlx5/core/events.c 	struct mlx5_events   *events   = event_nb->ctx;
ctx               371 drivers/net/ethernet/mellanox/mlx5/core/events.c 		events->notifiers[i].ctx = events;
ctx                82 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	struct mlx5_fpga_ipsec_sa_ctx	*ctx;
ctx               258 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c static int mlx5_fpga_ipsec_cmd_wait(void *ctx)
ctx               260 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	struct mlx5_fpga_ipsec_cmd_context *context = ctx;
ctx              1067 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	rule->ctx = mlx5_fpga_ipsec_fs_create_sa_ctx(dev, fte, is_egress);
ctx              1068 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	if (IS_ERR(rule->ctx)) {
ctx              1069 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 		int err = PTR_ERR(rule->ctx);
ctx              1082 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 		mlx5_fpga_ipsec_delete_sa_ctx(rule->ctx);
ctx              1147 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	mlx5_fpga_ipsec_delete_sa_ctx(rule->ctx);
ctx              1322 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 		mlx5_fpga_ipsec_delete_sa_ctx(r->ctx);
ctx                45 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	 struct mlx5_fpga_tls_command_context *ctx,
ctx                63 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c mlx5_fpga_tls_put_command_ctx(struct mlx5_fpga_tls_command_context *ctx)
ctx                65 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	if (refcount_dec_and_test(&ctx->ref))
ctx                66 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		kfree(ctx);
ctx                73 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	struct mlx5_fpga_tls_command_context *ctx;
ctx                78 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	ctx = list_first_entry(&tls->pending_cmds,
ctx                80 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	list_del(&ctx->list);
ctx                82 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	ctx->complete(conn, fdev, ctx, resp);
ctx                90 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	struct mlx5_fpga_tls_command_context *ctx =
ctx                93 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	mlx5_fpga_tls_put_command_ctx(ctx);
ctx               242 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	struct mlx5_fpga_tls_command_context *ctx;
ctx               246 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	ctx = kzalloc(sizeof(*ctx) + MLX5_TLS_COMMAND_SIZE, flags);
ctx               247 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	if (!ctx)
ctx               250 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf = &ctx->buf;
ctx               251 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	cmd = (ctx + 1);
ctx               261 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	mlx5_fpga_tls_cmd_send(mdev->fpga, ctx,
ctx               310 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	struct mlx5_setup_stream_context *ctx =
ctx               313 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	void *tls_cmd = ctx + 1;
ctx               317 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		ctx->syndrome = MLX5_GET(tls_resp, resp->sg[0].data, syndrome);
ctx               321 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	status = atomic_xchg_release(&ctx->status, status);
ctx               323 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		complete(&ctx->comp);
ctx               328 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		      ctx->syndrome);
ctx               330 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	if (!ctx->syndrome) {
ctx               348 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 					  struct mlx5_setup_stream_context *ctx)
ctx               351 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	void *cmd = ctx + 1;
ctx               354 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf = &ctx->cmd.buf;
ctx               359 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	init_completion(&ctx->comp);
ctx               360 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	atomic_set(&ctx->status, MLX5_FPGA_CMD_PENDING);
ctx               361 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	ctx->syndrome = -1;
ctx               363 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	mlx5_fpga_tls_cmd_send(mdev->fpga, &ctx->cmd,
ctx               365 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	wait_for_completion_killable(&ctx->comp);
ctx               367 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	status = atomic_xchg_acquire(&ctx->status, MLX5_FPGA_CMD_ABANDONED);
ctx               372 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	if (unlikely(ctx->syndrome))
ctx               375 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	mlx5_fpga_tls_put_command_ctx(&ctx->cmd);
ctx               556 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	struct mlx5_setup_stream_context *ctx;
ctx               561 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	cmd_size = MLX5_TLS_COMMAND_SIZE + sizeof(*ctx);
ctx               562 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	ctx = kzalloc(cmd_size, GFP_KERNEL);
ctx               563 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	if (!ctx)
ctx               566 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	cmd = ctx + 1;
ctx               576 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	return mlx5_fpga_tls_setup_stream_cmd(mdev, ctx);
ctx               579 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	kfree(ctx);
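The fpga/tls.c entries combine two lifetime tricks: command contexts are refcounted (mlx5_fpga_tls_put_command_ctx frees on the last put), and the synchronous setup-stream path arms an atomic status as MLX5_FPGA_CMD_PENDING, waits killably, then exchanges the status to ABANDONED, while the response handler exchanges its own status in first and only completes the waiter if it still saw PENDING. Whichever side exchanges second learns the other already acted, so an interrupted waiter and a late response never both tear the context down. A stripped-down sketch of that exchange discipline, assuming C11 atomics (enum values and helper names are illustrative):

    #include <stdatomic.h>

    enum { PENDING, DONE, ABANDONED };

    struct stream_cmd { atomic_int status; };

    static void arm(struct stream_cmd *c)
    {
        atomic_store(&c->status, PENDING);      /* before sending the command */
    }

    /* response side: publish result, learn what the waiter last wrote */
    static int on_response(struct stream_cmd *c)
    {
        int prev = atomic_exchange(&c->status, DONE);
        return prev == PENDING;     /* 1: waiter still there, signal it */
    }

    /* waiter side, after the (killable) wait returns */
    static int collect(struct stream_cmd *c)
    {
        int prev = atomic_exchange(&c->status, ABANDONED);
        return prev == PENDING;     /* 1: no response yet, we abandoned it */
    }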
ctx               264 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c 	tisc = MLX5_ADDR_OF(create_tis_in, in, ctx);
ctx                52 drivers/net/ethernet/mellanox/mlx5/core/lag.c 	void *lag_ctx = MLX5_ADDR_OF(create_lag_in, in, ctx);
ctx                67 drivers/net/ethernet/mellanox/mlx5/core/lag.c 	void *lag_ctx = MLX5_ADDR_OF(modify_lag_in, in, ctx);
ctx                41 drivers/net/ethernet/mellanox/mlx5/core/rl.c 				       void *ctx, u32 *element_id)
ctx                54 drivers/net/ethernet/mellanox/mlx5/core/rl.c 	memcpy(schedc, ctx, MLX5_ST_SZ_BYTES(scheduling_context));
ctx                66 drivers/net/ethernet/mellanox/mlx5/core/rl.c 				       void *ctx, u32 element_id,
ctx                83 drivers/net/ethernet/mellanox/mlx5/core/rl.c 	memcpy(schedc, ctx, MLX5_ST_SZ_BYTES(scheduling_context));
ctx               311 drivers/net/ethernet/mellanox/mlx5/core/transobj.c 	rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
ctx               330 drivers/net/ethernet/mellanox/mlx5/core/transobj.c 	sqc = MLX5_ADDR_OF(create_sq_in, in, ctx);
ctx               389 drivers/net/ethernet/mellanox/mlx5/core/transobj.c 	rqc = MLX5_ADDR_OF(modify_rq_in, in, ctx);
ctx               410 drivers/net/ethernet/mellanox/mlx5/core/transobj.c 	sqc = MLX5_ADDR_OF(modify_sq_in, in, ctx);
ctx               670 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	void *ctx;
ctx               698 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	ctx = MLX5_ADDR_OF(query_hca_vport_context_out, out, hca_vport_context);
ctx               699 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->field_select = MLX5_GET_PR(hca_vport_context, ctx, field_select);
ctx               700 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->sm_virt_aware = MLX5_GET_PR(hca_vport_context, ctx, sm_virt_aware);
ctx               701 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->has_smi = MLX5_GET_PR(hca_vport_context, ctx, has_smi);
ctx               702 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->has_raw = MLX5_GET_PR(hca_vport_context, ctx, has_raw);
ctx               703 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->policy = MLX5_GET_PR(hca_vport_context, ctx, vport_state_policy);
ctx               704 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->phys_state = MLX5_GET_PR(hca_vport_context, ctx,
ctx               706 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->vport_state = MLX5_GET_PR(hca_vport_context, ctx, vport_state);
ctx               707 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->port_physical_state = MLX5_GET_PR(hca_vport_context, ctx,
ctx               709 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->port_guid = MLX5_GET64_PR(hca_vport_context, ctx, port_guid);
ctx               710 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->node_guid = MLX5_GET64_PR(hca_vport_context, ctx, node_guid);
ctx               711 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->cap_mask1 = MLX5_GET_PR(hca_vport_context, ctx, cap_mask1);
ctx               712 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->cap_mask1_perm = MLX5_GET_PR(hca_vport_context, ctx,
ctx               714 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->cap_mask2 = MLX5_GET_PR(hca_vport_context, ctx, cap_mask2);
ctx               715 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->cap_mask2_perm = MLX5_GET_PR(hca_vport_context, ctx,
ctx               717 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->lid = MLX5_GET_PR(hca_vport_context, ctx, lid);
ctx               718 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->init_type_reply = MLX5_GET_PR(hca_vport_context, ctx,
ctx               720 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->lmc = MLX5_GET_PR(hca_vport_context, ctx, lmc);
ctx               721 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->subnet_timeout = MLX5_GET_PR(hca_vport_context, ctx,
ctx               723 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->sm_lid = MLX5_GET_PR(hca_vport_context, ctx, sm_lid);
ctx               724 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->sm_sl = MLX5_GET_PR(hca_vport_context, ctx, sm_sl);
ctx               725 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->qkey_violation_counter = MLX5_GET_PR(hca_vport_context, ctx,
ctx               727 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->pkey_violation_counter = MLX5_GET_PR(hca_vport_context, ctx,
ctx               729 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->grh_required = MLX5_GET_PR(hca_vport_context, ctx, grh_required);
ctx               730 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	rep->sys_image_guid = MLX5_GET64_PR(hca_vport_context, ctx,
ctx              1042 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	void *ctx;
ctx              1065 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	ctx = MLX5_ADDR_OF(modify_hca_vport_context_in, in, hca_vport_context);
ctx              1066 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, field_select, req->field_select);
ctx              1067 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, sm_virt_aware, req->sm_virt_aware);
ctx              1068 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, has_smi, req->has_smi);
ctx              1069 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, has_raw, req->has_raw);
ctx              1070 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, vport_state_policy, req->policy);
ctx              1071 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, port_physical_state, req->phys_state);
ctx              1072 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, vport_state, req->vport_state);
ctx              1073 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET64(hca_vport_context, ctx, port_guid, req->port_guid);
ctx              1074 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET64(hca_vport_context, ctx, node_guid, req->node_guid);
ctx              1075 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, cap_mask1, req->cap_mask1);
ctx              1076 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, cap_mask1_field_select, req->cap_mask1_perm);
ctx              1077 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, cap_mask2, req->cap_mask2);
ctx              1078 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, cap_mask2_field_select, req->cap_mask2_perm);
ctx              1079 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, lid, req->lid);
ctx              1080 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, init_type_reply, req->init_type_reply);
ctx              1081 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, lmc, req->lmc);
ctx              1082 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, subnet_timeout, req->subnet_timeout);
ctx              1083 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, sm_lid, req->sm_lid);
ctx              1084 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, sm_sl, req->sm_sl);
ctx              1085 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, qkey_violation_counter, req->qkey_violation_counter);
ctx              1086 drivers/net/ethernet/mellanox/mlx5/core/vport.c 	MLX5_SET(hca_vport_context, ctx, pkey_violation_counter, req->pkey_violation_counter);
ctx              5303 drivers/net/ethernet/mellanox/mlxsw/spectrum.c 					     struct devlink_param_gset_ctx *ctx)
ctx              5308 drivers/net/ethernet/mellanox/mlxsw/spectrum.c 	ctx->val.vu32 = mlxsw_sp_acl_region_rehash_intrvl_get(mlxsw_sp);
ctx              5314 drivers/net/ethernet/mellanox/mlxsw/spectrum.c 					     struct devlink_param_gset_ctx *ctx)
ctx              5319 drivers/net/ethernet/mellanox/mlxsw/spectrum.c 	return mlxsw_sp_acl_region_rehash_intrvl_set(mlxsw_sp, ctx->val.vu32);
ctx               216 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		struct mlxsw_sp_acl_tcam_rehash_ctx ctx;
ctx               777 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	if (vregion->rehash.ctx.current_vchunk == vchunk) {
ctx               778 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		vregion->rehash.ctx.start_ventry = NULL;
ctx               779 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		vregion->rehash.ctx.stop_ventry = NULL;
ctx               790 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	vregion->rehash.ctx.current_vchunk = NULL;
ctx              1261 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				       struct mlxsw_sp_acl_tcam_rehash_ctx *ctx)
ctx              1270 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->current_vchunk = vchunk;
ctx              1271 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->start_ventry = NULL;
ctx              1272 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->stop_ventry = NULL;
ctx              1279 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				     struct mlxsw_sp_acl_tcam_rehash_ctx *ctx)
ctx              1283 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->current_vchunk = NULL;
ctx              1290 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				     struct mlxsw_sp_acl_tcam_rehash_ctx *ctx,
ctx              1298 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 							     region, ctx);
ctx              1309 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	if (ctx->start_ventry)
ctx              1310 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		ventry = ctx->start_ventry;
ctx              1319 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		if (ventry == ctx->stop_ventry)
ctx              1325 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 			if (ctx->this_is_rollback) {
ctx              1329 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				ctx->start_ventry = ventry;
ctx              1342 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 			ctx->start_ventry = NULL;
ctx              1343 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 			ctx->stop_ventry = ventry;
ctx              1350 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 			ctx->start_ventry = ventry;
ctx              1355 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	mlxsw_sp_acl_tcam_vchunk_migrate_end(mlxsw_sp, vchunk, ctx);
ctx              1362 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				     struct mlxsw_sp_acl_tcam_rehash_ctx *ctx,
ctx              1371 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	if (ctx->current_vchunk)
ctx              1372 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		vchunk = ctx->current_vchunk;
ctx              1380 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 							   ctx, credits);
ctx              1390 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				  struct mlxsw_sp_acl_tcam_rehash_ctx *ctx,
ctx              1398 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 						   ctx, credits);
ctx              1405 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		ctx->current_vchunk = NULL;
ctx              1406 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		ctx->this_is_rollback = true;
ctx              1408 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 							    ctx, credits);
ctx              1422 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c mlxsw_sp_acl_tcam_vregion_rehash_in_progress(const struct mlxsw_sp_acl_tcam_rehash_ctx *ctx)
ctx              1424 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	return ctx->hints_priv;
ctx              1430 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				       struct mlxsw_sp_acl_tcam_rehash_ctx *ctx)
ctx              1463 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->hints_priv = hints_priv;
ctx              1464 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->this_is_rollback = false;
ctx              1480 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 				     struct mlxsw_sp_acl_tcam_rehash_ctx *ctx)
ctx              1488 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ops->region_rehash_hints_put(ctx->hints_priv);
ctx              1489 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	ctx->hints_priv = NULL;
ctx              1497 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	struct mlxsw_sp_acl_tcam_rehash_ctx *ctx = &vregion->rehash.ctx;
ctx              1504 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 	if (!mlxsw_sp_acl_tcam_vregion_rehash_in_progress(ctx)) {
ctx              1506 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 							     vregion, ctx);
ctx              1515 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 						ctx, credits);
ctx              1521 drivers/net/ethernet/mellanox/mlxsw/spectrum_acl_tcam.c 		mlxsw_sp_acl_tcam_vregion_rehash_end(mlxsw_sp, vregion, ctx);
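The spectrum_acl_tcam.c entries sketch a resumable, credit-bounded rehash: the context carries the vchunk and ventry to resume from (current_vchunk, start_ventry), every migrated entry consumes one credit, and a failure records the position and flips this_is_rollback so a later pass can undo the work. A schematic of the resume-by-bookmark loop (struct entry, migrate_one() and the return conventions are illustrative, not the driver's):

    struct entry { struct entry *next; };

    struct rehash_ctx {
        struct entry *start;    /* resume here next call; NULL = list head */
        int rollback;           /* set after a failure; second pass undoes */
    };

    static int migrate_one(struct entry *e, int rollback)
    {
        (void)e; (void)rollback;
        return 0;               /* stub: the driver moves one TCAM entry */
    }

    /* returns 0 = chunk done, 1 = out of credits, -1 = failed */
    static int migrate(struct entry *head, struct rehash_ctx *ctx, int *credits)
    {
        struct entry *e = ctx->start ? ctx->start : head;

        for (; e; e = e->next) {
            if (*credits <= 0) {
                ctx->start = e;     /* bookmark: budget exhausted */
                return 1;           /* come back later */
            }
            if (migrate_one(e, ctx->rollback)) {
                ctx->start = e;     /* bookmark the failure point */
                ctx->rollback = 1;  /* caller re-runs in rollback mode */
                return -1;
            }
            (*credits)--;
        }
        ctx->start = NULL;          /* chunk finished cleanly */
        return 0;
    }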
ctx                25 drivers/net/ethernet/microchip/encx24j600-regmap.c static int encx24j600_switch_bank(struct encx24j600_context *ctx,
ctx                31 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = spi_write(ctx->spi, &bank_opcode, 1);
ctx                33 drivers/net/ethernet/microchip/encx24j600-regmap.c 		ctx->bank = bank;
ctx                38 drivers/net/ethernet/microchip/encx24j600-regmap.c static int encx24j600_cmdn(struct encx24j600_context *ctx, u8 opcode,
ctx                48 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return spi_sync(ctx->spi, &m);
ctx                53 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx                55 drivers/net/ethernet/microchip/encx24j600-regmap.c 	mutex_lock(&ctx->mutex);
ctx                60 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx                62 drivers/net/ethernet/microchip/encx24j600-regmap.c 	mutex_unlock(&ctx->mutex);
ctx                68 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx                78 drivers/net/ethernet/microchip/encx24j600-regmap.c 		if ((banked_reg < 0x16) && (ctx->bank != bank))
ctx                79 drivers/net/ethernet/microchip/encx24j600-regmap.c 			ret = encx24j600_switch_bank(ctx, bank);
ctx               111 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = spi_write_then_read(ctx->spi, tx_buf, i, val, len);
ctx               116 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_sfr_update(struct encx24j600_context *ctx,
ctx               132 drivers/net/ethernet/microchip/encx24j600-regmap.c 		if ((banked_reg < 0x16) && (ctx->bank != bank))
ctx               133 drivers/net/ethernet/microchip/encx24j600-regmap.c 			ret = encx24j600_switch_bank(ctx, bank);
ctx               170 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return spi_sync(ctx->spi, &m);
ctx               176 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               178 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return regmap_encx24j600_sfr_update(ctx, reg, val, len, WCRU, WCRCODE);
ctx               181 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_sfr_set_bits(struct encx24j600_context *ctx,
ctx               184 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return regmap_encx24j600_sfr_update(ctx, reg, &val, 1, BFSU, BFSCODE);
ctx               187 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_sfr_clr_bits(struct encx24j600_context *ctx,
ctx               190 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return regmap_encx24j600_sfr_update(ctx, reg, &val, 1, BFCU, BFCCODE);
ctx               197 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               207 drivers/net/ethernet/microchip/encx24j600-regmap.c 		ret = regmap_encx24j600_sfr_set_bits(ctx, reg, set_mask);
ctx               212 drivers/net/ethernet/microchip/encx24j600-regmap.c 		ret = regmap_encx24j600_sfr_set_bits(ctx, reg + 1, set_mask);
ctx               215 drivers/net/ethernet/microchip/encx24j600-regmap.c 		ret = regmap_encx24j600_sfr_clr_bits(ctx, reg, clr_mask);
ctx               220 drivers/net/ethernet/microchip/encx24j600-regmap.c 		ret = regmap_encx24j600_sfr_clr_bits(ctx, reg + 1, clr_mask);
ctx               228 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               231 drivers/net/ethernet/microchip/encx24j600-regmap.c 		return encx24j600_cmdn(ctx, reg, data, count);
ctx               234 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return spi_write(ctx->spi, &reg, 1);
ctx               240 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               245 drivers/net/ethernet/microchip/encx24j600-regmap.c 	return spi_write_then_read(ctx->spi, &reg, sizeof(reg), data, count);
ctx               353 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               358 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_write(ctx->regmap, MIREGADR, reg);
ctx               362 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_write(ctx->regmap, MICMD, MIIRD);
ctx               367 drivers/net/ethernet/microchip/encx24j600-regmap.c 	while ((ret = regmap_read(ctx->regmap, MISTAT, &mistat) != 0) &&
ctx               374 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_write(ctx->regmap, MICMD, 0);
ctx               378 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_read(ctx->regmap, MIRD, val);
ctx               391 drivers/net/ethernet/microchip/encx24j600-regmap.c 	struct encx24j600_context *ctx = context;
ctx               396 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_write(ctx->regmap, MIREGADR, reg);
ctx               400 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ret = regmap_write(ctx->regmap, MIWR, val);
ctx               405 drivers/net/ethernet/microchip/encx24j600-regmap.c 	while ((ret = regmap_read(ctx->regmap, MISTAT, &mistat) != 0) &&
ctx               506 drivers/net/ethernet/microchip/encx24j600-regmap.c 				 struct encx24j600_context *ctx)
ctx               508 drivers/net/ethernet/microchip/encx24j600-regmap.c 	mutex_init(&ctx->mutex);
ctx               509 drivers/net/ethernet/microchip/encx24j600-regmap.c 	regcfg.lock_arg = ctx;
ctx               510 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ctx->regmap = devm_regmap_init(dev, &regmap_encx24j600, ctx, &regcfg);
ctx               511 drivers/net/ethernet/microchip/encx24j600-regmap.c 	ctx->phymap = devm_regmap_init(dev, &phymap_encx24j600, ctx, &phycfg);
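The encx24j600-regmap.c entries show lazily switched register banks over SPI: the context caches the currently selected bank, and any access to a banked register (offset below 0x16) first issues a bank-select opcode when the cached bank differs, updating the cache only if the write succeeded. A hedged sketch of that cache-and-switch step (spi_send(), the bank encoding in the register number, and BANK_SELECT_OP() are illustrative placeholders for the driver's SPI plumbing):

    #include <stdint.h>

    #define BANKED_LIMIT 0x16   /* offsets below this are banked */

    struct enc_ctx { int bank; };

    static int spi_send(uint8_t opcode) { (void)opcode; return 0; } /* stub */

    #define BANK_SELECT_OP(bank) ((uint8_t)(0xc0 | (((bank) & 3) << 1)))

    static int select_bank_for(struct enc_ctx *ctx, unsigned int reg)
    {
        unsigned int bank = (reg >> 5) & 0x3;   /* illustrative encoding */
        unsigned int banked_reg = reg & 0x1f;
        int ret = 0;

        if (banked_reg < BANKED_LIMIT && ctx->bank != (int)bank) {
            ret = spi_send(BANK_SELECT_OP(bank));
            if (ret == 0)
                ctx->bank = bank;   /* cache only on success */
        }
        return ret;
    }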
ctx                50 drivers/net/ethernet/microchip/encx24j600.c 	struct encx24j600_context ctx;
ctx               102 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_read(priv->ctx.regmap, reg, &val);
ctx               113 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_write(priv->ctx.regmap, reg, val);
ctx               124 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_update_bits(priv->ctx.regmap, reg, mask, val);
ctx               135 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_read(priv->ctx.phymap, reg, &val);
ctx               146 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_write(priv->ctx.phymap, reg, val);
ctx               166 drivers/net/ethernet/microchip/encx24j600.c 	int ret = regmap_write(priv->ctx.regmap, cmd, 0);
ctx               178 drivers/net/ethernet/microchip/encx24j600.c 	mutex_lock(&priv->ctx.mutex);
ctx               179 drivers/net/ethernet/microchip/encx24j600.c 	ret = regmap_encx24j600_spi_read(&priv->ctx, reg, data, count);
ctx               180 drivers/net/ethernet/microchip/encx24j600.c 	mutex_unlock(&priv->ctx.mutex);
ctx               190 drivers/net/ethernet/microchip/encx24j600.c 	mutex_lock(&priv->ctx.mutex);
ctx               191 drivers/net/ethernet/microchip/encx24j600.c 	ret = regmap_encx24j600_spi_write(&priv->ctx, reg, data, count);
ctx               192 drivers/net/ethernet/microchip/encx24j600.c 	mutex_unlock(&priv->ctx.mutex);
ctx               457 drivers/net/ethernet/microchip/encx24j600.c 	regcache_cache_bypass(priv->ctx.regmap, true);
ctx               464 drivers/net/ethernet/microchip/encx24j600.c 	regcache_cache_bypass(priv->ctx.regmap, false);
ctx               776 drivers/net/ethernet/microchip/encx24j600.c 	int ret = request_threaded_irq(priv->ctx.spi->irq, NULL, encx24j600_isr,
ctx               781 drivers/net/ethernet/microchip/encx24j600.c 			   priv->ctx.spi->irq, ret);
ctx               798 drivers/net/ethernet/microchip/encx24j600.c 	free_irq(priv->ctx.spi->irq, priv);
ctx               923 drivers/net/ethernet/microchip/encx24j600.c 		regmap_read(priv->ctx.regmap, reg, &val);
ctx              1029 drivers/net/ethernet/microchip/encx24j600.c 	priv->ctx.spi = spi;
ctx              1030 drivers/net/ethernet/microchip/encx24j600.c 	devm_regmap_init_encx24j600(&spi->dev, &priv->ctx);
ctx                19 drivers/net/ethernet/microchip/encx24j600_hw.h 				 struct encx24j600_context *ctx);
ctx                80 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	   u8 mode, u8 xfer, u8 areg, u8 breg, u8 size, enum cmd_ctx_swap ctx,
ctx                86 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		FIELD_PREP(OP_CMD_CTX, ctx) |
ctx                91 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		FIELD_PREP(OP_CMD_SIG, ctx != CMD_CTX_NO_SWAP) |
ctx               101 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	     swreg lreg, swreg rreg, u8 size, enum cmd_ctx_swap ctx, bool indir)
ctx               122 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	__emit_cmd(nfp_prog, op, mode, xfer, reg.areg, reg.breg, size, ctx,
ctx               128 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	 swreg lreg, swreg rreg, u8 size, enum cmd_ctx_swap ctx)
ctx               130 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_cmd_any(nfp_prog, op, mode, xfer, lreg, rreg, size, ctx, false);
ctx               135 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	       swreg lreg, swreg rreg, u8 size, enum cmd_ctx_swap ctx)
ctx               137 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_cmd_any(nfp_prog, op, mode, xfer, lreg, rreg, size, ctx, true);
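The nfp/bpf/jit.c entries assemble a command instruction by OR-ing FIELD_PREP() terms, one per named bit-field, including a derived field: OP_CMD_SIG is set whenever the ctx swap mode is not CMD_CTX_NO_SWAP. A small sketch of that encode step with illustrative field masks (the masks and sentinel value are not the NFP encoding):

    #include <linux/bits.h>
    #include <linux/bitfield.h>
    #include <linux/types.h>

    #define OP_X_CTX    GENMASK_ULL(3, 0)
    #define OP_X_SIZE   GENMASK_ULL(11, 4)
    #define OP_X_SIG    BIT_ULL(12)

    #define MY_CTX_NO_SWAP 7    /* illustrative sentinel */

    static u64 encode(u8 ctx, u8 size)
    {
        return FIELD_PREP(OP_X_CTX, ctx) |
               FIELD_PREP(OP_X_SIZE, size) |
               FIELD_PREP(OP_X_SIG, ctx != MY_CTX_NO_SWAP); /* derived bit */
    }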
ctx                84 drivers/net/ethernet/netronome/nfp/devlink_param.c 			 struct devlink_param_gset_ctx *ctx)
ctx               119 drivers/net/ethernet/netronome/nfp/devlink_param.c 			ctx->val.vu8 = arg->invalid_dl_val;
ctx               126 drivers/net/ethernet/netronome/nfp/devlink_param.c 	ctx->val.vu8 = arg->hi_to_dl[value];
ctx               135 drivers/net/ethernet/netronome/nfp/devlink_param.c 			 struct devlink_param_gset_ctx *ctx)
ctx               157 drivers/net/ethernet/netronome/nfp/devlink_param.c 		 arg->hwinfo_name, arg->dl_to_hi[ctx->val.vu8]);
ctx               233 drivers/net/ethernet/netronome/nfp/flower/cmsg.c 		u32 ctx = be32_to_cpu(msg->flow[i].host_ctx);
ctx               235 drivers/net/ethernet/netronome/nfp/flower/cmsg.c 		sub_flows[i] = nfp_flower_get_fl_payload_from_ctx(app, ctx);
ctx               537 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	u32 ctx;
ctx               540 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	for (ctx = 0; ctx < NFP_IND_NUM_CONTEXTS; ctx++) {
ctx               542 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 					    reg_sz, ctx, dest + ctx * reg_sz);
ctx                60 drivers/net/ethernet/pensando/ionic/ionic.h int ionic_adminq_post_wait(struct ionic_lif *lif, struct ionic_admin_ctx *ctx);
ctx               183 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               184 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               198 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		ctx.cmd.q_control.index, ctx.cmd.q_control.type);
ctx               209 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_adminq_post_wait(lif, &ctx);
ctx               219 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               220 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               234 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		ctx.cmd.q_control.index, ctx.cmd.q_control.type);
ctx               244 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_adminq_post_wait(lif, &ctx);
ctx               558 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               559 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               577 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "txq_init.pid %d\n", ctx.cmd.q_init.pid);
ctx               578 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "txq_init.index %d\n", ctx.cmd.q_init.index);
ctx               579 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "txq_init.ring_base 0x%llx\n", ctx.cmd.q_init.ring_base);
ctx               580 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "txq_init.ring_size %d\n", ctx.cmd.q_init.ring_size);
ctx               582 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx               586 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_type = ctx.comp.q_init.hw_type;
ctx               587 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_index = le32_to_cpu(ctx.comp.q_init.hw_index);
ctx               605 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               606 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               622 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "rxq_init.pid %d\n", ctx.cmd.q_init.pid);
ctx               623 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "rxq_init.index %d\n", ctx.cmd.q_init.index);
ctx               624 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "rxq_init.ring_base 0x%llx\n", ctx.cmd.q_init.ring_base);
ctx               625 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "rxq_init.ring_size %d\n", ctx.cmd.q_init.ring_size);
ctx               627 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx               631 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_type = ctx.comp.q_init.hw_type;
ctx               632 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_index = le32_to_cpu(ctx.comp.q_init.hw_index);
ctx               794 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               795 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               813 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		   ctx.comp.rx_filter_add.filter_id);
ctx               815 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	memcpy(ctx.cmd.rx_filter_add.mac.addr, addr, ETH_ALEN);
ctx               816 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx               820 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_rx_filter_save(lif, 0, IONIC_RXQ_INDEX_ANY, 0, &ctx);
ctx               825 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               826 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               842 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	ctx.cmd.rx_filter_del.filter_id = cpu_to_le32(f->filter_id);
ctx               846 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx               851 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		   ctx.cmd.rx_filter_del.filter_id);
ctx               921 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx               922 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx               948 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1068 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1069 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1081 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	ctx.cmd.lif_setattr.features = ionic_netdev_features_to_nic(features);
ctx              1082 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1086 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	lif->hw_features = le64_to_cpu(ctx.cmd.lif_setattr.features &
ctx              1087 drivers/net/ethernet/pensando/ionic/ionic_lif.c 				       ctx.comp.lif_setattr.features);
ctx              1090 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	    !(vlan_flags & le64_to_cpu(ctx.comp.lif_setattr.features)))
ctx              1239 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1240 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1250 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1282 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1283 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1293 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1298 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		   ctx.comp.rx_filter_add.filter_id);
ctx              1300 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_rx_filter_save(lif, 0, IONIC_RXQ_INDEX_ANY, 0, &ctx);
ctx              1307 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1308 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1325 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		   le32_to_cpu(ctx.cmd.rx_filter_del.filter_id));
ctx              1327 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	ctx.cmd.rx_filter_del.filter_id = cpu_to_le32(f->filter_id);
ctx              1331 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_adminq_post_wait(lif, &ctx);
ctx              1337 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1338 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1359 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	memcpy(ctx.cmd.lif_setattr.rss.key, lif->rss_hash_key,
ctx              1362 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	return ionic_adminq_post_wait(lif, &ctx);
ctx              1889 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1890 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1905 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "notifyq_init.pid %d\n", ctx.cmd.q_init.pid);
ctx              1906 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "notifyq_init.index %d\n", ctx.cmd.q_init.index);
ctx              1907 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "notifyq_init.ring_base 0x%llx\n", ctx.cmd.q_init.ring_base);
ctx              1908 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	dev_dbg(dev, "notifyq_init.ring_size %d\n", ctx.cmd.q_init.ring_size);
ctx              1910 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1914 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_type = ctx.comp.q_init.hw_type;
ctx              1915 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	q->hw_index = le32_to_cpu(ctx.comp.q_init.hw_index);
ctx              1934 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              1935 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              1945 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	err = ionic_adminq_post_wait(lif, &ctx);
ctx              1949 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	memcpy(addr.sa_data, ctx.comp.lif_getattr.mac, netdev->addr_len);
ctx              2080 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	struct ionic_admin_ctx ctx = {
ctx              2081 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx              2089 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	strlcpy(ctx.cmd.lif_setattr.name, lif->netdev->name,
ctx              2090 drivers/net/ethernet/pensando/ionic/ionic_lif.c 		sizeof(ctx.cmd.lif_setattr.name));
ctx              2092 drivers/net/ethernet/pensando/ionic/ionic_lif.c 	ionic_adminq_post_wait(lif, &ctx);
ctx               189 drivers/net/ethernet/pensando/ionic/ionic_main.c 				  struct ionic_admin_ctx *ctx,
ctx               197 drivers/net/ethernet/pensando/ionic/ionic_main.c 	if (ctx->comp.comp.status || timeout) {
ctx               198 drivers/net/ethernet/pensando/ionic/ionic_main.c 		opcode_str = ionic_opcode_to_str(ctx->cmd.cmd.opcode);
ctx               199 drivers/net/ethernet/pensando/ionic/ionic_main.c 		status_str = ionic_error_to_str(ctx->comp.comp.status);
ctx               201 drivers/net/ethernet/pensando/ionic/ionic_main.c 				ionic_error_to_errno(ctx->comp.comp.status);
ctx               204 drivers/net/ethernet/pensando/ionic/ionic_main.c 			   opcode_str, ctx->cmd.cmd.opcode,
ctx               218 drivers/net/ethernet/pensando/ionic/ionic_main.c 	struct ionic_admin_ctx *ctx = cb_arg;
ctx               222 drivers/net/ethernet/pensando/ionic/ionic_main.c 	if (!ctx)
ctx               228 drivers/net/ethernet/pensando/ionic/ionic_main.c 	memcpy(&ctx->comp, comp, sizeof(*comp));
ctx               232 drivers/net/ethernet/pensando/ionic/ionic_main.c 			 &ctx->comp, sizeof(ctx->comp), true);
ctx               234 drivers/net/ethernet/pensando/ionic/ionic_main.c 	complete_all(&ctx->work);
ctx               237 drivers/net/ethernet/pensando/ionic/ionic_main.c static int ionic_adminq_post(struct ionic_lif *lif, struct ionic_admin_ctx *ctx)
ctx               250 drivers/net/ethernet/pensando/ionic/ionic_main.c 	memcpy(adminq->head->desc, &ctx->cmd, sizeof(ctx->cmd));
ctx               254 drivers/net/ethernet/pensando/ionic/ionic_main.c 			 &ctx->cmd, sizeof(ctx->cmd), true);
ctx               256 drivers/net/ethernet/pensando/ionic/ionic_main.c 	ionic_q_post(adminq, true, ionic_adminq_cb, ctx);
ctx               264 drivers/net/ethernet/pensando/ionic/ionic_main.c int ionic_adminq_post_wait(struct ionic_lif *lif, struct ionic_admin_ctx *ctx)
ctx               271 drivers/net/ethernet/pensando/ionic/ionic_main.c 	err = ionic_adminq_post(lif, ctx);
ctx               273 drivers/net/ethernet/pensando/ionic/ionic_main.c 		name = ionic_opcode_to_str(ctx->cmd.cmd.opcode);
ctx               275 drivers/net/ethernet/pensando/ionic/ionic_main.c 			   name, ctx->cmd.cmd.opcode, err);
ctx               279 drivers/net/ethernet/pensando/ionic/ionic_main.c 	remaining = wait_for_completion_timeout(&ctx->work,
ctx               281 drivers/net/ethernet/pensando/ionic/ionic_main.c 	return ionic_adminq_check_err(lif, ctx, (remaining == 0));
ctx                22 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 	struct ionic_admin_ctx ctx = {
ctx                23 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 		.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
ctx                30 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 	return ionic_adminq_post_wait(lif, &ctx);
ctx                62 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 			 u32 hash, struct ionic_admin_ctx *ctx)
ctx                70 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 	ac = &ctx->cmd.rx_filter_add;
ctx                91 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.c 	f->filter_id = le32_to_cpu(ctx->comp.rx_filter_add.filter_id);
ctx                31 drivers/net/ethernet/pensando/ionic/ionic_rx_filter.h 			 u32 hash, struct ionic_admin_ctx *ctx);
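The pensando/ionic entries above all repeat one shape: an admin command context is built on the stack with an on-stack completion, posted with ionic_adminq_post_wait(), and the firmware completion is copied back into ctx.comp by ionic_adminq_cb(). A minimal sketch of that shape, using only the types and helpers visible in the entries; the opcode initializer and error handling are reduced to the essentials:

	/* drivers/net/ethernet/pensando/ionic: AdminQ post-and-wait sketch */
	static int example_rx_filter_del(struct ionic_lif *lif, u32 filter_id)
	{
		struct ionic_admin_ctx ctx = {
			/* on-stack completion; ionic_adminq_cb() does
			 * complete_all(&ctx.work) once the firmware answers */
			.work = COMPLETION_INITIALIZER_ONSTACK(ctx.work),
			.cmd.rx_filter_del = {
				.opcode = IONIC_CMD_RX_FILTER_DEL,
				.filter_id = cpu_to_le32(filter_id),
			},
		};

		/* posts the descriptor, waits with a timeout, and maps
		 * ctx.comp.comp.status to an errno in ionic_adminq_check_err() */
		return ionic_adminq_post_wait(lif, &ctx);
	}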
ctx               359 drivers/net/ethernet/qlogic/qed/qed_main.c 			    struct devlink_param_gset_ctx *ctx)
ctx               366 drivers/net/ethernet/qlogic/qed/qed_main.c 	ctx->val.vbool = cdev->iwarp_cmt;
ctx               372 drivers/net/ethernet/qlogic/qed/qed_main.c 			    struct devlink_param_gset_ctx *ctx)
ctx               379 drivers/net/ethernet/qlogic/qed/qed_main.c 	cdev->iwarp_cmt = ctx->val.vbool;
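The qed pair above is the stock devlink parameter convention: struct devlink_param_gset_ctx carries a typed value, the getter fills ctx->val and the setter consumes it. Reconstructed from the two entries; the devlink_priv() unwrap to the qed_dev is an assumption from context:

	/* drivers/net/ethernet/qlogic/qed/qed_main.c: devlink param sketch */
	static int example_iwarp_cmt_get(struct devlink *dl, u32 id,
					 struct devlink_param_gset_ctx *ctx)
	{
		struct qed_devlink *qed_dl = devlink_priv(dl);	/* assumed priv layout */
		struct qed_dev *cdev = qed_dl->cdev;

		ctx->val.vbool = cdev->iwarp_cmt;	/* getter fills ctx->val */
		return 0;
	}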
ctx              2698 drivers/net/ethernet/sfc/ef10.c 				   struct efx_rss_context *ctx)
ctx              2705 drivers/net/ethernet/sfc/ef10.c 	if (efx_ef10_get_rss_flags(efx, ctx->context_id, &flags) != 0)
ctx              2708 drivers/net/ethernet/sfc/ef10.c 		       ctx->context_id);
ctx              2715 drivers/net/ethernet/sfc/ef10.c 		ctx->rx_hash_udp_4tuple = true;
ctx              2719 drivers/net/ethernet/sfc/ef10.c 				      struct efx_rss_context *ctx,
ctx              2736 drivers/net/ethernet/sfc/ef10.c 		ctx->context_id = EFX_EF10_RSS_CONTEXT_INVALID;
ctx              2759 drivers/net/ethernet/sfc/ef10.c 	ctx->context_id = MCDI_DWORD(outbuf, RSS_CONTEXT_ALLOC_OUT_RSS_CONTEXT_ID);
ctx              2766 drivers/net/ethernet/sfc/ef10.c 		efx_ef10_set_rss_flags(efx, ctx);
ctx              2892 drivers/net/ethernet/sfc/ef10.c 					       struct efx_rss_context *ctx,
ctx              2900 drivers/net/ethernet/sfc/ef10.c 	if (ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID) {
ctx              2901 drivers/net/ethernet/sfc/ef10.c 		rc = efx_ef10_alloc_rss_context(efx, true, ctx, NULL);
ctx              2907 drivers/net/ethernet/sfc/ef10.c 		return efx_ef10_free_rss_context(efx, ctx->context_id);
ctx              2909 drivers/net/ethernet/sfc/ef10.c 	rc = efx_ef10_populate_rss_table(efx, ctx->context_id,
ctx              2914 drivers/net/ethernet/sfc/ef10.c 	memcpy(ctx->rx_indir_table, rx_indir_table,
ctx              2916 drivers/net/ethernet/sfc/ef10.c 	memcpy(ctx->rx_hash_key, key, efx->type->rx_hash_key_size);
ctx              2922 drivers/net/ethernet/sfc/ef10.c 					       struct efx_rss_context *ctx)
ctx              2935 drivers/net/ethernet/sfc/ef10.c 	if (ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID)
ctx              2939 drivers/net/ethernet/sfc/ef10.c 		       ctx->context_id);
ctx              2940 drivers/net/ethernet/sfc/ef10.c 	BUILD_BUG_ON(ARRAY_SIZE(ctx->rx_indir_table) !=
ctx              2950 drivers/net/ethernet/sfc/ef10.c 	for (i = 0; i < ARRAY_SIZE(ctx->rx_indir_table); i++)
ctx              2951 drivers/net/ethernet/sfc/ef10.c 		ctx->rx_indir_table[i] = MCDI_PTR(tablebuf,
ctx              2955 drivers/net/ethernet/sfc/ef10.c 		       ctx->context_id);
ctx              2956 drivers/net/ethernet/sfc/ef10.c 	BUILD_BUG_ON(ARRAY_SIZE(ctx->rx_hash_key) !=
ctx              2966 drivers/net/ethernet/sfc/ef10.c 	for (i = 0; i < ARRAY_SIZE(ctx->rx_hash_key); ++i)
ctx              2967 drivers/net/ethernet/sfc/ef10.c 		ctx->rx_hash_key[i] = MCDI_PTR(
ctx              2986 drivers/net/ethernet/sfc/ef10.c 	struct efx_rss_context *ctx;
ctx              2994 drivers/net/ethernet/sfc/ef10.c 	list_for_each_entry(ctx, &efx->rss_context.list, list) {
ctx              2996 drivers/net/ethernet/sfc/ef10.c 		ctx->context_id = EFX_EF10_RSS_CONTEXT_INVALID;
ctx              2998 drivers/net/ethernet/sfc/ef10.c 		rc = efx_ef10_rx_push_rss_context_config(efx, ctx,
ctx              2999 drivers/net/ethernet/sfc/ef10.c 							 ctx->rx_indir_table,
ctx              3000 drivers/net/ethernet/sfc/ef10.c 							 ctx->rx_hash_key);
ctx              3005 drivers/net/ethernet/sfc/ef10.c 				   ctx->user_id, rc);
ctx              4149 drivers/net/ethernet/sfc/ef10.c 				      struct efx_rss_context *ctx,
ctx              4162 drivers/net/ethernet/sfc/ef10.c 		if (WARN_ON_ONCE(!ctx))
ctx              4164 drivers/net/ethernet/sfc/ef10.c 		else if (WARN_ON_ONCE(ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID))
ctx              4192 drivers/net/ethernet/sfc/ef10.c 		MCDI_SET_DWORD(inbuf, FILTER_OP_IN_RX_CONTEXT, ctx->context_id);
ctx              4197 drivers/net/ethernet/sfc/ef10.c 				struct efx_rss_context *ctx, bool replacing)
ctx              4203 drivers/net/ethernet/sfc/ef10.c 	efx_ef10_filter_push_prep(efx, spec, inbuf, *handle, ctx, replacing);
ctx              4296 drivers/net/ethernet/sfc/ef10.c 	struct efx_rss_context *ctx = NULL;
ctx              4332 drivers/net/ethernet/sfc/ef10.c 			ctx = efx_find_rss_context_entry(efx, spec->rss_context);
ctx              4334 drivers/net/ethernet/sfc/ef10.c 			ctx = &efx->rss_context;
ctx              4335 drivers/net/ethernet/sfc/ef10.c 		if (!ctx) {
ctx              4339 drivers/net/ethernet/sfc/ef10.c 		if (ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID) {
ctx              4420 drivers/net/ethernet/sfc/ef10.c 				  ctx, replacing);
ctx              5037 drivers/net/ethernet/sfc/ef10.c 	struct efx_rss_context *ctx;
ctx              5069 drivers/net/ethernet/sfc/ef10.c 			ctx = efx_find_rss_context_entry(efx, spec->rss_context);
ctx              5071 drivers/net/ethernet/sfc/ef10.c 			ctx = &efx->rss_context;
ctx              5073 drivers/net/ethernet/sfc/ef10.c 			if (!ctx) {
ctx              5080 drivers/net/ethernet/sfc/ef10.c 			if (ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID) {
ctx              5091 drivers/net/ethernet/sfc/ef10.c 					  ctx, false);
ctx              1376 drivers/net/ethernet/sfc/efx.c 				    struct efx_rss_context *ctx)
ctx              1380 drivers/net/ethernet/sfc/efx.c 	for (i = 0; i < ARRAY_SIZE(ctx->rx_indir_table); i++)
ctx              1381 drivers/net/ethernet/sfc/efx.c 		ctx->rx_indir_table[i] =
ctx              3255 drivers/net/ethernet/sfc/efx.c 	struct efx_rss_context *ctx, *new;
ctx              3261 drivers/net/ethernet/sfc/efx.c 	list_for_each_entry(ctx, head, list) {
ctx              3262 drivers/net/ethernet/sfc/efx.c 		if (ctx->user_id != id)
ctx              3281 drivers/net/ethernet/sfc/efx.c 	list_add_tail(&new->list, &ctx->list);
ctx              3288 drivers/net/ethernet/sfc/efx.c 	struct efx_rss_context *ctx;
ctx              3292 drivers/net/ethernet/sfc/efx.c 	list_for_each_entry(ctx, head, list)
ctx              3293 drivers/net/ethernet/sfc/efx.c 		if (ctx->user_id == id)
ctx              3294 drivers/net/ethernet/sfc/efx.c 			return ctx;
ctx              3298 drivers/net/ethernet/sfc/efx.c void efx_free_rss_context_entry(struct efx_rss_context *ctx)
ctx              3300 drivers/net/ethernet/sfc/efx.c 	list_del(&ctx->list);
ctx              3301 drivers/net/ethernet/sfc/efx.c 	kfree(ctx);
ctx                35 drivers/net/ethernet/sfc/efx.h 				    struct efx_rss_context *ctx);
ctx               210 drivers/net/ethernet/sfc/efx.h void efx_free_rss_context_entry(struct efx_rss_context *ctx);
ctx               211 drivers/net/ethernet/sfc/efx.h static inline bool efx_rss_active(struct efx_rss_context *ctx)
ctx               213 drivers/net/ethernet/sfc/efx.h 	return ctx->context_id != EFX_EF10_RSS_CONTEXT_INVALID;
ctx               987 drivers/net/ethernet/sfc/ethtool.c 		struct efx_rss_context *ctx = &efx->rss_context;
ctx               991 drivers/net/ethernet/sfc/ethtool.c 			ctx = efx_find_rss_context_entry(efx, info->rss_context);
ctx               992 drivers/net/ethernet/sfc/ethtool.c 			if (!ctx) {
ctx               998 drivers/net/ethernet/sfc/ethtool.c 		if (!efx_rss_active(ctx)) /* No RSS */
ctx              1002 drivers/net/ethernet/sfc/ethtool.c 			if (ctx->rx_hash_udp_4tuple)
ctx              1013 drivers/net/ethernet/sfc/ethtool.c 			if (ctx->rx_hash_udp_4tuple)
ctx              1370 drivers/net/ethernet/sfc/ethtool.c 	struct efx_rss_context *ctx;
ctx              1377 drivers/net/ethernet/sfc/ethtool.c 	ctx = efx_find_rss_context_entry(efx, rss_context);
ctx              1378 drivers/net/ethernet/sfc/ethtool.c 	if (!ctx) {
ctx              1382 drivers/net/ethernet/sfc/ethtool.c 	rc = efx->type->rx_pull_rss_context_config(efx, ctx);
ctx              1389 drivers/net/ethernet/sfc/ethtool.c 		memcpy(indir, ctx->rx_indir_table, sizeof(ctx->rx_indir_table));
ctx              1391 drivers/net/ethernet/sfc/ethtool.c 		memcpy(key, ctx->rx_hash_key, efx->type->rx_hash_key_size);
ctx              1403 drivers/net/ethernet/sfc/ethtool.c 	struct efx_rss_context *ctx;
ctx              1421 drivers/net/ethernet/sfc/ethtool.c 		ctx = efx_alloc_rss_context_entry(efx);
ctx              1422 drivers/net/ethernet/sfc/ethtool.c 		if (!ctx) {
ctx              1426 drivers/net/ethernet/sfc/ethtool.c 		ctx->context_id = EFX_EF10_RSS_CONTEXT_INVALID;
ctx              1428 drivers/net/ethernet/sfc/ethtool.c 		efx_set_default_rx_indir_table(efx, ctx);
ctx              1429 drivers/net/ethernet/sfc/ethtool.c 		netdev_rss_key_fill(ctx->rx_hash_key, sizeof(ctx->rx_hash_key));
ctx              1432 drivers/net/ethernet/sfc/ethtool.c 		ctx = efx_find_rss_context_entry(efx, *rss_context);
ctx              1433 drivers/net/ethernet/sfc/ethtool.c 		if (!ctx) {
ctx              1441 drivers/net/ethernet/sfc/ethtool.c 		rc = efx->type->rx_push_rss_context_config(efx, ctx, NULL, NULL);
ctx              1443 drivers/net/ethernet/sfc/ethtool.c 			efx_free_rss_context_entry(ctx);
ctx              1448 drivers/net/ethernet/sfc/ethtool.c 		key = ctx->rx_hash_key;
ctx              1450 drivers/net/ethernet/sfc/ethtool.c 		indir = ctx->rx_indir_table;
ctx              1452 drivers/net/ethernet/sfc/ethtool.c 	rc = efx->type->rx_push_rss_context_config(efx, ctx, indir, key);
ctx              1454 drivers/net/ethernet/sfc/ethtool.c 		efx_free_rss_context_entry(ctx);
ctx              1456 drivers/net/ethernet/sfc/ethtool.c 		*rss_context = ctx->user_id;
ctx              1331 drivers/net/ethernet/sfc/net_driver.h 					  struct efx_rss_context *ctx,
ctx              1335 drivers/net/ethernet/sfc/net_driver.h 					  struct efx_rss_context *ctx);
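The sfc block reduces to one small lifecycle: per-user RSS contexts live on efx->rss_context.list, are addressed by ctx->user_id, and ctx->context_id == EFX_EF10_RSS_CONTEXT_INVALID marks a context not currently pushed to hardware. The lookup helper, reconstructed directly from the efx.c entries above (only the NULL fall-through is added):

	/* drivers/net/ethernet/sfc/efx.c: find an RSS context by user id */
	struct efx_rss_context *efx_find_rss_context_entry(struct efx_nic *efx, u32 id)
	{
		struct efx_rss_context *ctx;
		struct list_head *head = &efx->rss_context.list;

		list_for_each_entry(ctx, head, list)
			if (ctx->user_id == id)
				return ctx;
		return NULL;	/* callers map this to -ENOENT */
	}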
ctx               202 drivers/net/ethernet/ti/cpsw.c static int cpsw_update_vlan_mc(struct net_device *vdev, int vid, void *ctx)
ctx               204 drivers/net/ethernet/ti/cpsw.c 	struct addr_sync_ctx *sync_ctx = ctx;
ctx               267 drivers/net/ethernet/ti/cpsw.c static int cpsw_purge_vlan_mc(struct net_device *vdev, int vid, void *ctx)
ctx               269 drivers/net/ethernet/ti/cpsw.c 	struct addr_sync_ctx *sync_ctx = ctx;
ctx               139 drivers/net/ethernet/ti/netcp.h 	void (*txtstamp)(void *ctx, struct sk_buff *skb);
ctx               114 drivers/net/ieee802154/at86rf230.c 			     struct at86rf230_state_change *ctx,
ctx               340 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               341 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               343 drivers/net/ieee802154/at86rf230.c 	if (ctx->free)
ctx               344 drivers/net/ieee802154/at86rf230.c 		kfree(ctx);
ctx               352 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               353 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               356 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_change(lp, ctx, STATE_RX_AACK_ON,
ctx               362 drivers/net/ieee802154/at86rf230.c 		      struct at86rf230_state_change *ctx, int rc)
ctx               366 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_change(lp, ctx, STATE_FORCE_TRX_OFF,
ctx               373 drivers/net/ieee802154/at86rf230.c 			 struct at86rf230_state_change *ctx,
ctx               378 drivers/net/ieee802154/at86rf230.c 	u8 *tx_buf = ctx->buf;
ctx               381 drivers/net/ieee802154/at86rf230.c 	ctx->msg.complete = complete;
ctx               382 drivers/net/ieee802154/at86rf230.c 	rc = spi_async(lp->spi, &ctx->msg);
ctx               384 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_error(lp, ctx, rc);
ctx               389 drivers/net/ieee802154/at86rf230.c 			  struct at86rf230_state_change *ctx,
ctx               394 drivers/net/ieee802154/at86rf230.c 	ctx->buf[0] = (reg & CMD_REG_MASK) | CMD_REG | CMD_WRITE;
ctx               395 drivers/net/ieee802154/at86rf230.c 	ctx->buf[1] = val;
ctx               396 drivers/net/ieee802154/at86rf230.c 	ctx->msg.complete = complete;
ctx               397 drivers/net/ieee802154/at86rf230.c 	rc = spi_async(lp->spi, &ctx->msg);
ctx               399 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_error(lp, ctx, rc);
ctx               405 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               406 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               407 drivers/net/ieee802154/at86rf230.c 	const u8 *buf = ctx->buf;
ctx               411 drivers/net/ieee802154/at86rf230.c 	if (trx_state != ctx->to_state) {
ctx               422 drivers/net/ieee802154/at86rf230.c 			if (ctx->to_state == STATE_RX_AACK_ON)
ctx               438 drivers/net/ieee802154/at86rf230.c 			if (ctx->to_state == STATE_TX_ON ||
ctx               439 drivers/net/ieee802154/at86rf230.c 			    ctx->to_state == STATE_TRX_OFF) {
ctx               440 drivers/net/ieee802154/at86rf230.c 				u8 state = ctx->to_state;
ctx               446 drivers/net/ieee802154/at86rf230.c 				at86rf230_async_state_change(lp, ctx, state,
ctx               447 drivers/net/ieee802154/at86rf230.c 							     ctx->complete);
ctx               453 drivers/net/ieee802154/at86rf230.c 			 ctx->from_state, ctx->to_state, trx_state);
ctx               457 drivers/net/ieee802154/at86rf230.c 	if (ctx->complete)
ctx               458 drivers/net/ieee802154/at86rf230.c 		ctx->complete(context);
ctx               463 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx =
ctx               465 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               467 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_read_reg(lp, RG_TRX_STATUS, ctx,
ctx               477 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               478 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               488 drivers/net/ieee802154/at86rf230.c 	switch (ctx->to_state) {
ctx               490 drivers/net/ieee802154/at86rf230.c 		ctx->to_state = STATE_TX_ON;
ctx               494 drivers/net/ieee802154/at86rf230.c 		ctx->to_state = STATE_TRX_OFF;
ctx               501 drivers/net/ieee802154/at86rf230.c 	switch (ctx->from_state) {
ctx               503 drivers/net/ieee802154/at86rf230.c 		switch (ctx->to_state) {
ctx               526 drivers/net/ieee802154/at86rf230.c 		switch (ctx->to_state) {
ctx               544 drivers/net/ieee802154/at86rf230.c 		switch (ctx->to_state) {
ctx               558 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_timer(&ctx->timer);
ctx               562 drivers/net/ieee802154/at86rf230.c 	hrtimer_start(&ctx->timer, tim, HRTIMER_MODE_REL);
ctx               568 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               569 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               570 drivers/net/ieee802154/at86rf230.c 	u8 *buf = ctx->buf;
ctx               576 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_read_reg(lp, RG_TRX_STATUS, ctx,
ctx               582 drivers/net/ieee802154/at86rf230.c 	if (trx_state == ctx->to_state) {
ctx               583 drivers/net/ieee802154/at86rf230.c 		if (ctx->complete)
ctx               584 drivers/net/ieee802154/at86rf230.c 			ctx->complete(context);
ctx               589 drivers/net/ieee802154/at86rf230.c 	ctx->from_state = trx_state;
ctx               594 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_write_reg(lp, RG_TRX_STATE, ctx->to_state, ctx,
ctx               600 drivers/net/ieee802154/at86rf230.c 			     struct at86rf230_state_change *ctx,
ctx               604 drivers/net/ieee802154/at86rf230.c 	ctx->to_state = state;
ctx               605 drivers/net/ieee802154/at86rf230.c 	ctx->complete = complete;
ctx               606 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_read_reg(lp, RG_TRX_STATUS, ctx,
ctx               613 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               614 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               644 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               645 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               648 drivers/net/ieee802154/at86rf230.c 	kfree(ctx);
ctx               654 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               655 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               657 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_change(lp, ctx, STATE_RX_AACK_ON,
ctx               664 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               665 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               668 drivers/net/ieee802154/at86rf230.c 		u8 trac = TRAC_MASK(ctx->buf[1]);
ctx               692 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_change(lp, ctx, STATE_TX_ON, at86rf230_tx_on);
ctx               698 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               699 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               700 drivers/net/ieee802154/at86rf230.c 	const u8 *buf = ctx->buf;
ctx               714 drivers/net/ieee802154/at86rf230.c 		kfree(ctx);
ctx               720 drivers/net/ieee802154/at86rf230.c 	kfree(ctx);
ctx               726 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               727 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               728 drivers/net/ieee802154/at86rf230.c 	u8 *buf = ctx->buf;
ctx               751 drivers/net/ieee802154/at86rf230.c 	ctx->trx.len = AT86RF2XX_MAX_BUF;
ctx               752 drivers/net/ieee802154/at86rf230.c 	ctx->msg.complete = at86rf230_rx_read_frame_complete;
ctx               753 drivers/net/ieee802154/at86rf230.c 	rc = spi_async(lp->spi, &ctx->msg);
ctx               755 drivers/net/ieee802154/at86rf230.c 		ctx->trx.len = 2;
ctx               756 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_error(lp, ctx, rc);
ctx               763 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               764 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               768 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_read_reg(lp, RG_TRX_STATE, ctx,
ctx               771 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_read_reg(lp, RG_TRX_STATE, ctx,
ctx               779 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               780 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               781 drivers/net/ieee802154/at86rf230.c 	const u8 *buf = ctx->buf;
ctx               787 drivers/net/ieee802154/at86rf230.c 		at86rf230_irq_trx_end(ctx);
ctx               791 drivers/net/ieee802154/at86rf230.c 		kfree(ctx);
ctx               814 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx;
ctx               819 drivers/net/ieee802154/at86rf230.c 	ctx = kzalloc(sizeof(*ctx), GFP_ATOMIC);
ctx               820 drivers/net/ieee802154/at86rf230.c 	if (!ctx) {
ctx               825 drivers/net/ieee802154/at86rf230.c 	at86rf230_setup_spi_messages(lp, ctx);
ctx               827 drivers/net/ieee802154/at86rf230.c 	ctx->free = true;
ctx               829 drivers/net/ieee802154/at86rf230.c 	ctx->buf[0] = (RG_IRQ_STATUS & CMD_REG_MASK) | CMD_REG;
ctx               830 drivers/net/ieee802154/at86rf230.c 	ctx->msg.complete = at86rf230_irq_status;
ctx               831 drivers/net/ieee802154/at86rf230.c 	rc = spi_async(lp->spi, &ctx->msg);
ctx               833 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_error(lp, ctx, rc);
ctx               844 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               845 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               847 drivers/net/ieee802154/at86rf230.c 	ctx->trx.len = 2;
ctx               852 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_write_reg(lp, RG_TRX_STATE, STATE_BUSY_TX, ctx,
ctx               859 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               860 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               862 drivers/net/ieee802154/at86rf230.c 	u8 *buf = ctx->buf;
ctx               870 drivers/net/ieee802154/at86rf230.c 	ctx->trx.len = skb->len + 2;
ctx               871 drivers/net/ieee802154/at86rf230.c 	ctx->msg.complete = at86rf230_write_frame_complete;
ctx               872 drivers/net/ieee802154/at86rf230.c 	rc = spi_async(lp->spi, &ctx->msg);
ctx               874 drivers/net/ieee802154/at86rf230.c 		ctx->trx.len = 2;
ctx               875 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_error(lp, ctx, rc);
ctx               882 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               883 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               885 drivers/net/ieee802154/at86rf230.c 	at86rf230_async_state_change(lp, ctx, STATE_TX_ARET_ON,
ctx               892 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = context;
ctx               893 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_local *lp = ctx->lp;
ctx               897 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_state_change(lp, ctx, STATE_TX_ARET_ON,
ctx               900 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_state_change(lp, ctx, STATE_TX_ON,
ctx               908 drivers/net/ieee802154/at86rf230.c 	struct at86rf230_state_change *ctx = &lp->tx;
ctx               922 drivers/net/ieee802154/at86rf230.c 		at86rf230_async_state_change(lp, ctx, STATE_TRX_OFF,
ctx               926 drivers/net/ieee802154/at86rf230.c 		at86rf230_xmit_start(ctx);
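The at86rf230 block is a single idea applied many times: every SPI transfer carries a struct at86rf230_state_change and names the next step in ctx->msg.complete, so a transceiver state change runs as a chain of spi_async() completions instead of blocking I/O. One link of that chain, reconstructed from the entries above:

	/* drivers/net/ieee802154/at86rf230.c: one async register write link */
	static void example_async_write_reg(struct at86rf230_local *lp, u8 reg, u8 val,
					    struct at86rf230_state_change *ctx,
					    void (*complete)(void *context))
	{
		int rc;

		ctx->buf[0] = (reg & CMD_REG_MASK) | CMD_REG | CMD_WRITE;
		ctx->buf[1] = val;
		ctx->msg.complete = complete;	/* names the next callback in the chain */
		rc = spi_async(lp->spi, &ctx->msg);
		if (rc)
			at86rf230_async_error(lp, ctx, rc);	/* forces TRX_OFF on failure */
	}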
ctx                44 drivers/net/phy/mdio-aspeed.c 	struct aspeed_mdio *ctx = bus->priv;
ctx                62 drivers/net/phy/mdio-aspeed.c 	iowrite32(ctrl, ctx->base + ASPEED_MDIO_CTRL);
ctx                64 drivers/net/phy/mdio-aspeed.c 	rc = readl_poll_timeout(ctx->base + ASPEED_MDIO_DATA, data,
ctx                76 drivers/net/phy/mdio-aspeed.c 	struct aspeed_mdio *ctx = bus->priv;
ctx                93 drivers/net/phy/mdio-aspeed.c 	iowrite32(ctrl, ctx->base + ASPEED_MDIO_CTRL);
ctx                95 drivers/net/phy/mdio-aspeed.c 	return readl_poll_timeout(ctx->base + ASPEED_MDIO_CTRL, ctrl,
ctx               103 drivers/net/phy/mdio-aspeed.c 	struct aspeed_mdio *ctx;
ctx               107 drivers/net/phy/mdio-aspeed.c 	bus = devm_mdiobus_alloc_size(&pdev->dev, sizeof(*ctx));
ctx               111 drivers/net/phy/mdio-aspeed.c 	ctx = bus->priv;
ctx               112 drivers/net/phy/mdio-aspeed.c 	ctx->base = devm_platform_ioremap_resource(pdev, 0);
ctx               113 drivers/net/phy/mdio-aspeed.c 	if (IS_ERR(ctx->base))
ctx               114 drivers/net/phy/mdio-aspeed.c 		return PTR_ERR(ctx->base);
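The mdio-aspeed entries show the usual memory-mapped MDIO pattern: the bus private area (sized for struct aspeed_mdio by devm_mdiobus_alloc_size() at probe) holds the ioremapped base, an access is fired through ASPEED_MDIO_CTRL, and readl_poll_timeout() waits for the hardware to finish. A sketch under those assumptions; the FIRE bit and the interval/timeout constants are not in the entries and are assumed:

	/* drivers/net/phy/mdio-aspeed.c: fire an MDIO op and poll for idle */
	static int example_mdio_op(struct mii_bus *bus, u32 ctrl)
	{
		struct aspeed_mdio *ctx = bus->priv;	/* sized for *ctx at probe */

		iowrite32(ctrl, ctx->base + ASPEED_MDIO_CTRL);
		/* poll until the (assumed) fire/busy bit clears or we time out */
		return readl_poll_timeout(ctx->base + ASPEED_MDIO_CTRL, ctrl,
					  !(ctrl & ASPEED_MDIO_CTRL_FIRE),
					  ASPEED_MDIO_INTERVAL_US,
					  ASPEED_MDIO_TIMEOUT_US);
	}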
ctx               360 drivers/net/team/team.c 			   struct team_gsetter_ctx *ctx)
ctx               364 drivers/net/team/team.c 	return opt_inst->option->getter(team, ctx);
ctx               369 drivers/net/team/team.c 			   struct team_gsetter_ctx *ctx)
ctx               373 drivers/net/team/team.c 	return opt_inst->option->setter(team, ctx);
ctx              1371 drivers/net/team/team.c static int team_mode_option_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx              1373 drivers/net/team/team.c 	ctx->data.str_val = team->mode->kind;
ctx              1377 drivers/net/team/team.c static int team_mode_option_set(struct team *team, struct team_gsetter_ctx *ctx)
ctx              1379 drivers/net/team/team.c 	return team_change_mode(team, ctx->data.str_val);
ctx              1383 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1385 drivers/net/team/team.c 	ctx->data.u32_val = team->notify_peers.count;
ctx              1390 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1392 drivers/net/team/team.c 	team->notify_peers.count = ctx->data.u32_val;
ctx              1397 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1399 drivers/net/team/team.c 	ctx->data.u32_val = team->notify_peers.interval;
ctx              1404 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1406 drivers/net/team/team.c 	team->notify_peers.interval = ctx->data.u32_val;
ctx              1411 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1413 drivers/net/team/team.c 	ctx->data.u32_val = team->mcast_rejoin.count;
ctx              1418 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1420 drivers/net/team/team.c 	team->mcast_rejoin.count = ctx->data.u32_val;
ctx              1425 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1427 drivers/net/team/team.c 	ctx->data.u32_val = team->mcast_rejoin.interval;
ctx              1432 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1434 drivers/net/team/team.c 	team->mcast_rejoin.interval = ctx->data.u32_val;
ctx              1439 drivers/net/team/team.c 				   struct team_gsetter_ctx *ctx)
ctx              1441 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1443 drivers/net/team/team.c 	ctx->data.bool_val = team_port_enabled(port);
ctx              1448 drivers/net/team/team.c 				   struct team_gsetter_ctx *ctx)
ctx              1450 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1452 drivers/net/team/team.c 	if (ctx->data.bool_val)
ctx              1460 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1462 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1464 drivers/net/team/team.c 	ctx->data.bool_val = port->user.linkup;
ctx              1471 drivers/net/team/team.c 				       struct team_gsetter_ctx *ctx)
ctx              1473 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1475 drivers/net/team/team.c 	port->user.linkup = ctx->data.bool_val;
ctx              1482 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1484 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1486 drivers/net/team/team.c 	ctx->data.bool_val = port->user.linkup_enabled;
ctx              1491 drivers/net/team/team.c 					  struct team_gsetter_ctx *ctx)
ctx              1493 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1495 drivers/net/team/team.c 	port->user.linkup_enabled = ctx->data.bool_val;
ctx              1502 drivers/net/team/team.c 				    struct team_gsetter_ctx *ctx)
ctx              1504 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1506 drivers/net/team/team.c 	ctx->data.s32_val = port->priority;
ctx              1511 drivers/net/team/team.c 				    struct team_gsetter_ctx *ctx)
ctx              1513 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1514 drivers/net/team/team.c 	s32 priority = ctx->data.s32_val;
ctx              1524 drivers/net/team/team.c 				    struct team_gsetter_ctx *ctx)
ctx              1526 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1528 drivers/net/team/team.c 	ctx->data.u32_val = port->queue_id;
ctx              1533 drivers/net/team/team.c 				    struct team_gsetter_ctx *ctx)
ctx              1535 drivers/net/team/team.c 	struct team_port *port = ctx->info->port;
ctx              1536 drivers/net/team/team.c 	u16 new_queue_id = ctx->data.u32_val;
ctx              2325 drivers/net/team/team.c 	struct team_gsetter_ctx ctx;
ctx              2328 drivers/net/team/team.c 	ctx.info = opt_inst_info;
ctx              2329 drivers/net/team/team.c 	err = team_option_get(team, opt_inst, &ctx);
ctx              2352 drivers/net/team/team.c 		if (nla_put_u32(skb, TEAM_ATTR_OPTION_DATA, ctx.data.u32_val))
ctx              2359 drivers/net/team/team.c 				   ctx.data.str_val))
ctx              2365 drivers/net/team/team.c 		if (nla_put(skb, TEAM_ATTR_OPTION_DATA, ctx.data.bin_val.len,
ctx              2366 drivers/net/team/team.c 			    ctx.data.bin_val.ptr))
ctx              2372 drivers/net/team/team.c 		if (ctx.data.bool_val &&
ctx              2379 drivers/net/team/team.c 		if (nla_put_s32(skb, TEAM_ATTR_OPTION_DATA, ctx.data.s32_val))
ctx              2604 drivers/net/team/team.c 			struct team_gsetter_ctx ctx;
ctx              2618 drivers/net/team/team.c 			ctx.info = opt_inst_info;
ctx              2621 drivers/net/team/team.c 				ctx.data.u32_val = nla_get_u32(attr_data);
ctx              2628 drivers/net/team/team.c 				ctx.data.str_val = nla_data(attr_data);
ctx              2631 drivers/net/team/team.c 				ctx.data.bin_val.len = nla_len(attr_data);
ctx              2632 drivers/net/team/team.c 				ctx.data.bin_val.ptr = nla_data(attr_data);
ctx              2635 drivers/net/team/team.c 				ctx.data.bool_val = attr_data ? true : false;
ctx              2638 drivers/net/team/team.c 				ctx.data.s32_val = nla_get_s32(attr_data);
ctx              2643 drivers/net/team/team.c 			err = team_option_set(team, opt_inst, &ctx);
ctx                67 drivers/net/team/team_mode_activebackup.c static int ab_active_port_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx                74 drivers/net/team/team_mode_activebackup.c 		ctx->data.u32_val = active_port->dev->ifindex;
ctx                76 drivers/net/team/team_mode_activebackup.c 		ctx->data.u32_val = 0;
ctx                80 drivers/net/team/team_mode_activebackup.c static int ab_active_port_set(struct team *team, struct team_gsetter_ctx *ctx)
ctx                85 drivers/net/team/team_mode_activebackup.c 		if (port->dev->ifindex == ctx->data.u32_val) {
ctx               245 drivers/net/team/team_mode_loadbalance.c static int lb_bpf_func_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx               250 drivers/net/team/team_mode_loadbalance.c 		ctx->data.bin_val.len = 0;
ctx               251 drivers/net/team/team_mode_loadbalance.c 		ctx->data.bin_val.ptr = NULL;
ctx               254 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.len = lb_priv->ex->orig_fprog->len *
ctx               256 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.ptr = lb_priv->ex->orig_fprog->filter;
ctx               287 drivers/net/team/team_mode_loadbalance.c static int lb_bpf_func_set(struct team *team, struct team_gsetter_ctx *ctx)
ctx               295 drivers/net/team/team_mode_loadbalance.c 	if (ctx->data.bin_val.len) {
ctx               296 drivers/net/team/team_mode_loadbalance.c 		err = __fprog_create(&fprog, ctx->data.bin_val.len,
ctx               297 drivers/net/team/team_mode_loadbalance.c 				     ctx->data.bin_val.ptr);
ctx               338 drivers/net/team/team_mode_loadbalance.c static int lb_tx_method_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx               348 drivers/net/team/team_mode_loadbalance.c 	ctx->data.str_val = name;
ctx               352 drivers/net/team/team_mode_loadbalance.c static int lb_tx_method_set(struct team *team, struct team_gsetter_ctx *ctx)
ctx               357 drivers/net/team/team_mode_loadbalance.c 	func = lb_select_tx_port_get_func(ctx->data.str_val);
ctx               375 drivers/net/team/team_mode_loadbalance.c 					  struct team_gsetter_ctx *ctx)
ctx               379 drivers/net/team/team_mode_loadbalance.c 	unsigned char hash = ctx->info->array_index;
ctx               382 drivers/net/team/team_mode_loadbalance.c 	ctx->data.u32_val = port ? port->dev->ifindex : 0;
ctx               387 drivers/net/team/team_mode_loadbalance.c 					  struct team_gsetter_ctx *ctx)
ctx               391 drivers/net/team/team_mode_loadbalance.c 	unsigned char hash = ctx->info->array_index;
ctx               394 drivers/net/team/team_mode_loadbalance.c 		if (ctx->data.u32_val == port->dev->ifindex &&
ctx               414 drivers/net/team/team_mode_loadbalance.c static int lb_hash_stats_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx               417 drivers/net/team/team_mode_loadbalance.c 	unsigned char hash = ctx->info->array_index;
ctx               419 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.ptr = &lb_priv->ex->stats.info[hash].stats;
ctx               420 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.len = sizeof(struct lb_stats);
ctx               434 drivers/net/team/team_mode_loadbalance.c static int lb_port_stats_get(struct team *team, struct team_gsetter_ctx *ctx)
ctx               436 drivers/net/team/team_mode_loadbalance.c 	struct team_port *port = ctx->info->port;
ctx               439 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.ptr = &lb_port_priv->stats_info.stats;
ctx               440 drivers/net/team/team_mode_loadbalance.c 	ctx->data.bin_val.len = sizeof(struct lb_stats);
ctx               535 drivers/net/team/team_mode_loadbalance.c 					 struct team_gsetter_ctx *ctx)
ctx               539 drivers/net/team/team_mode_loadbalance.c 	ctx->data.u32_val = lb_priv->ex->stats.refresh_interval;
ctx               544 drivers/net/team/team_mode_loadbalance.c 					 struct team_gsetter_ctx *ctx)
ctx               549 drivers/net/team/team_mode_loadbalance.c 	interval = ctx->data.u32_val;
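Everything in the team block follows one calling convention: struct team_gsetter_ctx bundles a tagged value (u32/s32/bool/string/binary in ctx->data) with ctx->info describing the option instance, and each option supplies a getter that fills ctx->data plus a setter that consumes it. A reduced sketch of such a pair for a per-port s32 option; the real priority setter also reapplies the queue mapping, elided here:

	/* drivers/net/team/team.c: option getter/setter pair sketch */
	static int example_port_prio_get(struct team *team, struct team_gsetter_ctx *ctx)
	{
		struct team_port *port = ctx->info->port;

		ctx->data.s32_val = port->priority;	/* getter fills ctx->data */
		return 0;
	}

	static int example_port_prio_set(struct team *team, struct team_gsetter_ctx *ctx)
	{
		struct team_port *port = ctx->info->port;

		port->priority = ctx->data.s32_val;	/* setter consumes ctx->data */
		return 0;
	}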
ctx                30 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx;
ctx               141 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx;
ctx               164 drivers/net/usb/cdc_mbim.c 	ctx = info->ctx;
ctx               167 drivers/net/usb/cdc_mbim.c 	if (ctx->mbim_desc && dev->status)
ctx               168 drivers/net/usb/cdc_mbim.c 		subdriver = usb_cdc_wdm_register(ctx->control,
ctx               170 drivers/net/usb/cdc_mbim.c 						 le16_to_cpu(ctx->mbim_desc->wMaxControlMessage),
ctx               197 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx = info->ctx;
ctx               201 drivers/net/usb/cdc_mbim.c 		info->subdriver->disconnect(ctx->control);
ctx               223 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx = info->ctx;
ctx               229 drivers/net/usb/cdc_mbim.c 	if (!ctx)
ctx               291 drivers/net/usb/cdc_mbim.c 	spin_lock_bh(&ctx->mtx);
ctx               293 drivers/net/usb/cdc_mbim.c 	spin_unlock_bh(&ctx->mtx);
ctx               413 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx = info->ctx;
ctx               426 drivers/net/usb/cdc_mbim.c 	ndpoffset = cdc_ncm_rx_verify_nth16(ctx, skb_in);
ctx               473 drivers/net/usb/cdc_mbim.c 		if (((offset + len) > skb_in->len) || (len > ctx->rx_max)) {
ctx               495 drivers/net/usb/cdc_mbim.c 	ctx->rx_overhead += skb_in->len - payload;
ctx               496 drivers/net/usb/cdc_mbim.c 	ctx->rx_ntbs++;
ctx               508 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx = info->ctx;
ctx               510 drivers/net/usb/cdc_mbim.c 	if (!ctx)
ctx               522 drivers/net/usb/cdc_mbim.c 	if (intf == ctx->control && info->subdriver && info->subdriver->suspend)
ctx               536 drivers/net/usb/cdc_mbim.c 	struct cdc_ncm_ctx *ctx = info->ctx;
ctx               537 drivers/net/usb/cdc_mbim.c 	bool callsub = (intf == ctx->control && info->subdriver && info->subdriver->resume);
ctx                65 drivers/net/usb/cdc_ncm.c static void cdc_ncm_tx_timeout_start(struct cdc_ncm_ctx *ctx);
ctx               109 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               114 drivers/net/usb/cdc_ncm.c 		p = (char *)ctx + cdc_ncm_gstrings_stats[i].stat_offset;
ctx               151 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               156 drivers/net/usb/cdc_ncm.c 	max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_RX, le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize));
ctx               161 drivers/net/usb/cdc_ncm.c 			 le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize), min);
ctx               174 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               178 drivers/net/usb/cdc_ncm.c 	min = ctx->max_datagram_size + ctx->max_ndp_size + sizeof(struct usb_cdc_ncm_nth16);
ctx               179 drivers/net/usb/cdc_ncm.c 	max = min_t(u32, CDC_NCM_NTB_MAX_SIZE_TX, le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize));
ctx               194 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               196 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, "%u\n", ctx->min_tx_pkt);
ctx               202 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               204 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, "%u\n", ctx->rx_max);
ctx               210 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               212 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, "%u\n", ctx->tx_max);
ctx               218 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               220 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, "%u\n", ctx->timer_interval / (u32)NSEC_PER_USEC);
ctx               226 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               233 drivers/net/usb/cdc_ncm.c 	ctx->min_tx_pkt = val;
ctx               240 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               246 drivers/net/usb/cdc_ncm.c 	cdc_ncm_update_rxtx_max(dev, val, ctx->tx_max);
ctx               253 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               259 drivers/net/usb/cdc_ncm.c 	cdc_ncm_update_rxtx_max(dev, ctx->rx_max, val);
ctx               266 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               276 drivers/net/usb/cdc_ncm.c 	spin_lock_bh(&ctx->mtx);
ctx               277 drivers/net/usb/cdc_ncm.c 	ctx->timer_interval = val * NSEC_PER_USEC;
ctx               278 drivers/net/usb/cdc_ncm.c 	if (!ctx->timer_interval)
ctx               279 drivers/net/usb/cdc_ncm.c 		ctx->tx_timer_pending = 0;
ctx               280 drivers/net/usb/cdc_ncm.c 	spin_unlock_bh(&ctx->mtx);
ctx               292 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               294 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, "%c\n", ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END ? 'Y' : 'N');
ctx               300 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               307 drivers/net/usb/cdc_ncm.c 	if (enable == (ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END))
ctx               310 drivers/net/usb/cdc_ncm.c 	if (enable && !ctx->delayed_ndp16) {
ctx               311 drivers/net/usb/cdc_ncm.c 		ctx->delayed_ndp16 = kzalloc(ctx->max_ndp_size, GFP_KERNEL);
ctx               312 drivers/net/usb/cdc_ncm.c 		if (!ctx->delayed_ndp16)
ctx               319 drivers/net/usb/cdc_ncm.c 	spin_lock_bh(&ctx->mtx);
ctx               321 drivers/net/usb/cdc_ncm.c 		ctx->drvflags |= CDC_NCM_FLAG_NDP_TO_END;
ctx               323 drivers/net/usb/cdc_ncm.c 		ctx->drvflags &= ~CDC_NCM_FLAG_NDP_TO_END;
ctx               324 drivers/net/usb/cdc_ncm.c 	spin_unlock_bh(&ctx->mtx);
ctx               335 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0]; \
ctx               336 drivers/net/usb/cdc_ncm.c 	return sprintf(buf, format "\n", tocpu(ctx->ncm_parm.name));	\
ctx               378 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               379 drivers/net/usb/cdc_ncm.c 	u8 iface_no = ctx->control->cur_altsetting->desc.bInterfaceNumber;
ctx               385 drivers/net/usb/cdc_ncm.c 	if (val != ctx->rx_max) {
ctx               397 drivers/net/usb/cdc_ncm.c 			ctx->rx_max = val;
ctx               401 drivers/net/usb/cdc_ncm.c 	if (dev->rx_urb_size != ctx->rx_max) {
ctx               402 drivers/net/usb/cdc_ncm.c 		dev->rx_urb_size = ctx->rx_max;
ctx               408 drivers/net/usb/cdc_ncm.c 	if (val != ctx->tx_max)
ctx               418 drivers/net/usb/cdc_ncm.c 	if (val != le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize) &&
ctx               423 drivers/net/usb/cdc_ncm.c 	if (netif_running(dev->net) && val > ctx->tx_max) {
ctx               427 drivers/net/usb/cdc_ncm.c 		if (ctx->tx_curr_skb) {
ctx               428 drivers/net/usb/cdc_ncm.c 			dev_kfree_skb_any(ctx->tx_curr_skb);
ctx               429 drivers/net/usb/cdc_ncm.c 			ctx->tx_curr_skb = NULL;
ctx               431 drivers/net/usb/cdc_ncm.c 		ctx->tx_max = val;
ctx               434 drivers/net/usb/cdc_ncm.c 		ctx->tx_max = val;
ctx               437 drivers/net/usb/cdc_ncm.c 	dev->hard_mtu = ctx->tx_max;
ctx               443 drivers/net/usb/cdc_ncm.c 	ctx->min_tx_pkt = clamp_t(u16, ctx->tx_max - 3 * usb_maxpacket(dev->udev, dev->out, 1),
ctx               444 drivers/net/usb/cdc_ncm.c 				  CDC_NCM_MIN_TX_PKT, ctx->tx_max);
ctx               450 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               452 drivers/net/usb/cdc_ncm.c 	if (cdc_ncm_comm_intf_is_mbim(dev->intf->cur_altsetting) && ctx->mbim_desc)
ctx               453 drivers/net/usb/cdc_ncm.c 		return ctx->mbim_desc->bmNetworkCapabilities;
ctx               454 drivers/net/usb/cdc_ncm.c 	if (ctx->func_desc)
ctx               455 drivers/net/usb/cdc_ncm.c 		return ctx->func_desc->bmNetworkCapabilities;
ctx               475 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               477 drivers/net/usb/cdc_ncm.c 	if (cdc_ncm_comm_intf_is_mbim(dev->intf->cur_altsetting) && ctx->mbim_desc)
ctx               478 drivers/net/usb/cdc_ncm.c 		return le16_to_cpu(ctx->mbim_desc->wMaxSegmentSize);
ctx               479 drivers/net/usb/cdc_ncm.c 	if (ctx->ether_desc)
ctx               480 drivers/net/usb/cdc_ncm.c 		return le16_to_cpu(ctx->ether_desc->wMaxSegmentSize);
ctx               489 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               490 drivers/net/usb/cdc_ncm.c 	u8 iface_no = ctx->control->cur_altsetting->desc.bInterfaceNumber;
ctx               496 drivers/net/usb/cdc_ncm.c 			      0, iface_no, &ctx->ncm_parm,
ctx               497 drivers/net/usb/cdc_ncm.c 			      sizeof(ctx->ncm_parm));
ctx               520 drivers/net/usb/cdc_ncm.c 	if (le16_to_cpu(ctx->ncm_parm.bmNtbFormatsSupported) &
ctx               533 drivers/net/usb/cdc_ncm.c 	ctx->rx_max = le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize);
ctx               534 drivers/net/usb/cdc_ncm.c 	ctx->tx_max = le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize);
ctx               535 drivers/net/usb/cdc_ncm.c 	ctx->tx_remainder = le16_to_cpu(ctx->ncm_parm.wNdpOutPayloadRemainder);
ctx               536 drivers/net/usb/cdc_ncm.c 	ctx->tx_modulus = le16_to_cpu(ctx->ncm_parm.wNdpOutDivisor);
ctx               537 drivers/net/usb/cdc_ncm.c 	ctx->tx_ndp_modulus = le16_to_cpu(ctx->ncm_parm.wNdpOutAlignment);
ctx               539 drivers/net/usb/cdc_ncm.c 	ctx->tx_max_datagrams = le16_to_cpu(ctx->ncm_parm.wNtbOutMaxDatagrams);
ctx               543 drivers/net/usb/cdc_ncm.c 		ctx->rx_max, ctx->tx_max, ctx->tx_remainder, ctx->tx_modulus,
ctx               544 drivers/net/usb/cdc_ncm.c 		ctx->tx_ndp_modulus, ctx->tx_max_datagrams, cdc_ncm_flags(dev));
ctx               547 drivers/net/usb/cdc_ncm.c 	if ((ctx->tx_max_datagrams == 0) ||
ctx               548 drivers/net/usb/cdc_ncm.c 			(ctx->tx_max_datagrams > CDC_NCM_DPT_DATAGRAMS_MAX))
ctx               549 drivers/net/usb/cdc_ncm.c 		ctx->tx_max_datagrams = CDC_NCM_DPT_DATAGRAMS_MAX;
ctx               552 drivers/net/usb/cdc_ncm.c 	ctx->max_ndp_size = sizeof(struct usb_cdc_ncm_ndp16) + (ctx->tx_max_datagrams + 1) * sizeof(struct usb_cdc_ncm_dpe16);
ctx               555 drivers/net/usb/cdc_ncm.c 	ctx->timer_interval = CDC_NCM_TIMER_INTERVAL_USEC * NSEC_PER_USEC;
ctx               563 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               564 drivers/net/usb/cdc_ncm.c 	u8 iface_no = ctx->control->cur_altsetting->desc.bInterfaceNumber;
ctx               570 drivers/net/usb/cdc_ncm.c 	ctx->max_datagram_size = clamp_t(u32, new_size,
ctx               587 drivers/net/usb/cdc_ncm.c 	if (le16_to_cpu(max_datagram_size) == ctx->max_datagram_size)
ctx               590 drivers/net/usb/cdc_ncm.c 	max_datagram_size = cpu_to_le16(ctx->max_datagram_size);
ctx               599 drivers/net/usb/cdc_ncm.c 	dev->net->mtu = min_t(int, dev->net->mtu, ctx->max_datagram_size - cdc_ncm_eth_hlen(dev));
ctx               602 drivers/net/usb/cdc_ncm.c 	if (ctx->mbim_extended_desc) {
ctx               603 drivers/net/usb/cdc_ncm.c 		mbim_mtu = le16_to_cpu(ctx->mbim_extended_desc->wMTU);
ctx               611 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               620 drivers/net/usb/cdc_ncm.c 	val = ctx->tx_ndp_modulus;
ctx               623 drivers/net/usb/cdc_ncm.c 	    (val != ((-val) & val)) || (val >= ctx->tx_max)) {
ctx               625 drivers/net/usb/cdc_ncm.c 		ctx->tx_ndp_modulus = USB_CDC_NCM_NDP_ALIGN_MIN_SIZE;
ctx               634 drivers/net/usb/cdc_ncm.c 	val = ctx->tx_modulus;
ctx               637 drivers/net/usb/cdc_ncm.c 	    (val != ((-val) & val)) || (val >= ctx->tx_max)) {
ctx               639 drivers/net/usb/cdc_ncm.c 		ctx->tx_modulus = USB_CDC_NCM_NDP_ALIGN_MIN_SIZE;
ctx               643 drivers/net/usb/cdc_ncm.c 	if (ctx->tx_remainder >= ctx->tx_modulus) {
ctx               645 drivers/net/usb/cdc_ncm.c 		ctx->tx_remainder = 0;
ctx               649 drivers/net/usb/cdc_ncm.c 	ctx->tx_remainder = ((ctx->tx_remainder - cdc_ncm_eth_hlen(dev)) &
ctx               650 drivers/net/usb/cdc_ncm.c 			     (ctx->tx_modulus - 1));
ctx               655 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               662 drivers/net/usb/cdc_ncm.c 		       le32_to_cpu(ctx->ncm_parm.dwNtbInMaxSize));
ctx               664 drivers/net/usb/cdc_ncm.c 		       le32_to_cpu(ctx->ncm_parm.dwNtbOutMaxSize));
ctx               722 drivers/net/usb/cdc_ncm.c static void cdc_ncm_free(struct cdc_ncm_ctx *ctx)
ctx               724 drivers/net/usb/cdc_ncm.c 	if (ctx == NULL)
ctx               727 drivers/net/usb/cdc_ncm.c 	if (ctx->tx_rem_skb != NULL) {
ctx               728 drivers/net/usb/cdc_ncm.c 		dev_kfree_skb_any(ctx->tx_rem_skb);
ctx               729 drivers/net/usb/cdc_ncm.c 		ctx->tx_rem_skb = NULL;
ctx               732 drivers/net/usb/cdc_ncm.c 	if (ctx->tx_curr_skb != NULL) {
ctx               733 drivers/net/usb/cdc_ncm.c 		dev_kfree_skb_any(ctx->tx_curr_skb);
ctx               734 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_skb = NULL;
ctx               737 drivers/net/usb/cdc_ncm.c 	kfree(ctx->delayed_ndp16);
ctx               739 drivers/net/usb/cdc_ncm.c 	kfree(ctx);
ctx               770 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx;
ctx               780 drivers/net/usb/cdc_ncm.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               781 drivers/net/usb/cdc_ncm.c 	if (!ctx)
ctx               784 drivers/net/usb/cdc_ncm.c 	hrtimer_init(&ctx->tx_timer, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
ctx               785 drivers/net/usb/cdc_ncm.c 	ctx->tx_timer.function = &cdc_ncm_tx_timer_cb;
ctx               786 drivers/net/usb/cdc_ncm.c 	tasklet_init(&ctx->bh, cdc_ncm_txpath_bh, (unsigned long)dev);
ctx               787 drivers/net/usb/cdc_ncm.c 	atomic_set(&ctx->stop, 0);
ctx               788 drivers/net/usb/cdc_ncm.c 	spin_lock_init(&ctx->mtx);
ctx               791 drivers/net/usb/cdc_ncm.c 	dev->data[0] = (unsigned long)ctx;
ctx               794 drivers/net/usb/cdc_ncm.c 	ctx->control = intf;
ctx               805 drivers/net/usb/cdc_ncm.c 		ctx->data = usb_ifnum_to_if(dev->udev,
ctx               807 drivers/net/usb/cdc_ncm.c 	ctx->ether_desc = hdr.usb_cdc_ether_desc;
ctx               808 drivers/net/usb/cdc_ncm.c 	ctx->func_desc = hdr.usb_cdc_ncm_desc;
ctx               809 drivers/net/usb/cdc_ncm.c 	ctx->mbim_desc = hdr.usb_cdc_mbim_desc;
ctx               810 drivers/net/usb/cdc_ncm.c 	ctx->mbim_extended_desc = hdr.usb_cdc_mbim_extended_desc;
ctx               814 drivers/net/usb/cdc_ncm.c 		ctx->data = usb_ifnum_to_if(dev->udev, intf->cur_altsetting->desc.bInterfaceNumber + 1);
ctx               819 drivers/net/usb/cdc_ncm.c 	if (!ctx->data) {
ctx               824 drivers/net/usb/cdc_ncm.c 		if (!ctx->mbim_desc) {
ctx               829 drivers/net/usb/cdc_ncm.c 		if (!ctx->ether_desc || !ctx->func_desc) {
ctx               836 drivers/net/usb/cdc_ncm.c 	if (ctx->data != ctx->control) {
ctx               837 drivers/net/usb/cdc_ncm.c 		temp = usb_driver_claim_interface(driver, ctx->data, dev);
ctx               844 drivers/net/usb/cdc_ncm.c 	iface_no = ctx->data->cur_altsetting->desc.bInterfaceNumber;
ctx               847 drivers/net/usb/cdc_ncm.c 	ctx->drvflags = drvflags;
ctx               855 drivers/net/usb/cdc_ncm.c 	if (!(ctx->drvflags & CDC_MBIM_FLAG_AVOID_ALTSETTING_TOGGLE))
ctx               887 drivers/net/usb/cdc_ncm.c 	if (ctx->drvflags & CDC_NCM_FLAG_RESET_NTB16) {
ctx               908 drivers/net/usb/cdc_ncm.c 	cdc_ncm_find_endpoints(dev, ctx->data);
ctx               909 drivers/net/usb/cdc_ncm.c 	cdc_ncm_find_endpoints(dev, ctx->control);
ctx               915 drivers/net/usb/cdc_ncm.c 	usb_set_intfdata(ctx->data, dev);
ctx               916 drivers/net/usb/cdc_ncm.c 	usb_set_intfdata(ctx->control, dev);
ctx               918 drivers/net/usb/cdc_ncm.c 	if (ctx->ether_desc) {
ctx               919 drivers/net/usb/cdc_ncm.c 		temp = usbnet_get_ethernet_addr(dev, ctx->ether_desc->iMACAddress);
ctx               931 drivers/net/usb/cdc_ncm.c 	if (ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END) {
ctx               932 drivers/net/usb/cdc_ncm.c 		ctx->delayed_ndp16 = kzalloc(ctx->max_ndp_size, GFP_KERNEL);
ctx               933 drivers/net/usb/cdc_ncm.c 		if (!ctx->delayed_ndp16)
ctx               951 drivers/net/usb/cdc_ncm.c 	usb_set_intfdata(ctx->control, NULL);
ctx               952 drivers/net/usb/cdc_ncm.c 	usb_set_intfdata(ctx->data, NULL);
ctx               953 drivers/net/usb/cdc_ncm.c 	if (ctx->data != ctx->control)
ctx               954 drivers/net/usb/cdc_ncm.c 		usb_driver_release_interface(driver, ctx->data);
ctx               965 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx               968 drivers/net/usb/cdc_ncm.c 	if (ctx == NULL)
ctx               971 drivers/net/usb/cdc_ncm.c 	atomic_set(&ctx->stop, 1);
ctx               973 drivers/net/usb/cdc_ncm.c 	hrtimer_cancel(&ctx->tx_timer);
ctx               975 drivers/net/usb/cdc_ncm.c 	tasklet_kill(&ctx->bh);
ctx               978 drivers/net/usb/cdc_ncm.c 	if (ctx->control == ctx->data)
ctx               979 drivers/net/usb/cdc_ncm.c 		ctx->data = NULL;
ctx               982 drivers/net/usb/cdc_ncm.c 	if (intf == ctx->control && ctx->data) {
ctx               983 drivers/net/usb/cdc_ncm.c 		usb_set_intfdata(ctx->data, NULL);
ctx               984 drivers/net/usb/cdc_ncm.c 		usb_driver_release_interface(driver, ctx->data);
ctx               985 drivers/net/usb/cdc_ncm.c 		ctx->data = NULL;
ctx               987 drivers/net/usb/cdc_ncm.c 	} else if (intf == ctx->data && ctx->control) {
ctx               988 drivers/net/usb/cdc_ncm.c 		usb_set_intfdata(ctx->control, NULL);
ctx               989 drivers/net/usb/cdc_ncm.c 		usb_driver_release_interface(driver, ctx->control);
ctx               990 drivers/net/usb/cdc_ncm.c 		ctx->control = NULL;
ctx               994 drivers/net/usb/cdc_ncm.c 	cdc_ncm_free(ctx);
ctx              1058 drivers/net/usb/cdc_ncm.c static struct usb_cdc_ncm_ndp16 *cdc_ncm_ndp(struct cdc_ncm_ctx *ctx, struct sk_buff *skb, __le32 sign, size_t reserve)
ctx              1068 drivers/net/usb/cdc_ncm.c 	if (ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END) {
ctx              1069 drivers/net/usb/cdc_ncm.c 		if (ctx->delayed_ndp16->dwSignature == sign)
ctx              1070 drivers/net/usb/cdc_ncm.c 			return ctx->delayed_ndp16;
ctx              1076 drivers/net/usb/cdc_ncm.c 		else if (ctx->delayed_ndp16->dwSignature)
ctx              1089 drivers/net/usb/cdc_ncm.c 	if (!(ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END))
ctx              1090 drivers/net/usb/cdc_ncm.c 		cdc_ncm_align_tail(skb, ctx->tx_ndp_modulus, 0, ctx->tx_curr_size);
ctx              1093 drivers/net/usb/cdc_ncm.c 	if ((ctx->tx_curr_size - skb->len - reserve) < ctx->max_ndp_size)
ctx              1103 drivers/net/usb/cdc_ncm.c 	if (!(ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END))
ctx              1104 drivers/net/usb/cdc_ncm.c 		ndp16 = skb_put_zero(skb, ctx->max_ndp_size);
ctx              1106 drivers/net/usb/cdc_ncm.c 		ndp16 = ctx->delayed_ndp16;
ctx              1116 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx              1128 drivers/net/usb/cdc_ncm.c 	if (ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END)
ctx              1129 drivers/net/usb/cdc_ncm.c 		delayed_ndp_size = ALIGN(ctx->max_ndp_size, ctx->tx_ndp_modulus);
ctx              1135 drivers/net/usb/cdc_ncm.c 		swap(skb, ctx->tx_rem_skb);
ctx              1136 drivers/net/usb/cdc_ncm.c 		swap(sign, ctx->tx_rem_sign);
ctx              1142 drivers/net/usb/cdc_ncm.c 	skb_out = ctx->tx_curr_skb;
ctx              1146 drivers/net/usb/cdc_ncm.c 		if (ctx->tx_low_mem_val == 0) {
ctx              1147 drivers/net/usb/cdc_ncm.c 			ctx->tx_curr_size = ctx->tx_max;
ctx              1148 drivers/net/usb/cdc_ncm.c 			skb_out = alloc_skb(ctx->tx_curr_size, GFP_ATOMIC);
ctx              1155 drivers/net/usb/cdc_ncm.c 				ctx->tx_low_mem_max_cnt = min(ctx->tx_low_mem_max_cnt + 1,
ctx              1157 drivers/net/usb/cdc_ncm.c 				ctx->tx_low_mem_val = ctx->tx_low_mem_max_cnt;
ctx              1166 drivers/net/usb/cdc_ncm.c 				ctx->tx_curr_size = max(skb->len,
ctx              1169 drivers/net/usb/cdc_ncm.c 				ctx->tx_curr_size = USB_CDC_NCM_NTB_MIN_OUT_SIZE;
ctx              1170 drivers/net/usb/cdc_ncm.c 			skb_out = alloc_skb(ctx->tx_curr_size, GFP_ATOMIC);
ctx              1180 drivers/net/usb/cdc_ncm.c 			ctx->tx_low_mem_val--;
ctx              1186 drivers/net/usb/cdc_ncm.c 		nth16->wSequence = cpu_to_le16(ctx->tx_seq++);
ctx              1189 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_frame_num = 0;
ctx              1192 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_frame_payload = 0;
ctx              1195 drivers/net/usb/cdc_ncm.c 	for (n = ctx->tx_curr_frame_num; n < ctx->tx_max_datagrams; n++) {
ctx              1198 drivers/net/usb/cdc_ncm.c 			skb = ctx->tx_rem_skb;
ctx              1199 drivers/net/usb/cdc_ncm.c 			sign = ctx->tx_rem_sign;
ctx              1200 drivers/net/usb/cdc_ncm.c 			ctx->tx_rem_skb = NULL;
ctx              1208 drivers/net/usb/cdc_ncm.c 		ndp16 = cdc_ncm_ndp(ctx, skb_out, sign, skb->len + ctx->tx_modulus + ctx->tx_remainder);
ctx              1211 drivers/net/usb/cdc_ncm.c 		cdc_ncm_align_tail(skb_out,  ctx->tx_modulus, ctx->tx_remainder, ctx->tx_curr_size);
ctx              1214 drivers/net/usb/cdc_ncm.c 		if (!ndp16 || skb_out->len + skb->len + delayed_ndp_size > ctx->tx_curr_size) {
ctx              1222 drivers/net/usb/cdc_ncm.c 				if (ctx->tx_rem_skb != NULL) {
ctx              1223 drivers/net/usb/cdc_ncm.c 					dev_kfree_skb_any(ctx->tx_rem_skb);
ctx              1226 drivers/net/usb/cdc_ncm.c 				ctx->tx_rem_skb = skb;
ctx              1227 drivers/net/usb/cdc_ncm.c 				ctx->tx_rem_sign = sign;
ctx              1230 drivers/net/usb/cdc_ncm.c 				ctx->tx_reason_ntb_full++;	/* count reason for transmitting */
ctx              1244 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_frame_payload += skb->len;	/* count real tx payload data */
ctx              1251 drivers/net/usb/cdc_ncm.c 			ctx->tx_reason_ndp_full++;	/* count reason for transmitting */
ctx              1263 drivers/net/usb/cdc_ncm.c 	ctx->tx_curr_frame_num = n;
ctx              1268 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_skb = skb_out;
ctx              1271 drivers/net/usb/cdc_ncm.c 	} else if ((n < ctx->tx_max_datagrams) && (ready2send == 0) && (ctx->timer_interval > 0)) {
ctx              1274 drivers/net/usb/cdc_ncm.c 		ctx->tx_curr_skb = skb_out;
ctx              1277 drivers/net/usb/cdc_ncm.c 			ctx->tx_timer_pending = CDC_NCM_TIMER_PENDING_CNT;
ctx              1281 drivers/net/usb/cdc_ncm.c 		if (n == ctx->tx_max_datagrams)
ctx              1282 drivers/net/usb/cdc_ncm.c 			ctx->tx_reason_max_datagram++;	/* count reason for transmitting */
ctx              1288 drivers/net/usb/cdc_ncm.c 	if (ctx->drvflags & CDC_NCM_FLAG_NDP_TO_END) {
ctx              1290 drivers/net/usb/cdc_ncm.c 		cdc_ncm_align_tail(skb_out, ctx->tx_ndp_modulus, 0, ctx->tx_curr_size - ctx->max_ndp_size);
ctx              1292 drivers/net/usb/cdc_ncm.c 		skb_put_data(skb_out, ctx->delayed_ndp16, ctx->max_ndp_size);
ctx              1295 drivers/net/usb/cdc_ncm.c 		ndp16 = memset(ctx->delayed_ndp16, 0, ctx->max_ndp_size);
ctx              1308 drivers/net/usb/cdc_ncm.c 	    skb_out->len > ctx->min_tx_pkt) {
ctx              1309 drivers/net/usb/cdc_ncm.c 		padding_count = ctx->tx_curr_size - skb_out->len;
ctx              1311 drivers/net/usb/cdc_ncm.c 	} else if (skb_out->len < ctx->tx_curr_size &&
ctx              1321 drivers/net/usb/cdc_ncm.c 	ctx->tx_curr_skb = NULL;
ctx              1324 drivers/net/usb/cdc_ncm.c 	ctx->tx_overhead += skb_out->len - ctx->tx_curr_frame_payload;
ctx              1325 drivers/net/usb/cdc_ncm.c 	ctx->tx_ntbs++;
ctx              1332 drivers/net/usb/cdc_ncm.c 				(long)ctx->tx_curr_frame_payload - skb_out->len);
ctx              1338 drivers/net/usb/cdc_ncm.c 	if (ctx->tx_curr_skb != NULL && n > 0)
ctx              1339 drivers/net/usb/cdc_ncm.c 		cdc_ncm_tx_timeout_start(ctx);
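
The fill_tx_frame entries repeatedly call cdc_ncm_align_tail(skb, modulus, remainder, max) so each datagram and NDP starts at an offset congruent to the remainder modulo the modulus (the NCM wNdpOutDivisor/wNdpOutPayloadRemainder contract). A runnable userspace check of one minimal way to compute such padding; this is an illustration of the congruence, not necessarily the driver's exact arithmetic:

#include <stdio.h>

/* Hypothetical helper: minimal padding so that
 * (len + pad) % modulus == remainder, the invariant
 * cdc_ncm_align_tail() enforces by zero-padding the skb tail. */
static unsigned int align_pad(unsigned int len, unsigned int modulus,
			      unsigned int remainder)
{
	return (modulus + remainder - (len % modulus)) % modulus;
}

int main(void)
{
	printf("%u\n", align_pad(13, 4, 0)); /* 3: (13 + 3) mod 4 == 0 */
	printf("%u\n", align_pad(13, 4, 2)); /* 1: (13 + 1) mod 4 == 2 */
	return 0;
}
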
ctx              1344 drivers/net/usb/cdc_ncm.c static void cdc_ncm_tx_timeout_start(struct cdc_ncm_ctx *ctx)
ctx              1347 drivers/net/usb/cdc_ncm.c 	if (!(hrtimer_active(&ctx->tx_timer) || atomic_read(&ctx->stop)))
ctx              1348 drivers/net/usb/cdc_ncm.c 		hrtimer_start(&ctx->tx_timer,
ctx              1349 drivers/net/usb/cdc_ncm.c 				ctx->timer_interval,
ctx              1355 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx =
ctx              1358 drivers/net/usb/cdc_ncm.c 	if (!atomic_read(&ctx->stop))
ctx              1359 drivers/net/usb/cdc_ncm.c 		tasklet_schedule(&ctx->bh);
ctx              1366 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx              1368 drivers/net/usb/cdc_ncm.c 	spin_lock_bh(&ctx->mtx);
ctx              1369 drivers/net/usb/cdc_ncm.c 	if (ctx->tx_timer_pending != 0) {
ctx              1370 drivers/net/usb/cdc_ncm.c 		ctx->tx_timer_pending--;
ctx              1371 drivers/net/usb/cdc_ncm.c 		cdc_ncm_tx_timeout_start(ctx);
ctx              1372 drivers/net/usb/cdc_ncm.c 		spin_unlock_bh(&ctx->mtx);
ctx              1374 drivers/net/usb/cdc_ncm.c 		ctx->tx_reason_timeout++;	/* count reason for transmitting */
ctx              1375 drivers/net/usb/cdc_ncm.c 		spin_unlock_bh(&ctx->mtx);
ctx              1380 drivers/net/usb/cdc_ncm.c 		spin_unlock_bh(&ctx->mtx);
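
The timeout entries above show the deferred-flush idiom: arm the hrtimer only when it is not already queued and the device is not stopping; the timer callback merely schedules a tasklet, and the BH side either re-arms the timer while tx_timer_pending counts down or transmits the partial NTB. A hedged kernel-style sketch assuming a context with the same fields (struct and function names are illustrative; the hrtimer/tasklet calls are real):

#include <linux/hrtimer.h>
#include <linux/interrupt.h>
#include <linux/atomic.h>
#include <linux/kernel.h>

struct my_tx_ctx {
	struct hrtimer tx_timer;
	struct tasklet_struct bh;
	atomic_t stop;
	ktime_t timer_interval;
};

static void my_tx_timeout_start(struct my_tx_ctx *ctx)
{
	/* arm only if not already pending and not tearing down */
	if (!(hrtimer_active(&ctx->tx_timer) || atomic_read(&ctx->stop)))
		hrtimer_start(&ctx->tx_timer, ctx->timer_interval,
			      HRTIMER_MODE_REL);
}

static enum hrtimer_restart my_tx_timer_cb(struct hrtimer *timer)
{
	struct my_tx_ctx *ctx =
		container_of(timer, struct my_tx_ctx, tx_timer);

	/* defer the real work to BH context */
	if (!atomic_read(&ctx->stop))
		tasklet_schedule(&ctx->bh);
	return HRTIMER_NORESTART;
}
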
ctx              1388 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx              1397 drivers/net/usb/cdc_ncm.c 	if (ctx == NULL)
ctx              1400 drivers/net/usb/cdc_ncm.c 	spin_lock_bh(&ctx->mtx);
ctx              1402 drivers/net/usb/cdc_ncm.c 	spin_unlock_bh(&ctx->mtx);
ctx              1414 drivers/net/usb/cdc_ncm.c int cdc_ncm_rx_verify_nth16(struct cdc_ncm_ctx *ctx, struct sk_buff *skb_in)
ctx              1421 drivers/net/usb/cdc_ncm.c 	if (ctx == NULL)
ctx              1440 drivers/net/usb/cdc_ncm.c 	if (len > ctx->rx_max) {
ctx              1443 drivers/net/usb/cdc_ncm.c 			  ctx->rx_max);
ctx              1447 drivers/net/usb/cdc_ncm.c 	if ((ctx->rx_seq + 1) != le16_to_cpu(nth16->wSequence) &&
ctx              1448 drivers/net/usb/cdc_ncm.c 	    (ctx->rx_seq || le16_to_cpu(nth16->wSequence)) &&
ctx              1449 drivers/net/usb/cdc_ncm.c 	    !((ctx->rx_seq == 0xffff) && !le16_to_cpu(nth16->wSequence))) {
ctx              1452 drivers/net/usb/cdc_ncm.c 			  ctx->rx_seq, le16_to_cpu(nth16->wSequence));
ctx              1454 drivers/net/usb/cdc_ncm.c 	ctx->rx_seq = le16_to_cpu(nth16->wSequence);
ctx              1501 drivers/net/usb/cdc_ncm.c 	struct cdc_ncm_ctx *ctx = (struct cdc_ncm_ctx *)dev->data[0];
ctx              1512 drivers/net/usb/cdc_ncm.c 	ndpoffset = cdc_ncm_rx_verify_nth16(ctx, skb_in);
ctx              1547 drivers/net/usb/cdc_ncm.c 				(len > ctx->rx_max) || (len < ETH_HLEN)) {
ctx              1572 drivers/net/usb/cdc_ncm.c 	ctx->rx_overhead += skb_in->len - payload;
ctx              1573 drivers/net/usb/cdc_ncm.c 	ctx->rx_ntbs++;
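
cdc_ncm_rx_verify_nth16 accepts a 16-bit wSequence that either increments, stays at the initial zero, or wraps from 0xffff back to 0; anything else is logged before the driver resynchronizes rx_seq. The predicate is compact enough to test standalone (runnable userspace sketch; the helper name is hypothetical):

#include <assert.h>
#include <stdint.h>
#include <stdbool.h>

/* True when the new 16-bit NTB sequence is acceptable after 'prev':
 * a simple increment, both still zero, or the 0xffff -> 0 wrap. */
static bool seq_ok(uint16_t prev, uint16_t cur)
{
	if ((unsigned int)prev + 1 == cur)
		return true;            /* normal increment */
	if (prev == 0 && cur == 0)
		return true;            /* numbering not started yet */
	if (prev == 0xffff && cur == 0)
		return true;            /* 16-bit wraparound */
	return false;
}

int main(void)
{
	assert(seq_ok(41, 42));
	assert(seq_ok(0xffff, 0));
	assert(seq_ok(0, 0));
	assert(!seq_ok(5, 0));  /* out of order: logged, then resynced */
	return 0;
}
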
ctx                30 drivers/net/usb/huawei_cdc_ncm.c 	struct cdc_ncm_ctx *ctx;
ctx                68 drivers/net/usb/huawei_cdc_ncm.c 	struct cdc_ncm_ctx *ctx;
ctx                89 drivers/net/usb/huawei_cdc_ncm.c 	ctx = drvstate->ctx;
ctx                96 drivers/net/usb/huawei_cdc_ncm.c 		subdriver = usb_cdc_wdm_register(ctx->control,
ctx               119 drivers/net/usb/huawei_cdc_ncm.c 	struct cdc_ncm_ctx *ctx = drvstate->ctx;
ctx               122 drivers/net/usb/huawei_cdc_ncm.c 		drvstate->subdriver->disconnect(ctx->control);
ctx               134 drivers/net/usb/huawei_cdc_ncm.c 	struct cdc_ncm_ctx *ctx = drvstate->ctx;
ctx               136 drivers/net/usb/huawei_cdc_ncm.c 	if (ctx == NULL) {
ctx               145 drivers/net/usb/huawei_cdc_ncm.c 	if (intf == ctx->control &&
ctx               162 drivers/net/usb/huawei_cdc_ncm.c 	struct cdc_ncm_ctx *ctx = drvstate->ctx;
ctx               166 drivers/net/usb/huawei_cdc_ncm.c 		(intf == ctx->control &&
ctx               631 drivers/net/virtio_net.c 				     void *buf, void *ctx,
ctx               638 drivers/net/virtio_net.c 	unsigned int xdp_headroom = (unsigned long)ctx;
ctx               782 drivers/net/virtio_net.c 					 void *ctx,
ctx               794 drivers/net/virtio_net.c 	unsigned int headroom = mergeable_ctx_to_headroom(ctx);
ctx               916 drivers/net/virtio_net.c 	truesize = mergeable_ctx_to_truesize(ctx);
ctx               919 drivers/net/virtio_net.c 			 dev->name, len, (unsigned long)ctx);
ctx               932 drivers/net/virtio_net.c 		buf = virtqueue_get_buf_ctx(rq->vq, &len, &ctx);
ctx               945 drivers/net/virtio_net.c 		truesize = mergeable_ctx_to_truesize(ctx);
ctx               948 drivers/net/virtio_net.c 				 dev->name, len, (unsigned long)ctx);
ctx              1011 drivers/net/virtio_net.c 			void *buf, unsigned int len, void **ctx,
ctx              1033 drivers/net/virtio_net.c 		skb = receive_mergeable(dev, vi, rq, buf, ctx, len, xdp_xmit,
ctx              1038 drivers/net/virtio_net.c 		skb = receive_small(dev, vi, rq, buf, ctx, len, xdp_xmit, stats);
ctx              1080 drivers/net/virtio_net.c 	void *ctx = (void *)(unsigned long)xdp_headroom;
ctx              1094 drivers/net/virtio_net.c 	err = virtqueue_add_inbuf_ctx(rq->vq, rq->sg, 1, buf, ctx, gfp);
ctx              1173 drivers/net/virtio_net.c 	void *ctx;
ctx              1200 drivers/net/virtio_net.c 	ctx = mergeable_len_to_ctx(len, headroom);
ctx              1201 drivers/net/virtio_net.c 	err = virtqueue_add_inbuf_ctx(rq->vq, rq->sg, 1, buf, ctx, gfp);
ctx              1321 drivers/net/virtio_net.c 		void *ctx;
ctx              1324 drivers/net/virtio_net.c 		       (buf = virtqueue_get_buf_ctx(rq->vq, &len, &ctx))) {
ctx              1325 drivers/net/virtio_net.c 			receive_buf(vi, rq, buf, len, ctx, xdp_xmit, &stats);
ctx              2758 drivers/net/virtio_net.c 	bool *ctx;
ctx              2778 drivers/net/virtio_net.c 		ctx = kcalloc(total_vqs, sizeof(*ctx), GFP_KERNEL);
ctx              2779 drivers/net/virtio_net.c 		if (!ctx)
ctx              2782 drivers/net/virtio_net.c 		ctx = NULL;
ctx              2799 drivers/net/virtio_net.c 		if (ctx)
ctx              2800 drivers/net/virtio_net.c 			ctx[rxq2vq(i)] = true;
ctx              2804 drivers/net/virtio_net.c 					 names, ctx, NULL);
ctx              2824 drivers/net/virtio_net.c 	kfree(ctx);
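
For small and mergeable buffers, the virtio_net ctx handed to virtqueue_add_inbuf_ctx() is not a real pointer at all: the headroom (and, via mergeable_len_to_ctx(), a packed truesize/headroom pair) is cast into the pointer value and recovered from virtqueue_get_buf_ctx(). A runnable sketch of the pack/unpack trick; the 12-bit field split below is an assumption for illustration, not the driver's actual layout:

#include <stdint.h>
#include <stdio.h>

/* Pack two small integers into a pointer-sized scalar, as virtio_net
 * does with truesize and headroom. The 12-bit split is illustrative. */
static void *len_to_ctx(unsigned int truesize, unsigned int headroom)
{
	return (void *)(uintptr_t)((headroom << 12) | (truesize & 0xfff));
}

static unsigned int ctx_to_truesize(void *ctx)
{
	return (uintptr_t)ctx & 0xfff;
}

static unsigned int ctx_to_headroom(void *ctx)
{
	return (uintptr_t)ctx >> 12;
}

int main(void)
{
	void *ctx = len_to_ctx(1500, 256);

	printf("truesize=%u headroom=%u\n",
	       ctx_to_truesize(ctx), ctx_to_headroom(ctx));
	return 0;
}
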
ctx               674 drivers/net/vmxnet3/vmxnet3_drv.c vmxnet3_map_pkt(struct sk_buff *skb, struct vmxnet3_tx_ctx *ctx,
ctx               684 drivers/net/vmxnet3/vmxnet3_drv.c 	BUG_ON(ctx->copy_size > skb_headlen(skb));
ctx               689 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx->sop_txd = tq->tx_ring.base + tq->tx_ring.next2fill;
ctx               690 drivers/net/vmxnet3/vmxnet3_drv.c 	gdesc = ctx->sop_txd; /* both loops below can be skipped */
ctx               693 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx->copy_size) {
ctx               694 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->sop_txd->txd.addr = cpu_to_le64(tq->data_ring.basePA +
ctx               697 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->sop_txd->dword[2] = cpu_to_le32(dw2 | ctx->copy_size);
ctx               698 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->sop_txd->dword[3] = 0;
ctx               706 drivers/net/vmxnet3/vmxnet3_drv.c 			le64_to_cpu(ctx->sop_txd->txd.addr),
ctx               707 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->sop_txd->dword[2], ctx->sop_txd->dword[3]);
ctx               715 drivers/net/vmxnet3/vmxnet3_drv.c 	len = skb_headlen(skb) - ctx->copy_size;
ctx               716 drivers/net/vmxnet3/vmxnet3_drv.c 	buf_offset = ctx->copy_size;
ctx               799 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx->eop_txd = gdesc;
ctx               803 drivers/net/vmxnet3/vmxnet3_drv.c 	tbi->sop_idx = ctx->sop_txd - tq->tx_ring.base;
ctx               839 drivers/net/vmxnet3/vmxnet3_drv.c 		  struct vmxnet3_tx_ctx *ctx,
ctx               844 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx->mss) {	/* TSO */
ctx               845 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->eth_ip_hdr_size = skb_transport_offset(skb);
ctx               846 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->l4_hdr_size = tcp_hdrlen(skb);
ctx               847 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->copy_size = ctx->eth_ip_hdr_size + ctx->l4_hdr_size;
ctx               850 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->eth_ip_hdr_size = skb_checksum_start_offset(skb);
ctx               852 drivers/net/vmxnet3/vmxnet3_drv.c 			if (ctx->ipv4) {
ctx               856 drivers/net/vmxnet3/vmxnet3_drv.c 			} else if (ctx->ipv6) {
ctx               864 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->l4_hdr_size = tcp_hdrlen(skb);
ctx               867 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->l4_hdr_size = sizeof(struct udphdr);
ctx               870 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->l4_hdr_size = 0;
ctx               874 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->copy_size = min(ctx->eth_ip_hdr_size +
ctx               875 drivers/net/vmxnet3/vmxnet3_drv.c 					 ctx->l4_hdr_size, skb->len);
ctx               877 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->eth_ip_hdr_size = 0;
ctx               878 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->l4_hdr_size = 0;
ctx               880 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->copy_size = min_t(unsigned int,
ctx               886 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->copy_size = skb->len;
ctx               889 drivers/net/vmxnet3/vmxnet3_drv.c 		if (unlikely(!pskb_may_pull(skb, ctx->copy_size)))
ctx               893 drivers/net/vmxnet3/vmxnet3_drv.c 	if (unlikely(ctx->copy_size > tq->txdata_desc_size)) {
ctx               895 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->copy_size = 0;
ctx               916 drivers/net/vmxnet3/vmxnet3_drv.c 		 struct vmxnet3_tx_ctx *ctx,
ctx               925 drivers/net/vmxnet3/vmxnet3_drv.c 	memcpy(tdd->data, skb->data, ctx->copy_size);
ctx               928 drivers/net/vmxnet3/vmxnet3_drv.c 		ctx->copy_size, tq->tx_ring.next2fill);
ctx               934 drivers/net/vmxnet3/vmxnet3_drv.c 		    struct vmxnet3_tx_ctx *ctx)
ctx               938 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx->ipv4) {
ctx               944 drivers/net/vmxnet3/vmxnet3_drv.c 	} else if (ctx->ipv6) {
ctx               987 drivers/net/vmxnet3/vmxnet3_drv.c 	struct vmxnet3_tx_ctx ctx;
ctx               996 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx.ipv4 = (vlan_get_protocol(skb) == cpu_to_be16(ETH_P_IP));
ctx               997 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx.ipv6 = (vlan_get_protocol(skb) == cpu_to_be16(ETH_P_IPV6));
ctx               999 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx.mss = skb_shinfo(skb)->gso_size;
ctx              1000 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx.mss) {
ctx              1009 drivers/net/vmxnet3/vmxnet3_drv.c 		vmxnet3_prepare_tso(skb, &ctx);
ctx              1027 drivers/net/vmxnet3/vmxnet3_drv.c 	ret = vmxnet3_parse_hdr(skb, tq, &ctx, adapter);
ctx              1029 drivers/net/vmxnet3/vmxnet3_drv.c 		BUG_ON(ret <= 0 && ctx.copy_size != 0);
ctx              1031 drivers/net/vmxnet3/vmxnet3_drv.c 		if (ctx.mss) {
ctx              1032 drivers/net/vmxnet3/vmxnet3_drv.c 			if (unlikely(ctx.eth_ip_hdr_size + ctx.l4_hdr_size >
ctx              1039 drivers/net/vmxnet3/vmxnet3_drv.c 				if (unlikely(ctx.eth_ip_hdr_size +
ctx              1067 drivers/net/vmxnet3/vmxnet3_drv.c 	vmxnet3_copy_hdr(skb, tq, &ctx, adapter);
ctx              1070 drivers/net/vmxnet3/vmxnet3_drv.c 	if (vmxnet3_map_pkt(skb, &ctx, tq, adapter->pdev, adapter))
ctx              1074 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx.eop_txd->dword[3] = cpu_to_le32(VMXNET3_TXD_CQ | VMXNET3_TXD_EOP);
ctx              1079 drivers/net/vmxnet3/vmxnet3_drv.c 	gdesc->dword[2] = ctx.sop_txd->dword[2];
ctx              1080 drivers/net/vmxnet3/vmxnet3_drv.c 	gdesc->dword[3] = ctx.sop_txd->dword[3];
ctx              1082 drivers/net/vmxnet3/vmxnet3_drv.c 	gdesc = ctx.sop_txd;
ctx              1085 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx.mss) {
ctx              1086 drivers/net/vmxnet3/vmxnet3_drv.c 		gdesc->txd.hlen = ctx.eth_ip_hdr_size + ctx.l4_hdr_size;
ctx              1088 drivers/net/vmxnet3/vmxnet3_drv.c 		gdesc->txd.msscof = ctx.mss;
ctx              1089 drivers/net/vmxnet3/vmxnet3_drv.c 		num_pkts = (skb->len - gdesc->txd.hlen + ctx.mss - 1) / ctx.mss;
ctx              1092 drivers/net/vmxnet3/vmxnet3_drv.c 			gdesc->txd.hlen = ctx.eth_ip_hdr_size;
ctx              1094 drivers/net/vmxnet3/vmxnet3_drv.c 			gdesc->txd.msscof = ctx.eth_ip_hdr_size +
ctx              1123 drivers/net/vmxnet3/vmxnet3_drv.c 			   (struct Vmxnet3_TxDesc *)ctx.sop_txd);
ctx              1124 drivers/net/vmxnet3/vmxnet3_drv.c 	gdesc = ctx.sop_txd;
ctx              1128 drivers/net/vmxnet3/vmxnet3_drv.c 		(u32)(ctx.sop_txd -
ctx              1197 drivers/net/vmxnet3/vmxnet3_drv.c 		 struct vmxnet3_rx_ctx *ctx,  struct vmxnet3_adapter *adapter)
ctx              1215 drivers/net/vmxnet3/vmxnet3_drv.c 	if (ctx->skb)
ctx              1216 drivers/net/vmxnet3/vmxnet3_drv.c 		dev_kfree_skb_irq(ctx->skb);
ctx              1218 drivers/net/vmxnet3/vmxnet3_drv.c 	ctx->skb = NULL;
ctx              1287 drivers/net/vmxnet3/vmxnet3_drv.c 	struct vmxnet3_rx_ctx *ctx = &rq->rx_ctx;
ctx              1329 drivers/net/vmxnet3/vmxnet3_drv.c 			vmxnet3_rx_error(rq, rcd, ctx, adapter);
ctx              1342 drivers/net/vmxnet3/vmxnet3_drv.c 			BUG_ON(ctx->skb != NULL || rbi->skb == NULL);
ctx              1354 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->skb = rbi->skb;
ctx              1366 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->skb = NULL;
ctx              1377 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->skb = new_skb;
ctx              1382 drivers/net/vmxnet3/vmxnet3_drv.c 				ctx->skb = rbi->skb;
ctx              1396 drivers/net/vmxnet3/vmxnet3_drv.c 					ctx->skb = NULL;
ctx              1417 drivers/net/vmxnet3/vmxnet3_drv.c 				skb_set_hash(ctx->skb,
ctx              1421 drivers/net/vmxnet3/vmxnet3_drv.c 			skb_put(ctx->skb, rcd->len);
ctx              1437 drivers/net/vmxnet3/vmxnet3_drv.c 			BUG_ON(ctx->skb == NULL && !skip_page_frags);
ctx              1458 drivers/net/vmxnet3/vmxnet3_drv.c 					dev_kfree_skb(ctx->skb);
ctx              1459 drivers/net/vmxnet3/vmxnet3_drv.c 					ctx->skb = NULL;
ctx              1471 drivers/net/vmxnet3/vmxnet3_drv.c 					dev_kfree_skb(ctx->skb);
ctx              1472 drivers/net/vmxnet3/vmxnet3_drv.c 					ctx->skb = NULL;
ctx              1481 drivers/net/vmxnet3/vmxnet3_drv.c 				vmxnet3_append_frag(ctx->skb, rcd, rbi);
ctx              1492 drivers/net/vmxnet3/vmxnet3_drv.c 		skb = ctx->skb;
ctx              1537 drivers/net/vmxnet3/vmxnet3_drv.c 			ctx->skb = NULL;
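
vmxnet3 threads a stack-allocated struct vmxnet3_tx_ctx through the transmit path: vmxnet3_parse_hdr fills in the header sizes and how many leading bytes to copy into the data ring, vmxnet3_copy_hdr performs the copy, and vmxnet3_map_pkt records the SOP/EOP descriptors so their control words can be patched afterwards. A condensed, runnable userspace model of the copy_size decision (struct and function names are illustrative; the real function has additional branches not shown in the entries above):

#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Illustrative per-packet context, echoing the fields indexed above. */
struct tx_ctx {
	unsigned int mss;
	unsigned int eth_ip_hdr_size;
	unsigned int l4_hdr_size;
	unsigned int copy_size;
};

/* How many leading bytes to copy into the data ring:
 * TSO copies the full L2+L3+L4 header; checksum offload copies the
 * headers capped at the packet length; otherwise a driver-chosen
 * minimum copy applies. */
static void pick_copy_size(struct tx_ctx *ctx, unsigned int skb_len,
			   unsigned int min_copy)
{
	if (ctx->mss)
		ctx->copy_size = ctx->eth_ip_hdr_size + ctx->l4_hdr_size;
	else if (ctx->eth_ip_hdr_size)
		ctx->copy_size = MIN(ctx->eth_ip_hdr_size + ctx->l4_hdr_size,
				     skb_len);
	else
		ctx->copy_size = MIN(min_copy, skb_len);
}

int main(void)
{
	struct tx_ctx c = { .mss = 1448, .eth_ip_hdr_size = 34,
			    .l4_hdr_size = 20 };

	pick_copy_size(&c, 1500, 54);
	printf("copy %u header bytes\n", c.copy_size); /* 54 */
	return 0;
}
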
ctx               168 drivers/net/wimax/i2400m/driver.c 	struct i2400m_reset_ctx ctx = {
ctx               169 drivers/net/wimax/i2400m/driver.c 		.completion = COMPLETION_INITIALIZER_ONSTACK(ctx.completion),
ctx               175 drivers/net/wimax/i2400m/driver.c 	i2400m->reset_ctx = &ctx;
ctx               180 drivers/net/wimax/i2400m/driver.c 	result = wait_for_completion_timeout(&ctx.completion, 4*HZ);
ctx               184 drivers/net/wimax/i2400m/driver.c 		result = ctx.result;
ctx               587 drivers/net/wimax/i2400m/driver.c 	struct i2400m_reset_ctx *ctx = i2400m->reset_ctx;
ctx               631 drivers/net/wimax/i2400m/driver.c 		ctx->result = result;
ctx               632 drivers/net/wimax/i2400m/driver.c 		complete(&ctx->completion);
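
The i2400m reset entries show the on-stack completion idiom: the caller publishes a pointer to a stack context, sleeps with a timeout, and the asynchronous path fills in the result and completes it. A kernel-style sketch, assuming a context with the same two fields and omitting the locking the real driver uses around reset_ctx:

#include <linux/completion.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

struct my_reset_ctx {
	struct completion completion;
	int result;
};

static struct my_reset_ctx *pending_reset; /* published to the async path */

static int my_reset_and_wait(void)
{
	struct my_reset_ctx ctx = {
		.completion = COMPLETION_INITIALIZER_ONSTACK(ctx.completion),
		.result = -ETIMEDOUT,
	};

	pending_reset = &ctx;
	/* ... kick off the device reset here ... */
	if (!wait_for_completion_timeout(&ctx.completion, 4 * HZ))
		return -ETIMEDOUT;      /* async side never answered */
	return ctx.result;
}

/* Called from the bootstrap path once the device comes back up. */
static void my_reset_done(int result)
{
	pending_reset->result = result;
	complete(&pending_reset->completion);
}
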
ctx               717 drivers/net/wireless/ath/ath10k/ce.c static int __ath10k_ce_rx_post_buf(struct ath10k_ce_pipe *pipe, void *ctx,
ctx               739 drivers/net/wireless/ath/ath10k/ce.c 	dest_ring->per_transfer_context[write_index] = ctx;
ctx               748 drivers/net/wireless/ath/ath10k/ce.c 				      void *ctx,
ctx               772 drivers/net/wireless/ath/ath10k/ce.c 	dest_ring->per_transfer_context[write_index] = ctx;
ctx               801 drivers/net/wireless/ath/ath10k/ce.c int ath10k_ce_rx_post_buf(struct ath10k_ce_pipe *pipe, void *ctx,
ctx               809 drivers/net/wireless/ath/ath10k/ce.c 	ret = pipe->ops->ce_rx_post_buf(pipe, ctx, paddr);
ctx               196 drivers/net/wireless/ath/ath10k/ce.h int ath10k_ce_rx_post_buf(struct ath10k_ce_pipe *pipe, void *ctx,
ctx               309 drivers/net/wireless/ath/ath10k/ce.h 	int (*ce_rx_post_buf)(struct ath10k_ce_pipe *pipe, void *ctx,
ctx               504 drivers/net/wireless/ath/ath10k/htt_tx.c static int ath10k_htt_tx_clean_up_pending(int msdu_id, void *skb, void *ctx)
ctx               506 drivers/net/wireless/ath/ath10k/htt_tx.c 	struct ath10k *ar = ctx;
ctx              7726 drivers/net/wireless/ath/ath10k/mac.c 			     struct ieee80211_chanctx_conf *ctx,
ctx              7738 drivers/net/wireless/ath/ath10k/mac.c 	WARN_ON(ctx && vifs);
ctx              7752 drivers/net/wireless/ath/ath10k/mac.c 	if (!ctx && ath10k_mac_num_chanctxs(ar) == 1) {
ctx              7761 drivers/net/wireless/ath/ath10k/mac.c 	} else if ((ctx && ath10k_mac_num_chanctxs(ar) == 0) ||
ctx              7762 drivers/net/wireless/ath/ath10k/mac.c 		   (ctx && (ar->state == ATH10K_STATE_RESTARTED))) {
ctx              7768 drivers/net/wireless/ath/ath10k/mac.c 		ar->rx_channel = ctx->def.chan;
ctx              7865 drivers/net/wireless/ath/ath10k/mac.c 			  struct ieee80211_chanctx_conf *ctx)
ctx              7871 drivers/net/wireless/ath/ath10k/mac.c 		   ctx->def.chan->center_freq, ctx->def.width, ctx);
ctx              7876 drivers/net/wireless/ath/ath10k/mac.c 	ath10k_mac_update_rx_channel(ar, ctx, NULL, 0);
ctx              7889 drivers/net/wireless/ath/ath10k/mac.c 			     struct ieee80211_chanctx_conf *ctx)
ctx              7895 drivers/net/wireless/ath/ath10k/mac.c 		   ctx->def.chan->center_freq, ctx->def.width, ctx);
ctx              7910 drivers/net/wireless/ath/ath10k/mac.c 	struct ieee80211_chanctx_conf *ctx;
ctx              7922 drivers/net/wireless/ath/ath10k/mac.c 	if (rcu_access_pointer(vif->chanctx_conf) != arg->ctx)
ctx              7933 drivers/net/wireless/ath/ath10k/mac.c 	struct ieee80211_chanctx_conf *ctx;
ctx              7935 drivers/net/wireless/ath/ath10k/mac.c 	ctx = rcu_access_pointer(vif->chanctx_conf);
ctx              7936 drivers/net/wireless/ath/ath10k/mac.c 	if (ctx != arg->ctx)
ctx              7943 drivers/net/wireless/ath/ath10k/mac.c 	arg->vifs[arg->next_vif].old_ctx = ctx;
ctx              7944 drivers/net/wireless/ath/ath10k/mac.c 	arg->vifs[arg->next_vif].new_ctx = ctx;
ctx              7950 drivers/net/wireless/ath/ath10k/mac.c 			     struct ieee80211_chanctx_conf *ctx,
ctx              7954 drivers/net/wireless/ath/ath10k/mac.c 	struct ath10k_mac_change_chanctx_arg arg = { .ctx = ctx };
ctx              7960 drivers/net/wireless/ath/ath10k/mac.c 		   ctx->def.chan->center_freq, ctx->def.width, ctx, changed);
ctx              8008 drivers/net/wireless/ath/ath10k/mac.c 				 struct ieee80211_chanctx_conf *ctx)
ctx              8018 drivers/net/wireless/ath/ath10k/mac.c 		   ctx, arvif->vdev_id);
ctx              8025 drivers/net/wireless/ath/ath10k/mac.c 	ret = ath10k_vdev_start(arvif, &ctx->def);
ctx              8029 drivers/net/wireless/ath/ath10k/mac.c 			    ctx->def.chan->center_freq, ret);
ctx              8087 drivers/net/wireless/ath/ath10k/mac.c 				   struct ieee80211_chanctx_conf *ctx)
ctx              8097 drivers/net/wireless/ath/ath10k/mac.c 		   ctx, arvif->vdev_id);
ctx              3119 drivers/net/wireless/ath/ath10k/pci.c static int ath10k_pci_napi_poll(struct napi_struct *ctx, int budget)
ctx              3121 drivers/net/wireless/ath/ath10k/pci.c 	struct ath10k *ar = container_of(ctx, struct ath10k, napi);
ctx              3127 drivers/net/wireless/ath/ath10k/pci.c 		napi_complete(ctx);
ctx              3136 drivers/net/wireless/ath/ath10k/pci.c 		napi_complete_done(ctx, done);
ctx              3146 drivers/net/wireless/ath/ath10k/pci.c 			napi_reschedule(ctx);
ctx              1164 drivers/net/wireless/ath/ath10k/snoc.c static int ath10k_snoc_napi_poll(struct napi_struct *ctx, int budget)
ctx              1166 drivers/net/wireless/ath/ath10k/snoc.c 	struct ath10k *ar = container_of(ctx, struct ath10k, napi);
ctx              1170 drivers/net/wireless/ath/ath10k/snoc.c 		napi_complete(ctx);
ctx              1178 drivers/net/wireless/ath/ath10k/snoc.c 		napi_complete(ctx);
ctx              9414 drivers/net/wireless/ath/ath10k/wmi.c 					       void *ctx)
ctx              9417 drivers/net/wireless/ath/ath10k/wmi.c 	struct ath10k *ar = ctx;
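
ath10k's NAPI poll handlers recover the owning ath10k from the embedded napi_struct with container_of rather than carrying a separate cookie: the ctx parameter really is the napi member itself. A runnable userspace demonstration of the container_of arithmetic (types are illustrative):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct napi { int weight; };

struct device_priv {
	int id;
	struct napi napi;       /* embedded, like ath10k's 'napi' member */
};

static void poll(struct napi *ctx)
{
	/* recover the enclosing structure from the member pointer */
	struct device_priv *dev =
		container_of(ctx, struct device_priv, napi);

	printf("polling device %d\n", dev->id);
}

int main(void)
{
	struct device_priv dev = { .id = 7 };

	poll(&dev.napi);
	return 0;
}
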
ctx               445 drivers/net/wireless/ath/ath9k/ath9k.h 	for (ctx = &sc->chanctx[0];                                 \
ctx               446 drivers/net/wireless/ath/ath9k/ath9k.h 	     ctx <= &sc->chanctx[ARRAY_SIZE(sc->chanctx) - 1];      \
ctx               447 drivers/net/wireless/ath/ath9k/ath9k.h 	     ctx++)
ctx               450 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_set_channel(struct ath_softc *sc, struct ath_chanctx *ctx,
ctx               456 drivers/net/wireless/ath/ath9k/ath9k.h ath_chanctx_get(struct ieee80211_chanctx_conf *ctx)
ctx               458 drivers/net/wireless/ath/ath9k/ath9k.h 	struct ath_chanctx **ptr = (void *) ctx->drv_priv;
ctx               477 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_chanctx_wake_queues(struct ath_softc *sc, struct ath_chanctx *ctx);
ctx               478 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_chanctx_stop_queues(struct ath_softc *sc, struct ath_chanctx *ctx);
ctx               479 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_check_active(struct ath_softc *sc, struct ath_chanctx *ctx);
ctx               551 drivers/net/wireless/ath/ath9k/ath9k.h 					     struct ath_chanctx *ctx)
ctx               555 drivers/net/wireless/ath/ath9k/ath9k.h 					     struct ath_chanctx *ctx)
ctx               559 drivers/net/wireless/ath/ath9k/ath9k.h 					    struct ath_chanctx *ctx)
ctx               673 drivers/net/wireless/ath/ath9k/ath9k.h 			       struct ath_chanctx *ctx,
ctx               676 drivers/net/wireless/ath/ath9k/ath9k.h 				   struct ath_chanctx *ctx);
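
The ath9k.h entries above include the ath_for_each_chanctx iterator, which walks the fixed chanctx array by pointer from the first element to the last. A runnable sketch of the same iterator shape over a plain member array (names are illustrative):

#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

struct chanctx { int active; };

struct softc { struct chanctx chanctx[2]; };

/* Pointer-walking iterator over a fixed-size member array,
 * shaped like ath_for_each_chanctx(). */
#define for_each_chanctx(sc, ctx)                                   \
	for (ctx = &(sc)->chanctx[0];                                \
	     ctx <= &(sc)->chanctx[ARRAY_SIZE((sc)->chanctx) - 1];   \
	     ctx++)

int main(void)
{
	struct softc sc = { .chanctx = { { 1 }, { 0 } } };
	struct chanctx *ctx;

	for_each_chanctx(&sc, ctx)
		printf("ctx %td active=%d\n", ctx - sc.chanctx, ctx->active);
	return 0;
}
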
ctx               106 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	struct owl_ctx *ctx = (struct owl_ctx *)pci_get_drvdata(pdev);
ctx               109 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	complete(&ctx->eeprom_load);
ctx               159 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	struct owl_ctx *ctx;
ctx               174 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               175 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	if (!ctx)
ctx               178 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	init_completion(&ctx->eeprom_load);
ctx               180 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	pci_set_drvdata(pdev, ctx);
ctx               191 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	struct owl_ctx *ctx = pci_get_drvdata(pdev);
ctx               193 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 	if (ctx) {
ctx               194 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c 		wait_for_completion(&ctx->eeprom_load);
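
The owl loader stores its context as PCI driver data and, on remove, waits for the asynchronous EEPROM load to finish before tearing anything down. A kernel-style sketch (struct and callback names are illustrative; the PCI drvdata and completion calls are real):

#include <linux/pci.h>
#include <linux/completion.h>
#include <linux/slab.h>

struct owl_like_ctx {
	struct completion eeprom_load;
};

/* Runs when the async firmware/EEPROM request finishes. */
static void fw_done(struct pci_dev *pdev)
{
	struct owl_like_ctx *ctx = pci_get_drvdata(pdev);

	/* ... consume the firmware ... */
	complete(&ctx->eeprom_load);
}

static int my_probe(struct pci_dev *pdev)
{
	struct owl_like_ctx *ctx;

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	init_completion(&ctx->eeprom_load);
	pci_set_drvdata(pdev, ctx);
	/* ... request the EEPROM asynchronously, ending in fw_done() ... */
	return 0;
}

static void my_remove(struct pci_dev *pdev)
{
	struct owl_like_ctx *ctx = pci_get_drvdata(pdev);

	if (ctx)
		wait_for_completion(&ctx->eeprom_load); /* don't race the load */
}
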
ctx               587 drivers/net/wireless/ath/ath9k/beacon.c 				      struct ath_chanctx *ctx,
ctx               591 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_beacon_config *cur_conf = &ctx->beacon;
ctx               630 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_chanctx *ctx;
ctx               646 drivers/net/wireless/ath/ath9k/beacon.c 	ctx = avp->chanctx;
ctx               647 drivers/net/wireless/ath/ath9k/beacon.c 	cur_conf = &ctx->beacon;
ctx               652 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_cache_beacon_config(sc, ctx, &main_vif->bss_conf);
ctx               660 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_cache_beacon_config(sc, ctx, &main_vif->bss_conf);
ctx               105 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx               117 drivers/net/wireless/ath/ath9k/channel.c 		ctx = &sc->chanctx[i];
ctx               118 drivers/net/wireless/ath/ath9k/channel.c 		cfg80211_chandef_create(&ctx->chandef, chan, NL80211_CHAN_HT20);
ctx               119 drivers/net/wireless/ath/ath9k/channel.c 		INIT_LIST_HEAD(&ctx->vifs);
ctx               120 drivers/net/wireless/ath/ath9k/channel.c 		ctx->txpower = ATH_TXPOWER_MAX;
ctx               121 drivers/net/wireless/ath/ath9k/channel.c 		ctx->flush_timeout = HZ / 5; /* 200ms */
ctx               122 drivers/net/wireless/ath/ath9k/channel.c 		for (j = 0; j < ARRAY_SIZE(ctx->acq); j++) {
ctx               123 drivers/net/wireless/ath/ath9k/channel.c 			INIT_LIST_HEAD(&ctx->acq[j].acq_new);
ctx               124 drivers/net/wireless/ath/ath9k/channel.c 			INIT_LIST_HEAD(&ctx->acq[j].acq_old);
ctx               125 drivers/net/wireless/ath/ath9k/channel.c 			spin_lock_init(&ctx->acq[j].lock);
ctx               130 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_set_channel(struct ath_softc *sc, struct ath_chanctx *ctx,
ctx               138 drivers/net/wireless/ath/ath9k/channel.c 		memcpy(&ctx->chandef, chandef, sizeof(*chandef));
ctx               139 drivers/net/wireless/ath/ath9k/channel.c 	cur_chan = sc->cur_chan == ctx;
ctx               159 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx               165 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
ctx               166 drivers/net/wireless/ath/ath9k/channel.c 		if (!ctx->active)
ctx               169 drivers/net/wireless/ath/ath9k/channel.c 		list_for_each_entry(avp, &ctx->vifs, list) {
ctx               174 drivers/net/wireless/ath/ath9k/channel.c 				return ctx;
ctx               247 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_check_active(struct ath_softc *sc, struct ath_chanctx *ctx)
ctx               255 drivers/net/wireless/ath/ath9k/channel.c 	if (!ctx)
ctx               258 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
ctx               262 drivers/net/wireless/ath/ath9k/channel.c 			ctx->flush_timeout =
ctx               265 drivers/net/wireless/ath/ath9k/channel.c 			ctx->flush_timeout =
ctx               278 drivers/net/wireless/ath/ath9k/channel.c 	ictx = ctx;
ctx               280 drivers/net/wireless/ath/ath9k/channel.c 	list_for_each_entry(avp, &ctx->vifs, list) {
ctx               294 drivers/net/wireless/ath/ath9k/channel.c 	ctx->active = active;
ctx               296 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
ctx               297 drivers/net/wireless/ath/ath9k/channel.c 		if (!ctx->assigned || list_empty(&ctx->vifs))
ctx               327 drivers/net/wireless/ath/ath9k/channel.c ath_chanctx_get_next(struct ath_softc *sc, struct ath_chanctx *ctx)
ctx               329 drivers/net/wireless/ath/ath9k/channel.c 	int idx = ctx - &sc->chanctx[0];
ctx               390 drivers/net/wireless/ath/ath9k/channel.c 				     struct ath_chanctx *ctx,
ctx               399 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx->active && sc->sched.extend_absence) {
ctx               408 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx->active && sc->sched.beacon_miss >= 2) {
ctx               415 drivers/net/wireless/ath/ath9k/channel.c 				       struct ath_chanctx *ctx,
ctx               436 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx->active && avp->noa_duration)
ctx               501 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx               543 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_next(sc, sc->cur_chan);
ctx               544 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active && sc->sched.state == ATH_CHANCTX_STATE_IDLE) {
ctx               545 drivers/net/wireless/ath/ath9k/channel.c 			sc->next_chan = ctx;
ctx               567 drivers/net/wireless/ath/ath9k/channel.c 		if (!ctx->active && avp->noa_duration &&
ctx               598 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_offchannel_noa(sc, ctx, avp, tsf_time);
ctx               602 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_handle_bmiss(sc, ctx, avp);
ctx               625 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active &&
ctx               630 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active && sc->sched.force_noa_update)
ctx               760 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_next(sc, sc->cur_chan);
ctx               762 drivers/net/wireless/ath/ath9k/channel.c 		if (!ctx->assigned)
ctx               765 drivers/net/wireless/ath/ath9k/channel.c 		sc->next_chan = ctx;
ctx               801 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_switch(struct ath_softc *sc, struct ath_chanctx *ctx,
ctx               809 drivers/net/wireless/ath/ath9k/channel.c 	    (sc->cur_chan != ctx) && (ctx == &sc->offchannel.chan)) {
ctx               811 drivers/net/wireless/ath/ath9k/channel.c 			ctx->chandef = *chandef;
ctx               825 drivers/net/wireless/ath/ath9k/channel.c 	sc->next_chan = ctx;
ctx               827 drivers/net/wireless/ath/ath9k/channel.c 		ctx->chandef = *chandef;
ctx               864 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx               866 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
ctx               867 drivers/net/wireless/ath/ath9k/channel.c 		if (!ctx->assigned || list_empty(&ctx->vifs))
ctx               869 drivers/net/wireless/ath/ath9k/channel.c 		if (active && !ctx->active)
ctx               872 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->switch_after_beacon)
ctx               873 drivers/net/wireless/ath/ath9k/channel.c 			return ctx;
ctx              1060 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx              1072 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_oper_chan(sc, true);
ctx              1073 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active) {
ctx              1079 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_switch(sc, ctx, NULL);
ctx              1335 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_chanctx *ctx;
ctx              1347 drivers/net/wireless/ath/ath9k/channel.c 	ctx = &sc->offchannel.chan;
ctx              1348 drivers/net/wireless/ath/ath9k/channel.c 	INIT_LIST_HEAD(&ctx->vifs);
ctx              1349 drivers/net/wireless/ath/ath9k/channel.c 	ctx->txpower = ATH_TXPOWER_MAX;
ctx              1350 drivers/net/wireless/ath/ath9k/channel.c 	cfg80211_chandef_create(&ctx->chandef, chan, NL80211_CHAN_HT20);
ctx              1352 drivers/net/wireless/ath/ath9k/channel.c 	for (i = 0; i < ARRAY_SIZE(ctx->acq); i++) {
ctx              1353 drivers/net/wireless/ath/ath9k/channel.c 		INIT_LIST_HEAD(&ctx->acq[i].acq_new);
ctx              1354 drivers/net/wireless/ath/ath9k/channel.c 		INIT_LIST_HEAD(&ctx->acq[i].acq_old);
ctx              1355 drivers/net/wireless/ath/ath9k/channel.c 		spin_lock_init(&ctx->acq[i].lock);
ctx              1385 drivers/net/wireless/ath/ath9k/channel.c void ath9k_chanctx_stop_queues(struct ath_softc *sc, struct ath_chanctx *ctx)
ctx              1390 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
ctx              1396 drivers/net/wireless/ath/ath9k/channel.c 					     ctx->hw_queue_base + i);
ctx              1404 drivers/net/wireless/ath/ath9k/channel.c void ath9k_chanctx_wake_queues(struct ath_softc *sc, struct ath_chanctx *ctx)
ctx              1409 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
ctx              1415 drivers/net/wireless/ath/ath9k/channel.c 					     ctx->hw_queue_base + i);
ctx               664 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_chanctx *ctx;
ctx               732 drivers/net/wireless/ath/ath9k/debug.c 	ath_for_each_chanctx(sc, ctx) {
ctx               733 drivers/net/wireless/ath/ath9k/debug.c 		if (list_empty(&ctx->vifs))
ctx               735 drivers/net/wireless/ath/ath9k/debug.c 		ath9k_calculate_iter_data(sc, ctx, &iter_data);
ctx               739 drivers/net/wireless/ath/ath9k/debug.c 			   i++, (int)(ctx->assigned), iter_data.naps,
ctx               528 drivers/net/wireless/ath/ath9k/init.c 				    void *ctx)
ctx               530 drivers/net/wireless/ath/ath9k/init.c 	struct ath9k_eeprom_ctx *ec = ctx;
ctx               650 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = sc->cur_chan;
ctx               661 drivers/net/wireless/ath/ath9k/main.c 	init_channel = ath9k_cmn_get_channel(hw, ah, &ctx->chandef);
ctx               982 drivers/net/wireless/ath/ath9k/main.c 				    struct ath_chanctx *ctx,
ctx               992 drivers/net/wireless/ath/ath9k/main.c 	list_for_each_entry(avp, &ctx->vifs, list) {
ctx               993 drivers/net/wireless/ath/ath9k/main.c 		if (ctx->nvifs_assigned != 1)
ctx              1011 drivers/net/wireless/ath/ath9k/main.c 			       struct ath_chanctx *ctx,
ctx              1024 drivers/net/wireless/ath/ath9k/main.c 	list_for_each_entry(avp, &ctx->vifs, list)
ctx              1027 drivers/net/wireless/ath/ath9k/main.c 	ath9k_update_bssid_mask(sc, ctx, iter_data);
ctx              1099 drivers/net/wireless/ath/ath9k/main.c 				   struct ath_chanctx *ctx)
ctx              1105 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_check_active(sc, ctx);
ctx              1107 drivers/net/wireless/ath/ath9k/main.c 	if (ctx != sc->cur_chan)
ctx              1111 drivers/net/wireless/ath/ath9k/main.c 	if (ctx == &sc->offchannel.chan)
ctx              1116 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_iter_data(sc, ctx, &iter_data);
ctx              1146 drivers/net/wireless/ath/ath9k/main.c 	ctx->switch_after_beacon = false;
ctx              1152 drivers/net/wireless/ath/ath9k/main.c 			ctx->switch_after_beacon = true;
ctx              1156 drivers/net/wireless/ath/ath9k/main.c 		bool changed = (iter_data.primary_sta != ctx->primary_sta);
ctx              1163 drivers/net/wireless/ath/ath9k/main.c 			ctx->primary_sta = iter_data.primary_sta;
ctx              1165 drivers/net/wireless/ath/ath9k/main.c 			ctx->primary_sta = NULL;
ctx              1409 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = sc->cur_chan;
ctx              1425 drivers/net/wireless/ath/ath9k/main.c 			ath_chanctx_set_channel(sc, ctx, &ctx->chandef);
ctx              1456 drivers/net/wireless/ath/ath9k/main.c 		ctx->offchannel = !!(conf->flags & IEEE80211_CONF_OFFCHANNEL);
ctx              1457 drivers/net/wireless/ath/ath9k/main.c 		ath_chanctx_set_channel(sc, ctx, &hw->conf.chandef);
ctx              1485 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx;
ctx              1492 drivers/net/wireless/ath/ath9k/main.c 	ath_for_each_chanctx(sc, ctx)
ctx              1493 drivers/net/wireless/ath/ath9k/main.c 		ctx->rxfilter = *total_flags;
ctx              2424 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx, **ptr;
ctx              2429 drivers/net/wireless/ath/ath9k/main.c 	ath_for_each_chanctx(sc, ctx) {
ctx              2430 drivers/net/wireless/ath/ath9k/main.c 		if (ctx->assigned)
ctx              2434 drivers/net/wireless/ath/ath9k/main.c 		*ptr = ctx;
ctx              2435 drivers/net/wireless/ath/ath9k/main.c 		ctx->assigned = true;
ctx              2436 drivers/net/wireless/ath/ath9k/main.c 		pos = ctx - &sc->chanctx[0];
ctx              2437 drivers/net/wireless/ath/ath9k/main.c 		ctx->hw_queue_base = pos * IEEE80211_NUM_ACS;
ctx              2443 drivers/net/wireless/ath/ath9k/main.c 		ath_chanctx_set_channel(sc, ctx, &conf->def);
ctx              2459 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = ath_chanctx_get(conf);
ctx              2467 drivers/net/wireless/ath/ath9k/main.c 	ctx->assigned = false;
ctx              2468 drivers/net/wireless/ath/ath9k/main.c 	ctx->hw_queue_base = 0;
ctx              2480 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = ath_chanctx_get(conf);
ctx              2486 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_set_channel(sc, ctx, &conf->def);
ctx              2497 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = ath_chanctx_get(conf);
ctx              2509 drivers/net/wireless/ath/ath9k/main.c 	avp->chanctx = ctx;
ctx              2510 drivers/net/wireless/ath/ath9k/main.c 	ctx->nvifs_assigned++;
ctx              2511 drivers/net/wireless/ath/ath9k/main.c 	list_add_tail(&avp->list, &ctx->vifs);
ctx              2512 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, ctx);
ctx              2514 drivers/net/wireless/ath/ath9k/main.c 		vif->hw_queue[i] = ctx->hw_queue_base + i;
ctx              2528 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = ath_chanctx_get(conf);
ctx              2541 drivers/net/wireless/ath/ath9k/main.c 	ctx->nvifs_assigned--;
ctx              2543 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, ctx);
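
The add_chanctx/assign_vif_chanctx entries show each assigned context claiming a contiguous block of hardware queues: hw_queue_base is the context's array index times IEEE80211_NUM_ACS (4 in mac80211), and every vif on that context maps its AC queues into the block. A runnable check of that arithmetic:

#include <stdio.h>

#define NUM_ACS 4       /* IEEE80211_NUM_ACS */

int main(void)
{
	/* context 'pos' in sc->chanctx[] owns hardware queues
	 * [pos * NUM_ACS, pos * NUM_ACS + NUM_ACS) */
	for (int pos = 0; pos < 2; pos++) {
		int base = pos * NUM_ACS;

		for (int ac = 0; ac < NUM_ACS; ac++)
			printf("ctx %d AC %d -> hw queue %d\n",
			       pos, ac, base + ac);
	}
	return 0;
}
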
ctx                76 drivers/net/wireless/ath/wil6210/debugfs.c 		has_skb = ring->ctx && ring->ctx[idx].skb;
ctx               135 drivers/net/wireless/ath/wil6210/debugfs.c 					   _s : (ring->ctx[i].skb ? _h : 'h'));
ctx              1135 drivers/net/wireless/ath/wil6210/debugfs.c 			skb = ring->ctx ? ring->ctx[txdesc_idx].skb : NULL;
ctx              1149 drivers/net/wireless/ath/wil6210/debugfs.c 		skb = ring->ctx[txdesc_idx].skb;
ctx               132 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx = kcalloc(vring->size, sizeof(vring->ctx[0]), GFP_KERNEL);
ctx               133 drivers/net/wireless/ath/wil6210/txrx.c 	if (!vring->ctx) {
ctx               155 drivers/net/wireless/ath/wil6210/txrx.c 		kfree(vring->ctx);
ctx               156 drivers/net/wireless/ath/wil6210/txrx.c 		vring->ctx = NULL;
ctx               176 drivers/net/wireless/ath/wil6210/txrx.c 		     vring->va, &vring->pa, vring->ctx);
ctx               182 drivers/net/wireless/ath/wil6210/txrx.c 			     struct wil_ctx *ctx)
ctx               188 drivers/net/wireless/ath/wil6210/txrx.c 	switch (ctx->mapped_as) {
ctx               211 drivers/net/wireless/ath/wil6210/txrx.c 			     &vring->pa, vring->ctx);
ctx               215 drivers/net/wireless/ath/wil6210/txrx.c 			     &vring->pa, vring->ctx);
ctx               221 drivers/net/wireless/ath/wil6210/txrx.c 		struct wil_ctx *ctx;
ctx               228 drivers/net/wireless/ath/wil6210/txrx.c 			ctx = &vring->ctx[vring->swtail];
ctx               229 drivers/net/wireless/ath/wil6210/txrx.c 			if (!ctx) {
ctx               237 drivers/net/wireless/ath/wil6210/txrx.c 			wil_txdesc_unmap(dev, (union wil_tx_desc *)d, ctx);
ctx               238 drivers/net/wireless/ath/wil6210/txrx.c 			if (ctx->skb)
ctx               239 drivers/net/wireless/ath/wil6210/txrx.c 				dev_kfree_skb_any(ctx->skb);
ctx               246 drivers/net/wireless/ath/wil6210/txrx.c 			ctx = &vring->ctx[vring->swhead];
ctx               251 drivers/net/wireless/ath/wil6210/txrx.c 			kfree_skb(ctx->skb);
ctx               256 drivers/net/wireless/ath/wil6210/txrx.c 	kfree(vring->ctx);
ctx               259 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx = NULL;
ctx               303 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx[i].skb = skb;
ctx               483 drivers/net/wireless/ath/wil6210/txrx.c 	skb = vring->ctx[i].skb;
ctx               484 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx[i].skb = NULL;
ctx              1839 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx[i].mapped_as = wil_mapped_as_single;
ctx              1840 drivers/net/wireless/ath/wil6210/txrx.c 	hdr_ctx = &vring->ctx[i];
ctx              1875 drivers/net/wireless/ath/wil6210/txrx.c 				vring->ctx[i].mapped_as = wil_mapped_as_page;
ctx              1882 drivers/net/wireless/ath/wil6210/txrx.c 				vring->ctx[i].mapped_as = wil_mapped_as_single;
ctx              1895 drivers/net/wireless/ath/wil6210/txrx.c 				first_ctx = &vring->ctx[i];
ctx              1980 drivers/net/wireless/ath/wil6210/txrx.c 	vring->ctx[i].skb = skb_get(skb);
ctx              2017 drivers/net/wireless/ath/wil6210/txrx.c 		struct wil_ctx *ctx;
ctx              2024 drivers/net/wireless/ath/wil6210/txrx.c 		ctx = &vring->ctx[i];
ctx              2025 drivers/net/wireless/ath/wil6210/txrx.c 		wil_txdesc_unmap(dev, (union wil_tx_desc *)d, ctx);
ctx              2026 drivers/net/wireless/ath/wil6210/txrx.c 		memset(ctx, 0, sizeof(*ctx));
ctx              2074 drivers/net/wireless/ath/wil6210/txrx.c 	ring->ctx[i].mapped_as = wil_mapped_as_single;
ctx              2090 drivers/net/wireless/ath/wil6210/txrx.c 	ring->ctx[i].nr_frags = nr_frags;
ctx              2111 drivers/net/wireless/ath/wil6210/txrx.c 		ring->ctx[i].mapped_as = wil_mapped_as_page;
ctx              2133 drivers/net/wireless/ath/wil6210/txrx.c 	ring->ctx[i].skb = skb_get(skb);
ctx              2174 drivers/net/wireless/ath/wil6210/txrx.c 		struct wil_ctx *ctx;
ctx              2177 drivers/net/wireless/ath/wil6210/txrx.c 		ctx = &ring->ctx[i];
ctx              2183 drivers/net/wireless/ath/wil6210/txrx.c 					    ctx);
ctx              2185 drivers/net/wireless/ath/wil6210/txrx.c 		memset(ctx, 0, sizeof(*ctx));
ctx              2460 drivers/net/wireless/ath/wil6210/txrx.c 		struct wil_ctx *ctx = &vring->ctx[vring->swtail];
ctx              2466 drivers/net/wireless/ath/wil6210/txrx.c 		int lf = (vring->swtail + ctx->nr_frags) % vring->size;
ctx              2479 drivers/net/wireless/ath/wil6210/txrx.c 			ctx = &vring->ctx[vring->swtail];
ctx              2480 drivers/net/wireless/ath/wil6210/txrx.c 			skb = ctx->skb;
ctx              2497 drivers/net/wireless/ath/wil6210/txrx.c 						    ctx);
ctx              2521 drivers/net/wireless/ath/wil6210/txrx.c 			memset(ctx, 0, sizeof(*ctx));
ctx                40 drivers/net/wireless/ath/wil6210/txrx_edma.c 				   struct wil_ctx *ctx)
ctx                46 drivers/net/wireless/ath/wil6210/txrx_edma.c 	switch (ctx->mapped_as) {
ctx               398 drivers/net/wireless/ath/wil6210/txrx_edma.c 	ring->ctx = kcalloc(ring->size, sizeof(ring->ctx[0]), GFP_KERNEL);
ctx               399 drivers/net/wireless/ath/wil6210/txrx_edma.c 	if (!ring->ctx)
ctx               417 drivers/net/wireless/ath/wil6210/txrx_edma.c 		     ring->size, ring->va, &ring->pa, ring->ctx);
ctx               425 drivers/net/wireless/ath/wil6210/txrx_edma.c 	kfree(ring->ctx);
ctx               426 drivers/net/wireless/ath/wil6210/txrx_edma.c 	ring->ctx = NULL;
ctx               446 drivers/net/wireless/ath/wil6210/txrx_edma.c 			     &ring->pa, ring->ctx);
ctx               460 drivers/net/wireless/ath/wil6210/txrx_edma.c 		     &ring->pa, ring->ctx);
ctx               463 drivers/net/wireless/ath/wil6210/txrx_edma.c 		struct wil_ctx *ctx;
ctx               470 drivers/net/wireless/ath/wil6210/txrx_edma.c 		ctx = &ring->ctx[ring->swtail];
ctx               471 drivers/net/wireless/ath/wil6210/txrx_edma.c 		if (!ctx) {
ctx               479 drivers/net/wireless/ath/wil6210/txrx_edma.c 		wil_tx_desc_unmap_edma(dev, (union wil_tx_desc *)d, ctx);
ctx               480 drivers/net/wireless/ath/wil6210/txrx_edma.c 		if (ctx->skb)
ctx               481 drivers/net/wireless/ath/wil6210/txrx_edma.c 			dev_kfree_skb_any(ctx->skb);
ctx               487 drivers/net/wireless/ath/wil6210/txrx_edma.c 	kfree(ring->ctx);
ctx               490 drivers/net/wireless/ath/wil6210/txrx_edma.c 	ring->ctx = NULL;
ctx              1229 drivers/net/wireless/ath/wil6210/txrx_edma.c 			struct wil_ctx *ctx = &ring->ctx[ring->swtail];
ctx              1232 drivers/net/wireless/ath/wil6210/txrx_edma.c 			struct sk_buff *skb = ctx->skb;
ctx              1250 drivers/net/wireless/ath/wil6210/txrx_edma.c 					       ctx);
ctx              1274 drivers/net/wireless/ath/wil6210/txrx_edma.c 			memset(ctx, 0, sizeof(*ctx));
ctx              1373 drivers/net/wireless/ath/wil6210/txrx_edma.c 		ring->ctx[i].mapped_as = wil_mapped_as_single;
ctx              1376 drivers/net/wireless/ath/wil6210/txrx_edma.c 		ring->ctx[i].mapped_as = wil_mapped_as_page;
ctx              1394 drivers/net/wireless/ath/wil6210/txrx_edma.c 		ring->ctx[i].skb = skb_get(skb);
ctx              1533 drivers/net/wireless/ath/wil6210/txrx_edma.c 		struct wil_ctx *ctx;
ctx              1541 drivers/net/wireless/ath/wil6210/txrx_edma.c 		ctx = &ring->ctx[i];
ctx              1542 drivers/net/wireless/ath/wil6210/txrx_edma.c 		wil_tx_desc_unmap_edma(dev, (union wil_tx_desc *)d, ctx);
ctx              1543 drivers/net/wireless/ath/wil6210/txrx_edma.c 		memset(ctx, 0, sizeof(*ctx));
ctx               538 drivers/net/wireless/ath/wil6210/wil6210.h 	struct wil_ctx *ctx; /* ctx[size] - software context */
ctx               615 drivers/net/wireless/ath/wil6210/wil6210.h 			      struct wil_ctx *ctx);
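
wil6210 keeps a software shadow array, ring->ctx[i], beside the DMA descriptor ring: one wil_ctx per slot recording the skb, how the buffer was mapped (single vs. page), and the fragment count, so completion and teardown can unmap each slot correctly and slots are memset back to zero once reclaimed. A runnable userspace model of allocating and releasing such a shadow array (types are illustrative):

#include <stdlib.h>
#include <string.h>

enum mapped_as { MAPPED_INVALID = 0, MAPPED_SINGLE, MAPPED_PAGE };

struct ctx_slot {
	void *skb;              /* owning packet, if any */
	enum mapped_as mapped;  /* how to unmap this slot */
	int nr_frags;
};

struct ring {
	size_t size;
	struct ctx_slot *ctx;   /* ctx[size] - software context */
	/* hardware descriptors would live here, DMA-allocated */
};

static int ring_init(struct ring *r, size_t size)
{
	r->size = size;
	r->ctx = calloc(size, sizeof(r->ctx[0]));
	return r->ctx ? 0 : -1;
}

static void ring_free(struct ring *r)
{
	/* the real driver first walks swtail..swhead, unmapping and
	 * freeing any skbs still referenced by the slots */
	free(r->ctx);
	r->ctx = NULL;
	r->size = 0;
}

int main(void)
{
	struct ring r;

	if (ring_init(&r, 128))
		return 1;
	r.ctx[0].mapped = MAPPED_SINGLE;        /* head fragment */
	r.ctx[1].mapped = MAPPED_PAGE;          /* paged fragment */
	memset(&r.ctx[0], 0, sizeof(r.ctx[0])); /* slot reclaimed */
	ring_free(&r);
	return 0;
}
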
ctx              2179 drivers/net/wireless/broadcom/b43/main.c 	struct b43_request_fw_context *ctx = context;
ctx              2181 drivers/net/wireless/broadcom/b43/main.c 	ctx->blob = firmware;
ctx              2182 drivers/net/wireless/broadcom/b43/main.c 	complete(&ctx->dev->fw_load_complete);
ctx              2185 drivers/net/wireless/broadcom/b43/main.c int b43_do_request_fw(struct b43_request_fw_context *ctx,
ctx              2201 drivers/net/wireless/broadcom/b43/main.c 		if ((fw->type == ctx->req_type) &&
ctx              2212 drivers/net/wireless/broadcom/b43/main.c 	switch (ctx->req_type) {
ctx              2214 drivers/net/wireless/broadcom/b43/main.c 		snprintf(ctx->fwname, sizeof(ctx->fwname),
ctx              2219 drivers/net/wireless/broadcom/b43/main.c 		snprintf(ctx->fwname, sizeof(ctx->fwname),
ctx              2229 drivers/net/wireless/broadcom/b43/main.c 		init_completion(&ctx->dev->fw_load_complete);
ctx              2230 drivers/net/wireless/broadcom/b43/main.c 		err = request_firmware_nowait(THIS_MODULE, 1, ctx->fwname,
ctx              2231 drivers/net/wireless/broadcom/b43/main.c 					      ctx->dev->dev->dev, GFP_KERNEL,
ctx              2232 drivers/net/wireless/broadcom/b43/main.c 					      ctx, b43_fw_cb);
ctx              2237 drivers/net/wireless/broadcom/b43/main.c 		wait_for_completion(&ctx->dev->fw_load_complete);
ctx              2238 drivers/net/wireless/broadcom/b43/main.c 		if (ctx->blob)
ctx              2244 drivers/net/wireless/broadcom/b43/main.c 	err = request_firmware(&ctx->blob, ctx->fwname,
ctx              2245 drivers/net/wireless/broadcom/b43/main.c 			       ctx->dev->dev->dev);
ctx              2247 drivers/net/wireless/broadcom/b43/main.c 		snprintf(ctx->errors[ctx->req_type],
ctx              2248 drivers/net/wireless/broadcom/b43/main.c 			 sizeof(ctx->errors[ctx->req_type]),
ctx              2250 drivers/net/wireless/broadcom/b43/main.c 			 ctx->fwname);
ctx              2253 drivers/net/wireless/broadcom/b43/main.c 		snprintf(ctx->errors[ctx->req_type],
ctx              2254 drivers/net/wireless/broadcom/b43/main.c 			 sizeof(ctx->errors[ctx->req_type]),
ctx              2256 drivers/net/wireless/broadcom/b43/main.c 			 ctx->fwname, err);
ctx              2260 drivers/net/wireless/broadcom/b43/main.c 	if (ctx->blob->size < sizeof(struct b43_fw_header))
ctx              2262 drivers/net/wireless/broadcom/b43/main.c 	hdr = (struct b43_fw_header *)(ctx->blob->data);
ctx              2267 drivers/net/wireless/broadcom/b43/main.c 		if (size != ctx->blob->size - sizeof(struct b43_fw_header))
ctx              2278 drivers/net/wireless/broadcom/b43/main.c 	fw->data = ctx->blob;
ctx              2280 drivers/net/wireless/broadcom/b43/main.c 	fw->type = ctx->req_type;
ctx              2285 drivers/net/wireless/broadcom/b43/main.c 	snprintf(ctx->errors[ctx->req_type],
ctx              2286 drivers/net/wireless/broadcom/b43/main.c 		 sizeof(ctx->errors[ctx->req_type]),
ctx              2287 drivers/net/wireless/broadcom/b43/main.c 		 "Firmware file \"%s\" format error.\n", ctx->fwname);
ctx              2288 drivers/net/wireless/broadcom/b43/main.c 	release_firmware(ctx->blob);
ctx              2294 drivers/net/wireless/broadcom/b43/main.c static int b43_try_request_fw(struct b43_request_fw_context *ctx)
ctx              2296 drivers/net/wireless/broadcom/b43/main.c 	struct b43_wldev *dev = ctx->dev;
ctx              2297 drivers/net/wireless/broadcom/b43/main.c 	struct b43_firmware *fw = &ctx->dev->fw;
ctx              2299 drivers/net/wireless/broadcom/b43/main.c 	const u8 rev = ctx->dev->dev->core_rev;
ctx              2369 drivers/net/wireless/broadcom/b43/main.c 	err = b43_do_request_fw(ctx, filename, &fw->ucode, true);
ctx              2381 drivers/net/wireless/broadcom/b43/main.c 	err = b43_do_request_fw(ctx, filename, &fw->pcm, false);
ctx              2445 drivers/net/wireless/broadcom/b43/main.c 	err = b43_do_request_fw(ctx, filename, &fw->initvals, false);
ctx              2505 drivers/net/wireless/broadcom/b43/main.c 	err = b43_do_request_fw(ctx, filename, &fw->initvals_band, false);
ctx              2509 drivers/net/wireless/broadcom/b43/main.c 	fw->opensource = (ctx->req_type == B43_FWTYPE_OPENSOURCE);
ctx              2514 drivers/net/wireless/broadcom/b43/main.c 	err = ctx->fatal_failure = -EOPNOTSUPP;
ctx              2520 drivers/net/wireless/broadcom/b43/main.c 	err = ctx->fatal_failure = -EOPNOTSUPP;
ctx              2526 drivers/net/wireless/broadcom/b43/main.c 	err = ctx->fatal_failure = -EOPNOTSUPP;
ctx              2551 drivers/net/wireless/broadcom/b43/main.c 	struct b43_request_fw_context *ctx;
ctx              2556 drivers/net/wireless/broadcom/b43/main.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx              2557 drivers/net/wireless/broadcom/b43/main.c 	if (!ctx)
ctx              2559 drivers/net/wireless/broadcom/b43/main.c 	ctx->dev = dev;
ctx              2561 drivers/net/wireless/broadcom/b43/main.c 	ctx->req_type = B43_FWTYPE_PROPRIETARY;
ctx              2562 drivers/net/wireless/broadcom/b43/main.c 	err = b43_try_request_fw(ctx);
ctx              2566 drivers/net/wireless/broadcom/b43/main.c 	if (ctx->fatal_failure)
ctx              2570 drivers/net/wireless/broadcom/b43/main.c 	ctx->req_type = B43_FWTYPE_OPENSOURCE;
ctx              2571 drivers/net/wireless/broadcom/b43/main.c 	err = b43_try_request_fw(ctx);
ctx              2574 drivers/net/wireless/broadcom/b43/main.c 	if(ctx->fatal_failure)
ctx              2579 drivers/net/wireless/broadcom/b43/main.c 		errmsg = ctx->errors[i];
ctx              2601 drivers/net/wireless/broadcom/b43/main.c 	kfree(ctx);
ctx                95 drivers/net/wireless/broadcom/b43/main.h int b43_do_request_fw(struct b43_request_fw_context *ctx, const char *name,
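
b43 hands its request context straight to request_firmware_nowait(); the completion callback gets it back as an opaque pointer, stashes the blob, and completes the waiting thread, which then checks whether the load produced anything. A kernel-style sketch (the context struct is illustrative; request_firmware_nowait()'s signature is real, and the literal 1 for the uevent argument matches the b43 call above):

#include <linux/firmware.h>
#include <linux/completion.h>
#include <linux/module.h>
#include <linux/errno.h>

struct fw_req_ctx {
	const struct firmware *blob;
	struct completion done;
	struct device *dev;
};

static void fw_cb(const struct firmware *firmware, void *context)
{
	struct fw_req_ctx *ctx = context;

	ctx->blob = firmware;   /* NULL here means the load failed */
	complete(&ctx->done);
}

static int fw_load(struct fw_req_ctx *ctx, const char *name)
{
	int err;

	init_completion(&ctx->done);
	err = request_firmware_nowait(THIS_MODULE, 1 /* uevent */, name,
				      ctx->dev, GFP_KERNEL, ctx, fw_cb);
	if (err)
		return err;

	wait_for_completion(&ctx->done);
	return ctx->blob ? 0 : -ENOENT;
}
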
ctx               226 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	void *ctx;
ctx               243 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(core->base, sbidhigh));
ctx               255 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, address);
ctx               268 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, core->wrapbase + BCMA_IOCTL);
ctx               271 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, core->wrapbase + BCMA_RESET_CTL);
ctx               285 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               289 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               295 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               296 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow),
ctx               299 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               301 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		SPINWAIT((ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatehigh))
ctx               304 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatehigh));
ctx               308 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbidlow));
ctx               310 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			val = ci->ops->read32(ci->ctx,
ctx               313 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			ci->ops->write32(ci->ctx,
ctx               315 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			val = ci->ops->read32(ci->ctx,
ctx               318 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			SPINWAIT((ci->ops->read32(ci->ctx,
ctx               326 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow), val);
ctx               327 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               331 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = ci->ops->read32(ci->ctx, CORE_SB(base, sbidlow));
ctx               333 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			val = ci->ops->read32(ci->ctx,
ctx               336 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 			ci->ops->write32(ci->ctx,
ctx               342 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow),
ctx               356 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, core->wrapbase + BCMA_RESET_CTL);
ctx               361 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, core->wrapbase + BCMA_IOCTL,
ctx               363 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->read32(ci->ctx, core->wrapbase + BCMA_IOCTL);
ctx               366 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, core->wrapbase + BCMA_RESET_CTL,
ctx               371 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	SPINWAIT(ci->ops->read32(ci->ctx, core->wrapbase + BCMA_RESET_CTL) !=
ctx               376 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, core->wrapbase + BCMA_IOCTL,
ctx               378 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->read32(ci->ctx, core->wrapbase + BCMA_IOCTL);
ctx               401 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow),
ctx               404 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               408 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatehigh));
ctx               410 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatehigh), 0);
ctx               412 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(base, sbimstate));
ctx               415 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->write32(ci->ctx, CORE_SB(base, sbimstate), regdata);
ctx               419 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow),
ctx               421 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               425 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, CORE_SB(base, sbtmstatelow),
ctx               427 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_SB(base, sbtmstatelow));
ctx               443 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	while (ci->ops->read32(ci->ctx, core->wrapbase + BCMA_RESET_CTL) &
ctx               445 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->write32(ci->ctx, core->wrapbase + BCMA_RESET_CTL, 0);
ctx               452 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->write32(ci->ctx, core->wrapbase + BCMA_IOCTL,
ctx               454 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	ci->ops->read32(ci->ctx, core->wrapbase + BCMA_IOCTL);
ctx               532 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	return core->chip->ops->read32(core->chip->ctx, core->pub.base + reg);
ctx               538 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	core->chip->ops->write32(core->chip->ctx, core->pub.base + reg, val);
ctx               762 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	val = ci->ops->read32(ci->ctx, *eromaddr);
ctx               861 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	eromaddr = ci->ops->read32(ci->ctx, CORE_CC_REG(SI_ENUM_BASE, eromptr));
ctx               923 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	regdata = ci->ops->read32(ci->ctx, CORE_CC_REG(SI_ENUM_BASE, chipid));
ctx               980 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ci->ops->reset(ci->ctx, &ci->pub);
ctx              1007 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = chip->ops->read32(chip->ctx, cpu->wrapbase + BCMA_IOCTL);
ctx              1032 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	pub->cc_caps = chip->ops->read32(chip->ctx,
ctx              1034 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	pub->cc_caps_ext = chip->ops->read32(chip->ctx,
ctx              1041 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		val = chip->ops->read32(chip->ctx,
ctx              1052 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		ret = chip->ops->setup(chip->ctx, pub);
ctx              1057 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c struct brcmf_chip *brcmf_chip_attach(void *ctx,
ctx              1081 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	chip->ctx = ctx;
ctx              1083 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	err = ops->prepare(ctx);
ctx              1216 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	chip->ops->activate(chip->ctx, &chip->pub, 0);
ctx              1242 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	chip->ops->activate(chip->ctx, &chip->pub, rstvec);
ctx              1269 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 	chip->ops->activate(chip->ctx, &chip->pub, rstvec);
ctx              1351 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		chip->ops->write32(chip->ctx, addr, 3);
ctx              1353 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx              1357 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx              1362 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx              1366 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx              1371 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx              1376 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c 		reg = chip->ops->read32(chip->ctx, addr);
ctx                64 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	u32 (*read32)(void *ctx, u32 addr);
ctx                65 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	void (*write32)(void *ctx, u32 addr, u32 value);
ctx                66 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	int (*prepare)(void *ctx);
ctx                67 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	int (*reset)(void *ctx, struct brcmf_chip *chip);
ctx                68 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	int (*setup)(void *ctx, struct brcmf_chip *chip);
ctx                69 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h 	void (*activate)(void *ctx, struct brcmf_chip *chip, u32 rstvec);
ctx                73 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.h struct brcmf_chip *brcmf_chip_attach(void *ctx,
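
brcmfmac's chip.c never touches the bus directly: every register access in the lines above goes through ci->ops->read32(ci->ctx, ...) / write32(ci->ctx, ...), using the callback table declared in chip.h and the opaque void *ctx handed to brcmf_chip_attach(). A self-contained sketch of that vtable-plus-context pattern; the struct names are loose echoes of chip.h, and the in-memory "register file" is a made-up stand-in for real MMIO.

#include <stdio.h>
#include <stdint.h>

/* Reduced version of the buscore-ops idea from chip.h. */
struct buscore_ops {
	uint32_t (*read32)(void *ctx, uint32_t addr);
	void (*write32)(void *ctx, uint32_t addr, uint32_t value);
};

struct chip {
	void *ctx;			/* bus-private handle */
	const struct buscore_ops *ops;
};

/* Fake PCIe back end: "registers" are just a small array. */
struct fake_pcie {
	uint32_t regs[16];
};

static uint32_t pcie_read32(void *ctx, uint32_t addr)
{
	struct fake_pcie *p = ctx;	/* recover the typed handle */

	return p->regs[addr % 16];
}

static void pcie_write32(void *ctx, uint32_t addr, uint32_t value)
{
	struct fake_pcie *p = ctx;

	p->regs[addr % 16] = value;
}

static const struct buscore_ops pcie_ops = {
	.read32 = pcie_read32,
	.write32 = pcie_write32,
};

int main(void)
{
	struct fake_pcie pcie = { { 0 } };
	struct chip chip = { .ctx = &pcie, .ops = &pcie_ops };

	/* Bus-agnostic code only ever goes through ops + ctx. */
	chip.ops->write32(chip.ctx, 3, 0xdeadbeef);
	printf("reg3 = 0x%08x\n", chip.ops->read32(chip.ctx, 3));
	return 0;
}

The same chip-enumeration code can then be driven by the PCIe back end (pcie.c, where ctx is cast back to a brcmf_pciedev_info) or the SDIO back end (sdio.c, where ctx is a brcmf_sdio_dev), as the entries further down show.
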
ctx                16 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 				  int (*cr_ring_bell)(void *ctx),
ctx                17 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 				  int (*cr_update_rptr)(void *ctx),
ctx                18 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 				  int (*cr_update_wptr)(void *ctx),
ctx                19 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 				  int (*cr_write_rptr)(void *ctx),
ctx                20 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 				  int (*cr_write_wptr)(void *ctx), void *ctx)
ctx                27 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.c 	commonring->cr_ctx = ctx;
ctx                18 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 	int (*cr_ring_bell)(void *ctx);
ctx                19 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 	int (*cr_update_rptr)(void *ctx);
ctx                20 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 	int (*cr_update_wptr)(void *ctx);
ctx                21 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 	int (*cr_write_rptr)(void *ctx);
ctx                22 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 	int (*cr_write_wptr)(void *ctx);
ctx                36 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 				  int (*cr_ring_bell)(void *ctx),
ctx                37 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 				  int (*cr_update_rptr)(void *ctx),
ctx                38 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 				  int (*cr_update_wptr)(void *ctx),
ctx                39 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 				  int (*cr_write_rptr)(void *ctx),
ctx                40 drivers/net/wireless/broadcom/brcm80211/brcmfmac/commonring.h 				  int (*cr_write_wptr)(void *ctx), void *ctx);
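
commonring.c takes the same idea a step further: brcmf_commonring_register_cb() accepts five callbacks that all share one opaque context, stored once as cr_ctx. A compact stand-alone sketch of that registration; the types are simplified stand-ins, and all five slots point at one demo callback purely for brevity.

#include <stdio.h>

/* Reduced commonring: five callbacks sharing one opaque context. */
struct commonring {
	int (*cr_ring_bell)(void *ctx);
	int (*cr_update_rptr)(void *ctx);
	int (*cr_update_wptr)(void *ctx);
	int (*cr_write_rptr)(void *ctx);
	int (*cr_write_wptr)(void *ctx);
	void *cr_ctx;
};

static void commonring_register_cb(struct commonring *cr,
				   int (*cr_ring_bell)(void *ctx),
				   int (*cr_update_rptr)(void *ctx),
				   int (*cr_update_wptr)(void *ctx),
				   int (*cr_write_rptr)(void *ctx),
				   int (*cr_write_wptr)(void *ctx), void *ctx)
{
	cr->cr_ring_bell = cr_ring_bell;
	cr->cr_update_rptr = cr_update_rptr;
	cr->cr_update_wptr = cr_update_wptr;
	cr->cr_write_rptr = cr_write_rptr;
	cr->cr_write_wptr = cr_write_wptr;
	cr->cr_ctx = ctx;	/* stored once, passed to every callback */
}

/* Demo callback: treats ctx as a pointer to a ring id. */
static int demo_cb(void *ctx)
{
	printf("callback fired for ring %d\n", *(int *)ctx);
	return 0;
}

int main(void)
{
	struct commonring cr;
	int ring_id = 5;

	commonring_register_cb(&cr, demo_cb, demo_cb, demo_cb,
			       demo_cb, demo_cb, &ring_id);
	return cr.cr_ring_bell(cr.cr_ctx);
}
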
ctx               436 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c static void brcmf_fw_request_done(const struct firmware *fw, void *ctx);
ctx               521 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c static int brcmf_fw_request_nvram_done(const struct firmware *fw, void *ctx)
ctx               523 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c 	struct brcmf_fw *fwctx = ctx;
ctx               624 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c static void brcmf_fw_request_done(const struct firmware *fw, void *ctx)
ctx               626 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c 	struct brcmf_fw *fwctx = ctx;
ctx               633 drivers/net/wireless/broadcom/brcm80211/brcmfmac/firmware.c 		ret = brcmf_fw_complete_request(fw, ctx);
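
firmware.c uses the opaque pointer for asynchronous completion instead: brcmf_fw_request_done() and brcmf_fw_request_nvram_done() receive (const struct firmware *fw, void *ctx) and immediately cast ctx back to the driver's request state (struct brcmf_fw *fwctx = ctx). A simplified sketch of that shape; the firmware struct and the direct call in main() stand in for the kernel's firmware loader, which would normally fire the callback asynchronously.

#include <stdio.h>
#include <stddef.h>

/* Reduced stand-ins; not the kernel's struct firmware or struct brcmf_fw. */
struct firmware {
	size_t size;
	const unsigned char *data;
};

struct fw_request {
	const char *name;
	int done;
};

static void fw_request_done(const struct firmware *fw, void *ctx)
{
	struct fw_request *fwctx = ctx;	/* recover the request state */

	fwctx->done = 1;
	printf("%s: %s (%zu bytes)\n", fwctx->name,
	       fw ? "loaded" : "missing", fw ? fw->size : 0);
}

int main(void)
{
	static const unsigned char blob[] = { 0xde, 0xad, 0xbe, 0xef };
	struct firmware fw = { .size = sizeof(blob), .data = blob };
	struct fw_request req = { .name = "demo-fw.bin", .done = 0 };

	/* The loader would invoke this on request completion; here it is
	 * called directly to show the ctx round trip. */
	fw_request_done(&fw, &req);
	return req.done ? 0 : 1;
}
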
ctx               921 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_ring_mb_write_rptr(void *ctx)
ctx               923 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
ctx               939 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_ring_mb_write_wptr(void *ctx)
ctx               941 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
ctx               957 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_ring_mb_ring_bell(void *ctx)
ctx               959 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
ctx               973 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_ring_mb_update_rptr(void *ctx)
ctx               975 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
ctx               991 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_ring_mb_update_wptr(void *ctx)
ctx               993 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pcie_ringbuf *ring = (struct brcmf_pcie_ringbuf *)ctx;
ctx              1689 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static u32 brcmf_pcie_buscore_read32(void *ctx, u32 addr)
ctx              1691 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pciedev_info *devinfo = (struct brcmf_pciedev_info *)ctx;
ctx              1698 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static void brcmf_pcie_buscore_write32(void *ctx, u32 addr, u32 value)
ctx              1700 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pciedev_info *devinfo = (struct brcmf_pciedev_info *)ctx;
ctx              1707 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_buscoreprep(void *ctx)
ctx              1709 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	return brcmf_pcie_get_resource(ctx);
ctx              1713 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static int brcmf_pcie_buscore_reset(void *ctx, struct brcmf_chip *chip)
ctx              1715 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pciedev_info *devinfo = (struct brcmf_pciedev_info *)ctx;
ctx              1730 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c static void brcmf_pcie_buscore_activate(void *ctx, struct brcmf_chip *chip,
ctx              1733 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	struct brcmf_pciedev_info *devinfo = (struct brcmf_pciedev_info *)ctx;
ctx              3791 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c static int brcmf_sdio_buscoreprep(void *ctx)
ctx              3793 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c 	struct brcmf_sdio_dev *sdiodev = ctx;
ctx              3836 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c static void brcmf_sdio_buscore_activate(void *ctx, struct brcmf_chip *chip,
ctx              3839 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c 	struct brcmf_sdio_dev *sdiodev = ctx;
ctx              3853 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c static u32 brcmf_sdio_buscore_read32(void *ctx, u32 addr)
ctx              3855 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c 	struct brcmf_sdio_dev *sdiodev = ctx;
ctx              3880 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c static void brcmf_sdio_buscore_write32(void *ctx, u32 addr, u32 val)
ctx              3882 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c 	struct brcmf_sdio_dev *sdiodev = ctx;
ctx               140 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 				   struct iwl_rxon_context *ctx);
ctx               142 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwlagn_commit_rxon(struct iwl_priv *priv, struct iwl_rxon_context *ctx);
ctx               143 drivers/net/wireless/intel/iwlwifi/dvm/agn.h void iwlagn_set_rxon_chain(struct iwl_priv *priv, struct iwl_rxon_context *ctx);
ctx               150 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			struct iwl_rxon_context *ctx);
ctx               153 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			 struct iwl_rxon_context *ctx);
ctx               155 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			    struct iwl_rxon_context *ctx,
ctx               305 drivers/net/wireless/intel/iwlwifi/dvm/agn.h void iwl_restore_stations(struct iwl_priv *priv, struct iwl_rxon_context *ctx);
ctx               307 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			      struct iwl_rxon_context *ctx);
ctx               312 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwl_add_station_common(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               319 drivers/net/wireless/intel/iwlwifi/dvm/agn.h u8 iwl_prep_station(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               322 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwl_send_lq_cmd(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               325 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwl_sta_update_ht(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               329 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			    struct iwl_rxon_context *ctx,
ctx               341 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			       struct iwl_rxon_context *ctx);
ctx               342 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwlagn_add_bssid_station(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               345 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			       struct iwl_rxon_context *ctx,
ctx               348 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			    struct iwl_rxon_context *ctx,
ctx               351 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 				 struct iwl_rxon_context *ctx);
ctx               352 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwl_set_dynamic_key(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               355 drivers/net/wireless/intel/iwlwifi/dvm/agn.h int iwl_remove_dynamic_key(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               369 drivers/net/wireless/intel/iwlwifi/dvm/agn.h 			     struct iwl_rxon_context *ctx);
ctx               964 drivers/net/wireless/intel/iwlwifi/dvm/calib.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               991 drivers/net/wireless/intel/iwlwifi/dvm/calib.c 	rxon_band24 = !!(ctx->staging.flags & RXON_FLG_BAND_24G_MSK);
ctx               992 drivers/net/wireless/intel/iwlwifi/dvm/calib.c 	rxon_chnum = le16_to_cpu(ctx->staging.channel);
ctx               437 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 	struct iwl_rxon_context *ctx;
ctx               442 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 	for_each_context(priv, ctx) {
ctx               444 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 				 ctx->ctxid);
ctx               450 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 				ctx->qos_data.def_qos_parm.ac[i].cw_min,
ctx               451 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 				ctx->qos_data.def_qos_parm.ac[i].cw_max,
ctx               452 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 				ctx->qos_data.def_qos_parm.ac[i].aifsn,
ctx               453 drivers/net/wireless/intel/iwlwifi/dvm/debugfs.c 				ctx->qos_data.def_qos_parm.ac[i].edca_txop);
ctx               194 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	struct iwl_rxon_context *ctx;
ctx               210 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	struct iwl_rxon_context *ctx;
ctx               906 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	return vif_priv->ctx;
ctx               909 drivers/net/wireless/intel/iwlwifi/dvm/dev.h #define for_each_context(priv, ctx)				\
ctx               910 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	for (ctx = &priv->contexts[IWL_RXON_CTX_BSS];		\
ctx               911 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	     ctx < &priv->contexts[NUM_IWL_RXON_CTX]; ctx++)	\
ctx               912 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 		if (priv->valid_contexts & BIT(ctx->ctxid))
ctx               914 drivers/net/wireless/intel/iwlwifi/dvm/dev.h static inline int iwl_is_associated_ctx(struct iwl_rxon_context *ctx)
ctx               916 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	return (ctx->active.filter_flags & RXON_FILTER_ASSOC_MSK) ? 1 : 0;
ctx               927 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	struct iwl_rxon_context *ctx;
ctx               928 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 	for_each_context(priv, ctx)
ctx               929 drivers/net/wireless/intel/iwlwifi/dvm/dev.h 		if (iwl_is_associated_ctx(ctx))
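
dev.h defines the iteration primitive the iwlwifi dvm code uses throughout the entries below: for_each_context() walks the fixed contexts[] array but visits only entries whose bit is set in priv->valid_contexts, by hanging an if off the for body. A runnable toy version with stand-in types:

#include <stdio.h>

enum { CTX_BSS, CTX_PAN, NUM_CTX };

struct toy_ctx {
	int ctxid;
	int assoc;
};

struct toy_priv {
	unsigned int valid_contexts;	/* one bit per valid context */
	struct toy_ctx contexts[NUM_CTX];
};

/* Same shape as the dev.h macro: a for over the array with a
 * filtering if hung off its body. */
#define for_each_context(priv, ctx)				\
	for (ctx = &(priv)->contexts[CTX_BSS];			\
	     ctx < &(priv)->contexts[NUM_CTX]; ctx++)		\
		if ((priv)->valid_contexts & (1u << (ctx)->ctxid))

int main(void)
{
	struct toy_priv priv = {
		.valid_contexts = 1u << CTX_BSS,	/* PAN disabled */
		.contexts = { { CTX_BSS, 1 }, { CTX_PAN, 0 } },
	};
	struct toy_ctx *ctx;

	for_each_context(&priv, ctx)
		printf("visiting context %d (assoc=%d)\n",
		       ctx->ctxid, ctx->assoc);
	return 0;
}

The trailing if is why the macro body in dev.h ends without braces: the caller's statement becomes the if body. It also means an else written directly after a for_each_context() loop would bind to that hidden if, a classic hazard of this macro shape.
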
ctx               395 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               401 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	u16 beacon_interval = le16_to_cpu(ctx->timing.beacon_interval);
ctx               402 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	struct ieee80211_vif *vif = ctx->vif;
ctx               412 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 		      ctx->active.channel, ch);
ctx               414 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	cmd.rxon_flags = ctx->staging.flags;
ctx               415 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	cmd.rxon_filter_flags = ctx->staging.filter_flags;
ctx               556 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               562 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	u16 beacon_interval = le16_to_cpu(ctx->timing.beacon_interval);
ctx               563 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	struct ieee80211_vif *vif = ctx->vif;
ctx               580 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 		      ctx->active.channel, ch);
ctx               582 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	cmd->rxon_flags = ctx->staging.flags;
ctx               583 drivers/net/wireless/intel/iwlwifi/dvm/devices.c 	cmd->rxon_filter_flags = ctx->staging.filter_flags;
ctx               106 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		return iwlagn_add_bssid_station(priv, vif_priv->ctx,
ctx               315 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx, *found_ctx = NULL;
ctx               322 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		for_each_context(priv, ctx) {
ctx               323 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			if (ctx->vif && ctx->vif->type == NL80211_IFTYPE_AP &&
ctx               324 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			    iwl_is_associated_ctx(ctx)) {
ctx               337 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			ctx = priv->cur_rssi_ctx;
ctx               338 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			ieee80211_disable_rssi_reports(ctx->vif);
ctx               348 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	for_each_context(priv, ctx) {
ctx               349 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		if (ctx->vif && ctx->vif->type == NL80211_IFTYPE_STATION &&
ctx               350 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		    iwl_is_associated_ctx(ctx)) {
ctx               351 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			found_ctx = ctx;
ctx               370 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ctx = priv->cur_rssi_ctx;
ctx               371 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		if (ctx->vif)
ctx               372 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			ieee80211_disable_rssi_reports(ctx->vif);
ctx               395 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx;
ctx               447 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		for_each_context(priv, ctx) {
ctx               448 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			if (ctx->vif && ctx->vif->type == NL80211_IFTYPE_STATION)
ctx               449 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 				ieee80211_request_smps(ctx->vif, smps_request);
ctx               586 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               589 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	if (!ctx->vif || (ctx->vif->type != NL80211_IFTYPE_STATION)) {
ctx               594 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	ave_rssi = ieee80211_ave_rssi(ctx->vif);
ctx               778 drivers/net/wireless/intel/iwlwifi/dvm/lib.c void iwlagn_set_rxon_chain(struct iwl_priv *priv, struct iwl_rxon_context *ctx)
ctx               826 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	ctx->staging.rx_chain = cpu_to_le16(rx_chain);
ctx               829 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ctx->staging.rx_chain |= RXON_RX_CHAIN_MIMO_FORCE_MSK;
ctx               831 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ctx->staging.rx_chain &= ~RXON_RX_CHAIN_MIMO_FORCE_MSK;
ctx               834 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 			ctx->staging.rx_chain,
ctx               868 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx;
ctx               884 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx = data->ctx;
ctx               898 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	     !sta && !ctx->key_mapping_keys)
ctx               899 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ret = iwl_set_default_wep_key(priv, ctx, key);
ctx               901 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ret = iwl_set_dynamic_key(priv, ctx, key, sta);
ctx              1043 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx              1055 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		.ctx = ctx,
ctx              1056 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		.bssid = ctx->active.bssid_addr,
ctx              1113 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	memcpy(&rxon, &ctx->active, sizeof(rxon));
ctx              1132 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	memcpy(&ctx->staging, &rxon, sizeof(rxon));
ctx              1133 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 	ret = iwlagn_commit_rxon(priv, ctx);
ctx              1144 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ctx->key_mapping_keys = 0;
ctx              1152 drivers/net/wireless/intel/iwlwifi/dvm/lib.c 		ieee80211_iter_keys(priv->hw, ctx->vif,
ctx                88 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx;
ctx               133 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	for_each_context(priv, ctx) {
ctx               134 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		hw->wiphy->interface_modes |= ctx->interface_modes;
ctx               135 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		hw->wiphy->interface_modes |= ctx->exclusive_interface_modes;
ctx               226 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx;
ctx               236 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	for_each_context(priv, ctx) {
ctx               237 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ret = iwlagn_alloc_bcast_station(priv, ctx);
ctx               363 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               373 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (!ctx->vif || ctx->vif->type != NL80211_IFTYPE_STATION ||
ctx               374 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	    !iwl_is_associated_ctx(ctx)) {
ctx               427 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               458 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	vif = ctx->vif;
ctx               558 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	memset((void *)&ctx->active, 0, sizeof(ctx->active));
ctx               559 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwl_connection_init_rx_config(priv, ctx);
ctx               560 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwlagn_set_rxon_chain(priv, ctx);
ctx               607 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = vif_priv->ctx;
ctx               660 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			is_default_wep_key = !ctx->key_mapping_keys;
ctx               670 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			ret = iwl_set_default_wep_key(priv, vif_priv->ctx, key);
ctx               673 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ret = iwl_set_dynamic_key(priv, vif_priv->ctx, key, sta);
ctx               687 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			ret = iwl_remove_default_wep_key(priv, ctx, key);
ctx               689 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			ret = iwl_remove_dynamic_key(priv, ctx, key, sta);
ctx               796 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ret = iwl_add_station_common(priv, vif_priv->ctx, sta->addr,
ctx               905 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ret = iwl_sta_update_ht(priv, vif_priv->ctx, sta);
ctx               948 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               963 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (!iwl_is_associated_ctx(ctx))
ctx               970 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (le16_to_cpu(ctx->active.channel) == ch)
ctx               979 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.is_40mhz = false;
ctx               980 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.extension_chan_offset = IEEE80211_HT_PARAM_CHA_SEC_NONE;
ctx               983 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.extension_chan_offset = IEEE80211_HT_PARAM_CHA_SEC_BELOW;
ctx               984 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.is_40mhz = true;
ctx               987 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.extension_chan_offset = IEEE80211_HT_PARAM_CHA_SEC_ABOVE;
ctx               988 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->ht.is_40mhz = true;
ctx               992 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if ((le16_to_cpu(ctx->staging.channel) != ch))
ctx               993 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->staging.flags = 0;
ctx               995 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwl_set_rxon_channel(priv, channel, ctx);
ctx               997 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwl_set_flags_for_band(priv, ctx, channel->band, ctx->vif);
ctx              1008 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ieee80211_chswitch_done(ctx->vif, false);
ctx              1022 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx              1030 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (ctx->vif)
ctx              1031 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ieee80211_chswitch_done(ctx->vif, is_success);
ctx              1041 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx;
ctx              1062 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	for_each_context(priv, ctx) {
ctx              1063 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->staging.filter_flags &= ~filter_nand;
ctx              1064 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx->staging.filter_flags |= filter_or;
ctx              1165 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = vif_priv->ctx;
ctx              1168 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (WARN_ON(!ctx))
ctx              1187 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->qos_data.def_qos_parm.ac[q].cw_min =
ctx              1189 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->qos_data.def_qos_parm.ac[q].cw_max =
ctx              1191 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->qos_data.def_qos_parm.ac[q].aifsn = params->aifs;
ctx              1192 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->qos_data.def_qos_parm.ac[q].edca_txop =
ctx              1195 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->qos_data.def_qos_parm.ac[q].reserved1 = 0;
ctx              1210 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c static int iwl_set_mode(struct iwl_priv *priv, struct iwl_rxon_context *ctx)
ctx              1212 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwl_connection_init_rx_config(priv, ctx);
ctx              1214 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	iwlagn_set_rxon_chain(priv, ctx);
ctx              1216 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	return iwlagn_commit_rxon(priv, ctx);
ctx              1220 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			       struct iwl_rxon_context *ctx)
ctx              1222 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct ieee80211_vif *vif = ctx->vif;
ctx              1234 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->is_active = true;
ctx              1236 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	err = iwl_set_mode(priv, ctx);
ctx              1238 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		if (!ctx->always_active)
ctx              1239 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			ctx->is_active = false;
ctx              1255 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		vif->hw_queue[ac] = ctx->ac_to_queue[ac];
ctx              1258 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		vif->cab_queue = ctx->mcast_queue;
ctx              1270 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *tmp, *ctx = NULL;
ctx              1294 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 				ctx = tmp;
ctx              1311 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		ctx = tmp;
ctx              1315 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (!ctx) {
ctx              1320 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	vif_priv->ctx = ctx;
ctx              1321 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->vif = vif;
ctx              1337 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	err = iwl_setup_interface(priv, ctx);
ctx              1341 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->vif = NULL;
ctx              1354 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = iwl_rxon_ctx_from_vif(vif);
ctx              1364 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		iwl_set_mode(priv, ctx);
ctx              1365 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 		if (!ctx->always_active)
ctx              1366 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			ctx->is_active = false;
ctx              1384 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx = iwl_rxon_ctx_from_vif(vif);
ctx              1390 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	WARN_ON(ctx->vif != vif);
ctx              1391 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx->vif = NULL;
ctx              1406 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	struct iwl_rxon_context *ctx, *tmp;
ctx              1417 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	ctx = iwl_rxon_ctx_from_vif(vif);
ctx              1424 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (ctx->ctxid != IWL_RXON_CTX_BSS) {
ctx              1429 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (!ctx->vif || !iwl_is_ready_rf(priv)) {
ctx              1439 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	interface_modes = ctx->interface_modes | ctx->exclusive_interface_modes;
ctx              1445 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	if (ctx->exclusive_interface_modes & BIT(newtype)) {
ctx              1447 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 			if (ctx == tmp)
ctx              1466 drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c 	err = iwl_setup_interface(priv, ctx);
ctx               150 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	struct iwl_rxon_context *ctx;
ctx               152 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	for_each_context(priv, ctx) {
ctx               153 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		iwlagn_set_rxon_chain(priv, ctx);
ctx               154 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		if (ctx->active.rx_chain != ctx->staging.rx_chain)
ctx               155 drivers/net/wireless/intel/iwlwifi/dvm/main.c 			iwlagn_commit_rxon(priv, ctx);
ctx               331 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	struct iwl_rxon_context *ctx;
ctx               350 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	for_each_context(priv, ctx) {
ctx               351 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		iwlagn_set_rxon_chain(priv, ctx);
ctx               352 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		iwlagn_commit_rxon(priv, ctx);
ctx               774 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               833 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	if (iwl_is_associated_ctx(ctx) && !priv->wowlan) {
ctx               835 drivers/net/wireless/intel/iwlwifi/dvm/main.c 				(struct iwl_rxon_cmd *)&ctx->active;
ctx               837 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		ctx->staging.filter_flags |= RXON_FILTER_ASSOC_MSK;
ctx               845 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		iwlagn_set_rxon_chain(priv, ctx);
ctx               856 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	ret = iwlagn_commit_rxon(priv, ctx);
ctx               879 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	struct iwl_rxon_context *ctx;
ctx               887 drivers/net/wireless/intel/iwlwifi/dvm/main.c 	for_each_context(priv, ctx) {
ctx               895 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		memset(ctx->wep_keys, 0, sizeof(ctx->wep_keys));
ctx               896 drivers/net/wireless/intel/iwlwifi/dvm/main.c 		ctx->key_mapping_keys = 0;
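
main.c shows the staging/active discipline the rest of the dvm code follows: settings are edited in ctx->staging, compared against ctx->active (the configuration the device is actually running), pushed with iwlagn_commit_rxon() only when the two differ, and then mirrored into active, as the rxon.c memcpy lines further down confirm. A reduced sketch of that pattern, with a toy config struct and a stand-in commit function:

#include <stdio.h>
#include <string.h>
#include <stdint.h>

struct rxon_cfg {
	uint16_t channel;
	uint16_t rx_chain;
};

struct rxon_ctx {
	struct rxon_cfg active;		/* what the device is running */
	struct rxon_cfg staging;	/* what we want it to run */
};

/* Stand-in for iwlagn_commit_rxon(): "send" staging, then latch it. */
static int commit_rxon(struct rxon_ctx *ctx)
{
	printf("commit: channel=%u rx_chain=0x%04x\n",
	       ctx->staging.channel, ctx->staging.rx_chain);
	memcpy(&ctx->active, &ctx->staging, sizeof(ctx->active));
	return 0;
}

int main(void)
{
	struct rxon_ctx ctx;

	memset(&ctx, 0, sizeof(ctx));
	ctx.staging.rx_chain = 0x0007;	/* edit the staging copy only */
	if (memcmp(&ctx.active, &ctx.staging, sizeof(ctx.staging)))
		commit_rxon(&ctx);	/* push only when something changed */
	return 0;
}
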
ctx               331 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx               343 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		iwl_send_lq_cmd(lq_sta->drv, ctx, &lq_sta->lq, CMD_ASYNC,
ctx               843 drivers/net/wireless/intel/iwlwifi/dvm/rs.c static void rs_bt_update_lq(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               867 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		iwl_send_lq_cmd(priv, ctx, &lq_sta->lq, CMD_ASYNC, false);
ctx               894 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx               961 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 			iwl_send_lq_cmd(priv, ctx, &lq_sta->lq, CMD_ASYNC, false);
ctx              1060 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		rs_bt_update_lq(priv, ctx, lq_sta);
ctx              1254 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx              1274 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if (iwl_is_ht40_tx_allowed(priv, ctx, sta))
ctx              1309 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx              1329 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if (iwl_is_ht40_tx_allowed(priv, ctx, sta))
ctx              1365 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx              1378 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if (iwl_is_ht40_tx_allowed(priv, ctx, sta))
ctx              2172 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 			       struct iwl_rxon_context *ctx,
ctx              2182 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	iwl_send_lq_cmd(priv, ctx, &lq_sta->lq, CMD_ASYNC, false);
ctx              2219 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx = sta_priv->ctx;
ctx              2295 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 			rs_update_rate_tbl(priv, ctx, lq_sta, tbl,
ctx              2537 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		rs_update_rate_tbl(priv, ctx, lq_sta, tbl, index, is_green);
ctx              2578 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 			iwl_send_lq_cmd(priv, ctx, &lq_sta->lq, CMD_ASYNC, false);
ctx              2650 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_rxon_context *ctx;
ctx              2656 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	ctx = sta_priv->ctx;
ctx              2688 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	iwl_send_lq_cmd(priv, ctx, &lq_sta->lq, 0, true);
ctx                57 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx                58 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 	struct iwl_rxon_cmd *rxon = (void *)&ctx->active;
ctx                65 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 		ctx->staging.channel = csa->channel;
ctx               618 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 	struct iwl_rxon_context *ctx;
ctx               666 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 		for_each_context(priv, ctx) {
ctx               668 drivers/net/wireless/intel/iwlwifi/dvm/rx.c 					      ctx->active.bssid_addr))
ctx                24 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				   struct iwl_rxon_context *ctx)
ctx                26 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	memset(&ctx->staging, 0, sizeof(ctx->staging));
ctx                28 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (!ctx->vif) {
ctx                29 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.dev_type = ctx->unused_devtype;
ctx                31 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	switch (ctx->vif->type) {
ctx                33 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.dev_type = ctx->ap_devtype;
ctx                37 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.dev_type = ctx->station_devtype;
ctx                38 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.filter_flags = RXON_FILTER_ACCEPT_GRP_MSK;
ctx                42 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.dev_type = ctx->ibss_devtype;
ctx                43 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags = RXON_FLG_SHORT_PREAMBLE_MSK;
ctx                44 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.filter_flags = RXON_FILTER_BCON_AWARE_MSK |
ctx                49 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.dev_type = RXON_DEV_TYPE_SNIFFER;
ctx                54 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->vif->type);
ctx                62 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_SHORT_PREAMBLE_MSK;
ctx                64 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SHORT_PREAMBLE_MSK;
ctx                67 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.channel =
ctx                71 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_set_flags_for_band(priv, ctx, priv->band, ctx->vif);
ctx                74 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.flags &= ~(RXON_FLG_CHANNEL_MODE_MIXED |
ctx                76 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->vif)
ctx                77 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		memcpy(ctx->staging.node_addr, ctx->vif->addr, ETH_ALEN);
ctx                79 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.ofdm_ht_single_stream_basic_rates = 0xff;
ctx                80 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.ofdm_ht_dual_stream_basic_rates = 0xff;
ctx                81 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.ofdm_ht_triple_stream_basic_rates = 0xff;
ctx                85 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			      struct iwl_rxon_context *ctx,
ctx                92 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->rxon_cmd,
ctx               105 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			      struct iwl_rxon_context *ctx,
ctx               122 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->rxon_cmd,
ctx               142 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			      struct iwl_rxon_context *ctx,
ctx               149 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->rxon_cmd, 0,
ctx               158 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			      struct iwl_rxon_context *ctx)
ctx               162 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (!ctx->is_active)
ctx               165 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->qos_data.def_qos_parm.qos_flags = 0;
ctx               167 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->qos_data.qos_active)
ctx               168 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->qos_data.def_qos_parm.qos_flags |=
ctx               171 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->ht.enabled)
ctx               172 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->qos_data.def_qos_parm.qos_flags |= QOS_PARAM_FLG_TGN_MSK;
ctx               175 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		      ctx->qos_data.qos_active,
ctx               176 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		      ctx->qos_data.def_qos_parm.qos_flags);
ctx               178 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->qos_cmd, 0,
ctx               180 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			       &ctx->qos_data.def_qos_parm);
ctx               198 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				  struct iwl_rxon_context *ctx)
ctx               202 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	const struct iwl_rxon_cmd *rxon1 = &ctx->staging;
ctx               203 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	const struct iwl_rxon_cmd *rxon2 = &ctx->active;
ctx               221 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.flags = ctx->staging.flags;
ctx               222 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.filter_flags = ctx->staging.filter_flags;
ctx               223 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.ofdm_basic_rates = ctx->staging.ofdm_basic_rates;
ctx               224 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.cck_basic_rates = ctx->staging.cck_basic_rates;
ctx               229 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	    ctx->staging.ofdm_ht_single_stream_basic_rates;
ctx               231 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	    ctx->staging.ofdm_ht_dual_stream_basic_rates;
ctx               232 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.rx_chain_select_flags = ctx->staging.rx_chain;
ctx               234 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		 ctx->staging.ofdm_ht_triple_stream_basic_rates;
ctx               235 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon_assoc.acquisition_data = ctx->staging.acquisition_data;
ctx               237 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->rxon_assoc_cmd,
ctx               278 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				struct iwl_rxon_context *ctx)
ctx               284 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct ieee80211_vif *vif = ctx->vif;
ctx               290 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	memset(&ctx->timing, 0, sizeof(struct iwl_rxon_time_cmd));
ctx               292 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->timing.timestamp = cpu_to_le64(priv->timestamp);
ctx               293 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->timing.listen_interval = cpu_to_le16(conf->listen_interval);
ctx               301 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->timing.atim_window = 0;
ctx               303 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->ctxid == IWL_RXON_CTX_PAN &&
ctx               304 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	    (!ctx->vif || ctx->vif->type != NL80211_IFTYPE_STATION) &&
ctx               308 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->timing.beacon_interval =
ctx               310 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		beacon_int = le16_to_cpu(ctx->timing.beacon_interval);
ctx               311 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	} else if (ctx->ctxid == IWL_RXON_CTX_BSS &&
ctx               315 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		   (!iwl_is_associated_ctx(ctx) || !ctx->vif ||
ctx               316 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		    !ctx->vif->bss_conf.beacon_int)) {
ctx               317 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->timing.beacon_interval =
ctx               319 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		beacon_int = le16_to_cpu(ctx->timing.beacon_interval);
ctx               323 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->timing.beacon_interval = cpu_to_le16(beacon_int);
ctx               326 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->beacon_int = beacon_int;
ctx               331 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->timing.beacon_init_val = cpu_to_le32(interval_tm - rem);
ctx               333 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->timing.dtim_period = vif ? (vif->bss_conf.dtim_period ?: 1) : 1;
ctx               337 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			le16_to_cpu(ctx->timing.beacon_interval),
ctx               338 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			le32_to_cpu(ctx->timing.beacon_init_val),
ctx               339 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			le16_to_cpu(ctx->timing.atim_window));
ctx               341 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	return iwl_dvm_send_cmd_pdu(priv, ctx->rxon_timing_cmd,
ctx               342 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				0, sizeof(ctx->timing), &ctx->timing);
ctx               346 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			       struct iwl_rxon_context *ctx)
ctx               349 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *active = (void *)&ctx->active;
ctx               351 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->ctxid == IWL_RXON_CTX_BSS) {
ctx               352 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ret = iwlagn_disable_bss(priv, ctx, &ctx->staging);
ctx               354 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ret = iwlagn_disable_pan(priv, ctx, &ctx->staging);
ctx               357 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		if (ctx->vif) {
ctx               358 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ret = iwl_send_rxon_timing(priv, ctx);
ctx               363 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ret = iwlagn_disconn_pan(priv, ctx, &ctx->staging);
ctx               373 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_clear_ucode_stations(priv, ctx);
ctx               375 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_update_bcast_station(priv, ctx);
ctx               376 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_restore_stations(priv, ctx);
ctx               377 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_restore_default_wep_keys(priv, ctx);
ctx               383 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	memcpy(active, &ctx->staging, sizeof(*active));
ctx               392 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               426 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		memcmp(&ctx->active, &ctx->staging, sizeof(ctx->staging));
ctx               446 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			       struct iwl_rxon_context *ctx)
ctx               449 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *active = (void *)&ctx->active;
ctx               452 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->ctxid == IWL_RXON_CTX_BSS) {
ctx               453 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ret = iwl_send_rxon_timing(priv, ctx);
ctx               460 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwlagn_update_qos(priv, ctx);
ctx               467 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->vif && (ctx->vif->type == NL80211_IFTYPE_AP)) {
ctx               468 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ret = iwlagn_update_beacon(priv, ctx->vif);
ctx               484 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_dvm_send_cmd_pdu(priv, ctx->rxon_cmd, 0,
ctx               485 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		      sizeof(struct iwl_rxon_cmd), &ctx->staging);
ctx               490 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	memcpy(active, &ctx->staging, sizeof(*active));
ctx               493 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->vif && (ctx->vif->type == NL80211_IFTYPE_ADHOC))
ctx               494 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		if (iwlagn_update_beacon(priv, ctx->vif))
ctx               604 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			     struct iwl_rxon_context *ctx)
ctx               606 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *rxon = &ctx->staging;
ctx               608 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (!ctx->ht.enabled) {
ctx               619 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	rxon->flags |= cpu_to_le32(ctx->ht.protection <<
ctx               627 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (iwl_is_ht40_tx_allowed(priv, ctx, NULL)) {
ctx               629 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		if (ctx->ht.protection ==
ctx               636 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			switch (ctx->ht.extension_chan_offset) {
ctx               651 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			switch (ctx->ht.extension_chan_offset) {
ctx               676 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwlagn_set_rxon_chain(priv, ctx);
ctx               680 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			le32_to_cpu(rxon->flags), ctx->ht.protection,
ctx               681 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->ht.extension_chan_offset);
ctx               686 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx;
ctx               688 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	for_each_context(priv, ctx)
ctx               689 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		_iwl_set_rxon_ht(priv, ht_conf, ctx);
ctx               700 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			 struct iwl_rxon_context *ctx)
ctx               705 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if ((le16_to_cpu(ctx->staging.channel) == channel) &&
ctx               709 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.channel = cpu_to_le16(channel);
ctx               711 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_BAND_24G_MSK;
ctx               713 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_BAND_24G_MSK;
ctx               722 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			    struct iwl_rxon_context *ctx,
ctx               727 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &=
ctx               730 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SHORT_SLOT_MSK;
ctx               734 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.flags |= RXON_FLG_SHORT_SLOT_MSK;
ctx               736 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.flags &= ~RXON_FLG_SHORT_SLOT_MSK;
ctx               738 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_BAND_24G_MSK;
ctx               739 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_AUTO_DETECT_MSK;
ctx               740 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_CCK_MSK;
ctx               745 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				  struct iwl_rxon_context *ctx, int hw_decrypt)
ctx               747 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *rxon = &ctx->staging;
ctx               758 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			      struct iwl_rxon_context *ctx)
ctx               760 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *rxon = &ctx->staging;
ctx               838 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				  struct iwl_rxon_context *ctx)
ctx               840 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	const struct iwl_rxon_cmd *staging = &ctx->staging;
ctx               841 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	const struct iwl_rxon_cmd *active = &ctx->active;
ctx               858 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	CHK(!iwl_is_associated_ctx(ctx));
ctx               896 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx = &priv->contexts[ctxid];
ctx               897 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *rxon = &ctx->staging;
ctx               920 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				 struct iwl_rxon_context *ctx)
ctx               927 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->vif) {
ctx               929 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		unsigned long basic = ctx->vif->bss_conf.basic_rates;
ctx              1006 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.cck_basic_rates = cck;
ctx              1007 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.ofdm_basic_rates = ofdm;
ctx              1028 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c int iwlagn_commit_rxon(struct iwl_priv *priv, struct iwl_rxon_context *ctx)
ctx              1031 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_cmd *active = (void *)&ctx->active;
ctx              1032 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	bool new_assoc = !!(ctx->staging.filter_flags & RXON_FILTER_ASSOC_MSK);
ctx              1043 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (!ctx->is_active)
ctx              1047 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.flags |= RXON_FLG_TSF2HOST_MSK;
ctx              1050 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_calc_basic_rates(priv, ctx);
ctx              1057 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SELF_CTS_EN;
ctx              1059 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if ((ctx->vif && ctx->vif->bss_conf.use_short_slot) ||
ctx              1060 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	    !(ctx->staging.flags & RXON_FLG_BAND_24G_MSK))
ctx              1061 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SHORT_SLOT_MSK;
ctx              1063 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_SHORT_SLOT_MSK;
ctx              1065 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_print_rx_config_cmd(priv, ctx->ctxid);
ctx              1066 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwl_check_rxon_cmd(priv, ctx);
ctx              1077 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	    (priv->switch_channel != ctx->staging.channel)) {
ctx              1088 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (!iwl_full_rxon_required(priv, ctx)) {
ctx              1089 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ret = iwlagn_send_rxon_assoc(priv, ctx);
ctx              1095 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		memcpy(active, &ctx->staging, sizeof(*active));
ctx              1108 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwl_set_rxon_hwcrypto(priv, ctx, !iwlwifi_mod_params.swcrypto);
ctx              1116 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		       le16_to_cpu(ctx->staging.channel),
ctx              1117 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		       ctx->staging.bssid_addr);
ctx              1125 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ret = iwlagn_rxon_disconn(priv, ctx);
ctx              1134 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		return iwlagn_rxon_connect(priv, ctx);
ctx              1140 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			struct iwl_rxon_context *ctx)
ctx              1143 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.extension_chan_offset =
ctx              1145 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.is_40mhz = true;
ctx              1147 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.extension_chan_offset =
ctx              1149 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.is_40mhz = true;
ctx              1151 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.extension_chan_offset =
ctx              1153 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.is_40mhz = false;
ctx              1160 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx;
ctx              1191 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		for_each_context(priv, ctx)
ctx              1192 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			iwlagn_set_rxon_chain(priv, ctx);
ctx              1196 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		for_each_context(priv, ctx) {
ctx              1198 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			if (ctx->ht.enabled != conf_is_ht(conf))
ctx              1199 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				ctx->ht.enabled = conf_is_ht(conf);
ctx              1201 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			if (ctx->ht.enabled) {
ctx              1204 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				if (!ctx->ht.is_40mhz ||
ctx              1205 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 						!iwl_is_associated_ctx(ctx))
ctx              1206 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 					iwlagn_config_ht40(conf, ctx);
ctx              1208 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				ctx->ht.is_40mhz = false;
ctx              1214 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->ht.protection = IEEE80211_HT_OP_MODE_PROTECTION_NONE;
ctx              1219 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			if (le16_to_cpu(ctx->staging.channel) !=
ctx              1221 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				ctx->staging.flags = 0;
ctx              1223 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			iwl_set_rxon_channel(priv, channel, ctx);
ctx              1226 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			iwl_set_flags_for_band(priv, ctx, channel->band,
ctx              1227 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 					       ctx->vif);
ctx              1247 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	for_each_context(priv, ctx) {
ctx              1248 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		if (!memcmp(&ctx->staging, &ctx->active, sizeof(ctx->staging)))
ctx              1250 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		iwlagn_commit_rxon(priv, ctx);
ctx              1260 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 				       struct iwl_rxon_context *ctx,
ctx              1263 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct ieee80211_vif *vif = ctx->vif;
ctx              1336 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->ht_need_multiple_chains = need_multiple;
ctx              1394 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx = iwl_rxon_ctx_from_vif(vif);
ctx              1414 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (unlikely(!ctx->vif)) {
ctx              1424 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->qos_data.qos_active = bss_conf->qos;
ctx              1425 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		iwlagn_update_qos(priv, ctx);
ctx              1428 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	ctx->staging.assoc_id = cpu_to_le16(vif->bss_conf.aid);
ctx              1430 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SHORT_PREAMBLE_MSK;
ctx              1432 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_SHORT_PREAMBLE_MSK;
ctx              1437 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags |= RXON_FILTER_ASSOC_MSK;
ctx              1439 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags &= ~RXON_FILTER_ASSOC_MSK;
ctx              1441 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			if (ctx->ctxid == IWL_RXON_CTX_BSS)
ctx              1448 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (ctx->ht.enabled) {
ctx              1449 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.protection = bss_conf->ht_operation_mode &
ctx              1451 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->ht.non_gf_sta_present = !!(bss_conf->ht_operation_mode &
ctx              1453 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		iwlagn_check_needed_chains(priv, ctx, bss_conf);
ctx              1457 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	iwlagn_set_rxon_chain(priv, ctx);
ctx              1460 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_TGG_PROTECT_MSK;
ctx              1462 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_TGG_PROTECT_MSK;
ctx              1465 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags |= RXON_FLG_SELF_CTS_EN;
ctx              1467 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		ctx->staging.flags &= ~RXON_FLG_SELF_CTS_EN;
ctx              1469 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	memcpy(ctx->staging.bssid_addr, bss_conf->bssid, ETH_ALEN);
ctx              1474 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags |= RXON_FILTER_ASSOC_MSK;
ctx              1475 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			priv->beacon_ctx = ctx;
ctx              1477 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags &= ~RXON_FILTER_ASSOC_MSK;
ctx              1492 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags |= RXON_FILTER_BCON_AWARE_MSK;
ctx              1494 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			ctx->staging.filter_flags &=
ctx              1498 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (force || memcmp(&ctx->staging, &ctx->active, sizeof(ctx->staging)))
ctx              1499 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		iwlagn_commit_rxon(priv, ctx);
ctx              1524 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	if (changes & BSS_CHANGED_BEACON && priv->beacon_ctx == ctx) {
ctx              1534 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	struct iwl_rxon_context *ctx;
ctx              1547 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 	for_each_context(priv, ctx)
ctx              1548 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 		if (memcmp(&ctx->staging, &ctx->active, sizeof(ctx->staging)))
ctx              1549 drivers/net/wireless/intel/iwlwifi/dvm/rxon.c 			iwlagn_commit_rxon(priv, ctx);
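
A note on the dvm/rxon.c hits above: the driver edits ctx->staging freely and pushes a new RXON to the firmware only when staging diverges from ctx->active. A minimal sketch of that idiom, assuming the iwl_rxon_context layout implied by the lines above (the helper name commit_if_changed is hypothetical):

	static void commit_if_changed(struct iwl_priv *priv,
				      struct iwl_rxon_context *ctx)
	{
		/* callbacks mutate the staging copy in place ... */
		ctx->staging.flags |= RXON_FLG_SHORT_SLOT_MSK;

		/* ... and a single commit point diffs staging against active */
		if (memcmp(&ctx->staging, &ctx->active, sizeof(ctx->staging)))
			iwlagn_commit_rxon(priv, ctx);
	}
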
ctx               364 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 	struct iwl_rxon_context *ctx;
ctx               379 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 	for_each_context(priv, ctx) {
ctx               380 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 		switch (ctx->staging.dev_type) {
ctx               387 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 			if (!iwl_is_associated_ctx(ctx))
ctx               399 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 		limits[n_active++] = ctx->beacon_int ?: IWL_PASSIVE_DWELL_BASE;
ctx               435 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 	struct iwl_rxon_context *ctx;
ctx               441 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 		for_each_context(priv, ctx) {
ctx               443 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 				le16_to_cpu(ctx->staging.channel);
ctx               622 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               649 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 		ctx = iwl_rxon_ctx_from_vif(vif);
ctx               736 drivers/net/wireless/intel/iwlwifi/dvm/scan.c 	scan->tx_cmd.sta_id = ctx->bcast_sta_id;
ctx               132 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			    struct iwl_rxon_context *ctx,
ctx               135 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	if (!ctx->ht.enabled || !ctx->ht.is_40mhz)
ctx               152 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				  struct iwl_rxon_context *ctx,
ctx               194 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	if (iwl_is_ht40_tx_allowed(priv, ctx, sta))
ctx               198 drivers/net/wireless/intel/iwlwifi/dvm/sta.c int iwl_sta_update_ht(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               208 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	iwl_sta_calc_ht_flags(priv, sta, ctx, &flags, &mask);
ctx               226 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				   struct iwl_rxon_context *ctx)
ctx               230 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	iwl_sta_calc_ht_flags(priv, sta, ctx, &flags, &mask);
ctx               242 drivers/net/wireless/intel/iwlwifi/dvm/sta.c u8 iwl_prep_station(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               250 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		sta_id = ctx->ap_sta_id;
ctx               252 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		sta_id = ctx->bcast_sta_id;
ctx               303 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	station->sta.station_flags = ctx->station_flags;
ctx               304 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	station->ctxid = ctx->ctxid;
ctx               310 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		sta_priv->ctx = ctx;
ctx               318 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	iwl_set_ht_add_station(priv, sta_id, sta, ctx);
ctx               329 drivers/net/wireless/intel/iwlwifi/dvm/sta.c int iwl_add_station_common(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               339 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	sta_id = iwl_prep_station(priv, ctx, addr, is_ap, sta);
ctx               554 drivers/net/wireless/intel/iwlwifi/dvm/sta.c static void iwl_sta_fill_lq(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               569 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	else if (ctx && ctx->vif && ctx->vif->p2p)
ctx               613 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			      struct iwl_rxon_context *ctx)
ctx               622 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		if (ctx && ctx->ctxid != priv->stations[i].ctxid)
ctx               647 drivers/net/wireless/intel/iwlwifi/dvm/sta.c void iwl_restore_stations(struct iwl_priv *priv, struct iwl_rxon_context *ctx)
ctx               666 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		if (ctx->ctxid != priv->stations[i].ctxid)
ctx               685 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 					iwl_sta_fill_lq(priv, ctx, i, &lq);
ctx               710 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				iwl_send_lq_cmd(priv, ctx, &lq, 0, true);
ctx               788 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			      struct iwl_rxon_context *ctx,
ctx               793 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	if (ctx->ht.enabled)
ctx               797 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		       ctx->active.channel);
ctx               820 drivers/net/wireless/intel/iwlwifi/dvm/sta.c int iwl_send_lq_cmd(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               846 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	if (is_lq_table_valid(priv, ctx, lq))
ctx               867 drivers/net/wireless/intel/iwlwifi/dvm/sta.c iwl_sta_alloc_lq(struct iwl_priv *priv, struct iwl_rxon_context *ctx,
ctx               878 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	iwl_sta_fill_lq(priv, ctx, sta_id, link_cmd);
ctx               889 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			     struct iwl_rxon_context *ctx,
ctx               899 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ret = iwl_add_station_common(priv, ctx, addr, 0, NULL, &sta_id);
ctx               913 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	link_cmd = iwl_sta_alloc_lq(priv, ctx, sta_id);
ctx               921 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ret = iwl_send_lq_cmd(priv, ctx, link_cmd, 0, true);
ctx               941 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				      struct iwl_rxon_context *ctx,
ctx               950 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		.id = ctx->wep_key_cmd,
ctx               961 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		if (ctx->wep_keys[i].key_size) {
ctx               968 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		wep_cmd->key[i].key_size = ctx->wep_keys[i].key_size;
ctx               969 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		memcpy(&wep_cmd->key[i].key[3], ctx->wep_keys[i].key,
ctx               970 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				ctx->wep_keys[i].key_size);
ctx               987 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 				 struct iwl_rxon_context *ctx)
ctx               991 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	return iwl_send_static_wepkey_cmd(priv, ctx, false);
ctx               995 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			       struct iwl_rxon_context *ctx,
ctx              1005 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	memset(&ctx->wep_keys[keyconf->keyidx], 0, sizeof(ctx->wep_keys[0]));
ctx              1012 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ret = iwl_send_static_wepkey_cmd(priv, ctx, 1);
ctx              1020 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			    struct iwl_rxon_context *ctx,
ctx              1036 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ctx->wep_keys[keyconf->keyidx].key_size = keyconf->keylen;
ctx              1037 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	memcpy(&ctx->wep_keys[keyconf->keyidx].key, &keyconf->key,
ctx              1040 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ret = iwl_send_static_wepkey_cmd(priv, ctx, false);
ctx              1076 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	if (vif->type == NL80211_IFTYPE_STATION && vif_priv->ctx)
ctx              1077 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		return vif_priv->ctx->ap_sta_id;
ctx              1156 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			   struct iwl_rxon_context *ctx,
ctx              1161 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	u8 sta_id = iwlagn_key_sta_id(priv, ctx->vif, sta);
ctx              1179 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ctx->key_mapping_keys--;
ctx              1204 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			struct iwl_rxon_context *ctx,
ctx              1211 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	u8 sta_id = iwlagn_key_sta_id(priv, ctx->vif, sta);
ctx              1223 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	ctx->key_mapping_keys++;
ctx              1230 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			addr = ctx->active.bssid_addr;
ctx              1250 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		ctx->key_mapping_keys--;
ctx              1269 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			       struct iwl_rxon_context *ctx)
ctx              1275 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	sta_id = iwl_prep_station(priv, ctx, iwl_bcast_addr, false, NULL);
ctx              1287 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	link_cmd = iwl_sta_alloc_lq(priv, ctx, sta_id);
ctx              1308 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 			     struct iwl_rxon_context *ctx)
ctx              1311 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	u8 sta_id = ctx->bcast_sta_id;
ctx              1313 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	link_cmd = iwl_sta_alloc_lq(priv, ctx, sta_id);
ctx              1332 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	struct iwl_rxon_context *ctx;
ctx              1335 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 	for_each_context(priv, ctx) {
ctx              1336 drivers/net/wireless/intel/iwlwifi/dvm/sta.c 		ret = iwl_update_bcast_station(priv, ctx);
ctx               404 drivers/net/wireless/intel/iwlwifi/dvm/tt.c 				struct iwl_rxon_context *ctx;
ctx               406 drivers/net/wireless/intel/iwlwifi/dvm/tt.c 				for_each_context(priv, ctx) {
ctx               409 drivers/net/wireless/intel/iwlwifi/dvm/tt.c 					rxon = &ctx->staging;
ctx               269 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	struct iwl_rxon_context *ctx = &priv->contexts[IWL_RXON_CTX_BSS];
ctx               280 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 		ctx = iwl_rxon_ctx_from_vif(info->control.vif);
ctx               314 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 		sta_id = ctx->bcast_sta_id;
ctx               317 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 		sta_id = iwl_sta_id_or_broadcast(ctx, sta);
ctx               369 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	info->driver_data[0] = ctx;
ctx               580 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	struct iwl_rxon_context *ctx = iwl_rxon_ctx_from_vif(vif);
ctx               600 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	txq_id = iwlagn_alloc_agg_txq(priv, ctx->ac_to_queue[tid_to_ac[tid]]);
ctx               685 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	struct iwl_rxon_context *ctx = iwl_rxon_ctx_from_vif(vif);
ctx               697 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	fifo = ctx->ac_to_fifo[tid_to_ac[tid]];
ctx               741 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	return iwl_send_lq_cmd(priv, ctx,
ctx               748 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	enum iwl_rxon_context_id ctx;
ctx               755 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	ctx = priv->stations[sta_id].ctxid;
ctx               756 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	vif = priv->contexts[ctx].vif;
ctx               788 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 				     struct iwl_rxon_context *ctx,
ctx               795 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	sta = ieee80211_find_sta(ctx->vif, addr1);
ctx              1129 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 	struct iwl_rxon_context *ctx;
ctx              1189 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 			ctx = info->driver_data[0];
ctx              1196 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 			    ctx->vif &&
ctx              1197 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 			    ctx->vif->type == NL80211_IFTYPE_STATION) {
ctx              1224 drivers/net/wireless/intel/iwlwifi/dvm/tx.c 				iwlagn_non_agg_tx_status(priv, ctx, hdr->addr1);
ctx                71 drivers/net/wireless/intel/iwlwifi/fw/runtime.h 	int (*dump_start)(void *ctx);
ctx                72 drivers/net/wireless/intel/iwlwifi/fw/runtime.h 	void (*dump_end)(void *ctx);
ctx                73 drivers/net/wireless/intel/iwlwifi/fw/runtime.h 	bool (*fw_running)(void *ctx);
ctx                74 drivers/net/wireless/intel/iwlwifi/fw/runtime.h 	int (*send_hcmd)(void *ctx, struct iwl_host_cmd *host_cmd);
ctx                75 drivers/net/wireless/intel/iwlwifi/fw/runtime.h 	bool (*d3_debug_enable)(void *ctx);
ctx               476 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	struct ieee80211_chanctx_conf *ctx;
ctx               493 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	ctx = rcu_dereference(vif->chanctx_conf);
ctx               494 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	if (WARN_ON(!ctx)) {
ctx               498 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	chandef = ctx->def;
ctx               499 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	chains_static = ctx->rx_chains_static;
ctx               500 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	chains_dynamic = ctx->rx_chains_dynamic;
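
The mvm/d3.c lines above read the channel context under RCU protection; a minimal sketch of that access pattern, with the error path simplified:

	rcu_read_lock();
	ctx = rcu_dereference(vif->chanctx_conf);
	if (WARN_ON(!ctx)) {
		rcu_read_unlock();
		return;
	}
	chandef = ctx->def;			/* copy out while protected */
	chains_static = ctx->rx_chains_static;
	rcu_read_unlock();
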
ctx               159 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 	struct ieee80211_chanctx_conf ctx, *pctx;
ctx               182 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 	ctx = *pctx;
ctx               187 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 	ret = iwl_mvm_phy_ctxt_changed(mvm, phy_ctxt, &ctx.def,
ctx               188 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 				       ctx.rx_chains_static,
ctx               189 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 				       ctx.rx_chains_dynamic);
ctx               193 drivers/net/wireless/intel/iwlwifi/mvm/ftm-responder.c 	ret = iwl_mvm_ftm_responder_cmd(mvm, vif, &ctx.def);
ctx              3863 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	struct ieee80211_chanctx_conf *ctx;
ctx              3871 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	if (rcu_access_pointer(vif->chanctx_conf) == data->ctx &&
ctx              3877 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 					     struct ieee80211_chanctx_conf *ctx)
ctx              3881 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 		.ctx = ctx,
ctx              3892 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				 struct ieee80211_chanctx_conf *ctx)
ctx              3894 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	u16 *phy_ctxt_id = (u16 *)ctx->drv_priv;
ctx              3896 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	bool responder = iwl_mvm_is_ftm_responder_chanctx(mvm, ctx);
ctx              3897 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	struct cfg80211_chan_def *def = responder ? &ctx->def : &ctx->min_def;
ctx              3911 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				       ctx->rx_chains_static,
ctx              3912 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				       ctx->rx_chains_dynamic);
ctx              3925 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 			       struct ieee80211_chanctx_conf *ctx)
ctx              3931 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	ret = __iwl_mvm_add_chanctx(mvm, ctx);
ctx              3938 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				     struct ieee80211_chanctx_conf *ctx)
ctx              3940 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	u16 *phy_ctxt_id = (u16 *)ctx->drv_priv;
ctx              3949 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				   struct ieee80211_chanctx_conf *ctx)
ctx              3954 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	__iwl_mvm_remove_chanctx(mvm, ctx);
ctx              3959 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				   struct ieee80211_chanctx_conf *ctx,
ctx              3963 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	u16 *phy_ctxt_id = (u16 *)ctx->drv_priv;
ctx              3965 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	bool responder = iwl_mvm_is_ftm_responder_chanctx(mvm, ctx);
ctx              3966 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	struct cfg80211_chan_def *def = responder ? &ctx->def : &ctx->min_def;
ctx              3992 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				 ctx->rx_chains_static,
ctx              3993 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				 ctx->rx_chains_dynamic);
ctx              4001 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 					struct ieee80211_chanctx_conf *ctx,
ctx              4004 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	u16 *phy_ctxt_id = (u16 *)ctx->drv_priv;
ctx              4102 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 				      struct ieee80211_chanctx_conf *ctx)
ctx              4108 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	ret = __iwl_mvm_assign_vif_chanctx(mvm, vif, ctx, false);
ctx              4116 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 					   struct ieee80211_chanctx_conf *ctx,
ctx              4173 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 					 struct ieee80211_chanctx_conf *ctx)
ctx              4178 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 	__iwl_mvm_unassign_vif_chanctx(mvm, vif, ctx, false);
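
Common to the mvm/mac80211.c chanctx callbacks above: mac80211 reserves a driver-private area in each channel context (ctx->drv_priv), and the driver stashes its PHY-context id there. Sketched below; the phy_ctxt->id and mvm->phy_ctxts names are assumptions for illustration, only the drv_priv cast is taken from the lines above:

	u16 *phy_ctxt_id = (u16 *)ctx->drv_priv;	/* driver-owned scratch */

	*phy_ctxt_id = phy_ctxt->id;			/* on add: remember the backing PHY ctxt */
	phy_ctxt = &mvm->phy_ctxts[*phy_ctxt_id];	/* on change/remove: look it back up */
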
ctx               565 drivers/net/wireless/intel/iwlwifi/mvm/ops.c static int iwl_mvm_fwrt_dump_start(void *ctx)
ctx               567 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	struct iwl_mvm *mvm = ctx;
ctx               574 drivers/net/wireless/intel/iwlwifi/mvm/ops.c static void iwl_mvm_fwrt_dump_end(void *ctx)
ctx               576 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	struct iwl_mvm *mvm = ctx;
ctx               581 drivers/net/wireless/intel/iwlwifi/mvm/ops.c static bool iwl_mvm_fwrt_fw_running(void *ctx)
ctx               583 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	return iwl_mvm_firmware_running(ctx);
ctx               586 drivers/net/wireless/intel/iwlwifi/mvm/ops.c static int iwl_mvm_fwrt_send_hcmd(void *ctx, struct iwl_host_cmd *host_cmd)
ctx               588 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	struct iwl_mvm *mvm = (struct iwl_mvm *)ctx;
ctx               598 drivers/net/wireless/intel/iwlwifi/mvm/ops.c static bool iwl_mvm_d3_debug_enable(void *ctx)
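
The mvm/ops.c hits above implement the void-pointer callbacks declared in fw/runtime.h: the core stores an opaque ctx at registration and the driver casts it back to its own state. A reduced sketch; the ops-struct name fwrt_ops and the my_* functions are illustrative, only the callback signatures come from runtime.h above:

	static int my_dump_start(void *ctx)
	{
		struct iwl_mvm *mvm = ctx;	/* ctx was registered as the mvm */

		/* ... pause other firmware users before dumping ... */
		return 0;
	}

	static const struct fwrt_ops ops = {	/* hypothetical ops-struct name */
		.dump_start = my_dump_start,
	};
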
ctx               187 drivers/net/wireless/intersil/hostap/hostap_download.c 	struct prism2_download_aux_dump *ctx = m->private;
ctx               189 drivers/net/wireless/intersil/hostap/hostap_download.c 	hfa384x_from_aux(ctx->local->dev, (unsigned long)v - 1, 0x80, ctx->page);
ctx               190 drivers/net/wireless/intersil/hostap/hostap_download.c 	seq_write(m, ctx->page, 0x80);
ctx               196 drivers/net/wireless/intersil/hostap/hostap_download.c 	struct prism2_download_aux_dump *ctx = m->private;
ctx               197 drivers/net/wireless/intersil/hostap/hostap_download.c 	prism2_enable_aux_port(ctx->local->dev, 1);
ctx               213 drivers/net/wireless/intersil/hostap/hostap_download.c 	struct prism2_download_aux_dump *ctx = m->private;
ctx               214 drivers/net/wireless/intersil/hostap/hostap_download.c 	prism2_enable_aux_port(ctx->local->dev, 0);
ctx               284 drivers/net/wireless/intersil/orinoco/orinoco_usb.c static void ezusb_ctx_complete(struct request_context *ctx);
ctx               296 drivers/net/wireless/intersil/orinoco/orinoco_usb.c static void ezusb_request_context_put(struct request_context *ctx)
ctx               298 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!refcount_dec_and_test(&ctx->refcount))
ctx               301 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	WARN_ON(!ctx->done.done);
ctx               302 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	BUG_ON(ctx->outurb->status == -EINPROGRESS);
ctx               303 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	BUG_ON(timer_pending(&ctx->timer));
ctx               304 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	usb_free_urb(ctx->outurb);
ctx               305 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	kfree(ctx->buf);
ctx               306 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	kfree(ctx);
ctx               320 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx = from_timer(ctx, t, timer);
ctx               322 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->outurb->transfer_flags |= URB_ASYNC_UNLINK;
ctx               323 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (usb_unlink_urb(ctx->outurb) == -EINPROGRESS) {
ctx               324 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->state = EZUSB_CTX_REQ_TIMEOUT;
ctx               326 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->state = EZUSB_CTX_RESP_TIMEOUT;
ctx               327 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		dev_dbg(&ctx->outurb->dev->dev, "couldn't unlink\n");
ctx               328 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		refcount_inc(&ctx->refcount);
ctx               329 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->killed = 1;
ctx               330 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_ctx_complete(ctx);
ctx               331 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_request_context_put(ctx);
ctx               338 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx               340 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = kzalloc(sizeof(*ctx), GFP_ATOMIC);
ctx               341 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx               344 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->buf = kmalloc(BULK_BUF_SIZE, GFP_ATOMIC);
ctx               345 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx->buf) {
ctx               346 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		kfree(ctx);
ctx               349 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->outurb = usb_alloc_urb(0, GFP_ATOMIC);
ctx               350 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx->outurb) {
ctx               351 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		kfree(ctx->buf);
ctx               352 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		kfree(ctx);
ctx               356 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->upriv = upriv;
ctx               357 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->state = EZUSB_CTX_START;
ctx               358 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->out_rid = out_rid;
ctx               359 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->in_rid = in_rid;
ctx               361 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	refcount_set(&ctx->refcount, 1);
ctx               362 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	init_completion(&ctx->done);
ctx               364 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	timer_setup(&ctx->timer, ezusb_request_timerfn, 0);
ctx               365 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ctx;
ctx               379 drivers/net/wireless/intersil/orinoco/orinoco_usb.c static void ezusb_ctx_complete(struct request_context *ctx)
ctx               381 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct ezusb_priv *upriv = ctx->upriv;
ctx               386 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	list_del_init(&ctx->list);
ctx               393 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	switch (ctx->state) {
ctx               401 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		if ((ctx->out_rid == EZUSB_RID_TX) && upriv->dev) {
ctx               405 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			if (ctx->state != EZUSB_CTX_COMPLETE)
ctx               412 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_complete_all(&ctx->done);
ctx               413 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_request_context_put(ctx);
ctx               422 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_complete_all(&ctx->done);
ctx               423 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_request_context_put(ctx);
ctx               447 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx               458 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx =
ctx               462 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx->upriv->udev)
ctx               466 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	list_move_tail(&ctx->list, &upriv->req_active);
ctx               468 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx->state == EZUSB_CTX_QUEUED) {
ctx               469 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		refcount_inc(&ctx->refcount);
ctx               470 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		result = usb_submit_urb(ctx->outurb, GFP_ATOMIC);
ctx               472 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ctx->state = EZUSB_CTX_REQSUBMIT_FAIL;
ctx               479 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_ctx_complete(ctx);
ctx               480 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_request_context_put(ctx);
ctx               484 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->state = EZUSB_CTX_REQ_SUBMITTED;
ctx               485 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_mod_timer(ctx->upriv, &ctx->timer,
ctx               497 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 				  struct request_context *ctx)
ctx               503 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx->upriv->udev) {
ctx               507 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	refcount_inc(&ctx->refcount);
ctx               508 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	list_add_tail(&ctx->list, &upriv->req_pending);
ctx               511 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->state = EZUSB_CTX_QUEUED;
ctx               522 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx = urb->context;
ctx               523 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct ezusb_priv *upriv = ctx->upriv;
ctx               527 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	del_timer(&ctx->timer);
ctx               529 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx->killed) {
ctx               535 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	state = ctx->state;
ctx               540 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			if (ctx->in_rid) {
ctx               541 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 				ctx->state = EZUSB_CTX_REQ_COMPLETE;
ctx               543 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 				ezusb_mod_timer(upriv, &ctx->timer,
ctx               552 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ctx->state = EZUSB_CTX_COMPLETE;
ctx               554 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_ctx_complete(ctx);
ctx               570 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ctx->state = EZUSB_CTX_REQ_FAILED;
ctx               577 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_ctx_complete(ctx);
ctx               589 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ezusb_request_context_put(ctx);
ctx               596 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx = NULL;
ctx               614 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 				ctx = c;
ctx               623 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx == NULL) {
ctx               632 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	urb->transfer_buffer = ctx->buf;
ctx               633 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->buf = (void *) ans;
ctx               634 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx->buf_length = urb->actual_length;
ctx               636 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	state = ctx->state;
ctx               644 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->state = EZUSB_CTX_RESP_RECEIVED;
ctx               655 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->state = EZUSB_CTX_COMPLETE;
ctx               658 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		del_timer(&ctx->timer);
ctx               662 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_ctx_complete(ctx);
ctx               671 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		del_timer(&ctx->timer);
ctx               672 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->outurb->transfer_flags |= URB_ASYNC_UNLINK;
ctx               673 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		usb_unlink_urb(ctx->outurb);
ctx               681 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			       struct request_context *ctx)
ctx               683 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	switch (ctx->state) {
ctx               693 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			while (!ctx->done.done && msecs--)
ctx               696 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			wait_event_interruptible(ctx->done.wait,
ctx               697 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 						 ctx->done.done);
ctx               865 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			    struct request_context *ctx,
ctx               885 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	req_size = ezusb_fill_req(ctx->buf, length, ctx->out_rid, data,
ctx               887 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	usb_fill_bulk_urb(ctx->outurb, upriv->udev, upriv->write_pipe,
ctx               888 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			  ctx->buf, req_size,
ctx               889 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			  ezusb_request_out_callback, ctx);
ctx               891 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx->in_rid)
ctx               894 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ezusb_req_enqueue_run(upriv, ctx);
ctx               898 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx->in_rid)
ctx               899 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_req_ctx_wait(upriv, ctx);
ctx               901 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	state = ctx->state;
ctx               904 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		retval = ctx->outurb->status;
ctx               909 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		if (!ctx->in_rid)
ctx               933 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (ctx->in_rid) {
ctx               934 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		struct ezusb_packet *ans = ctx->buf;
ctx               942 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		if (exp_len != ctx->buf_length) {
ctx               945 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			    ctx->in_rid, exp_len, ctx->buf_length);
ctx               956 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ezusb_request_context_put(ctx);
ctx               965 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx               977 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, rid, EZUSB_RID_ACK);
ctx               978 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx               986 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, length, data, frame_type,
ctx               994 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx               999 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, rid, rid);
ctx              1000 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1003 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, 0, NULL, EZUSB_FRAME_CONTROL,
ctx              1011 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1022 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_DOCMD, EZUSB_RID_ACK);
ctx              1023 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1026 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, sizeof(data), &data,
ctx              1034 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1043 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_DOCMD, EZUSB_RID_ACK);
ctx              1044 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1047 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, sizeof(data), &data,
ctx              1088 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1093 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_READ_PDA, EZUSB_RID_READ_PDA);
ctx              1094 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1104 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, sizeof(data), &data,
ctx              1112 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1115 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_PROG_INIT, EZUSB_RID_ACK);
ctx              1116 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1119 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, sizeof(data), &data,
ctx              1126 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1128 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_PROG_END, EZUSB_RID_ACK);
ctx              1129 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1132 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, 0, NULL,
ctx              1140 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1144 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_PROG_SET_ADDR, EZUSB_RID_ACK);
ctx              1145 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1148 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	err = ezusb_access_ltv(upriv, ctx, sizeof(data), &data,
ctx              1153 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_PROG_BYTES, EZUSB_RID_ACK);
ctx              1154 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1157 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	return ezusb_access_ltv(upriv, ctx, len, buf,
ctx              1202 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	struct request_context *ctx;
ctx              1237 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	ctx = ezusb_alloc_ctx(upriv, EZUSB_RID_TX, 0);
ctx              1238 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	if (!ctx)
ctx              1241 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	memset(ctx->buf, 0, BULK_BUF_SIZE);
ctx              1242 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	buf = ctx->buf->data;
ctx              1275 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	tx_size = ALIGN(buf - ctx->buf->data, 2);
ctx              1277 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 	err = ezusb_access_ltv(upriv, ctx, tx_size, NULL,
ctx              1473 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		struct request_context *ctx;
ctx              1476 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx = list_entry(item, struct request_context, list);
ctx              1477 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		refcount_inc(&ctx->refcount);
ctx              1479 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ctx->outurb->transfer_flags |= URB_ASYNC_UNLINK;
ctx              1480 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		err = usb_unlink_urb(ctx->outurb);
ctx              1484 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			wait_for_completion(&ctx->done);
ctx              1486 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		del_timer_sync(&ctx->timer);
ctx              1489 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		if (!list_empty(&ctx->list))
ctx              1490 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 			ezusb_ctx_complete(ctx);
ctx              1492 drivers/net/wireless/intersil/orinoco/orinoco_usb.c 		ezusb_request_context_put(ctx);
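
The orinoco_usb lines above show a refcounted context: ezusb_alloc_ctx() starts the count at 1, each asynchronous user (URB completion, timer) takes an extra reference, and the final ezusb_request_context_put() frees the URB, buffer and context. The submit-side pairing, sketched from the lines above:

	refcount_inc(&ctx->refcount);		/* completion handler owns a ref */
	if (usb_submit_urb(ctx->outurb, GFP_ATOMIC)) {
		ctx->state = EZUSB_CTX_REQSUBMIT_FAIL;
		ezusb_ctx_complete(ctx);	/* report failure to waiters */
		ezusb_request_context_put(ctx);	/* drop the ref we just took */
	}
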
ctx              2237 drivers/net/wireless/mac80211_hwsim.c 				      struct ieee80211_chanctx_conf *ctx)
ctx              2239 drivers/net/wireless/mac80211_hwsim.c 	hwsim_set_chanctx_magic(ctx);
ctx              2242 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.chan->center_freq, ctx->def.width,
ctx              2243 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.center_freq1, ctx->def.center_freq2);
ctx              2248 drivers/net/wireless/mac80211_hwsim.c 					  struct ieee80211_chanctx_conf *ctx)
ctx              2252 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.chan->center_freq, ctx->def.width,
ctx              2253 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.center_freq1, ctx->def.center_freq2);
ctx              2254 drivers/net/wireless/mac80211_hwsim.c 	hwsim_check_chanctx_magic(ctx);
ctx              2255 drivers/net/wireless/mac80211_hwsim.c 	hwsim_clear_chanctx_magic(ctx);
ctx              2259 drivers/net/wireless/mac80211_hwsim.c 					  struct ieee80211_chanctx_conf *ctx,
ctx              2262 drivers/net/wireless/mac80211_hwsim.c 	hwsim_check_chanctx_magic(ctx);
ctx              2265 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.chan->center_freq, ctx->def.width,
ctx              2266 drivers/net/wireless/mac80211_hwsim.c 		  ctx->def.center_freq1, ctx->def.center_freq2);
ctx              2271 drivers/net/wireless/mac80211_hwsim.c 					     struct ieee80211_chanctx_conf *ctx)
ctx              2274 drivers/net/wireless/mac80211_hwsim.c 	hwsim_check_chanctx_magic(ctx);
ctx              2281 drivers/net/wireless/mac80211_hwsim.c 						struct ieee80211_chanctx_conf *ctx)
ctx              2284 drivers/net/wireless/mac80211_hwsim.c 	hwsim_check_chanctx_magic(ctx);
ctx               286 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c mwifiex_11n_find_last_seq_num(struct reorder_tmr_cnxt *ctx)
ctx               288 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	struct mwifiex_rx_reorder_tbl *rx_reorder_tbl_ptr = ctx->ptr;
ctx               289 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	struct mwifiex_private *priv = ctx->priv;
ctx               314 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	struct reorder_tmr_cnxt *ctx =
ctx               315 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 		from_timer(ctx, t, timer);
ctx               318 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	ctx->timer_is_set = false;
ctx               319 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	seq_num = mwifiex_11n_find_last_seq_num(ctx);
ctx               324 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	mwifiex_dbg(ctx->priv->adapter, INFO, "info: flush data %d\n", seq_num);
ctx               325 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	start_win = (ctx->ptr->start_win + seq_num + 1) & (MAX_TID_VALUE - 1);
ctx               326 drivers/net/wireless/marvell/mwifiex/11n_rxreorder.c 	mwifiex_11n_dispatch_pkt_until_start_win(ctx->priv, ctx->ptr,
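
Both the orinoco and mwifiex hits use the from_timer() idiom: the timer_list is embedded in the context struct, so the expiry handler recovers the enclosing struct via container_of arithmetic instead of a separate data pointer. Sketch following the mwifiex names above (flush_timer_fn is an illustrative handler name):

	static void flush_timer_fn(struct timer_list *t)
	{
		struct reorder_tmr_cnxt *ctx = from_timer(ctx, t, timer);

		ctx->timer_is_set = false;
		/* ... flush ctx->ptr's reorder window ... */
	}

	timer_setup(&ctx->timer, flush_timer_fn, 0);	/* once, at ctx init */
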
ctx              2420 drivers/net/wireless/marvell/mwifiex/pcie.c 	struct mwifiex_msix_context *ctx = context;
ctx              2421 drivers/net/wireless/marvell/mwifiex/pcie.c 	struct pci_dev *pdev = ctx->dev;
ctx              2438 drivers/net/wireless/marvell/mwifiex/pcie.c 		mwifiex_interrupt_status(adapter, ctx->msg_id);
ctx                53 drivers/net/wireless/marvell/mwifiex/usb.c static int mwifiex_usb_submit_rx_urb(struct urb_context *ctx, int size);
ctx               296 drivers/net/wireless/marvell/mwifiex/usb.c static int mwifiex_usb_submit_rx_urb(struct urb_context *ctx, int size)
ctx               298 drivers/net/wireless/marvell/mwifiex/usb.c 	struct mwifiex_adapter *adapter = ctx->adapter;
ctx               302 drivers/net/wireless/marvell/mwifiex/usb.c 		if (card->rx_cmd_ep == ctx->ep) {
ctx               305 drivers/net/wireless/marvell/mwifiex/usb.c 			dev_kfree_skb_any(ctx->skb);
ctx               306 drivers/net/wireless/marvell/mwifiex/usb.c 			ctx->skb = NULL;
ctx               310 drivers/net/wireless/marvell/mwifiex/usb.c 			    __func__, ctx->ep);
ctx               314 drivers/net/wireless/marvell/mwifiex/usb.c 	if (card->rx_cmd_ep != ctx->ep) {
ctx               315 drivers/net/wireless/marvell/mwifiex/usb.c 		ctx->skb = dev_alloc_skb(size);
ctx               316 drivers/net/wireless/marvell/mwifiex/usb.c 		if (!ctx->skb) {
ctx               323 drivers/net/wireless/marvell/mwifiex/usb.c 	if (card->rx_cmd_ep == ctx->ep &&
ctx               325 drivers/net/wireless/marvell/mwifiex/usb.c 		usb_fill_int_urb(ctx->urb, card->udev,
ctx               326 drivers/net/wireless/marvell/mwifiex/usb.c 				 usb_rcvintpipe(card->udev, ctx->ep),
ctx               327 drivers/net/wireless/marvell/mwifiex/usb.c 				 ctx->skb->data, size, mwifiex_usb_rx_complete,
ctx               328 drivers/net/wireless/marvell/mwifiex/usb.c 				 (void *)ctx, card->rx_cmd_interval);
ctx               330 drivers/net/wireless/marvell/mwifiex/usb.c 		usb_fill_bulk_urb(ctx->urb, card->udev,
ctx               331 drivers/net/wireless/marvell/mwifiex/usb.c 				  usb_rcvbulkpipe(card->udev, ctx->ep),
ctx               332 drivers/net/wireless/marvell/mwifiex/usb.c 				  ctx->skb->data, size, mwifiex_usb_rx_complete,
ctx               333 drivers/net/wireless/marvell/mwifiex/usb.c 				  (void *)ctx);
ctx               335 drivers/net/wireless/marvell/mwifiex/usb.c 	if (card->rx_cmd_ep == ctx->ep)
ctx               340 drivers/net/wireless/marvell/mwifiex/usb.c 	if (usb_submit_urb(ctx->urb, GFP_ATOMIC)) {
ctx               342 drivers/net/wireless/marvell/mwifiex/usb.c 		dev_kfree_skb_any(ctx->skb);
ctx               343 drivers/net/wireless/marvell/mwifiex/usb.c 		ctx->skb = NULL;
ctx               345 drivers/net/wireless/marvell/mwifiex/usb.c 		if (card->rx_cmd_ep == ctx->ep)
ctx              1570 drivers/net/wireless/marvell/mwifiex/usb.c 	struct urb_context *ctx;
ctx              1575 drivers/net/wireless/marvell/mwifiex/usb.c 		ctx = &card->rx_data_list[i];
ctx              1576 drivers/net/wireless/marvell/mwifiex/usb.c 		mwifiex_usb_submit_rx_urb(ctx, MWIFIEX_RX_DATA_BUF_SIZE);
ctx              4681 drivers/net/wireless/ti/wlcore/main.c 				 struct ieee80211_chanctx_conf *ctx)
ctx              4684 drivers/net/wireless/ti/wlcore/main.c 		     ieee80211_frequency_to_channel(ctx->def.chan->center_freq),
ctx              4685 drivers/net/wireless/ti/wlcore/main.c 		     cfg80211_get_chandef_type(&ctx->def));
ctx              4690 drivers/net/wireless/ti/wlcore/main.c 				     struct ieee80211_chanctx_conf *ctx)
ctx              4693 drivers/net/wireless/ti/wlcore/main.c 		     ieee80211_frequency_to_channel(ctx->def.chan->center_freq),
ctx              4694 drivers/net/wireless/ti/wlcore/main.c 		     cfg80211_get_chandef_type(&ctx->def));
ctx              4698 drivers/net/wireless/ti/wlcore/main.c 				     struct ieee80211_chanctx_conf *ctx,
ctx              4705 drivers/net/wireless/ti/wlcore/main.c 		ctx->def.chan->center_freq);
ctx              4709 drivers/net/wireless/ti/wlcore/main.c 		     channel, cfg80211_get_chandef_type(&ctx->def), changed);
ctx              4723 drivers/net/wireless/ti/wlcore/main.c 		if (rcu_access_pointer(vif->chanctx_conf) != ctx) {
ctx              4732 drivers/net/wireless/ti/wlcore/main.c 		    ctx->radar_enabled && !wlvif->radar_enabled &&
ctx              4733 drivers/net/wireless/ti/wlcore/main.c 		    ctx->def.chan->dfs_state == NL80211_DFS_USABLE) {
ctx              4748 drivers/net/wireless/ti/wlcore/main.c 					struct ieee80211_chanctx_conf *ctx)
ctx              4753 drivers/net/wireless/ti/wlcore/main.c 		ctx->def.chan->center_freq);
ctx              4759 drivers/net/wireless/ti/wlcore/main.c 		     cfg80211_get_chandef_type(&ctx->def),
ctx              4760 drivers/net/wireless/ti/wlcore/main.c 		     ctx->radar_enabled, ctx->def.chan->dfs_state);
ctx              4776 drivers/net/wireless/ti/wlcore/main.c 	wlvif->band = ctx->def.chan->band;
ctx              4778 drivers/net/wireless/ti/wlcore/main.c 	wlvif->channel_type = cfg80211_get_chandef_type(&ctx->def);
ctx              4783 drivers/net/wireless/ti/wlcore/main.c 	if (ctx->radar_enabled &&
ctx              4784 drivers/net/wireless/ti/wlcore/main.c 	    ctx->def.chan->dfs_state == NL80211_DFS_USABLE) {
ctx              4800 drivers/net/wireless/ti/wlcore/main.c 					   struct ieee80211_chanctx_conf *ctx)
ctx              4809 drivers/net/wireless/ti/wlcore/main.c 		     ieee80211_frequency_to_channel(ctx->def.chan->center_freq),
ctx              4810 drivers/net/wireless/ti/wlcore/main.c 		     cfg80211_get_chandef_type(&ctx->def));
ctx               562 drivers/net/xen-netback/interface.c 			  { { .ctx = NULL,
ctx               580 drivers/net/xen-netback/netback.c 			callback_param(queue, prev_pending_idx).ctx =
ctx               583 drivers/net/xen-netback/netback.c 		callback_param(queue, pending_idx).ctx = NULL;
ctx              1126 drivers/net/xen-netback/netback.c 		callback_param(queue, pending_idx).ctx = NULL;
ctx              1232 drivers/net/xen-netback/netback.c 		ubuf = (struct ubuf_info *) ubuf->ctx;
ctx               119 drivers/ntb/core.c 	ntb->ctx = NULL;
ctx               134 drivers/ntb/core.c int ntb_set_ctx(struct ntb_dev *ntb, void *ctx,
ctx               146 drivers/ntb/core.c 		ntb->ctx = ctx;
ctx               162 drivers/ntb/core.c 		ntb->ctx = NULL;
ctx               175 drivers/ntb/core.c 			ntb->ctx_ops->link_event(ntb->ctx);
ctx               188 drivers/ntb/core.c 			ntb->ctx_ops->db_event(ntb->ctx, vector);
ctx               201 drivers/ntb/core.c 			ntb->ctx_ops->msg_event(ntb->ctx);
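
The ntb/core.c lines above define the client-context contract that the ntb/test drivers below consume: a client hands the core an opaque ctx plus a struct ntb_ctx_ops table via ntb_set_ctx(), and the core routes hardware events back through it. A minimal sketch; struct my_client and my_link_event are illustrative:

	static void my_link_event(void *ctx)
	{
		struct my_client *c = ctx;	/* recover the client state */
		/* re-query link state, wake queues, ... */
	}

	static const struct ntb_ctx_ops my_ctx_ops = {
		.link_event = my_link_event,
	};

	/* at probe: hand the core our state and callbacks */
	ntb_set_ctx(ntb, client, &my_ctx_ops);
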
ctx                13 drivers/ntb/msi.c 	void (*desc_changed)(void *ctx);
ctx                32 drivers/ntb/msi.c 		 void (*desc_changed)(void *ctx))
ctx               224 drivers/ntb/msi.c 		dr->ntb->msi->desc_changed(dr->ntb->ctx);
ctx              1405 drivers/ntb/ntb_transport.c 	struct ntb_transport_ctx *nt = ndev->ctx;
ctx              1993 drivers/ntb/ntb_transport.c 	nt = ndev->ctx;
ctx               104 drivers/ntb/test/ntb_msi_test.c static void ntb_msit_desc_changed(void *ctx)
ctx               106 drivers/ntb/test/ntb_msi_test.c 	struct ntb_msit_ctx *nm = ctx;
ctx               121 drivers/ntb/test/ntb_msi_test.c static void ntb_msit_link_event(void *ctx)
ctx               123 drivers/ntb/test/ntb_msi_test.c 	struct ntb_msit_ctx *nm = ctx;
ctx               149 drivers/ntb/test/ntb_msi_test.c static void ntb_msit_db_event(void *ctx, int vec)
ctx               151 drivers/ntb/test/ntb_msi_test.c 	struct ntb_msit_ctx *nm = ctx;
ctx               391 drivers/ntb/test/ntb_msi_test.c 	struct ntb_msit_ctx *nm = ntb->ctx;
ctx               479 drivers/ntb/test/ntb_perf.c static void perf_link_event(void *ctx)
ctx               481 drivers/ntb/test/ntb_perf.c 	struct perf_ctx *perf = ctx;
ctx               501 drivers/ntb/test/ntb_perf.c static void perf_db_event(void *ctx, int vec)
ctx               503 drivers/ntb/test/ntb_perf.c 	struct perf_ctx *perf = ctx;
ctx               512 drivers/ntb/test/ntb_perf.c static void perf_msg_event(void *ctx)
ctx               514 drivers/ntb/test/ntb_perf.c 	struct perf_ctx *perf = ctx;
ctx              1466 drivers/ntb/test/ntb_perf.c 	struct perf_ctx *perf = ntb->ctx;
ctx               224 drivers/ntb/test/ntb_pingpong.c static void pp_link_event(void *ctx)
ctx               226 drivers/ntb/test/ntb_pingpong.c 	struct pp_ctx *pp = ctx;
ctx               231 drivers/ntb/test/ntb_pingpong.c static void pp_db_event(void *ctx, int vec)
ctx               233 drivers/ntb/test/ntb_pingpong.c 	struct pp_ctx *pp = ctx;
ctx               401 drivers/ntb/test/ntb_pingpong.c 	struct pp_ctx *pp = ntb->ctx;
ctx               289 drivers/ntb/test/ntb_tool.c static void tool_link_event(void *ctx)
ctx               291 drivers/ntb/test/ntb_tool.c 	struct tool_ctx *tc = ctx;
ctx               304 drivers/ntb/test/ntb_tool.c static void tool_db_event(void *ctx, int vec)
ctx               306 drivers/ntb/test/ntb_tool.c 	struct tool_ctx *tc = ctx;
ctx               318 drivers/ntb/test/ntb_tool.c static void tool_msg_event(void *ctx)
ctx               320 drivers/ntb/test/ntb_tool.c 	struct tool_ctx *tc = ctx;
ctx              1654 drivers/ntb/test/ntb_tool.c 	struct tool_ctx *tc = ntb->ctx;
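Each test client above also recovers its typed state straight from ntb->ctx in paths that only receive the ntb_dev (probe-failure cleanup, remove). Continuing the hypothetical client from the ntb/core.c sketch:

static void my_client_detach(struct ntb_dev *ntb)
{
	struct my_client *mc = ntb->ctx;	/* stored by ntb_set_ctx() */

	ntb_clear_ctx(ntb);			/* no events past this point */
	kfree(mc);
}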
ctx               177 drivers/nvdimm/bus.c 	struct clear_badblocks_context *ctx = data;
ctx               190 drivers/nvdimm/bus.c 	if (ctx->phys < nd_region->ndr_start
ctx               191 drivers/nvdimm/bus.c 			|| (ctx->phys + ctx->cleared) > ndr_end)
ctx               194 drivers/nvdimm/bus.c 	sector = (ctx->phys - nd_region->ndr_start) / 512;
ctx               195 drivers/nvdimm/bus.c 	badblocks_clear(&nd_region->bb, sector, ctx->cleared / 512);
ctx               206 drivers/nvdimm/bus.c 	struct clear_badblocks_context ctx = {
ctx               211 drivers/nvdimm/bus.c 	device_for_each_child(&nvdimm_bus->dev, &ctx,
ctx              1187 drivers/nvdimm/region_devs.c 	struct conflict_context *ctx = data;
ctx              1194 drivers/nvdimm/region_devs.c 	if (nd_region == ctx->nd_region)
ctx              1197 drivers/nvdimm/region_devs.c 	res_end = ctx->start + ctx->size;
ctx              1200 drivers/nvdimm/region_devs.c 	if (ctx->start >= region_start && ctx->start < region_end)
ctx              1211 drivers/nvdimm/region_devs.c 	struct conflict_context ctx = {
ctx              1217 drivers/nvdimm/region_devs.c 	return device_for_each_child(&nvdimm_bus->dev, &ctx, region_conflict);
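Both nvdimm walks use the same shape: a short-lived context struct on the caller's stack, threaded through device_for_each_child() as the opaque data argument and cast back in the visitor. A generic sketch with hypothetical names:

#include <linux/device.h>

struct example_walk_ctx {		/* hypothetical iteration state */
	phys_addr_t phys;
	u64 len;
};

static int example_visit(struct device *dev, void *data)
{
	struct example_walk_ctx *ctx = data;

	/* ... test dev against [ctx->phys, ctx->phys + ctx->len) ... */
	return 0;			/* non-zero stops the walk */
}

static int example_walk(struct device *parent, phys_addr_t phys, u64 len)
{
	struct example_walk_ctx ctx = { .phys = phys, .len = len };

	return device_for_each_child(parent, &ctx, example_visit);
}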
ctx               471 drivers/parport/parport_ip32.c 	unsigned int			ctx;
ctx               493 drivers/parport/parport_ip32.c 		volatile u64 __iomem *ctxreg = (parport_ip32_dma.ctx == 0) ?
ctx               512 drivers/parport/parport_ip32.c 			 parport_ip32_dma.ctx, ctxval ? "*" : "");
ctx               521 drivers/parport/parport_ip32.c 		parport_ip32_dma.ctx ^= 1U;
ctx               543 drivers/parport/parport_ip32.c 		pr_trace(NULL, "(%d): ctx=%d", irq, parport_ip32_dma.ctx);
ctx               596 drivers/parport/parport_ip32.c 	parport_ip32_dma.ctx = 0;
ctx              1131 drivers/perf/xgene_pmu.c xgene_pmu_dev_add(struct xgene_pmu *xgene_pmu, struct xgene_pmu_dev_ctx *ctx)
ctx              1140 drivers/perf/xgene_pmu.c 	pmu->inf = &ctx->inf;
ctx              1141 drivers/perf/xgene_pmu.c 	ctx->pmu_dev = pmu;
ctx              1182 drivers/perf/xgene_pmu.c 	if (xgene_init_perf(pmu, ctx->name)) {
ctx              1183 drivers/perf/xgene_pmu.c 		dev_err(dev, "%s PMU: Failed to init perf driver\n", ctx->name);
ctx              1187 drivers/perf/xgene_pmu.c 	dev_info(dev, "%s PMU registered\n", ctx->name);
ctx              1235 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1256 drivers/perf/xgene_pmu.c 		list_for_each_entry(ctx, &xgene_pmu->mcpmus, next) {
ctx              1257 drivers/perf/xgene_pmu.c 			_xgene_pmu_isr(irq, ctx->pmu_dev);
ctx              1261 drivers/perf/xgene_pmu.c 		list_for_each_entry(ctx, &xgene_pmu->mcbpmus, next) {
ctx              1262 drivers/perf/xgene_pmu.c 			_xgene_pmu_isr(irq, ctx->pmu_dev);
ctx              1266 drivers/perf/xgene_pmu.c 		list_for_each_entry(ctx, &xgene_pmu->l3cpmus, next) {
ctx              1267 drivers/perf/xgene_pmu.c 			_xgene_pmu_isr(irq, ctx->pmu_dev);
ctx              1271 drivers/perf/xgene_pmu.c 		list_for_each_entry(ctx, &xgene_pmu->iobpmus, next) {
ctx              1272 drivers/perf/xgene_pmu.c 			_xgene_pmu_isr(irq, ctx->pmu_dev);
ctx              1479 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1487 drivers/perf/xgene_pmu.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              1488 drivers/perf/xgene_pmu.c 	if (!ctx)
ctx              1514 drivers/perf/xgene_pmu.c 	ctx->name = xgene_pmu_dev_name(dev, type, enable_bit);
ctx              1515 drivers/perf/xgene_pmu.c 	if (!ctx->name) {
ctx              1519 drivers/perf/xgene_pmu.c 	inf = &ctx->inf;
ctx              1524 drivers/perf/xgene_pmu.c 	return ctx;
ctx              1562 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1574 drivers/perf/xgene_pmu.c 	ctx = acpi_get_pmu_hw_inf(xgene_pmu, adev, (u32)acpi_id->driver_data);
ctx              1575 drivers/perf/xgene_pmu.c 	if (!ctx)
ctx              1578 drivers/perf/xgene_pmu.c 	if (xgene_pmu_dev_add(xgene_pmu, ctx)) {
ctx              1580 drivers/perf/xgene_pmu.c 		devm_kfree(xgene_pmu->dev, ctx);
ctx              1584 drivers/perf/xgene_pmu.c 	switch (ctx->inf.type) {
ctx              1586 drivers/perf/xgene_pmu.c 		list_add(&ctx->next, &xgene_pmu->l3cpmus);
ctx              1589 drivers/perf/xgene_pmu.c 		list_add(&ctx->next, &xgene_pmu->iobpmus);
ctx              1592 drivers/perf/xgene_pmu.c 		list_add(&ctx->next, &xgene_pmu->iobpmus);
ctx              1595 drivers/perf/xgene_pmu.c 		list_add(&ctx->next, &xgene_pmu->mcbpmus);
ctx              1598 drivers/perf/xgene_pmu.c 		list_add(&ctx->next, &xgene_pmu->mcpmus);
ctx              1637 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1643 drivers/perf/xgene_pmu.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx              1644 drivers/perf/xgene_pmu.c 	if (!ctx)
ctx              1662 drivers/perf/xgene_pmu.c 	ctx->name = xgene_pmu_dev_name(dev, type, enable_bit);
ctx              1663 drivers/perf/xgene_pmu.c 	if (!ctx->name) {
ctx              1668 drivers/perf/xgene_pmu.c 	inf = &ctx->inf;
ctx              1673 drivers/perf/xgene_pmu.c 	return ctx;
ctx              1679 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1687 drivers/perf/xgene_pmu.c 			ctx = fdt_get_pmu_hw_inf(xgene_pmu, np, PMU_TYPE_L3C);
ctx              1689 drivers/perf/xgene_pmu.c 			ctx = fdt_get_pmu_hw_inf(xgene_pmu, np, PMU_TYPE_IOB);
ctx              1691 drivers/perf/xgene_pmu.c 			ctx = fdt_get_pmu_hw_inf(xgene_pmu, np, PMU_TYPE_MCB);
ctx              1693 drivers/perf/xgene_pmu.c 			ctx = fdt_get_pmu_hw_inf(xgene_pmu, np, PMU_TYPE_MC);
ctx              1695 drivers/perf/xgene_pmu.c 			ctx = NULL;
ctx              1697 drivers/perf/xgene_pmu.c 		if (!ctx)
ctx              1700 drivers/perf/xgene_pmu.c 		if (xgene_pmu_dev_add(xgene_pmu, ctx)) {
ctx              1702 drivers/perf/xgene_pmu.c 			devm_kfree(xgene_pmu->dev, ctx);
ctx              1706 drivers/perf/xgene_pmu.c 		switch (ctx->inf.type) {
ctx              1708 drivers/perf/xgene_pmu.c 			list_add(&ctx->next, &xgene_pmu->l3cpmus);
ctx              1711 drivers/perf/xgene_pmu.c 			list_add(&ctx->next, &xgene_pmu->iobpmus);
ctx              1714 drivers/perf/xgene_pmu.c 			list_add(&ctx->next, &xgene_pmu->iobpmus);
ctx              1717 drivers/perf/xgene_pmu.c 			list_add(&ctx->next, &xgene_pmu->mcbpmus);
ctx              1720 drivers/perf/xgene_pmu.c 			list_add(&ctx->next, &xgene_pmu->mcpmus);
ctx              1812 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1821 drivers/perf/xgene_pmu.c 	list_for_each_entry(ctx, &xgene_pmu->mcpmus, next) {
ctx              1822 drivers/perf/xgene_pmu.c 		perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target);
ctx              1824 drivers/perf/xgene_pmu.c 	list_for_each_entry(ctx, &xgene_pmu->mcbpmus, next) {
ctx              1825 drivers/perf/xgene_pmu.c 		perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target);
ctx              1827 drivers/perf/xgene_pmu.c 	list_for_each_entry(ctx, &xgene_pmu->l3cpmus, next) {
ctx              1828 drivers/perf/xgene_pmu.c 		perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target);
ctx              1830 drivers/perf/xgene_pmu.c 	list_for_each_entry(ctx, &xgene_pmu->iobpmus, next) {
ctx              1831 drivers/perf/xgene_pmu.c 		perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target);
ctx              1956 drivers/perf/xgene_pmu.c 	struct xgene_pmu_dev_ctx *ctx;
ctx              1958 drivers/perf/xgene_pmu.c 	list_for_each_entry(ctx, pmus, next) {
ctx              1959 drivers/perf/xgene_pmu.c 		perf_pmu_unregister(&ctx->pmu_dev->pmu);
ctx               593 drivers/phy/phy-xgene.c static void cmu_wr(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
ctx               596 drivers/phy/phy-xgene.c 	void __iomem *sds_base = ctx->sds_base;
ctx               610 drivers/phy/phy-xgene.c static void cmu_rd(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
ctx               613 drivers/phy/phy-xgene.c 	void __iomem *sds_base = ctx->sds_base;
ctx               624 drivers/phy/phy-xgene.c static void cmu_toggle1to0(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
ctx               629 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, reg, &val);
ctx               631 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, reg, val);
ctx               632 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, reg, &val);
ctx               634 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, reg, val);
ctx               637 drivers/phy/phy-xgene.c static void cmu_clrbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
ctx               642 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, reg, &val);
ctx               644 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, reg, val);
ctx               647 drivers/phy/phy-xgene.c static void cmu_setbits(struct xgene_phy_ctx *ctx, enum cmu_type_t cmu_type,
ctx               652 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, reg, &val);
ctx               654 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, reg, val);
ctx               657 drivers/phy/phy-xgene.c static void serdes_wr(struct xgene_phy_ctx *ctx, int lane, u32 reg, u32 data)
ctx               659 drivers/phy/phy-xgene.c 	void __iomem *sds_base = ctx->sds_base;
ctx               672 drivers/phy/phy-xgene.c static void serdes_rd(struct xgene_phy_ctx *ctx, int lane, u32 reg, u32 *data)
ctx               674 drivers/phy/phy-xgene.c 	void __iomem *sds_base = ctx->sds_base;
ctx               683 drivers/phy/phy-xgene.c static void serdes_clrbits(struct xgene_phy_ctx *ctx, int lane, u32 reg,
ctx               688 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, reg, &val);
ctx               690 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, reg, val);
ctx               693 drivers/phy/phy-xgene.c static void serdes_setbits(struct xgene_phy_ctx *ctx, int lane, u32 reg,
ctx               698 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, reg, &val);
ctx               700 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, reg, val);
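Every cmu_/serdes_ helper above is the same read-modify-write over an indirect register file, differing only in which bits move. Folded into one sketch, with cmu_rd()/cmu_wr() and the driver's xgene_phy_ctx/cmu_type_t assumed as indexed above:

static void example_cmu_update(struct xgene_phy_ctx *ctx,
			       enum cmu_type_t cmu_type, u32 reg,
			       u32 clr, u32 set)
{
	u32 val;

	cmu_rd(ctx, cmu_type, reg, &val);	/* fetch current value */
	val &= ~clr;				/* cmu_clrbits() path */
	val |= set;				/* cmu_setbits() path */
	cmu_wr(ctx, cmu_type, reg, val);	/* write it back */
}

cmu_toggle1to0() is the same pattern applied twice, setting then clearing the mask, which the calibration code below uses as a self-clearing strobe.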
ctx               703 drivers/phy/phy-xgene.c static void xgene_phy_cfg_cmu_clk_type(struct xgene_phy_ctx *ctx,
ctx               710 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG12, &val);
ctx               712 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG12, val);
ctx               714 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG13, 0x0222);
ctx               715 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG14, 0x2225);
ctx               720 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG0, &val);
ctx               722 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG0, val);
ctx               724 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx               726 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx               727 drivers/phy/phy-xgene.c 		dev_dbg(ctx->dev, "Set external reference clock\n");
ctx               730 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG0, &val);
ctx               732 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG0, val);
ctx               734 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx               736 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx               737 drivers/phy/phy-xgene.c 		dev_dbg(ctx->dev, "Set internal reference clock\n");
ctx               745 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx               747 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx               749 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx               751 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx               752 drivers/phy/phy-xgene.c 		dev_dbg(ctx->dev,
ctx               757 drivers/phy/phy-xgene.c static void xgene_phy_sata_cfg_cmu_core(struct xgene_phy_ctx *ctx,
ctx               766 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG34, &val);
ctx               771 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG34, val);
ctx               775 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG0, &val);
ctx               780 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG0, val);
ctx               783 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx               793 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx               796 drivers/phy/phy-xgene.c 		cmu_clrbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
ctx               799 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG2, &val);
ctx               817 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG2, val);
ctx               820 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG3, &val);
ctx               832 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG3, val);
ctx               835 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG26, &val);
ctx               837 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG26, val);
ctx               840 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG5, &val);
ctx               847 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG5, val);
ctx               850 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG6, &val);
ctx               853 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG6, val);
ctx               857 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG9, &val);
ctx               867 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG9, val);
ctx               870 drivers/phy/phy-xgene.c 			cmu_rd(ctx, cmu_type, CMU_REG10, &val);
ctx               872 drivers/phy/phy-xgene.c 			cmu_wr(ctx, cmu_type, CMU_REG10, val);
ctx               876 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG16, &val);
ctx               883 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG16, val);
ctx               886 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG30, &val);
ctx               889 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG30, val);
ctx               892 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG31, 0xF);
ctx               894 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG32, &val);
ctx               900 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG32, val);
ctx               904 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG34, 0x8d27);
ctx               906 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG34, 0x873c);
ctx               909 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG37, 0xF00F);
ctx               912 drivers/phy/phy-xgene.c static void xgene_phy_ssc_enable(struct xgene_phy_ctx *ctx,
ctx               918 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG35, &val);
ctx               920 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG35, val);
ctx               923 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG36, &val);
ctx               927 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG36, val);
ctx               930 drivers/phy/phy-xgene.c 	cmu_clrbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
ctx               931 drivers/phy/phy-xgene.c 	cmu_setbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
ctx               934 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
ctx               938 drivers/phy/phy-xgene.c static void xgene_phy_sata_cfg_lanes(struct xgene_phy_ctx *ctx)
ctx               946 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG147, 0x6);
ctx               949 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG0, &val);
ctx               953 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG0, val);
ctx               956 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG1, &val);
ctx               959 drivers/phy/phy-xgene.c 			ctx->sata_param.txboostgain[lane * 3 +
ctx               960 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx               961 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG1, val);
ctx               965 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG2, &val);
ctx               969 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG2, val);
ctx               972 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG4, &val);
ctx               974 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG4, val);
ctx               977 drivers/phy/phy-xgene.c 			serdes_rd(ctx, lane, RXTX_REG1, &val);
ctx               980 drivers/phy/phy-xgene.c 			serdes_wr(ctx, lane, RXTX_REG1, val);
ctx               984 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG5, &val);
ctx               986 drivers/phy/phy-xgene.c 			ctx->sata_param.txprecursor_cn1[lane * 3 +
ctx               987 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx               989 drivers/phy/phy-xgene.c 			ctx->sata_param.txpostcursor_cp1[lane * 3 +
ctx               990 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx               992 drivers/phy/phy-xgene.c 			ctx->sata_param.txprecursor_cn2[lane * 3 +
ctx               993 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx               994 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG5, val);
ctx               997 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG6, &val);
ctx               999 drivers/phy/phy-xgene.c 			ctx->sata_param.txamplitude[lane * 3 +
ctx              1000 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx              1005 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG6, val);
ctx              1008 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG7, &val);
ctx              1011 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG7, val);
ctx              1014 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG8, &val);
ctx              1020 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG8, val);
ctx              1023 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG11, &val);
ctx              1025 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG11, val);
ctx              1028 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG12, &val);
ctx              1032 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG12, val);
ctx              1035 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG26, &val);
ctx              1038 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG26, val);
ctx              1040 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG28, 0x0);
ctx              1043 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG31, 0x0);
ctx              1046 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG61, &val);
ctx              1050 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG61, val);
ctx              1052 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG62, &val);
ctx              1054 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG62, val);
ctx              1059 drivers/phy/phy-xgene.c 			serdes_rd(ctx, lane, reg, &val);
ctx              1063 drivers/phy/phy-xgene.c 			serdes_wr(ctx, lane, reg, val);
ctx              1069 drivers/phy/phy-xgene.c 			serdes_rd(ctx, lane, reg, &val);
ctx              1073 drivers/phy/phy-xgene.c 			serdes_wr(ctx, lane, reg, val);
ctx              1079 drivers/phy/phy-xgene.c 			serdes_rd(ctx, lane, reg, &val);
ctx              1083 drivers/phy/phy-xgene.c 			serdes_wr(ctx, lane, reg, val);
ctx              1086 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG102, &val);
ctx              1088 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG102, val);
ctx              1090 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG114, 0xffe0);
ctx              1092 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG125, &val);
ctx              1094 drivers/phy/phy-xgene.c 			ctx->sata_param.txeyedirection[lane * 3 +
ctx              1095 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx              1097 drivers/phy/phy-xgene.c 			ctx->sata_param.txeyetuning[lane * 3 +
ctx              1098 drivers/phy/phy-xgene.c 			ctx->sata_param.speed[lane]]);
ctx              1100 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG125, val);
ctx              1102 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG127, &val);
ctx              1104 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG127, val);
ctx              1106 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG128, &val);
ctx              1108 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG128, val);
ctx              1110 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG145, &val);
ctx              1120 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, RXTX_REG145, val);
ctx              1128 drivers/phy/phy-xgene.c 			serdes_wr(ctx, lane, reg, 0xFFFF);
ctx              1133 drivers/phy/phy-xgene.c static int xgene_phy_cal_rdy_chk(struct xgene_phy_ctx *ctx,
ctx              1137 drivers/phy/phy-xgene.c 	void __iomem *csr_serdes = ctx->sds_base;
ctx              1146 drivers/phy/phy-xgene.c 		cmu_setbits(ctx, cmu_type, CMU_REG5, CMU_REG5_PLL_RESETB_MASK);
ctx              1153 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG1, &val);
ctx              1155 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG1, val);
ctx              1162 drivers/phy/phy-xgene.c 		cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
ctx              1179 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG17, &val);
ctx              1182 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG17, val);
ctx              1183 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG17,
ctx              1190 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG17, &val);
ctx              1193 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG17, val);
ctx              1194 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG16,
ctx              1197 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG17, &val);
ctx              1200 drivers/phy/phy-xgene.c 	cmu_wr(ctx, cmu_type, CMU_REG17, val);
ctx              1201 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG16,
ctx              1208 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG7, &val);
ctx              1218 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG7, &val);
ctx              1219 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "PLL calibration %s\n",
ctx              1222 drivers/phy/phy-xgene.c 		dev_err(ctx->dev,
ctx              1226 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "PLL calibration successful\n");
ctx              1228 drivers/phy/phy-xgene.c 	cmu_rd(ctx, cmu_type, CMU_REG15, &val);
ctx              1229 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "PHY Tx is %sready\n", val & 0x300 ? "" : "not ");
ctx              1233 drivers/phy/phy-xgene.c static void xgene_phy_pdwn_force_vco(struct xgene_phy_ctx *ctx,
ctx              1239 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Reset VCO and re-start again\n");
ctx              1241 drivers/phy/phy-xgene.c 		cmu_rd(ctx, cmu_type, CMU_REG16, &val);
ctx              1243 drivers/phy/phy-xgene.c 		cmu_wr(ctx, cmu_type, CMU_REG16, val);
ctx              1246 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG0, CMU_REG0_PDOWN_MASK);
ctx              1247 drivers/phy/phy-xgene.c 	cmu_toggle1to0(ctx, cmu_type, CMU_REG32,
ctx              1251 drivers/phy/phy-xgene.c static int xgene_phy_hw_init_sata(struct xgene_phy_ctx *ctx,
ctx              1254 drivers/phy/phy-xgene.c 	void __iomem *sds_base = ctx->sds_base;
ctx              1259 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Reset PHY\n");
ctx              1273 drivers/phy/phy-xgene.c 		ctx->sata_param.txspeed[ctx->sata_param.speed[0]]);
ctx              1276 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Set the customer pin mode to SATA\n");
ctx              1282 drivers/phy/phy-xgene.c 	xgene_phy_cfg_cmu_clk_type(ctx, PHY_CMU, clk_type);
ctx              1285 drivers/phy/phy-xgene.c 	xgene_phy_sata_cfg_cmu_core(ctx, PHY_CMU, clk_type);
ctx              1289 drivers/phy/phy-xgene.c 		xgene_phy_ssc_enable(ctx, PHY_CMU);
ctx              1292 drivers/phy/phy-xgene.c 	xgene_phy_sata_cfg_lanes(ctx);
ctx              1303 drivers/phy/phy-xgene.c 		if (!xgene_phy_cal_rdy_chk(ctx, PHY_CMU, clk_type))
ctx              1306 drivers/phy/phy-xgene.c 		xgene_phy_pdwn_force_vco(ctx, PHY_CMU, clk_type);
ctx              1310 drivers/phy/phy-xgene.c 		dev_err(ctx->dev, "PLL calibration failed\n");
ctx              1315 drivers/phy/phy-xgene.c static int xgene_phy_hw_initialize(struct xgene_phy_ctx *ctx,
ctx              1321 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "PHY init clk type %d\n", clk_type);
ctx              1323 drivers/phy/phy-xgene.c 	if (ctx->mode == MODE_SATA) {
ctx              1324 drivers/phy/phy-xgene.c 		rc = xgene_phy_hw_init_sata(ctx, clk_type, ssc_enable);
ctx              1328 drivers/phy/phy-xgene.c 		dev_err(ctx->dev, "Un-supported customer pin mode %d\n",
ctx              1329 drivers/phy/phy-xgene.c 			ctx->mode);
ctx              1342 drivers/phy/phy-xgene.c static void xgene_phy_force_lat_summer_cal(struct xgene_phy_ctx *ctx, int lane)
ctx              1370 drivers/phy/phy-xgene.c 	serdes_setbits(ctx, lane, RXTX_REG127,
ctx              1377 drivers/phy/phy-xgene.c 	serdes_clrbits(ctx, lane, RXTX_REG127,
ctx              1386 drivers/phy/phy-xgene.c 	serdes_setbits(ctx, lane, RXTX_REG127,
ctx              1393 drivers/phy/phy-xgene.c 	serdes_clrbits(ctx, lane, RXTX_REG127,
ctx              1397 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG28, 0x7);
ctx              1398 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG31, 0x7e00);
ctx              1399 drivers/phy/phy-xgene.c 	serdes_clrbits(ctx, lane, RXTX_REG4,
ctx              1401 drivers/phy/phy-xgene.c 	serdes_clrbits(ctx, lane, RXTX_REG7,
ctx              1404 drivers/phy/phy-xgene.c 		serdes_wr(ctx, lane, serdes_reg[i].reg,
ctx              1408 drivers/phy/phy-xgene.c static void xgene_phy_reset_rxd(struct xgene_phy_ctx *ctx, int lane)
ctx              1411 drivers/phy/phy-xgene.c 	serdes_clrbits(ctx, lane, RXTX_REG7, RXTX_REG7_RESETB_RXD_MASK);
ctx              1414 drivers/phy/phy-xgene.c 	serdes_setbits(ctx, lane, RXTX_REG7, RXTX_REG7_RESETB_RXD_MASK);
ctx              1422 drivers/phy/phy-xgene.c static void xgene_phy_gen_avg_val(struct xgene_phy_ctx *ctx, int lane)
ctx              1436 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Generating avg calibration value for lane %d\n",
ctx              1440 drivers/phy/phy-xgene.c 	serdes_setbits(ctx, lane, RXTX_REG12,
ctx              1443 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG28, 0x0000);
ctx              1445 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG31, 0x0000);
ctx              1456 drivers/phy/phy-xgene.c 		xgene_phy_force_lat_summer_cal(ctx, lane);
ctx              1458 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG21, &val);
ctx              1463 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG22, &val);
ctx              1468 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG23, &val);
ctx              1472 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG24, &val);
ctx              1476 drivers/phy/phy-xgene.c 		serdes_rd(ctx, lane, RXTX_REG121, &val);
ctx              1492 drivers/phy/phy-xgene.c 			dev_dbg(ctx->dev, "Iteration %d:\n", avg_loop);
ctx              1493 drivers/phy/phy-xgene.c 			dev_dbg(ctx->dev, "DO 0x%x XO 0x%x EO 0x%x SO 0x%x\n",
ctx              1496 drivers/phy/phy-xgene.c 			dev_dbg(ctx->dev, "DE 0x%x XE 0x%x EE 0x%x SE 0x%x\n",
ctx              1499 drivers/phy/phy-xgene.c 			dev_dbg(ctx->dev, "SUM 0x%x\n", sum_cal_itr);
ctx              1502 drivers/phy/phy-xgene.c 			dev_err(ctx->dev,
ctx              1506 drivers/phy/phy-xgene.c 		xgene_phy_reset_rxd(ctx, lane);
ctx              1510 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG127, &val);
ctx              1515 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG127, val);
ctx              1517 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG128, &val);
ctx              1522 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG128, val);
ctx              1524 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG129, &val);
ctx              1529 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG129, val);
ctx              1531 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG130, &val);
ctx              1536 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG130, val);
ctx              1539 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG14, &val);
ctx              1542 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG14, val);
ctx              1544 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Average Value:\n");
ctx              1545 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "DO 0x%x XO 0x%x EO 0x%x SO 0x%x\n",
ctx              1550 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "DE 0x%x XE 0x%x EE 0x%x SE 0x%x\n",
ctx              1555 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "SUM 0x%x\n",
ctx              1558 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG14, &val);
ctx              1560 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG14, val);
ctx              1561 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Enable Manual Summer calibration\n");
ctx              1563 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG127, &val);
ctx              1565 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "Enable Manual Latch calibration\n");
ctx              1566 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG127, val);
ctx              1569 drivers/phy/phy-xgene.c 	serdes_rd(ctx, lane, RXTX_REG12, &val);
ctx              1571 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG12, val);
ctx              1573 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG28, 0x0007);
ctx              1575 drivers/phy/phy-xgene.c 	serdes_wr(ctx, lane, RXTX_REG31, 0x7e00);
ctx              1580 drivers/phy/phy-xgene.c 	struct xgene_phy_ctx *ctx = phy_get_drvdata(phy);
ctx              1584 drivers/phy/phy-xgene.c 	rc = xgene_phy_hw_initialize(ctx, CLK_EXT_DIFF, SSC_DISABLE);
ctx              1586 drivers/phy/phy-xgene.c 		dev_err(ctx->dev, "PHY initialize failed %d\n", rc);
ctx              1591 drivers/phy/phy-xgene.c 	if (!IS_ERR(ctx->clk)) {
ctx              1593 drivers/phy/phy-xgene.c 		clk_prepare_enable(ctx->clk);
ctx              1594 drivers/phy/phy-xgene.c 		clk_disable_unprepare(ctx->clk);
ctx              1595 drivers/phy/phy-xgene.c 		clk_prepare_enable(ctx->clk);
ctx              1600 drivers/phy/phy-xgene.c 		xgene_phy_gen_avg_val(ctx, i);
ctx              1602 drivers/phy/phy-xgene.c 	dev_dbg(ctx->dev, "PHY initialized\n");
ctx              1614 drivers/phy/phy-xgene.c 	struct xgene_phy_ctx *ctx = dev_get_drvdata(dev);
ctx              1621 drivers/phy/phy-xgene.c 	ctx->mode = args->args[0];
ctx              1622 drivers/phy/phy-xgene.c 	return ctx->phy;
ctx              1646 drivers/phy/phy-xgene.c 	struct xgene_phy_ctx *ctx;
ctx              1658 drivers/phy/phy-xgene.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              1659 drivers/phy/phy-xgene.c 	if (!ctx)
ctx              1662 drivers/phy/phy-xgene.c 	ctx->dev = &pdev->dev;
ctx              1665 drivers/phy/phy-xgene.c 	ctx->sds_base = devm_ioremap_resource(&pdev->dev, res);
ctx              1666 drivers/phy/phy-xgene.c 	if (IS_ERR(ctx->sds_base))
ctx              1667 drivers/phy/phy-xgene.c 		return PTR_ERR(ctx->sds_base);
ctx              1670 drivers/phy/phy-xgene.c 	ctx->clk = clk_get(&pdev->dev, NULL);
ctx              1674 drivers/phy/phy-xgene.c 		ctx->sata_param.txeyetuning, 6, default_txeye_tuning, 1);
ctx              1676 drivers/phy/phy-xgene.c 		ctx->sata_param.txeyedirection, 6, default_txeye_direction, 1);
ctx              1678 drivers/phy/phy-xgene.c 		ctx->sata_param.txboostgain, 6, default_txboost_gain, 1);
ctx              1680 drivers/phy/phy-xgene.c 		ctx->sata_param.txamplitude, 6, default_txamp, 13300);
ctx              1682 drivers/phy/phy-xgene.c 		ctx->sata_param.txprecursor_cn1, 6, default_txcn1, 18200);
ctx              1684 drivers/phy/phy-xgene.c 		ctx->sata_param.txprecursor_cn2, 6, default_txcn2, 18200);
ctx              1686 drivers/phy/phy-xgene.c 		ctx->sata_param.txpostcursor_cp1, 6, default_txcp1, 18200);
ctx              1688 drivers/phy/phy-xgene.c 		ctx->sata_param.txspeed, 3, default_spd, 1);
ctx              1690 drivers/phy/phy-xgene.c 		ctx->sata_param.speed[i] = 2; /* Default to Gen3 */
ctx              1692 drivers/phy/phy-xgene.c 	platform_set_drvdata(pdev, ctx);
ctx              1694 drivers/phy/phy-xgene.c 	ctx->phy = devm_phy_create(ctx->dev, NULL, &xgene_phy_ops);
ctx              1695 drivers/phy/phy-xgene.c 	if (IS_ERR(ctx->phy)) {
ctx              1697 drivers/phy/phy-xgene.c 		return PTR_ERR(ctx->phy);
ctx              1699 drivers/phy/phy-xgene.c 	phy_set_drvdata(ctx->phy, ctx);
ctx              1701 drivers/phy/phy-xgene.c 	phy_provider = devm_of_phy_provider_register(ctx->dev, xgene_phy_xlate);
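The probe path above is the canonical context bring-up: devm-allocate the ctx, map registers into it, then park it in both the platform drvdata and the created phy's drvdata so later entry points (xlate, init) can get it back. A trimmed hypothetical probe showing just that skeleton:

#include <linux/platform_device.h>
#include <linux/io.h>

struct example_ctx {			/* hypothetical driver state */
	struct device *dev;
	void __iomem *base;
};

static int example_probe(struct platform_device *pdev)
{
	struct example_ctx *ctx;

	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	ctx->dev = &pdev->dev;
	ctx->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ctx->base))
		return PTR_ERR(ctx->base);

	platform_set_drvdata(pdev, ctx);	/* dev_get_drvdata() in xlate */
	return 0;
}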
ctx              2544 drivers/pinctrl/aspeed/pinctrl-aspeed-g4.c static int aspeed_g4_sig_expr_set(struct aspeed_pinmux_data *ctx,
ctx              2556 drivers/pinctrl/aspeed/pinctrl-aspeed-g4.c 		if (!ctx->maps[desc->ip])
ctx              2580 drivers/pinctrl/aspeed/pinctrl-aspeed-g4.c 		ret = regmap_update_bits(ctx->maps[desc->ip], desc->reg,
ctx              2587 drivers/pinctrl/aspeed/pinctrl-aspeed-g4.c 	ret = aspeed_sig_expr_eval(ctx, expr, enable);
ctx              2617 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c static struct regmap *aspeed_g5_acquire_regmap(struct aspeed_pinmux_data *ctx,
ctx              2621 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		WARN(!ctx->maps[ip], "Missing SCU syscon!");
ctx              2622 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		return ctx->maps[ip];
ctx              2628 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 	if (likely(ctx->maps[ip]))
ctx              2629 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		return ctx->maps[ip];
ctx              2635 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		node = of_parse_phandle(ctx->dev->of_node,
ctx              2645 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		ctx->maps[ASPEED_IP_GFX] = map;
ctx              2646 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		dev_dbg(ctx->dev, "Acquired GFX regmap");
ctx              2654 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		node = of_parse_phandle(ctx->dev->of_node,
ctx              2664 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		ctx->maps[ASPEED_IP_LPC] = map;
ctx              2665 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		dev_dbg(ctx->dev, "Acquired LPC regmap");
ctx              2672 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c static int aspeed_g5_sig_expr_eval(struct aspeed_pinmux_data *ctx,
ctx              2683 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		map = aspeed_g5_acquire_regmap(ctx, desc->ip);
ctx              2685 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 			dev_err(ctx->dev,
ctx              2691 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		ret = aspeed_sig_desc_eval(desc, enabled, ctx->maps[desc->ip]);
ctx              2712 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c static int aspeed_g5_sig_expr_set(struct aspeed_pinmux_data *ctx,
ctx              2725 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		map = aspeed_g5_acquire_regmap(ctx, desc->ip);
ctx              2727 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 			dev_err(ctx->dev,
ctx              2759 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 				ret = regmap_write(ctx->maps[desc->ip],
ctx              2766 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 		ret = regmap_update_bits(ctx->maps[desc->ip], desc->reg,
ctx              2773 drivers/pinctrl/aspeed/pinctrl-aspeed-g5.c 	ret = aspeed_sig_expr_eval(ctx, expr, enable);
ctx              2246 drivers/pinctrl/aspeed/pinctrl-aspeed-g6.c static int aspeed_g6_sig_expr_set(struct aspeed_pinmux_data *ctx,
ctx              2259 drivers/pinctrl/aspeed/pinctrl-aspeed-g6.c 		if (!ctx->maps[desc->ip])
ctx              2280 drivers/pinctrl/aspeed/pinctrl-aspeed-g6.c 				ret = regmap_update_bits(ctx->maps[desc->ip],
ctx              2285 drivers/pinctrl/aspeed/pinctrl-aspeed-g6.c 		ret = regmap_update_bits(ctx->maps[desc->ip], desc->reg,
ctx              2291 drivers/pinctrl/aspeed/pinctrl-aspeed-g6.c 	ret = aspeed_sig_expr_eval(ctx, expr, enable);
ctx                74 drivers/pinctrl/aspeed/pinctrl-aspeed.c static int aspeed_sig_expr_enable(struct aspeed_pinmux_data *ctx,
ctx                79 drivers/pinctrl/aspeed/pinctrl-aspeed.c 	ret = aspeed_sig_expr_eval(ctx, expr, true);
ctx                84 drivers/pinctrl/aspeed/pinctrl-aspeed.c 		return aspeed_sig_expr_set(ctx, expr, true);
ctx                89 drivers/pinctrl/aspeed/pinctrl-aspeed.c static int aspeed_sig_expr_disable(struct aspeed_pinmux_data *ctx,
ctx                94 drivers/pinctrl/aspeed/pinctrl-aspeed.c 	ret = aspeed_sig_expr_eval(ctx, expr, true);
ctx                99 drivers/pinctrl/aspeed/pinctrl-aspeed.c 		return aspeed_sig_expr_set(ctx, expr, false);
ctx               112 drivers/pinctrl/aspeed/pinctrl-aspeed.c static int aspeed_disable_sig(struct aspeed_pinmux_data *ctx,
ctx               121 drivers/pinctrl/aspeed/pinctrl-aspeed.c 		ret = aspeed_sig_expr_disable(ctx, *exprs);
ctx                81 drivers/pinctrl/aspeed/pinmux-aspeed.c int aspeed_sig_expr_eval(struct aspeed_pinmux_data *ctx,
ctx                87 drivers/pinctrl/aspeed/pinmux-aspeed.c 	if (ctx->ops->eval)
ctx                88 drivers/pinctrl/aspeed/pinmux-aspeed.c 		return ctx->ops->eval(ctx, expr, enabled);
ctx                93 drivers/pinctrl/aspeed/pinmux-aspeed.c 		ret = aspeed_sig_desc_eval(desc, enabled, ctx->maps[desc->ip]);
ctx               779 drivers/pinctrl/aspeed/pinmux-aspeed.h 	int (*eval)(struct aspeed_pinmux_data *ctx,
ctx               781 drivers/pinctrl/aspeed/pinmux-aspeed.h 	int (*set)(struct aspeed_pinmux_data *ctx,
ctx               801 drivers/pinctrl/aspeed/pinmux-aspeed.h int aspeed_sig_expr_eval(struct aspeed_pinmux_data *ctx,
ctx               804 drivers/pinctrl/aspeed/pinmux-aspeed.h static inline int aspeed_sig_expr_set(struct aspeed_pinmux_data *ctx,
ctx               808 drivers/pinctrl/aspeed/pinmux-aspeed.h 	return ctx->ops->set(ctx, expr, enabled);
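The header above completes the Aspeed picture: set() always dispatches through the per-SoC ops table on the ctx, while eval() (in pinmux-aspeed.c) tries the table first and falls back to the generic walk, which is how the g5 variant injects its lazily acquired regmaps. The idiom, reduced to hypothetical names:

struct example_ops {
	int (*eval)(void *ctx, int expr, bool enabled);	/* optional hook */
};

struct example_ctx {
	const struct example_ops *ops;
};

static int example_generic_eval(struct example_ctx *ctx, int expr,
				bool enabled)
{
	return 0;			/* common evaluation path */
}

static int example_eval(struct example_ctx *ctx, int expr, bool enabled)
{
	if (ctx->ops->eval)		/* SoC-specific hook wins */
		return ctx->ops->eval(ctx, expr, enabled);
	return example_generic_eval(ctx, expr, enabled);
}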
ctx              1769 drivers/pinctrl/intel/pinctrl-cherryview.c 		struct chv_pin_context *ctx;
ctx              1776 drivers/pinctrl/intel/pinctrl-cherryview.c 		ctx = &pctrl->saved_pin_context[i];
ctx              1779 drivers/pinctrl/intel/pinctrl-cherryview.c 		ctx->padctrl0 = readl(reg) & ~CHV_PADCTRL0_GPIORXSTATE;
ctx              1782 drivers/pinctrl/intel/pinctrl-cherryview.c 		ctx->padctrl1 = readl(reg);
ctx              1807 drivers/pinctrl/intel/pinctrl-cherryview.c 		const struct chv_pin_context *ctx;
ctx              1815 drivers/pinctrl/intel/pinctrl-cherryview.c 		ctx = &pctrl->saved_pin_context[i];
ctx              1820 drivers/pinctrl/intel/pinctrl-cherryview.c 		if (ctx->padctrl0 != val) {
ctx              1821 drivers/pinctrl/intel/pinctrl-cherryview.c 			chv_writel(ctx->padctrl0, reg);
ctx              1828 drivers/pinctrl/intel/pinctrl-cherryview.c 		if (ctx->padctrl1 != val) {
ctx              1829 drivers/pinctrl/intel/pinctrl-cherryview.c 			chv_writel(ctx->padctrl1, reg);
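cherryview's suspend/resume pair shows the register-context idiom with two refinements: the volatile GPIORXSTATE bit is masked out of the saved image, and resume writes a register only when the live value differs. A generic sketch; the offsets and mask are hypothetical:

#include <linux/io.h>

struct example_pad_ctx {
	u32 ctrl0;
	u32 ctrl1;
};

static void example_pad_save(void __iomem *reg, struct example_pad_ctx *ctx,
			     u32 volatile_mask)
{
	ctx->ctrl0 = readl(reg) & ~volatile_mask;	/* drop live RX state */
	ctx->ctrl1 = readl(reg + 4);
}

static void example_pad_restore(void __iomem *reg,
				const struct example_pad_ctx *ctx,
				u32 volatile_mask)
{
	if ((readl(reg) & ~volatile_mask) != ctx->ctrl0)
		writel(ctx->ctrl0, reg);		/* only if it drifted */
	if (readl(reg + 4) != ctx->ctrl1)
		writel(ctx->ctrl1, reg + 4);
}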
ctx               919 drivers/platform/mellanox/mlxbf-tmfifo.c 					const bool *ctx,
ctx              1763 drivers/platform/x86/mlx-platform.c 	struct mlxplat_mlxcpld_regmap_context *ctx = context;
ctx              1765 drivers/platform/x86/mlx-platform.c 	*val = ioread8(ctx->base + reg);
ctx              1772 drivers/platform/x86/mlx-platform.c 	struct mlxplat_mlxcpld_regmap_context *ctx = context;
ctx              1774 drivers/platform/x86/mlx-platform.c 	iowrite8(val, ctx->base + reg);
ctx                39 drivers/power/reset/ocelot-reset.c 	struct ocelot_reset_context *ctx = container_of(this, struct
ctx                44 drivers/power/reset/ocelot-reset.c 	regmap_update_bits(ctx->cpu_ctrl, ICPU_CFG_CPU_SYSTEM_CTRL_RESET,
ctx                48 drivers/power/reset/ocelot-reset.c 	regmap_update_bits(ctx->cpu_ctrl, ICPU_CFG_CPU_SYSTEM_CTRL_GENERAL_CTRL,
ctx                52 drivers/power/reset/ocelot-reset.c 	writel(SOFT_CHIP_RST, ctx->base);
ctx                60 drivers/power/reset/ocelot-reset.c 	struct ocelot_reset_context *ctx;
ctx                66 drivers/power/reset/ocelot-reset.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx                67 drivers/power/reset/ocelot-reset.c 	if (!ctx)
ctx                71 drivers/power/reset/ocelot-reset.c 	ctx->base = devm_ioremap_resource(dev, res);
ctx                72 drivers/power/reset/ocelot-reset.c 	if (IS_ERR(ctx->base))
ctx                73 drivers/power/reset/ocelot-reset.c 		return PTR_ERR(ctx->base);
ctx                75 drivers/power/reset/ocelot-reset.c 	ctx->cpu_ctrl = syscon_regmap_lookup_by_compatible("mscc,ocelot-cpu-syscon");
ctx                76 drivers/power/reset/ocelot-reset.c 	if (IS_ERR(ctx->cpu_ctrl))
ctx                77 drivers/power/reset/ocelot-reset.c 		return PTR_ERR(ctx->cpu_ctrl);
ctx                79 drivers/power/reset/ocelot-reset.c 	ctx->restart_handler.notifier_call = ocelot_restart_handle;
ctx                80 drivers/power/reset/ocelot-reset.c 	ctx->restart_handler.priority = 192;
ctx                81 drivers/power/reset/ocelot-reset.c 	err = register_restart_handler(&ctx->restart_handler);
ctx                29 drivers/power/reset/syscon-reboot.c 	struct syscon_reboot_context *ctx =
ctx                34 drivers/power/reset/syscon-reboot.c 	regmap_update_bits(ctx->map, ctx->offset, ctx->mask, ctx->value);
ctx                44 drivers/power/reset/syscon-reboot.c 	struct syscon_reboot_context *ctx;
ctx                49 drivers/power/reset/syscon-reboot.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx                50 drivers/power/reset/syscon-reboot.c 	if (!ctx)
ctx                53 drivers/power/reset/syscon-reboot.c 	ctx->map = syscon_regmap_lookup_by_phandle(dev->of_node, "regmap");
ctx                54 drivers/power/reset/syscon-reboot.c 	if (IS_ERR(ctx->map))
ctx                55 drivers/power/reset/syscon-reboot.c 		return PTR_ERR(ctx->map);
ctx                57 drivers/power/reset/syscon-reboot.c 	if (of_property_read_u32(pdev->dev.of_node, "offset", &ctx->offset))
ctx                60 drivers/power/reset/syscon-reboot.c 	value_err = of_property_read_u32(pdev->dev.of_node, "value", &ctx->value);
ctx                61 drivers/power/reset/syscon-reboot.c 	mask_err = of_property_read_u32(pdev->dev.of_node, "mask", &ctx->mask);
ctx                69 drivers/power/reset/syscon-reboot.c 		ctx->value = ctx->mask;
ctx                70 drivers/power/reset/syscon-reboot.c 		ctx->mask = 0xFFFFFFFF;
ctx                73 drivers/power/reset/syscon-reboot.c 		ctx->mask = 0xFFFFFFFF;
ctx                76 drivers/power/reset/syscon-reboot.c 	ctx->restart_handler.notifier_call = syscon_restart_handle;
ctx                77 drivers/power/reset/syscon-reboot.c 	ctx->restart_handler.priority = 192;
ctx                78 drivers/power/reset/syscon-reboot.c 	err = register_restart_handler(&ctx->restart_handler);
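syscon-reboot (and xgene-reboot below) embed the notifier_block inside the context and recover it with container_of() when the handler fires, so no global state is needed. A self-contained sketch of that registration; the names are hypothetical:

#include <linux/notifier.h>
#include <linux/reboot.h>
#include <linux/regmap.h>

struct example_reboot_ctx {
	struct regmap *map;
	u32 offset, mask, value;
	struct notifier_block restart_handler;
};

static int example_restart(struct notifier_block *nb, unsigned long mode,
			   void *cmd)
{
	struct example_reboot_ctx *ctx =
		container_of(nb, struct example_reboot_ctx, restart_handler);

	regmap_update_bits(ctx->map, ctx->offset, ctx->mask, ctx->value);
	return NOTIFY_DONE;
}

static int example_register(struct example_reboot_ctx *ctx)
{
	ctx->restart_handler.notifier_call = example_restart;
	ctx->restart_handler.priority = 192;	/* ahead of the default 128 */
	return register_restart_handler(&ctx->restart_handler);
}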
ctx                33 drivers/power/reset/xgene-reboot.c 	struct xgene_reboot_context *ctx =
ctx                38 drivers/power/reset/xgene-reboot.c 	writel(ctx->mask, ctx->csr);
ctx                42 drivers/power/reset/xgene-reboot.c 	dev_emerg(ctx->dev, "Unable to restart system\n");
ctx                49 drivers/power/reset/xgene-reboot.c 	struct xgene_reboot_context *ctx;
ctx                53 drivers/power/reset/xgene-reboot.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx                54 drivers/power/reset/xgene-reboot.c 	if (!ctx)
ctx                57 drivers/power/reset/xgene-reboot.c 	ctx->csr = of_iomap(dev->of_node, 0);
ctx                58 drivers/power/reset/xgene-reboot.c 	if (!ctx->csr) {
ctx                63 drivers/power/reset/xgene-reboot.c 	if (of_property_read_u32(dev->of_node, "mask", &ctx->mask))
ctx                64 drivers/power/reset/xgene-reboot.c 		ctx->mask = 0xFFFFFFFF;
ctx                66 drivers/power/reset/xgene-reboot.c 	ctx->dev = dev;
ctx                67 drivers/power/reset/xgene-reboot.c 	ctx->restart_handler.notifier_call = xgene_restart_handler;
ctx                68 drivers/power/reset/xgene-reboot.c 	ctx->restart_handler.priority = 128;
ctx                69 drivers/power/reset/xgene-reboot.c 	err = register_restart_handler(&ctx->restart_handler);
ctx                71 drivers/power/reset/xgene-reboot.c 		iounmap(ctx->csr);
ctx                34 drivers/ptp/ptp_kvm.c 			       void *ctx)
ctx                38 drivers/pwm/pwm-tiecap.c 	struct ecap_context ctx;
ctx               263 drivers/pwm/pwm-tiecap.c 	pc->ctx.ecctl2 = readw(pc->mmio_base + ECCTL2);
ctx               264 drivers/pwm/pwm-tiecap.c 	pc->ctx.cap4 = readl(pc->mmio_base + CAP4);
ctx               265 drivers/pwm/pwm-tiecap.c 	pc->ctx.cap3 = readl(pc->mmio_base + CAP3);
ctx               271 drivers/pwm/pwm-tiecap.c 	writel(pc->ctx.cap3, pc->mmio_base + CAP3);
ctx               272 drivers/pwm/pwm-tiecap.c 	writel(pc->ctx.cap4, pc->mmio_base + CAP4);
ctx               273 drivers/pwm/pwm-tiecap.c 	writew(pc->ctx.ecctl2, pc->mmio_base + ECCTL2);
ctx               114 drivers/pwm/pwm-tiehrpwm.c 	struct ehrpwm_context ctx;
ctx               509 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.tbctl = ehrpwm_read(pc->mmio_base, TBCTL);
ctx               510 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.tbprd = ehrpwm_read(pc->mmio_base, TBPRD);
ctx               511 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.cmpa = ehrpwm_read(pc->mmio_base, CMPA);
ctx               512 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.cmpb = ehrpwm_read(pc->mmio_base, CMPB);
ctx               513 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.aqctla = ehrpwm_read(pc->mmio_base, AQCTLA);
ctx               514 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.aqctlb = ehrpwm_read(pc->mmio_base, AQCTLB);
ctx               515 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.aqsfrc = ehrpwm_read(pc->mmio_base, AQSFRC);
ctx               516 drivers/pwm/pwm-tiehrpwm.c 	pc->ctx.aqcsfrc = ehrpwm_read(pc->mmio_base, AQCSFRC);
ctx               523 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, TBPRD, pc->ctx.tbprd);
ctx               524 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, CMPA, pc->ctx.cmpa);
ctx               525 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, CMPB, pc->ctx.cmpb);
ctx               526 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, AQCTLA, pc->ctx.aqctla);
ctx               527 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, AQCTLB, pc->ctx.aqctlb);
ctx               528 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, AQSFRC, pc->ctx.aqsfrc);
ctx               529 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, AQCSFRC, pc->ctx.aqcsfrc);
ctx               530 drivers/pwm/pwm-tiehrpwm.c 	ehrpwm_write(pc->mmio_base, TBCTL, pc->ctx.tbctl);
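Both TI PWM drivers keep their ctx struct purely as a suspend/resume register snapshot; note the ordering on restore, where tiehrpwm replays TBCTL last so the time-base restarts only after the period and compare registers are back. The idiom, with hypothetical register offsets:

#include <linux/io.h>

struct example_pwm_ctx {
	u16 ctl;
	u32 period, compare;
};

static void example_pwm_save(void __iomem *base, struct example_pwm_ctx *ctx)
{
	ctx->ctl     = readw(base + 0x00);
	ctx->period  = readl(base + 0x04);
	ctx->compare = readl(base + 0x08);
}

static void example_pwm_restore(void __iomem *base,
				const struct example_pwm_ctx *ctx)
{
	writel(ctx->period,  base + 0x04);
	writel(ctx->compare, base + 0x08);
	writew(ctx->ctl,     base + 0x00);	/* control register last */
}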
ctx                68 drivers/remoteproc/remoteproc_virtio.c 				    const char *name, bool ctx)
ctx               108 drivers/remoteproc/remoteproc_virtio.c 	vq = vring_new_virtqueue(id, len, rvring->align, vdev, false, ctx,
ctx               147 drivers/remoteproc/remoteproc_virtio.c 				 const bool * ctx,
ctx               159 drivers/remoteproc/remoteproc_virtio.c 				    ctx ? ctx[i] : false);
ctx               401 drivers/s390/cio/vfio_ccw_ops.c 	struct eventfd_ctx **ctx;
ctx               407 drivers/s390/cio/vfio_ccw_ops.c 	ctx = &private->io_trigger;
ctx               412 drivers/s390/cio/vfio_ccw_ops.c 		if (*ctx)
ctx               413 drivers/s390/cio/vfio_ccw_ops.c 			eventfd_signal(*ctx, 1);
ctx               423 drivers/s390/cio/vfio_ccw_ops.c 		if (trigger && *ctx)
ctx               424 drivers/s390/cio/vfio_ccw_ops.c 			eventfd_signal(*ctx, 1);
ctx               435 drivers/s390/cio/vfio_ccw_ops.c 			if (*ctx)
ctx               436 drivers/s390/cio/vfio_ccw_ops.c 				eventfd_ctx_put(*ctx);
ctx               437 drivers/s390/cio/vfio_ccw_ops.c 			*ctx = NULL;
ctx               445 drivers/s390/cio/vfio_ccw_ops.c 			if (*ctx)
ctx               446 drivers/s390/cio/vfio_ccw_ops.c 				eventfd_ctx_put(*ctx);
ctx               448 drivers/s390/cio/vfio_ccw_ops.c 			*ctx = efdctx;
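vfio_ccw manages its trigger through a struct eventfd_ctx **: signal it if armed, and on re-configuration drop the old reference before installing the new one. A sketch of both halves, assuming the eventfd_signal(ctx, n) signature this tree uses:

#include <linux/eventfd.h>

static int example_set_trigger(struct eventfd_ctx **ctx, int fd)
{
	struct eventfd_ctx *efdctx = eventfd_ctx_fdget(fd);

	if (IS_ERR(efdctx))
		return PTR_ERR(efdctx);
	if (*ctx)
		eventfd_ctx_put(*ctx);		/* release the old trigger */
	*ctx = efdctx;
	return 0;
}

static void example_fire_trigger(struct eventfd_ctx **ctx)
{
	if (*ctx)
		eventfd_signal(*ctx, 1);	/* wake the userspace waiter */
}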
ctx               513 drivers/s390/virtio/virtio_ccw.c 					     const char *name, bool ctx,
ctx               545 drivers/s390/virtio/virtio_ccw.c 				    vdev, true, may_reduce, ctx,
ctx               656 drivers/s390/virtio/virtio_ccw.c 			       const bool *ctx,
ctx               675 drivers/s390/virtio/virtio_ccw.c 					     names[i], ctx ? ctx[i] : false,
ctx               223 drivers/sbus/char/oradax.c static int dax_ccb_exec(struct dax_ctx *ctx, const char __user *buf,
ctx               374 drivers/sbus/char/oradax.c 	struct dax_ctx *ctx = (struct dax_ctx *)f->private_data;
ctx               379 drivers/sbus/char/oradax.c 	if (ctx->owner != current) {
ctx               394 drivers/sbus/char/oradax.c 	if (remap_pfn_range(vma, vma->vm_start, ctx->ca_buf_ra >> PAGE_SHIFT,
ctx               403 drivers/sbus/char/oradax.c static void dax_unlock_pages(struct dax_ctx *ctx, int ccb_index, int nelem)
ctx               409 drivers/sbus/char/oradax.c 			struct page *p = ctx->pages[i][j];
ctx               416 drivers/sbus/char/oradax.c 				ctx->pages[i][j] = NULL;
ctx               438 drivers/sbus/char/oradax.c static int dax_lock_pages(struct dax_ctx *ctx, int idx,
ctx               444 drivers/sbus/char/oradax.c 		struct dax_ccb *ccbp = &ctx->ccb_buf[i];
ctx               455 drivers/sbus/char/oradax.c 					  &ctx->pages[i + idx][OUT]) != 0) {
ctx               465 drivers/sbus/char/oradax.c 					  &ctx->pages[i + idx][PRI]) != 0) {
ctx               475 drivers/sbus/char/oradax.c 					  &ctx->pages[i + idx][SEC]) != 0) {
ctx               485 drivers/sbus/char/oradax.c 					  &ctx->pages[i + idx][TBL]) != 0) {
ctx               499 drivers/sbus/char/oradax.c 	dax_unlock_pages(ctx, idx, nelem);
ctx               503 drivers/sbus/char/oradax.c static void dax_ccb_wait(struct dax_ctx *ctx, int idx)
ctx               511 drivers/sbus/char/oradax.c 		if (ctx->ca_buf[idx].status == CCA_STAT_NOT_COMPLETED)
ctx               517 drivers/sbus/char/oradax.c 		(void *)ctx, idx, DAX_CCB_USEC, DAX_CCB_RETRIES);
ctx               519 drivers/sbus/char/oradax.c 	ret = dax_ccb_kill(ctx->ca_buf_ra + idx * sizeof(struct dax_cca),
ctx               526 drivers/sbus/char/oradax.c 	struct dax_ctx *ctx = (struct dax_ctx *)f->private_data;
ctx               532 drivers/sbus/char/oradax.c 		if (ctx->ca_buf[i].status == CCA_STAT_NOT_COMPLETED) {
ctx               534 drivers/sbus/char/oradax.c 			dax_ccb_wait(ctx, i);
ctx               536 drivers/sbus/char/oradax.c 		dax_unlock_pages(ctx, i, 1);
ctx               539 drivers/sbus/char/oradax.c 	kfree(ctx->ccb_buf);
ctx               540 drivers/sbus/char/oradax.c 	kfree(ctx->ca_buf);
ctx               541 drivers/sbus/char/oradax.c 	dax_stat_dbg("CCBs: %d good, %d bad", ctx->ccb_count, ctx->fail_count);
ctx               542 drivers/sbus/char/oradax.c 	kfree(ctx);
ctx               550 drivers/sbus/char/oradax.c 	struct dax_ctx *ctx = f->private_data;
ctx               552 drivers/sbus/char/oradax.c 	if (ctx->client != current)
ctx               555 drivers/sbus/char/oradax.c 	ctx->client = NULL;
ctx               559 drivers/sbus/char/oradax.c 	if (copy_to_user(buf, &ctx->result, sizeof(union ccb_result)))
ctx               567 drivers/sbus/char/oradax.c 	struct dax_ctx *ctx = f->private_data;
ctx               572 drivers/sbus/char/oradax.c 	if (ctx->client != NULL)
ctx               579 drivers/sbus/char/oradax.c 		return dax_ccb_exec(ctx, buf, count, ppos); /* CCB EXEC */
ctx               585 drivers/sbus/char/oradax.c 	if (ctx->owner != current)
ctx               591 drivers/sbus/char/oradax.c 	ca = ctx->ca_buf_ra + hdr.ca_offset;
ctx               601 drivers/sbus/char/oradax.c 		ret = dax_ccb_kill(ca, &ctx->result.kill.action);
ctx               609 drivers/sbus/char/oradax.c 		ctx->ca_buf[idx].status = CCA_STAT_KILLED;
ctx               610 drivers/sbus/char/oradax.c 		ctx->ca_buf[idx].err = CCA_ERR_KILLED;
ctx               611 drivers/sbus/char/oradax.c 		ctx->client = current;
ctx               621 drivers/sbus/char/oradax.c 		ret = dax_ccb_info(ca, &ctx->result.info);
ctx               628 drivers/sbus/char/oradax.c 		ctx->client = current;
ctx               633 drivers/sbus/char/oradax.c 			if (ctx->ca_buf[i].status !=
ctx               635 drivers/sbus/char/oradax.c 				dax_unlock_pages(ctx, i, 1);
ctx               646 drivers/sbus/char/oradax.c 	struct dax_ctx *ctx = NULL;
ctx               649 drivers/sbus/char/oradax.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               650 drivers/sbus/char/oradax.c 	if (ctx == NULL)
ctx               653 drivers/sbus/char/oradax.c 	ctx->ccb_buf = kcalloc(DAX_MAX_CCBS, sizeof(struct dax_ccb),
ctx               655 drivers/sbus/char/oradax.c 	if (ctx->ccb_buf == NULL)
ctx               658 drivers/sbus/char/oradax.c 	ctx->ccb_buf_ra = virt_to_phys(ctx->ccb_buf);
ctx               660 drivers/sbus/char/oradax.c 		(void *)ctx->ccb_buf, ctx->ccb_buf_ra);
ctx               663 drivers/sbus/char/oradax.c 	ctx->ca_buf = kzalloc(DAX_MMAP_LEN, GFP_KERNEL);
ctx               664 drivers/sbus/char/oradax.c 	if (ctx->ca_buf == NULL)
ctx               667 drivers/sbus/char/oradax.c 		ctx->ca_buf[i].status = CCA_STAT_COMPLETED;
ctx               669 drivers/sbus/char/oradax.c 	ctx->ca_buf_ra = virt_to_phys(ctx->ca_buf);
ctx               671 drivers/sbus/char/oradax.c 		(void *)ctx, (void *)ctx->ca_buf, ctx->ca_buf_ra);
ctx               673 drivers/sbus/char/oradax.c 	ctx->owner = current;
ctx               674 drivers/sbus/char/oradax.c 	f->private_data = ctx;
ctx               678 drivers/sbus/char/oradax.c 	kfree(ctx->ccb_buf);
ctx               680 drivers/sbus/char/oradax.c 	kfree(ctx);
ctx               779 drivers/sbus/char/oradax.c static int dax_preprocess_usr_ccbs(struct dax_ctx *ctx, int idx, int nelem)
ctx               790 drivers/sbus/char/oradax.c 		struct dax_ccb *ccbp = &ctx->ccb_buf[i];
ctx               838 drivers/sbus/char/oradax.c 		ccbp->ca = (void *)ctx->ca_buf_ra + ca_offset;
ctx               839 drivers/sbus/char/oradax.c 		memset(&ctx->ca_buf[idx + i], 0, sizeof(struct dax_cca));
ctx               842 drivers/sbus/char/oradax.c 			i, ccbp, ca_offset, ctx->ca_buf_ra + ca_offset);
ctx               852 drivers/sbus/char/oradax.c static int dax_ccb_exec(struct dax_ctx *ctx, const char __user *buf,
ctx               858 drivers/sbus/char/oradax.c 	ctx->client = current;
ctx               862 drivers/sbus/char/oradax.c 	if (ctx->owner != current) {
ctx               864 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_THR_INIT;
ctx               871 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;
ctx               879 drivers/sbus/char/oradax.c 	if (copy_from_user(ctx->ccb_buf, buf, count)) {
ctx               881 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_CCB_ARR_MMU_MISS;
ctx               887 drivers/sbus/char/oradax.c 		if (ctx->ca_buf[i].status == CCA_STAT_NOT_COMPLETED) {
ctx               889 drivers/sbus/char/oradax.c 			ctx->result.exec.status = DAX_SUBMIT_ERR_NO_CA_AVAIL;
ctx               893 drivers/sbus/char/oradax.c 	dax_unlock_pages(ctx, idx, nccbs);
ctx               895 drivers/sbus/char/oradax.c 	ctx->result.exec.status = dax_preprocess_usr_ccbs(ctx, idx, nccbs);
ctx               896 drivers/sbus/char/oradax.c 	if (ctx->result.exec.status != DAX_SUBMIT_OK)
ctx               899 drivers/sbus/char/oradax.c 	ctx->result.exec.status = dax_lock_pages(ctx, idx, nccbs,
ctx               900 drivers/sbus/char/oradax.c 						 &ctx->result.exec.status_data);
ctx               901 drivers/sbus/char/oradax.c 	if (ctx->result.exec.status != DAX_SUBMIT_OK)
ctx               905 drivers/sbus/char/oradax.c 		dax_prt_ccbs(ctx->ccb_buf, nccbs);
ctx               907 drivers/sbus/char/oradax.c 	hv_rv = sun4v_ccb_submit(ctx->ccb_buf_ra, count,
ctx               909 drivers/sbus/char/oradax.c 				 &accepted_len, &ctx->result.exec.status_data);
ctx               922 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_OK;
ctx               930 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_WOULDBLOCK;
ctx               938 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_NOMAP;
ctx               948 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_CCB_INVAL;
ctx               957 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_NOACCESS;
ctx               966 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_UNAVAIL;
ctx               969 drivers/sbus/char/oradax.c 		ctx->result.exec.status = DAX_SUBMIT_ERR_INTERNAL;
ctx               976 drivers/sbus/char/oradax.c 	dax_unlock_pages(ctx, idx + naccepted, nccbs - naccepted);
ctx               980 drivers/sbus/char/oradax.c 		ctx->ca_buf[i].status = CCA_STAT_COMPLETED;
ctx               982 drivers/sbus/char/oradax.c 	ctx->ccb_count += naccepted;
ctx               983 drivers/sbus/char/oradax.c 	ctx->fail_count += nccbs - naccepted;
ctx               986 drivers/sbus/char/oradax.c 		hv_rv, accepted_len, ctx->result.exec.status_data,
ctx               987 drivers/sbus/char/oradax.c 		ctx->result.exec.status);
ctx               990 drivers/sbus/char/oradax.c 		ctx->client = NULL; /* no read needed to complete protocol */
ctx                38 drivers/scsi/aacraid/src.c 	struct aac_msix_ctx *ctx;
ctx                45 drivers/scsi/aacraid/src.c 	ctx = (struct aac_msix_ctx *)dev_id;
ctx                46 drivers/scsi/aacraid/src.c 	dev = ctx->dev;
ctx                47 drivers/scsi/aacraid/src.c 	vector_no = ctx->vector_no;
ctx              1183 drivers/scsi/aic7xxx/aic7xxx.h 						   ahc_io_ctx_t ctx,
ctx              6038 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_search_untagged_queues(struct ahc_softc *ahc, ahc_io_ctx_t ctx,
ctx              6096 drivers/scsi/aic7xxx/aic7xxx_core.c 			 || (ctx != NULL && ctx != scb->io_ctx))
ctx               211 drivers/scsi/bnx2fc/57xx_hsi_bnx2fc.h 	struct fcoe_abts_info ctx;
ctx               220 drivers/scsi/bnx2fc/57xx_hsi_bnx2fc.h 	struct fcoe_cleanup_info ctx;
ctx               238 drivers/scsi/bnx2fc/57xx_hsi_bnx2fc.h 	struct fcoe_fw_tx_seq_ctx ctx;
ctx              1490 drivers/scsi/bnx2fc/bnx2fc_hwi.c 	task->txwr_rxrd.union_ctx.cleanup.ctx.cleaned_task_id = orig_xid;
ctx              1492 drivers/scsi/bnx2fc/bnx2fc_hwi.c 	task->txwr_rxrd.union_ctx.cleanup.ctx.rolled_tx_seq_cnt = 0;
ctx              1493 drivers/scsi/bnx2fc/bnx2fc_hwi.c 	task->txwr_rxrd.union_ctx.cleanup.ctx.rolled_tx_data_offset = offset;
ctx              1557 drivers/scsi/bnx2fc/bnx2fc_hwi.c 	task->txwr_rxrd.union_ctx.cleanup.ctx.cleaned_task_id = orig_xid;
ctx              1743 drivers/scsi/bnx2fc/bnx2fc_hwi.c 	task->txwr_rxrd.union_ctx.tx_seq.ctx.seq_cnt = 1;
ctx               475 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_act_establish(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx               477 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               571 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_act_open_rpl(struct t3cdev *tdev, struct sk_buff *skb, void *ctx)
ctx               573 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               606 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_peer_close(struct t3cdev *cdev, struct sk_buff *skb, void *ctx)
ctx               608 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               624 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 			    void *ctx)
ctx               626 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               661 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_abort_req(struct t3cdev *cdev, struct sk_buff *skb, void *ctx)
ctx               664 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               708 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_abort_rpl(struct t3cdev *cdev, struct sk_buff *skb, void *ctx)
ctx               711 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               744 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_iscsi_hdr(struct t3cdev *t3dev, struct sk_buff *skb, void *ctx)
ctx               746 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx               848 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c static int do_wr_ack(struct t3cdev *cdev, struct sk_buff *skb, void *ctx)
ctx               850 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct cxgbi_sock *csk = ctx;
ctx              1781 drivers/scsi/cxlflash/main.c 	void *ctx;
ctx              1847 drivers/scsi/cxlflash/main.c 			ctx = hwq->ctx_cookie;
ctx              1850 drivers/scsi/cxlflash/main.c 				reg = cfg->ops->get_irq_objhndl(ctx, j);
ctx              1957 drivers/scsi/cxlflash/main.c 	void *ctx = hwq->ctx_cookie;
ctx              1963 drivers/scsi/cxlflash/main.c 	rc = cfg->ops->allocate_afu_irqs(ctx, num_irqs);
ctx              1971 drivers/scsi/cxlflash/main.c 	rc = cfg->ops->map_afu_irq(ctx, 1, cxlflash_sync_err_irq, hwq,
ctx              1979 drivers/scsi/cxlflash/main.c 	rc = cfg->ops->map_afu_irq(ctx, 2, cxlflash_rrq_irq, hwq,
ctx              1991 drivers/scsi/cxlflash/main.c 	rc = cfg->ops->map_afu_irq(ctx, 3, cxlflash_async_err_irq, hwq,
ctx              2011 drivers/scsi/cxlflash/main.c 	void *ctx;
ctx              2023 drivers/scsi/cxlflash/main.c 		ctx = cfg->ops->get_context(cfg->dev, cfg->afu_cookie);
ctx              2026 drivers/scsi/cxlflash/main.c 		ctx = cfg->ops->dev_context_init(cfg->dev, cfg->afu_cookie);
ctx              2029 drivers/scsi/cxlflash/main.c 	if (IS_ERR_OR_NULL(ctx)) {
ctx              2035 drivers/scsi/cxlflash/main.c 	hwq->ctx_cookie = ctx;
ctx              2039 drivers/scsi/cxlflash/main.c 	cfg->ops->set_master(ctx);
ctx              2043 drivers/scsi/cxlflash/main.c 		rc = cfg->ops->afu_reset(ctx);
ctx              2071 drivers/scsi/cxlflash/main.c 		cfg->ops->release_context(ctx);
ctx              2398 drivers/scsi/cxlflash/main.c int cxlflash_afu_sync(struct afu *afu, ctx_hndl_t ctx, res_hndl_t res, u8 mode)
ctx              2405 drivers/scsi/cxlflash/main.c 		__func__, afu, ctx, res, mode);
ctx              2413 drivers/scsi/cxlflash/main.c 	put_unaligned_be16(ctx, &rcb.cdb[2]);
ctx                51 drivers/scsi/cxlflash/ocxl_hw.c static void ocxlflash_release_mapping(struct ocxlflash_context *ctx)
ctx                53 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->mapping)
ctx                55 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->mapping = NULL;
ctx               129 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               130 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx               132 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_lock(&ctx->state_mutex);
ctx               133 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->state != STARTED) {
ctx               135 drivers/scsi/cxlflash/ocxl_hw.c 			ctx->state);
ctx               136 drivers/scsi/cxlflash/ocxl_hw.c 		mutex_unlock(&ctx->state_mutex);
ctx               139 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_unlock(&ctx->state_mutex);
ctx               141 drivers/scsi/cxlflash/ocxl_hw.c 	return ioremap(ctx->psn_phys, ctx->psn_size);
ctx               161 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               163 drivers/scsi/cxlflash/ocxl_hw.c 	return ctx->pe;
ctx               177 drivers/scsi/cxlflash/ocxl_hw.c static int afu_map_irq(u64 flags, struct ocxlflash_context *ctx, int num,
ctx               180 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               187 drivers/scsi/cxlflash/ocxl_hw.c 	if (num < 0 || num >= ctx->num_irqs) {
ctx               193 drivers/scsi/cxlflash/ocxl_hw.c 	irq = &ctx->irqs[num];
ctx               249 drivers/scsi/cxlflash/ocxl_hw.c static void afu_unmap_irq(u64 flags, struct ocxlflash_context *ctx, int num,
ctx               252 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               256 drivers/scsi/cxlflash/ocxl_hw.c 	if (num < 0 || num >= ctx->num_irqs) {
ctx               261 drivers/scsi/cxlflash/ocxl_hw.c 	irq = &ctx->irqs[num];
ctx               293 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               295 drivers/scsi/cxlflash/ocxl_hw.c 	if (irq < 0 || irq >= ctx->num_irqs)
ctx               298 drivers/scsi/cxlflash/ocxl_hw.c 	return (__force u64)ctx->irqs[irq].vtrig;
ctx               309 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = data;
ctx               311 drivers/scsi/cxlflash/ocxl_hw.c 	spin_lock(&ctx->slock);
ctx               312 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->fault_addr = addr;
ctx               313 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->fault_dsisr = dsisr;
ctx               314 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->pending_fault = true;
ctx               315 drivers/scsi/cxlflash/ocxl_hw.c 	spin_unlock(&ctx->slock);
ctx               317 drivers/scsi/cxlflash/ocxl_hw.c 	wake_up_all(&ctx->wq);
ctx               328 drivers/scsi/cxlflash/ocxl_hw.c static int start_context(struct ocxlflash_context *ctx)
ctx               330 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               334 drivers/scsi/cxlflash/ocxl_hw.c 	bool master = ctx->master;
ctx               339 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_lock(&ctx->state_mutex);
ctx               340 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->state != OPENED) {
ctx               342 drivers/scsi/cxlflash/ocxl_hw.c 			__func__, ctx->state);
ctx               348 drivers/scsi/cxlflash/ocxl_hw.c 		ctx->psn_size = acfg->global_mmio_size;
ctx               349 drivers/scsi/cxlflash/ocxl_hw.c 		ctx->psn_phys = afu->gmmio_phys;
ctx               351 drivers/scsi/cxlflash/ocxl_hw.c 		ctx->psn_size = acfg->pp_mmio_stride;
ctx               352 drivers/scsi/cxlflash/ocxl_hw.c 		ctx->psn_phys = afu->ppmmio_phys + (ctx->pe * ctx->psn_size);
ctx               364 drivers/scsi/cxlflash/ocxl_hw.c 	rc = ocxl_link_add_pe(link_token, ctx->pe, pid, 0, 0, mm,
ctx               365 drivers/scsi/cxlflash/ocxl_hw.c 			      ocxlflash_xsl_fault, ctx);
ctx               372 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->state = STARTED;
ctx               374 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_unlock(&ctx->state_mutex);
ctx               386 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               388 drivers/scsi/cxlflash/ocxl_hw.c 	return start_context(ctx);
ctx               399 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               400 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               407 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_lock(&ctx->state_mutex);
ctx               408 drivers/scsi/cxlflash/ocxl_hw.c 	state = ctx->state;
ctx               409 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->state = CLOSED;
ctx               410 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_unlock(&ctx->state_mutex);
ctx               415 drivers/scsi/cxlflash/ocxl_hw.c 					 ctx->pe);
ctx               424 drivers/scsi/cxlflash/ocxl_hw.c 	rc = ocxl_link_remove_pe(afu->link_token, ctx->pe);
ctx               440 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               441 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx               456 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               458 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->master = true;
ctx               486 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx;
ctx               489 drivers/scsi/cxlflash/ocxl_hw.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               490 drivers/scsi/cxlflash/ocxl_hw.c 	if (unlikely(!ctx)) {
ctx               497 drivers/scsi/cxlflash/ocxl_hw.c 	rc = idr_alloc(&afu->idr, ctx, 0, afu->max_pasid, GFP_NOWAIT);
ctx               504 drivers/scsi/cxlflash/ocxl_hw.c 	spin_lock_init(&ctx->slock);
ctx               505 drivers/scsi/cxlflash/ocxl_hw.c 	init_waitqueue_head(&ctx->wq);
ctx               506 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_init(&ctx->state_mutex);
ctx               508 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->state = OPENED;
ctx               509 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->pe = rc;
ctx               510 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->master = false;
ctx               511 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->mapping = NULL;
ctx               512 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->hw_afu = afu;
ctx               513 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->irq_bitmap = 0;
ctx               514 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->pending_irq = false;
ctx               515 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->pending_fault = false;
ctx               517 drivers/scsi/cxlflash/ocxl_hw.c 	return ctx;
ctx               519 drivers/scsi/cxlflash/ocxl_hw.c 	kfree(ctx);
ctx               521 drivers/scsi/cxlflash/ocxl_hw.c 	ctx = ERR_PTR(rc);
ctx               533 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx               537 drivers/scsi/cxlflash/ocxl_hw.c 	if (!ctx)
ctx               540 drivers/scsi/cxlflash/ocxl_hw.c 	dev = ctx->hw_afu->dev;
ctx               541 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_lock(&ctx->state_mutex);
ctx               542 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->state >= STARTED) {
ctx               544 drivers/scsi/cxlflash/ocxl_hw.c 			ctx->state);
ctx               545 drivers/scsi/cxlflash/ocxl_hw.c 		mutex_unlock(&ctx->state_mutex);
ctx               549 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_unlock(&ctx->state_mutex);
ctx               551 drivers/scsi/cxlflash/ocxl_hw.c 	idr_remove(&ctx->hw_afu->idr, ctx->pe);
ctx               552 drivers/scsi/cxlflash/ocxl_hw.c 	ocxlflash_release_mapping(ctx);
ctx               553 drivers/scsi/cxlflash/ocxl_hw.c 	kfree(ctx);
ctx               588 drivers/scsi/cxlflash/ocxl_hw.c static void free_afu_irqs(struct ocxlflash_context *ctx)
ctx               590 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               594 drivers/scsi/cxlflash/ocxl_hw.c 	if (!ctx->irqs) {
ctx               599 drivers/scsi/cxlflash/ocxl_hw.c 	for (i = ctx->num_irqs; i >= 0; i--)
ctx               600 drivers/scsi/cxlflash/ocxl_hw.c 		ocxl_link_free_irq(afu->link_token, ctx->irqs[i].hwirq);
ctx               602 drivers/scsi/cxlflash/ocxl_hw.c 	kfree(ctx->irqs);
ctx               603 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->irqs = NULL;
ctx               613 drivers/scsi/cxlflash/ocxl_hw.c static int alloc_afu_irqs(struct ocxlflash_context *ctx, int num)
ctx               615 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx               623 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->irqs) {
ctx               654 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->irqs = irqs;
ctx               655 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->num_irqs = num;
ctx               911 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx;
ctx               939 drivers/scsi/cxlflash/ocxl_hw.c 	ctx = ocxlflash_dev_context_init(pdev, afu);
ctx               940 drivers/scsi/cxlflash/ocxl_hw.c 	if (IS_ERR(ctx)) {
ctx               941 drivers/scsi/cxlflash/ocxl_hw.c 		rc = PTR_ERR(ctx);
ctx               947 drivers/scsi/cxlflash/ocxl_hw.c 	afu->ocxl_ctx = ctx;
ctx               967 drivers/scsi/cxlflash/ocxl_hw.c static inline bool ctx_event_pending(struct ocxlflash_context *ctx)
ctx               969 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->pending_irq || ctx->pending_fault)
ctx               984 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = file->private_data;
ctx               985 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx               989 drivers/scsi/cxlflash/ocxl_hw.c 	poll_wait(file, &ctx->wq, poll);
ctx               991 drivers/scsi/cxlflash/ocxl_hw.c 	spin_lock_irqsave(&ctx->slock, lock_flags);
ctx               992 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx_event_pending(ctx))
ctx               994 drivers/scsi/cxlflash/ocxl_hw.c 	else if (ctx->state == CLOSED)
ctx               996 drivers/scsi/cxlflash/ocxl_hw.c 	spin_unlock_irqrestore(&ctx->slock, lock_flags);
ctx               999 drivers/scsi/cxlflash/ocxl_hw.c 		__func__, ctx->pe, mask);
ctx              1016 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = file->private_data;
ctx              1017 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx              1032 drivers/scsi/cxlflash/ocxl_hw.c 	spin_lock_irqsave(&ctx->slock, lock_flags);
ctx              1035 drivers/scsi/cxlflash/ocxl_hw.c 		prepare_to_wait(&ctx->wq, &event_wait, TASK_INTERRUPTIBLE);
ctx              1037 drivers/scsi/cxlflash/ocxl_hw.c 		if (ctx_event_pending(ctx) || (ctx->state == CLOSED))
ctx              1054 drivers/scsi/cxlflash/ocxl_hw.c 		spin_unlock_irqrestore(&ctx->slock, lock_flags);
ctx              1056 drivers/scsi/cxlflash/ocxl_hw.c 		spin_lock_irqsave(&ctx->slock, lock_flags);
ctx              1059 drivers/scsi/cxlflash/ocxl_hw.c 	finish_wait(&ctx->wq, &event_wait);
ctx              1062 drivers/scsi/cxlflash/ocxl_hw.c 	event.header.process_element = ctx->pe;
ctx              1064 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->pending_irq) {
ctx              1069 drivers/scsi/cxlflash/ocxl_hw.c 		bit = find_first_bit(&ctx->irq_bitmap, ctx->num_irqs);
ctx              1070 drivers/scsi/cxlflash/ocxl_hw.c 		clear_bit(bit, &ctx->irq_bitmap);
ctx              1072 drivers/scsi/cxlflash/ocxl_hw.c 		if (bitmap_empty(&ctx->irq_bitmap, ctx->num_irqs))
ctx              1073 drivers/scsi/cxlflash/ocxl_hw.c 			ctx->pending_irq = false;
ctx              1074 drivers/scsi/cxlflash/ocxl_hw.c 	} else if (ctx->pending_fault) {
ctx              1077 drivers/scsi/cxlflash/ocxl_hw.c 		event.fault.addr = ctx->fault_addr;
ctx              1078 drivers/scsi/cxlflash/ocxl_hw.c 		event.fault.dsisr = ctx->fault_dsisr;
ctx              1079 drivers/scsi/cxlflash/ocxl_hw.c 		ctx->pending_fault = false;
ctx              1082 drivers/scsi/cxlflash/ocxl_hw.c 	spin_unlock_irqrestore(&ctx->slock, lock_flags);
ctx              1094 drivers/scsi/cxlflash/ocxl_hw.c 	finish_wait(&ctx->wq, &event_wait);
ctx              1095 drivers/scsi/cxlflash/ocxl_hw.c 	spin_unlock_irqrestore(&ctx->slock, lock_flags);
ctx              1108 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = file->private_data;
ctx              1112 drivers/scsi/cxlflash/ocxl_hw.c 	for (i = ctx->num_irqs; i >= 0; i--)
ctx              1113 drivers/scsi/cxlflash/ocxl_hw.c 		afu_unmap_irq(0, ctx, i, ctx);
ctx              1114 drivers/scsi/cxlflash/ocxl_hw.c 	free_afu_irqs(ctx);
ctx              1116 drivers/scsi/cxlflash/ocxl_hw.c 	return ocxlflash_release_context(ctx);
ctx              1128 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = vma->vm_file->private_data;
ctx              1129 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx              1133 drivers/scsi/cxlflash/ocxl_hw.c 	if (offset >= ctx->psn_size)
ctx              1136 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_lock(&ctx->state_mutex);
ctx              1137 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->state != STARTED) {
ctx              1139 drivers/scsi/cxlflash/ocxl_hw.c 			__func__, ctx->state);
ctx              1140 drivers/scsi/cxlflash/ocxl_hw.c 		mutex_unlock(&ctx->state_mutex);
ctx              1143 drivers/scsi/cxlflash/ocxl_hw.c 	mutex_unlock(&ctx->state_mutex);
ctx              1145 drivers/scsi/cxlflash/ocxl_hw.c 	mmio_area = ctx->psn_phys;
ctx              1164 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = file->private_data;
ctx              1167 drivers/scsi/cxlflash/ocxl_hw.c 	    (ctx->psn_size >> PAGE_SHIFT))
ctx              1198 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx              1199 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx              1206 drivers/scsi/cxlflash/ocxl_hw.c 	if (ctx->mapping) {
ctx              1233 drivers/scsi/cxlflash/ocxl_hw.c 	name = kasprintf(GFP_KERNEL, "ocxlflash:%d", ctx->pe);
ctx              1234 drivers/scsi/cxlflash/ocxl_hw.c 	file = ocxlflash_getfile(dev, name, fops, ctx, flags);
ctx              1243 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->mapping = file->f_mapping;
ctx              1274 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = data;
ctx              1275 drivers/scsi/cxlflash/ocxl_hw.c 	struct device *dev = ctx->hw_afu->dev;
ctx              1279 drivers/scsi/cxlflash/ocxl_hw.c 		__func__, ctx->pe, irq);
ctx              1281 drivers/scsi/cxlflash/ocxl_hw.c 	for (i = 0; i < ctx->num_irqs; i++) {
ctx              1282 drivers/scsi/cxlflash/ocxl_hw.c 		if (ctx->irqs[i].virq == irq)
ctx              1285 drivers/scsi/cxlflash/ocxl_hw.c 	if (unlikely(i >= ctx->num_irqs)) {
ctx              1290 drivers/scsi/cxlflash/ocxl_hw.c 	spin_lock(&ctx->slock);
ctx              1291 drivers/scsi/cxlflash/ocxl_hw.c 	set_bit(i - 1, &ctx->irq_bitmap);
ctx              1292 drivers/scsi/cxlflash/ocxl_hw.c 	ctx->pending_irq = true;
ctx              1293 drivers/scsi/cxlflash/ocxl_hw.c 	spin_unlock(&ctx->slock);
ctx              1295 drivers/scsi/cxlflash/ocxl_hw.c 	wake_up_all(&ctx->wq);
ctx              1309 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxlflash_context *ctx = ctx_cookie;
ctx              1310 drivers/scsi/cxlflash/ocxl_hw.c 	struct ocxl_hw_afu *afu = ctx->hw_afu;
ctx              1316 drivers/scsi/cxlflash/ocxl_hw.c 	rc = alloc_afu_irqs(ctx, num_irqs);
ctx              1324 drivers/scsi/cxlflash/ocxl_hw.c 				 dev_name(dev), ctx->pe, i);
ctx              1325 drivers/scsi/cxlflash/ocxl_hw.c 		rc = afu_map_irq(0, ctx, i, ocxlflash_afu_irq, ctx, name);
ctx              1334 drivers/scsi/cxlflash/ocxl_hw.c 	rc = start_context(ctx);
ctx              1343 drivers/scsi/cxlflash/ocxl_hw.c 		afu_unmap_irq(0, ctx, i, ctx);
ctx              1344 drivers/scsi/cxlflash/ocxl_hw.c 	free_afu_irqs(ctx);
ctx               286 drivers/scsi/cxlflash/superpipe.c 			val = cfg->ops->get_irq_objhndl(ctxi->ctx, i);
ctx               827 drivers/scsi/cxlflash/superpipe.c 			 void *ctx, int ctxid, struct file *file, u32 perms,
ctx               837 drivers/scsi/cxlflash/superpipe.c 	ctxi->ctx = ctx;
ctx               994 drivers/scsi/cxlflash/superpipe.c 	void *ctx = cfg->ops->fops_get_context(file);
ctx              1002 drivers/scsi/cxlflash/superpipe.c 	ctxid = cfg->ops->process_element(ctx);
ctx              1005 drivers/scsi/cxlflash/superpipe.c 			__func__, ctx, ctxid);
ctx              1109 drivers/scsi/cxlflash/superpipe.c 	void *ctx = cfg->ops->fops_get_context(file);
ctx              1117 drivers/scsi/cxlflash/superpipe.c 	ctxid = cfg->ops->process_element(ctx);
ctx              1120 drivers/scsi/cxlflash/superpipe.c 			__func__, ctx, ctxid);
ctx              1182 drivers/scsi/cxlflash/superpipe.c 	void *ctx = cfg->ops->fops_get_context(file);
ctx              1189 drivers/scsi/cxlflash/superpipe.c 	ctxid = cfg->ops->process_element(ctx);
ctx              1192 drivers/scsi/cxlflash/superpipe.c 			__func__, ctx, ctxid);
ctx              1335 drivers/scsi/cxlflash/superpipe.c 	void *ctx = NULL;
ctx              1412 drivers/scsi/cxlflash/superpipe.c 	ctx = cfg->ops->dev_context_init(cfg->dev, cfg->afu_cookie);
ctx              1413 drivers/scsi/cxlflash/superpipe.c 	if (IS_ERR_OR_NULL(ctx)) {
ctx              1415 drivers/scsi/cxlflash/superpipe.c 			__func__, ctx);
ctx              1420 drivers/scsi/cxlflash/superpipe.c 	rc = cfg->ops->start_work(ctx, irqs);
ctx              1427 drivers/scsi/cxlflash/superpipe.c 	ctxid = cfg->ops->process_element(ctx);
ctx              1434 drivers/scsi/cxlflash/superpipe.c 	file = cfg->ops->get_fd(ctx, &cfg->cxl_fops, &fd);
ctx              1445 drivers/scsi/cxlflash/superpipe.c 	init_context(ctxi, cfg, ctx, ctxid, file, perms, irqs);
ctx              1492 drivers/scsi/cxlflash/superpipe.c 	if (!IS_ERR_OR_NULL(ctx)) {
ctx              1493 drivers/scsi/cxlflash/superpipe.c 		cfg->ops->stop_context(ctx);
ctx              1494 drivers/scsi/cxlflash/superpipe.c 		cfg->ops->release_context(ctx);
ctx              1495 drivers/scsi/cxlflash/superpipe.c 		ctx = NULL;
ctx              1543 drivers/scsi/cxlflash/superpipe.c 	void *ctx;
ctx              1546 drivers/scsi/cxlflash/superpipe.c 	ctx = cfg->ops->dev_context_init(cfg->dev, cfg->afu_cookie);
ctx              1547 drivers/scsi/cxlflash/superpipe.c 	if (IS_ERR_OR_NULL(ctx)) {
ctx              1549 drivers/scsi/cxlflash/superpipe.c 			__func__, ctx);
ctx              1554 drivers/scsi/cxlflash/superpipe.c 	rc = cfg->ops->start_work(ctx, ctxi->irqs);
ctx              1561 drivers/scsi/cxlflash/superpipe.c 	ctxid = cfg->ops->process_element(ctx);
ctx              1568 drivers/scsi/cxlflash/superpipe.c 	file = cfg->ops->get_fd(ctx, &cfg->cxl_fops, &fd);
ctx              1589 drivers/scsi/cxlflash/superpipe.c 	ctxi->ctx = ctx;
ctx              1615 drivers/scsi/cxlflash/superpipe.c 	cfg->ops->stop_context(ctx);
ctx              1617 drivers/scsi/cxlflash/superpipe.c 	cfg->ops->release_context(ctx);
ctx               103 drivers/scsi/cxlflash/superpipe.h 	void *ctx;
ctx               139 drivers/scsi/lpfc/lpfc_mbox.c 	void *ctx;
ctx               142 drivers/scsi/lpfc/lpfc_mbox.c 	ctx = pmb->ctx_buf;
ctx               154 drivers/scsi/lpfc/lpfc_mbox.c 	pmb->ctx_buf = ctx;
ctx               171 drivers/scsi/lpfc/lpfc_mbox.c 	void *ctx;
ctx               175 drivers/scsi/lpfc/lpfc_mbox.c 	ctx = pmb->ctx_buf;
ctx               189 drivers/scsi/lpfc/lpfc_mbox.c 	pmb->ctx_buf = ctx;
ctx               338 drivers/scsi/lpfc/lpfc_nvmet.c 	rsp = &ctxp->ctx.ls_req;
ctx               724 drivers/scsi/lpfc/lpfc_nvmet.c 	rsp = &ctxp->ctx.fcp_req;
ctx               844 drivers/scsi/lpfc/lpfc_nvmet.c 		container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.ls_req);
ctx               935 drivers/scsi/lpfc/lpfc_nvmet.c 		container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
ctx              1069 drivers/scsi/lpfc/lpfc_nvmet.c 		container_of(req, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
ctx              1131 drivers/scsi/lpfc/lpfc_nvmet.c 		container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
ctx              1174 drivers/scsi/lpfc/lpfc_nvmet.c 		container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
ctx              1666 drivers/scsi/lpfc/lpfc_nvmet.c 		req = &ctxp->ctx.fcp_req;
ctx              1712 drivers/scsi/lpfc/lpfc_nvmet.c 		rsp = &ctxp->ctx.fcp_req;
ctx              1786 drivers/scsi/lpfc/lpfc_nvmet.c 					       &ctxp->ctx.fcp_req);
ctx              1891 drivers/scsi/lpfc/lpfc_nvmet.c 				if (ctxp->ctx.fcp_req.op == NVMET_FCOP_RSP)
ctx              2010 drivers/scsi/lpfc/lpfc_nvmet.c 	rc = nvmet_fc_rcv_ls_req(phba->targetport, &ctxp->ctx.ls_req,
ctx              2088 drivers/scsi/lpfc/lpfc_nvmet.c 	rc = nvmet_fc_rcv_fcp_req(phba->targetport, &ctxp->ctx.fcp_req,
ctx              2590 drivers/scsi/lpfc/lpfc_nvmet.c 	struct nvmefc_tgt_fcp_req *rsp = &ctxp->ctx.fcp_req;
ctx               110 drivers/scsi/lpfc/lpfc_nvmet.h 	} ctx;
ctx                25 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	struct e4_fcoe_task_context *ctx = task_params->context;
ctx                26 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	const u8 val_byte = ctx->ystorm_ag_context.byte0;
ctx                34 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	memset(ctx, 0, sizeof(*(ctx)));
ctx                35 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	ctx->ystorm_ag_context.byte0 = val_byte;
ctx                42 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	y_st_ctx = &ctx->ystorm_st_context;
ctx                50 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	t_st_ctx = &ctx->tstorm_st_context;
ctx                64 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	u_ag_ctx = &ctx->ustorm_ag_context;
ctx                68 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	m_st_ctx = &ctx->mstorm_st_context;
ctx               118 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	struct e4_fcoe_task_context *ctx = task_params->context;
ctx               119 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	const u8 val_byte = ctx->ystorm_ag_context.byte0;
ctx               126 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	memset(ctx, 0, sizeof(*(ctx)));
ctx               127 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	ctx->ystorm_ag_context.byte0 = val_byte;
ctx               130 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	y_st_ctx = &ctx->ystorm_st_context;
ctx               142 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	m_st_ctx = &ctx->mstorm_st_context;
ctx               152 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	t_st_ctx = &ctx->tstorm_st_context;
ctx               162 drivers/scsi/qedf/drv_fcoe_fw_funcs.c 	u_ag_ctx = &ctx->ustorm_ag_context;
ctx              3399 drivers/scsi/qla2xxx/qla_def.h 	struct crc_context *ctx;
ctx                94 drivers/scsi/qla2xxx/qla_inline.h qla2x00_clean_dsd_pool(struct qla_hw_data *ha, struct crc_context *ctx)
ctx                99 drivers/scsi/qla2xxx/qla_inline.h 	list_for_each_entry_safe(dsd, tdsd, &ctx->dsd_list, list) {
ctx               105 drivers/scsi/qla2xxx/qla_inline.h 	INIT_LIST_HEAD(&ctx->dsd_list);
ctx               596 drivers/scsi/qla2xxx/qla_iocb.c 	struct ct6_dsd *ctx;
ctx               624 drivers/scsi/qla2xxx/qla_iocb.c 	ctx = sp->u.scmd.ct6_ctx;
ctx               637 drivers/scsi/qla2xxx/qla_iocb.c 		list_add_tail(&dsd_ptr->list, &ctx->dsd_list);
ctx               638 drivers/scsi/qla2xxx/qla_iocb.c 		ctx->dsd_use_cnt++;
ctx               962 drivers/scsi/qla2xxx/qla_iocb.c 				    &(tc->ctx->dsd_list));
ctx              1059 drivers/scsi/qla2xxx/qla_iocb.c 				    &(tc->ctx->dsd_list));
ctx              1108 drivers/scsi/qla2xxx/qla_iocb.c 		difctx = tc->ctx;
ctx              3091 drivers/scsi/qla2xxx/qla_iocb.c 	struct ct6_dsd *ctx;
ctx              3194 drivers/scsi/qla2xxx/qla_iocb.c 		ctx = sp->u.scmd.ct6_ctx =
ctx              3196 drivers/scsi/qla2xxx/qla_iocb.c 		if (!ctx) {
ctx              3202 drivers/scsi/qla2xxx/qla_iocb.c 		memset(ctx, 0, sizeof(struct ct6_dsd));
ctx              3203 drivers/scsi/qla2xxx/qla_iocb.c 		ctx->fcp_cmnd = dma_pool_zalloc(ha->fcp_cmnd_dma_pool,
ctx              3204 drivers/scsi/qla2xxx/qla_iocb.c 			GFP_ATOMIC, &ctx->fcp_cmnd_dma);
ctx              3205 drivers/scsi/qla2xxx/qla_iocb.c 		if (!ctx->fcp_cmnd) {
ctx              3212 drivers/scsi/qla2xxx/qla_iocb.c 		INIT_LIST_HEAD(&ctx->dsd_list);
ctx              3213 drivers/scsi/qla2xxx/qla_iocb.c 		ctx->dsd_use_cnt = 0;
ctx              3226 drivers/scsi/qla2xxx/qla_iocb.c 			ctx->fcp_cmnd_len = 12 + cmd->cmd_len + 4;
ctx              3229 drivers/scsi/qla2xxx/qla_iocb.c 			ctx->fcp_cmnd_len = 12 + 16 + 4;
ctx              3256 drivers/scsi/qla2xxx/qla_iocb.c 		int_to_scsilun(cmd->device->lun, &ctx->fcp_cmnd->lun);
ctx              3257 drivers/scsi/qla2xxx/qla_iocb.c 		ctx->fcp_cmnd->additional_cdb_len = additional_cdb_len;
ctx              3260 drivers/scsi/qla2xxx/qla_iocb.c 			ctx->fcp_cmnd->additional_cdb_len |= 1;
ctx              3262 drivers/scsi/qla2xxx/qla_iocb.c 			ctx->fcp_cmnd->additional_cdb_len |= 2;
ctx              3266 drivers/scsi/qla2xxx/qla_iocb.c 			ctx->fcp_cmnd->task_attribute |=
ctx              3269 drivers/scsi/qla2xxx/qla_iocb.c 		memcpy(ctx->fcp_cmnd->cdb, cmd->cmnd, cmd->cmd_len);
ctx              3271 drivers/scsi/qla2xxx/qla_iocb.c 		fcp_dl = (uint32_t *)(ctx->fcp_cmnd->cdb + 16 +
ctx              3275 drivers/scsi/qla2xxx/qla_iocb.c 		cmd_pkt->fcp_cmnd_dseg_len = cpu_to_le16(ctx->fcp_cmnd_len);
ctx              3276 drivers/scsi/qla2xxx/qla_iocb.c 		put_unaligned_le64(ctx->fcp_cmnd_dma,
ctx              3385 drivers/scsi/qla2xxx/qla_iocb.c 	dma_pool_free(ha->fcp_cmnd_dma_pool, ctx->fcp_cmnd, ctx->fcp_cmnd_dma);
ctx              2462 drivers/scsi/qla2xxx/qla_target.c 	if (!cmd->ctx)
ctx              2466 drivers/scsi/qla2xxx/qla_target.c 		qla2x00_clean_dsd_pool(ha, cmd->ctx);
ctx              2468 drivers/scsi/qla2xxx/qla_target.c 	dma_pool_free(ha->dl_dma_pool, cmd->ctx, cmd->ctx->crc_ctx_dma);
ctx              2898 drivers/scsi/qla2xxx/qla_target.c qla_tgt_set_dif_tags(struct qla_tgt_cmd *cmd, struct crc_context *ctx,
ctx              2912 drivers/scsi/qla2xxx/qla_target.c 	ctx->app_tag = 0;
ctx              2913 drivers/scsi/qla2xxx/qla_target.c 	ctx->app_tag_mask[0] = 0x0;
ctx              2914 drivers/scsi/qla2xxx/qla_target.c 	ctx->app_tag_mask[1] = 0x0;
ctx              2933 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag = cpu_to_le32(lba);
ctx              2935 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag_mask[0] = 0xff;
ctx              2936 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag_mask[1] = 0xff;
ctx              2937 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag_mask[2] = 0xff;
ctx              2938 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag_mask[3] = 0xff;
ctx              2945 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag = cpu_to_le32(lba);
ctx              2952 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[0] = 0xff;
ctx              2953 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[1] = 0xff;
ctx              2954 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[2] = 0xff;
ctx              2955 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[3] = 0xff;
ctx              2962 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag = cpu_to_le32(lba);
ctx              2969 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[0] = 0xff;
ctx              2970 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[1] = 0xff;
ctx              2971 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[2] = 0xff;
ctx              2972 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[3] = 0xff;
ctx              2977 drivers/scsi/qla2xxx/qla_target.c 	    ctx->ref_tag_mask[0] = ctx->ref_tag_mask[1] =
ctx              2978 drivers/scsi/qla2xxx/qla_target.c 		ctx->ref_tag_mask[2] = ctx->ref_tag_mask[3] = 0x00;
ctx              3116 drivers/scsi/qla2xxx/qla_target.c 	crc_ctx_pkt = cmd->ctx =
ctx              3159 drivers/scsi/qla2xxx/qla_target.c 	tc.ctx = crc_ctx_pkt;
ctx               914 drivers/scsi/qla2xxx/qla_target.h 	struct crc_context *ctx;
ctx               161 drivers/scsi/snic/snic_ctl.c 	ulong ctx = 0;
ctx               166 drivers/scsi/snic/snic_ctl.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &hdr_stat, &cmnd_id, &hid, &ctx);
ctx               168 drivers/scsi/snic/snic_ctl.c 	rqi = (struct snic_req_info *) ctx;
ctx                60 drivers/scsi/snic/snic_disc.c 		     dma_addr_t rsp_buf_pa, ulong ctx)
ctx                66 drivers/scsi/snic/snic_disc.c 			1, ctx);
ctx               400 drivers/scsi/snic/snic_disc.c 	ulong ctx;
ctx               405 drivers/scsi/snic/snic_disc.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &cmpl_stat, &cmnd_id, &hid, &ctx);
ctx               406 drivers/scsi/snic/snic_disc.c 	rqi = (struct snic_req_info *) ctx;
ctx               115 drivers/scsi/snic/snic_fwint.h 		u16 sg_cnt, ulong ctx)
ctx               123 drivers/scsi/snic/snic_fwint.h 	hdr->init_ctx = ctx;
ctx               130 drivers/scsi/snic/snic_fwint.h 		u32 *hid, ulong *ctx)
ctx               136 drivers/scsi/snic/snic_fwint.h 	*ctx = hdr->init_ctx;
ctx                28 drivers/scsi/snic/snic_res.h snic_icmnd_init(struct snic_host_req *req, u32 cmnd_id, u32 host_id, u64 ctx,
ctx                34 drivers/scsi/snic/snic_res.h 			ctx);
ctx                49 drivers/scsi/snic/snic_res.h snic_itmf_init(struct snic_host_req *req, u32 cmnd_id, u32 host_id, ulong ctx,
ctx                52 drivers/scsi/snic/snic_res.h 	snic_io_hdr_enc(&req->hdr, SNIC_REQ_ITMF, 0, cmnd_id, host_id, 0, ctx);
ctx               549 drivers/scsi/snic/snic_scsi.c 	ulong ctx;
ctx               558 drivers/scsi/snic/snic_scsi.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &hdr_stat, &cmnd_id, &hid, &ctx);
ctx               564 drivers/scsi/snic/snic_scsi.c 		      typ, hdr_stat, cmnd_id, hid, ctx);
ctx               587 drivers/scsi/snic/snic_scsi.c 			 (ulong) fwreq, le32_to_cpu(icmnd_cmpl->resid), ctx);
ctx               607 drivers/scsi/snic/snic_scsi.c 	SNIC_BUG_ON(rqi != (struct snic_req_info *)ctx);
ctx               621 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) ctx;
ctx               925 drivers/scsi/snic/snic_scsi.c 	ulong ctx;
ctx               931 drivers/scsi/snic/snic_scsi.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &hdr_stat, &cmnd_id, &hid, &ctx);
ctx               934 drivers/scsi/snic/snic_scsi.c 		      __func__, typ, hdr_stat, cmnd_id, hid, ctx);
ctx               943 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) ctx;
ctx              1004 drivers/scsi/snic/snic_scsi.c 	ulong ctx;
ctx              1015 drivers/scsi/snic/snic_scsi.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &hdr_stat, &cmnd_id, &hid, &ctx);
ctx              1018 drivers/scsi/snic/snic_scsi.c 		       cmnd_id, ctx, snic_io_status_to_str(hdr_stat));
ctx              1022 drivers/scsi/snic/snic_scsi.c 		      typ, hdr_stat, cmnd_id, hid, ctx);
ctx              1026 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) ctx;
ctx              1029 drivers/scsi/snic/snic_scsi.c 			       cmnd_id, ctx, snic_io_status_to_str(hdr_stat));
ctx              1134 drivers/scsi/snic/snic_scsi.c 	ulong ctx;
ctx              1138 drivers/scsi/snic/snic_scsi.c 	snic_io_hdr_dec(&fwreq->hdr, &typ, &hdr_stat, &cmnd_id, &hid, &ctx);
ctx              1141 drivers/scsi/snic/snic_scsi.c 		      typ, hdr_stat, cmnd_id, hid, ctx);
ctx               159 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx, *end;
ctx               162 drivers/scsi/vmw_pvscsi.c 	for (ctx = adapter->cmd_map; ctx < end; ctx++)
ctx               163 drivers/scsi/vmw_pvscsi.c 		if (ctx->cmd == cmd)
ctx               164 drivers/scsi/vmw_pvscsi.c 			return ctx;
ctx               172 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx;
ctx               177 drivers/scsi/vmw_pvscsi.c 	ctx = list_first_entry(&adapter->cmd_pool, struct pvscsi_ctx, list);
ctx               178 drivers/scsi/vmw_pvscsi.c 	ctx->cmd = cmd;
ctx               179 drivers/scsi/vmw_pvscsi.c 	list_del(&ctx->list);
ctx               181 drivers/scsi/vmw_pvscsi.c 	return ctx;
ctx               185 drivers/scsi/vmw_pvscsi.c 				   struct pvscsi_ctx *ctx)
ctx               187 drivers/scsi/vmw_pvscsi.c 	ctx->cmd = NULL;
ctx               188 drivers/scsi/vmw_pvscsi.c 	ctx->abort_cmp = NULL;
ctx               189 drivers/scsi/vmw_pvscsi.c 	list_add(&ctx->list, &adapter->cmd_pool);
ctx               198 drivers/scsi/vmw_pvscsi.c 			      const struct pvscsi_ctx *ctx)
ctx               200 drivers/scsi/vmw_pvscsi.c 	return ctx - adapter->cmd_map + 1;
ctx               261 drivers/scsi/vmw_pvscsi.c 			     const struct pvscsi_ctx *ctx)
ctx               265 drivers/scsi/vmw_pvscsi.c 	cmd.target = ctx->cmd->device->id;
ctx               266 drivers/scsi/vmw_pvscsi.c 	cmd.context = pvscsi_map_context(adapter, ctx);
ctx               329 drivers/scsi/vmw_pvscsi.c static void pvscsi_create_sg(struct pvscsi_ctx *ctx,
ctx               337 drivers/scsi/vmw_pvscsi.c 	sge = &ctx->sgl->sge[0];
ctx               350 drivers/scsi/vmw_pvscsi.c 			      struct pvscsi_ctx *ctx, struct scsi_cmnd *cmd,
ctx               372 drivers/scsi/vmw_pvscsi.c 			pvscsi_create_sg(ctx, sg, segs);
ctx               375 drivers/scsi/vmw_pvscsi.c 			ctx->sglPA = dma_map_single(&adapter->dev->dev,
ctx               376 drivers/scsi/vmw_pvscsi.c 					ctx->sgl, SGL_SIZE, DMA_TO_DEVICE);
ctx               377 drivers/scsi/vmw_pvscsi.c 			if (dma_mapping_error(&adapter->dev->dev, ctx->sglPA)) {
ctx               381 drivers/scsi/vmw_pvscsi.c 				ctx->sglPA = 0;
ctx               384 drivers/scsi/vmw_pvscsi.c 			e->dataAddr = ctx->sglPA;
ctx               392 drivers/scsi/vmw_pvscsi.c 		ctx->dataPA = dma_map_single(&adapter->dev->dev, sg, bufflen,
ctx               394 drivers/scsi/vmw_pvscsi.c 		if (dma_mapping_error(&adapter->dev->dev, ctx->dataPA)) {
ctx               399 drivers/scsi/vmw_pvscsi.c 		e->dataAddr = ctx->dataPA;
ctx               406 drivers/scsi/vmw_pvscsi.c 				 struct pvscsi_ctx *ctx)
ctx               411 drivers/scsi/vmw_pvscsi.c 	cmd = ctx->cmd;
ctx               419 drivers/scsi/vmw_pvscsi.c 			if (ctx->sglPA) {
ctx               420 drivers/scsi/vmw_pvscsi.c 				dma_unmap_single(&adapter->dev->dev, ctx->sglPA,
ctx               422 drivers/scsi/vmw_pvscsi.c 				ctx->sglPA = 0;
ctx               425 drivers/scsi/vmw_pvscsi.c 			dma_unmap_single(&adapter->dev->dev, ctx->dataPA,
ctx               429 drivers/scsi/vmw_pvscsi.c 		dma_unmap_single(&adapter->dev->dev, ctx->sensePA,
ctx               537 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx;
ctx               543 drivers/scsi/vmw_pvscsi.c 	ctx = pvscsi_get_context(adapter, e->context);
ctx               544 drivers/scsi/vmw_pvscsi.c 	cmd = ctx->cmd;
ctx               545 drivers/scsi/vmw_pvscsi.c 	abort_cmp = ctx->abort_cmp;
ctx               546 drivers/scsi/vmw_pvscsi.c 	pvscsi_unmap_buffers(adapter, ctx);
ctx               547 drivers/scsi/vmw_pvscsi.c 	pvscsi_release_context(adapter, ctx);
ctx               633 drivers/scsi/vmw_pvscsi.c 		cmd, cmd->cmnd[0], ctx, cmd->result, btstat, sdstat);
ctx               677 drivers/scsi/vmw_pvscsi.c 			     struct pvscsi_ctx *ctx, struct scsi_cmnd *cmd)
ctx               711 drivers/scsi/vmw_pvscsi.c 		ctx->sensePA = dma_map_single(&adapter->dev->dev,
ctx               714 drivers/scsi/vmw_pvscsi.c 		if (dma_mapping_error(&adapter->dev->dev, ctx->sensePA)) {
ctx               717 drivers/scsi/vmw_pvscsi.c 			ctx->sensePA = 0;
ctx               720 drivers/scsi/vmw_pvscsi.c 		e->senseAddr = ctx->sensePA;
ctx               741 drivers/scsi/vmw_pvscsi.c 	if (pvscsi_map_buffers(adapter, ctx, cmd, e) != 0) {
ctx               743 drivers/scsi/vmw_pvscsi.c 			dma_unmap_single(&adapter->dev->dev, ctx->sensePA,
ctx               746 drivers/scsi/vmw_pvscsi.c 			ctx->sensePA = 0;
ctx               751 drivers/scsi/vmw_pvscsi.c 	e->context = pvscsi_map_context(adapter, ctx);
ctx               764 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx;
ctx               770 drivers/scsi/vmw_pvscsi.c 	ctx = pvscsi_acquire_context(adapter, cmd);
ctx               771 drivers/scsi/vmw_pvscsi.c 	if (!ctx || pvscsi_queue_ring(adapter, ctx, cmd) != 0) {
ctx               772 drivers/scsi/vmw_pvscsi.c 		if (ctx)
ctx               773 drivers/scsi/vmw_pvscsi.c 			pvscsi_release_context(adapter, ctx);
ctx               782 drivers/scsi/vmw_pvscsi.c 		"queued cmd %p, ctx %p, op=%x\n", cmd, ctx, op);
ctx               796 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx;
ctx               817 drivers/scsi/vmw_pvscsi.c 	ctx = pvscsi_find_context(adapter, cmd);
ctx               818 drivers/scsi/vmw_pvscsi.c 	if (!ctx) {
ctx               827 drivers/scsi/vmw_pvscsi.c 	ctx->abort_cmp = &abort_cmp;
ctx               829 drivers/scsi/vmw_pvscsi.c 	pvscsi_abort_cmd(adapter, ctx);
ctx               840 drivers/scsi/vmw_pvscsi.c 		ctx->abort_cmp = NULL;
ctx               870 drivers/scsi/vmw_pvscsi.c 		struct pvscsi_ctx *ctx = &adapter->cmd_map[i];
ctx               871 drivers/scsi/vmw_pvscsi.c 		struct scsi_cmnd *cmd = ctx->cmd;
ctx               875 drivers/scsi/vmw_pvscsi.c 			pvscsi_unmap_buffers(adapter, ctx);
ctx               876 drivers/scsi/vmw_pvscsi.c 			pvscsi_release_context(adapter, ctx);
ctx              1191 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx = adapter->cmd_map;
ctx              1194 drivers/scsi/vmw_pvscsi.c 	for (i = 0; i < adapter->req_depth; ++i, ++ctx)
ctx              1195 drivers/scsi/vmw_pvscsi.c 		free_pages((unsigned long)ctx->sgl, get_order(SGL_SIZE));
ctx              1254 drivers/scsi/vmw_pvscsi.c 	struct pvscsi_ctx *ctx;
ctx              1257 drivers/scsi/vmw_pvscsi.c 	ctx = adapter->cmd_map;
ctx              1260 drivers/scsi/vmw_pvscsi.c 	for (i = 0; i < adapter->req_depth; ++i, ++ctx) {
ctx              1261 drivers/scsi/vmw_pvscsi.c 		ctx->sgl = (void *)__get_free_pages(GFP_KERNEL,
ctx              1263 drivers/scsi/vmw_pvscsi.c 		ctx->sglPA = 0;
ctx              1264 drivers/scsi/vmw_pvscsi.c 		BUG_ON(!IS_ALIGNED(((unsigned long)ctx->sgl), PAGE_SIZE));
ctx              1265 drivers/scsi/vmw_pvscsi.c 		if (!ctx->sgl) {
ctx              1266 drivers/scsi/vmw_pvscsi.c 			for (; i >= 0; --i, --ctx) {
ctx              1267 drivers/scsi/vmw_pvscsi.c 				free_pages((unsigned long)ctx->sgl,
ctx              1269 drivers/scsi/vmw_pvscsi.c 				ctx->sgl = NULL;
ctx              1477 drivers/scsi/vmw_pvscsi.c 		struct pvscsi_ctx *ctx = adapter->cmd_map + i;
ctx              1478 drivers/scsi/vmw_pvscsi.c 		list_add(&ctx->list, &adapter->cmd_pool);
ctx                91 drivers/soc/bcm/brcmstb/pm/pm-mips.c static void brcm_pm_save_cp0_context(struct brcm_pm_s3_context *ctx)
ctx                94 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[CONTEXT] = read_c0_context();
ctx                95 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[USER_LOCAL] = read_c0_userlocal();
ctx                96 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[PGMK] = read_c0_pagemask();
ctx                97 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[HWRENA] = read_c0_cache();
ctx                98 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[COMPARE] = read_c0_compare();
ctx                99 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[STATUS] = read_c0_status();
ctx               102 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[CONFIG] = read_c0_brcm_config();
ctx               103 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[MODE] = read_c0_brcm_mode();
ctx               104 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[EDSP] = read_c0_brcm_edsp();
ctx               105 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[BOOT_VEC] = read_c0_brcm_bootvec();
ctx               106 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->cp0_regs[EBASE] = read_c0_ebase();
ctx               108 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	ctx->sc_boot_vec = bmips_read_zscm_reg(0xa0);
ctx               111 drivers/soc/bcm/brcmstb/pm/pm-mips.c static void brcm_pm_restore_cp0_context(struct brcm_pm_s3_context *ctx)
ctx               114 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	bmips_write_zscm_reg(0xa0, ctx->sc_boot_vec);
ctx               117 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_context(ctx->cp0_regs[CONTEXT]);
ctx               118 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_userlocal(ctx->cp0_regs[USER_LOCAL]);
ctx               119 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_pagemask(ctx->cp0_regs[PGMK]);
ctx               120 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_cache(ctx->cp0_regs[HWRENA]);
ctx               121 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_compare(ctx->cp0_regs[COMPARE]);
ctx               122 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_status(ctx->cp0_regs[STATUS]);
ctx               125 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_brcm_config(ctx->cp0_regs[CONFIG]);
ctx               126 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_brcm_mode(ctx->cp0_regs[MODE]);
ctx               127 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_brcm_edsp(ctx->cp0_regs[EDSP]);
ctx               128 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_brcm_bootvec(ctx->cp0_regs[BOOT_VEC]);
ctx               129 drivers/soc/bcm/brcmstb/pm/pm-mips.c 	write_c0_ebase(ctx->cp0_regs[EBASE]);
ctx               201 drivers/soc/fsl/dpio/dpio-service.c 			struct dpaa2_io_notification_ctx *ctx;
ctx               205 drivers/soc/fsl/dpio/dpio-service.c 			ctx = (void *)(uintptr_t)q64;
ctx               206 drivers/soc/fsl/dpio/dpio-service.c 			ctx->cb(ctx);
ctx               255 drivers/soc/fsl/dpio/dpio-service.c 			      struct dpaa2_io_notification_ctx *ctx,
ctx               261 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select_by_cpu(d, ctx->desired_cpu);
ctx               269 drivers/soc/fsl/dpio/dpio-service.c 	ctx->dpio_id = d->dpio_desc.dpio_id;
ctx               270 drivers/soc/fsl/dpio/dpio-service.c 	ctx->qman64 = (u64)(uintptr_t)ctx;
ctx               271 drivers/soc/fsl/dpio/dpio-service.c 	ctx->dpio_private = d;
ctx               273 drivers/soc/fsl/dpio/dpio-service.c 	list_add(&ctx->node, &d->notifications);
ctx               277 drivers/soc/fsl/dpio/dpio-service.c 	if (ctx->is_cdan)
ctx               279 drivers/soc/fsl/dpio/dpio-service.c 							 (u16)ctx->id,
ctx               280 drivers/soc/fsl/dpio/dpio-service.c 							 ctx->qman64);
ctx               295 drivers/soc/fsl/dpio/dpio-service.c 				 struct dpaa2_io_notification_ctx *ctx,
ctx               298 drivers/soc/fsl/dpio/dpio-service.c 	struct dpaa2_io *d = ctx->dpio_private;
ctx               301 drivers/soc/fsl/dpio/dpio-service.c 	if (ctx->is_cdan)
ctx               302 drivers/soc/fsl/dpio/dpio-service.c 		qbman_swp_CDAN_disable(d->swp, (u16)ctx->id);
ctx               305 drivers/soc/fsl/dpio/dpio-service.c 	list_del(&ctx->node);
ctx               325 drivers/soc/fsl/dpio/dpio-service.c 			   struct dpaa2_io_notification_ctx *ctx)
ctx               330 drivers/soc/fsl/dpio/dpio-service.c 	d = service_select_by_cpu(d, ctx->desired_cpu);
ctx               335 drivers/soc/fsl/dpio/dpio-service.c 	if (ctx->is_cdan)
ctx               336 drivers/soc/fsl/dpio/dpio-service.c 		err = qbman_swp_CDAN_enable(d->swp, (u16)ctx->id);
ctx               338 drivers/soc/fsl/dpio/dpio-service.c 		err = qbman_swp_fq_schedule(d->swp, ctx->id);
ctx              1068 drivers/soc/fsl/dpio/qbman-portal.c 		       u64 ctx)
ctx              1085 drivers/soc/fsl/dpio/qbman-portal.c 	p->cdan_ctx = cpu_to_le64(ctx);
ctx               190 drivers/soc/fsl/dpio/qbman-portal.h 		       u64 ctx);
ctx               288 drivers/soc/fsl/dpio/qbman-portal.h 	return le64_to_cpu(scn->scn.ctx);
ctx               380 drivers/soc/fsl/dpio/qbman-portal.h 					     u64 ctx)
ctx               384 drivers/soc/fsl/dpio/qbman-portal.h 				  0, ctx);
ctx               425 drivers/soc/fsl/dpio/qbman-portal.h 						    u64 ctx)
ctx               429 drivers/soc/fsl/dpio/qbman-portal.h 				  1, ctx);
ctx               209 drivers/soc/ixp4xx/ixp4xx-npe.c static int __must_check npe_debug_instr(struct npe *npe, u32 instr, u32 ctx,
ctx               225 drivers/soc/ixp4xx/ixp4xx-npe.c 		      (ctx << ECS_REG_1_CCTXT_BITS) |
ctx               226 drivers/soc/ixp4xx/ixp4xx-npe.c 		      (ctx << ECS_REG_1_SELCTXT_BITS));
ctx               253 drivers/soc/ixp4xx/ixp4xx-npe.c 					       u8 val, u32 ctx)
ctx               260 drivers/soc/ixp4xx/ixp4xx-npe.c 	return npe_debug_instr(npe, instr, ctx, 1); /* execute it */
ctx               264 drivers/soc/ixp4xx/ixp4xx-npe.c 						u16 val, u32 ctx)
ctx               271 drivers/soc/ixp4xx/ixp4xx-npe.c 	return npe_debug_instr(npe, instr, ctx, 1); /* execute it */
ctx               275 drivers/soc/ixp4xx/ixp4xx-npe.c 						u32 val, u32 ctx)
ctx               278 drivers/soc/ixp4xx/ixp4xx-npe.c 	if (npe_logical_reg_write16(npe, addr, val >> 16, ctx))
ctx               280 drivers/soc/ixp4xx/ixp4xx-npe.c 	return npe_logical_reg_write16(npe, addr + 2, val & 0xFFFF, ctx);
ctx                39 drivers/soundwire/intel_init.c static int sdw_intel_cleanup_pdev(struct sdw_intel_ctx *ctx)
ctx                41 drivers/soundwire/intel_init.c 	struct sdw_link_data *link = ctx->links;
ctx                47 drivers/soundwire/intel_init.c 	for (i = 0; i < ctx->count; i++) {
ctx                53 drivers/soundwire/intel_init.c 	kfree(ctx->links);
ctx                54 drivers/soundwire/intel_init.c 	ctx->links = NULL;
ctx                65 drivers/soundwire/intel_init.c 	struct sdw_intel_ctx *ctx;
ctx               105 drivers/soundwire/intel_init.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               106 drivers/soundwire/intel_init.c 	if (!ctx)
ctx               109 drivers/soundwire/intel_init.c 	ctx->count = count;
ctx               110 drivers/soundwire/intel_init.c 	ctx->links = kcalloc(ctx->count, sizeof(*ctx->links), GFP_KERNEL);
ctx               111 drivers/soundwire/intel_init.c 	if (!ctx->links)
ctx               114 drivers/soundwire/intel_init.c 	link = ctx->links;
ctx               155 drivers/soundwire/intel_init.c 	return ctx;
ctx               158 drivers/soundwire/intel_init.c 	sdw_intel_cleanup_pdev(ctx);
ctx               160 drivers/soundwire/intel_init.c 	kfree(ctx);
ctx               228 drivers/soundwire/intel_init.c 	struct sdw_intel_ctx *ctx = arg;
ctx               230 drivers/soundwire/intel_init.c 	sdw_intel_cleanup_pdev(ctx);
ctx               231 drivers/soundwire/intel_init.c 	kfree(ctx);
ctx               129 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs ctx;
ctx               274 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs	*ctx = &mcspi->ctx;
ctx               290 drivers/spi/spi-omap2-mcspi.c 	ctx->modulctrl = l;
ctx              1041 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs	*ctx = &mcspi->ctx;
ctx              1055 drivers/spi/spi-omap2-mcspi.c 		list_add_tail(&cs->node, &ctx->cs);
ctx              1180 drivers/spi/spi-omap2-mcspi.c 		chconf = mcspi->ctx.modulctrl;
ctx              1183 drivers/spi/spi-omap2-mcspi.c 		mcspi->ctx.modulctrl =
ctx              1245 drivers/spi/spi-omap2-mcspi.c 		chconf = mcspi->ctx.modulctrl;
ctx              1248 drivers/spi/spi-omap2-mcspi.c 		mcspi->ctx.modulctrl =
ctx              1267 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs	*ctx = &mcspi->ctx;
ctx              1275 drivers/spi/spi-omap2-mcspi.c 	list_for_each_entry(cs, &ctx->cs, node) {
ctx              1313 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs	*ctx = &mcspi->ctx;
ctx              1325 drivers/spi/spi-omap2-mcspi.c 	ctx->wakeupenable = OMAP2_MCSPI_WAKEUPENABLE_WKEN;
ctx              1342 drivers/spi/spi-omap2-mcspi.c 	struct omap2_mcspi_regs *ctx = &mcspi->ctx;
ctx              1346 drivers/spi/spi-omap2-mcspi.c 	mcspi_write_reg(master, OMAP2_MCSPI_MODULCTRL, ctx->modulctrl);
ctx              1347 drivers/spi/spi-omap2-mcspi.c 	mcspi_write_reg(master, OMAP2_MCSPI_WAKEUPENABLE, ctx->wakeupenable);
ctx              1349 drivers/spi/spi-omap2-mcspi.c 	list_for_each_entry(cs, &ctx->cs, node) {
ctx              1456 drivers/spi/spi-omap2-mcspi.c 	INIT_LIST_HEAD(&mcspi->ctx.cs);
ctx               204 drivers/ssb/main.c int ssb_devices_freeze(struct ssb_bus *bus, struct ssb_freeze_context *ctx)
ctx               210 drivers/ssb/main.c 	memset(ctx, 0, sizeof(*ctx));
ctx               211 drivers/ssb/main.c 	ctx->bus = bus;
ctx               212 drivers/ssb/main.c 	WARN_ON(bus->nr_devices > ARRAY_SIZE(ctx->device_frozen));
ctx               226 drivers/ssb/main.c 		ctx->device_frozen[i] = 1;
ctx               238 drivers/ssb/main.c int ssb_devices_thaw(struct ssb_freeze_context *ctx)
ctx               240 drivers/ssb/main.c 	struct ssb_bus *bus = ctx->bus;
ctx               247 drivers/ssb/main.c 		if (!ctx->device_frozen[i])
ctx               172 drivers/ssb/ssb_private.h extern int ssb_devices_freeze(struct ssb_bus *bus, struct ssb_freeze_context *ctx);
ctx               173 drivers/ssb/ssb_private.h extern int ssb_devices_thaw(struct ssb_freeze_context *ctx);
ctx              2200 drivers/staging/exfat/exfat_super.c static int exfat_readdir(struct file *filp, struct dir_context *ctx)
ctx              2214 drivers/staging/exfat/exfat_super.c 	cpos = ctx->pos;
ctx              2225 drivers/staging/exfat/exfat_super.c 			if (!dir_emit_dots(filp, ctx))
ctx              2228 drivers/staging/exfat/exfat_super.c 			ctx->pos++;
ctx              2278 drivers/staging/exfat/exfat_super.c 	if (!dir_emit(ctx, de.Name, strlen(de.Name), inum,
ctx              2282 drivers/staging/exfat/exfat_super.c 	ctx->pos = cpos;
ctx              2286 drivers/staging/exfat/exfat_super.c 	ctx->pos = cpos;
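
exfat_readdir() above drives the VFS dir_context protocol: emit "." and ".." via dir_emit_dots(), emit each real entry with dir_emit(), and persist the cursor in ctx->pos so a later call resumes where the buffer filled up. A minimal sketch of an iterate body following that protocol; demo_next_entry and struct demo_dirent are hypothetical stand-ins for the filesystem-specific directory walk:

	#include <linux/fs.h>
	#include <linux/string.h>

	struct demo_dirent {
		char name[64];
		u64 ino;
		unsigned int type;	/* DT_REG, DT_DIR, ... */
	};

	/* Hypothetical helper: fetch the entry at @pos, nonzero at EOF. */
	static int demo_next_entry(struct file *file, loff_t pos,
				   struct demo_dirent *de);

	static int demo_readdir(struct file *file, struct dir_context *ctx)
	{
		if (!dir_emit_dots(file, ctx))
			return 0;	/* buffer full, come back later */

		for (;;) {
			struct demo_dirent de;

			if (demo_next_entry(file, ctx->pos, &de))
				break;	/* end of directory */
			if (!dir_emit(ctx, de.name, strlen(de.name),
				      de.ino, de.type))
				return 0;
			ctx->pos++;	/* advance only after a successful emit */
		}
		return 0;
	}
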
ctx               145 drivers/staging/gasket/gasket_interrupt.c 	struct eventfd_ctx *ctx;
ctx               148 drivers/staging/gasket/gasket_interrupt.c 	ctx = interrupt_data->eventfd_ctxs[interrupt_index];
ctx               149 drivers/staging/gasket/gasket_interrupt.c 	if (ctx)
ctx               150 drivers/staging/gasket/gasket_interrupt.c 		eventfd_signal(ctx, 1);
ctx               490 drivers/staging/gasket/gasket_interrupt.c 	struct eventfd_ctx *ctx = eventfd_ctx_fdget(event_fd);
ctx               492 drivers/staging/gasket/gasket_interrupt.c 	if (IS_ERR(ctx))
ctx               493 drivers/staging/gasket/gasket_interrupt.c 		return PTR_ERR(ctx);
ctx               498 drivers/staging/gasket/gasket_interrupt.c 	interrupt_data->eventfd_ctxs[interrupt] = ctx;
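
The gasket hits show kernel-side eventfd signalling: userspace hands in an eventfd, the driver resolves it once with eventfd_ctx_fdget(), and the interrupt path just calls eventfd_signal() on the cached context. A condensed sketch of both halves; the demo_* names and the single global slot are simplifications for illustration:

	#include <linux/eventfd.h>
	#include <linux/err.h>

	static struct eventfd_ctx *demo_evt;	/* one slot, for brevity */

	/* ioctl path: resolve the fd to a long-lived eventfd context. */
	static int demo_set_eventfd(int event_fd)
	{
		struct eventfd_ctx *ctx = eventfd_ctx_fdget(event_fd);

		if (IS_ERR(ctx))
			return PTR_ERR(ctx);
		demo_evt = ctx;
		return 0;
	}

	/* interrupt path: wake any userspace waiter on the eventfd. */
	static void demo_notify(void)
	{
		if (demo_evt)
			eventfd_signal(demo_evt, 1);
	}
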
ctx               247 drivers/staging/media/hantro/hantro.h 	int (*buf_finish)(struct hantro_ctx *ctx,
ctx               379 drivers/staging/media/hantro/hantro.h bool hantro_is_encoder_ctx(const struct hantro_ctx *ctx);
ctx               381 drivers/staging/media/hantro/hantro.h void *hantro_get_ctrl(struct hantro_ctx *ctx, u32 id);
ctx               385 drivers/staging/media/hantro/hantro.h hantro_get_src_buf(struct hantro_ctx *ctx)
ctx               387 drivers/staging/media/hantro/hantro.h 	return v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               391 drivers/staging/media/hantro/hantro.h hantro_get_dst_buf(struct hantro_ctx *ctx)
ctx               393 drivers/staging/media/hantro/hantro.h 	return v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx                38 drivers/staging/media/hantro/hantro_drv.c void *hantro_get_ctrl(struct hantro_ctx *ctx, u32 id)
ctx                42 drivers/staging/media/hantro/hantro_drv.c 	ctrl = v4l2_ctrl_find(&ctx->ctrl_handler, id);
ctx                59 drivers/staging/media/hantro/hantro_drv.c hantro_enc_buf_finish(struct hantro_ctx *ctx, struct vb2_buffer *buf,
ctx                64 drivers/staging/media/hantro/hantro_drv.c 	avail_size = vb2_plane_size(buf, 0) - ctx->vpu_dst_fmt->header_size;
ctx                72 drivers/staging/media/hantro/hantro_drv.c 	if (ctx->jpeg_enc.bounce_buffer.cpu) {
ctx                74 drivers/staging/media/hantro/hantro_drv.c 		       ctx->vpu_dst_fmt->header_size,
ctx                75 drivers/staging/media/hantro/hantro_drv.c 		       ctx->jpeg_enc.bounce_buffer.cpu, bytesused);
ctx                78 drivers/staging/media/hantro/hantro_drv.c 		ctx->vpu_dst_fmt->header_size + bytesused;
ctx                83 drivers/staging/media/hantro/hantro_drv.c hantro_dec_buf_finish(struct hantro_ctx *ctx, struct vb2_buffer *buf,
ctx                87 drivers/staging/media/hantro/hantro_drv.c 	buf->planes[0].bytesused = ctx->dst_fmt.plane_fmt[0].sizeimage;
ctx                92 drivers/staging/media/hantro/hantro_drv.c 			      struct hantro_ctx *ctx,
ctx               103 drivers/staging/media/hantro/hantro_drv.c 	src = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               104 drivers/staging/media/hantro/hantro_drv.c 	dst = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               111 drivers/staging/media/hantro/hantro_drv.c 	src->sequence = ctx->sequence_out++;
ctx               112 drivers/staging/media/hantro/hantro_drv.c 	dst->sequence = ctx->sequence_cap++;
ctx               114 drivers/staging/media/hantro/hantro_drv.c 	ret = ctx->buf_finish(ctx, &dst->vb2_buf, bytesused);
ctx               121 drivers/staging/media/hantro/hantro_drv.c 	v4l2_m2m_job_finish(vpu->m2m_dev, ctx->fh.m2m_ctx);
ctx               127 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx =
ctx               136 drivers/staging/media/hantro/hantro_drv.c 		hantro_job_finish(vpu, ctx, bytesused, result);
ctx               142 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx;
ctx               146 drivers/staging/media/hantro/hantro_drv.c 	ctx = v4l2_m2m_get_curr_priv(vpu->m2m_dev);
ctx               147 drivers/staging/media/hantro/hantro_drv.c 	if (ctx) {
ctx               149 drivers/staging/media/hantro/hantro_drv.c 		ctx->codec_ops->reset(ctx);
ctx               150 drivers/staging/media/hantro/hantro_drv.c 		hantro_job_finish(vpu, ctx, 0, VB2_BUF_STATE_ERROR);
ctx               154 drivers/staging/media/hantro/hantro_drv.c void hantro_prepare_run(struct hantro_ctx *ctx)
ctx               158 drivers/staging/media/hantro/hantro_drv.c 	src_buf = hantro_get_src_buf(ctx);
ctx               160 drivers/staging/media/hantro/hantro_drv.c 				&ctx->ctrl_handler);
ctx               163 drivers/staging/media/hantro/hantro_drv.c void hantro_finish_run(struct hantro_ctx *ctx)
ctx               167 drivers/staging/media/hantro/hantro_drv.c 	src_buf = hantro_get_src_buf(ctx);
ctx               169 drivers/staging/media/hantro/hantro_drv.c 				   &ctx->ctrl_handler);
ctx               172 drivers/staging/media/hantro/hantro_drv.c 	schedule_delayed_work(&ctx->dev->watchdog_work,
ctx               178 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx = priv;
ctx               182 drivers/staging/media/hantro/hantro_drv.c 	src = hantro_get_src_buf(ctx);
ctx               183 drivers/staging/media/hantro/hantro_drv.c 	dst = hantro_get_dst_buf(ctx);
ctx               185 drivers/staging/media/hantro/hantro_drv.c 	ret = clk_bulk_enable(ctx->dev->variant->num_clocks, ctx->dev->clocks);
ctx               188 drivers/staging/media/hantro/hantro_drv.c 	ret = pm_runtime_get_sync(ctx->dev->dev);
ctx               194 drivers/staging/media/hantro/hantro_drv.c 	ctx->codec_ops->run(ctx);
ctx               198 drivers/staging/media/hantro/hantro_drv.c 	hantro_job_finish(ctx->dev, ctx, 0, VB2_BUF_STATE_ERROR);
ctx               201 drivers/staging/media/hantro/hantro_drv.c bool hantro_is_encoder_ctx(const struct hantro_ctx *ctx)
ctx               203 drivers/staging/media/hantro/hantro_drv.c 	return ctx->buf_finish == hantro_enc_buf_finish;
ctx               213 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx = priv;
ctx               218 drivers/staging/media/hantro/hantro_drv.c 	src_vq->drv_priv = ctx;
ctx               231 drivers/staging/media/hantro/hantro_drv.c 	src_vq->lock = &ctx->dev->vpu_mutex;
ctx               232 drivers/staging/media/hantro/hantro_drv.c 	src_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               246 drivers/staging/media/hantro/hantro_drv.c 	if (hantro_is_encoder_ctx(ctx)) {
ctx               257 drivers/staging/media/hantro/hantro_drv.c 	dst_vq->drv_priv = ctx;
ctx               261 drivers/staging/media/hantro/hantro_drv.c 	dst_vq->lock = &ctx->dev->vpu_mutex;
ctx               262 drivers/staging/media/hantro/hantro_drv.c 	dst_vq->dev = ctx->dev->v4l2_dev.dev;
ctx               269 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx;
ctx               271 drivers/staging/media/hantro/hantro_drv.c 	ctx = container_of(ctrl->handler,
ctx               278 drivers/staging/media/hantro/hantro_drv.c 		ctx->jpeg_quality = ctrl->val;
ctx               363 drivers/staging/media/hantro/hantro_drv.c 			      struct hantro_ctx *ctx,
ctx               368 drivers/staging/media/hantro/hantro_drv.c 	v4l2_ctrl_handler_init(&ctx->ctrl_handler, num_ctrls);
ctx               374 drivers/staging/media/hantro/hantro_drv.c 		v4l2_ctrl_new_custom(&ctx->ctrl_handler,
ctx               376 drivers/staging/media/hantro/hantro_drv.c 		if (ctx->ctrl_handler.error) {
ctx               379 drivers/staging/media/hantro/hantro_drv.c 				ctx->ctrl_handler.error);
ctx               380 drivers/staging/media/hantro/hantro_drv.c 			v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               381 drivers/staging/media/hantro/hantro_drv.c 			return ctx->ctrl_handler.error;
ctx               384 drivers/staging/media/hantro/hantro_drv.c 	return v4l2_ctrl_handler_setup(&ctx->ctrl_handler);
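
hantro_ctrls_setup() above follows the standard v4l2_ctrl_handler lifecycle: init with a size hint, add controls, check the handler's sticky error field after each add, free on failure, and finally push initial values with v4l2_ctrl_handler_setup(). A sketch of that sequence, assuming only the v4l2-ctrls API; demo_ctrls_setup is a hypothetical name:

	#include <media/v4l2-ctrls.h>

	static int demo_ctrls_setup(struct v4l2_ctrl_handler *hdl,
				    const struct v4l2_ctrl_config *cfgs,
				    unsigned int num)
	{
		unsigned int i;

		v4l2_ctrl_handler_init(hdl, num);
		for (i = 0; i < num; i++) {
			v4l2_ctrl_new_custom(hdl, &cfgs[i], NULL);
			if (hdl->error) {
				int err = hdl->error;

				/* One free call releases every control added
				 * so far, so no per-control unwind is needed. */
				v4l2_ctrl_handler_free(hdl);
				return err;
			}
		}
		/* Push the initial control values to the driver. */
		return v4l2_ctrl_handler_setup(hdl);
	}
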
ctx               396 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx;
ctx               408 drivers/staging/media/hantro/hantro_drv.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               409 drivers/staging/media/hantro/hantro_drv.c 	if (!ctx)
ctx               412 drivers/staging/media/hantro/hantro_drv.c 	ctx->dev = vpu;
ctx               415 drivers/staging/media/hantro/hantro_drv.c 		ctx->buf_finish = hantro_enc_buf_finish;
ctx               416 drivers/staging/media/hantro/hantro_drv.c 		ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(vpu->m2m_dev, ctx,
ctx               420 drivers/staging/media/hantro/hantro_drv.c 		ctx->buf_finish = hantro_dec_buf_finish;
ctx               421 drivers/staging/media/hantro/hantro_drv.c 		ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(vpu->m2m_dev, ctx,
ctx               424 drivers/staging/media/hantro/hantro_drv.c 		ctx->fh.m2m_ctx = ERR_PTR(-ENODEV);
ctx               426 drivers/staging/media/hantro/hantro_drv.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               427 drivers/staging/media/hantro/hantro_drv.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               428 drivers/staging/media/hantro/hantro_drv.c 		kfree(ctx);
ctx               432 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_init(&ctx->fh, vdev);
ctx               433 drivers/staging/media/hantro/hantro_drv.c 	filp->private_data = &ctx->fh;
ctx               434 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_add(&ctx->fh);
ctx               436 drivers/staging/media/hantro/hantro_drv.c 	hantro_reset_fmts(ctx);
ctx               438 drivers/staging/media/hantro/hantro_drv.c 	ret = hantro_ctrls_setup(vpu, ctx, allowed_codecs);
ctx               443 drivers/staging/media/hantro/hantro_drv.c 	ctx->fh.ctrl_handler = &ctx->ctrl_handler;
ctx               448 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               449 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               450 drivers/staging/media/hantro/hantro_drv.c 	kfree(ctx);
ctx               456 drivers/staging/media/hantro/hantro_drv.c 	struct hantro_ctx *ctx =
ctx               463 drivers/staging/media/hantro/hantro_drv.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               464 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_del(&ctx->fh);
ctx               465 drivers/staging/media/hantro/hantro_drv.c 	v4l2_fh_exit(&ctx->fh);
ctx               466 drivers/staging/media/hantro/hantro_drv.c 	v4l2_ctrl_handler_free(&ctx->ctrl_handler);
ctx               467 drivers/staging/media/hantro/hantro_drv.c 	kfree(ctx);
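
hantro_job_finish() above follows the standard mem2mem completion sequence: pop both buffers, propagate metadata, mark them done, and release the job so the framework can schedule the next one. A compressed sketch of just that sequence (the real function also stamps sequence numbers, runs ctx->buf_finish, and drops clock/PM references):

	#include <media/v4l2-mem2mem.h>

	static void demo_job_finish(struct v4l2_m2m_dev *m2m_dev,
				    struct v4l2_m2m_ctx *m2m_ctx,
				    enum vb2_buffer_state result)
	{
		struct vb2_v4l2_buffer *src, *dst;

		src = v4l2_m2m_src_buf_remove(m2m_ctx);
		dst = v4l2_m2m_dst_buf_remove(m2m_ctx);

		/* Propagate timestamps and flags from OUTPUT to CAPTURE. */
		v4l2_m2m_buf_copy_metadata(src, dst, true);

		v4l2_m2m_buf_done(src, result);
		v4l2_m2m_buf_done(dst, result);

		/* Tell the framework this context can be scheduled again. */
		v4l2_m2m_job_finish(m2m_dev, m2m_ctx);
	}
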
ctx                22 drivers/staging/media/hantro/hantro_g1_h264_dec.c static void set_params(struct hantro_ctx *ctx)
ctx                24 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
ctx                29 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct vb2_v4l2_buffer *src_buf = hantro_get_src_buf(ctx);
ctx                30 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               130 drivers/staging/media/hantro/hantro_g1_h264_dec.c static void set_ref(struct hantro_ctx *ctx)
ctx               132 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct v4l2_h264_dpb_entry *dpb = ctx->h264_dec.dpb;
ctx               134 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               176 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	b0_reflist = ctx->h264_dec.reflists.b0;
ctx               177 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	b1_reflist = ctx->h264_dec.reflists.b1;
ctx               178 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	p_reflist = ctx->h264_dec.reflists.p;
ctx               225 drivers/staging/media/hantro/hantro_g1_h264_dec.c 		struct vb2_buffer *buf =  hantro_h264_get_ref_buf(ctx, i);
ctx               232 drivers/staging/media/hantro/hantro_g1_h264_dec.c static void set_buffers(struct hantro_ctx *ctx)
ctx               234 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
ctx               236 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               239 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	src_buf = hantro_get_src_buf(ctx);
ctx               240 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	dst_buf = hantro_get_dst_buf(ctx);
ctx               252 drivers/staging/media/hantro/hantro_g1_h264_dec.c 		size_t pic_size = ctx->h264_dec.pic_size;
ctx               256 drivers/staging/media/hantro/hantro_g1_h264_dec.c 			mv_offset += 32 * H264_MB_WIDTH(ctx->dst_fmt.width);
ctx               263 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	vdpu_write_relaxed(vpu, ctx->h264_dec.priv.dma, G1_REG_ADDR_QTABLE);
ctx               266 drivers/staging/media/hantro/hantro_g1_h264_dec.c void hantro_g1_h264_dec_run(struct hantro_ctx *ctx)
ctx               268 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               271 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	if (hantro_h264_dec_prepare_run(ctx))
ctx               275 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	set_params(ctx);
ctx               276 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	set_ref(ctx);
ctx               277 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	set_buffers(ctx);
ctx               279 drivers/staging/media/hantro/hantro_g1_h264_dec.c 	hantro_finish_run(ctx);
ctx                86 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 				     struct hantro_ctx *ctx)
ctx                90 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	quantization = hantro_get_ctrl(ctx,
ctx                92 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	hantro_mpeg2_dec_copy_qtable(ctx->mpeg2_dec.qtable.cpu,
ctx                94 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	vdpu_write_relaxed(vpu, ctx->mpeg2_dec.qtable.dma,
ctx                99 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c hantro_g1_mpeg2_dec_set_buffers(struct hantro_dev *vpu, struct hantro_ctx *ctx,
ctx               110 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	vq = v4l2_m2m_get_dst_vq(ctx->fh.m2m_ctx);
ctx               131 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 		addr += ALIGN(ctx->dst_fmt.width, 16);
ctx               161 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c void hantro_g1_mpeg2_dec_run(struct hantro_ctx *ctx)
ctx               163 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               170 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	src_buf = hantro_get_src_buf(ctx);
ctx               171 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	dst_buf = hantro_get_dst_buf(ctx);
ctx               174 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	hantro_prepare_run(ctx);
ctx               176 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	slice_params = hantro_get_ctrl(ctx,
ctx               210 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	reg = G1_REG_PIC_MB_WIDTH(MPEG2_MB_WIDTH(ctx->dst_fmt.width)) |
ctx               211 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	      G1_REG_PIC_MB_HEIGHT_P(MPEG2_MB_HEIGHT(ctx->dst_fmt.height)) |
ctx               244 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	hantro_g1_mpeg2_dec_set_quantization(vpu, ctx);
ctx               246 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	hantro_g1_mpeg2_dec_set_buffers(vpu, ctx, &src_buf->vb2_buf,
ctx               250 drivers/staging/media/hantro/hantro_g1_mpeg2_dec.c 	hantro_finish_run(ctx);
ctx               135 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_lf(struct hantro_ctx *ctx,
ctx               140 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               177 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_qp(struct hantro_ctx *ctx,
ctx               182 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               232 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_parts(struct hantro_ctx *ctx,
ctx               235 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               246 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	vb2_src = hantro_get_src_buf(ctx);
ctx               330 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_tap(struct hantro_ctx *ctx,
ctx               333 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               370 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_ref(struct hantro_ctx *ctx,
ctx               373 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct vb2_queue *cap_q = &ctx->fh.m2m_ctx->cap_q_ctx.q;
ctx               374 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               378 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	vb2_dst = hantro_get_dst_buf(ctx);
ctx               402 drivers/staging/media/hantro/hantro_g1_vp8_dec.c static void cfg_buffers(struct hantro_ctx *ctx,
ctx               406 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               411 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	vb2_dst = hantro_get_dst_buf(ctx);
ctx               414 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
ctx               418 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	reg = G1_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
ctx               430 drivers/staging/media/hantro/hantro_g1_vp8_dec.c void hantro_g1_vp8_dec_run(struct hantro_ctx *ctx)
ctx               433 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               434 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	size_t height = ctx->dst_fmt.height;
ctx               435 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	size_t width = ctx->dst_fmt.width;
ctx               439 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	hantro_prepare_run(ctx);
ctx               441 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	hdr = hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER);
ctx               446 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	if (VP8_FRAME_IS_KEY_FRAME(hdr) && ctx->vp8_dec.segment_map.cpu)
ctx               447 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 		memset(ctx->vp8_dec.segment_map.cpu, 0,
ctx               448 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 		       ctx->vp8_dec.segment_map.size);
ctx               450 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	hantro_vp8_prob_update(ctx, hdr);
ctx               493 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_lf(ctx, hdr);
ctx               494 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_qp(ctx, hdr);
ctx               495 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_parts(ctx, hdr);
ctx               496 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_tap(ctx, hdr);
ctx               497 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_ref(ctx, hdr);
ctx               498 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	cfg_buffers(ctx, hdr);
ctx               500 drivers/staging/media/hantro/hantro_g1_vp8_dec.c 	hantro_finish_run(ctx);
ctx                19 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 				       struct hantro_ctx *ctx)
ctx                21 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
ctx                27 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 		| H1_REG_IN_IMG_CTRL_FMT(ctx->vpu_src_fmt->enc_fmt);
ctx                32 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 					   struct hantro_ctx *ctx,
ctx                35 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
ctx                40 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.dma,
ctx                42 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.size,
ctx                85 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c void hantro_h1_jpeg_enc_run(struct hantro_ctx *ctx)
ctx                87 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	struct hantro_dev *vpu = ctx->dev;
ctx                92 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	src_buf = hantro_get_src_buf(ctx);
ctx                93 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	dst_buf = hantro_get_dst_buf(ctx);
ctx                95 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	hantro_prepare_run(ctx);
ctx                99 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	jpeg_ctx.width = ctx->dst_fmt.width;
ctx               100 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	jpeg_ctx.height = ctx->dst_fmt.height;
ctx               101 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	jpeg_ctx.quality = ctx->jpeg_quality;
ctx               108 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	hantro_h1_set_src_img_ctrl(vpu, ctx);
ctx               109 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	hantro_h1_jpeg_enc_set_buffers(vpu, ctx, &src_buf->vb2_buf);
ctx               124 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	reg = H1_REG_ENC_CTRL_WIDTH(JPEG_MB_WIDTH(ctx->src_fmt.width))
ctx               125 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 		| H1_REG_ENC_CTRL_HEIGHT(JPEG_MB_HEIGHT(ctx->src_fmt.height))
ctx               130 drivers/staging/media/hantro/hantro_h1_jpeg_enc.c 	hantro_finish_run(ctx);
ctx               198 drivers/staging/media/hantro/hantro_h264.c reorder_scaling_list(struct hantro_ctx *ctx)
ctx               200 drivers/staging/media/hantro/hantro_h264.c 	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
ctx               205 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_h264_dec_priv_tbl *tbl = ctx->h264_dec.priv.cpu;
ctx               224 drivers/staging/media/hantro/hantro_h264.c static void prepare_table(struct hantro_ctx *ctx)
ctx               226 drivers/staging/media/hantro/hantro_h264.c 	const struct hantro_h264_dec_ctrls *ctrls = &ctx->h264_dec.ctrls;
ctx               228 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_h264_dec_priv_tbl *tbl = ctx->h264_dec.priv.cpu;
ctx               229 drivers/staging/media/hantro/hantro_h264.c 	const struct v4l2_h264_dpb_entry *dpb = ctx->h264_dec.dpb;
ctx               240 drivers/staging/media/hantro/hantro_h264.c 	reorder_scaling_list(ctx);
ctx               268 drivers/staging/media/hantro/hantro_h264.c init_reflist_builder(struct hantro_ctx *ctx,
ctx               274 drivers/staging/media/hantro/hantro_h264.c 	struct vb2_v4l2_buffer *buf = hantro_get_dst_buf(ctx);
ctx               275 drivers/staging/media/hantro/hantro_h264.c 	const struct v4l2_h264_dpb_entry *dpb = ctx->h264_dec.dpb;
ctx               276 drivers/staging/media/hantro/hantro_h264.c 	struct vb2_queue *cap_q = &ctx->fh.m2m_ctx->cap_q_ctx.q;
ctx               280 drivers/staging/media/hantro/hantro_h264.c 	dec_param = ctx->h264_dec.ctrls.decode;
ctx               281 drivers/staging/media/hantro/hantro_h264.c 	slice_params = &ctx->h264_dec.ctrls.slices[0];
ctx               282 drivers/staging/media/hantro/hantro_h264.c 	sps = ctx->h264_dec.ctrls.sps;
ctx               291 drivers/staging/media/hantro/hantro_h264.c 	for (i = 0; i < ARRAY_SIZE(ctx->h264_dec.dpb); i++) {
ctx               320 drivers/staging/media/hantro/hantro_h264.c 	for (i = b->num_valid; i < ARRAY_SIZE(ctx->h264_dec.dpb); i++)
ctx               472 drivers/staging/media/hantro/hantro_h264.c static void update_dpb(struct hantro_ctx *ctx)
ctx               479 drivers/staging/media/hantro/hantro_h264.c 	dec_param = ctx->h264_dec.ctrls.decode;
ctx               482 drivers/staging/media/hantro/hantro_h264.c 	for (i = 0; i < ARRAY_SIZE(ctx->h264_dec.dpb); i++)
ctx               483 drivers/staging/media/hantro/hantro_h264.c 		ctx->h264_dec.dpb[i].flags &= ~V4L2_H264_DPB_ENTRY_FLAG_ACTIVE;
ctx               496 drivers/staging/media/hantro/hantro_h264.c 		for_each_clear_bit(j, used, ARRAY_SIZE(ctx->h264_dec.dpb)) {
ctx               499 drivers/staging/media/hantro/hantro_h264.c 			cdpb = &ctx->h264_dec.dpb[j];
ctx               509 drivers/staging/media/hantro/hantro_h264.c 		if (j == ARRAY_SIZE(ctx->h264_dec.dpb))
ctx               523 drivers/staging/media/hantro/hantro_h264.c 		j = find_first_zero_bit(used, ARRAY_SIZE(ctx->h264_dec.dpb));
ctx               524 drivers/staging/media/hantro/hantro_h264.c 		if (WARN_ON(j >= ARRAY_SIZE(ctx->h264_dec.dpb)))
ctx               527 drivers/staging/media/hantro/hantro_h264.c 		cdpb = &ctx->h264_dec.dpb[j];
ctx               533 drivers/staging/media/hantro/hantro_h264.c struct vb2_buffer *hantro_h264_get_ref_buf(struct hantro_ctx *ctx,
ctx               536 drivers/staging/media/hantro/hantro_h264.c 	struct vb2_queue *cap_q = &ctx->fh.m2m_ctx->cap_q_ctx.q;
ctx               537 drivers/staging/media/hantro/hantro_h264.c 	struct v4l2_h264_dpb_entry *dpb = ctx->h264_dec.dpb;
ctx               554 drivers/staging/media/hantro/hantro_h264.c 		dst_buf = hantro_get_dst_buf(ctx);
ctx               561 drivers/staging/media/hantro/hantro_h264.c int hantro_h264_dec_prepare_run(struct hantro_ctx *ctx)
ctx               563 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_h264_dec_hw_ctx *h264_ctx = &ctx->h264_dec;
ctx               567 drivers/staging/media/hantro/hantro_h264.c 	hantro_prepare_run(ctx);
ctx               570 drivers/staging/media/hantro/hantro_h264.c 		hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_H264_SCALING_MATRIX);
ctx               575 drivers/staging/media/hantro/hantro_h264.c 		hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_H264_DECODE_PARAMS);
ctx               580 drivers/staging/media/hantro/hantro_h264.c 		hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_H264_SLICE_PARAMS);
ctx               585 drivers/staging/media/hantro/hantro_h264.c 		hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_H264_SPS);
ctx               590 drivers/staging/media/hantro/hantro_h264.c 		hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_H264_PPS);
ctx               595 drivers/staging/media/hantro/hantro_h264.c 	update_dpb(ctx);
ctx               598 drivers/staging/media/hantro/hantro_h264.c 	prepare_table(ctx);
ctx               601 drivers/staging/media/hantro/hantro_h264.c 	init_reflist_builder(ctx, &reflist_builder);
ctx               608 drivers/staging/media/hantro/hantro_h264.c void hantro_h264_dec_exit(struct hantro_ctx *ctx)
ctx               610 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_dev *vpu = ctx->dev;
ctx               611 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_h264_dec_hw_ctx *h264_dec = &ctx->h264_dec;
ctx               617 drivers/staging/media/hantro/hantro_h264.c int hantro_h264_dec_init(struct hantro_ctx *ctx)
ctx               619 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_dev *vpu = ctx->dev;
ctx               620 drivers/staging/media/hantro/hantro_h264.c 	struct hantro_h264_dec_hw_ctx *h264_dec = &ctx->h264_dec;
ctx               634 drivers/staging/media/hantro/hantro_h264.c 	v4l2_fill_pixfmt_mp(&pix_mp, ctx->dst_fmt.pixelformat,
ctx               635 drivers/staging/media/hantro/hantro_h264.c 			    ctx->dst_fmt.width, ctx->dst_fmt.height);
ctx               126 drivers/staging/media/hantro/hantro_hw.h 	int (*init)(struct hantro_ctx *ctx);
ctx               127 drivers/staging/media/hantro/hantro_hw.h 	void (*exit)(struct hantro_ctx *ctx);
ctx               128 drivers/staging/media/hantro/hantro_hw.h 	void (*run)(struct hantro_ctx *ctx);
ctx               129 drivers/staging/media/hantro/hantro_hw.h 	void (*done)(struct hantro_ctx *ctx, enum vb2_buffer_state);
ctx               130 drivers/staging/media/hantro/hantro_hw.h 	void (*reset)(struct hantro_ctx *ctx);
ctx               150 drivers/staging/media/hantro/hantro_hw.h void hantro_run(struct hantro_ctx *ctx);
ctx               153 drivers/staging/media/hantro/hantro_hw.h void hantro_prepare_run(struct hantro_ctx *ctx);
ctx               154 drivers/staging/media/hantro/hantro_hw.h void hantro_finish_run(struct hantro_ctx *ctx);
ctx               156 drivers/staging/media/hantro/hantro_hw.h void hantro_h1_jpeg_enc_run(struct hantro_ctx *ctx);
ctx               157 drivers/staging/media/hantro/hantro_hw.h void rk3399_vpu_jpeg_enc_run(struct hantro_ctx *ctx);
ctx               158 drivers/staging/media/hantro/hantro_hw.h int hantro_jpeg_enc_init(struct hantro_ctx *ctx);
ctx               159 drivers/staging/media/hantro/hantro_hw.h void hantro_jpeg_enc_exit(struct hantro_ctx *ctx);
ctx               161 drivers/staging/media/hantro/hantro_hw.h struct vb2_buffer *hantro_h264_get_ref_buf(struct hantro_ctx *ctx,
ctx               163 drivers/staging/media/hantro/hantro_hw.h int hantro_h264_dec_prepare_run(struct hantro_ctx *ctx);
ctx               164 drivers/staging/media/hantro/hantro_hw.h void hantro_g1_h264_dec_run(struct hantro_ctx *ctx);
ctx               165 drivers/staging/media/hantro/hantro_hw.h int hantro_h264_dec_init(struct hantro_ctx *ctx);
ctx               166 drivers/staging/media/hantro/hantro_hw.h void hantro_h264_dec_exit(struct hantro_ctx *ctx);
ctx               168 drivers/staging/media/hantro/hantro_hw.h void hantro_g1_mpeg2_dec_run(struct hantro_ctx *ctx);
ctx               169 drivers/staging/media/hantro/hantro_hw.h void rk3399_vpu_mpeg2_dec_run(struct hantro_ctx *ctx);
ctx               172 drivers/staging/media/hantro/hantro_hw.h int hantro_mpeg2_dec_init(struct hantro_ctx *ctx);
ctx               173 drivers/staging/media/hantro/hantro_hw.h void hantro_mpeg2_dec_exit(struct hantro_ctx *ctx);
ctx               175 drivers/staging/media/hantro/hantro_hw.h void hantro_g1_vp8_dec_run(struct hantro_ctx *ctx);
ctx               176 drivers/staging/media/hantro/hantro_hw.h void rk3399_vpu_vp8_dec_run(struct hantro_ctx *ctx);
ctx               177 drivers/staging/media/hantro/hantro_hw.h int hantro_vp8_dec_init(struct hantro_ctx *ctx);
ctx               178 drivers/staging/media/hantro/hantro_hw.h void hantro_vp8_dec_exit(struct hantro_ctx *ctx);
ctx               179 drivers/staging/media/hantro/hantro_hw.h void hantro_vp8_prob_update(struct hantro_ctx *ctx,
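
hantro_hw.h declares the per-codec entry points (init/exit/run/done/reset) as a table of function pointers; the v4l2 glue further down selects a table by codec mode and calls through it, checking optional hooks first. A self-contained C sketch of that dispatch style with hypothetical demo_* names:

	#include <stdio.h>

	struct demo_ctx;

	struct demo_codec_ops {
		int  (*init)(struct demo_ctx *ctx);
		void (*run)(struct demo_ctx *ctx);
		void (*exit)(struct demo_ctx *ctx);
	};

	struct demo_ctx {
		const struct demo_codec_ops *codec_ops;
	};

	static int  jpeg_init(struct demo_ctx *ctx) { (void)ctx; puts("jpeg init"); return 0; }
	static void jpeg_run(struct demo_ctx *ctx)  { (void)ctx; puts("jpeg run"); }
	static void jpeg_exit(struct demo_ctx *ctx) { (void)ctx; puts("jpeg exit"); }

	static const struct demo_codec_ops jpeg_ops = {
		.init = jpeg_init,
		.run  = jpeg_run,
		.exit = jpeg_exit,
	};

	int main(void)
	{
		struct demo_ctx ctx = { .codec_ops = &jpeg_ops };

		/* Optional hooks are checked before the indirect call,
		 * mirroring the ctx->codec_ops->init checks in the index. */
		if (ctx.codec_ops->init && ctx.codec_ops->init(&ctx))
			return 1;
		ctx.codec_ops->run(&ctx);
		if (ctx.codec_ops->exit)
			ctx.codec_ops->exit(&ctx);
		return 0;
	}
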
ctx               265 drivers/staging/media/hantro/hantro_jpeg.c hantro_jpeg_get_qtable(struct hantro_jpeg_ctx *ctx, int index)
ctx               268 drivers/staging/media/hantro/hantro_jpeg.c 		return ctx->buffer + LUMA_QUANT_OFF;
ctx               269 drivers/staging/media/hantro/hantro_jpeg.c 	return ctx->buffer + CHROMA_QUANT_OFF;
ctx               272 drivers/staging/media/hantro/hantro_jpeg.c void hantro_jpeg_header_assemble(struct hantro_jpeg_ctx *ctx)
ctx               274 drivers/staging/media/hantro/hantro_jpeg.c 	char *buf = ctx->buffer;
ctx               279 drivers/staging/media/hantro/hantro_jpeg.c 	buf[HEIGHT_OFF + 0] = ctx->height >> 8;
ctx               280 drivers/staging/media/hantro/hantro_jpeg.c 	buf[HEIGHT_OFF + 1] = ctx->height;
ctx               281 drivers/staging/media/hantro/hantro_jpeg.c 	buf[WIDTH_OFF + 0] = ctx->width >> 8;
ctx               282 drivers/staging/media/hantro/hantro_jpeg.c 	buf[WIDTH_OFF + 1] = ctx->width;
ctx               291 drivers/staging/media/hantro/hantro_jpeg.c 	jpeg_set_quality(buf, ctx->quality);
ctx               294 drivers/staging/media/hantro/hantro_jpeg.c int hantro_jpeg_enc_init(struct hantro_ctx *ctx)
ctx               296 drivers/staging/media/hantro/hantro_jpeg.c 	ctx->jpeg_enc.bounce_buffer.size =
ctx               297 drivers/staging/media/hantro/hantro_jpeg.c 		ctx->dst_fmt.plane_fmt[0].sizeimage -
ctx               298 drivers/staging/media/hantro/hantro_jpeg.c 		ctx->vpu_dst_fmt->header_size;
ctx               300 drivers/staging/media/hantro/hantro_jpeg.c 	ctx->jpeg_enc.bounce_buffer.cpu =
ctx               301 drivers/staging/media/hantro/hantro_jpeg.c 		dma_alloc_attrs(ctx->dev->dev,
ctx               302 drivers/staging/media/hantro/hantro_jpeg.c 				ctx->jpeg_enc.bounce_buffer.size,
ctx               303 drivers/staging/media/hantro/hantro_jpeg.c 				&ctx->jpeg_enc.bounce_buffer.dma,
ctx               306 drivers/staging/media/hantro/hantro_jpeg.c 	if (!ctx->jpeg_enc.bounce_buffer.cpu)
ctx               312 drivers/staging/media/hantro/hantro_jpeg.c void hantro_jpeg_enc_exit(struct hantro_ctx *ctx)
ctx               314 drivers/staging/media/hantro/hantro_jpeg.c 	dma_free_attrs(ctx->dev->dev,
ctx               315 drivers/staging/media/hantro/hantro_jpeg.c 		       ctx->jpeg_enc.bounce_buffer.size,
ctx               316 drivers/staging/media/hantro/hantro_jpeg.c 		       ctx->jpeg_enc.bounce_buffer.cpu,
ctx               317 drivers/staging/media/hantro/hantro_jpeg.c 		       ctx->jpeg_enc.bounce_buffer.dma,
ctx                12 drivers/staging/media/hantro/hantro_jpeg.h unsigned char *hantro_jpeg_get_qtable(struct hantro_jpeg_ctx *ctx, int index);
ctx                13 drivers/staging/media/hantro/hantro_jpeg.h void hantro_jpeg_header_assemble(struct hantro_jpeg_ctx *ctx);
ctx                38 drivers/staging/media/hantro/hantro_mpeg2.c int hantro_mpeg2_dec_init(struct hantro_ctx *ctx)
ctx                40 drivers/staging/media/hantro/hantro_mpeg2.c 	struct hantro_dev *vpu = ctx->dev;
ctx                42 drivers/staging/media/hantro/hantro_mpeg2.c 	ctx->mpeg2_dec.qtable.size = ARRAY_SIZE(zigzag) * 4;
ctx                43 drivers/staging/media/hantro/hantro_mpeg2.c 	ctx->mpeg2_dec.qtable.cpu =
ctx                45 drivers/staging/media/hantro/hantro_mpeg2.c 				   ctx->mpeg2_dec.qtable.size,
ctx                46 drivers/staging/media/hantro/hantro_mpeg2.c 				   &ctx->mpeg2_dec.qtable.dma,
ctx                48 drivers/staging/media/hantro/hantro_mpeg2.c 	if (!ctx->mpeg2_dec.qtable.cpu)
ctx                53 drivers/staging/media/hantro/hantro_mpeg2.c void hantro_mpeg2_dec_exit(struct hantro_ctx *ctx)
ctx                55 drivers/staging/media/hantro/hantro_mpeg2.c 	struct hantro_dev *vpu = ctx->dev;
ctx                58 drivers/staging/media/hantro/hantro_mpeg2.c 			  ctx->mpeg2_dec.qtable.size,
ctx                59 drivers/staging/media/hantro/hantro_mpeg2.c 			  ctx->mpeg2_dec.qtable.cpu,
ctx                60 drivers/staging/media/hantro/hantro_mpeg2.c 			  ctx->mpeg2_dec.qtable.dma);
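
hantro_mpeg2_dec_init()/..._exit() bracket the context lifetime around one coherent DMA allocation; note the exit call must repeat exactly the same size/cpu/dma triple that the allocation returned. A minimal sketch of that pairing, with a hypothetical demo_aux_buf holding the triple together:

	#include <linux/dma-mapping.h>

	struct demo_aux_buf {
		void *cpu;
		dma_addr_t dma;
		size_t size;
	};

	static int demo_aux_alloc(struct device *dev, struct demo_aux_buf *buf,
				  size_t size)
	{
		buf->size = size;
		buf->cpu = dma_alloc_coherent(dev, buf->size, &buf->dma,
					      GFP_KERNEL);
		return buf->cpu ? 0 : -ENOMEM;
	}

	static void demo_aux_free(struct device *dev, struct demo_aux_buf *buf)
	{
		/* Must match the allocation's size and both addresses. */
		dma_free_coherent(dev, buf->size, buf->cpu, buf->dma);
	}
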
ctx                34 drivers/staging/media/hantro/hantro_v4l2.c hantro_get_formats(const struct hantro_ctx *ctx, unsigned int *num_fmts)
ctx                38 drivers/staging/media/hantro/hantro_v4l2.c 	if (hantro_is_encoder_ctx(ctx)) {
ctx                39 drivers/staging/media/hantro/hantro_v4l2.c 		formats = ctx->dev->variant->enc_fmts;
ctx                40 drivers/staging/media/hantro/hantro_v4l2.c 		*num_fmts = ctx->dev->variant->num_enc_fmts;
ctx                42 drivers/staging/media/hantro/hantro_v4l2.c 		formats = ctx->dev->variant->dec_fmts;
ctx                43 drivers/staging/media/hantro/hantro_v4l2.c 		*num_fmts = ctx->dev->variant->num_dec_fmts;
ctx                91 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               101 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               123 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               138 drivers/staging/media/hantro/hantro_v4l2.c 	skip_mode_none = capture == hantro_is_encoder_ctx(ctx);
ctx               140 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               172 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               176 drivers/staging/media/hantro/hantro_v4l2.c 	*pix_mp = ctx->src_fmt;
ctx               185 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               189 drivers/staging/media/hantro/hantro_v4l2.c 	*pix_mp = ctx->dst_fmt;
ctx               197 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               203 drivers/staging/media/hantro/hantro_v4l2.c 	coded = capture == hantro_is_encoder_ctx(ctx);
ctx               211 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               221 drivers/staging/media/hantro/hantro_v4l2.c 	} else if (hantro_is_encoder_ctx(ctx)) {
ctx               222 drivers/staging/media/hantro/hantro_v4l2.c 		vpu_fmt = ctx->vpu_dst_fmt;
ctx               224 drivers/staging/media/hantro/hantro_v4l2.c 		vpu_fmt = ctx->vpu_src_fmt;
ctx               229 drivers/staging/media/hantro/hantro_v4l2.c 		pix_mp->width = ctx->src_fmt.width;
ctx               230 drivers/staging/media/hantro/hantro_v4l2.c 		pix_mp->height = ctx->src_fmt.height;
ctx               247 drivers/staging/media/hantro/hantro_v4l2.c 		if (ctx->vpu_src_fmt->fourcc == V4L2_PIX_FMT_H264_SLICE)
ctx               291 drivers/staging/media/hantro/hantro_v4l2.c hantro_reset_encoded_fmt(struct hantro_ctx *ctx)
ctx               297 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               300 drivers/staging/media/hantro/hantro_v4l2.c 	if (hantro_is_encoder_ctx(ctx)) {
ctx               301 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->vpu_dst_fmt = vpu_fmt;
ctx               302 drivers/staging/media/hantro/hantro_v4l2.c 		fmt = &ctx->dst_fmt;
ctx               304 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->vpu_src_fmt = vpu_fmt;
ctx               305 drivers/staging/media/hantro/hantro_v4l2.c 		fmt = &ctx->src_fmt;
ctx               317 drivers/staging/media/hantro/hantro_v4l2.c hantro_reset_raw_fmt(struct hantro_ctx *ctx)
ctx               323 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               326 drivers/staging/media/hantro/hantro_v4l2.c 	if (hantro_is_encoder_ctx(ctx)) {
ctx               327 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->vpu_src_fmt = raw_vpu_fmt;
ctx               328 drivers/staging/media/hantro/hantro_v4l2.c 		raw_fmt = &ctx->src_fmt;
ctx               329 drivers/staging/media/hantro/hantro_v4l2.c 		encoded_fmt = &ctx->dst_fmt;
ctx               331 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->vpu_dst_fmt = raw_vpu_fmt;
ctx               332 drivers/staging/media/hantro/hantro_v4l2.c 		raw_fmt = &ctx->dst_fmt;
ctx               333 drivers/staging/media/hantro/hantro_v4l2.c 		encoded_fmt = &ctx->src_fmt;
ctx               342 drivers/staging/media/hantro/hantro_v4l2.c void hantro_reset_fmts(struct hantro_ctx *ctx)
ctx               344 drivers/staging/media/hantro/hantro_v4l2.c 	hantro_reset_encoded_fmt(ctx);
ctx               345 drivers/staging/media/hantro/hantro_v4l2.c 	hantro_reset_raw_fmt(ctx);
ctx               349 drivers/staging/media/hantro/hantro_v4l2.c hantro_update_requires_request(struct hantro_ctx *ctx, u32 fourcc)
ctx               353 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = false;
ctx               358 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->fh.m2m_ctx->out_q_ctx.q.requires_requests = true;
ctx               369 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               370 drivers/staging/media/hantro/hantro_v4l2.c 	struct vb2_queue *vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               379 drivers/staging/media/hantro/hantro_v4l2.c 	if (!hantro_is_encoder_ctx(ctx)) {
ctx               388 drivers/staging/media/hantro/hantro_v4l2.c 		    pix_mp->pixelformat != ctx->src_fmt.pixelformat))
ctx               395 drivers/staging/media/hantro/hantro_v4l2.c 		peer_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               408 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               409 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->vpu_src_fmt = hantro_find_format(formats, num_fmts,
ctx               411 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->src_fmt = *pix_mp;
ctx               422 drivers/staging/media/hantro/hantro_v4l2.c 	if (!hantro_is_encoder_ctx(ctx))
ctx               423 drivers/staging/media/hantro/hantro_v4l2.c 		hantro_reset_raw_fmt(ctx);
ctx               426 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->dst_fmt.colorspace = pix_mp->colorspace;
ctx               427 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->dst_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
ctx               428 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->dst_fmt.xfer_func = pix_mp->xfer_func;
ctx               429 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->dst_fmt.quantization = pix_mp->quantization;
ctx               431 drivers/staging/media/hantro/hantro_v4l2.c 	hantro_update_requires_request(ctx, pix_mp->pixelformat);
ctx               433 drivers/staging/media/hantro/hantro_v4l2.c 	vpu_debug(0, "OUTPUT codec mode: %d\n", ctx->vpu_src_fmt->codec_mode);
ctx               443 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = fh_to_ctx(priv);
ctx               450 drivers/staging/media/hantro/hantro_v4l2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               454 drivers/staging/media/hantro/hantro_v4l2.c 	if (hantro_is_encoder_ctx(ctx)) {
ctx               462 drivers/staging/media/hantro/hantro_v4l2.c 		peer_vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               465 drivers/staging/media/hantro/hantro_v4l2.c 		    (pix_mp->pixelformat != ctx->dst_fmt.pixelformat ||
ctx               466 drivers/staging/media/hantro/hantro_v4l2.c 		     pix_mp->height != ctx->dst_fmt.height ||
ctx               467 drivers/staging/media/hantro/hantro_v4l2.c 		     pix_mp->width != ctx->dst_fmt.width))
ctx               475 drivers/staging/media/hantro/hantro_v4l2.c 	formats = hantro_get_formats(ctx, &num_fmts);
ctx               476 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->vpu_dst_fmt = hantro_find_format(formats, num_fmts,
ctx               478 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->dst_fmt = *pix_mp;
ctx               489 drivers/staging/media/hantro/hantro_v4l2.c 	if (hantro_is_encoder_ctx(ctx))
ctx               490 drivers/staging/media/hantro/hantro_v4l2.c 		hantro_reset_raw_fmt(ctx);
ctx               493 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->src_fmt.colorspace = pix_mp->colorspace;
ctx               494 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->src_fmt.ycbcr_enc = pix_mp->ycbcr_enc;
ctx               495 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->src_fmt.xfer_func = pix_mp->xfer_func;
ctx               496 drivers/staging/media/hantro/hantro_v4l2.c 	ctx->src_fmt.quantization = pix_mp->quantization;
ctx               498 drivers/staging/media/hantro/hantro_v4l2.c 	vpu_debug(0, "CAPTURE codec mode: %d\n", ctx->vpu_dst_fmt->codec_mode);
ctx               502 drivers/staging/media/hantro/hantro_v4l2.c 	hantro_update_requires_request(ctx, pix_mp->pixelformat);
ctx               540 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
ctx               546 drivers/staging/media/hantro/hantro_v4l2.c 		pixfmt = &ctx->dst_fmt;
ctx               549 drivers/staging/media/hantro/hantro_v4l2.c 		pixfmt = &ctx->src_fmt;
ctx               593 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(vq);
ctx               596 drivers/staging/media/hantro/hantro_v4l2.c 		return hantro_buf_plane_check(vb, ctx->vpu_src_fmt,
ctx               597 drivers/staging/media/hantro/hantro_v4l2.c 						  &ctx->src_fmt);
ctx               599 drivers/staging/media/hantro/hantro_v4l2.c 	return hantro_buf_plane_check(vb, ctx->vpu_dst_fmt, &ctx->dst_fmt);
ctx               604 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               607 drivers/staging/media/hantro/hantro_v4l2.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               612 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
ctx               614 drivers/staging/media/hantro/hantro_v4l2.c 	return hantro_is_encoder_ctx(ctx) != V4L2_TYPE_IS_OUTPUT(q->type);
ctx               619 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
ctx               623 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->sequence_out = 0;
ctx               625 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->sequence_cap = 0;
ctx               631 drivers/staging/media/hantro/hantro_v4l2.c 			codec_mode = ctx->vpu_src_fmt->codec_mode;
ctx               633 drivers/staging/media/hantro/hantro_v4l2.c 			codec_mode = ctx->vpu_dst_fmt->codec_mode;
ctx               636 drivers/staging/media/hantro/hantro_v4l2.c 		ctx->codec_ops = &ctx->dev->variant->codec_ops[codec_mode];
ctx               637 drivers/staging/media/hantro/hantro_v4l2.c 		if (ctx->codec_ops->init)
ctx               638 drivers/staging/media/hantro/hantro_v4l2.c 			ret = ctx->codec_ops->init(ctx);
ctx               648 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
ctx               653 drivers/staging/media/hantro/hantro_v4l2.c 		vbuf = buf_remove(ctx->fh.m2m_ctx);
ctx               657 drivers/staging/media/hantro/hantro_v4l2.c 					   &ctx->ctrl_handler);
ctx               664 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(q);
ctx               667 drivers/staging/media/hantro/hantro_v4l2.c 		if (ctx->codec_ops && ctx->codec_ops->exit)
ctx               668 drivers/staging/media/hantro/hantro_v4l2.c 			ctx->codec_ops->exit(ctx);
ctx               684 drivers/staging/media/hantro/hantro_v4l2.c 	struct hantro_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               686 drivers/staging/media/hantro/hantro_v4l2.c 	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->ctrl_handler);
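
The hantro_v4l2.c set-format paths guard against changing a queue that already has buffers, and for coded-format changes they additionally require the peer queue to be idle, since the raw format is derived from the coded one. A sketch of those two checks, assuming the vb2 and v4l2-mem2mem helpers; demo_try_fmt_change is a hypothetical name:

	#include <media/v4l2-mem2mem.h>

	static int demo_try_fmt_change(struct v4l2_m2m_ctx *m2m_ctx,
				       struct vb2_queue *vq,
				       enum v4l2_buf_type peer_type)
	{
		struct vb2_queue *peer_vq;

		/* Never change a queue that already has buffers allocated. */
		if (vb2_is_busy(vq))
			return -EBUSY;

		/* A coded-format change invalidates the raw format derived
		 * from it, so the peer queue must be idle as well. */
		peer_vq = v4l2_m2m_get_vq(m2m_ctx, peer_type);
		if (vb2_is_busy(peer_vq))
			return -EBUSY;

		return 0;
	}
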
ctx                24 drivers/staging/media/hantro/hantro_v4l2.h void hantro_reset_fmts(struct hantro_ctx *ctx);
ctx                49 drivers/staging/media/hantro/hantro_vp8.c void hantro_vp8_prob_update(struct hantro_ctx *ctx,
ctx                57 drivers/staging/media/hantro/hantro_vp8.c 	dst = ctx->vp8_dec.prob_tbl.cpu;
ctx               111 drivers/staging/media/hantro/hantro_vp8.c 	dst = ctx->vp8_dec.prob_tbl.cpu;
ctx               126 drivers/staging/media/hantro/hantro_vp8.c 	dst = ctx->vp8_dec.prob_tbl.cpu;
ctx               145 drivers/staging/media/hantro/hantro_vp8.c int hantro_vp8_dec_init(struct hantro_ctx *ctx)
ctx               147 drivers/staging/media/hantro/hantro_vp8.c 	struct hantro_dev *vpu = ctx->dev;
ctx               154 drivers/staging/media/hantro/hantro_vp8.c 	mb_width = DIV_ROUND_UP(ctx->dst_fmt.width, 16);
ctx               155 drivers/staging/media/hantro/hantro_vp8.c 	mb_height = DIV_ROUND_UP(ctx->dst_fmt.height, 16);
ctx               162 drivers/staging/media/hantro/hantro_vp8.c 	aux_buf = &ctx->vp8_dec.segment_map;
ctx               173 drivers/staging/media/hantro/hantro_vp8.c 	aux_buf = &ctx->vp8_dec.prob_tbl;
ctx               185 drivers/staging/media/hantro/hantro_vp8.c 	dma_free_coherent(vpu->dev, ctx->vp8_dec.segment_map.size,
ctx               186 drivers/staging/media/hantro/hantro_vp8.c 			  ctx->vp8_dec.segment_map.cpu,
ctx               187 drivers/staging/media/hantro/hantro_vp8.c 			  ctx->vp8_dec.segment_map.dma);
ctx               192 drivers/staging/media/hantro/hantro_vp8.c void hantro_vp8_dec_exit(struct hantro_ctx *ctx)
ctx               194 drivers/staging/media/hantro/hantro_vp8.c 	struct hantro_vp8_dec_hw_ctx *vp8_dec = &ctx->vp8_dec;
ctx               195 drivers/staging/media/hantro/hantro_vp8.c 	struct hantro_dev *vpu = ctx->dev;
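
hantro_vp8_dec_init() sizes its auxiliary buffers from the frame dimensions rounded up to whole 16x16 macroblocks, as the DIV_ROUND_UP(..., 16) calls above show. A runnable sketch of that sizing arithmetic; the 4-bytes-per-macroblock figure is an illustrative assumption, not the hardware's documented segment-map layout:

	#include <stdio.h>
	#include <stddef.h>

	#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

	static size_t demo_segment_map_size(unsigned int width,
					    unsigned int height)
	{
		unsigned int mb_width = DIV_ROUND_UP(width, 16);
		unsigned int mb_height = DIV_ROUND_UP(height, 16);

		/* Assumed 4 bytes of segment data per macroblock. */
		return (size_t)mb_width * mb_height * 4;
	}

	int main(void)
	{
		/* 1920x1080 rounds up to 120x68 macroblocks. */
		printf("%zu bytes\n", demo_segment_map_size(1920, 1080));
		return 0;
	}
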
ctx               149 drivers/staging/media/hantro/rk3288_vpu_hw.c static void rk3288_vpu_enc_reset(struct hantro_ctx *ctx)
ctx               151 drivers/staging/media/hantro/rk3288_vpu_hw.c 	struct hantro_dev *vpu = ctx->dev;
ctx               158 drivers/staging/media/hantro/rk3288_vpu_hw.c static void rk3288_vpu_dec_reset(struct hantro_ctx *ctx)
ctx               160 drivers/staging/media/hantro/rk3288_vpu_hw.c 	struct hantro_dev *vpu = ctx->dev;
ctx               135 drivers/staging/media/hantro/rk3399_vpu_hw.c static void rk3399_vpu_enc_reset(struct hantro_ctx *ctx)
ctx               137 drivers/staging/media/hantro/rk3399_vpu_hw.c 	struct hantro_dev *vpu = ctx->dev;
ctx               144 drivers/staging/media/hantro/rk3399_vpu_hw.c static void rk3399_vpu_dec_reset(struct hantro_ctx *ctx)
ctx               146 drivers/staging/media/hantro/rk3399_vpu_hw.c 	struct hantro_dev *vpu = ctx->dev;
ctx                37 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 					struct hantro_ctx *ctx)
ctx                39 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
ctx                59 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	reg = VEPU_REG_IN_IMG_CTRL_FMT(ctx->vpu_src_fmt->enc_fmt);
ctx                64 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 					    struct hantro_ctx *ctx,
ctx                67 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	struct v4l2_pix_format_mplane *pix_fmt = &ctx->src_fmt;
ctx                72 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.dma,
ctx                74 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	vepu_write_relaxed(vpu, ctx->jpeg_enc.bounce_buffer.size,
ctx               116 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c void rk3399_vpu_jpeg_enc_run(struct hantro_ctx *ctx)
ctx               118 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	struct hantro_dev *vpu = ctx->dev;
ctx               123 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	src_buf = hantro_get_src_buf(ctx);
ctx               124 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	dst_buf = hantro_get_dst_buf(ctx);
ctx               126 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	hantro_prepare_run(ctx);
ctx               130 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	jpeg_ctx.width = ctx->dst_fmt.width;
ctx               131 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	jpeg_ctx.height = ctx->dst_fmt.height;
ctx               132 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	jpeg_ctx.quality = ctx->jpeg_quality;
ctx               139 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	rk3399_vpu_set_src_img_ctrl(vpu, ctx);
ctx               140 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	rk3399_vpu_jpeg_enc_set_buffers(vpu, ctx, &src_buf->vb2_buf);
ctx               157 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	reg = VEPU_REG_MB_WIDTH(JPEG_MB_WIDTH(ctx->src_fmt.width))
ctx               158 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 		| VEPU_REG_MB_HEIGHT(JPEG_MB_HEIGHT(ctx->src_fmt.height))
ctx               164 drivers/staging/media/hantro/rk3399_vpu_hw_jpeg_enc.c 	hantro_finish_run(ctx);
ctx                88 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 				      struct hantro_ctx *ctx)
ctx                92 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	quantization = hantro_get_ctrl(ctx,
ctx                94 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	hantro_mpeg2_dec_copy_qtable(ctx->mpeg2_dec.qtable.cpu, quantization);
ctx                95 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	vdpu_write_relaxed(vpu, ctx->mpeg2_dec.qtable.dma,
ctx               101 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 				 struct hantro_ctx *ctx,
ctx               112 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	vq = v4l2_m2m_get_dst_vq(ctx->fh.m2m_ctx);
ctx               133 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 		addr += ALIGN(ctx->dst_fmt.width, 16);
ctx               163 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c void rk3399_vpu_mpeg2_dec_run(struct hantro_ctx *ctx)
ctx               165 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               172 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	src_buf = hantro_get_src_buf(ctx);
ctx               173 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	dst_buf = hantro_get_dst_buf(ctx);
ctx               175 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	hantro_prepare_run(ctx);
ctx               177 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	slice_params = hantro_get_ctrl(ctx,
ctx               226 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	reg = VDPU_REG_PIC_MB_WIDTH(MPEG2_MB_WIDTH(ctx->dst_fmt.width)) |
ctx               227 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	      VDPU_REG_PIC_MB_HEIGHT_P(MPEG2_MB_HEIGHT(ctx->dst_fmt.height)) |
ctx               249 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	rk3399_vpu_mpeg2_dec_set_quantization(vpu, ctx);
ctx               251 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	rk3399_vpu_mpeg2_dec_set_buffers(vpu, ctx, &src_buf->vb2_buf,
ctx               256 drivers/staging/media/hantro/rk3399_vpu_hw_mpeg2_dec.c 	hantro_finish_run(ctx);
ctx               276 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_lf(struct hantro_ctx *ctx,
ctx               281 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               315 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_qp(struct hantro_ctx *ctx,
ctx               320 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               345 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_parts(struct hantro_ctx *ctx,
ctx               348 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               358 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	vb2_src = hantro_get_src_buf(ctx);
ctx               428 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_tap(struct hantro_ctx *ctx,
ctx               431 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               447 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_ref(struct hantro_ctx *ctx,
ctx               450 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               455 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cap_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               456 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	vb2_dst = hantro_get_dst_buf(ctx);
ctx               480 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c static void cfg_buffers(struct hantro_ctx *ctx,
ctx               484 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               489 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	vb2_dst = hantro_get_dst_buf(ctx);
ctx               492 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	vdpu_write_relaxed(vpu, ctx->vp8_dec.prob_tbl.dma,
ctx               496 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	reg = VDPU_REG_FWD_PIC1_SEGMENT_BASE(ctx->vp8_dec.segment_map.dma);
ctx               509 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c void rk3399_vpu_vp8_dec_run(struct hantro_ctx *ctx)
ctx               512 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	struct hantro_dev *vpu = ctx->dev;
ctx               513 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	size_t height = ctx->dst_fmt.height;
ctx               514 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	size_t width = ctx->dst_fmt.width;
ctx               518 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	hantro_prepare_run(ctx);
ctx               520 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	hdr = hantro_get_ctrl(ctx, V4L2_CID_MPEG_VIDEO_VP8_FRAME_HEADER);
ctx               525 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	if (VP8_FRAME_IS_KEY_FRAME(hdr) && ctx->vp8_dec.segment_map.cpu)
ctx               526 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 		memset(ctx->vp8_dec.segment_map.cpu, 0,
ctx               527 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 		       ctx->vp8_dec.segment_map.size);
ctx               529 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	hantro_vp8_prob_update(ctx, hdr);
ctx               538 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	ctx->codec_ops->reset(ctx);
ctx               585 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_lf(ctx, hdr);
ctx               586 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_qp(ctx, hdr);
ctx               587 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_parts(ctx, hdr);
ctx               588 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_tap(ctx, hdr);
ctx               589 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_ref(ctx, hdr);
ctx               590 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	cfg_buffers(ctx, hdr);
ctx               592 drivers/staging/media/hantro/rk3399_vpu_hw_vp8_dec.c 	hantro_finish_run(ctx);
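
The rk3399 VP8 entries above trace the shape of a stateless-decoder "run": fetch the parsed frame-header control, clear per-stream state on a keyframe (the segment-map memset), update probability tables, program each hardware block in turn, then kick the core. Below is a minimal self-contained sketch of that ordering; the types and cfg_* helpers are invented stand-ins, not the Hantro API.

	#include <string.h>

	struct vp8_hdr { int key_frame; };
	struct dec_ctx { unsigned char segment_map[256]; };

	/* Stubs standing in for the per-block register setup. */
	static void cfg_lf(struct dec_ctx *c, const struct vp8_hdr *h)  { (void)c; (void)h; }
	static void cfg_qp(struct dec_ctx *c, const struct vp8_hdr *h)  { (void)c; (void)h; }
	static void cfg_ref(struct dec_ctx *c, const struct vp8_hdr *h) { (void)c; (void)h; }
	static void trigger_hw(struct dec_ctx *c) { (void)c; }

	/* One decode run: keyframes reset inter-frame state before any
	 * block is configured, mirroring the segment-map memset in
	 * rk3399_vpu_vp8_dec_run(). */
	static void vp8_dec_run(struct dec_ctx *c, const struct vp8_hdr *h)
	{
		if (h->key_frame)
			memset(c->segment_map, 0, sizeof(c->segment_map));
		cfg_lf(c, h);
		cfg_qp(c, h);
		cfg_ref(c, h);
		trigger_hw(c);
	}
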
ctx                67 drivers/staging/media/imx/imx-media-csc-scaler.c static struct ipu_csc_scaler_q_data *get_q_data(struct ipu_csc_scaler_ctx *ctx,
ctx                71 drivers/staging/media/imx/imx-media-csc-scaler.c 		return &ctx->q_data[V4L2_M2M_SRC];
ctx                73 drivers/staging/media/imx/imx-media-csc-scaler.c 		return &ctx->q_data[V4L2_M2M_DST];
ctx                82 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = _ctx;
ctx                84 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (ctx->icc)
ctx                85 drivers/staging/media/imx/imx-media-csc-scaler.c 		ipu_image_convert_abort(ctx->icc);
ctx                90 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = _ctx;
ctx                91 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_priv *priv = ctx->priv;
ctx                94 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx                95 drivers/staging/media/imx/imx-media-csc-scaler.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx                99 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_buf->sequence = ctx->sequence++;
ctx               107 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_job_finish(priv->m2m_dev, ctx->fh.m2m_ctx);
ctx               113 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = _ctx;
ctx               114 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_priv *priv = ctx->priv;
ctx               119 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_buf = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx               120 drivers/staging/media/imx/imx-media-csc-scaler.c 	dst_buf = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx               126 drivers/staging/media/imx/imx-media-csc-scaler.c 	run->ctx = ctx->icc;
ctx               132 drivers/staging/media/imx/imx-media-csc-scaler.c 		v4l2_err(ctx->priv->vdev.vfd->v4l2_dev,
ctx               140 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               141 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               144 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_job_finish(priv->m2m_dev, ctx->fh.m2m_ctx);
ctx               179 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(priv);
ctx               182 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, f->type);
ctx               192 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(priv);
ctx               193 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_q_data *q_data = get_q_data(ctx, f->type);
ctx               205 drivers/staging/media/imx/imx-media-csc-scaler.c 			get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               211 drivers/staging/media/imx/imx-media-csc-scaler.c 			get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               217 drivers/staging/media/imx/imx-media-csc-scaler.c 	ipu_image_convert_adjust(&test_in, &test_out, ctx->rot_mode);
ctx               241 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(priv);
ctx               245 drivers/staging/media/imx/imx-media-csc-scaler.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               247 drivers/staging/media/imx/imx-media-csc-scaler.c 		v4l2_err(ctx->priv->vdev.vfd->v4l2_dev, "%s: queue busy\n",
ctx               252 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, f->type);
ctx               278 drivers/staging/media/imx/imx-media-csc-scaler.c 		q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               296 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(priv);
ctx               305 drivers/staging/media/imx/imx-media-csc-scaler.c 		q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               312 drivers/staging/media/imx/imx-media-csc-scaler.c 		q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               334 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(priv);
ctx               354 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, s->type);
ctx               421 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = vb2_get_drv_priv(vq);
ctx               425 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, vq->type);
ctx               437 drivers/staging/media/imx/imx-media-csc-scaler.c 	dev_dbg(ctx->priv->dev, "get %d buffer(s) of size %d each.\n",
ctx               447 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = vb2_get_drv_priv(vq);
ctx               451 drivers/staging/media/imx/imx-media-csc-scaler.c 	dev_dbg(ctx->priv->dev, "type: %d\n", vq->type);
ctx               457 drivers/staging/media/imx/imx-media-csc-scaler.c 			dev_dbg(ctx->priv->dev, "%s: field isn't supported\n",
ctx               463 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, vq->type);
ctx               467 drivers/staging/media/imx/imx-media-csc-scaler.c 		dev_dbg(ctx->priv->dev,
ctx               480 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               482 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, to_vb2_v4l2_buffer(vb));
ctx               502 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = vb2_get_drv_priv(q);
ctx               503 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_priv *priv = ctx->priv;
ctx               509 drivers/staging/media/imx/imx-media-csc-scaler.c 	other_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               516 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (ctx->icc) {
ctx               517 drivers/staging/media/imx/imx-media-csc-scaler.c 		v4l2_warn(ctx->priv->vdev.vfd->v4l2_dev, "removing old ICC\n");
ctx               518 drivers/staging/media/imx/imx-media-csc-scaler.c 		ipu_image_convert_unprepare(ctx->icc);
ctx               521 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_OUTPUT);
ctx               524 drivers/staging/media/imx/imx-media-csc-scaler.c 	q_data = get_q_data(ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               527 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->icc = ipu_image_convert_prepare(ipu, ic_task, &in, &out,
ctx               528 drivers/staging/media/imx/imx-media-csc-scaler.c 					     ctx->rot_mode,
ctx               529 drivers/staging/media/imx/imx-media-csc-scaler.c 					     ipu_ic_pp_complete, ctx);
ctx               530 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (IS_ERR(ctx->icc)) {
ctx               532 drivers/staging/media/imx/imx-media-csc-scaler.c 		int ret = PTR_ERR(ctx->icc);
ctx               534 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->icc = NULL;
ctx               535 drivers/staging/media/imx/imx-media-csc-scaler.c 		v4l2_err(ctx->priv->vdev.vfd->v4l2_dev, "%s: error %d\n",
ctx               537 drivers/staging/media/imx/imx-media-csc-scaler.c 		while ((buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx               539 drivers/staging/media/imx/imx-media-csc-scaler.c 		while ((buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx               549 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = vb2_get_drv_priv(q);
ctx               552 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (ctx->icc) {
ctx               553 drivers/staging/media/imx/imx-media-csc-scaler.c 		ipu_image_convert_unprepare(ctx->icc);
ctx               554 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->icc = NULL;
ctx               557 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->sequence = 0;
ctx               560 drivers/staging/media/imx/imx-media-csc-scaler.c 		while ((buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx)))
ctx               563 drivers/staging/media/imx/imx-media-csc-scaler.c 		while ((buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx)))
ctx               581 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = priv;
ctx               587 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_vq->drv_priv = ctx;
ctx               592 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_vq->lock = &ctx->priv->mutex;
ctx               593 drivers/staging/media/imx/imx-media-csc-scaler.c 	src_vq->dev = ctx->priv->dev;
ctx               602 drivers/staging/media/imx/imx-media-csc-scaler.c 	dst_vq->drv_priv = ctx;
ctx               607 drivers/staging/media/imx/imx-media-csc-scaler.c 	dst_vq->lock = &ctx->priv->mutex;
ctx               608 drivers/staging/media/imx/imx-media-csc-scaler.c 	dst_vq->dev = ctx->priv->dev;
ctx               615 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = container_of(ctrl->handler,
ctx               623 drivers/staging/media/imx/imx-media-csc-scaler.c 	rotate = ctx->rotate;
ctx               624 drivers/staging/media/imx/imx-media-csc-scaler.c 	hflip = ctx->hflip;
ctx               625 drivers/staging/media/imx/imx-media-csc-scaler.c 	vflip = ctx->vflip;
ctx               645 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (rot_mode != ctx->rot_mode) {
ctx               649 drivers/staging/media/imx/imx-media-csc-scaler.c 		in_fmt = &ctx->q_data[V4L2_M2M_SRC].cur_fmt;
ctx               650 drivers/staging/media/imx/imx-media-csc-scaler.c 		out_fmt = &ctx->q_data[V4L2_M2M_DST].cur_fmt;
ctx               656 drivers/staging/media/imx/imx-media-csc-scaler.c 		    ipu_rot_mode_is_irt(ctx->rot_mode)) {
ctx               662 drivers/staging/media/imx/imx-media-csc-scaler.c 		ipu_image_convert_adjust(&test_in, &test_out, ctx->rot_mode);
ctx               671 drivers/staging/media/imx/imx-media-csc-scaler.c 			out_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               684 drivers/staging/media/imx/imx-media-csc-scaler.c 			cap_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx,
ctx               693 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->rot_mode = rot_mode;
ctx               694 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->rotate = rotate;
ctx               695 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->hflip = hflip;
ctx               696 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->vflip = vflip;
ctx               706 drivers/staging/media/imx/imx-media-csc-scaler.c static int ipu_csc_scaler_init_controls(struct ipu_csc_scaler_ctx *ctx)
ctx               708 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct v4l2_ctrl_handler *hdlr = &ctx->ctrl_hdlr;
ctx               752 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = NULL;
ctx               755 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               756 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (!ctx)
ctx               759 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->rot_mode = IPU_ROTATE_NONE;
ctx               761 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               762 drivers/staging/media/imx/imx-media-csc-scaler.c 	file->private_data = &ctx->fh;
ctx               763 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_add(&ctx->fh);
ctx               764 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->priv = priv;
ctx               766 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(priv->m2m_dev, ctx,
ctx               768 drivers/staging/media/imx/imx-media-csc-scaler.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               769 drivers/staging/media/imx/imx-media-csc-scaler.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               773 drivers/staging/media/imx/imx-media-csc-scaler.c 	ret = ipu_csc_scaler_init_controls(ctx);
ctx               777 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->fh.ctrl_handler = &ctx->ctrl_hdlr;
ctx               779 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->q_data[V4L2_M2M_SRC] = ipu_csc_scaler_q_data_default;
ctx               780 drivers/staging/media/imx/imx-media-csc-scaler.c 	ctx->q_data[V4L2_M2M_DST] = ipu_csc_scaler_q_data_default;
ctx               782 drivers/staging/media/imx/imx-media-csc-scaler.c 	dev_dbg(priv->dev, "Created instance %p, m2m_ctx: %p\n", ctx,
ctx               783 drivers/staging/media/imx/imx-media-csc-scaler.c 		ctx->fh.m2m_ctx);
ctx               788 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               790 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_del(&ctx->fh);
ctx               791 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_exit(&ctx->fh);
ctx               792 drivers/staging/media/imx/imx-media-csc-scaler.c 	kfree(ctx);
ctx               799 drivers/staging/media/imx/imx-media-csc-scaler.c 	struct ipu_csc_scaler_ctx *ctx = fh_to_ctx(file->private_data);
ctx               801 drivers/staging/media/imx/imx-media-csc-scaler.c 	dev_dbg(priv->dev, "Releasing instance %p\n", ctx);
ctx               803 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               804 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_del(&ctx->fh);
ctx               805 drivers/staging/media/imx/imx-media-csc-scaler.c 	v4l2_fh_exit(&ctx->fh);
ctx               806 drivers/staging/media/imx/imx-media-csc-scaler.c 	kfree(ctx);
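
The open/release pair in imx-media-csc-scaler.c follows the usual v4l2 mem2mem shape: allocate a per-file context, initialize and register the file handle, create the m2m context and controls, and unwind in exactly the reverse order through error labels on failure. A compilable toy model of that unwind discipline follows; all types and init/exit helpers here are stand-ins, not the V4L2 API.

	#include <errno.h>
	#include <stdlib.h>

	struct toy_ctx { int fh_ready; int m2m_ready; };

	static int  fh_init(struct toy_ctx *c)     { c->fh_ready = 1; return 0; }
	static void fh_exit(struct toy_ctx *c)     { c->fh_ready = 0; }
	static int  m2m_init(struct toy_ctx *c)    { c->m2m_ready = 1; return 0; }

	static int toy_open(struct toy_ctx **out)
	{
		struct toy_ctx *c = calloc(1, sizeof(*c));
		int ret;

		if (!c)
			return -ENOMEM;
		ret = fh_init(c);
		if (ret)
			goto err_free;
		ret = m2m_init(c);
		if (ret)
			goto err_fh;	/* undo only what already succeeded */
		*out = c;
		return 0;

	err_fh:
		fh_exit(c);
	err_free:
		free(c);
		return ret;
	}
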
ctx               254 drivers/staging/media/omap4iss/iss_csi2.c 	struct iss_csi2_ctx_cfg *ctx = &csi2->contexts[0];
ctx               256 drivers/staging/media/omap4iss/iss_csi2.c 	ctx->ping_addr = addr;
ctx               257 drivers/staging/media/omap4iss/iss_csi2.c 	ctx->pong_addr = addr;
ctx               258 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_PING_ADDR(ctx->ctxnum),
ctx               259 drivers/staging/media/omap4iss/iss_csi2.c 		      ctx->ping_addr);
ctx               260 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_PONG_ADDR(ctx->ctxnum),
ctx               261 drivers/staging/media/omap4iss/iss_csi2.c 		      ctx->pong_addr);
ctx               283 drivers/staging/media/omap4iss/iss_csi2.c 	struct iss_csi2_ctx_cfg *ctx = &csi2->contexts[ctxnum];
ctx               305 drivers/staging/media/omap4iss/iss_csi2.c 	ctx->enabled = enable;
ctx               314 drivers/staging/media/omap4iss/iss_csi2.c 			    struct iss_csi2_ctx_cfg *ctx)
ctx               318 drivers/staging/media/omap4iss/iss_csi2.c 	ctx->frame = 0;
ctx               321 drivers/staging/media/omap4iss/iss_csi2.c 	if (ctx->eof_enabled)
ctx               324 drivers/staging/media/omap4iss/iss_csi2.c 	if (ctx->eol_enabled)
ctx               327 drivers/staging/media/omap4iss/iss_csi2.c 	if (ctx->checksum_enabled)
ctx               330 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_CTRL1(ctx->ctxnum), reg);
ctx               333 drivers/staging/media/omap4iss/iss_csi2.c 	reg = ctx->virtual_id << CSI2_CTX_CTRL2_VIRTUAL_ID_SHIFT;
ctx               334 drivers/staging/media/omap4iss/iss_csi2.c 	reg |= ctx->format_id << CSI2_CTX_CTRL2_FORMAT_SHIFT;
ctx               336 drivers/staging/media/omap4iss/iss_csi2.c 	if (ctx->dpcm_decompress && ctx->dpcm_predictor)
ctx               339 drivers/staging/media/omap4iss/iss_csi2.c 	if (is_usr_def_mapping(ctx->format_id))
ctx               342 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_CTRL2(ctx->ctxnum), reg);
ctx               345 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_CTRL3(ctx->ctxnum),
ctx               346 drivers/staging/media/omap4iss/iss_csi2.c 		      ctx->alpha << CSI2_CTX_CTRL3_ALPHA_SHIFT);
ctx               349 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_update(csi2->iss, csi2->regs1, CSI2_CTX_DAT_OFST(ctx->ctxnum),
ctx               350 drivers/staging/media/omap4iss/iss_csi2.c 		       CSI2_CTX_DAT_OFST_MASK, ctx->data_offset);
ctx               352 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_PING_ADDR(ctx->ctxnum),
ctx               353 drivers/staging/media/omap4iss/iss_csi2.c 		      ctx->ping_addr);
ctx               354 drivers/staging/media/omap4iss/iss_csi2.c 	iss_reg_write(csi2->iss, csi2->regs1, CSI2_CTX_PONG_ADDR(ctx->ctxnum),
ctx               355 drivers/staging/media/omap4iss/iss_csi2.c 		      ctx->pong_addr);
ctx               665 drivers/staging/media/omap4iss/iss_csi2.c 			 struct iss_csi2_ctx_cfg *ctx)
ctx               667 drivers/staging/media/omap4iss/iss_csi2.c 	unsigned int n = ctx->ctxnum;
ctx               684 drivers/staging/media/omap4iss/iss_csi2.c 				     CSI2_CTX_CTRL2(ctx->ctxnum))
ctx               701 drivers/staging/media/omap4iss/iss_csi2.c 			delta = frame - ctx->frame;
ctx               702 drivers/staging/media/omap4iss/iss_csi2.c 			if (frame < ctx->frame)
ctx               704 drivers/staging/media/omap4iss/iss_csi2.c 			ctx->frame = frame;
ctx               726 drivers/staging/media/omap4iss/iss_csi2.c 			ctx->format_id = csi2_ctx_map_format(csi2);
ctx               727 drivers/staging/media/omap4iss/iss_csi2.c 			csi2_ctx_config(csi2, ctx);
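
The csi2_set_outaddr entries program the same DMA address into both the ping and pong slots of context 0, so the receiver writes every frame to one buffer regardless of which half of the ping/pong pair the hardware selects. A small sketch of that register-programming idea; the register map and write helper are invented for illustration.

	#include <stdint.h>
	#include <stdio.h>

	#define CTX_PING_ADDR(n)	(0x100 + (n) * 0x20)
	#define CTX_PONG_ADDR(n)	(0x104 + (n) * 0x20)

	/* Stand-in for a memory-mapped register write. */
	static void reg_write(uint32_t off, uint32_t val)
	{
		printf("reg[0x%03x] <- 0x%08x\n", off, val);
	}

	/* Point both halves of the ping/pong pair at one buffer. */
	static void csi2_set_outaddr(unsigned int ctxnum, uint32_t addr)
	{
		reg_write(CTX_PING_ADDR(ctxnum), addr);
		reg_write(CTX_PONG_ADDR(ctxnum), addr);
	}
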
ctx               102 drivers/staging/media/sunxi/cedrus/cedrus.c void *cedrus_find_control_data(struct cedrus_ctx *ctx, u32 id)
ctx               106 drivers/staging/media/sunxi/cedrus/cedrus.c 	for (i = 0; ctx->ctrls[i]; i++)
ctx               107 drivers/staging/media/sunxi/cedrus/cedrus.c 		if (ctx->ctrls[i]->id == id)
ctx               108 drivers/staging/media/sunxi/cedrus/cedrus.c 			return ctx->ctrls[i]->p_cur.p;
ctx               113 drivers/staging/media/sunxi/cedrus/cedrus.c static int cedrus_init_ctrls(struct cedrus_dev *dev, struct cedrus_ctx *ctx)
ctx               115 drivers/staging/media/sunxi/cedrus/cedrus.c 	struct v4l2_ctrl_handler *hdl = &ctx->hdl;
ctx               129 drivers/staging/media/sunxi/cedrus/cedrus.c 	ctx->ctrls = kzalloc(ctrl_size, GFP_KERNEL);
ctx               130 drivers/staging/media/sunxi/cedrus/cedrus.c 	if (!ctx->ctrls)
ctx               141 drivers/staging/media/sunxi/cedrus/cedrus.c 			kfree(ctx->ctrls);
ctx               145 drivers/staging/media/sunxi/cedrus/cedrus.c 		ctx->ctrls[i] = ctrl;
ctx               148 drivers/staging/media/sunxi/cedrus/cedrus.c 	ctx->fh.ctrl_handler = hdl;
ctx               158 drivers/staging/media/sunxi/cedrus/cedrus.c 	struct cedrus_ctx *ctx = NULL;
ctx               168 drivers/staging/media/sunxi/cedrus/cedrus.c 			ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               174 drivers/staging/media/sunxi/cedrus/cedrus.c 	if (!ctx)
ctx               179 drivers/staging/media/sunxi/cedrus/cedrus.c 		v4l2_info(&ctx->dev->v4l2_dev,
ctx               183 drivers/staging/media/sunxi/cedrus/cedrus.c 		v4l2_info(&ctx->dev->v4l2_dev,
ctx               188 drivers/staging/media/sunxi/cedrus/cedrus.c 	parent_hdl = &ctx->hdl;
ctx               192 drivers/staging/media/sunxi/cedrus/cedrus.c 		v4l2_info(&ctx->dev->v4l2_dev, "Missing codec control(s)\n");
ctx               197 drivers/staging/media/sunxi/cedrus/cedrus.c 		if (cedrus_controls[i].codec != ctx->current_codec ||
ctx               204 drivers/staging/media/sunxi/cedrus/cedrus.c 			v4l2_info(&ctx->dev->v4l2_dev,
ctx               218 drivers/staging/media/sunxi/cedrus/cedrus.c 	struct cedrus_ctx *ctx = NULL;
ctx               224 drivers/staging/media/sunxi/cedrus/cedrus.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               225 drivers/staging/media/sunxi/cedrus/cedrus.c 	if (!ctx) {
ctx               230 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_fh_init(&ctx->fh, video_devdata(file));
ctx               231 drivers/staging/media/sunxi/cedrus/cedrus.c 	file->private_data = &ctx->fh;
ctx               232 drivers/staging/media/sunxi/cedrus/cedrus.c 	ctx->dev = dev;
ctx               234 drivers/staging/media/sunxi/cedrus/cedrus.c 	ret = cedrus_init_ctrls(dev, ctx);
ctx               238 drivers/staging/media/sunxi/cedrus/cedrus.c 	ctx->fh.m2m_ctx = v4l2_m2m_ctx_init(dev->m2m_dev, ctx,
ctx               240 drivers/staging/media/sunxi/cedrus/cedrus.c 	if (IS_ERR(ctx->fh.m2m_ctx)) {
ctx               241 drivers/staging/media/sunxi/cedrus/cedrus.c 		ret = PTR_ERR(ctx->fh.m2m_ctx);
ctx               245 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_fh_add(&ctx->fh);
ctx               252 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx               254 drivers/staging/media/sunxi/cedrus/cedrus.c 	kfree(ctx);
ctx               263 drivers/staging/media/sunxi/cedrus/cedrus.c 	struct cedrus_ctx *ctx = container_of(file->private_data,
ctx               268 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_fh_del(&ctx->fh);
ctx               269 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_m2m_ctx_release(ctx->fh.m2m_ctx);
ctx               271 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_ctrl_handler_free(&ctx->hdl);
ctx               272 drivers/staging/media/sunxi/cedrus/cedrus.c 	kfree(ctx->ctrls);
ctx               274 drivers/staging/media/sunxi/cedrus/cedrus.c 	v4l2_fh_exit(&ctx->fh);
ctx               276 drivers/staging/media/sunxi/cedrus/cedrus.c 	kfree(ctx);
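
cedrus_find_control_data scans a NULL-terminated array of control pointers and returns the current payload of the control matching the requested id; cedrus_init_ctrls allocates the array with the terminating slot. A freestanding model of that lookup, with an illustrative struct rather than the real v4l2_ctrl:

	#include <stddef.h>
	#include <stdint.h>

	struct toy_ctrl { uint32_t id; void *payload; };

	/* ctrls is a NULL-terminated array, matching the extra slot
	 * allocated in cedrus_init_ctrls(). */
	static void *find_control_data(struct toy_ctrl **ctrls, uint32_t id)
	{
		size_t i;

		for (i = 0; ctrls[i]; i++)
			if (ctrls[i]->id == id)
				return ctrls[i]->payload;
		return NULL;
	}
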
ctx               117 drivers/staging/media/sunxi/cedrus/cedrus.h 	void (*irq_clear)(struct cedrus_ctx *ctx);
ctx               118 drivers/staging/media/sunxi/cedrus/cedrus.h 	void (*irq_disable)(struct cedrus_ctx *ctx);
ctx               119 drivers/staging/media/sunxi/cedrus/cedrus.h 	enum cedrus_irq_status (*irq_status)(struct cedrus_ctx *ctx);
ctx               120 drivers/staging/media/sunxi/cedrus/cedrus.h 	void (*setup)(struct cedrus_ctx *ctx, struct cedrus_run *run);
ctx               121 drivers/staging/media/sunxi/cedrus/cedrus.h 	int (*start)(struct cedrus_ctx *ctx);
ctx               122 drivers/staging/media/sunxi/cedrus/cedrus.h 	void (*stop)(struct cedrus_ctx *ctx);
ctx               123 drivers/staging/media/sunxi/cedrus/cedrus.h 	void (*trigger)(struct cedrus_ctx *ctx);
ctx               179 drivers/staging/media/sunxi/cedrus/cedrus.h static inline dma_addr_t cedrus_dst_buf_addr(struct cedrus_ctx *ctx,
ctx               188 drivers/staging/media/sunxi/cedrus/cedrus.h 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               192 drivers/staging/media/sunxi/cedrus/cedrus.h 	return buf ? cedrus_buf_addr(buf, &ctx->dst_fmt, plane) : 0;
ctx               207 drivers/staging/media/sunxi/cedrus/cedrus.h void *cedrus_find_control_data(struct cedrus_ctx *ctx, u32 id);
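
The cedrus.h entries define a per-codec ops table of function pointers (irq handling, setup, start/stop, trigger); the driver dispatches through dev->dec_ops[ctx->current_codec]. A self-contained sketch of that vtable dispatch pattern follows; the codec ids, ops layout, and the two dummy codecs are invented for illustration.

	#include <stdio.h>

	enum toy_codec { TOY_CODEC_MPEG2, TOY_CODEC_H264, TOY_CODEC_COUNT };

	struct toy_ctx { enum toy_codec current_codec; };

	struct toy_dec_ops {
		void (*setup)(struct toy_ctx *ctx);
		void (*trigger)(struct toy_ctx *ctx);
	};

	static void mpeg2_setup(struct toy_ctx *ctx)   { (void)ctx; puts("mpeg2 setup"); }
	static void mpeg2_trigger(struct toy_ctx *ctx) { (void)ctx; puts("mpeg2 go"); }
	static void h264_setup(struct toy_ctx *ctx)    { (void)ctx; puts("h264 setup"); }
	static void h264_trigger(struct toy_ctx *ctx)  { (void)ctx; puts("h264 go"); }

	static const struct toy_dec_ops mpeg2_ops = { mpeg2_setup, mpeg2_trigger };
	static const struct toy_dec_ops h264_ops  = { h264_setup,  h264_trigger };

	static const struct toy_dec_ops *dec_ops[TOY_CODEC_COUNT] = {
		[TOY_CODEC_MPEG2] = &mpeg2_ops,
		[TOY_CODEC_H264]  = &h264_ops,
	};

	/* Every run dispatches through the table selected at
	 * start_streaming time. */
	static void run(struct toy_ctx *ctx)
	{
		dec_ops[ctx->current_codec]->setup(ctx);
		dec_ops[ctx->current_codec]->trigger(ctx);
	}
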
ctx                27 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	struct cedrus_ctx *ctx = priv;
ctx                28 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	struct cedrus_dev *dev = ctx->dev;
ctx                32 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	run.src = v4l2_m2m_next_src_buf(ctx->fh.m2m_ctx);
ctx                33 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	run.dst = v4l2_m2m_next_dst_buf(ctx->fh.m2m_ctx);
ctx                39 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		v4l2_ctrl_request_setup(src_req, &ctx->hdl);
ctx                41 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	switch (ctx->src_fmt.pixelformat) {
ctx                43 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.mpeg2.slice_params = cedrus_find_control_data(ctx,
ctx                45 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.mpeg2.quantization = cedrus_find_control_data(ctx,
ctx                50 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.h264.decode_params = cedrus_find_control_data(ctx,
ctx                52 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.h264.pps = cedrus_find_control_data(ctx,
ctx                54 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.h264.scaling_matrix = cedrus_find_control_data(ctx,
ctx                56 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.h264.slice_params = cedrus_find_control_data(ctx,
ctx                58 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		run.h264.sps = cedrus_find_control_data(ctx,
ctx                68 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	dev->dec_ops[ctx->current_codec]->setup(ctx, &run);
ctx                73 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 		v4l2_ctrl_request_complete(src_req, &ctx->hdl);
ctx                75 drivers/staging/media/sunxi/cedrus/cedrus_dec.c 	dev->dec_ops[ctx->current_codec]->trigger(ctx);
ctx                56 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static dma_addr_t cedrus_h264_mv_col_buf_addr(struct cedrus_ctx *ctx,
ctx                60 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	dma_addr_t addr = ctx->codec.h264.mv_col_buf_dma;
ctx                63 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	addr += position * ctx->codec.h264.mv_col_buf_field_size * 2;
ctx                66 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	addr += field * ctx->codec.h264.mv_col_buf_field_size;
ctx                71 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_fill_ref_pic(struct cedrus_ctx *ctx,
ctx                84 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	pic->luma_ptr = cpu_to_le32(cedrus_buf_addr(vbuf, &ctx->dst_fmt, 0));
ctx                85 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	pic->chroma_ptr = cpu_to_le32(cedrus_buf_addr(vbuf, &ctx->dst_fmt, 1));
ctx                87 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cpu_to_le32(cedrus_h264_mv_col_buf_addr(ctx, position, 0));
ctx                89 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cpu_to_le32(cedrus_h264_mv_col_buf_addr(ctx, position, 1));
ctx                92 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_write_frame_list(struct cedrus_ctx *ctx,
ctx               101 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               107 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cap_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               130 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cedrus_fill_ref_pic(ctx, cedrus_buf,
ctx               153 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cedrus_fill_ref_pic(ctx, output_buf,
ctx               166 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void _cedrus_write_ref_list(struct cedrus_ctx *ctx,
ctx               173 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               178 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cap_q = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               213 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_write_ref_list0(struct cedrus_ctx *ctx,
ctx               218 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	_cedrus_write_ref_list(ctx, run,
ctx               224 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_write_ref_list1(struct cedrus_ctx *ctx,
ctx               229 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	_cedrus_write_ref_list(ctx, run,
ctx               235 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_write_scaling_lists(struct cedrus_ctx *ctx,
ctx               240 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               255 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_write_pred_weight_table(struct cedrus_ctx *ctx,
ctx               262 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               296 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_set_params(struct cedrus_ctx *ctx,
ctx               304 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               335 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cedrus_write_pred_weight_table(ctx, run);
ctx               340 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cedrus_write_ref_list0(ctx, run);
ctx               343 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		cedrus_write_ref_list1(ctx, run);
ctx               417 drivers/staging/media/sunxi/cedrus/cedrus_h264.c cedrus_h264_irq_status(struct cedrus_ctx *ctx)
ctx               419 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               432 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_h264_irq_clear(struct cedrus_ctx *ctx)
ctx               434 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               440 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_h264_irq_disable(struct cedrus_ctx *ctx)
ctx               442 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               449 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_h264_setup(struct cedrus_ctx *ctx,
ctx               452 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               458 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		     ctx->codec.h264.pic_info_buf_dma);
ctx               460 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		     ctx->codec.h264.neighbor_info_buf_dma);
ctx               462 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cedrus_write_scaling_lists(ctx, run);
ctx               463 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cedrus_write_frame_list(ctx, run);
ctx               465 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	cedrus_set_params(ctx, run);
ctx               468 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static int cedrus_h264_start(struct cedrus_ctx *ctx)
ctx               470 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               481 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	ctx->codec.h264.pic_info_buf =
ctx               483 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 				   &ctx->codec.h264.pic_info_buf_dma,
ctx               485 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	if (!ctx->codec.h264.pic_info_buf)
ctx               495 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	ctx->codec.h264.neighbor_info_buf =
ctx               497 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 				   &ctx->codec.h264.neighbor_info_buf_dma,
ctx               499 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	if (!ctx->codec.h264.neighbor_info_buf) {
ctx               504 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	field_size = DIV_ROUND_UP(ctx->src_fmt.width, 16) *
ctx               505 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 		DIV_ROUND_UP(ctx->src_fmt.height, 16) * 16;
ctx               522 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	ctx->codec.h264.mv_col_buf_field_size = field_size;
ctx               525 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	ctx->codec.h264.mv_col_buf_size = mv_col_size;
ctx               526 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	ctx->codec.h264.mv_col_buf = dma_alloc_coherent(dev->dev,
ctx               527 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 							ctx->codec.h264.mv_col_buf_size,
ctx               528 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 							&ctx->codec.h264.mv_col_buf_dma,
ctx               530 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	if (!ctx->codec.h264.mv_col_buf) {
ctx               539 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.neighbor_info_buf,
ctx               540 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.neighbor_info_buf_dma);
ctx               544 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.pic_info_buf,
ctx               545 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.pic_info_buf_dma);
ctx               549 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_h264_stop(struct cedrus_ctx *ctx)
ctx               551 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
ctx               553 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	dma_free_coherent(dev->dev, ctx->codec.h264.mv_col_buf_size,
ctx               554 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.mv_col_buf,
ctx               555 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.mv_col_buf_dma);
ctx               557 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.neighbor_info_buf,
ctx               558 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.neighbor_info_buf_dma);
ctx               560 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.pic_info_buf,
ctx               561 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 			  ctx->codec.h264.pic_info_buf_dma);
ctx               564 drivers/staging/media/sunxi/cedrus/cedrus_h264.c static void cedrus_h264_trigger(struct cedrus_ctx *ctx)
ctx               566 drivers/staging/media/sunxi/cedrus/cedrus_h264.c 	struct cedrus_dev *dev = ctx->dev;
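
cedrus_h264_mv_col_buf_addr computes a per-picture, per-field offset into one large motion-vector buffer: each decoded picture owns two field-sized slices, and the field index selects the second one. Combined with the field_size computation visible in cedrus_h264_start (16 bytes per 16x16 macroblock), the arithmetic reduces to the sketch below; names are invented, the formulas mirror the indexed lines.

	#include <stdint.h>

	#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

	/* Per-field slice size: one 16-byte entry per macroblock,
	 * as in cedrus_h264_start(). */
	static uint32_t mv_col_field_size(uint32_t width, uint32_t height)
	{
		return DIV_ROUND_UP(width, 16) * DIV_ROUND_UP(height, 16) * 16;
	}

	/* Each picture owns two consecutive field slices; 'field'
	 * (0 or 1) picks the top or bottom one. */
	static uint64_t mv_col_buf_addr(uint64_t base, uint32_t field_size,
					unsigned int position, unsigned int field)
	{
		return base + (uint64_t)position * field_size * 2 +
		       (uint64_t)field * field_size;
	}
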
ctx               105 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	struct cedrus_ctx *ctx;
ctx               110 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	ctx = v4l2_m2m_get_curr_priv(dev->m2m_dev);
ctx               111 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	if (!ctx) {
ctx               117 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	status = dev->dec_ops[ctx->current_codec]->irq_status(ctx);
ctx               121 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	dev->dec_ops[ctx->current_codec]->irq_disable(ctx);
ctx               122 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	dev->dec_ops[ctx->current_codec]->irq_clear(ctx);
ctx               124 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	src_buf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               125 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	dst_buf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               141 drivers/staging/media/sunxi/cedrus/cedrus_hw.c 	v4l2_m2m_job_finish(ctx->dev->m2m_dev, ctx->fh.m2m_ctx);
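
The cedrus_hw.c entries show the interrupt side of a mem2mem decode: look up the context currently owned by the scheduler, bail out on a spurious interrupt, read the codec's status, quiesce the interrupt before completing, then dequeue both buffers and finish the job. A compact model of that control flow; all helpers are stubbed stand-ins.

	#include <stdbool.h>
	#include <stdio.h>

	enum irq_status { IRQ_NONE, IRQ_OK, IRQ_ERROR };
	struct job { const char *name; };

	static struct job current_job = { "job0" };
	static struct job *get_current_job(void) { return &current_job; }
	static enum irq_status read_irq_status(struct job *j) { (void)j; return IRQ_OK; }
	static void irq_disable(struct job *j) { (void)j; }
	static void irq_clear(struct job *j)   { (void)j; }
	static void finish_job(struct job *j, bool error)
	{
		printf("%s: %s\n", j->name, error ? "error" : "done");
	}

	static void decode_irq(void)
	{
		struct job *j = get_current_job();
		enum irq_status status;

		if (!j)		/* spurious: no job currently scheduled */
			return;

		status = read_irq_status(j);
		/* Quiesce before completing, so a late interrupt cannot
		 * race with the next scheduled run. */
		irq_disable(j);
		irq_clear(j);
		finish_job(j, status != IRQ_OK);
	}
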
ctx                40 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c static enum cedrus_irq_status cedrus_mpeg2_irq_status(struct cedrus_ctx *ctx)
ctx                42 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	struct cedrus_dev *dev = ctx->dev;
ctx                58 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c static void cedrus_mpeg2_irq_clear(struct cedrus_ctx *ctx)
ctx                60 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	struct cedrus_dev *dev = ctx->dev;
ctx                65 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c static void cedrus_mpeg2_irq_disable(struct cedrus_ctx *ctx)
ctx                67 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	struct cedrus_dev *dev = ctx->dev;
ctx                75 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c static void cedrus_mpeg2_setup(struct cedrus_ctx *ctx, struct cedrus_run *run)
ctx                84 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	struct cedrus_dev *dev = ctx->dev;
ctx               156 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	reg = VE_DEC_MPEG_PICBOUNDSIZE_WIDTH(ctx->src_fmt.width);
ctx               157 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	reg |= VE_DEC_MPEG_PICBOUNDSIZE_HEIGHT(ctx->src_fmt.height);
ctx               163 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, V4L2_BUF_TYPE_VIDEO_CAPTURE);
ctx               166 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	fwd_luma_addr = cedrus_dst_buf_addr(ctx, forward_idx, 0);
ctx               167 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	fwd_chroma_addr = cedrus_dst_buf_addr(ctx, forward_idx, 1);
ctx               173 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	bwd_luma_addr = cedrus_dst_buf_addr(ctx, backward_idx, 0);
ctx               174 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	bwd_chroma_addr = cedrus_dst_buf_addr(ctx, backward_idx, 1);
ctx               181 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	dst_luma_addr = cedrus_dst_buf_addr(ctx, run->dst->vb2_buf.index, 0);
ctx               182 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	dst_chroma_addr = cedrus_dst_buf_addr(ctx, run->dst->vb2_buf.index, 1);
ctx               227 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c static void cedrus_mpeg2_trigger(struct cedrus_ctx *ctx)
ctx               229 drivers/staging/media/sunxi/cedrus/cedrus_mpeg2.c 	struct cedrus_dev *dev = ctx->dev;
ctx               165 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               166 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               215 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               218 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	if (!ctx->dst_fmt.width || !ctx->dst_fmt.height) {
ctx               225 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	f->fmt.pix = ctx->dst_fmt;
ctx               233 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               236 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	if (!ctx->dst_fmt.width || !ctx->dst_fmt.height) {
ctx               244 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	f->fmt.pix = ctx->src_fmt;
ctx               252 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               253 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               268 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               269 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               288 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               289 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               293 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               301 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->dst_fmt = f->fmt.pix;
ctx               303 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	cedrus_dst_format_set(dev, &ctx->dst_fmt);
ctx               311 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = cedrus_file2ctx(file);
ctx               315 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	vq = v4l2_m2m_get_vq(ctx->fh.m2m_ctx, f->type);
ctx               323 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->src_fmt = f->fmt.pix;
ctx               326 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->dst_fmt.colorspace = f->fmt.pix.colorspace;
ctx               327 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->dst_fmt.xfer_func = f->fmt.pix.xfer_func;
ctx               328 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->dst_fmt.ycbcr_enc = f->fmt.pix.ycbcr_enc;
ctx               329 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	ctx->dst_fmt.quantization = f->fmt.pix.quantization;
ctx               366 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vq);
ctx               367 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               373 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		pix_fmt = &ctx->src_fmt;
ctx               376 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		pix_fmt = &ctx->dst_fmt;
ctx               396 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vq);
ctx               401 drivers/staging/media/sunxi/cedrus/cedrus_video.c 			vbuf = v4l2_m2m_src_buf_remove(ctx->fh.m2m_ctx);
ctx               403 drivers/staging/media/sunxi/cedrus/cedrus_video.c 			vbuf = v4l2_m2m_dst_buf_remove(ctx->fh.m2m_ctx);
ctx               409 drivers/staging/media/sunxi/cedrus/cedrus_video.c 					   &ctx->hdl);
ctx               425 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vq);
ctx               429 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		pix_fmt = &ctx->src_fmt;
ctx               431 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		pix_fmt = &ctx->dst_fmt;
ctx               443 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vq);
ctx               444 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               447 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	switch (ctx->src_fmt.pixelformat) {
ctx               449 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		ctx->current_codec = CEDRUS_CODEC_MPEG2;
ctx               453 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		ctx->current_codec = CEDRUS_CODEC_H264;
ctx               461 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	    dev->dec_ops[ctx->current_codec]->start)
ctx               462 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		ret = dev->dec_ops[ctx->current_codec]->start(ctx);
ctx               472 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vq);
ctx               473 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_dev *dev = ctx->dev;
ctx               476 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	    dev->dec_ops[ctx->current_codec]->stop)
ctx               477 drivers/staging/media/sunxi/cedrus/cedrus_video.c 		dev->dec_ops[ctx->current_codec]->stop(ctx);
ctx               485 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               487 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	v4l2_m2m_buf_queue(ctx->fh.m2m_ctx, vbuf);
ctx               492 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);
ctx               494 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	v4l2_ctrl_request_complete(vb->req_obj.req, &ctx->hdl);
ctx               512 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	struct cedrus_ctx *ctx = priv;
ctx               517 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	src_vq->drv_priv = ctx;
ctx               523 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	src_vq->lock = &ctx->dev->dev_mutex;
ctx               524 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	src_vq->dev = ctx->dev->dev;
ctx               534 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	dst_vq->drv_priv = ctx;
ctx               540 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	dst_vq->lock = &ctx->dev->dev_mutex;
ctx               541 drivers/staging/media/sunxi/cedrus/cedrus_video.c 	dst_vq->dev = ctx->dev->dev;
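
cedrus_s_fmt_vid_out copies the colorimetry fields (colorspace, xfer_func, ycbcr_enc, quantization) from the newly set OUTPUT (bitstream) format onto the stored CAPTURE format: a decoder passes colorimetry through untouched, so only those four fields propagate, never the dimensions. A tiny illustration with a cut-down format struct in place of v4l2_pix_format:

	struct toy_fmt {
		unsigned int width, height;
		unsigned int colorspace, xfer_func, ycbcr_enc, quantization;
	};

	/* Setting the OUTPUT (bitstream) format fixes the colorimetry
	 * that CAPTURE will report; width/height are left alone. */
	static void propagate_colorimetry(const struct toy_fmt *src,
					  struct toy_fmt *dst)
	{
		dst->colorspace   = src->colorspace;
		dst->xfer_func    = src->xfer_func;
		dst->ycbcr_enc    = src->ycbcr_enc;
		dst->quantization = src->quantization;
	}
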
ctx               299 drivers/staging/media/tegra-vde/vde.c 				      struct tegra_vde_h264_decoder_ctx *ctx,
ctx               338 drivers/staging/media/tegra-vde/vde.c 	tegra_setup_frameidx(vde, dpb_frames, ctx->dpb_frames_nb,
ctx               339 drivers/staging/media/tegra-vde/vde.c 			     ctx->pic_width_in_mbs, ctx->pic_height_in_mbs);
ctx               342 drivers/staging/media/tegra-vde/vde.c 				    ctx->dpb_frames_nb - 1,
ctx               343 drivers/staging/media/tegra-vde/vde.c 				    ctx->dpb_ref_frames_with_earlier_poc_nb);
ctx               355 drivers/staging/media/tegra-vde/vde.c 	value = ctx->pic_width_in_mbs << 11 | ctx->pic_height_in_mbs << 3;
ctx               389 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->pic_width_in_mbs << 11;
ctx               390 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->pic_height_in_mbs << 3;
ctx               394 drivers/staging/media/tegra-vde/vde.c 	value = !ctx->baseline_profile << 17;
ctx               395 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->level_idc << 13;
ctx               396 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->log2_max_pic_order_cnt_lsb << 7;
ctx               397 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->pic_order_cnt_type << 5;
ctx               398 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->log2_max_frame_num;
ctx               402 drivers/staging/media/tegra-vde/vde.c 	value = ctx->pic_init_qp << 25;
ctx               403 drivers/staging/media/tegra-vde/vde.c 	value |= !!(ctx->deblocking_filter_control_present_flag) << 2;
ctx               404 drivers/staging/media/tegra-vde/vde.c 	value |= !!ctx->pic_order_present_flag;
ctx               408 drivers/staging/media/tegra-vde/vde.c 	value = ctx->chroma_qp_index_offset;
ctx               409 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->num_ref_idx_l0_active_minus1 << 5;
ctx               410 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->num_ref_idx_l1_active_minus1 << 10;
ctx               411 drivers/staging/media/tegra-vde/vde.c 	value |= !!ctx->constrained_intra_pred_flag << 15;
ctx               428 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->pic_width_in_mbs << 11;
ctx               429 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->pic_height_in_mbs << 3;
ctx               434 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->level_idc << 4;
ctx               435 drivers/staging/media/tegra-vde/vde.c 	value |= !ctx->baseline_profile << 1;
ctx               436 drivers/staging/media/tegra-vde/vde.c 	value |= !!ctx->direct_8x8_inference_flag;
ctx               445 drivers/staging/media/tegra-vde/vde.c 	value |= ctx->chroma_qp_index_offset << 8;
ctx               450 drivers/staging/media/tegra-vde/vde.c 					    ctx->dpb_frames_nb - 1,
ctx               451 drivers/staging/media/tegra-vde/vde.c 					    ctx->pic_order_cnt_type == 0);
ctx               466 drivers/staging/media/tegra-vde/vde.c 	if (!ctx->baseline_profile)
ctx               624 drivers/staging/media/tegra-vde/vde.c 				       struct tegra_vde_h264_decoder_ctx *ctx)
ctx               626 drivers/staging/media/tegra-vde/vde.c 	if (ctx->dpb_frames_nb == 0 || ctx->dpb_frames_nb > 17) {
ctx               627 drivers/staging/media/tegra-vde/vde.c 		dev_err(dev, "Bad DPB size %u\n", ctx->dpb_frames_nb);
ctx               631 drivers/staging/media/tegra-vde/vde.c 	if (ctx->level_idc > 15) {
ctx               632 drivers/staging/media/tegra-vde/vde.c 		dev_err(dev, "Bad level value %u\n", ctx->level_idc);
ctx               636 drivers/staging/media/tegra-vde/vde.c 	if (ctx->pic_init_qp > 52) {
ctx               637 drivers/staging/media/tegra-vde/vde.c 		dev_err(dev, "Bad pic_init_qp value %u\n", ctx->pic_init_qp);
ctx               641 drivers/staging/media/tegra-vde/vde.c 	if (ctx->log2_max_pic_order_cnt_lsb > 16) {
ctx               643 drivers/staging/media/tegra-vde/vde.c 			ctx->log2_max_pic_order_cnt_lsb);
ctx               647 drivers/staging/media/tegra-vde/vde.c 	if (ctx->log2_max_frame_num > 16) {
ctx               649 drivers/staging/media/tegra-vde/vde.c 			ctx->log2_max_frame_num);
ctx               653 drivers/staging/media/tegra-vde/vde.c 	if (ctx->chroma_qp_index_offset > 31) {
ctx               655 drivers/staging/media/tegra-vde/vde.c 			ctx->chroma_qp_index_offset);
ctx               659 drivers/staging/media/tegra-vde/vde.c 	if (ctx->pic_order_cnt_type > 2) {
ctx               661 drivers/staging/media/tegra-vde/vde.c 			ctx->pic_order_cnt_type);
ctx               665 drivers/staging/media/tegra-vde/vde.c 	if (ctx->num_ref_idx_l0_active_minus1 > 15) {
ctx               667 drivers/staging/media/tegra-vde/vde.c 			ctx->num_ref_idx_l0_active_minus1);
ctx               671 drivers/staging/media/tegra-vde/vde.c 	if (ctx->num_ref_idx_l1_active_minus1 > 15) {
ctx               673 drivers/staging/media/tegra-vde/vde.c 			ctx->num_ref_idx_l1_active_minus1);
ctx               677 drivers/staging/media/tegra-vde/vde.c 	if (!ctx->pic_width_in_mbs || ctx->pic_width_in_mbs > 127) {
ctx               679 drivers/staging/media/tegra-vde/vde.c 			ctx->pic_width_in_mbs);
ctx               683 drivers/staging/media/tegra-vde/vde.c 	if (!ctx->pic_height_in_mbs || ctx->pic_height_in_mbs > 127) {
ctx               685 drivers/staging/media/tegra-vde/vde.c 			ctx->pic_height_in_mbs);
ctx               696 drivers/staging/media/tegra-vde/vde.c 	struct tegra_vde_h264_decoder_ctx ctx;
ctx               713 drivers/staging/media/tegra-vde/vde.c 	if (copy_from_user(&ctx, (void __user *)vaddr, sizeof(ctx)))
ctx               716 drivers/staging/media/tegra-vde/vde.c 	ret = tegra_vde_validate_h264_ctx(dev, &ctx);
ctx               720 drivers/staging/media/tegra-vde/vde.c 	ret = tegra_vde_attach_dmabuf(vde, ctx.bitstream_data_fd,
ctx               721 drivers/staging/media/tegra-vde/vde.c 				      ctx.bitstream_data_offset,
ctx               730 drivers/staging/media/tegra-vde/vde.c 	frames = kmalloc_array(ctx.dpb_frames_nb, sizeof(*frames), GFP_KERNEL);
ctx               736 drivers/staging/media/tegra-vde/vde.c 	dpb_frames = kcalloc(ctx.dpb_frames_nb, sizeof(*dpb_frames),
ctx               743 drivers/staging/media/tegra-vde/vde.c 	macroblocks_nb = ctx.pic_width_in_mbs * ctx.pic_height_in_mbs;
ctx               744 drivers/staging/media/tegra-vde/vde.c 	frames_user = u64_to_user_ptr(ctx.dpb_frames_ptr);
ctx               747 drivers/staging/media/tegra-vde/vde.c 			   ctx.dpb_frames_nb * sizeof(*frames))) {
ctx               752 drivers/staging/media/tegra-vde/vde.c 	cstride = ALIGN(ctx.pic_width_in_mbs * 8, 16);
ctx               753 drivers/staging/media/tegra-vde/vde.c 	csize = cstride * ctx.pic_height_in_mbs * 8;
ctx               756 drivers/staging/media/tegra-vde/vde.c 	for (i = 0; i < ctx.dpb_frames_nb; i++) {
ctx               768 drivers/staging/media/tegra-vde/vde.c 							ctx.baseline_profile,
ctx               806 drivers/staging/media/tegra-vde/vde.c 	ret = tegra_vde_setup_hw_context(vde, &ctx, dpb_frames,
ctx               855 drivers/staging/media/tegra-vde/vde.c 						ctx.baseline_profile, ret != 0);
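
tegra_vde_validate_h264_ctx rejects out-of-range header fields one by one before any hardware state is touched. The bounds visible in the entries above (DPB size 1..17, level_idc <= 15, pic_init_qp <= 52, log2 fields <= 16, picture dimensions 1..127 macroblocks) translate into a straightforward validator; this sketch reuses those bounds over an invented struct.

	#include <errno.h>
	#include <stdio.h>

	struct h264_params {
		unsigned int dpb_frames_nb;
		unsigned int level_idc;
		unsigned int pic_init_qp;
		unsigned int log2_max_frame_num;
		unsigned int pic_width_in_mbs;
		unsigned int pic_height_in_mbs;
	};

	static int validate_h264_params(const struct h264_params *p)
	{
		if (p->dpb_frames_nb == 0 || p->dpb_frames_nb > 17) {
			fprintf(stderr, "Bad DPB size %u\n", p->dpb_frames_nb);
			return -EINVAL;
		}
		if (p->level_idc > 15 || p->pic_init_qp > 52 ||
		    p->log2_max_frame_num > 16)
			return -EINVAL;
		if (!p->pic_width_in_mbs || p->pic_width_in_mbs > 127 ||
		    !p->pic_height_in_mbs || p->pic_height_in_mbs > 127)
			return -EINVAL;
		return 0;
	}
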
ctx               638 drivers/staging/qlge/qlge_main.c 	struct intr_context *ctx = qdev->intr_context + intr;
ctx               645 drivers/staging/qlge/qlge_main.c 			   ctx->intr_en_mask);
ctx               651 drivers/staging/qlge/qlge_main.c 	if (atomic_dec_and_test(&ctx->irq_cnt)) {
ctx               653 drivers/staging/qlge/qlge_main.c 			   ctx->intr_en_mask);
ctx               663 drivers/staging/qlge/qlge_main.c 	struct intr_context *ctx;
ctx               671 drivers/staging/qlge/qlge_main.c 	ctx = qdev->intr_context + intr;
ctx               673 drivers/staging/qlge/qlge_main.c 	if (!atomic_read(&ctx->irq_cnt)) {
ctx               675 drivers/staging/qlge/qlge_main.c 		ctx->intr_dis_mask);
ctx               678 drivers/staging/qlge/qlge_main.c 	atomic_inc(&ctx->irq_cnt);
ctx              2300 drivers/staging/qlge/qlge_main.c 	struct intr_context *ctx = &qdev->intr_context[rx_ring->cq_id];
ctx              2312 drivers/staging/qlge/qlge_main.c 		if ((ctx->irq_mask & (1 << trx_ring->cq_id)) &&
ctx              3412 drivers/staging/qlge/qlge_main.c static void ql_set_irq_mask(struct ql_adapter *qdev, struct intr_context *ctx)
ctx              3414 drivers/staging/qlge/qlge_main.c 	int j, vect = ctx->intr;
ctx              3421 drivers/staging/qlge/qlge_main.c 		ctx->irq_mask = (1 << qdev->rx_ring[vect].cq_id);
ctx              3425 drivers/staging/qlge/qlge_main.c 			ctx->irq_mask |=
ctx              3434 drivers/staging/qlge/qlge_main.c 			ctx->irq_mask |= (1 << qdev->rx_ring[j].cq_id);
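
qlge balances interrupt enables against disables with an atomic counter: the disable path masks the vector on the first disable and bumps irq_cnt, and the enable path only re-arms the hardware when atomic_dec_and_test brings the count back to zero. A userspace model of that pairing with C11 atomics; the register writes are stubs, and the driver's separate read-then-increment is collapsed here into one fetch_add for simplicity.

	#include <stdatomic.h>
	#include <stdio.h>

	static atomic_int irq_cnt;	/* 0 == interrupt armed */

	static void write_enable_mask(void)  { puts("irq enabled");  }
	static void write_disable_mask(void) { puts("irq disabled"); }

	static void disable_completion_irq(void)
	{
		/* Only the 0 -> 1 transition touches the hardware. */
		if (atomic_fetch_add(&irq_cnt, 1) == 0)
			write_disable_mask();
	}

	static void enable_completion_irq(void)
	{
		/* Re-arm only once every disable has been paired,
		 * i.e. when the count returns to zero. */
		if (atomic_fetch_sub(&irq_cnt, 1) == 1)
			write_enable_mask();
	}
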
ctx              2272 drivers/staging/rtl8723bs/core/rtw_security.c static void aes_128_encrypt(void *ctx, u8 *plain, u8 *crypt)
ctx              2274 drivers/staging/rtl8723bs/core/rtw_security.c 	rijndaelEncrypt(ctx, plain, crypt);
ctx              2291 drivers/staging/rtl8723bs/core/rtw_security.c static void aes_encrypt_deinit(void *ctx)
ctx              2293 drivers/staging/rtl8723bs/core/rtw_security.c 	kzfree(ctx);
ctx              2313 drivers/staging/rtl8723bs/core/rtw_security.c 	void *ctx;
ctx              2318 drivers/staging/rtl8723bs/core/rtw_security.c 	ctx = aes_encrypt_init(key, 16);
ctx              2319 drivers/staging/rtl8723bs/core/rtw_security.c 	if (ctx == NULL)
ctx              2342 drivers/staging/rtl8723bs/core/rtw_security.c 			aes_128_encrypt(ctx, cbc, cbc);
ctx              2347 drivers/staging/rtl8723bs/core/rtw_security.c 	aes_128_encrypt(ctx, pad, pad);
ctx              2365 drivers/staging/rtl8723bs/core/rtw_security.c 	aes_128_encrypt(ctx, pad, mac);
ctx              2366 drivers/staging/rtl8723bs/core/rtw_security.c 	aes_encrypt_deinit(ctx);
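
The rtw_security.c entries drive a raw AES-128 block operation in CBC fashion: message blocks are folded into a running cbc[] state and re-encrypted in place, with final padded blocks producing the MAC. The construction resembles a CBC-MAC; below is a generic whole-block CBC-MAC sketch over a stub block cipher. The in-tree code uses rijndaelEncrypt plus padding/subkey steps that are deliberately not reproduced here.

	#include <stddef.h>
	#include <stdint.h>
	#include <string.h>

	#define BLK 16

	/* Stub cipher: a real implementation would run AES-128 under
	 * a schedule derived from the key. Placeholder, NOT AES. */
	static void block_encrypt(const uint8_t key[BLK], uint8_t blk[BLK])
	{
		for (size_t i = 0; i < BLK; i++)
			blk[i] ^= key[i];
	}

	/* Plain CBC-MAC over whole blocks: xor each block into the
	 * running state, then encrypt the state in place. */
	static void cbc_mac(const uint8_t key[BLK], const uint8_t *msg,
			    size_t nblocks, uint8_t mac[BLK])
	{
		uint8_t cbc[BLK] = { 0 };

		for (size_t n = 0; n < nblocks; n++) {
			for (size_t i = 0; i < BLK; i++)
				cbc[i] ^= msg[n * BLK + i];
			block_encrypt(key, cbc);
		}
		memcpy(mac, cbc, BLK);
	}
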
ctx               125 drivers/tee/optee/call.c u32 optee_do_call_with_arg(struct tee_context *ctx, phys_addr_t parg)
ctx               127 drivers/tee/optee/call.c 	struct optee *optee = tee_get_drvdata(ctx->teedev);
ctx               156 drivers/tee/optee/call.c 			optee_handle_rpc(ctx, &param, &call_ctx);
ctx               173 drivers/tee/optee/call.c static struct tee_shm *get_msg_arg(struct tee_context *ctx, size_t num_params,
ctx               181 drivers/tee/optee/call.c 	shm = tee_shm_alloc(ctx, OPTEE_MSG_GET_ARG_SIZE(num_params),
ctx               208 drivers/tee/optee/call.c int optee_open_session(struct tee_context *ctx,
ctx               212 drivers/tee/optee/call.c 	struct optee_context_data *ctxdata = ctx->data;
ctx               220 drivers/tee/optee/call.c 	shm = get_msg_arg(ctx, arg->num_params + 2, &msg_arg, &msg_parg);
ctx               249 drivers/tee/optee/call.c 	if (optee_do_call_with_arg(ctx, msg_parg)) {
ctx               268 drivers/tee/optee/call.c 		optee_close_session(ctx, msg_arg->session);
ctx               280 drivers/tee/optee/call.c int optee_close_session(struct tee_context *ctx, u32 session)
ctx               282 drivers/tee/optee/call.c 	struct optee_context_data *ctxdata = ctx->data;
ctx               298 drivers/tee/optee/call.c 	shm = get_msg_arg(ctx, 0, &msg_arg, &msg_parg);
ctx               304 drivers/tee/optee/call.c 	optee_do_call_with_arg(ctx, msg_parg);
ctx               310 drivers/tee/optee/call.c int optee_invoke_func(struct tee_context *ctx, struct tee_ioctl_invoke_arg *arg,
ctx               313 drivers/tee/optee/call.c 	struct optee_context_data *ctxdata = ctx->data;
ctx               327 drivers/tee/optee/call.c 	shm = get_msg_arg(ctx, arg->num_params, &msg_arg, &msg_parg);
ctx               339 drivers/tee/optee/call.c 	if (optee_do_call_with_arg(ctx, msg_parg)) {
ctx               356 drivers/tee/optee/call.c int optee_cancel_req(struct tee_context *ctx, u32 cancel_id, u32 session)
ctx               358 drivers/tee/optee/call.c 	struct optee_context_data *ctxdata = ctx->data;
ctx               371 drivers/tee/optee/call.c 	shm = get_msg_arg(ctx, 0, &msg_arg, &msg_parg);
ctx               378 drivers/tee/optee/call.c 	optee_do_call_with_arg(ctx, msg_parg);
ctx               572 drivers/tee/optee/call.c int optee_shm_register(struct tee_context *ctx, struct tee_shm *shm,
ctx               593 drivers/tee/optee/call.c 	shm_arg = get_msg_arg(ctx, 1, &msg_arg, &msg_parg);
ctx               614 drivers/tee/optee/call.c 	if (optee_do_call_with_arg(ctx, msg_parg) ||
ctx               624 drivers/tee/optee/call.c int optee_shm_unregister(struct tee_context *ctx, struct tee_shm *shm)
ctx               631 drivers/tee/optee/call.c 	shm_arg = get_msg_arg(ctx, 1, &msg_arg, &msg_parg);
ctx               640 drivers/tee/optee/call.c 	if (optee_do_call_with_arg(ctx, msg_parg) ||
ctx               647 drivers/tee/optee/call.c int optee_shm_register_supp(struct tee_context *ctx, struct tee_shm *shm,
ctx               658 drivers/tee/optee/call.c int optee_shm_unregister_supp(struct tee_context *ctx, struct tee_shm *shm)
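
Every exported call in call.c follows the same shape these entries show: allocate a shared-memory OPTEE_MSG argument with get_msg_arg(), fill it, hand its physical address to secure world through optee_do_call_with_arg(), then free the buffer. A hedged sketch of the minimal variant (optee_simple_call_sketch is illustrative; get_msg_arg() is static to call.c, as the entries above indicate):

    static int optee_simple_call_sketch(struct tee_context *ctx, u32 session)
    {
        struct optee_msg_arg *msg_arg;  /* kernel view of the argument */
        phys_addr_t msg_parg;           /* same buffer, as secure world sees it */
        struct tee_shm *shm;

        shm = get_msg_arg(ctx, 0, &msg_arg, &msg_parg);
        if (IS_ERR(shm))
            return PTR_ERR(shm);

        msg_arg->cmd = OPTEE_MSG_CMD_CLOSE_SESSION;
        msg_arg->session = session;
        optee_do_call_with_arg(ctx, msg_parg);  /* issues the SMC */

        tee_shm_free(shm);
        return 0;
    }
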
ctx               221 drivers/tee/optee/core.c static int optee_open(struct tee_context *ctx)
ctx               224 drivers/tee/optee/core.c 	struct tee_device *teedev = ctx->teedev;
ctx               235 drivers/tee/optee/core.c 		if (!optee->supp.ctx) {
ctx               237 drivers/tee/optee/core.c 			optee->supp.ctx = ctx;
ctx               249 drivers/tee/optee/core.c 	ctx->data = ctxdata;
ctx               253 drivers/tee/optee/core.c static void optee_release(struct tee_context *ctx)
ctx               255 drivers/tee/optee/core.c 	struct optee_context_data *ctxdata = ctx->data;
ctx               256 drivers/tee/optee/core.c 	struct tee_device *teedev = ctx->teedev;
ctx               267 drivers/tee/optee/core.c 	shm = tee_shm_alloc(ctx, sizeof(struct optee_msg_arg), TEE_SHM_MAPPED);
ctx               288 drivers/tee/optee/core.c 			optee_do_call_with_arg(ctx, parg);
ctx               297 drivers/tee/optee/core.c 	ctx->data = NULL;
ctx                34 drivers/tee/optee/device.c static int get_devices(struct tee_context *ctx, u32 session,
ctx                55 drivers/tee/optee/device.c 	ret = tee_client_invoke_func(ctx, &inv_arg, param);
ctx                98 drivers/tee/optee/device.c 	struct tee_context *ctx = NULL;
ctx               105 drivers/tee/optee/device.c 	ctx = tee_client_open_context(NULL, optee_ctx_match, NULL, NULL);
ctx               106 drivers/tee/optee/device.c 	if (IS_ERR(ctx))
ctx               114 drivers/tee/optee/device.c 	rc = tee_client_open_session(ctx, &sess_arg, NULL);
ctx               121 drivers/tee/optee/device.c 	rc = get_devices(ctx, sess_arg.session, NULL, &shm_size);
ctx               125 drivers/tee/optee/device.c 	device_shm = tee_shm_alloc(ctx, shm_size,
ctx               133 drivers/tee/optee/device.c 	rc = get_devices(ctx, sess_arg.session, device_shm, &shm_size);
ctx               155 drivers/tee/optee/device.c 	tee_client_close_session(ctx, sess_arg.session);
ctx               157 drivers/tee/optee/device.c 	tee_client_close_context(ctx);
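
device.c is a self-contained user of the in-kernel TEE client API, and its entries spell out the canonical flow: open a context, open a session, invoke (here get_devices(), twice: once to size the buffer, once to fill it), then close the session and context. A compact sketch of that flow under the same assumptions (UUID setup elided; optee_ctx_match is the match callback from this file):

    static int tee_client_flow_sketch(void)
    {
        struct tee_ioctl_open_session_arg sess_arg = {};
        struct tee_context *ctx;
        int rc;

        ctx = tee_client_open_context(NULL, optee_ctx_match, NULL, NULL);
        if (IS_ERR(ctx))
            return PTR_ERR(ctx);

        /* sess_arg.uuid would be filled with the target TA's UUID here */
        sess_arg.clnt_login = TEE_IOCTL_LOGIN_PUBLIC;
        rc = tee_client_open_session(ctx, &sess_arg, NULL);
        if (rc || sess_arg.ret) {
            tee_client_close_context(ctx);
            return rc ? rc : -EINVAL;
        }

        /* ... tee_client_invoke_func(ctx, &inv_arg, param) ... */

        tee_client_close_session(ctx, sess_arg.session);
        tee_client_close_context(ctx);
        return 0;
    }
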
ctx                60 drivers/tee/optee/optee_private.h 	struct tee_context *ctx;
ctx               123 drivers/tee/optee/optee_private.h void optee_handle_rpc(struct tee_context *ctx, struct optee_rpc_param *param,
ctx               130 drivers/tee/optee/optee_private.h u32 optee_supp_thrd_req(struct tee_context *ctx, u32 func, size_t num_params,
ctx               133 drivers/tee/optee/optee_private.h int optee_supp_read(struct tee_context *ctx, void __user *buf, size_t len);
ctx               134 drivers/tee/optee/optee_private.h int optee_supp_write(struct tee_context *ctx, void __user *buf, size_t len);
ctx               139 drivers/tee/optee/optee_private.h int optee_supp_recv(struct tee_context *ctx, u32 *func, u32 *num_params,
ctx               141 drivers/tee/optee/optee_private.h int optee_supp_send(struct tee_context *ctx, u32 ret, u32 num_params,
ctx               144 drivers/tee/optee/optee_private.h u32 optee_do_call_with_arg(struct tee_context *ctx, phys_addr_t parg);
ctx               145 drivers/tee/optee/optee_private.h int optee_open_session(struct tee_context *ctx,
ctx               148 drivers/tee/optee/optee_private.h int optee_close_session(struct tee_context *ctx, u32 session);
ctx               149 drivers/tee/optee/optee_private.h int optee_invoke_func(struct tee_context *ctx, struct tee_ioctl_invoke_arg *arg,
ctx               151 drivers/tee/optee/optee_private.h int optee_cancel_req(struct tee_context *ctx, u32 cancel_id, u32 session);
ctx               156 drivers/tee/optee/optee_private.h int optee_shm_register(struct tee_context *ctx, struct tee_shm *shm,
ctx               159 drivers/tee/optee/optee_private.h int optee_shm_unregister(struct tee_context *ctx, struct tee_shm *shm);
ctx               161 drivers/tee/optee/optee_private.h int optee_shm_register_supp(struct tee_context *ctx, struct tee_shm *shm,
ctx               164 drivers/tee/optee/optee_private.h int optee_shm_unregister_supp(struct tee_context *ctx, struct tee_shm *shm);
ctx               143 drivers/tee/optee/rpc.c static void handle_rpc_supp_cmd(struct tee_context *ctx,
ctx               162 drivers/tee/optee/rpc.c 	arg->ret = optee_supp_thrd_req(ctx, arg->cmd, arg->num_params, params);
ctx               170 drivers/tee/optee/rpc.c static struct tee_shm *cmd_alloc_suppl(struct tee_context *ctx, size_t sz)
ctx               174 drivers/tee/optee/rpc.c 	struct optee *optee = tee_get_drvdata(ctx->teedev);
ctx               182 drivers/tee/optee/rpc.c 	ret = optee_supp_thrd_req(ctx, OPTEE_MSG_RPC_CMD_SHM_ALLOC, 1, &param);
ctx               188 drivers/tee/optee/rpc.c 	shm = tee_shm_get_from_id(optee->supp.ctx, param.u.value.c);
ctx               193 drivers/tee/optee/rpc.c static void handle_rpc_func_cmd_shm_alloc(struct tee_context *ctx,
ctx               220 drivers/tee/optee/rpc.c 		shm = cmd_alloc_suppl(ctx, sz);
ctx               223 drivers/tee/optee/rpc.c 		shm = tee_shm_alloc(ctx, sz, TEE_SHM_MAPPED);
ctx               289 drivers/tee/optee/rpc.c static void cmd_free_suppl(struct tee_context *ctx, struct tee_shm *shm)
ctx               311 drivers/tee/optee/rpc.c 	optee_supp_thrd_req(ctx, OPTEE_MSG_RPC_CMD_SHM_FREE, 1, &param);
ctx               314 drivers/tee/optee/rpc.c static void handle_rpc_func_cmd_shm_free(struct tee_context *ctx,
ctx               330 drivers/tee/optee/rpc.c 		cmd_free_suppl(ctx, shm);
ctx               356 drivers/tee/optee/rpc.c static void handle_rpc_func_cmd(struct tee_context *ctx, struct optee *optee,
ctx               380 drivers/tee/optee/rpc.c 		handle_rpc_func_cmd_shm_alloc(ctx, arg, call_ctx);
ctx               383 drivers/tee/optee/rpc.c 		handle_rpc_func_cmd_shm_free(ctx, arg);
ctx               386 drivers/tee/optee/rpc.c 		handle_rpc_supp_cmd(ctx, arg);
ctx               398 drivers/tee/optee/rpc.c void optee_handle_rpc(struct tee_context *ctx, struct optee_rpc_param *param,
ctx               401 drivers/tee/optee/rpc.c 	struct tee_device *teedev = ctx->teedev;
ctx               408 drivers/tee/optee/rpc.c 		shm = tee_shm_alloc(ctx, param->a1, TEE_SHM_MAPPED);
ctx               434 drivers/tee/optee/rpc.c 		handle_rpc_func_cmd(ctx, optee, shm, call_ctx);
ctx                44 drivers/tee/optee/shm_pool.c 		rc = optee_shm_register(shm->ctx, shm, pages, nr_pages,
ctx                56 drivers/tee/optee/shm_pool.c 		optee_shm_unregister(shm->ctx, shm);
ctx                61 drivers/tee/optee/supp.c 	supp->ctx = NULL;
ctx                76 drivers/tee/optee/supp.c u32 optee_supp_thrd_req(struct tee_context *ctx, u32 func, size_t num_params,
ctx                80 drivers/tee/optee/supp.c 	struct optee *optee = tee_get_drvdata(ctx->teedev);
ctx                90 drivers/tee/optee/supp.c 	if (!supp->ctx && ctx->supp_nowait)
ctx               118 drivers/tee/optee/supp.c 		interruptable = !supp->ctx;
ctx               229 drivers/tee/optee/supp.c int optee_supp_recv(struct tee_context *ctx, u32 *func, u32 *num_params,
ctx               232 drivers/tee/optee/supp.c 	struct tee_device *teedev = ctx->teedev;
ctx               338 drivers/tee/optee/supp.c int optee_supp_send(struct tee_context *ctx, u32 ret, u32 num_params,
ctx               341 drivers/tee/optee/supp.c 	struct tee_device *teedev = ctx->teedev;
ctx                34 drivers/tee/tee_core.c 	struct tee_context *ctx;
ctx                39 drivers/tee/tee_core.c 	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx                40 drivers/tee/tee_core.c 	if (!ctx) {
ctx                45 drivers/tee/tee_core.c 	kref_init(&ctx->refcount);
ctx                46 drivers/tee/tee_core.c 	ctx->teedev = teedev;
ctx                47 drivers/tee/tee_core.c 	INIT_LIST_HEAD(&ctx->list_shm);
ctx                48 drivers/tee/tee_core.c 	rc = teedev->desc->ops->open(ctx);
ctx                52 drivers/tee/tee_core.c 	return ctx;
ctx                54 drivers/tee/tee_core.c 	kfree(ctx);
ctx                60 drivers/tee/tee_core.c void teedev_ctx_get(struct tee_context *ctx)
ctx                62 drivers/tee/tee_core.c 	if (ctx->releasing)
ctx                65 drivers/tee/tee_core.c 	kref_get(&ctx->refcount);
ctx                70 drivers/tee/tee_core.c 	struct tee_context *ctx = container_of(ref, struct tee_context,
ctx                72 drivers/tee/tee_core.c 	ctx->releasing = true;
ctx                73 drivers/tee/tee_core.c 	ctx->teedev->desc->ops->release(ctx);
ctx                74 drivers/tee/tee_core.c 	kfree(ctx);
ctx                77 drivers/tee/tee_core.c void teedev_ctx_put(struct tee_context *ctx)
ctx                79 drivers/tee/tee_core.c 	if (ctx->releasing)
ctx                82 drivers/tee/tee_core.c 	kref_put(&ctx->refcount, teedev_ctx_release);
ctx                85 drivers/tee/tee_core.c static void teedev_close_context(struct tee_context *ctx)
ctx                87 drivers/tee/tee_core.c 	tee_device_put(ctx->teedev);
ctx                88 drivers/tee/tee_core.c 	teedev_ctx_put(ctx);
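
The tee_core.c entries above implement a small refcounting idiom worth calling out: the context carries a kref, shared-memory objects take references through teedev_ctx_get(), and the final teedev_ctx_put() runs the release path. The 'releasing' flag short-circuits get/put while release() is tearing down shm objects that themselves hold context references, which would otherwise re-arm the kref mid-destruction. A generic restatement of the same pattern (foo_* names are illustrative):

    struct foo_ctx {
        struct kref refcount;
        bool releasing;
    };

    static void foo_ctx_release(struct kref *ref)
    {
        struct foo_ctx *ctx = container_of(ref, struct foo_ctx, refcount);

        ctx->releasing = true;  /* gets/puts below this point are no-ops */
        /* driver release op runs here and may drop nested references */
        kfree(ctx);
    }

    static void foo_ctx_get(struct foo_ctx *ctx)
    {
        if (!ctx->releasing)
            kref_get(&ctx->refcount);
    }

    static void foo_ctx_put(struct foo_ctx *ctx)
    {
        if (!ctx->releasing)
            kref_put(&ctx->refcount, foo_ctx_release);
    }
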
ctx                93 drivers/tee/tee_core.c 	struct tee_context *ctx;
ctx                95 drivers/tee/tee_core.c 	ctx = teedev_open(container_of(inode->i_cdev, struct tee_device, cdev));
ctx                96 drivers/tee/tee_core.c 	if (IS_ERR(ctx))
ctx                97 drivers/tee/tee_core.c 		return PTR_ERR(ctx);
ctx               103 drivers/tee/tee_core.c 	ctx->supp_nowait = false;
ctx               104 drivers/tee/tee_core.c 	filp->private_data = ctx;
ctx               114 drivers/tee/tee_core.c static int tee_ioctl_version(struct tee_context *ctx,
ctx               119 drivers/tee/tee_core.c 	ctx->teedev->desc->ops->get_version(ctx->teedev, &vers);
ctx               121 drivers/tee/tee_core.c 	if (ctx->teedev->desc->flags & TEE_DESC_PRIVILEGED)
ctx               130 drivers/tee/tee_core.c static int tee_ioctl_shm_alloc(struct tee_context *ctx,
ctx               144 drivers/tee/tee_core.c 	shm = tee_shm_alloc(ctx, data.size, TEE_SHM_MAPPED | TEE_SHM_DMA_BUF);
ctx               167 drivers/tee/tee_core.c tee_ioctl_shm_register(struct tee_context *ctx,
ctx               181 drivers/tee/tee_core.c 	shm = tee_shm_register(ctx, data.addr, data.length,
ctx               203 drivers/tee/tee_core.c static int params_from_user(struct tee_context *ctx, struct tee_param *params,
ctx               242 drivers/tee/tee_core.c 			shm = tee_shm_get_from_id(ctx, ip.c);
ctx               297 drivers/tee/tee_core.c static int tee_ioctl_open_session(struct tee_context *ctx,
ctx               309 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->open_session)
ctx               332 drivers/tee/tee_core.c 		rc = params_from_user(ctx, params, arg.num_params, uparams);
ctx               337 drivers/tee/tee_core.c 	rc = ctx->teedev->desc->ops->open_session(ctx, &arg, params);
ctx               354 drivers/tee/tee_core.c 	if (rc && have_session && ctx->teedev->desc->ops->close_session)
ctx               355 drivers/tee/tee_core.c 		ctx->teedev->desc->ops->close_session(ctx, arg.session);
ctx               369 drivers/tee/tee_core.c static int tee_ioctl_invoke(struct tee_context *ctx,
ctx               380 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->invoke_func)
ctx               403 drivers/tee/tee_core.c 		rc = params_from_user(ctx, params, arg.num_params, uparams);
ctx               408 drivers/tee/tee_core.c 	rc = ctx->teedev->desc->ops->invoke_func(ctx, &arg, params);
ctx               430 drivers/tee/tee_core.c static int tee_ioctl_cancel(struct tee_context *ctx,
ctx               435 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->cancel_req)
ctx               441 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->cancel_req(ctx, arg.cancel_id,
ctx               446 drivers/tee/tee_core.c tee_ioctl_close_session(struct tee_context *ctx,
ctx               451 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->close_session)
ctx               457 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->close_session(ctx, arg.session);
ctx               460 drivers/tee/tee_core.c static int params_to_supp(struct tee_context *ctx,
ctx               504 drivers/tee/tee_core.c static int tee_ioctl_supp_recv(struct tee_context *ctx,
ctx               514 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->supp_recv)
ctx               535 drivers/tee/tee_core.c 	rc = params_from_user(ctx, params, num_params, uarg->params);
ctx               539 drivers/tee/tee_core.c 	rc = ctx->teedev->desc->ops->supp_recv(ctx, &func, &num_params, params);
ctx               549 drivers/tee/tee_core.c 	rc = params_to_supp(ctx, uarg->params, num_params, params);
ctx               601 drivers/tee/tee_core.c static int tee_ioctl_supp_send(struct tee_context *ctx,
ctx               612 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->supp_send)
ctx               638 drivers/tee/tee_core.c 	rc = ctx->teedev->desc->ops->supp_send(ctx, ret, num_params, params);
ctx               646 drivers/tee/tee_core.c 	struct tee_context *ctx = filp->private_data;
ctx               651 drivers/tee/tee_core.c 		return tee_ioctl_version(ctx, uarg);
ctx               653 drivers/tee/tee_core.c 		return tee_ioctl_shm_alloc(ctx, uarg);
ctx               655 drivers/tee/tee_core.c 		return tee_ioctl_shm_register(ctx, uarg);
ctx               657 drivers/tee/tee_core.c 		return tee_ioctl_open_session(ctx, uarg);
ctx               659 drivers/tee/tee_core.c 		return tee_ioctl_invoke(ctx, uarg);
ctx               661 drivers/tee/tee_core.c 		return tee_ioctl_cancel(ctx, uarg);
ctx               663 drivers/tee/tee_core.c 		return tee_ioctl_close_session(ctx, uarg);
ctx               665 drivers/tee/tee_core.c 		return tee_ioctl_supp_recv(ctx, uarg);
ctx               667 drivers/tee/tee_core.c 		return tee_ioctl_supp_send(ctx, uarg);
ctx               959 drivers/tee/tee_core.c 	struct tee_context *ctx = NULL;
ctx               969 drivers/tee/tee_core.c 			ctx = ERR_PTR(-ENOENT);
ctx               976 drivers/tee/tee_core.c 		ctx = teedev_open(container_of(dev, struct tee_device, dev));
ctx               977 drivers/tee/tee_core.c 	} while (IS_ERR(ctx) && PTR_ERR(ctx) != -ENOMEM);
ctx               987 drivers/tee/tee_core.c 	if (!IS_ERR(ctx))
ctx               988 drivers/tee/tee_core.c 		ctx->supp_nowait = true;
ctx               990 drivers/tee/tee_core.c 	return ctx;
ctx               994 drivers/tee/tee_core.c void tee_client_close_context(struct tee_context *ctx)
ctx               996 drivers/tee/tee_core.c 	teedev_close_context(ctx);
ctx              1000 drivers/tee/tee_core.c void tee_client_get_version(struct tee_context *ctx,
ctx              1003 drivers/tee/tee_core.c 	ctx->teedev->desc->ops->get_version(ctx->teedev, vers);
ctx              1007 drivers/tee/tee_core.c int tee_client_open_session(struct tee_context *ctx,
ctx              1011 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->open_session)
ctx              1013 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->open_session(ctx, arg, param);
ctx              1017 drivers/tee/tee_core.c int tee_client_close_session(struct tee_context *ctx, u32 session)
ctx              1019 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->close_session)
ctx              1021 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->close_session(ctx, session);
ctx              1025 drivers/tee/tee_core.c int tee_client_invoke_func(struct tee_context *ctx,
ctx              1029 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->invoke_func)
ctx              1031 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->invoke_func(ctx, arg, param);
ctx              1035 drivers/tee/tee_core.c int tee_client_cancel_req(struct tee_context *ctx,
ctx              1038 drivers/tee/tee_core.c 	if (!ctx->teedev->desc->ops->cancel_req)
ctx              1040 drivers/tee/tee_core.c 	return ctx->teedev->desc->ops->cancel_req(ctx, arg->cancel_id,
ctx                67 drivers/tee/tee_private.h void teedev_ctx_get(struct tee_context *ctx);
ctx                68 drivers/tee/tee_private.h void teedev_ctx_put(struct tee_context *ctx);
ctx                20 drivers/tee/tee_shm.c 	if (shm->ctx)
ctx                35 drivers/tee/tee_shm.c 		int rc = teedev->desc->ops->shm_unregister(shm->ctx, shm);
ctx                47 drivers/tee/tee_shm.c 	if (shm->ctx)
ctx                48 drivers/tee/tee_shm.c 		teedev_ctx_put(shm->ctx);
ctx               100 drivers/tee/tee_shm.c static struct tee_shm *__tee_shm_alloc(struct tee_context *ctx,
ctx               109 drivers/tee/tee_shm.c 	if (ctx && ctx->teedev != teedev) {
ctx               142 drivers/tee/tee_shm.c 	shm->ctx = ctx;
ctx               177 drivers/tee/tee_shm.c 	if (ctx) {
ctx               178 drivers/tee/tee_shm.c 		teedev_ctx_get(ctx);
ctx               180 drivers/tee/tee_shm.c 		list_add_tail(&shm->link, &ctx->list_shm);
ctx               210 drivers/tee/tee_shm.c struct tee_shm *tee_shm_alloc(struct tee_context *ctx, size_t size, u32 flags)
ctx               212 drivers/tee/tee_shm.c 	return __tee_shm_alloc(ctx, ctx->teedev, size, flags);
ctx               222 drivers/tee/tee_shm.c struct tee_shm *tee_shm_register(struct tee_context *ctx, unsigned long addr,
ctx               225 drivers/tee/tee_shm.c 	struct tee_device *teedev = ctx->teedev;
ctx               245 drivers/tee/tee_shm.c 	teedev_ctx_get(ctx);
ctx               255 drivers/tee/tee_shm.c 	shm->ctx = ctx;
ctx               287 drivers/tee/tee_shm.c 	rc = teedev->desc->ops->shm_register(ctx, shm, shm->pages,
ctx               305 drivers/tee/tee_shm.c 			teedev->desc->ops->shm_unregister(ctx, shm);
ctx               311 drivers/tee/tee_shm.c 	list_add_tail(&shm->link, &ctx->list_shm);
ctx               331 drivers/tee/tee_shm.c 	teedev_ctx_put(ctx);
ctx               468 drivers/tee/tee_shm.c struct tee_shm *tee_shm_get_from_id(struct tee_context *ctx, int id)
ctx               473 drivers/tee/tee_shm.c 	if (!ctx)
ctx               476 drivers/tee/tee_shm.c 	teedev = ctx->teedev;
ctx               479 drivers/tee/tee_shm.c 	if (!shm || shm->ctx != ctx)
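
Tying the tee_shm.c entries together: tee_shm_alloc() binds a buffer to a context (taking a context reference via teedev_ctx_get() and linking it into ctx->list_shm), and tee_shm_get_from_id() only resolves IDs whose shm belongs to the asking context. A short usage sketch of the allocation round trip (shm_roundtrip_sketch is illustrative; TEE_SHM_MAPPED requests a kernel mapping, TEE_SHM_DMA_BUF an exportable ID):

    static int shm_roundtrip_sketch(struct tee_context *ctx)
    {
        struct tee_shm *shm;
        void *va;

        shm = tee_shm_alloc(ctx, 4096, TEE_SHM_MAPPED | TEE_SHM_DMA_BUF);
        if (IS_ERR(shm))
            return PTR_ERR(shm);

        va = tee_shm_get_va(shm, 0);    /* kernel mapping at offset 0 */
        if (IS_ERR(va)) {
            tee_shm_free(shm);
            return PTR_ERR(va);
        }
        memset(va, 0, 4096);            /* use the buffer */

        tee_shm_free(shm);
        return 0;
    }
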
ctx                65 drivers/thermal/qcom/tsens-8960.c 	ret = regmap_read(map, THRESHOLD_ADDR, &priv->ctx.threshold);
ctx                69 drivers/thermal/qcom/tsens-8960.c 	ret = regmap_read(map, CNTL_ADDR, &priv->ctx.control);
ctx               104 drivers/thermal/qcom/tsens-8960.c 	ret = regmap_write(map, THRESHOLD_ADDR, priv->ctx.threshold);
ctx               108 drivers/thermal/qcom/tsens-8960.c 	ret = regmap_write(map, CNTL_ADDR, priv->ctx.control);
ctx               307 drivers/thermal/qcom/tsens.h 	struct tsens_context		ctx;
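
The tsens-8960.c entries show the plainest context use in this listing: two live registers are snapshotted into priv->ctx on suspend and written back on resume. A sketch of both halves, mirroring the entries (the _sketch names are illustrative):

    static int tsens_suspend_sketch(struct regmap *map,
                                    struct tsens_context *ctx)
    {
        int ret = regmap_read(map, THRESHOLD_ADDR, &ctx->threshold);

        if (ret)
            return ret;
        return regmap_read(map, CNTL_ADDR, &ctx->control);
    }

    static int tsens_resume_sketch(struct regmap *map,
                                   struct tsens_context *ctx)
    {
        int ret = regmap_write(map, THRESHOLD_ADDR, ctx->threshold);

        if (ret)
            return ret;
        return regmap_write(map, CNTL_ADDR, ctx->control);
    }
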
ctx               636 drivers/tty/serial/ifx6x60.c static void ifx_spi_complete(void *ctx)
ctx               638 drivers/tty/serial/ifx6x60.c 	struct ifx_spi_device *ifx_dev = ctx;
ctx                35 drivers/usb/core/message.c 	struct api_context *ctx = urb->context;
ctx                37 drivers/usb/core/message.c 	ctx->status = urb->status;
ctx                38 drivers/usb/core/message.c 	complete(&ctx->done);
ctx                50 drivers/usb/core/message.c 	struct api_context ctx;
ctx                54 drivers/usb/core/message.c 	init_completion(&ctx.done);
ctx                55 drivers/usb/core/message.c 	urb->context = &ctx;
ctx                62 drivers/usb/core/message.c 	if (!wait_for_completion_timeout(&ctx.done, expire)) {
ctx                64 drivers/usb/core/message.c 		retval = (ctx.status == -ENOENT ? -ETIMEDOUT : ctx.status);
ctx                74 drivers/usb/core/message.c 		retval = ctx.status;
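
The message.c entries capture the standard trick for turning an asynchronous URB into a synchronous call: a stack-allocated context couples a completion with a status slot, the URB's completion handler fills both, and the submitter waits with a timeout, killing the URB if it expires. A self-contained sketch, assuming the URB was filled with urb_done_sketch as its completion callback (both _sketch names are illustrative):

    struct api_context_sketch {
        struct completion done;
        int status;
    };

    static void urb_done_sketch(struct urb *urb)
    {
        struct api_context_sketch *ctx = urb->context;

        ctx->status = urb->status;
        complete(&ctx->done);
    }

    static int wait_urb_sketch(struct urb *urb, unsigned long expire)
    {
        struct api_context_sketch ctx;
        int retval;

        init_completion(&ctx.done);
        urb->context = &ctx;

        retval = usb_submit_urb(urb, GFP_NOIO);
        if (retval)
            return retval;

        if (!wait_for_completion_timeout(&ctx.done, expire)) {
            usb_kill_urb(urb);  /* handler still runs before this returns */
            retval = (ctx.status == -ENOENT ? -ETIMEDOUT : ctx.status);
        } else {
            retval = ctx.status;
        }
        return retval;
    }
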
ctx               231 drivers/usb/early/xhci-dbc.c 	struct xdbc_context *ctx;
ctx               311 drivers/usb/early/xhci-dbc.c 	ctx = (struct xdbc_context *)xdbc.dbcc_base;
ctx               313 drivers/usb/early/xhci-dbc.c 	ctx->info.string0	= cpu_to_le64(xdbc.string_dma);
ctx               314 drivers/usb/early/xhci-dbc.c 	ctx->info.manufacturer	= cpu_to_le64(xdbc.string_dma + XDBC_MAX_STRING_LENGTH);
ctx               315 drivers/usb/early/xhci-dbc.c 	ctx->info.product	= cpu_to_le64(xdbc.string_dma + XDBC_MAX_STRING_LENGTH * 2);
ctx               316 drivers/usb/early/xhci-dbc.c 	ctx->info.serial	= cpu_to_le64(xdbc.string_dma + XDBC_MAX_STRING_LENGTH * 3);
ctx               317 drivers/usb/early/xhci-dbc.c 	ctx->info.length	= cpu_to_le32(string_length);
ctx               321 drivers/usb/early/xhci-dbc.c 	ep_out = (struct xdbc_ep_context *)&ctx->out;
ctx               328 drivers/usb/early/xhci-dbc.c 	ep_in = (struct xdbc_ep_context *)&ctx->in;
ctx              1571 drivers/usb/gadget/function/f_fs.c 	struct ffs_sb_fill_data *ctx = fc->fs_private;
ctx              1583 drivers/usb/gadget/function/f_fs.c 	ffs->file_perms = ctx->perms;
ctx              1584 drivers/usb/gadget/function/f_fs.c 	ffs->no_disconnect = ctx->no_disconnect;
ctx              1599 drivers/usb/gadget/function/f_fs.c 	ctx->ffs_data = ffs;
ctx              1605 drivers/usb/gadget/function/f_fs.c 	struct ffs_sb_fill_data *ctx = fc->fs_private;
ctx              1607 drivers/usb/gadget/function/f_fs.c 	if (ctx) {
ctx              1608 drivers/usb/gadget/function/f_fs.c 		if (ctx->ffs_data) {
ctx              1609 drivers/usb/gadget/function/f_fs.c 			ffs_release_dev(ctx->ffs_data);
ctx              1610 drivers/usb/gadget/function/f_fs.c 			ffs_data_put(ctx->ffs_data);
ctx              1613 drivers/usb/gadget/function/f_fs.c 		kfree(ctx);
ctx              1625 drivers/usb/gadget/function/f_fs.c 	struct ffs_sb_fill_data *ctx;
ctx              1627 drivers/usb/gadget/function/f_fs.c 	ctx = kzalloc(sizeof(struct ffs_sb_fill_data), GFP_KERNEL);
ctx              1628 drivers/usb/gadget/function/f_fs.c 	if (!ctx)
ctx              1631 drivers/usb/gadget/function/f_fs.c 	ctx->perms.mode = S_IFREG | 0600;
ctx              1632 drivers/usb/gadget/function/f_fs.c 	ctx->perms.uid = GLOBAL_ROOT_UID;
ctx              1633 drivers/usb/gadget/function/f_fs.c 	ctx->perms.gid = GLOBAL_ROOT_GID;
ctx              1634 drivers/usb/gadget/function/f_fs.c 	ctx->root_mode = S_IFDIR | 0500;
ctx              1635 drivers/usb/gadget/function/f_fs.c 	ctx->no_disconnect = false;
ctx              1637 drivers/usb/gadget/function/f_fs.c 	fc->fs_private = ctx;
ctx                14 drivers/usb/host/xhci-dbg.c 		struct xhci_container_ctx *ctx)
ctx                16 drivers/usb/host/xhci-dbg.c 	struct xhci_slot_ctx *slot_ctx = xhci_get_slot_ctx(xhci, ctx);
ctx               100 drivers/usb/host/xhci-dbgcap.c 	info			= (struct dbc_info_context *)dbc->ctx->bytes;
ctx               124 drivers/usb/host/xhci-dbgcap.c 	xhci_write_64(xhci, dbc->ctx->dma, &dbc->regs->dccp);
ctx               399 drivers/usb/host/xhci-dbgcap.c 	dbc->ctx = xhci_alloc_container_ctx(xhci, XHCI_CTX_TYPE_DEVICE, flags);
ctx               400 drivers/usb/host/xhci-dbgcap.c 	if (!dbc->ctx)
ctx               429 drivers/usb/host/xhci-dbgcap.c 	xhci_free_container_ctx(xhci, dbc->ctx);
ctx               430 drivers/usb/host/xhci-dbgcap.c 	dbc->ctx = NULL;
ctx               462 drivers/usb/host/xhci-dbgcap.c 	xhci_free_container_ctx(xhci, dbc->ctx);
ctx               463 drivers/usb/host/xhci-dbgcap.c 	dbc->ctx = NULL;
ctx               142 drivers/usb/host/xhci-dbgcap.h 	struct xhci_container_ctx	*ctx;
ctx               157 drivers/usb/host/xhci-dbgcap.h 	((struct xhci_ep_ctx *)((d)->ctx->bytes + DBC_CONTEXT_SIZE))
ctx               159 drivers/usb/host/xhci-dbgcap.h 	((struct xhci_ep_ctx *)((d)->ctx->bytes + DBC_CONTEXT_SIZE * 2))
ctx               463 drivers/usb/host/xhci-mem.c 	struct xhci_container_ctx *ctx;
ctx               469 drivers/usb/host/xhci-mem.c 	ctx = kzalloc_node(sizeof(*ctx), flags, dev_to_node(dev));
ctx               470 drivers/usb/host/xhci-mem.c 	if (!ctx)
ctx               473 drivers/usb/host/xhci-mem.c 	ctx->type = type;
ctx               474 drivers/usb/host/xhci-mem.c 	ctx->size = HCC_64BYTE_CONTEXT(xhci->hcc_params) ? 2048 : 1024;
ctx               476 drivers/usb/host/xhci-mem.c 		ctx->size += CTX_SIZE(xhci->hcc_params);
ctx               478 drivers/usb/host/xhci-mem.c 	ctx->bytes = dma_pool_zalloc(xhci->device_pool, flags, &ctx->dma);
ctx               479 drivers/usb/host/xhci-mem.c 	if (!ctx->bytes) {
ctx               480 drivers/usb/host/xhci-mem.c 		kfree(ctx);
ctx               483 drivers/usb/host/xhci-mem.c 	return ctx;
ctx               487 drivers/usb/host/xhci-mem.c 			     struct xhci_container_ctx *ctx)
ctx               489 drivers/usb/host/xhci-mem.c 	if (!ctx)
ctx               491 drivers/usb/host/xhci-mem.c 	dma_pool_free(xhci->device_pool, ctx->bytes, ctx->dma);
ctx               492 drivers/usb/host/xhci-mem.c 	kfree(ctx);
ctx               496 drivers/usb/host/xhci-mem.c 					      struct xhci_container_ctx *ctx)
ctx               498 drivers/usb/host/xhci-mem.c 	if (ctx->type != XHCI_CTX_TYPE_INPUT)
ctx               501 drivers/usb/host/xhci-mem.c 	return (struct xhci_input_control_ctx *)ctx->bytes;
ctx               505 drivers/usb/host/xhci-mem.c 					struct xhci_container_ctx *ctx)
ctx               507 drivers/usb/host/xhci-mem.c 	if (ctx->type == XHCI_CTX_TYPE_DEVICE)
ctx               508 drivers/usb/host/xhci-mem.c 		return (struct xhci_slot_ctx *)ctx->bytes;
ctx               511 drivers/usb/host/xhci-mem.c 		(ctx->bytes + CTX_SIZE(xhci->hcc_params));
ctx               515 drivers/usb/host/xhci-mem.c 				    struct xhci_container_ctx *ctx,
ctx               520 drivers/usb/host/xhci-mem.c 	if (ctx->type == XHCI_CTX_TYPE_INPUT)
ctx               524 drivers/usb/host/xhci-mem.c 		(ctx->bytes + (ep_index * CTX_SIZE(xhci->hcc_params)));
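
The xhci-mem.c accessors above all decode one memory layout: a container is a flat ctx->bytes buffer of CTX_SIZE(hcc_params)-sized slots, where an input container prepends an input-control context (shifting everything by one slot), the slot context comes next, and endpoint contexts follow one per endpoint index. A sketch of the offset arithmetic they share (xhci_ctx_entry_sketch is illustrative):

    static void *xhci_ctx_entry_sketch(struct xhci_hcd *xhci,
                                       struct xhci_container_ctx *ctx,
                                       unsigned int ep_index)
    {
        u8 *p = ctx->bytes;

        if (ctx->type == XHCI_CTX_TYPE_INPUT)
            p += CTX_SIZE(xhci->hcc_params);  /* skip input control ctx */

        /* slot ctx is entry 0, endpoint ep_index is entry ep_index + 1 */
        return p + (ep_index + 1) * CTX_SIZE(xhci->hcc_params);
    }
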
ctx              1118 drivers/usb/host/xhci-ring.c 			struct xhci_stream_ctx *ctx =
ctx              1120 drivers/usb/host/xhci-ring.c 			deq = le64_to_cpu(ctx->stream_ring) & SCTX_DEQ_MASK;
ctx                76 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_hcd *xhci, struct xhci_container_ctx *ctx,
ctx                78 drivers/usb/host/xhci-trace.h 	TP_ARGS(xhci, ctx, ep_num),
ctx                88 drivers/usb/host/xhci-trace.h 			((ctx->type == XHCI_CTX_TYPE_INPUT) + ep_num + 1))
ctx                95 drivers/usb/host/xhci-trace.h 		__entry->ctx_type = ctx->type;
ctx                96 drivers/usb/host/xhci-trace.h 		__entry->ctx_dma = ctx->dma;
ctx                97 drivers/usb/host/xhci-trace.h 		__entry->ctx_va = ctx->bytes;
ctx               100 drivers/usb/host/xhci-trace.h 		memcpy(__get_dynamic_array(ctx_data), ctx->bytes,
ctx               102 drivers/usb/host/xhci-trace.h 			((ctx->type == XHCI_CTX_TYPE_INPUT) + ep_num + 1));
ctx               111 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_hcd *xhci, struct xhci_container_ctx *ctx,
ctx               113 drivers/usb/host/xhci-trace.h 	TP_ARGS(xhci, ctx, ep_num)
ctx               319 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               320 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx),
ctx               328 drivers/usb/host/xhci-trace.h 		__entry->info = le32_to_cpu(ctx->ep_info);
ctx               329 drivers/usb/host/xhci-trace.h 		__entry->info2 = le32_to_cpu(ctx->ep_info2);
ctx               330 drivers/usb/host/xhci-trace.h 		__entry->deq = le64_to_cpu(ctx->deq);
ctx               331 drivers/usb/host/xhci-trace.h 		__entry->tx_info = le32_to_cpu(ctx->tx_info);
ctx               339 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               340 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               344 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               345 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               349 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               350 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               354 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               355 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               359 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_ep_ctx *ctx),
ctx               360 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               364 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               365 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx),
ctx               373 drivers/usb/host/xhci-trace.h 		__entry->info = le32_to_cpu(ctx->dev_info);
ctx               374 drivers/usb/host/xhci-trace.h 		__entry->info2 = le32_to_cpu(ctx->dev_info2);
ctx               375 drivers/usb/host/xhci-trace.h 		__entry->tt_info = le64_to_cpu(ctx->tt_info);
ctx               376 drivers/usb/host/xhci-trace.h 		__entry->state = le32_to_cpu(ctx->dev_state);
ctx               385 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               386 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               390 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               391 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               395 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               396 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               400 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               401 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               405 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               406 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               410 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               411 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               415 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               416 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               420 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               421 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
ctx               425 drivers/usb/host/xhci-trace.h 	TP_PROTO(struct xhci_slot_ctx *ctx),
ctx               426 drivers/usb/host/xhci-trace.h 	TP_ARGS(ctx)
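
The long run of identical TP_PROTO/TP_ARGS pairs above is not duplication in the source: each pair is a one-line DEFINE_EVENT() instantiating a shared DECLARE_EVENT_CLASS, so a dozen named trace events reuse one capture/format definition. A trimmed sketch of that shape (class and event names are illustrative, and only two of the four slot-context words are captured):

    DECLARE_EVENT_CLASS(xhci_log_slot_ctx_sketch,
        TP_PROTO(struct xhci_slot_ctx *ctx),
        TP_ARGS(ctx),
        TP_STRUCT__entry(
            __field(u32, info)
            __field(u32, state)
        ),
        TP_fast_assign(
            __entry->info = le32_to_cpu(ctx->dev_info);
            __entry->state = le32_to_cpu(ctx->dev_state);
        ),
        TP_printk("info %#x state %#x", __entry->info, __entry->state)
    );

    DEFINE_EVENT(xhci_log_slot_ctx_sketch, xhci_sketch_slot_event,
        TP_PROTO(struct xhci_slot_ctx *ctx),
        TP_ARGS(ctx)
    );
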
ctx               725 drivers/usb/host/xhci.h #define GET_EP_CTX_STATE(ctx)	(le32_to_cpu((ctx)->ep_info) & EP_STATE_MASK)
ctx              1973 drivers/usb/host/xhci.h 		struct xhci_container_ctx *ctx);
ctx              2051 drivers/usb/host/xhci.h 		struct xhci_container_ctx *ctx);
ctx              2163 drivers/usb/host/xhci.h struct xhci_input_control_ctx *xhci_get_input_control_ctx(struct xhci_container_ctx *ctx);
ctx              2164 drivers/usb/host/xhci.h struct xhci_slot_ctx *xhci_get_slot_ctx(struct xhci_hcd *xhci, struct xhci_container_ctx *ctx);
ctx              2165 drivers/usb/host/xhci.h struct xhci_ep_ctx *xhci_get_ep_ctx(struct xhci_hcd *xhci, struct xhci_container_ctx *ctx, unsigned int ep_index);
ctx              1089 drivers/usb/misc/usbtest.c 	struct ctrl_ctx		*ctx = urb->context;
ctx              1098 drivers/usb/misc/usbtest.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx              1099 drivers/usb/misc/usbtest.c 	ctx->count--;
ctx              1100 drivers/usb/misc/usbtest.c 	ctx->pending--;
ctx              1107 drivers/usb/misc/usbtest.c 		if ((subcase->number - ctx->last) != 1) {
ctx              1108 drivers/usb/misc/usbtest.c 			ERROR(ctx->dev,
ctx              1110 drivers/usb/misc/usbtest.c 				subcase->number, ctx->last);
ctx              1112 drivers/usb/misc/usbtest.c 			ctx->last = subcase->number;
ctx              1116 drivers/usb/misc/usbtest.c 	ctx->last = subcase->number;
ctx              1134 drivers/usb/misc/usbtest.c 			ERROR(ctx->dev, "subtest %d error, status %d\n",
ctx              1141 drivers/usb/misc/usbtest.c 		if (ctx->status == 0) {
ctx              1144 drivers/usb/misc/usbtest.c 			ctx->status = status;
ctx              1145 drivers/usb/misc/usbtest.c 			ERROR(ctx->dev, "control queue %02x.%02x, err %d, "
ctx              1148 drivers/usb/misc/usbtest.c 					status, ctx->count, subcase->number,
ctx              1157 drivers/usb/misc/usbtest.c 			for (i = 1; i < ctx->param->sglen; i++) {
ctx              1158 drivers/usb/misc/usbtest.c 				struct urb *u = ctx->urb[
ctx              1160 drivers/usb/misc/usbtest.c 							% ctx->param->sglen];
ctx              1164 drivers/usb/misc/usbtest.c 				spin_unlock(&ctx->lock);
ctx              1166 drivers/usb/misc/usbtest.c 				spin_lock(&ctx->lock);
ctx              1173 drivers/usb/misc/usbtest.c 					ERROR(ctx->dev, "urb unlink --> %d\n",
ctx              1177 drivers/usb/misc/usbtest.c 			status = ctx->status;
ctx              1182 drivers/usb/misc/usbtest.c 	if ((status == 0) && (ctx->pending < ctx->count)) {
ctx              1185 drivers/usb/misc/usbtest.c 			ERROR(ctx->dev,
ctx              1190 drivers/usb/misc/usbtest.c 			ctx->pending++;
ctx              1195 drivers/usb/misc/usbtest.c 	if (ctx->pending == 0)
ctx              1196 drivers/usb/misc/usbtest.c 		complete(&ctx->complete);
ctx              1197 drivers/usb/misc/usbtest.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx              1537 drivers/usb/misc/usbtest.c 	struct queued_ctx	*ctx = urb->context;
ctx              1539 drivers/usb/misc/usbtest.c 	if (ctx->status)
ctx              1541 drivers/usb/misc/usbtest.c 	if (urb == ctx->urbs[ctx->num - 4] || urb == ctx->urbs[ctx->num - 2]) {
ctx              1547 drivers/usb/misc/usbtest.c 		ctx->status = status;
ctx              1550 drivers/usb/misc/usbtest.c 	if (atomic_dec_and_test(&ctx->pending))
ctx              1551 drivers/usb/misc/usbtest.c 		complete(&ctx->complete);
ctx              1557 drivers/usb/misc/usbtest.c 	struct queued_ctx	ctx;
ctx              1564 drivers/usb/misc/usbtest.c 	init_completion(&ctx.complete);
ctx              1565 drivers/usb/misc/usbtest.c 	atomic_set(&ctx.pending, 1);	/* One more than the actual value */
ctx              1566 drivers/usb/misc/usbtest.c 	ctx.num = num;
ctx              1567 drivers/usb/misc/usbtest.c 	ctx.status = 0;
ctx              1575 drivers/usb/misc/usbtest.c 	ctx.urbs = kcalloc(num, sizeof(struct urb *), GFP_KERNEL);
ctx              1576 drivers/usb/misc/usbtest.c 	if (!ctx.urbs)
ctx              1579 drivers/usb/misc/usbtest.c 		ctx.urbs[i] = usb_alloc_urb(0, GFP_KERNEL);
ctx              1580 drivers/usb/misc/usbtest.c 		if (!ctx.urbs[i])
ctx              1582 drivers/usb/misc/usbtest.c 		usb_fill_bulk_urb(ctx.urbs[i], udev, pipe, buf, size,
ctx              1583 drivers/usb/misc/usbtest.c 				unlink_queued_callback, &ctx);
ctx              1584 drivers/usb/misc/usbtest.c 		ctx.urbs[i]->transfer_dma = buf_dma;
ctx              1585 drivers/usb/misc/usbtest.c 		ctx.urbs[i]->transfer_flags = URB_NO_TRANSFER_DMA_MAP;
ctx              1587 drivers/usb/misc/usbtest.c 		if (usb_pipeout(ctx.urbs[i]->pipe)) {
ctx              1588 drivers/usb/misc/usbtest.c 			simple_fill_buf(ctx.urbs[i]);
ctx              1589 drivers/usb/misc/usbtest.c 			ctx.urbs[i]->transfer_flags |= URB_ZERO_PACKET;
ctx              1595 drivers/usb/misc/usbtest.c 		atomic_inc(&ctx.pending);
ctx              1596 drivers/usb/misc/usbtest.c 		retval = usb_submit_urb(ctx.urbs[i], GFP_KERNEL);
ctx              1600 drivers/usb/misc/usbtest.c 			atomic_dec(&ctx.pending);
ctx              1601 drivers/usb/misc/usbtest.c 			ctx.status = retval;
ctx              1606 drivers/usb/misc/usbtest.c 		usb_unlink_urb(ctx.urbs[num - 4]);
ctx              1607 drivers/usb/misc/usbtest.c 		usb_unlink_urb(ctx.urbs[num - 2]);
ctx              1610 drivers/usb/misc/usbtest.c 			usb_unlink_urb(ctx.urbs[i]);
ctx              1613 drivers/usb/misc/usbtest.c 	if (atomic_dec_and_test(&ctx.pending))		/* The extra count */
ctx              1614 drivers/usb/misc/usbtest.c 		complete(&ctx.complete);
ctx              1615 drivers/usb/misc/usbtest.c 	wait_for_completion(&ctx.complete);
ctx              1616 drivers/usb/misc/usbtest.c 	retval = ctx.status;
ctx              1620 drivers/usb/misc/usbtest.c 		usb_free_urb(ctx.urbs[i]);
ctx              1621 drivers/usb/misc/usbtest.c 	kfree(ctx.urbs);
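
unlink_queued() in the usbtest.c entries uses a counting idiom that is easy to miss: ctx.pending starts at 1, "one more than the actual value", so the completion cannot fire while URBs are still being submitted; the submitter drops the extra count only after the loop and then waits. A sketch of just that mechanism (the caller is assumed to have run init_completion(done); names are illustrative):

    static int submit_all_sketch(struct urb **urbs, int num,
                                 atomic_t *pending, struct completion *done)
    {
        int i, retval = 0;

        atomic_set(pending, 1);        /* the extra count */
        for (i = 0; i < num; i++) {
            atomic_inc(pending);
            retval = usb_submit_urb(urbs[i], GFP_KERNEL);
            if (retval) {
                atomic_dec(pending);   /* this URB will never complete */
                while (--i >= 0)
                    usb_unlink_urb(urbs[i]);
                break;
            }
        }

        if (atomic_dec_and_test(pending))  /* drop the extra count */
            complete(done);
        wait_for_completion(done);
        return retval;
    }
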
ctx              1928 drivers/usb/misc/usbtest.c 	struct transfer_context	*ctx = urb->context;
ctx              1931 drivers/usb/misc/usbtest.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx              1932 drivers/usb/misc/usbtest.c 	ctx->count--;
ctx              1934 drivers/usb/misc/usbtest.c 	ctx->packet_count += urb->number_of_packets;
ctx              1936 drivers/usb/misc/usbtest.c 		ctx->errors += urb->error_count;
ctx              1938 drivers/usb/misc/usbtest.c 		ctx->errors += (ctx->is_iso ? urb->number_of_packets : 1);
ctx              1940 drivers/usb/misc/usbtest.c 		ctx->errors++;
ctx              1941 drivers/usb/misc/usbtest.c 	else if (check_guard_bytes(ctx->dev, urb) != 0)
ctx              1942 drivers/usb/misc/usbtest.c 		ctx->errors++;
ctx              1944 drivers/usb/misc/usbtest.c 	if (urb->status == 0 && ctx->count > (ctx->pending - 1)
ctx              1945 drivers/usb/misc/usbtest.c 			&& !ctx->submit_error) {
ctx              1951 drivers/usb/misc/usbtest.c 			dev_err(&ctx->dev->intf->dev,
ctx              1957 drivers/usb/misc/usbtest.c 			ctx->submit_error = 1;
ctx              1962 drivers/usb/misc/usbtest.c 	ctx->pending--;
ctx              1963 drivers/usb/misc/usbtest.c 	if (ctx->pending == 0) {
ctx              1964 drivers/usb/misc/usbtest.c 		if (ctx->errors)
ctx              1965 drivers/usb/misc/usbtest.c 			dev_err(&ctx->dev->intf->dev,
ctx              1967 drivers/usb/misc/usbtest.c 				ctx->errors, ctx->packet_count);
ctx              1968 drivers/usb/misc/usbtest.c 		complete(&ctx->done);
ctx              1971 drivers/usb/misc/usbtest.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx              1725 drivers/usb/serial/keyspan.c 				      int dir, void *ctx, char *buf, int len,
ctx              1755 drivers/usb/serial/keyspan.c 				 buf, len, callback, ctx,
ctx              1761 drivers/usb/serial/keyspan.c 				  buf, len, callback, ctx);
ctx               720 drivers/usb/serial/sierra.c 					int dir, void *ctx, int len,
ctx               736 drivers/usb/serial/sierra.c 			buf, len, callback, ctx);
ctx               464 drivers/usb/serial/usb_wwan.c 				      int dir, void *ctx, char *buf, int len,
ctx               477 drivers/usb/serial/usb_wwan.c 			  buf, len, callback, ctx);
ctx                33 drivers/vfio/pci/vfio_pci_intrs.c 		eventfd_signal(vdev->ctx[0].trigger, 1);
ctx                52 drivers/vfio/pci/vfio_pci_intrs.c 	} else if (!vdev->ctx[0].masked) {
ctx                62 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].masked = true;
ctx                90 drivers/vfio/pci/vfio_pci_intrs.c 	} else if (vdev->ctx[0].masked && !vdev->virq_disabled) {
ctx               102 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].masked = (ret > 0);
ctx               126 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].masked = true;
ctx               128 drivers/vfio/pci/vfio_pci_intrs.c 	} else if (!vdev->ctx[0].masked &&  /* may be shared */
ctx               130 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].masked = true;
ctx               150 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx = kzalloc(sizeof(struct vfio_pci_irq_ctx), GFP_KERNEL);
ctx               151 drivers/vfio/pci/vfio_pci_intrs.c 	if (!vdev->ctx)
ctx               162 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[0].masked = vdev->virq_disabled;
ctx               164 drivers/vfio/pci/vfio_pci_intrs.c 		pci_intx(vdev->pdev, !vdev->ctx[0].masked);
ctx               179 drivers/vfio/pci/vfio_pci_intrs.c 	if (vdev->ctx[0].trigger) {
ctx               181 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[0].name);
ctx               182 drivers/vfio/pci/vfio_pci_intrs.c 		eventfd_ctx_put(vdev->ctx[0].trigger);
ctx               183 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].trigger = NULL;
ctx               189 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[0].name = kasprintf(GFP_KERNEL, "vfio-intx(%s)",
ctx               191 drivers/vfio/pci/vfio_pci_intrs.c 	if (!vdev->ctx[0].name)
ctx               196 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[0].name);
ctx               200 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[0].trigger = trigger;
ctx               206 drivers/vfio/pci/vfio_pci_intrs.c 			  irqflags, vdev->ctx[0].name, vdev);
ctx               208 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[0].trigger = NULL;
ctx               209 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[0].name);
ctx               219 drivers/vfio/pci/vfio_pci_intrs.c 	if (!vdev->pci_2_3 && vdev->ctx[0].masked)
ctx               228 drivers/vfio/pci/vfio_pci_intrs.c 	vfio_virqfd_disable(&vdev->ctx[0].unmask);
ctx               229 drivers/vfio/pci/vfio_pci_intrs.c 	vfio_virqfd_disable(&vdev->ctx[0].mask);
ctx               233 drivers/vfio/pci/vfio_pci_intrs.c 	kfree(vdev->ctx);
ctx               256 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx = kcalloc(nvec, sizeof(struct vfio_pci_irq_ctx), GFP_KERNEL);
ctx               257 drivers/vfio/pci/vfio_pci_intrs.c 	if (!vdev->ctx)
ctx               265 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx);
ctx               296 drivers/vfio/pci/vfio_pci_intrs.c 	if (vdev->ctx[vector].trigger) {
ctx               297 drivers/vfio/pci/vfio_pci_intrs.c 		irq_bypass_unregister_producer(&vdev->ctx[vector].producer);
ctx               298 drivers/vfio/pci/vfio_pci_intrs.c 		free_irq(irq, vdev->ctx[vector].trigger);
ctx               299 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[vector].name);
ctx               300 drivers/vfio/pci/vfio_pci_intrs.c 		eventfd_ctx_put(vdev->ctx[vector].trigger);
ctx               301 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[vector].trigger = NULL;
ctx               307 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[vector].name = kasprintf(GFP_KERNEL, "vfio-msi%s[%d](%s)",
ctx               310 drivers/vfio/pci/vfio_pci_intrs.c 	if (!vdev->ctx[vector].name)
ctx               315 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[vector].name);
ctx               334 drivers/vfio/pci/vfio_pci_intrs.c 			  vdev->ctx[vector].name, trigger);
ctx               336 drivers/vfio/pci/vfio_pci_intrs.c 		kfree(vdev->ctx[vector].name);
ctx               341 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[vector].producer.token = trigger;
ctx               342 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[vector].producer.irq = irq;
ctx               343 drivers/vfio/pci/vfio_pci_intrs.c 	ret = irq_bypass_register_producer(&vdev->ctx[vector].producer);
ctx               347 drivers/vfio/pci/vfio_pci_intrs.c 		vdev->ctx[vector].producer.token, ret);
ctx               349 drivers/vfio/pci/vfio_pci_intrs.c 	vdev->ctx[vector].trigger = trigger;
ctx               381 drivers/vfio/pci/vfio_pci_intrs.c 		vfio_virqfd_disable(&vdev->ctx[i].unmask);
ctx               382 drivers/vfio/pci/vfio_pci_intrs.c 		vfio_virqfd_disable(&vdev->ctx[i].mask);
ctx               398 drivers/vfio/pci/vfio_pci_intrs.c 	kfree(vdev->ctx);
ctx               423 drivers/vfio/pci/vfio_pci_intrs.c 						  &vdev->ctx[0].unmask, fd);
ctx               425 drivers/vfio/pci/vfio_pci_intrs.c 		vfio_virqfd_disable(&vdev->ctx[0].unmask);
ctx               532 drivers/vfio/pci/vfio_pci_intrs.c 		if (!vdev->ctx[i].trigger)
ctx               535 drivers/vfio/pci/vfio_pci_intrs.c 			eventfd_signal(vdev->ctx[i].trigger, 1);
ctx               539 drivers/vfio/pci/vfio_pci_intrs.c 				eventfd_signal(vdev->ctx[i].trigger, 1);
ctx               545 drivers/vfio/pci/vfio_pci_intrs.c static int vfio_pci_set_ctx_trigger_single(struct eventfd_ctx **ctx,
ctx               551 drivers/vfio/pci/vfio_pci_intrs.c 		if (*ctx) {
ctx               553 drivers/vfio/pci/vfio_pci_intrs.c 				eventfd_signal(*ctx, 1);
ctx               555 drivers/vfio/pci/vfio_pci_intrs.c 				eventfd_ctx_put(*ctx);
ctx               556 drivers/vfio/pci/vfio_pci_intrs.c 				*ctx = NULL;
ctx               567 drivers/vfio/pci/vfio_pci_intrs.c 		if (trigger && *ctx)
ctx               568 drivers/vfio/pci/vfio_pci_intrs.c 			eventfd_signal(*ctx, 1);
ctx               579 drivers/vfio/pci/vfio_pci_intrs.c 			if (*ctx)
ctx               580 drivers/vfio/pci/vfio_pci_intrs.c 				eventfd_ctx_put(*ctx);
ctx               581 drivers/vfio/pci/vfio_pci_intrs.c 			*ctx = NULL;
ctx               589 drivers/vfio/pci/vfio_pci_intrs.c 			if (*ctx)
ctx               590 drivers/vfio/pci/vfio_pci_intrs.c 				eventfd_ctx_put(*ctx);
ctx               592 drivers/vfio/pci/vfio_pci_intrs.c 			*ctx = efdctx;
ctx                96 drivers/vfio/pci/vfio_pci_private.h 	struct vfio_pci_irq_ctx	*ctx;
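
vfio_pci_set_ctx_trigger_single() in the entries above is the reference shape for managing an eventfd slot from userspace-controlled input: fd == -1 disarms (optionally signalling first for DATA_NONE), fd >= 0 swaps in a freshly acquired eventfd, and DATA_BOOL merely signals whatever is installed. A reduced sketch of the arm/disarm halves (locking against concurrent interrupt delivery, which the real code needs, is omitted):

    static int set_trigger_sketch(struct eventfd_ctx **ctx, int32_t fd)
    {
        if (fd == -1) {                 /* disarm */
            if (*ctx)
                eventfd_ctx_put(*ctx);
            *ctx = NULL;
            return 0;
        }
        if (fd >= 0) {                  /* (re)arm with a new eventfd */
            struct eventfd_ctx *efdctx = eventfd_ctx_fdget(fd);

            if (IS_ERR(efdctx))
                return PTR_ERR(efdctx);
            if (*ctx)
                eventfd_ctx_put(*ctx);
            *ctx = efdctx;
            return 0;
        }
        return -EINVAL;
    }
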
ctx               110 drivers/vfio/virqfd.c 	struct eventfd_ctx *ctx;
ctx               134 drivers/vfio/virqfd.c 	ctx = eventfd_ctx_fileget(irqfd.file);
ctx               135 drivers/vfio/virqfd.c 	if (IS_ERR(ctx)) {
ctx               136 drivers/vfio/virqfd.c 		ret = PTR_ERR(ctx);
ctx               140 drivers/vfio/virqfd.c 	virqfd->eventfd = ctx;
ctx               185 drivers/vfio/virqfd.c 	eventfd_ctx_put(ctx);
ctx               386 drivers/vhost/net.c 	struct vhost_net_ubuf_ref *ubufs = ubuf->ctx;
ctx               901 drivers/vhost/net.c 			ubuf->ctx = nvq->ubufs;
ctx              1589 drivers/vhost/vhost.c 	struct eventfd_ctx *ctx = NULL;
ctx              1660 drivers/vhost/vhost.c 		ctx = f.fd == -1 ? NULL : eventfd_ctx_fdget(f.fd);
ctx              1661 drivers/vhost/vhost.c 		if (IS_ERR(ctx)) {
ctx              1662 drivers/vhost/vhost.c 			r = PTR_ERR(ctx);
ctx              1665 drivers/vhost/vhost.c 		swap(ctx, vq->call_ctx);
ctx              1672 drivers/vhost/vhost.c 		ctx = f.fd == -1 ? NULL : eventfd_ctx_fdget(f.fd);
ctx              1673 drivers/vhost/vhost.c 		if (IS_ERR(ctx)) {
ctx              1674 drivers/vhost/vhost.c 			r = PTR_ERR(ctx);
ctx              1677 drivers/vhost/vhost.c 		swap(ctx, vq->error_ctx);
ctx              1705 drivers/vhost/vhost.c 	if (!IS_ERR_OR_NULL(ctx))
ctx              1706 drivers/vhost/vhost.c 		eventfd_ctx_put(ctx);
ctx              1751 drivers/vhost/vhost.c 	struct eventfd_ctx *ctx;
ctx              1797 drivers/vhost/vhost.c 		ctx = fd == -1 ? NULL : eventfd_ctx_fdget(fd);
ctx              1798 drivers/vhost/vhost.c 		if (IS_ERR(ctx)) {
ctx              1799 drivers/vhost/vhost.c 			r = PTR_ERR(ctx);
ctx              1802 drivers/vhost/vhost.c 		swap(ctx, d->log_ctx);
ctx              1808 drivers/vhost/vhost.c 		if (ctx)
ctx              1809 drivers/vhost/vhost.c 			eventfd_ctx_put(ctx);
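
The vhost.c entries rely on a neat use of swap(): the new eventfd context is exchanged into the vq/device slot while locks are held, leaving the old context in the local variable so its eventfd_ctx_put() can run after the critical section. A sketch of the idiom in isolation (replace_ctx_sketch is illustrative; the caller is assumed to hold the lock protecting *slot across the swap):

    static int replace_ctx_sketch(struct eventfd_ctx **slot, int fd)
    {
        struct eventfd_ctx *ctx;

        ctx = fd == -1 ? NULL : eventfd_ctx_fdget(fd);
        if (IS_ERR(ctx))
            return PTR_ERR(ctx);

        swap(ctx, *slot);               /* ctx now holds the old eventfd */

        if (!IS_ERR_OR_NULL(ctx))
            eventfd_ctx_put(ctx);       /* release outside the critical section */
        return 0;
    }
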
ctx               377 drivers/video/fbdev/hyperv_fb.c static void synthvid_receive(void *ctx)
ctx               379 drivers/video/fbdev/hyperv_fb.c 	struct hv_device *hdev = ctx;
ctx               116 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 	u32		ctx[DISPC_SZ_REGS / sizeof(u32)];
ctx               283 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 	dispc.ctx[DISPC_##reg / sizeof(u32)] = dispc_read_reg(DISPC_##reg)
ctx               285 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 	dispc_write_reg(DISPC_##reg, dispc.ctx[DISPC_##reg / sizeof(u32)])
ctx               148 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               155 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	if (ctx->pck_min >= 100000000) {
ctx               163 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx               164 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx               165 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dispc_cinfo.lck = lck;
ctx               166 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dispc_cinfo.pck = pck;
ctx               175 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               182 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	if (m_dispc > 1 && m_dispc % 2 != 0 && ctx->pck_min >= 100000000)
ctx               185 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc;
ctx               186 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc;
ctx               188 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	return dispc_div_calc(dispc, ctx->pck_min, ctx->pck_max,
ctx               189 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 			dpi_calc_dispc_cb, ctx);
ctx               197 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               199 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.n = n;
ctx               200 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.m = m;
ctx               201 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.fint = fint;
ctx               202 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->dsi_cinfo.clkdco = clkdco;
ctx               204 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	return dss_pll_hsdiv_calc(ctx->pll, clkdco,
ctx               205 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		ctx->pck_min, dss_feat_get_param_max(FEAT_PARAM_DSS_FCK),
ctx               206 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		dpi_calc_hsdiv_cb, ctx);
ctx               211 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx *ctx = data;
ctx               213 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->fck = fck;
ctx               215 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	return dispc_div_calc(fck, ctx->pck_min, ctx->pck_max,
ctx               216 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 			dpi_calc_dispc_cb, ctx);
ctx               220 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		struct dpi_clk_calc_ctx *ctx)
ctx               225 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	memset(ctx, 0, sizeof(*ctx));
ctx               226 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->pll = dpi->pll;
ctx               227 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->pck_min = pck - 1000;
ctx               228 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ctx->pck_max = pck + 1000;
ctx               233 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	clkin = clk_get_rate(ctx->pll->clkin);
ctx               235 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	return dss_pll_calc(ctx->pll, clkin,
ctx               237 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 			dpi_calc_pll_cb, ctx);
ctx               240 drivers/video/fbdev/omap2/omapfb/dss/dpi.c static bool dpi_dss_clk_calc(unsigned long pck, struct dpi_clk_calc_ctx *ctx)
ctx               254 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		memset(ctx, 0, sizeof(*ctx));
ctx               256 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 			ctx->pck_min = max(pck - 1000 * i * i * i, 0lu);
ctx               258 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 			ctx->pck_min = 0;
ctx               259 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		ctx->pck_max = pck + 1000 * i * i * i;
ctx               261 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		ok = dss_div_calc(pck, ctx->pck_min, dpi_calc_dss_cb, ctx);
ctx               275 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               279 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ok = dpi_dsi_clk_calc(dpi, pck_req, &ctx);
ctx               283 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	r = dss_pll_set_config(dpi->pll, &ctx.dsi_cinfo);
ctx               290 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	dpi->mgr_config.clock_info = ctx.dispc_cinfo;
ctx               292 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*fck = ctx.dsi_cinfo.clkout[HSDIV_DISPC];
ctx               293 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*lck_div = ctx.dispc_cinfo.lck_div;
ctx               294 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*pck_div = ctx.dispc_cinfo.pck_div;
ctx               302 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               306 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	ok = dpi_dss_clk_calc(pck_req, &ctx);
ctx               310 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	r = dss_set_fck_rate(ctx.fck);
ctx               314 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	dpi->mgr_config.clock_info = ctx.dispc_cinfo;
ctx               316 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*fck = ctx.fck;
ctx               317 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*lck_div = ctx.dispc_cinfo.lck_div;
ctx               318 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	*pck_div = ctx.dispc_cinfo.pck_div;
ctx               502 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	struct dpi_clk_calc_ctx ctx;
ctx               512 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		ok = dpi_dsi_clk_calc(dpi, timings->pixelclock, &ctx);
ctx               516 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		fck = ctx.dsi_cinfo.clkout[HSDIV_DISPC];
ctx               518 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		ok = dpi_dss_clk_calc(timings->pixelclock, &ctx);
ctx               522 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 		fck = ctx.fck;
ctx               525 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	lck_div = ctx.dispc_cinfo.lck_div;
ctx               526 drivers/video/fbdev/omap2/omapfb/dss/dpi.c 	pck_div = ctx.dispc_cinfo.pck_div;
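
The dpi.c entries outline a recurring OMAP DSS pattern that dsi.c below repeats: clock settings are found by nested enumerator/callback pairs (dss_pll_calc -> dpi_calc_pll_cb -> dss_pll_hsdiv_calc -> dpi_calc_hsdiv_cb -> dispc_div_calc -> dpi_calc_dispc_cb), all sharing one dpi_clk_calc_ctx that accumulates each level's choice; the first chain reaching a pixel clock inside [pck_min, pck_max] returns true up the stack, freezing what the context captured. A generic sketch of the contract (div_search and leaf_cb are illustrative, not DSS API):

    struct div_search_ctx {
        unsigned long pck_min, pck_max;
        unsigned long best_pck;         /* filled in by the leaf callback */
    };

    /* Leaf callback: accept the first rate inside the window. */
    static bool leaf_cb(unsigned long pck, void *data)
    {
        struct div_search_ctx *c = data;

        if (pck < c->pck_min || pck > c->pck_max)
            return false;               /* keep enumerating */
        c->best_pck = pck;
        return true;                    /* stop: a chain succeeded */
    }

    /* Enumerator: walks divider candidates until the callback accepts
     * one - the same true-stops-the-walk contract dispc_div_calc()
     * and dss_pll_calc() follow. */
    static bool div_search(unsigned long fck,
                           bool (*cb)(unsigned long pck, void *data),
                           void *data)
    {
        unsigned int d;

        for (d = 1; d <= 255; d++)
            if (cb(fck / d, data))
                return true;
        return false;
    }
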
ctx              4404 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4405 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct omap_video_timings *t = &ctx->dispc_vm;
ctx              4407 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx              4408 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx              4409 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.lck = lck;
ctx              4410 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.pck = pck;
ctx              4412 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	*t = *ctx->config->timings;
ctx              4414 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	t->x_res = ctx->config->timings->x_res;
ctx              4415 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	t->y_res = ctx->config->timings->y_res;
ctx              4425 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4427 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc;
ctx              4428 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc;
ctx              4430 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dispc_div_calc(dispc, ctx->req_pck_min, ctx->req_pck_max,
ctx              4431 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_cm_calc_dispc_cb, ctx);
ctx              4437 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4439 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.n = n;
ctx              4440 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.m = m;
ctx              4441 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.fint = fint;
ctx              4442 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.clkdco = clkdco;
ctx              4444 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dss_pll_hsdiv_calc(ctx->pll, clkdco, ctx->req_pck_min,
ctx              4446 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_cm_calc_hsdiv_cb, ctx);
ctx              4451 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		struct dsi_clk_calc_ctx *ctx)
ctx              4472 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	memset(ctx, 0, sizeof(*ctx));
ctx              4473 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsidev = dsi->pdev;
ctx              4474 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->pll = &dsi->pll;
ctx              4475 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->config = cfg;
ctx              4476 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_min = pck;
ctx              4477 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_nom = pck;
ctx              4478 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_max = pck * 3 / 2;
ctx              4483 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dss_pll_calc(ctx->pll, clkin,
ctx              4485 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_cm_calc_pll_cb, ctx);
ctx              4488 drivers/video/fbdev/omap2/omapfb/dss/dsi.c static bool dsi_vm_calc_blanking(struct dsi_clk_calc_ctx *ctx)
ctx              4490 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_data *dsi = dsi_get_dsidrv_data(ctx->dsidev);
ctx              4491 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	const struct omap_dss_dsi_config *cfg = ctx->config;
ctx              4494 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	unsigned long hsclk = ctx->dsi_cinfo.clkdco / 4;
ctx              4511 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	req_pck_min = ctx->req_pck_min;
ctx              4512 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	req_pck_max = ctx->req_pck_max;
ctx              4513 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	req_pck_nom = ctx->req_pck_nom;
ctx              4515 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dispc_pck = ctx->dispc_cinfo.pck;
ctx              4579 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi_vm = &ctx->dsi_vm;
ctx              4643 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dispc_vm = &ctx->dispc_vm;
ctx              4689 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4691 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx              4692 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx              4693 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.lck = lck;
ctx              4694 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dispc_cinfo.pck = pck;
ctx              4696 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	if (dsi_vm_calc_blanking(ctx) == false)
ctx              4700 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	print_dispc_vm("dispc", &ctx->dispc_vm);
ctx              4701 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	print_dsi_vm("dsi  ", &ctx->dsi_vm);
ctx              4702 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	print_dispc_vm("req  ", ctx->config->timings);
ctx              4703 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	print_dsi_dispc_vm("act  ", &ctx->dsi_vm);
ctx              4712 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4715 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.mX[HSDIV_DISPC] = m_dispc;
ctx              4716 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.clkout[HSDIV_DISPC] = dispc;
ctx              4723 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	if (ctx->config->trans_mode == OMAP_DSS_DSI_BURST_MODE)
ctx              4724 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		pck_max = ctx->req_pck_max + 10000000;
ctx              4726 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		pck_max = ctx->req_pck_max;
ctx              4728 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dispc_div_calc(dispc, ctx->req_pck_min, pck_max,
ctx              4729 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_vm_calc_dispc_cb, ctx);
ctx              4735 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx *ctx = data;
ctx              4737 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.n = n;
ctx              4738 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.m = m;
ctx              4739 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.fint = fint;
ctx              4740 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsi_cinfo.clkdco = clkdco;
ctx              4742 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dss_pll_hsdiv_calc(ctx->pll, clkdco, ctx->req_pck_min,
ctx              4744 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_vm_calc_hsdiv_cb, ctx);
ctx              4749 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		struct dsi_clk_calc_ctx *ctx)
ctx              4761 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	memset(ctx, 0, sizeof(*ctx));
ctx              4762 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->dsidev = dsi->pdev;
ctx              4763 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->pll = &dsi->pll;
ctx              4764 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->config = cfg;
ctx              4767 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_min = t->pixelclock - 1000;
ctx              4768 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_nom = t->pixelclock;
ctx              4769 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	ctx->req_pck_max = t->pixelclock + 1000;
ctx              4771 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	byteclk_min = div64_u64((u64)ctx->req_pck_min * bitspp, ndl * 8);
ctx              4778 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		byteclk_max = div64_u64((u64)ctx->req_pck_max * bitspp,
ctx              4784 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	return dss_pll_calc(ctx->pll, clkin,
ctx              4786 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			dsi_vm_calc_pll_cb, ctx);
ctx              4794 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	struct dsi_clk_calc_ctx ctx;
ctx              4804 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		ok = dsi_vm_calc(dsi, config, &ctx);
ctx              4806 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		ok = dsi_cm_calc(dsi, config, &ctx);
ctx              4814 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi_pll_calc_dsi_fck(&ctx.dsi_cinfo);
ctx              4816 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	r = dsi_lp_clock_calc(ctx.dsi_cinfo.clkout[HSDIV_DSI],
ctx              4823 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi->user_dsi_cinfo = ctx.dsi_cinfo;
ctx              4824 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi->user_dispc_cinfo = ctx.dispc_cinfo;
ctx              4826 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi->timings = ctx.dispc_vm;
ctx              4827 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 	dsi->vm_timings = ctx.dsi_vm;
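The dsi.c entries above all belong to one pattern: a single struct dsi_clk_calc_ctx is zeroed, seeded with the acceptable pixel-clock window, and then threaded through a chain of search callbacks (dss_pll_calc -> dsi_*_calc_pll_cb -> dss_pll_hsdiv_calc -> dsi_*_calc_hsdiv_cb -> dispc_div_calc -> dsi_*_calc_dispc_cb). Each level records its candidate into the ctx before descending, so when the innermost level accepts, the ctx already holds the whole clock tree. Below is a minimal self-contained userspace model of that idea; the names, rates, and two-level depth are all invented for illustration, it is a sketch of the pattern, not the OMAP code.

/*
 * Hedged sketch: one context struct threaded through nested divider-search
 * callbacks; each level records its candidate, the innermost returns true
 * to unwind the whole search with the winning configuration in the ctx.
 */
#include <stdbool.h>
#include <stdio.h>

struct clk_calc_ctx {
	unsigned long req_pck_min, req_pck_max;
	unsigned long fck, pck;
	unsigned int pck_div;
};

/* innermost level: accept the first pixel clock inside the window */
static bool pck_div_cb(unsigned long fck, unsigned int div, void *data)
{
	struct clk_calc_ctx *ctx = data;
	unsigned long pck = fck / div;

	if (pck < ctx->req_pck_min || pck > ctx->req_pck_max)
		return false;
	ctx->pck_div = div;
	ctx->pck = pck;
	return true;		/* stop the whole search */
}

/* middle level: record the functional clock, then try pixel dividers */
static bool fck_cb(unsigned long fck, void *data)
{
	struct clk_calc_ctx *ctx = data;

	ctx->fck = fck;
	for (unsigned int div = 1; div <= 16; div++)
		if (pck_div_cb(fck, div, data))
			return true;
	return false;
}

int main(void)
{
	struct clk_calc_ctx ctx = {
		.req_pck_min = 71000000, .req_pck_max = 74000000,
	};

	/* outer level: walk candidate fck rates, highest first */
	for (unsigned long fck = 600000000; fck >= 100000000; fck -= 50000000)
		if (fck_cb(fck, &ctx))
			break;

	printf("fck=%lu pck=%lu (div %u)\n", ctx.fck, ctx.pck, ctx.pck_div);
	return 0;
}

This is why the entries above can simply copy ctx.dsi_cinfo and ctx.dispc_cinfo into the driver state once a calc succeeds: the search leaves its answer behind in the context.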
ctx                89 drivers/video/fbdev/omap2/omapfb/dss/dss.c 	u32		ctx[DSS_SZ_REGS / sizeof(u32)];
ctx               124 drivers/video/fbdev/omap2/omapfb/dss/dss.c 	dss.ctx[(DSS_##reg).idx / sizeof(u32)] = dss_read_reg(DSS_##reg)
ctx               126 drivers/video/fbdev/omap2/omapfb/dss/dss.c 	dss_write_reg(DSS_##reg, dss.ctx[(DSS_##reg).idx / sizeof(u32)])
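The three dss.c lines show a register-context array plus token-pasting save/restore macros: SR() reads a register into dss.ctx[] indexed by the register's byte offset, RR() writes it back after a context loss. A self-contained model of the same trick is sketched below; the register names, offsets, and the hw[] stand-in for MMIO are all invented.

/*
 * Hedged sketch of the dss.c save/restore macros: ctx[] shadows the
 * register file, and the macro token-pastes the register name into an
 * offset so SR(CONTROL)/RR(CONTROL) stay one-liners.
 */
#include <stdint.h>
#include <stdio.h>

struct dss_reg { uint16_t idx; };

#define REG_CONTROL ((struct dss_reg){ .idx = 0x40 })
#define SZ_REGS 0x200

static uint32_t hw[SZ_REGS / 4];	/* stand-in for the real MMIO */
static uint32_t ctx[SZ_REGS / 4];	/* saved context */

static uint32_t read_reg(struct dss_reg r)           { return hw[r.idx / 4]; }
static void write_reg(struct dss_reg r, uint32_t v)  { hw[r.idx / 4] = v; }

/* mirror of the kernel's SR()/RR() macros */
#define SR(reg) (ctx[(REG_##reg).idx / 4] = read_reg(REG_##reg))
#define RR(reg) write_reg(REG_##reg, ctx[(REG_##reg).idx / 4])

int main(void)
{
	write_reg(REG_CONTROL, 0xdeadbeef);
	SR(CONTROL);			/* save before power-off */
	write_reg(REG_CONTROL, 0);	/* "context lost" */
	RR(CONTROL);			/* restore on resume */
	printf("CONTROL=%#x\n", (unsigned)read_reg(REG_CONTROL));
	return 0;
}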
ctx                49 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	struct sdi_clk_calc_ctx *ctx = data;
ctx                51 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	ctx->dispc_cinfo.lck_div = lckd;
ctx                52 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	ctx->dispc_cinfo.pck_div = pckd;
ctx                53 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	ctx->dispc_cinfo.lck = lck;
ctx                54 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	ctx->dispc_cinfo.pck = pck;
ctx                61 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	struct sdi_clk_calc_ctx *ctx = data;
ctx                63 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	ctx->fck = fck;
ctx                65 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	return dispc_div_calc(fck, ctx->pck_min, ctx->pck_max,
ctx                66 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 			dpi_calc_dispc_cb, ctx);
ctx                74 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 	struct sdi_clk_calc_ctx ctx;
ctx                86 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 		memset(&ctx, 0, sizeof(ctx));
ctx                88 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 			ctx.pck_min = max(pclk - 1000 * i * i * i, 0lu);
ctx                90 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 			ctx.pck_min = 0;
ctx                91 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 		ctx.pck_max = pclk + 1000 * i * i * i;
ctx                93 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 		ok = dss_div_calc(pclk, ctx.pck_min, dpi_calc_dss_cb, &ctx);
ctx                95 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 			*fck = ctx.fck;
ctx                96 drivers/video/fbdev/omap2/omapfb/dss/sdi.c 			*dispc_cinfo = ctx.dispc_cinfo;
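The sdi.c lines show another use of a clock-calc ctx, with a twist visible in the pck_min/pck_max assignments: if no clock configuration fits the exact pixel clock, the acceptance window is widened cubically (1000 * i * i * i Hz per retry) and the search runs again. A standalone model of that retry loop follows; find_fck() and its 13 MHz toy search space are stand-ins for dss_div_calc(), not the real divider logic.

/*
 * Hedged sketch of the widening-tolerance retry loop: each pass grows the
 * allowed pixel-clock window by 1000*i^3 Hz until a match is found.
 */
#include <stdbool.h>
#include <stdio.h>

static bool find_fck(unsigned long pck_min, unsigned long pck_max,
		     unsigned long *fck)
{
	/* toy search space: multiples of 13 MHz, divided by 1..32 */
	for (unsigned long base = 13000000; base <= 416000000; base += 13000000)
		for (unsigned int d = 1; d <= 32; d++) {
			unsigned long pck = base / d;
			if (pck >= pck_min && pck <= pck_max) {
				*fck = base;
				return true;
			}
		}
	return false;
}

int main(void)
{
	unsigned long pclk = 74250000, fck = 0;

	for (unsigned long i = 1; i < 10; i++) {
		unsigned long slack = 1000 * i * i * i;
		unsigned long pck_min = pclk > slack ? pclk - slack : 0;
		unsigned long pck_max = pclk + slack;

		if (find_fck(pck_min, pck_max, &fck)) {
			printf("try %lu: fck=%lu\n", i, fck);
			return 0;
		}
	}
	return 1;
}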
ctx                71 drivers/video/fbdev/omap2/omapfb/vrfb.c static void omap2_sms_write_rot_control(u32 val, unsigned ctx)
ctx                73 drivers/video/fbdev/omap2/omapfb/vrfb.c 	__raw_writel(val, vrfb_base + SMS_ROT_CONTROL(ctx));
ctx                76 drivers/video/fbdev/omap2/omapfb/vrfb.c static void omap2_sms_write_rot_size(u32 val, unsigned ctx)
ctx                78 drivers/video/fbdev/omap2/omapfb/vrfb.c 	__raw_writel(val, vrfb_base + SMS_ROT_SIZE(ctx));
ctx                81 drivers/video/fbdev/omap2/omapfb/vrfb.c static void omap2_sms_write_rot_physical_ba(u32 val, unsigned ctx)
ctx                83 drivers/video/fbdev/omap2/omapfb/vrfb.c 	__raw_writel(val, vrfb_base + SMS_ROT_PHYSICAL_BA(ctx));
ctx                86 drivers/video/fbdev/omap2/omapfb/vrfb.c static inline void restore_hw_context(int ctx)
ctx                88 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_control(ctxs[ctx].control, ctx);
ctx                89 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_size(ctxs[ctx].size, ctx);
ctx                90 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_physical_ba(ctxs[ctx].physical_ba, ctx);
ctx               176 drivers/video/fbdev/omap2/omapfb/vrfb.c 	u8 ctx = vrfb->context;
ctx               180 drivers/video/fbdev/omap2/omapfb/vrfb.c 	DBG("omapfb_set_vrfb(%d, %lx, %dx%d, %d, %d)\n", ctx, paddr,
ctx               211 drivers/video/fbdev/omap2/omapfb/vrfb.c 	ctxs[ctx].physical_ba = paddr;
ctx               212 drivers/video/fbdev/omap2/omapfb/vrfb.c 	ctxs[ctx].size = size;
ctx               213 drivers/video/fbdev/omap2/omapfb/vrfb.c 	ctxs[ctx].control = control;
ctx               215 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_physical_ba(paddr, ctx);
ctx               216 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_size(size, ctx);
ctx               217 drivers/video/fbdev/omap2/omapfb/vrfb.c 	omap2_sms_write_rot_control(control, ctx);
ctx               252 drivers/video/fbdev/omap2/omapfb/vrfb.c 	int ctx = vrfb->context;
ctx               254 drivers/video/fbdev/omap2/omapfb/vrfb.c 	if (ctx == 0xff)
ctx               257 drivers/video/fbdev/omap2/omapfb/vrfb.c 	DBG("release ctx %d\n", ctx);
ctx               261 drivers/video/fbdev/omap2/omapfb/vrfb.c 	BUG_ON(!(ctx_map & (1 << ctx)));
ctx               263 drivers/video/fbdev/omap2/omapfb/vrfb.c 	clear_bit(ctx, &ctx_map);
ctx               282 drivers/video/fbdev/omap2/omapfb/vrfb.c 	u8 ctx;
ctx               289 drivers/video/fbdev/omap2/omapfb/vrfb.c 	for (ctx = 0; ctx < num_ctxs; ++ctx)
ctx               290 drivers/video/fbdev/omap2/omapfb/vrfb.c 		if ((ctx_map & (1 << ctx)) == 0)
ctx               293 drivers/video/fbdev/omap2/omapfb/vrfb.c 	if (ctx == num_ctxs) {
ctx               299 drivers/video/fbdev/omap2/omapfb/vrfb.c 	DBG("found free ctx %d\n", ctx);
ctx               301 drivers/video/fbdev/omap2/omapfb/vrfb.c 	set_bit(ctx, &ctx_map);
ctx               305 drivers/video/fbdev/omap2/omapfb/vrfb.c 	vrfb->context = ctx;
ctx               308 drivers/video/fbdev/omap2/omapfb/vrfb.c 		paddr = ctxs[ctx].base + SMS_ROT_VIRT_BASE(rot);
ctx               312 drivers/video/fbdev/omap2/omapfb/vrfb.c 					ctx, rot * 90);
ctx               320 drivers/video/fbdev/omap2/omapfb/vrfb.c 		DBG("VRFB %d/%d: %lx\n", ctx, rot*90, vrfb->paddr[rot]);
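In the vrfb.c lines, ctx is not a struct but a small integer naming one of the SMS rotation contexts, and ctx_map is a bitmap tracking which contexts are in use: allocation scans for a clear bit and set_bit()s it, release clear_bit()s it (with a BUG_ON guarding double-free). The same allocator in miniature, as a standalone model with an invented context count:

/*
 * Hedged sketch of the vrfb context allocator: a bitmap of busy hardware
 * contexts; allocation finds a clear bit, release clears it.
 */
#include <stdio.h>

#define NUM_CTXS 12
static unsigned long ctx_map;

static int vrfb_alloc_ctx(void)
{
	for (unsigned int ctx = 0; ctx < NUM_CTXS; ctx++)
		if (!(ctx_map & (1ul << ctx))) {
			ctx_map |= 1ul << ctx;
			return ctx;
		}
	return -1;			/* all contexts busy */
}

static void vrfb_release_ctx(int ctx)
{
	ctx_map &= ~(1ul << ctx);
}

int main(void)
{
	int a = vrfb_alloc_ctx(), b = vrfb_alloc_ctx();

	printf("got %d and %d\n", a, b);
	vrfb_release_ctx(a);
	printf("reused: %d\n", vrfb_alloc_ctx());	/* gets 0 back */
	return 0;
}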
ctx               240 drivers/video/fbdev/pxa3xx-gcu.c pxa3xx_gcu_handle_irq(int irq, void *ctx)
ctx               242 drivers/video/fbdev/pxa3xx-gcu.c 	struct pxa3xx_gcu_priv *priv = ctx;
ctx              1365 drivers/video/hdmi.c hdmi_audio_coding_type_ext_get_name(enum hdmi_audio_coding_type_ext ctx)
ctx              1367 drivers/video/hdmi.c 	if (ctx < 0 || ctx > 0x1f)
ctx              1370 drivers/video/hdmi.c 	switch (ctx) {
ctx               350 drivers/virtio/virtio_mmio.c 				  const char *name, bool ctx)
ctx               387 drivers/virtio/virtio_mmio.c 				 true, true, ctx, vm_notify, callback, name);
ctx               462 drivers/virtio/virtio_mmio.c 		       const bool *ctx,
ctx               486 drivers/virtio/virtio_mmio.c 				     ctx ? ctx[i] : false);
ctx               177 drivers/virtio/virtio_pci_common.c 				     bool ctx,
ctx               189 drivers/virtio/virtio_pci_common.c 	vq = vp_dev->setup_vq(vp_dev, info, index, callback, name, ctx,
ctx               282 drivers/virtio/virtio_pci_common.c 		const bool *ctx,
ctx               324 drivers/virtio/virtio_pci_common.c 				     ctx ? ctx[i] : false,
ctx               355 drivers/virtio/virtio_pci_common.c 		const char * const names[], const bool *ctx)
ctx               377 drivers/virtio/virtio_pci_common.c 				     ctx ? ctx[i] : false,
ctx               394 drivers/virtio/virtio_pci_common.c 		const char * const names[], const bool *ctx,
ctx               400 drivers/virtio/virtio_pci_common.c 	err = vp_find_vqs_msix(vdev, nvqs, vqs, callbacks, names, true, ctx, desc);
ctx               404 drivers/virtio/virtio_pci_common.c 	err = vp_find_vqs_msix(vdev, nvqs, vqs, callbacks, names, false, ctx, desc);
ctx               408 drivers/virtio/virtio_pci_common.c 	return vp_find_vqs_intx(vdev, nvqs, vqs, callbacks, names, ctx);
ctx               102 drivers/virtio/virtio_pci_common.h 				      bool ctx,
ctx               132 drivers/virtio/virtio_pci_common.h 		const char * const names[], const bool *ctx,
ctx               118 drivers/virtio/virtio_pci_legacy.c 				  bool ctx,
ctx               139 drivers/virtio/virtio_pci_legacy.c 				    true, false, ctx,
ctx               311 drivers/virtio/virtio_pci_modern.c 				  bool ctx,
ctx               343 drivers/virtio/virtio_pci_modern.c 				    true, true, ctx,
ctx               405 drivers/virtio/virtio_pci_modern.c 			      const char * const names[], const bool *ctx,
ctx               410 drivers/virtio/virtio_pci_modern.c 	int rc = vp_find_vqs(vdev, nvqs, vqs, callbacks, names, ctx, desc);
ctx               421 drivers/virtio/virtio_ring.c 				      void *ctx,
ctx               434 drivers/virtio/virtio_ring.c 	BUG_ON(ctx && vq->indirect);
ctx               542 drivers/virtio/virtio_ring.c 		vq->split.desc_state[head].indir_desc = ctx;
ctx               621 drivers/virtio/virtio_ring.c 			     void **ctx)
ctx               667 drivers/virtio/virtio_ring.c 	} else if (ctx) {
ctx               668 drivers/virtio/virtio_ring.c 		*ctx = vq->split.desc_state[head].indir_desc;
ctx               680 drivers/virtio/virtio_ring.c 					 void **ctx)
ctx               720 drivers/virtio/virtio_ring.c 	detach_buf_split(vq, i, ctx);
ctx              1097 drivers/virtio/virtio_ring.c 				       void *ctx,
ctx              1110 drivers/virtio/virtio_ring.c 	BUG_ON(ctx && vq->indirect);
ctx              1196 drivers/virtio/virtio_ring.c 	vq->packed.desc_state[id].indir_desc = ctx;
ctx              1282 drivers/virtio/virtio_ring.c 			      unsigned int id, void **ctx)
ctx              1322 drivers/virtio/virtio_ring.c 	} else if (ctx) {
ctx              1323 drivers/virtio/virtio_ring.c 		*ctx = state->indir_desc;
ctx              1348 drivers/virtio/virtio_ring.c 					  void **ctx)
ctx              1385 drivers/virtio/virtio_ring.c 	detach_buf_packed(vq, id, ctx);
ctx              1699 drivers/virtio/virtio_ring.c 				void *ctx,
ctx              1705 drivers/virtio/virtio_ring.c 					out_sgs, in_sgs, data, ctx, gfp) :
ctx              1707 drivers/virtio/virtio_ring.c 					out_sgs, in_sgs, data, ctx, gfp);
ctx              1806 drivers/virtio/virtio_ring.c 			void *ctx,
ctx              1809 drivers/virtio/virtio_ring.c 	return virtqueue_add(vq, &sg, num, 0, 1, data, ctx, gfp);
ctx              1895 drivers/virtio/virtio_ring.c 			    void **ctx)
ctx              1899 drivers/virtio/virtio_ring.c 	return vq->packed_ring ? virtqueue_get_buf_ctx_packed(_vq, len, ctx) :
ctx              1900 drivers/virtio/virtio_ring.c 				 virtqueue_get_buf_ctx_split(_vq, len, ctx);
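The virtio_ring.c lines show ctx as an opaque per-buffer token: virtqueue_add_*() stashes the caller's void *ctx in the descriptor state (reusing the indir_desc slot, hence the BUG_ON(ctx && vq->indirect) visible above), and virtqueue_get_buf_ctx_*() hands it back when the buffer completes, with one dispatch on packed vs. split ring. Below is a simplified userspace model of that contract; the fixed-size ring and all names are invented for illustration.

/*
 * Hedged sketch: each submitted buffer carries an opaque caller token
 * returned at completion, mirroring virtqueue_add_inbuf_ctx() /
 * virtqueue_get_buf_ctx().
 */
#include <stddef.h>
#include <stdio.h>

#define RING 8

struct desc_state { void *data; void *ctx; };

static struct desc_state ring[RING];
static unsigned int head, tail;

static int add_buf(void *data, void *ctx)
{
	if (tail - head == RING)
		return -1;
	ring[tail % RING] = (struct desc_state){ .data = data, .ctx = ctx };
	tail++;
	return 0;
}

static void *get_buf_ctx(void **ctx)
{
	if (head == tail)
		return NULL;
	struct desc_state *s = &ring[head++ % RING];
	if (ctx)
		*ctx = s->ctx;		/* give the token back */
	return s->data;
}

int main(void)
{
	char payload[] = "pkt", token[] = "page-frag cookie";
	void *ctx = NULL;

	add_buf(payload, token);
	printf("data=%s ctx=%s\n", (char *)get_buf_ctx(&ctx), (char *)ctx);
	return 0;
}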
ctx               557 drivers/visorbus/visorchipset.c static const guid_t *parser_id_get(struct parser_context *ctx)
ctx               559 drivers/visorbus/visorchipset.c 	return &ctx->data.id;
ctx               579 drivers/visorbus/visorchipset.c static void *parser_name_get(struct parser_context *ctx)
ctx               583 drivers/visorbus/visorchipset.c 	phdr = &ctx->data;
ctx               585 drivers/visorbus/visorchipset.c 	    (unsigned long)phdr->name_length > ctx->param_bytes)
ctx               587 drivers/visorbus/visorchipset.c 	ctx->curr = (char *)&phdr + phdr->name_offset;
ctx               588 drivers/visorbus/visorchipset.c 	ctx->bytes_remaining = phdr->name_length;
ctx               589 drivers/visorbus/visorchipset.c 	return parser_string_get(ctx->curr, phdr->name_length);
ctx              1306 drivers/visorbus/visorchipset.c static void parser_done(struct parser_context *ctx)
ctx              1308 drivers/visorbus/visorchipset.c 	chipset_dev->controlvm_payload_bytes_buffered -= ctx->param_bytes;
ctx              1309 drivers/visorbus/visorchipset.c 	kfree(ctx);
ctx              1316 drivers/visorbus/visorchipset.c 	struct parser_context *ctx;
ctx              1328 drivers/visorbus/visorchipset.c 	ctx = kzalloc(allocbytes, GFP_KERNEL);
ctx              1329 drivers/visorbus/visorchipset.c 	if (!ctx) {
ctx              1333 drivers/visorbus/visorchipset.c 	ctx->allocbytes = allocbytes;
ctx              1334 drivers/visorbus/visorchipset.c 	ctx->param_bytes = bytes;
ctx              1338 drivers/visorbus/visorchipset.c 	memcpy(&ctx->data, mapping, bytes);
ctx              1340 drivers/visorbus/visorchipset.c 	ctx->byte_stream = true;
ctx              1341 drivers/visorbus/visorchipset.c 	chipset_dev->controlvm_payload_bytes_buffered += ctx->param_bytes;
ctx              1342 drivers/visorbus/visorchipset.c 	return ctx;
ctx              1345 drivers/visorbus/visorchipset.c 	kfree(ctx);
ctx                86 fs/9p/vfs_dir.c static int v9fs_dir_readdir(struct file *file, struct dir_context *ctx)
ctx               112 fs/9p/vfs_dir.c 			n = p9_client_read(file->private_data, ctx->pos, &to,
ctx               130 fs/9p/vfs_dir.c 			over = !dir_emit(ctx, st.name, strlen(st.name),
ctx               137 fs/9p/vfs_dir.c 			ctx->pos += err;
ctx               148 fs/9p/vfs_dir.c static int v9fs_dir_readdir_dotl(struct file *file, struct dir_context *ctx)
ctx               168 fs/9p/vfs_dir.c 						ctx->pos);
ctx               186 fs/9p/vfs_dir.c 			if (!dir_emit(ctx, curdirent.d_name,
ctx               192 fs/9p/vfs_dir.c 			ctx->pos = curdirent.d_off;
ctx                55 fs/adfs/dir.c  adfs_readdir(struct file *file, struct dir_context *ctx)
ctx                64 fs/adfs/dir.c  	if (ctx->pos >> 32)
ctx                71 fs/adfs/dir.c  	if (ctx->pos == 0) {
ctx                72 fs/adfs/dir.c  		if (!dir_emit_dot(file, ctx))
ctx                74 fs/adfs/dir.c  		ctx->pos = 1;
ctx                76 fs/adfs/dir.c  	if (ctx->pos == 1) {
ctx                77 fs/adfs/dir.c  		if (!dir_emit(ctx, "..", 2, dir.parent_id, DT_DIR))
ctx                79 fs/adfs/dir.c  		ctx->pos = 2;
ctx                84 fs/adfs/dir.c  	ret = ops->setpos(&dir, ctx->pos - 2);
ctx                88 fs/adfs/dir.c  		if (!dir_emit(ctx, obj.name, obj.name_len,
ctx                91 fs/adfs/dir.c  		ctx->pos++;
ctx                45 fs/affs/dir.c  affs_readdir(struct file *file, struct dir_context *ctx)
ctx                59 fs/affs/dir.c  	pr_debug("%s(ino=%lu,f_pos=%llx)\n", __func__, inode->i_ino, ctx->pos);
ctx                61 fs/affs/dir.c  	if (ctx->pos < 2) {
ctx                63 fs/affs/dir.c  		if (!dir_emit_dots(file, ctx))
ctx                68 fs/affs/dir.c  	chain_pos = (ctx->pos - 2) & 0xffff;
ctx                69 fs/affs/dir.c  	hash_pos  = (ctx->pos - 2) >> 16;
ctx                74 fs/affs/dir.c  		ctx->pos = ((hash_pos << 16) | chain_pos) + 2;
ctx               109 fs/affs/dir.c  		ctx->pos = (hash_pos << 16) + 2;
ctx               123 fs/affs/dir.c  				 namelen, name, ino, hash_pos, ctx->pos);
ctx               125 fs/affs/dir.c  			if (!dir_emit(ctx, name, namelen, ino, DT_UNKNOWN))
ctx               127 fs/affs/dir.c  			ctx->pos++;
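The 9p, adfs, and affs entries above all implement the same VFS contract: the kernel hands the filesystem a struct dir_context, the filesystem calls dir_emit() (or dir_emit_dot/dir_emit_dots) once per entry, stops when the emitter returns false, and keeps ctx->pos advanced so a later call can resume where it left off. Here is a simplified userspace model of that contract; the struct layout, the four-entry table, and print_actor are all invented, not the real VFS types.

/*
 * Hedged sketch of the readdir/dir_emit pattern: the actor consumes
 * entries until its buffer is "full", and ctx->pos is the resume cursor.
 */
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

struct dir_context {
	/* returns false when the caller's buffer is full: stop iterating */
	bool (*actor)(struct dir_context *ctx, const char *name, int len);
	long long pos;
};

static bool dir_emit(struct dir_context *ctx, const char *name, int len)
{
	return ctx->actor(ctx, name, len);
}

static const char *entries[] = { ".", "..", "a.txt", "b.txt" };

static int fs_readdir(struct dir_context *ctx)
{
	while (ctx->pos < 4) {
		const char *n = entries[ctx->pos];
		if (!dir_emit(ctx, n, (int)strlen(n)))
			return 0;	/* resume from the same ctx->pos later */
		ctx->pos++;
	}
	return 0;
}

static bool print_actor(struct dir_context *ctx, const char *name, int len)
{
	printf("%lld: %.*s\n", ctx->pos, len, name);
	return true;
}

int main(void)
{
	struct dir_context ctx = { .actor = print_actor, .pos = 0 };

	fs_readdir(&ctx);
	return 0;
}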
ctx                23 fs/afs/dir.c   static int afs_readdir(struct file *file, struct dir_context *ctx);
ctx                27 fs/afs/dir.c   static int afs_lookup_one_filldir(struct dir_context *ctx, const char *name, int nlen,
ctx                29 fs/afs/dir.c   static int afs_lookup_filldir(struct dir_context *ctx, const char *name, int nlen,
ctx                90 fs/afs/dir.c   	struct dir_context	ctx;
ctx                97 fs/afs/dir.c   	struct dir_context	ctx;
ctx               351 fs/afs/dir.c   				 struct dir_context *ctx,
ctx               360 fs/afs/dir.c   	_enter("%u,%x,%p,,",(unsigned)ctx->pos,blkoff,block);
ctx               362 fs/afs/dir.c   	curr = (ctx->pos - blkoff) / sizeof(union afs_xdr_dirent);
ctx               377 fs/afs/dir.c   				ctx->pos = blkoff +
ctx               423 fs/afs/dir.c   		if (!dir_emit(ctx, dire->u.name, nlen,
ctx               425 fs/afs/dir.c   			      (ctx->actor == afs_lookup_filldir ||
ctx               426 fs/afs/dir.c   			       ctx->actor == afs_lookup_one_filldir)?
ctx               432 fs/afs/dir.c   		ctx->pos = blkoff + next * sizeof(union afs_xdr_dirent);
ctx               442 fs/afs/dir.c   static int afs_dir_iterate(struct inode *dir, struct dir_context *ctx,
ctx               453 fs/afs/dir.c   	_enter("{%lu},%u,,", dir->i_ino, (unsigned)ctx->pos);
ctx               466 fs/afs/dir.c   	ctx->pos += sizeof(union afs_xdr_dirent) - 1;
ctx               467 fs/afs/dir.c   	ctx->pos &= ~(sizeof(union afs_xdr_dirent) - 1);
ctx               471 fs/afs/dir.c   	while (ctx->pos < req->actual_len) {
ctx               472 fs/afs/dir.c   		blkoff = ctx->pos & ~(sizeof(union afs_xdr_dir_block) - 1);
ctx               492 fs/afs/dir.c   			ret = afs_dir_iterate_block(dvnode, ctx, dblock, blkoff);
ctx               500 fs/afs/dir.c   		} while (ctx->pos < dir->i_size && blkoff < limit);
ctx               516 fs/afs/dir.c   static int afs_readdir(struct file *file, struct dir_context *ctx)
ctx               520 fs/afs/dir.c   	return afs_dir_iterate(file_inode(file), ctx, afs_file_key(file),
ctx               529 fs/afs/dir.c   static int afs_lookup_one_filldir(struct dir_context *ctx, const char *name,
ctx               533 fs/afs/dir.c   		container_of(ctx, struct afs_lookup_one_cookie, ctx);
ctx               567 fs/afs/dir.c   		.ctx.actor = afs_lookup_one_filldir,
ctx               576 fs/afs/dir.c   	ret = afs_dir_iterate(dir, &cookie.ctx, key, _dir_version);
ctx               598 fs/afs/dir.c   static int afs_lookup_filldir(struct dir_context *ctx, const char *name,
ctx               602 fs/afs/dir.c   		container_of(ctx, struct afs_lookup_cookie, ctx);
ctx               659 fs/afs/dir.c   	cookie->ctx.actor = afs_lookup_filldir;
ctx               678 fs/afs/dir.c   	ret = afs_dir_iterate(dir, &cookie->ctx, key, &data_version);
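The afs lookup entries add one refinement to that pattern: the dir_context is embedded inside a private cookie struct, and the actor recovers the cookie with container_of() so it can stash per-lookup results next to the context. A minimal standalone model of that trick follows; struct lookup_cookie and its fields are invented for illustration.

/*
 * Hedged sketch of the container_of cookie pattern: embed the context in a
 * larger struct, recover the struct inside the callback.
 */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dir_context {
	bool (*actor)(struct dir_context *ctx, const char *name);
};

struct lookup_cookie {
	struct dir_context ctx;		/* must be embedded, not pointed to */
	const char *want;
	bool found;
};

static bool lookup_actor(struct dir_context *ctx, const char *name)
{
	struct lookup_cookie *cookie =
		container_of(ctx, struct lookup_cookie, ctx);

	if (strcmp(name, cookie->want) == 0) {
		cookie->found = true;
		return false;		/* stop iterating */
	}
	return true;
}

int main(void)
{
	struct lookup_cookie cookie = {
		.ctx.actor = lookup_actor,
		.want = "b.txt",
	};
	const char *entries[] = { "a.txt", "b.txt", "c.txt" };

	for (int i = 0; i < 3 && cookie.ctx.actor(&cookie.ctx, entries[i]); i++)
		;
	printf("found=%d\n", cookie.found);
	return 0;
}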
ctx                74 fs/afs/mntpt.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx                87 fs/afs/mntpt.c 		ctx->type = AFSVL_RWVOL;
ctx                88 fs/afs/mntpt.c 		ctx->force = true;
ctx                90 fs/afs/mntpt.c 	if (ctx->cell) {
ctx                91 fs/afs/mntpt.c 		afs_put_cell(ctx->net, ctx->cell);
ctx                92 fs/afs/mntpt.c 		ctx->cell = NULL;
ctx               105 fs/afs/mntpt.c 			ctx->type = AFSVL_RWVOL;
ctx               106 fs/afs/mntpt.c 			ctx->force = true;
ctx               111 fs/afs/mntpt.c 		cell = afs_lookup_cell(ctx->net, p, size, NULL, false);
ctx               116 fs/afs/mntpt.c 		ctx->cell = cell;
ctx               118 fs/afs/mntpt.c 		ctx->volname = afs_root_volume;
ctx               119 fs/afs/mntpt.c 		ctx->volnamesz = sizeof(afs_root_volume) - 1;
ctx               127 fs/afs/mntpt.c 			ctx->cell = afs_get_cell(src_as->cell);
ctx               232 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               247 fs/afs/super.c 			ctx->no_cell = true;
ctx               256 fs/afs/super.c 		ctx->type = AFSVL_RWVOL;
ctx               257 fs/afs/super.c 		ctx->force = true;
ctx               262 fs/afs/super.c 	ctx->volname = strchr(name, ':');
ctx               263 fs/afs/super.c 	if (ctx->volname) {
ctx               265 fs/afs/super.c 		cellnamesz = ctx->volname - name;
ctx               266 fs/afs/super.c 		ctx->volname++;
ctx               268 fs/afs/super.c 		ctx->volname = name;
ctx               274 fs/afs/super.c 	suffix = strrchr(ctx->volname, '.');
ctx               277 fs/afs/super.c 			ctx->type = AFSVL_ROVOL;
ctx               278 fs/afs/super.c 			ctx->force = true;
ctx               280 fs/afs/super.c 			ctx->type = AFSVL_BACKVOL;
ctx               281 fs/afs/super.c 			ctx->force = true;
ctx               288 fs/afs/super.c 	ctx->volnamesz = suffix ?
ctx               289 fs/afs/super.c 		suffix - ctx->volname : strlen(ctx->volname);
ctx               292 fs/afs/super.c 	       cellnamesz, cellnamesz, cellname ?: "", ctx->cell);
ctx               296 fs/afs/super.c 		cell = afs_lookup_cell(ctx->net, cellname, cellnamesz,
ctx               303 fs/afs/super.c 		afs_put_cell(ctx->net, ctx->cell);
ctx               304 fs/afs/super.c 		ctx->cell = cell;
ctx               308 fs/afs/super.c 	       ctx->cell->name, ctx->cell,
ctx               309 fs/afs/super.c 	       ctx->volnamesz, ctx->volnamesz, ctx->volname,
ctx               310 fs/afs/super.c 	       suffix ?: "-", ctx->type, ctx->force ? " FORCE" : "");
ctx               323 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               335 fs/afs/super.c 		ctx->autocell = true;
ctx               339 fs/afs/super.c 		ctx->dyn_root = true;
ctx               343 fs/afs/super.c 		ctx->flock_mode = result.uint_32;
ctx               359 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               363 fs/afs/super.c 	if (!ctx->dyn_root) {
ctx               364 fs/afs/super.c 		if (ctx->no_cell) {
ctx               369 fs/afs/super.c 		if (!ctx->cell) {
ctx               375 fs/afs/super.c 		key = afs_request_key(ctx->cell);
ctx               379 fs/afs/super.c 		ctx->key = key;
ctx               381 fs/afs/super.c 		if (ctx->volume) {
ctx               382 fs/afs/super.c 			afs_put_volume(ctx->cell, ctx->volume);
ctx               383 fs/afs/super.c 			ctx->volume = NULL;
ctx               386 fs/afs/super.c 		volume = afs_create_volume(ctx);
ctx               390 fs/afs/super.c 		ctx->volume = volume;
ctx               401 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               406 fs/afs/super.c 		as->volume->vid == ctx->volume->vid &&
ctx               407 fs/afs/super.c 		as->cell == ctx->cell &&
ctx               427 fs/afs/super.c static int afs_fill_super(struct super_block *sb, struct afs_fs_context *ctx)
ctx               461 fs/afs/super.c 		inode = afs_iget(sb, ctx->key, &iget_data, NULL, NULL, NULL);
ctx               467 fs/afs/super.c 	if (ctx->autocell || as->dyn_root)
ctx               494 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               500 fs/afs/super.c 		as->flock_mode = ctx->flock_mode;
ctx               501 fs/afs/super.c 		if (ctx->dyn_root) {
ctx               504 fs/afs/super.c 			as->cell = afs_get_cell(ctx->cell);
ctx               505 fs/afs/super.c 			as->volume = __afs_get_volume(ctx->volume);
ctx               545 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               575 fs/afs/super.c 		ret = afs_fill_super(sb, ctx);
ctx               598 fs/afs/super.c 	struct afs_fs_context *ctx = fc->fs_private;
ctx               601 fs/afs/super.c 	afs_put_volume(ctx->cell, ctx->volume);
ctx               602 fs/afs/super.c 	afs_put_cell(ctx->net, ctx->cell);
ctx               603 fs/afs/super.c 	key_put(ctx->key);
ctx               604 fs/afs/super.c 	kfree(ctx);
ctx               618 fs/afs/super.c 	struct afs_fs_context *ctx;
ctx               621 fs/afs/super.c 	ctx = kzalloc(sizeof(struct afs_fs_context), GFP_KERNEL);
ctx               622 fs/afs/super.c 	if (!ctx)
ctx               625 fs/afs/super.c 	ctx->type = AFSVL_ROVOL;
ctx               626 fs/afs/super.c 	ctx->net = afs_net(fc->net_ns);
ctx               630 fs/afs/super.c 	cell = afs_lookup_cell_rcu(ctx->net, NULL, 0);
ctx               634 fs/afs/super.c 	ctx->cell = cell;
ctx               636 fs/afs/super.c 	fc->fs_private = ctx;
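In the afs mntpt.c and super.c entries, ctx is the filesystem's private mount-parameter struct hung off fc->fs_private by the new mount API: parse hooks fill it in (cell, volname, flock_mode, dyn_root...), validation and tree creation consume it, and a free hook tears it down. The sketch below is a simplified userspace model of that lifecycle only; struct afs_like_ctx, its fields, and the option names are invented, not the real afs_fs_context.

/*
 * Hedged sketch of the fc->fs_private lifecycle: allocate on init, fill
 * during parameter parsing, release in the free hook.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct fs_context { void *fs_private; };

struct afs_like_ctx {
	char *cell;
	int dyn_root;
};

static int init_fs_context(struct fs_context *fc)
{
	struct afs_like_ctx *ctx = calloc(1, sizeof(*ctx));

	if (!ctx)
		return -1;
	fc->fs_private = ctx;
	return 0;
}

static int parse_param(struct fs_context *fc, const char *key, const char *val)
{
	struct afs_like_ctx *ctx = fc->fs_private;

	if (strcmp(key, "cell") == 0) {
		free(ctx->cell);
		ctx->cell = strdup(val);
	} else if (strcmp(key, "dyn") == 0) {
		ctx->dyn_root = 1;
	} else {
		return -1;		/* unknown option */
	}
	return 0;
}

static void free_fs_context(struct fs_context *fc)
{
	struct afs_like_ctx *ctx = fc->fs_private;

	free(ctx->cell);
	free(ctx);
}

int main(void)
{
	struct fs_context fc = { 0 };

	init_fs_context(&fc);
	parse_param(&fc, "cell", "example.org");
	parse_param(&fc, "dyn", "");
	struct afs_like_ctx *ctx = fc.fs_private;
	printf("cell=%s dyn=%d\n", ctx->cell, ctx->dyn_root);
	free_fs_context(&fc);
	return 0;
}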
ctx               236 fs/aio.c       static struct file *aio_private_file(struct kioctx *ctx, loff_t nr_pages)
ctx               244 fs/aio.c       	inode->i_mapping->private_data = ctx;
ctx               283 fs/aio.c       static void put_aio_ring_file(struct kioctx *ctx)
ctx               285 fs/aio.c       	struct file *aio_ring_file = ctx->aio_ring_file;
ctx               295 fs/aio.c       		ctx->aio_ring_file = NULL;
ctx               302 fs/aio.c       static void aio_free_ring(struct kioctx *ctx)
ctx               309 fs/aio.c       	put_aio_ring_file(ctx);
ctx               311 fs/aio.c       	for (i = 0; i < ctx->nr_pages; i++) {
ctx               314 fs/aio.c       				page_count(ctx->ring_pages[i]));
ctx               315 fs/aio.c       		page = ctx->ring_pages[i];
ctx               318 fs/aio.c       		ctx->ring_pages[i] = NULL;
ctx               322 fs/aio.c       	if (ctx->ring_pages && ctx->ring_pages != ctx->internal_pages) {
ctx               323 fs/aio.c       		kfree(ctx->ring_pages);
ctx               324 fs/aio.c       		ctx->ring_pages = NULL;
ctx               339 fs/aio.c       		struct kioctx *ctx;
ctx               341 fs/aio.c       		ctx = rcu_dereference(table->table[i]);
ctx               342 fs/aio.c       		if (ctx && ctx->aio_ring_file == file) {
ctx               343 fs/aio.c       			if (!atomic_read(&ctx->dead)) {
ctx               344 fs/aio.c       				ctx->user_id = ctx->mmap_base = vma->vm_start;
ctx               380 fs/aio.c       	struct kioctx *ctx;
ctx               397 fs/aio.c       	ctx = mapping->private_data;
ctx               398 fs/aio.c       	if (!ctx) {
ctx               407 fs/aio.c       	if (!mutex_trylock(&ctx->ring_lock)) {
ctx               413 fs/aio.c       	if (idx < (pgoff_t)ctx->nr_pages) {
ctx               415 fs/aio.c       		if (ctx->ring_pages[idx] != old)
ctx               437 fs/aio.c       	spin_lock_irqsave(&ctx->completion_lock, flags);
ctx               439 fs/aio.c       	BUG_ON(ctx->ring_pages[idx] != old);
ctx               440 fs/aio.c       	ctx->ring_pages[idx] = new;
ctx               441 fs/aio.c       	spin_unlock_irqrestore(&ctx->completion_lock, flags);
ctx               447 fs/aio.c       	mutex_unlock(&ctx->ring_lock);
ctx               461 fs/aio.c       static int aio_setup_ring(struct kioctx *ctx, unsigned int nr_events)
ctx               480 fs/aio.c       	file = aio_private_file(ctx, nr_pages);
ctx               482 fs/aio.c       		ctx->aio_ring_file = NULL;
ctx               486 fs/aio.c       	ctx->aio_ring_file = file;
ctx               490 fs/aio.c       	ctx->ring_pages = ctx->internal_pages;
ctx               492 fs/aio.c       		ctx->ring_pages = kcalloc(nr_pages, sizeof(struct page *),
ctx               494 fs/aio.c       		if (!ctx->ring_pages) {
ctx               495 fs/aio.c       			put_aio_ring_file(ctx);
ctx               511 fs/aio.c       		ctx->ring_pages[i] = page;
ctx               513 fs/aio.c       	ctx->nr_pages = i;
ctx               516 fs/aio.c       		aio_free_ring(ctx);
ctx               520 fs/aio.c       	ctx->mmap_size = nr_pages * PAGE_SIZE;
ctx               521 fs/aio.c       	pr_debug("attempting mmap of %lu bytes\n", ctx->mmap_size);
ctx               524 fs/aio.c       		ctx->mmap_size = 0;
ctx               525 fs/aio.c       		aio_free_ring(ctx);
ctx               529 fs/aio.c       	ctx->mmap_base = do_mmap_pgoff(ctx->aio_ring_file, 0, ctx->mmap_size,
ctx               533 fs/aio.c       	if (IS_ERR((void *)ctx->mmap_base)) {
ctx               534 fs/aio.c       		ctx->mmap_size = 0;
ctx               535 fs/aio.c       		aio_free_ring(ctx);
ctx               539 fs/aio.c       	pr_debug("mmap address: 0x%08lx\n", ctx->mmap_base);
ctx               541 fs/aio.c       	ctx->user_id = ctx->mmap_base;
ctx               542 fs/aio.c       	ctx->nr_events = nr_events; /* trusted copy */
ctx               544 fs/aio.c       	ring = kmap_atomic(ctx->ring_pages[0]);
ctx               553 fs/aio.c       	flush_dcache_page(ctx->ring_pages[0]);
ctx               565 fs/aio.c       	struct kioctx *ctx = req->ki_ctx;
ctx               571 fs/aio.c       	spin_lock_irqsave(&ctx->ctx_lock, flags);
ctx               572 fs/aio.c       	list_add_tail(&req->ki_list, &ctx->active_reqs);
ctx               574 fs/aio.c       	spin_unlock_irqrestore(&ctx->ctx_lock, flags);
ctx               585 fs/aio.c       	struct kioctx *ctx = container_of(to_rcu_work(work), struct kioctx,
ctx               587 fs/aio.c       	pr_debug("freeing %p\n", ctx);
ctx               589 fs/aio.c       	aio_free_ring(ctx);
ctx               590 fs/aio.c       	free_percpu(ctx->cpu);
ctx               591 fs/aio.c       	percpu_ref_exit(&ctx->reqs);
ctx               592 fs/aio.c       	percpu_ref_exit(&ctx->users);
ctx               593 fs/aio.c       	kmem_cache_free(kioctx_cachep, ctx);
ctx               598 fs/aio.c       	struct kioctx *ctx = container_of(ref, struct kioctx, reqs);
ctx               601 fs/aio.c       	if (ctx->rq_wait && atomic_dec_and_test(&ctx->rq_wait->count))
ctx               602 fs/aio.c       		complete(&ctx->rq_wait->comp);
ctx               605 fs/aio.c       	INIT_RCU_WORK(&ctx->free_rwork, free_ioctx);
ctx               606 fs/aio.c       	queue_rcu_work(system_wq, &ctx->free_rwork);
ctx               616 fs/aio.c       	struct kioctx *ctx = container_of(ref, struct kioctx, users);
ctx               619 fs/aio.c       	spin_lock_irq(&ctx->ctx_lock);
ctx               621 fs/aio.c       	while (!list_empty(&ctx->active_reqs)) {
ctx               622 fs/aio.c       		req = list_first_entry(&ctx->active_reqs,
ctx               628 fs/aio.c       	spin_unlock_irq(&ctx->ctx_lock);
ctx               630 fs/aio.c       	percpu_ref_kill(&ctx->reqs);
ctx               631 fs/aio.c       	percpu_ref_put(&ctx->reqs);
ctx               634 fs/aio.c       static int ioctx_add_table(struct kioctx *ctx, struct mm_struct *mm)
ctx               647 fs/aio.c       					ctx->id = i;
ctx               648 fs/aio.c       					rcu_assign_pointer(table->table[i], ctx);
ctx               655 fs/aio.c       					ring = kmap_atomic(ctx->ring_pages[0]);
ctx               656 fs/aio.c       					ring->id = ctx->id;
ctx               705 fs/aio.c       	struct kioctx *ctx;
ctx               735 fs/aio.c       	ctx = kmem_cache_zalloc(kioctx_cachep, GFP_KERNEL);
ctx               736 fs/aio.c       	if (!ctx)
ctx               739 fs/aio.c       	ctx->max_reqs = max_reqs;
ctx               741 fs/aio.c       	spin_lock_init(&ctx->ctx_lock);
ctx               742 fs/aio.c       	spin_lock_init(&ctx->completion_lock);
ctx               743 fs/aio.c       	mutex_init(&ctx->ring_lock);
ctx               746 fs/aio.c       	mutex_lock(&ctx->ring_lock);
ctx               747 fs/aio.c       	init_waitqueue_head(&ctx->wait);
ctx               749 fs/aio.c       	INIT_LIST_HEAD(&ctx->active_reqs);
ctx               751 fs/aio.c       	if (percpu_ref_init(&ctx->users, free_ioctx_users, 0, GFP_KERNEL))
ctx               754 fs/aio.c       	if (percpu_ref_init(&ctx->reqs, free_ioctx_reqs, 0, GFP_KERNEL))
ctx               757 fs/aio.c       	ctx->cpu = alloc_percpu(struct kioctx_cpu);
ctx               758 fs/aio.c       	if (!ctx->cpu)
ctx               761 fs/aio.c       	err = aio_setup_ring(ctx, nr_events);
ctx               765 fs/aio.c       	atomic_set(&ctx->reqs_available, ctx->nr_events - 1);
ctx               766 fs/aio.c       	ctx->req_batch = (ctx->nr_events - 1) / (num_possible_cpus() * 4);
ctx               767 fs/aio.c       	if (ctx->req_batch < 1)
ctx               768 fs/aio.c       		ctx->req_batch = 1;
ctx               772 fs/aio.c       	if (aio_nr + ctx->max_reqs > aio_max_nr ||
ctx               773 fs/aio.c       	    aio_nr + ctx->max_reqs < aio_nr) {
ctx               778 fs/aio.c       	aio_nr += ctx->max_reqs;
ctx               781 fs/aio.c       	percpu_ref_get(&ctx->users);	/* io_setup() will drop this ref */
ctx               782 fs/aio.c       	percpu_ref_get(&ctx->reqs);	/* free_ioctx_users() will drop this */
ctx               784 fs/aio.c       	err = ioctx_add_table(ctx, mm);
ctx               789 fs/aio.c       	mutex_unlock(&ctx->ring_lock);
ctx               792 fs/aio.c       		 ctx, ctx->user_id, mm, ctx->nr_events);
ctx               793 fs/aio.c       	return ctx;
ctx               796 fs/aio.c       	aio_nr_sub(ctx->max_reqs);
ctx               798 fs/aio.c       	atomic_set(&ctx->dead, 1);
ctx               799 fs/aio.c       	if (ctx->mmap_size)
ctx               800 fs/aio.c       		vm_munmap(ctx->mmap_base, ctx->mmap_size);
ctx               801 fs/aio.c       	aio_free_ring(ctx);
ctx               803 fs/aio.c       	mutex_unlock(&ctx->ring_lock);
ctx               804 fs/aio.c       	free_percpu(ctx->cpu);
ctx               805 fs/aio.c       	percpu_ref_exit(&ctx->reqs);
ctx               806 fs/aio.c       	percpu_ref_exit(&ctx->users);
ctx               807 fs/aio.c       	kmem_cache_free(kioctx_cachep, ctx);
ctx               817 fs/aio.c       static int kill_ioctx(struct mm_struct *mm, struct kioctx *ctx,
ctx               823 fs/aio.c       	if (atomic_xchg(&ctx->dead, 1)) {
ctx               829 fs/aio.c       	WARN_ON(ctx != rcu_access_pointer(table->table[ctx->id]));
ctx               830 fs/aio.c       	RCU_INIT_POINTER(table->table[ctx->id], NULL);
ctx               834 fs/aio.c       	wake_up_all(&ctx->wait);
ctx               843 fs/aio.c       	aio_nr_sub(ctx->max_reqs);
ctx               845 fs/aio.c       	if (ctx->mmap_size)
ctx               846 fs/aio.c       		vm_munmap(ctx->mmap_base, ctx->mmap_size);
ctx               848 fs/aio.c       	ctx->rq_wait = wait;
ctx               849 fs/aio.c       	percpu_ref_kill(&ctx->users);
ctx               875 fs/aio.c       		struct kioctx *ctx =
ctx               878 fs/aio.c       		if (!ctx) {
ctx               890 fs/aio.c       		ctx->mmap_size = 0;
ctx               891 fs/aio.c       		kill_ioctx(mm, ctx, &wait);
ctx               903 fs/aio.c       static void put_reqs_available(struct kioctx *ctx, unsigned nr)
ctx               909 fs/aio.c       	kcpu = this_cpu_ptr(ctx->cpu);
ctx               912 fs/aio.c       	while (kcpu->reqs_available >= ctx->req_batch * 2) {
ctx               913 fs/aio.c       		kcpu->reqs_available -= ctx->req_batch;
ctx               914 fs/aio.c       		atomic_add(ctx->req_batch, &ctx->reqs_available);
ctx               920 fs/aio.c       static bool __get_reqs_available(struct kioctx *ctx)
ctx               927 fs/aio.c       	kcpu = this_cpu_ptr(ctx->cpu);
ctx               929 fs/aio.c       		int old, avail = atomic_read(&ctx->reqs_available);
ctx               932 fs/aio.c       			if (avail < ctx->req_batch)
ctx               936 fs/aio.c       			avail = atomic_cmpxchg(&ctx->reqs_available,
ctx               937 fs/aio.c       					       avail, avail - ctx->req_batch);
ctx               940 fs/aio.c       		kcpu->reqs_available += ctx->req_batch;
ctx               957 fs/aio.c       static void refill_reqs_available(struct kioctx *ctx, unsigned head,
ctx               963 fs/aio.c       	head %= ctx->nr_events;
ctx               967 fs/aio.c       		events_in_ring = ctx->nr_events - (head - tail);
ctx               969 fs/aio.c       	completed = ctx->completed_events;
ctx               978 fs/aio.c       	ctx->completed_events -= completed;
ctx               979 fs/aio.c       	put_reqs_available(ctx, completed);
ctx               986 fs/aio.c       static void user_refill_reqs_available(struct kioctx *ctx)
ctx               988 fs/aio.c       	spin_lock_irq(&ctx->completion_lock);
ctx               989 fs/aio.c       	if (ctx->completed_events) {
ctx              1002 fs/aio.c       		ring = kmap_atomic(ctx->ring_pages[0]);
ctx              1006 fs/aio.c       		refill_reqs_available(ctx, head, ctx->tail);
ctx              1009 fs/aio.c       	spin_unlock_irq(&ctx->completion_lock);
ctx              1012 fs/aio.c       static bool get_reqs_available(struct kioctx *ctx)
ctx              1014 fs/aio.c       	if (__get_reqs_available(ctx))
ctx              1016 fs/aio.c       	user_refill_reqs_available(ctx);
ctx              1017 fs/aio.c       	return __get_reqs_available(ctx);
ctx              1027 fs/aio.c       static inline struct aio_kiocb *aio_get_req(struct kioctx *ctx)
ctx              1035 fs/aio.c       	if (unlikely(!get_reqs_available(ctx))) {
ctx              1040 fs/aio.c       	percpu_ref_get(&ctx->reqs);
ctx              1041 fs/aio.c       	req->ki_ctx = ctx;
ctx              1052 fs/aio.c       	struct kioctx *ctx, *ret = NULL;
ctx              1066 fs/aio.c       	ctx = rcu_dereference(table->table[id]);
ctx              1067 fs/aio.c       	if (ctx && ctx->user_id == ctx_id) {
ctx              1068 fs/aio.c       		if (percpu_ref_tryget_live(&ctx->users))
ctx              1069 fs/aio.c       			ret = ctx;
ctx              1091 fs/aio.c       	struct kioctx	*ctx = iocb->ki_ctx;
ctx              1102 fs/aio.c       	spin_lock_irqsave(&ctx->completion_lock, flags);
ctx              1104 fs/aio.c       	tail = ctx->tail;
ctx              1107 fs/aio.c       	if (++tail >= ctx->nr_events)
ctx              1110 fs/aio.c       	ev_page = kmap_atomic(ctx->ring_pages[pos / AIO_EVENTS_PER_PAGE]);
ctx              1116 fs/aio.c       	flush_dcache_page(ctx->ring_pages[pos / AIO_EVENTS_PER_PAGE]);
ctx              1118 fs/aio.c       	pr_debug("%p[%u]: %p: %p %Lx %Lx %Lx\n", ctx, tail, iocb,
ctx              1127 fs/aio.c       	ctx->tail = tail;
ctx              1129 fs/aio.c       	ring = kmap_atomic(ctx->ring_pages[0]);
ctx              1133 fs/aio.c       	flush_dcache_page(ctx->ring_pages[0]);
ctx              1135 fs/aio.c       	ctx->completed_events++;
ctx              1136 fs/aio.c       	if (ctx->completed_events > 1)
ctx              1137 fs/aio.c       		refill_reqs_available(ctx, head, tail);
ctx              1138 fs/aio.c       	spin_unlock_irqrestore(&ctx->completion_lock, flags);
ctx              1158 fs/aio.c       	if (waitqueue_active(&ctx->wait))
ctx              1159 fs/aio.c       		wake_up(&ctx->wait);
ctx              1174 fs/aio.c       static long aio_read_events_ring(struct kioctx *ctx,
ctx              1189 fs/aio.c       	mutex_lock(&ctx->ring_lock);
ctx              1192 fs/aio.c       	ring = kmap_atomic(ctx->ring_pages[0]);
ctx              1203 fs/aio.c       	pr_debug("h%u t%u m%u\n", head, tail, ctx->nr_events);
ctx              1208 fs/aio.c       	head %= ctx->nr_events;
ctx              1209 fs/aio.c       	tail %= ctx->nr_events;
ctx              1216 fs/aio.c       		avail = (head <= tail ?  tail : ctx->nr_events) - head;
ctx              1221 fs/aio.c       		page = ctx->ring_pages[pos / AIO_EVENTS_PER_PAGE];
ctx              1239 fs/aio.c       		head %= ctx->nr_events;
ctx              1242 fs/aio.c       	ring = kmap_atomic(ctx->ring_pages[0]);
ctx              1245 fs/aio.c       	flush_dcache_page(ctx->ring_pages[0]);
ctx              1249 fs/aio.c       	mutex_unlock(&ctx->ring_lock);
ctx              1254 fs/aio.c       static bool aio_read_events(struct kioctx *ctx, long min_nr, long nr,
ctx              1257 fs/aio.c       	long ret = aio_read_events_ring(ctx, event + *i, nr - *i);
ctx              1262 fs/aio.c       	if (unlikely(atomic_read(&ctx->dead)))
ctx              1271 fs/aio.c       static long read_events(struct kioctx *ctx, long min_nr, long nr,
ctx              1292 fs/aio.c       		aio_read_events(ctx, min_nr, nr, event, &ret);
ctx              1294 fs/aio.c       		wait_event_interruptible_hrtimeout(ctx->wait,
ctx              1295 fs/aio.c       				aio_read_events(ctx, min_nr, nr, event, &ret),
ctx              1316 fs/aio.c       	unsigned long ctx;
ctx              1319 fs/aio.c       	ret = get_user(ctx, ctxp);
ctx              1324 fs/aio.c       	if (unlikely(ctx || nr_events == 0)) {
ctx              1326 fs/aio.c       		         ctx, nr_events);
ctx              1347 fs/aio.c       	unsigned long ctx;
ctx              1350 fs/aio.c       	ret = get_user(ctx, ctx32p);
ctx              1355 fs/aio.c       	if (unlikely(ctx || nr_events == 0)) {
ctx              1357 fs/aio.c       		         ctx, nr_events);
ctx              1382 fs/aio.c       SYSCALL_DEFINE1(io_destroy, aio_context_t, ctx)
ctx              1384 fs/aio.c       	struct kioctx *ioctx = lookup_ioctx(ctx);
ctx              1414 fs/aio.c       	struct kioctx *ctx = iocb->ki_ctx;
ctx              1417 fs/aio.c       	spin_lock_irqsave(&ctx->ctx_lock, flags);
ctx              1419 fs/aio.c       	spin_unlock_irqrestore(&ctx->ctx_lock, flags);
ctx              1634 fs/aio.c       	struct kioctx *ctx = iocb->ki_ctx;
ctx              1647 fs/aio.c       	spin_lock_irq(&ctx->ctx_lock);
ctx              1650 fs/aio.c       		spin_unlock_irq(&ctx->ctx_lock);
ctx              1656 fs/aio.c       	spin_unlock_irq(&ctx->ctx_lock);
ctx              1693 fs/aio.c       		struct kioctx *ctx = iocb->ki_ctx;
ctx              1709 fs/aio.c       		spin_unlock_irqrestore(&ctx->ctx_lock, flags);
ctx              1743 fs/aio.c       	struct kioctx *ctx = aiocb->ki_ctx;
ctx              1773 fs/aio.c       	spin_lock_irq(&ctx->ctx_lock);
ctx              1787 fs/aio.c       			list_add_tail(&aiocb->ki_list, &ctx->active_reqs);
ctx              1796 fs/aio.c       	spin_unlock_irq(&ctx->ctx_lock);
ctx              1802 fs/aio.c       static int __io_submit_one(struct kioctx *ctx, const struct iocb *iocb,
ctx              1856 fs/aio.c       static int io_submit_one(struct kioctx *ctx, struct iocb __user *user_iocb,
ctx              1882 fs/aio.c       	req = aio_get_req(ctx);
ctx              1886 fs/aio.c       	err = __io_submit_one(ctx, &iocb, user_iocb, req, compat);
ctx              1898 fs/aio.c       		put_reqs_available(ctx, 1);
ctx              1918 fs/aio.c       	struct kioctx *ctx;
ctx              1926 fs/aio.c       	ctx = lookup_ioctx(ctx_id);
ctx              1927 fs/aio.c       	if (unlikely(!ctx)) {
ctx              1932 fs/aio.c       	if (nr > ctx->nr_events)
ctx              1933 fs/aio.c       		nr = ctx->nr_events;
ctx              1945 fs/aio.c       		ret = io_submit_one(ctx, user_iocb, false);
ctx              1952 fs/aio.c       	percpu_ref_put(&ctx->users);
ctx              1960 fs/aio.c       	struct kioctx *ctx;
ctx              1968 fs/aio.c       	ctx = lookup_ioctx(ctx_id);
ctx              1969 fs/aio.c       	if (unlikely(!ctx)) {
ctx              1974 fs/aio.c       	if (nr > ctx->nr_events)
ctx              1975 fs/aio.c       		nr = ctx->nr_events;
ctx              1987 fs/aio.c       		ret = io_submit_one(ctx, compat_ptr(user_iocb), true);
ctx              1994 fs/aio.c       	percpu_ref_put(&ctx->users);
ctx              2012 fs/aio.c       	struct kioctx *ctx;
ctx              2023 fs/aio.c       	ctx = lookup_ioctx(ctx_id);
ctx              2024 fs/aio.c       	if (unlikely(!ctx))
ctx              2027 fs/aio.c       	spin_lock_irq(&ctx->ctx_lock);
ctx              2029 fs/aio.c       	list_for_each_entry(kiocb, &ctx->active_reqs, ki_list) {
ctx              2036 fs/aio.c       	spin_unlock_irq(&ctx->ctx_lock);
ctx              2047 fs/aio.c       	percpu_ref_put(&ctx->users);
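The long fs/aio.c run centers on the kioctx lifetime chain visible above: two percpu_refs guard the context, and teardown is staged. Dropping the last "users" ref (free_ioctx_users) cancels active requests and kills the "reqs" ref; dropping the last "reqs" ref (free_ioctx_reqs) schedules the actual free via RCU work. Below, plain integer counters stand in for percpu_ref to show just the ordering guarantee, that the ctx outlives every in-flight request; it is a model, not the kernel code.

/*
 * Hedged sketch of the kioctx teardown chain: users ref -> reqs ref ->
 * free, so io_destroy() cannot free a ctx with requests still in flight.
 */
#include <stdio.h>
#include <stdlib.h>

struct kioctx_model {
	int users;	/* io_setup() / lookup_ioctx() holders */
	int reqs;	/* one per in-flight request, plus one base ref */
};

static void free_ioctx(struct kioctx_model *ctx)
{
	printf("freeing ctx\n");
	free(ctx);
}

static void put_reqs(struct kioctx_model *ctx)
{
	if (--ctx->reqs == 0)
		free_ioctx(ctx);
}

static void put_users(struct kioctx_model *ctx)
{
	if (--ctx->users == 0)
		put_reqs(ctx);	/* drop the base reqs ref: begin teardown */
}

int main(void)
{
	struct kioctx_model *ctx = calloc(1, sizeof(*ctx));

	ctx->users = 1;		/* held by io_setup() */
	ctx->reqs = 1;		/* base ref, dropped at kill time */

	ctx->reqs++;		/* a request is submitted */
	put_users(ctx);		/* io_destroy(): ctx must stay alive ... */
	put_reqs(ctx);		/* ... until the last request completes */
	return 0;
}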
ctx                45 fs/anon_inodes.c 	struct pseudo_fs_context *ctx = init_pseudo(fc, ANON_INODE_FS_MAGIC);
ctx                46 fs/anon_inodes.c 	if (!ctx)
ctx                48 fs/anon_inodes.c 	ctx->dops = &anon_inodefs_dentry_operations;
ctx               215 fs/befs/linuxvfs.c befs_readdir(struct file *file, struct dir_context *ctx)
ctx               226 fs/befs/linuxvfs.c 		  __func__, file, inode->i_ino, ctx->pos);
ctx               229 fs/befs/linuxvfs.c 		result = befs_btree_read(sb, ds, ctx->pos, BEFS_NAME_LEN + 1,
ctx               259 fs/befs/linuxvfs.c 			if (!dir_emit(ctx, nlsname, nlsnamelen,
ctx               266 fs/befs/linuxvfs.c 			if (!dir_emit(ctx, keybuf, keysize,
ctx               270 fs/befs/linuxvfs.c 		ctx->pos++;
ctx                29 fs/bfs/dir.c   static int bfs_readdir(struct file *f, struct dir_context *ctx)
ctx                37 fs/bfs/dir.c   	if (ctx->pos & (BFS_DIRENT_SIZE - 1)) {
ctx                39 fs/bfs/dir.c   					(unsigned long)ctx->pos,
ctx                44 fs/bfs/dir.c   	while (ctx->pos < dir->i_size) {
ctx                45 fs/bfs/dir.c   		offset = ctx->pos & (BFS_BSIZE - 1);
ctx                46 fs/bfs/dir.c   		block = BFS_I(dir)->i_sblock + (ctx->pos >> BFS_BSIZE_BITS);
ctx                49 fs/bfs/dir.c   			ctx->pos += BFS_BSIZE - offset;
ctx                56 fs/bfs/dir.c   				if (!dir_emit(ctx, de->name, size,
ctx                64 fs/bfs/dir.c   			ctx->pos += BFS_DIRENT_SIZE;
ctx                65 fs/bfs/dir.c   		} while ((offset < BFS_BSIZE) && (ctx->pos < dir->i_size));
ctx               828 fs/block_dev.c 	struct pseudo_fs_context *ctx = init_pseudo(fc, BDEVFS_MAGIC);
ctx               829 fs/block_dev.c 	if (!ctx)
ctx               832 fs/block_dev.c 	ctx->ops = &bdev_sops;
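The anon_inodes.c and block_dev.c entries (and the btrfs-tests entries further down) share the init_pseudo() idiom: a pseudo filesystem's init_fs_context hook asks libfs for a pseudo_fs_context and overrides only the hooks it cares about (ctx->ops, ctx->dops). The fragment below is kernel-style for illustration and is not buildable outside the kernel tree; EXAMPLEFS_MAGIC and the examplefs_* names are hypothetical, while init_pseudo(), kill_anon_super(), and the pseudo_fs_context fields are the real interfaces shown above.

/* Hedged kernel-style sketch of the init_pseudo() pattern. */
static int examplefs_init_fs_context(struct fs_context *fc)
{
	struct pseudo_fs_context *ctx = init_pseudo(fc, EXAMPLEFS_MAGIC);

	if (!ctx)
		return -ENOMEM;
	ctx->ops = &examplefs_super_ops;	/* optional overrides */
	ctx->dops = &examplefs_dentry_ops;
	return 0;
}

static struct file_system_type examplefs_type = {
	.name = "examplefs",
	.init_fs_context = examplefs_init_fs_context,
	.kill_sb = kill_anon_super,
};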
ctx              1867 fs/btrfs/backref.c 			     iterate_extent_inodes_t *iterate, void *ctx)
ctx              1877 fs/btrfs/backref.c 		ret = iterate(eie->inum, eie->offset, root, ctx);
ctx              1897 fs/btrfs/backref.c 				iterate_extent_inodes_t *iterate, void *ctx,
ctx              1952 fs/btrfs/backref.c 						iterate, ctx);
ctx              1971 fs/btrfs/backref.c 				iterate_extent_inodes_t *iterate, void *ctx,
ctx              1990 fs/btrfs/backref.c 					iterate, ctx, ignore_offset);
ctx              1996 fs/btrfs/backref.c 			      struct extent_buffer *eb, void *ctx);
ctx              2000 fs/btrfs/backref.c 			      iterate_irefs_t *iterate, void *ctx)
ctx              2047 fs/btrfs/backref.c 				      (unsigned long)(iref + 1), eb, ctx);
ctx              2063 fs/btrfs/backref.c 				 iterate_irefs_t *iterate, void *ctx)
ctx              2106 fs/btrfs/backref.c 				      (unsigned long)&extref->name, eb, ctx);
ctx              2125 fs/btrfs/backref.c 			 void *ctx)
ctx              2130 fs/btrfs/backref.c 	ret = iterate_inode_refs(inum, fs_root, path, iterate, ctx);
ctx              2136 fs/btrfs/backref.c 	ret = iterate_inode_extrefs(inum, fs_root, path, iterate, ctx);
ctx              2148 fs/btrfs/backref.c 			 struct extent_buffer *eb, void *ctx)
ctx              2150 fs/btrfs/backref.c 	struct inode_fs_paths *ipath = ctx;
ctx                20 fs/btrfs/backref.h 		void *ctx);
ctx                33 fs/btrfs/backref.h 				iterate_extent_inodes_t *iterate, void *ctx,
ctx                38 fs/btrfs/backref.h 				iterate_extent_inodes_t *iterate, void *ctx,
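The backref entries define the third common shape of ctx in this index: a typed iterator callback (iterate_extent_inodes_t) invoked once per resolved (inum, offset, root) hit, with an opaque void *ctx threaded through untouched so callers such as build_ino_list() (in the ioctl.c entries below) can accumulate results. A self-contained model of that contract, with an invented three-hit walk:

/*
 * Hedged sketch of the iterator-plus-void-*ctx contract: nonzero return
 * from the callback stops the walk.
 */
#include <stdio.h>

typedef int (iterate_extent_inodes_t)(unsigned long long inum,
				      unsigned long long offset,
				      unsigned long long root, void *ctx);

struct ino_list { unsigned long long ino[8]; int n; };

static int build_ino_list(unsigned long long inum, unsigned long long off,
			  unsigned long long root, void *ctx)
{
	struct ino_list *l = ctx;

	(void)off; (void)root;
	if (l->n == 8)
		return -1;		/* full: abort the walk */
	l->ino[l->n++] = inum;
	return 0;
}

static int iterate_extent_inodes(iterate_extent_inodes_t *iterate, void *ctx)
{
	/* stand-in for resolving an extent's back references */
	static const unsigned long long hits[] = { 257, 258, 260 };

	for (int i = 0; i < 3; i++) {
		int ret = iterate(hits[i], 0, 5, ctx);
		if (ret)
			return ret;
	}
	return 0;
}

int main(void)
{
	struct ino_list l = { .n = 0 };

	iterate_extent_inodes(build_ino_list, &l);
	for (int i = 0; i < l.n; i++)
		printf("inode %llu\n", l.ino[i]);
	return 0;
}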
ctx              1683 fs/btrfs/delayed-inode.c int btrfs_readdir_delayed_dir_index(struct dir_context *ctx,
ctx              1705 fs/btrfs/delayed-inode.c 		if (curr->key.offset < ctx->pos) {
ctx              1711 fs/btrfs/delayed-inode.c 		ctx->pos = curr->key.offset;
ctx              1720 fs/btrfs/delayed-inode.c 		over = !dir_emit(ctx, name, name_len,
ctx              1728 fs/btrfs/delayed-inode.c 		ctx->pos++;
ctx               132 fs/btrfs/delayed-inode.h int btrfs_readdir_delayed_dir_index(struct dir_context *ctx,
ctx              2069 fs/btrfs/file.c 	struct btrfs_log_ctx ctx;
ctx              2074 fs/btrfs/file.c 	btrfs_init_log_ctx(&ctx, inode);
ctx              2202 fs/btrfs/file.c 	ret = btrfs_log_dentry_safe(trans, dentry, start, end, &ctx);
ctx              2223 fs/btrfs/file.c 			ret = btrfs_sync_log(trans, root, &ctx);
ctx              2234 fs/btrfs/file.c 	ASSERT(list_empty(&ctx.list));
ctx              1215 fs/btrfs/inode.c 	struct async_cow *ctx;
ctx              1235 fs/btrfs/inode.c 	ctx = kvmalloc(struct_size(ctx, chunks, num_chunks), GFP_KERNEL);
ctx              1238 fs/btrfs/inode.c 	if (!ctx) {
ctx              1251 fs/btrfs/inode.c 	async_chunk = ctx->chunks;
ctx              1252 fs/btrfs/inode.c 	atomic_set(&ctx->num_chunks, num_chunks);
ctx              1265 fs/btrfs/inode.c 		async_chunk[i].pending = &ctx->num_chunks;
ctx              2540 fs/btrfs/inode.c 				       void *ctx)
ctx              2543 fs/btrfs/inode.c 	struct old_sa_defrag_extent *old = ctx;
ctx              6071 fs/btrfs/inode.c static int btrfs_filldir(void *addr, int entries, struct dir_context *ctx)
ctx              6077 fs/btrfs/inode.c 		ctx->pos = get_unaligned(&entry->offset);
ctx              6078 fs/btrfs/inode.c 		if (!dir_emit(ctx, name, get_unaligned(&entry->name_len),
ctx              6084 fs/btrfs/inode.c 		ctx->pos++;
ctx              6089 fs/btrfs/inode.c static int btrfs_real_readdir(struct file *file, struct dir_context *ctx)
ctx              6111 fs/btrfs/inode.c 	if (!dir_emit_dots(file, ctx))
ctx              6127 fs/btrfs/inode.c 	key.offset = ctx->pos;
ctx              6154 fs/btrfs/inode.c 		if (found_key.offset < ctx->pos)
ctx              6163 fs/btrfs/inode.c 			ret = btrfs_filldir(private->filldir_buf, entries, ctx);
ctx              6190 fs/btrfs/inode.c 	ret = btrfs_filldir(private->filldir_buf, entries, ctx);
ctx              6194 fs/btrfs/inode.c 	ret = btrfs_readdir_delayed_dir_index(ctx, &ins_list);
ctx              6215 fs/btrfs/inode.c 	if (ctx->pos >= INT_MAX)
ctx              6216 fs/btrfs/inode.c 		ctx->pos = LLONG_MAX;
ctx              6218 fs/btrfs/inode.c 		ctx->pos = INT_MAX;
ctx              9942 fs/btrfs/inode.c 	struct btrfs_log_ctx ctx;
ctx              10096 fs/btrfs/inode.c 		btrfs_init_log_ctx(&ctx, old_inode);
ctx              10099 fs/btrfs/inode.c 					 false, &ctx);
ctx              10142 fs/btrfs/inode.c 		ret = btrfs_sync_log(trans, BTRFS_I(old_inode)->root, &ctx);
ctx              10147 fs/btrfs/inode.c 		list_del(&ctx.list);
ctx              4439 fs/btrfs/ioctl.c static int build_ino_list(u64 inum, u64 offset, u64 root, void *ctx)
ctx              4441 fs/btrfs/ioctl.c 	struct btrfs_data_container *inodes = ctx;
ctx               121 fs/btrfs/props.c 				void *ctx)
ctx               206 fs/btrfs/props.c 			iterator(ctx, handler, value_buf, data_len);
ctx               225 fs/btrfs/props.c static void inode_prop_iterator(void *ctx,
ctx               230 fs/btrfs/props.c 	struct inode *inode = ctx;
ctx               284 fs/btrfs/send.c 				  void *ctx);
ctx               879 fs/btrfs/send.c 				   void *ctx);
ctx               891 fs/btrfs/send.c 			     iterate_inode_ref_t iterate, void *ctx)
ctx               985 fs/btrfs/send.c 		ret = iterate(num, dir, index, p, ctx);
ctx              1000 fs/btrfs/send.c 				  u8 type, void *ctx);
ctx              1010 fs/btrfs/send.c 			    iterate_dir_item_t iterate, void *ctx)
ctx              1106 fs/btrfs/send.c 				data_len, type, ctx);
ctx              1123 fs/btrfs/send.c 			    struct fs_path *p, void *ctx)
ctx              1126 fs/btrfs/send.c 	struct fs_path *pt = ctx;
ctx              4209 fs/btrfs/send.c 		      void *ctx, struct list_head *refs)
ctx              4212 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4242 fs/btrfs/send.c 			    void *ctx)
ctx              4244 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4245 fs/btrfs/send.c 	return record_ref(sctx->send_root, dir, name, ctx, &sctx->new_refs);
ctx              4251 fs/btrfs/send.c 				void *ctx)
ctx              4253 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4254 fs/btrfs/send.c 	return record_ref(sctx->parent_root, dir, name, ctx,
ctx              4298 fs/btrfs/send.c 	struct find_ref_ctx *ctx = ctx_;
ctx              4302 fs/btrfs/send.c 	if (dir == ctx->dir && fs_path_len(name) == fs_path_len(ctx->name) &&
ctx              4303 fs/btrfs/send.c 	    strncmp(name->start, ctx->name->start, fs_path_len(name)) == 0) {
ctx              4308 fs/btrfs/send.c 		ret = get_inode_info(ctx->root, dir, NULL, &dir_gen, NULL,
ctx              4312 fs/btrfs/send.c 		if (dir_gen != ctx->dir_gen)
ctx              4314 fs/btrfs/send.c 		ctx->found_idx = num;
ctx              4326 fs/btrfs/send.c 	struct find_ref_ctx ctx;
ctx              4328 fs/btrfs/send.c 	ctx.dir = dir;
ctx              4329 fs/btrfs/send.c 	ctx.name = name;
ctx              4330 fs/btrfs/send.c 	ctx.dir_gen = dir_gen;
ctx              4331 fs/btrfs/send.c 	ctx.found_idx = -1;
ctx              4332 fs/btrfs/send.c 	ctx.root = root;
ctx              4334 fs/btrfs/send.c 	ret = iterate_inode_ref(root, path, key, 0, __find_iref, &ctx);
ctx              4338 fs/btrfs/send.c 	if (ctx.found_idx == -1)
ctx              4341 fs/btrfs/send.c 	return ctx.found_idx;
ctx              4346 fs/btrfs/send.c 				    void *ctx)
ctx              4350 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4369 fs/btrfs/send.c 					void *ctx)
ctx              4373 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4532 fs/btrfs/send.c 			       u8 type, void *ctx)
ctx              4535 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4573 fs/btrfs/send.c 				   u8 type, void *ctx)
ctx              4576 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4623 fs/btrfs/send.c 	struct find_xattr_ctx *ctx = vctx;
ctx              4625 fs/btrfs/send.c 	if (name_len == ctx->name_len &&
ctx              4626 fs/btrfs/send.c 	    strncmp(name, ctx->name, name_len) == 0) {
ctx              4627 fs/btrfs/send.c 		ctx->found_idx = num;
ctx              4628 fs/btrfs/send.c 		ctx->found_data_len = data_len;
ctx              4629 fs/btrfs/send.c 		ctx->found_data = kmemdup(data, data_len, GFP_KERNEL);
ctx              4630 fs/btrfs/send.c 		if (!ctx->found_data)
ctx              4644 fs/btrfs/send.c 	struct find_xattr_ctx ctx;
ctx              4646 fs/btrfs/send.c 	ctx.name = name;
ctx              4647 fs/btrfs/send.c 	ctx.name_len = name_len;
ctx              4648 fs/btrfs/send.c 	ctx.found_idx = -1;
ctx              4649 fs/btrfs/send.c 	ctx.found_data = NULL;
ctx              4650 fs/btrfs/send.c 	ctx.found_data_len = 0;
ctx              4652 fs/btrfs/send.c 	ret = iterate_dir_item(root, path, __find_xattr, &ctx);
ctx              4656 fs/btrfs/send.c 	if (ctx.found_idx == -1)
ctx              4659 fs/btrfs/send.c 		*data = ctx.found_data;
ctx              4660 fs/btrfs/send.c 		*data_len = ctx.found_data_len;
ctx              4662 fs/btrfs/send.c 		kfree(ctx.found_data);
ctx              4664 fs/btrfs/send.c 	return ctx.found_idx;
ctx              4671 fs/btrfs/send.c 				       u8 type, void *ctx)
ctx              4674 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4683 fs/btrfs/send.c 				data_len, type, ctx);
ctx              4688 fs/btrfs/send.c 					data, data_len, type, ctx);
ctx              4701 fs/btrfs/send.c 					   u8 type, void *ctx)
ctx              4704 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              4710 fs/btrfs/send.c 				data_len, type, ctx);
ctx              6035 fs/btrfs/send.c 			     void *ctx)
ctx              6037 fs/btrfs/send.c 	struct parent_paths_ctx *ppctx = ctx;
ctx              6052 fs/btrfs/send.c 	struct parent_paths_ctx ctx;
ctx              6066 fs/btrfs/send.c 	ctx.refs = &deleted_refs;
ctx              6067 fs/btrfs/send.c 	ctx.sctx = sctx;
ctx              6090 fs/btrfs/send.c 					record_parent_ref, &ctx);
ctx              6454 fs/btrfs/send.c 		      void *ctx)
ctx              6457 fs/btrfs/send.c 	struct send_ctx *sctx = ctx;
ctx              6663 fs/btrfs/send.c 			btrfs_changed_cb_t changed_cb, void *ctx)
ctx              6815 fs/btrfs/send.c 						ctx);
ctx              6826 fs/btrfs/send.c 						ctx);
ctx              6840 fs/btrfs/send.c 						ctx);
ctx              6848 fs/btrfs/send.c 						ctx);
ctx              6863 fs/btrfs/send.c 						 &left_key, result, ctx);
ctx                40 fs/btrfs/tests/btrfs-tests.c 	struct pseudo_fs_context *ctx = init_pseudo(fc, BTRFS_TEST_MAGIC);
ctx                41 fs/btrfs/tests/btrfs-tests.c 	if (!ctx)
ctx                43 fs/btrfs/tests/btrfs-tests.c 	ctx->ops = &btrfs_test_super_ops;
ctx                99 fs/btrfs/tree-log.c 			   struct btrfs_log_ctx *ctx);
ctx               139 fs/btrfs/tree-log.c 			   struct btrfs_log_ctx *ctx)
ctx               176 fs/btrfs/tree-log.c 	if (ctx) {
ctx               178 fs/btrfs/tree-log.c 		list_add_tail(&ctx->list, &root->log_ctxs[index]);
ctx               179 fs/btrfs/tree-log.c 		ctx->log_transid = root->log_transid;
ctx              2996 fs/btrfs/tree-log.c 					struct btrfs_log_ctx *ctx)
ctx              2998 fs/btrfs/tree-log.c 	if (!ctx)
ctx              3002 fs/btrfs/tree-log.c 	list_del_init(&ctx->list);
ctx              3013 fs/btrfs/tree-log.c 	struct btrfs_log_ctx *ctx;
ctx              3016 fs/btrfs/tree-log.c 	list_for_each_entry_safe(ctx, safe, &root->log_ctxs[index], list) {
ctx              3017 fs/btrfs/tree-log.c 		list_del_init(&ctx->list);
ctx              3018 fs/btrfs/tree-log.c 		ctx->log_ret = error;
ctx              3037 fs/btrfs/tree-log.c 		   struct btrfs_root *root, struct btrfs_log_ctx *ctx)
ctx              3052 fs/btrfs/tree-log.c 	log_transid = ctx->log_transid;
ctx              3055 fs/btrfs/tree-log.c 		return ctx->log_ret;
ctx              3062 fs/btrfs/tree-log.c 		return ctx->log_ret;
ctx              3570 fs/btrfs/tree-log.c 			  struct btrfs_log_ctx *ctx,
ctx              3700 fs/btrfs/tree-log.c 			if (ctx &&
ctx              3704 fs/btrfs/tree-log.c 				ctx->log_new_dentries = true;
ctx              3770 fs/btrfs/tree-log.c 			  struct btrfs_log_ctx *ctx)
ctx              3782 fs/btrfs/tree-log.c 				ctx, min_key, &max_key);
ctx              4146 fs/btrfs/tree-log.c 			  struct btrfs_log_ctx *ctx)
ctx              4371 fs/btrfs/tree-log.c 				     struct btrfs_log_ctx *ctx,
ctx              4452 fs/btrfs/tree-log.c 		ret = log_one_extent(trans, inode, root, em, path, ctx);
ctx              4837 fs/btrfs/tree-log.c 				  struct btrfs_log_ctx *ctx,
ctx              4888 fs/btrfs/tree-log.c 						      0, LLONG_MAX, ctx);
ctx              4946 fs/btrfs/tree-log.c 				      LOG_OTHER_INODE, 0, LLONG_MAX, ctx);
ctx              5029 fs/btrfs/tree-log.c 			   struct btrfs_log_ctx *ctx)
ctx              5205 fs/btrfs/tree-log.c 			} else if (ret > 0 && ctx &&
ctx              5206 fs/btrfs/tree-log.c 				   other_ino != btrfs_ino(BTRFS_I(ctx->inode))) {
ctx              5224 fs/btrfs/tree-log.c 						ctx, other_ino, other_parent);
ctx              5334 fs/btrfs/tree-log.c 						ctx, start, end);
ctx              5373 fs/btrfs/tree-log.c 					ctx);
ctx              5547 fs/btrfs/tree-log.c 				struct btrfs_log_ctx *ctx)
ctx              5629 fs/btrfs/tree-log.c 			ctx->log_new_dentries = false;
ctx              5633 fs/btrfs/tree-log.c 					      log_mode, 0, LLONG_MAX, ctx);
ctx              5640 fs/btrfs/tree-log.c 			if (ctx->log_new_dentries) {
ctx              5677 fs/btrfs/tree-log.c 				 struct btrfs_log_ctx *ctx)
ctx              5774 fs/btrfs/tree-log.c 			if (ctx)
ctx              5775 fs/btrfs/tree-log.c 				ctx->log_new_dentries = false;
ctx              5777 fs/btrfs/tree-log.c 					      LOG_INODE_ALL, 0, LLONG_MAX, ctx);
ctx              5781 fs/btrfs/tree-log.c 			if (!ret && ctx && ctx->log_new_dentries)
ctx              5783 fs/btrfs/tree-log.c 						   BTRFS_I(dir_inode), ctx);
ctx              5799 fs/btrfs/tree-log.c 			     struct btrfs_log_ctx *ctx)
ctx              5826 fs/btrfs/tree-log.c 					      0, LLONG_MAX, ctx);
ctx              5862 fs/btrfs/tree-log.c 				  struct btrfs_log_ctx *ctx)
ctx              5881 fs/btrfs/tree-log.c 					LOG_INODE_EXISTS, 0, LLONG_MAX, ctx);
ctx              5900 fs/btrfs/tree-log.c 				 struct btrfs_log_ctx *ctx)
ctx              5913 fs/btrfs/tree-log.c 		return log_new_ancestors_fast(trans, inode, parent, ctx);
ctx              5968 fs/btrfs/tree-log.c 		ret = log_new_ancestors(trans, root, path, ctx);
ctx              5992 fs/btrfs/tree-log.c 				  struct btrfs_log_ctx *ctx)
ctx              6039 fs/btrfs/tree-log.c 	ret = start_log_trans(trans, root, ctx);
ctx              6043 fs/btrfs/tree-log.c 	ret = btrfs_log_inode(trans, root, inode, inode_only, start, end, ctx);
ctx              6060 fs/btrfs/tree-log.c 	if (S_ISDIR(inode->vfs_inode.i_mode) && ctx && ctx->log_new_dentries)
ctx              6105 fs/btrfs/tree-log.c 		ret = btrfs_log_all_parents(trans, inode, ctx);
ctx              6110 fs/btrfs/tree-log.c 	ret = log_all_new_ancestors(trans, inode, parent, ctx);
ctx              6115 fs/btrfs/tree-log.c 		ret = log_new_dir_dentries(trans, root, inode, ctx);
ctx              6125 fs/btrfs/tree-log.c 		btrfs_remove_log_ctx(root, ctx);
ctx              6141 fs/btrfs/tree-log.c 			  struct btrfs_log_ctx *ctx)
ctx              6147 fs/btrfs/tree-log.c 				     start, end, LOG_INODE_ALL, ctx);
ctx              6434 fs/btrfs/tree-log.c 			bool sync_log, struct btrfs_log_ctx *ctx)
ctx              6472 fs/btrfs/tree-log.c 	ASSERT(ctx);
ctx              6474 fs/btrfs/tree-log.c 				     LOG_INODE_EXISTS, ctx);
ctx                23 fs/btrfs/tree-log.h static inline void btrfs_init_log_ctx(struct btrfs_log_ctx *ctx,
ctx                26 fs/btrfs/tree-log.h 	ctx->log_ret = 0;
ctx                27 fs/btrfs/tree-log.h 	ctx->log_transid = 0;
ctx                28 fs/btrfs/tree-log.h 	ctx->log_new_dentries = false;
ctx                29 fs/btrfs/tree-log.h 	ctx->inode = inode;
ctx                30 fs/btrfs/tree-log.h 	INIT_LIST_HEAD(&ctx->list);
ctx                45 fs/btrfs/tree-log.h 		   struct btrfs_root *root, struct btrfs_log_ctx *ctx);
ctx                54 fs/btrfs/tree-log.h 			  struct btrfs_log_ctx *ctx);
ctx                80 fs/btrfs/tree-log.h 			bool sync_log, struct btrfs_log_ctx *ctx);
ctx               201 fs/ceph/caps.c 		      struct ceph_cap_reservation *ctx, int need)
ctx               213 fs/ceph/caps.c 	dout("reserve caps ctx=%p need=%d\n", ctx, need);
ctx               274 fs/ceph/caps.c 			ctx, need, have + alloc);
ctx               281 fs/ceph/caps.c 		ctx->count = need;
ctx               282 fs/ceph/caps.c 		ctx->used = 0;
ctx               300 fs/ceph/caps.c 	     ctx, mdsc->caps_total_count, mdsc->caps_use_count,
ctx               306 fs/ceph/caps.c 			 struct ceph_cap_reservation *ctx)
ctx               309 fs/ceph/caps.c 	if (!ctx->count)
ctx               312 fs/ceph/caps.c 	dout("unreserve caps ctx=%p count=%d\n", ctx, ctx->count);
ctx               314 fs/ceph/caps.c 	__ceph_unreserve_caps(mdsc, ctx->count);
ctx               315 fs/ceph/caps.c 	ctx->count = 0;
ctx               323 fs/ceph/caps.c 		ceph_reclaim_caps_nr(mdsc, ctx->used);
ctx               327 fs/ceph/caps.c 			      struct ceph_cap_reservation *ctx)
ctx               332 fs/ceph/caps.c 	if (!ctx) {
ctx               361 fs/ceph/caps.c 	     ctx, ctx->count, mdsc->caps_total_count, mdsc->caps_use_count,
ctx               363 fs/ceph/caps.c 	BUG_ON(!ctx->count);
ctx               364 fs/ceph/caps.c 	BUG_ON(ctx->count > mdsc->caps_reserve_count);
ctx               367 fs/ceph/caps.c 	ctx->count--;
ctx               368 fs/ceph/caps.c 	ctx->used++;
ctx               177 fs/ceph/dir.c  static int __dcache_readdir(struct file *file,  struct dir_context *ctx,
ctx               189 fs/ceph/dir.c  	dout("__dcache_readdir %p v%u at %llx\n", dir, (unsigned)shared_gen, ctx->pos);
ctx               192 fs/ceph/dir.c  	if (ctx->pos > 2) {
ctx               209 fs/ceph/dir.c  			if (fpos_cmp(di->offset, ctx->pos) < 0) {
ctx               246 fs/ceph/dir.c  		if (fpos_cmp(ctx->pos, di->offset) <= 0) {
ctx               255 fs/ceph/dir.c  			ctx->pos = di->offset;
ctx               256 fs/ceph/dir.c  			if (!dir_emit(ctx, dentry->d_name.name,
ctx               265 fs/ceph/dir.c  			ctx->pos++;
ctx               303 fs/ceph/dir.c  static int ceph_readdir(struct file *file, struct dir_context *ctx)
ctx               315 fs/ceph/dir.c  	dout("readdir %p file %p pos %llx\n", inode, file, ctx->pos);
ctx               320 fs/ceph/dir.c  	if (ctx->pos == 0) {
ctx               322 fs/ceph/dir.c  		if (!dir_emit(ctx, ".", 1, 
ctx               326 fs/ceph/dir.c  		ctx->pos = 1;
ctx               328 fs/ceph/dir.c  	if (ctx->pos == 1) {
ctx               331 fs/ceph/dir.c  		if (!dir_emit(ctx, "..", 2,
ctx               335 fs/ceph/dir.c  		ctx->pos = 2;
ctx               347 fs/ceph/dir.c  		err = __dcache_readdir(file, ctx, shared_gen);
ctx               357 fs/ceph/dir.c  	if (need_send_readdir(dfi, ctx->pos)) {
ctx               368 fs/ceph/dir.c  		if (is_hash_order(ctx->pos)) {
ctx               372 fs/ceph/dir.c  				frag = ceph_choose_frag(ci, fpos_hash(ctx->pos),
ctx               375 fs/ceph/dir.c  			frag = fpos_frag(ctx->pos);
ctx               401 fs/ceph/dir.c  		} else if (is_hash_order(ctx->pos)) {
ctx               403 fs/ceph/dir.c  				cpu_to_le32(fpos_hash(ctx->pos));
ctx               435 fs/ceph/dir.c  				ctx->pos = ceph_make_fpos(frag,
ctx               482 fs/ceph/dir.c  	     dfi->frag, rinfo->dir_nr, ctx->pos,
ctx               491 fs/ceph/dir.c  			if (rinfo->dir_entries[i + step].offset < ctx->pos) {
ctx               505 fs/ceph/dir.c  		BUG_ON(rde->offset < ctx->pos);
ctx               507 fs/ceph/dir.c  		ctx->pos = rde->offset;
ctx               509 fs/ceph/dir.c  		     i, rinfo->dir_nr, ctx->pos,
ctx               518 fs/ceph/dir.c  		if (!dir_emit(ctx, rde->name, rde->name_len,
ctx               523 fs/ceph/dir.c  		ctx->pos++;
ctx               537 fs/ceph/dir.c  		if (is_hash_order(ctx->pos)) {
ctx               540 fs/ceph/dir.c  			if (new_pos > ctx->pos)
ctx               541 fs/ceph/dir.c  				ctx->pos = new_pos;
ctx               544 fs/ceph/dir.c  			ctx->pos = ceph_make_fpos(frag, dfi->next_offset,
ctx               335 fs/ceph/locks.c 	struct file_lock_context *ctx;
ctx               340 fs/ceph/locks.c 	ctx = inode->i_flctx;
ctx               341 fs/ceph/locks.c 	if (ctx) {
ctx               342 fs/ceph/locks.c 		spin_lock(&ctx->flc_lock);
ctx               343 fs/ceph/locks.c 		list_for_each_entry(lock, &ctx->flc_posix, fl_list)
ctx               345 fs/ceph/locks.c 		list_for_each_entry(lock, &ctx->flc_flock, fl_list)
ctx               347 fs/ceph/locks.c 		spin_unlock(&ctx->flc_lock);
ctx               394 fs/ceph/locks.c 	struct file_lock_context *ctx = inode->i_flctx;
ctx               403 fs/ceph/locks.c 	if (!ctx)
ctx               406 fs/ceph/locks.c 	spin_lock(&ctx->flc_lock);
ctx               407 fs/ceph/locks.c 	list_for_each_entry(lock, &ctx->flc_posix, fl_list) {
ctx               418 fs/ceph/locks.c 	list_for_each_entry(lock, &ctx->flc_flock, fl_list) {
ctx               430 fs/ceph/locks.c 	spin_unlock(&ctx->flc_lock);
ctx               693 fs/ceph/super.h 			     struct ceph_cap_reservation *ctx, int need);
ctx               695 fs/ceph/super.h 			       struct ceph_cap_reservation *ctx);
ctx               752 fs/ceph/super.h 				       struct ceph_rw_context *ctx)
ctx               755 fs/ceph/super.h 	list_add(&ctx->list, &cf->rw_contexts);
ctx               760 fs/ceph/super.h 				       struct ceph_rw_context *ctx)
ctx               763 fs/ceph/super.h 	list_del(&ctx->list);
ctx               770 fs/ceph/super.h 	struct ceph_rw_context *ctx, *found = NULL;
ctx               772 fs/ceph/super.h 	list_for_each_entry(ctx, &cf->rw_contexts, list) {
ctx               773 fs/ceph/super.h 		if (ctx->thread == current) {
ctx               774 fs/ceph/super.h 			found = ctx;
ctx               974 fs/ceph/super.h 				     struct ceph_acl_sec_ctx *ctx);
ctx               981 fs/ceph/super.h 					    struct ceph_acl_sec_ctx *ctx)
ctx              1037 fs/ceph/super.h 				     struct ceph_cap_reservation *ctx);
ctx                98 fs/cifs/asn1.c asn1_open(struct asn1_ctx *ctx, unsigned char *buf, unsigned int len)
ctx               100 fs/cifs/asn1.c 	ctx->begin = buf;
ctx               101 fs/cifs/asn1.c 	ctx->end = buf + len;
ctx               102 fs/cifs/asn1.c 	ctx->pointer = buf;
ctx               103 fs/cifs/asn1.c 	ctx->error = ASN1_ERR_NOERROR;
ctx               107 fs/cifs/asn1.c asn1_octet_decode(struct asn1_ctx *ctx, unsigned char *ch)
ctx               109 fs/cifs/asn1.c 	if (ctx->pointer >= ctx->end) {
ctx               110 fs/cifs/asn1.c 		ctx->error = ASN1_ERR_DEC_EMPTY;
ctx               113 fs/cifs/asn1.c 	*ch = *(ctx->pointer)++;
ctx               119 fs/cifs/asn1.c asn1_enum_decode(struct asn1_ctx *ctx, __le32 *val)
ctx               123 fs/cifs/asn1.c 	if (ctx->pointer >= ctx->end) {
ctx               124 fs/cifs/asn1.c 		ctx->error = ASN1_ERR_DEC_EMPTY;
ctx               128 fs/cifs/asn1.c 	ch = *(ctx->pointer)++; /* ch has 0xa, ptr points to length octet */
ctx               130 fs/cifs/asn1.c 		*val = *(++(ctx->pointer)); /* value has enum value */
ctx               134 fs/cifs/asn1.c 	ctx->pointer++;
ctx               140 fs/cifs/asn1.c asn1_tag_decode(struct asn1_ctx *ctx, unsigned int *tag)
ctx               147 fs/cifs/asn1.c 		if (!asn1_octet_decode(ctx, &ch))
ctx               156 fs/cifs/asn1.c asn1_id_decode(struct asn1_ctx *ctx,
ctx               161 fs/cifs/asn1.c 	if (!asn1_octet_decode(ctx, &ch))
ctx               169 fs/cifs/asn1.c 		if (!asn1_tag_decode(ctx, tag))
ctx               176 fs/cifs/asn1.c asn1_length_decode(struct asn1_ctx *ctx, unsigned int *def, unsigned int *len)
ctx               180 fs/cifs/asn1.c 	if (!asn1_octet_decode(ctx, &ch))
ctx               195 fs/cifs/asn1.c 				if (!asn1_octet_decode(ctx, &ch))
ctx               205 fs/cifs/asn1.c 	if (*len > ctx->end - ctx->pointer)
ctx               212 fs/cifs/asn1.c asn1_header_decode(struct asn1_ctx *ctx,
ctx               219 fs/cifs/asn1.c 	if (!asn1_id_decode(ctx, cls, con, tag))
ctx               222 fs/cifs/asn1.c 	if (!asn1_length_decode(ctx, &def, &len))
ctx               230 fs/cifs/asn1.c 		*eoc = ctx->pointer + len;
ctx               237 fs/cifs/asn1.c asn1_eoc_decode(struct asn1_ctx *ctx, unsigned char *eoc)
ctx               242 fs/cifs/asn1.c 		if (!asn1_octet_decode(ctx, &ch))
ctx               246 fs/cifs/asn1.c 			ctx->error = ASN1_ERR_DEC_EOC_MISMATCH;
ctx               250 fs/cifs/asn1.c 		if (!asn1_octet_decode(ctx, &ch))
ctx               254 fs/cifs/asn1.c 			ctx->error = ASN1_ERR_DEC_EOC_MISMATCH;
ctx               259 fs/cifs/asn1.c 		if (ctx->pointer != eoc) {
ctx               260 fs/cifs/asn1.c 			ctx->error = ASN1_ERR_DEC_LENGTH_MISMATCH;
ctx               390 fs/cifs/asn1.c asn1_subid_decode(struct asn1_ctx *ctx, unsigned long *subid)
ctx               397 fs/cifs/asn1.c 		if (!asn1_octet_decode(ctx, &ch))
ctx               407 fs/cifs/asn1.c asn1_oid_decode(struct asn1_ctx *ctx,
ctx               414 fs/cifs/asn1.c 	size = eoc - ctx->pointer + 1;
ctx               426 fs/cifs/asn1.c 	if (!asn1_subid_decode(ctx, &subid)) {
ctx               446 fs/cifs/asn1.c 	while (ctx->pointer < eoc) {
ctx               448 fs/cifs/asn1.c 			ctx->error = ASN1_ERR_DEC_BADVALUE;
ctx               454 fs/cifs/asn1.c 		if (!asn1_subid_decode(ctx, optr++)) {
ctx               486 fs/cifs/asn1.c 	struct asn1_ctx ctx;
ctx               494 fs/cifs/asn1.c 	asn1_open(&ctx, security_blob, length);
ctx               497 fs/cifs/asn1.c 	if (asn1_header_decode(&ctx, &end, &cls, &con, &tag) == 0) {
ctx               507 fs/cifs/asn1.c 	rc = asn1_header_decode(&ctx, &end, &cls, &con, &tag);
ctx               511 fs/cifs/asn1.c 			rc = asn1_oid_decode(&ctx, end, &oid, &oidlen);
ctx               528 fs/cifs/asn1.c 	if (asn1_header_decode(&ctx, &end, &cls, &con, &tag) == 0) {
ctx               539 fs/cifs/asn1.c 	if (asn1_header_decode(&ctx, &end, &cls, &con, &tag) == 0) {
ctx               550 fs/cifs/asn1.c 	if (asn1_header_decode(&ctx, &end, &cls, &con, &tag) == 0) {
ctx               562 fs/cifs/asn1.c 	    (&ctx, &sequence_end, &cls, &con, &tag) == 0) {
ctx               573 fs/cifs/asn1.c 	while (!asn1_eoc_decode(&ctx, sequence_end)) {
ctx               574 fs/cifs/asn1.c 		rc = asn1_header_decode(&ctx, &end, &cls, &con, &tag);
ctx               580 fs/cifs/asn1.c 			if (asn1_oid_decode(&ctx, end, &oid, &oidlen)) {
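
The asn1_ctx above is essentially a bounds-checked cursor: begin/pointer/end plus an error code, with every primitive pulling octets through asn1_octet_decode() and failing once the pointer reaches end. A minimal userspace sketch of the same idiom, handling definite BER lengths only (the kernel code additionally tracks the indefinite form via *def, which this sketch omits):

#include <stddef.h>
#include <stdbool.h>

struct ber_cursor {
	const unsigned char *ptr;
	const unsigned char *end;
};

static bool ber_octet(struct ber_cursor *c, unsigned char *ch)
{
	if (c->ptr >= c->end)
		return false;		/* ASN1_ERR_DEC_EMPTY in the original */
	*ch = *c->ptr++;
	return true;
}

static bool ber_length(struct ber_cursor *c, size_t *len)
{
	unsigned char ch;

	if (!ber_octet(c, &ch))
		return false;
	if (ch < 0x80) {		/* short form: length in one octet */
		*len = ch;
	} else {			/* long form: n big-endian octets follow */
		unsigned int n = ch & 0x7f;

		*len = 0;
		while (n--) {
			if (!ber_octet(c, &ch))
				return false;
			*len = (*len << 8) | ch;
		}
	}
	/* Same sanity check as asn1_length_decode(): the claimed length
	 * must fit in what remains of the buffer. */
	return *len <= (size_t)(c->end - c->ptr);
}
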
ctx               119 fs/cifs/cifsfs.h extern int cifs_readdir(struct file *file, struct dir_context *ctx);
ctx               137 fs/cifs/cifsglob.h 	char ctx[];
ctx              1313 fs/cifs/cifsglob.h 	struct cifs_aio_ctx		*ctx;
ctx              1348 fs/cifs/cifsglob.h 	struct cifs_aio_ctx		*ctx;
ctx               579 fs/cifs/cifsproto.h int setup_aio_ctx_iter(struct cifs_aio_ctx *ctx, struct iov_iter *iter, int rw);
ctx              2685 fs/cifs/file.c 	kref_put(&wdata->ctx->refcount, cifs_aio_ctx_release);
ctx              2691 fs/cifs/file.c static void collect_uncached_write_data(struct cifs_aio_ctx *ctx);
ctx              2708 fs/cifs/file.c 	collect_uncached_write_data(wdata->ctx);
ctx              2757 fs/cifs/file.c 	struct cifs_aio_ctx *ctx)
ctx              2822 fs/cifs/file.c 		     struct cifs_aio_ctx *ctx)
ctx              2864 fs/cifs/file.c 		if (ctx->direct_io) {
ctx              2951 fs/cifs/file.c 		wdata->ctx = ctx;
ctx              2952 fs/cifs/file.c 		kref_get(&ctx->refcount);
ctx              2985 fs/cifs/file.c static void collect_uncached_write_data(struct cifs_aio_ctx *ctx)
ctx              2990 fs/cifs/file.c 	struct dentry *dentry = ctx->cfile->dentry;
ctx              2993 fs/cifs/file.c 	tcon = tlink_tcon(ctx->cfile->tlink);
ctx              2996 fs/cifs/file.c 	mutex_lock(&ctx->aio_mutex);
ctx              2998 fs/cifs/file.c 	if (list_empty(&ctx->list)) {
ctx              2999 fs/cifs/file.c 		mutex_unlock(&ctx->aio_mutex);
ctx              3003 fs/cifs/file.c 	rc = ctx->rc;
ctx              3010 fs/cifs/file.c 	list_for_each_entry_safe(wdata, tmp, &ctx->list, list) {
ctx              3013 fs/cifs/file.c 				mutex_unlock(&ctx->aio_mutex);
ctx              3020 fs/cifs/file.c 				ctx->total_len += wdata->bytes;
ctx              3025 fs/cifs/file.c 				struct iov_iter tmp_from = ctx->iter;
ctx              3030 fs/cifs/file.c 				if (ctx->direct_io)
ctx              3032 fs/cifs/file.c 						wdata, &tmp_list, ctx);
ctx              3035 fs/cifs/file.c 						 wdata->offset - ctx->pos);
ctx              3039 fs/cifs/file.c 						ctx->cfile, cifs_sb, &tmp_list,
ctx              3040 fs/cifs/file.c 						ctx);
ctx              3046 fs/cifs/file.c 				list_splice(&tmp_list, &ctx->list);
ctx              3054 fs/cifs/file.c 	cifs_stats_bytes_written(tcon, ctx->total_len);
ctx              3057 fs/cifs/file.c 	ctx->rc = (rc == 0) ? ctx->total_len : rc;
ctx              3059 fs/cifs/file.c 	mutex_unlock(&ctx->aio_mutex);
ctx              3061 fs/cifs/file.c 	if (ctx->iocb && ctx->iocb->ki_complete)
ctx              3062 fs/cifs/file.c 		ctx->iocb->ki_complete(ctx->iocb, ctx->rc, 0);
ctx              3064 fs/cifs/file.c 		complete(&ctx->done);
ctx              3075 fs/cifs/file.c 	struct cifs_aio_ctx *ctx;
ctx              3101 fs/cifs/file.c 	ctx = cifs_aio_ctx_alloc();
ctx              3102 fs/cifs/file.c 	if (!ctx)
ctx              3105 fs/cifs/file.c 	ctx->cfile = cifsFileInfo_get(cfile);
ctx              3108 fs/cifs/file.c 		ctx->iocb = iocb;
ctx              3110 fs/cifs/file.c 	ctx->pos = iocb->ki_pos;
ctx              3113 fs/cifs/file.c 		ctx->direct_io = true;
ctx              3114 fs/cifs/file.c 		ctx->iter = *from;
ctx              3115 fs/cifs/file.c 		ctx->len = len;
ctx              3117 fs/cifs/file.c 		rc = setup_aio_ctx_iter(ctx, from, WRITE);
ctx              3119 fs/cifs/file.c 			kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3125 fs/cifs/file.c 	mutex_lock(&ctx->aio_mutex);
ctx              3127 fs/cifs/file.c 	rc = cifs_write_from_iter(iocb->ki_pos, ctx->len, &saved_from,
ctx              3128 fs/cifs/file.c 				  cfile, cifs_sb, &ctx->list, ctx);
ctx              3136 fs/cifs/file.c 	if (!list_empty(&ctx->list))
ctx              3139 fs/cifs/file.c 	mutex_unlock(&ctx->aio_mutex);
ctx              3142 fs/cifs/file.c 		kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3147 fs/cifs/file.c 		kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3151 fs/cifs/file.c 	rc = wait_for_completion_killable(&ctx->done);
ctx              3153 fs/cifs/file.c 		mutex_lock(&ctx->aio_mutex);
ctx              3154 fs/cifs/file.c 		ctx->rc = rc = -EINTR;
ctx              3155 fs/cifs/file.c 		total_written = ctx->total_len;
ctx              3156 fs/cifs/file.c 		mutex_unlock(&ctx->aio_mutex);
ctx              3158 fs/cifs/file.c 		rc = ctx->rc;
ctx              3159 fs/cifs/file.c 		total_written = ctx->total_len;
ctx              3162 fs/cifs/file.c 	kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3352 fs/cifs/file.c 	kref_put(&rdata->ctx->refcount, cifs_aio_ctx_release);
ctx              3393 fs/cifs/file.c static void collect_uncached_read_data(struct cifs_aio_ctx *ctx);
ctx              3402 fs/cifs/file.c 	collect_uncached_read_data(rdata->ctx);
ctx              3483 fs/cifs/file.c 			struct cifs_aio_ctx *ctx)
ctx              3546 fs/cifs/file.c 		     struct cifs_aio_ctx *ctx)
ctx              3558 fs/cifs/file.c 	struct iov_iter direct_iov = ctx->iter;
ctx              3567 fs/cifs/file.c 	if (ctx->direct_io)
ctx              3568 fs/cifs/file.c 		iov_iter_advance(&direct_iov, offset - ctx->pos);
ctx              3586 fs/cifs/file.c 		if (ctx->direct_io) {
ctx              3655 fs/cifs/file.c 		rdata->ctx = ctx;
ctx              3656 fs/cifs/file.c 		kref_get(&ctx->refcount);
ctx              3687 fs/cifs/file.c collect_uncached_read_data(struct cifs_aio_ctx *ctx)
ctx              3690 fs/cifs/file.c 	struct iov_iter *to = &ctx->iter;
ctx              3694 fs/cifs/file.c 	cifs_sb = CIFS_SB(ctx->cfile->dentry->d_sb);
ctx              3696 fs/cifs/file.c 	mutex_lock(&ctx->aio_mutex);
ctx              3698 fs/cifs/file.c 	if (list_empty(&ctx->list)) {
ctx              3699 fs/cifs/file.c 		mutex_unlock(&ctx->aio_mutex);
ctx              3703 fs/cifs/file.c 	rc = ctx->rc;
ctx              3706 fs/cifs/file.c 	list_for_each_entry_safe(rdata, tmp, &ctx->list, list) {
ctx              3709 fs/cifs/file.c 				mutex_unlock(&ctx->aio_mutex);
ctx              3728 fs/cifs/file.c 					if (!ctx->direct_io)
ctx              3737 fs/cifs/file.c 				if (ctx->direct_io) {
ctx              3744 fs/cifs/file.c 						&tmp_list, ctx);
ctx              3750 fs/cifs/file.c 						&tmp_list, ctx);
ctx              3756 fs/cifs/file.c 				list_splice(&tmp_list, &ctx->list);
ctx              3761 fs/cifs/file.c 			else if (!ctx->direct_io)
ctx              3768 fs/cifs/file.c 			ctx->total_len += rdata->got_bytes;
ctx              3774 fs/cifs/file.c 	if (!ctx->direct_io)
ctx              3775 fs/cifs/file.c 		ctx->total_len = ctx->len - iov_iter_count(to);
ctx              3781 fs/cifs/file.c 	ctx->rc = (rc == 0) ? (ssize_t)ctx->total_len : rc;
ctx              3783 fs/cifs/file.c 	mutex_unlock(&ctx->aio_mutex);
ctx              3785 fs/cifs/file.c 	if (ctx->iocb && ctx->iocb->ki_complete)
ctx              3786 fs/cifs/file.c 		ctx->iocb->ki_complete(ctx->iocb, ctx->rc, 0);
ctx              3788 fs/cifs/file.c 		complete(&ctx->done);
ctx              3801 fs/cifs/file.c 	struct cifs_aio_ctx *ctx;
ctx              3827 fs/cifs/file.c 	ctx = cifs_aio_ctx_alloc();
ctx              3828 fs/cifs/file.c 	if (!ctx)
ctx              3831 fs/cifs/file.c 	ctx->cfile = cifsFileInfo_get(cfile);
ctx              3834 fs/cifs/file.c 		ctx->iocb = iocb;
ctx              3837 fs/cifs/file.c 		ctx->should_dirty = true;
ctx              3840 fs/cifs/file.c 		ctx->pos = offset;
ctx              3841 fs/cifs/file.c 		ctx->direct_io = true;
ctx              3842 fs/cifs/file.c 		ctx->iter = *to;
ctx              3843 fs/cifs/file.c 		ctx->len = len;
ctx              3845 fs/cifs/file.c 		rc = setup_aio_ctx_iter(ctx, to, READ);
ctx              3847 fs/cifs/file.c 			kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3850 fs/cifs/file.c 		len = ctx->len;
ctx              3854 fs/cifs/file.c 	mutex_lock(&ctx->aio_mutex);
ctx              3856 fs/cifs/file.c 	rc = cifs_send_async_read(offset, len, cfile, cifs_sb, &ctx->list, ctx);
ctx              3859 fs/cifs/file.c 	if (!list_empty(&ctx->list))
ctx              3862 fs/cifs/file.c 	mutex_unlock(&ctx->aio_mutex);
ctx              3865 fs/cifs/file.c 		kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3870 fs/cifs/file.c 		kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx              3874 fs/cifs/file.c 	rc = wait_for_completion_killable(&ctx->done);
ctx              3876 fs/cifs/file.c 		mutex_lock(&ctx->aio_mutex);
ctx              3877 fs/cifs/file.c 		ctx->rc = rc = -EINTR;
ctx              3878 fs/cifs/file.c 		total_read = ctx->total_len;
ctx              3879 fs/cifs/file.c 		mutex_unlock(&ctx->aio_mutex);
ctx              3881 fs/cifs/file.c 		rc = ctx->rc;
ctx              3882 fs/cifs/file.c 		total_read = ctx->total_len;
ctx              3885 fs/cifs/file.c 	kref_put(&ctx->refcount, cifs_aio_ctx_release);
ctx               234 fs/cifs/fscache.c static void cifs_readpage_from_fscache_complete(struct page *page, void *ctx,
ctx               791 fs/cifs/misc.c 	struct cifs_aio_ctx *ctx;
ctx               798 fs/cifs/misc.c 	ctx = kzalloc(sizeof(struct cifs_aio_ctx), GFP_KERNEL);
ctx               799 fs/cifs/misc.c 	if (!ctx)
ctx               802 fs/cifs/misc.c 	INIT_LIST_HEAD(&ctx->list);
ctx               803 fs/cifs/misc.c 	mutex_init(&ctx->aio_mutex);
ctx               804 fs/cifs/misc.c 	init_completion(&ctx->done);
ctx               805 fs/cifs/misc.c 	kref_init(&ctx->refcount);
ctx               806 fs/cifs/misc.c 	return ctx;
ctx               812 fs/cifs/misc.c 	struct cifs_aio_ctx *ctx = container_of(refcount,
ctx               815 fs/cifs/misc.c 	cifsFileInfo_put(ctx->cfile);
ctx               822 fs/cifs/misc.c 	if (ctx->bv) {
ctx               825 fs/cifs/misc.c 		for (i = 0; i < ctx->npages; i++) {
ctx               826 fs/cifs/misc.c 			if (ctx->should_dirty)
ctx               827 fs/cifs/misc.c 				set_page_dirty(ctx->bv[i].bv_page);
ctx               828 fs/cifs/misc.c 			put_page(ctx->bv[i].bv_page);
ctx               830 fs/cifs/misc.c 		kvfree(ctx->bv);
ctx               833 fs/cifs/misc.c 	kfree(ctx);
ctx               839 fs/cifs/misc.c setup_aio_ctx_iter(struct cifs_aio_ctx *ctx, struct iov_iter *iter, int rw)
ctx               854 fs/cifs/misc.c 		memcpy(&ctx->iter, iter, sizeof(struct iov_iter));
ctx               855 fs/cifs/misc.c 		ctx->len = count;
ctx               921 fs/cifs/misc.c 	ctx->bv = bv;
ctx               922 fs/cifs/misc.c 	ctx->len = saved_len - count;
ctx               923 fs/cifs/misc.c 	ctx->npages = npages;
ctx               924 fs/cifs/misc.c 	iov_iter_bvec(&ctx->iter, rw, ctx->bv, npages, ctx->len);
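
The cifs_aio_ctx lifetime above is reference-counted with a kref: kref_init() at allocation, a kref_get() for every request that stores a ctx pointer (the wdata->ctx/rdata->ctx assignments in fs/cifs/file.c), and kref_put(..., cifs_aio_ctx_release) on each completion path, so whichever side finishes last frees the context. A minimal userspace analogue of that pattern, sketched with C11 atomics rather than the kernel's kref:

#include <stdatomic.h>
#include <stdlib.h>

struct aio_ctx {
	atomic_int refcount;
	/* ... iov, page vector, completion state ... */
};

static struct aio_ctx *ctx_alloc(void)
{
	struct aio_ctx *ctx = calloc(1, sizeof(*ctx));

	if (ctx)
		atomic_init(&ctx->refcount, 1);	/* kref_init() */
	return ctx;
}

static void ctx_get(struct aio_ctx *ctx)	/* kref_get() */
{
	atomic_fetch_add_explicit(&ctx->refcount, 1, memory_order_relaxed);
}

static void ctx_put(struct aio_ctx *ctx)	/* kref_put() */
{
	/* Acquire/release ordering so the freeing thread observes every
	 * write made before the other side dropped its reference. */
	if (atomic_fetch_sub_explicit(&ctx->refcount, 1,
				      memory_order_acq_rel) == 1)
		free(ctx);			/* cifs_aio_ctx_release() */
}
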
ctx               703 fs/cifs/readdir.c 		struct dir_context *ctx,
ctx               784 fs/cifs/readdir.c 	return !dir_emit(ctx, name.name, name.len, ino, fattr.cf_dtype);
ctx               788 fs/cifs/readdir.c int cifs_readdir(struct file *file, struct dir_context *ctx)
ctx               814 fs/cifs/readdir.c 	if (!dir_emit_dots(file, ctx))
ctx               835 fs/cifs/readdir.c 	rc = find_cifs_entry(xid, tcon, ctx->pos, file, &current_entry,
ctx               841 fs/cifs/readdir.c 		cifs_dbg(FYI, "entry %lld found\n", ctx->pos);
ctx               871 fs/cifs/readdir.c 		rc = cifs_filldir(current_entry, file, ctx,
ctx               879 fs/cifs/readdir.c 		ctx->pos++;
ctx               880 fs/cifs/readdir.c 		if (ctx->pos ==
ctx               883 fs/cifs/readdir.c 				 ctx->pos, tmp_buf);
ctx                62 fs/cifs/smbencrypt.c 	struct des_ctx ctx;
ctx                71 fs/cifs/smbencrypt.c 	des_expand_key(&ctx, key2, DES_KEY_SIZE);
ctx                72 fs/cifs/smbencrypt.c 	des_encrypt(&ctx, out, in);
ctx                73 fs/cifs/smbencrypt.c 	memzero_explicit(&ctx, sizeof(ctx));
ctx               347 fs/coda/dir.c  static int coda_venus_readdir(struct file *coda_file, struct dir_context *ctx)
ctx               367 fs/coda/dir.c  	if (!dir_emit_dots(coda_file, ctx))
ctx               371 fs/coda/dir.c  		loff_t pos = ctx->pos - 2;
ctx               409 fs/coda/dir.c  			if (!dir_emit(ctx, name.name, name.len, ino, type))
ctx               414 fs/coda/dir.c  		ctx->pos += vdir->d_reclen;
ctx               422 fs/coda/dir.c  static int coda_readdir(struct file *coda_file, struct dir_context *ctx)
ctx               437 fs/coda/dir.c  				ret = host_file->f_op->iterate_shared(host_file, ctx);
ctx               442 fs/coda/dir.c  				ret = host_file->f_op->iterate(host_file, ctx);
ctx               450 fs/coda/dir.c  	return coda_venus_readdir(coda_file, ctx);
ctx              1623 fs/configfs/dir.c static int configfs_readdir(struct file *file, struct dir_context *ctx)
ctx              1632 fs/configfs/dir.c 	if (!dir_emit_dots(file, ctx))
ctx              1635 fs/configfs/dir.c 	if (ctx->pos == 2)
ctx              1672 fs/configfs/dir.c 		if (!dir_emit(ctx, name, len, ino, dt_type(next)))
ctx              1678 fs/configfs/dir.c 		ctx->pos++;
ctx               702 fs/cramfs/inode.c static int cramfs_readdir(struct file *file, struct dir_context *ctx)
ctx               710 fs/cramfs/inode.c 	if (ctx->pos >= inode->i_size)
ctx               712 fs/cramfs/inode.c 	offset = ctx->pos;
ctx               753 fs/cramfs/inode.c 		if (!dir_emit(ctx, buf, namelen, ino, mode >> 12))
ctx               756 fs/cramfs/inode.c 		ctx->pos = offset = nextoffset;
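
The readdir implementations above (ceph, cifs, coda, configfs, cramfs, and ext2/ext4/f2fs further down) all follow the same contract with the VFS: emit entries through dir_emit(), stop as soon as it returns false because the user's getdents buffer is full, and leave ctx->pos pointing at the first entry that was not emitted so the next call resumes there. A schematic sketch of that contract, with a simplified dir_emit signature (the real one also takes namelen, inode number, and type):

#include <stdbool.h>

struct dir_context { long pos; };

/* Stand-in for the kernel's dir_emit(): returns false once the
 * destination buffer is full. */
static bool dir_emit(struct dir_context *ctx, const char *name)
{
	(void)ctx; (void)name;
	return true;
}

static int example_readdir(struct dir_context *ctx,
			   const char **names, long n)
{
	for (long i = ctx->pos; i < n; i++) {
		if (!dir_emit(ctx, names[i]))
			return 0;	/* ctx->pos == i: resume at this entry */
		ctx->pos = i + 1;	/* advance only after a successful emit */
	}
	return 0;
}
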
ctx                55 fs/crypto/bio.c 	struct fscrypt_ctx *ctx = container_of(work, struct fscrypt_ctx, work);
ctx                56 fs/crypto/bio.c 	struct bio *bio = ctx->bio;
ctx                59 fs/crypto/bio.c 	fscrypt_release_ctx(ctx);
ctx                63 fs/crypto/bio.c void fscrypt_enqueue_decrypt_bio(struct fscrypt_ctx *ctx, struct bio *bio)
ctx                65 fs/crypto/bio.c 	INIT_WORK(&ctx->work, completion_pages);
ctx                66 fs/crypto/bio.c 	ctx->bio = bio;
ctx                67 fs/crypto/bio.c 	fscrypt_enqueue_decrypt_work(&ctx->work);
ctx                68 fs/crypto/crypto.c void fscrypt_release_ctx(struct fscrypt_ctx *ctx)
ctx                72 fs/crypto/crypto.c 	if (ctx->flags & FS_CTX_REQUIRES_FREE_ENCRYPT_FL) {
ctx                73 fs/crypto/crypto.c 		kmem_cache_free(fscrypt_ctx_cachep, ctx);
ctx                76 fs/crypto/crypto.c 		list_add(&ctx->free_list, &fscrypt_free_ctxs);
ctx                92 fs/crypto/crypto.c 	struct fscrypt_ctx *ctx;
ctx               100 fs/crypto/crypto.c 	ctx = list_first_entry_or_null(&fscrypt_free_ctxs,
ctx               102 fs/crypto/crypto.c 	if (ctx)
ctx               103 fs/crypto/crypto.c 		list_del(&ctx->free_list);
ctx               105 fs/crypto/crypto.c 	if (!ctx) {
ctx               106 fs/crypto/crypto.c 		ctx = kmem_cache_zalloc(fscrypt_ctx_cachep, gfp_flags);
ctx               107 fs/crypto/crypto.c 		if (!ctx)
ctx               109 fs/crypto/crypto.c 		ctx->flags |= FS_CTX_REQUIRES_FREE_ENCRYPT_FL;
ctx               111 fs/crypto/crypto.c 		ctx->flags &= ~FS_CTX_REQUIRES_FREE_ENCRYPT_FL;
ctx               113 fs/crypto/crypto.c 	return ctx;
ctx               432 fs/crypto/crypto.c 		struct fscrypt_ctx *ctx;
ctx               434 fs/crypto/crypto.c 		ctx = kmem_cache_zalloc(fscrypt_ctx_cachep, GFP_NOFS);
ctx               435 fs/crypto/crypto.c 		if (!ctx)
ctx               437 fs/crypto/crypto.c 		list_add(&ctx->free_list, &fscrypt_free_ctxs);
ctx                65 fs/crypto/fscrypt_private.h static inline int fscrypt_context_size(const union fscrypt_context *ctx)
ctx                67 fs/crypto/fscrypt_private.h 	switch (ctx->version) {
ctx                69 fs/crypto/fscrypt_private.h 		BUILD_BUG_ON(sizeof(ctx->v1) != 28);
ctx                70 fs/crypto/fscrypt_private.h 		return sizeof(ctx->v1);
ctx                72 fs/crypto/fscrypt_private.h 		BUILD_BUG_ON(sizeof(ctx->v2) != 40);
ctx                73 fs/crypto/fscrypt_private.h 		return sizeof(ctx->v2);
ctx               424 fs/crypto/keysetup.c 	union fscrypt_context ctx;
ctx               436 fs/crypto/keysetup.c 	res = inode->i_sb->s_cop->get_context(inode, &ctx, sizeof(ctx));
ctx               446 fs/crypto/keysetup.c 		memset(&ctx, 0, sizeof(ctx));
ctx               447 fs/crypto/keysetup.c 		ctx.version = FSCRYPT_CONTEXT_V1;
ctx               448 fs/crypto/keysetup.c 		ctx.v1.contents_encryption_mode = FSCRYPT_MODE_AES_256_XTS;
ctx               449 fs/crypto/keysetup.c 		ctx.v1.filenames_encryption_mode = FSCRYPT_MODE_AES_256_CTS;
ctx               450 fs/crypto/keysetup.c 		memset(ctx.v1.master_key_descriptor, 0x42,
ctx               452 fs/crypto/keysetup.c 		res = sizeof(ctx.v1);
ctx               461 fs/crypto/keysetup.c 	res = fscrypt_policy_from_context(&crypt_info->ci_policy, &ctx, res);
ctx               468 fs/crypto/keysetup.c 	switch (ctx.version) {
ctx               470 fs/crypto/keysetup.c 		memcpy(crypt_info->ci_nonce, ctx.v1.nonce,
ctx               474 fs/crypto/keysetup.c 		memcpy(crypt_info->ci_nonce, ctx.v2.nonce,
ctx               115 fs/crypto/policy.c 		struct fscrypt_context_v1 *ctx = &ctx_u->v1;
ctx               117 fs/crypto/policy.c 		ctx->version = FSCRYPT_CONTEXT_V1;
ctx               118 fs/crypto/policy.c 		ctx->contents_encryption_mode =
ctx               120 fs/crypto/policy.c 		ctx->filenames_encryption_mode =
ctx               122 fs/crypto/policy.c 		ctx->flags = policy->flags;
ctx               123 fs/crypto/policy.c 		memcpy(ctx->master_key_descriptor,
ctx               125 fs/crypto/policy.c 		       sizeof(ctx->master_key_descriptor));
ctx               126 fs/crypto/policy.c 		get_random_bytes(ctx->nonce, sizeof(ctx->nonce));
ctx               127 fs/crypto/policy.c 		return sizeof(*ctx);
ctx               131 fs/crypto/policy.c 		struct fscrypt_context_v2 *ctx = &ctx_u->v2;
ctx               133 fs/crypto/policy.c 		ctx->version = FSCRYPT_CONTEXT_V2;
ctx               134 fs/crypto/policy.c 		ctx->contents_encryption_mode =
ctx               136 fs/crypto/policy.c 		ctx->filenames_encryption_mode =
ctx               138 fs/crypto/policy.c 		ctx->flags = policy->flags;
ctx               139 fs/crypto/policy.c 		memcpy(ctx->master_key_identifier,
ctx               141 fs/crypto/policy.c 		       sizeof(ctx->master_key_identifier));
ctx               142 fs/crypto/policy.c 		get_random_bytes(ctx->nonce, sizeof(ctx->nonce));
ctx               143 fs/crypto/policy.c 		return sizeof(*ctx);
ctx               171 fs/crypto/policy.c 		const struct fscrypt_context_v1 *ctx = &ctx_u->v1;
ctx               176 fs/crypto/policy.c 			ctx->contents_encryption_mode;
ctx               178 fs/crypto/policy.c 			ctx->filenames_encryption_mode;
ctx               179 fs/crypto/policy.c 		policy->flags = ctx->flags;
ctx               181 fs/crypto/policy.c 		       ctx->master_key_descriptor,
ctx               186 fs/crypto/policy.c 		const struct fscrypt_context_v2 *ctx = &ctx_u->v2;
ctx               191 fs/crypto/policy.c 			ctx->contents_encryption_mode;
ctx               193 fs/crypto/policy.c 			ctx->filenames_encryption_mode;
ctx               194 fs/crypto/policy.c 		policy->flags = ctx->flags;
ctx               195 fs/crypto/policy.c 		memcpy(policy->__reserved, ctx->__reserved,
ctx               198 fs/crypto/policy.c 		       ctx->master_key_identifier,
ctx               211 fs/crypto/policy.c 	union fscrypt_context ctx;
ctx               224 fs/crypto/policy.c 	ret = inode->i_sb->s_cop->get_context(inode, &ctx, sizeof(ctx));
ctx               228 fs/crypto/policy.c 	return fscrypt_policy_from_context(policy, &ctx, ret);
ctx               234 fs/crypto/policy.c 	union fscrypt_context ctx;
ctx               268 fs/crypto/policy.c 	ctxsize = fscrypt_new_context_from_policy(&ctx, policy);
ctx               270 fs/crypto/policy.c 	return inode->i_sb->s_cop->set_context(inode, &ctx, ctxsize, NULL);
ctx               472 fs/crypto/policy.c 	union fscrypt_context ctx;
ctx               485 fs/crypto/policy.c 	ctxsize = fscrypt_new_context_from_policy(&ctx, &ci->ci_policy);
ctx               487 fs/crypto/policy.c 	BUILD_BUG_ON(sizeof(ctx) != FSCRYPT_SET_CONTEXT_MAX_SIZE);
ctx               488 fs/crypto/policy.c 	res = parent->i_sb->s_cop->set_context(child, &ctx, ctxsize, fs_data);
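
fscrypt's on-disk context is a version-tagged union: the first byte selects the layout, and fscrypt_context_size() (fs/crypto/fscrypt_private.h above) maps that byte to 28 bytes for v1 or 40 for v2, which callers compare against the length actually returned by ->get_context() before trusting any version-specific field. A sketch of the same shape, with illustrative field layouts (not the real fscrypt structs) that happen to produce the same 28/40-byte sizes:

#include <stdint.h>

enum { CTX_V1 = 1, CTX_V2 = 2 };

/* Illustrative layouts only; every member is a byte array, so there is
 * no padding and sizeof() is exactly the sum of the fields. */
struct ctx_v1 {
	uint8_t version;
	uint8_t mode[2];
	uint8_t flags;
	uint8_t key[8];
	uint8_t nonce[16];
};					/* 28 bytes */

struct ctx_v2 {
	uint8_t version;
	uint8_t mode[2];
	uint8_t flags;
	uint8_t rsvd[4];
	uint8_t key[16];
	uint8_t nonce[16];
};					/* 40 bytes */

union ctx_any {
	uint8_t version;		/* always the first byte */
	struct ctx_v1 v1;
	struct ctx_v2 v2;
};

static int ctx_size(const union ctx_any *ctx)
{
	switch (ctx->version) {
	case CTX_V1: return sizeof(ctx->v1);
	case CTX_V2: return sizeof(ctx->v2);
	default:     return -1;		/* unknown version: reject */
	}
}

A reader fills a union ctx_any from the xattr, then checks that the byte count it got back equals ctx_size() before dispatching on the version, which is the same validation shape as fscrypt_policy_from_context() above.
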
ctx                48 fs/ecryptfs/file.c 	struct dir_context ctx;
ctx                57 fs/ecryptfs/file.c ecryptfs_filldir(struct dir_context *ctx, const char *lower_name,
ctx                61 fs/ecryptfs/file.c 		container_of(ctx, struct ecryptfs_getdents_callback, ctx);
ctx                87 fs/ecryptfs/file.c 	buf->caller->pos = buf->ctx.pos;
ctx               101 fs/ecryptfs/file.c static int ecryptfs_readdir(struct file *file, struct dir_context *ctx)
ctx               107 fs/ecryptfs/file.c 		.ctx.actor = ecryptfs_filldir,
ctx               108 fs/ecryptfs/file.c 		.caller = ctx,
ctx               112 fs/ecryptfs/file.c 	rc = iterate_dir(lower_file, &buf.ctx);
ctx               113 fs/ecryptfs/file.c 	ctx->pos = buf.ctx.pos;
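
ecryptfs_readdir() above shows the standard way to thread private state through a fixed callback signature: embed the struct dir_context as a member of a wrapper struct, hand &buf.ctx to iterate_dir(), and recover the wrapper inside the actor with container_of(). A self-contained userspace model of the idiom, using a simplified dir_context:

#include <stddef.h>
#include <stdio.h>

/* container_of() recovers the enclosing struct from a pointer to one of
 * its members, by subtracting the member's offset. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dir_context {
	int (*actor)(struct dir_context *);
	long pos;
};

struct getdents_callback {
	struct dir_context ctx;	/* must be embedded, not pointed to */
	int entries_seen;	/* private state riding alongside it */
};

static int my_actor(struct dir_context *ctx)
{
	struct getdents_callback *buf =
		container_of(ctx, struct getdents_callback, ctx);

	buf->entries_seen++;
	return 0;
}

int main(void)
{
	struct getdents_callback buf = { .ctx.actor = my_actor };

	buf.ctx.actor(&buf.ctx);	/* stand-in for iterate_dir(file, &buf.ctx) */
	printf("%d\n", buf.entries_seen);	/* prints 1 */
	return 0;
}
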
ctx                23 fs/efs/dir.c   static int efs_readdir(struct file *file, struct dir_context *ctx)
ctx                34 fs/efs/dir.c   	block = ctx->pos >> EFS_DIRBSIZE_BITS;
ctx                37 fs/efs/dir.c   	slot  = ctx->pos & 0xff;
ctx                81 fs/efs/dir.c   			ctx->pos = (block << EFS_DIRBSIZE_BITS) | slot;
ctx                91 fs/efs/dir.c   			if (!dir_emit(ctx, nameptr, namelen, inodenum, DT_UNKNOWN)) {
ctx               101 fs/efs/dir.c   	ctx->pos = (block << EFS_DIRBSIZE_BITS) | slot;
ctx                24 fs/erofs/dir.c static int erofs_fill_dentries(struct inode *dir, struct dir_context *ctx,
ctx                57 fs/erofs/dir.c 		if (!dir_emit(ctx, de_name, de_namelen,
ctx                68 fs/erofs/dir.c static int erofs_readdir(struct file *f, struct dir_context *ctx)
ctx                73 fs/erofs/dir.c 	unsigned int i = ctx->pos / EROFS_BLKSIZ;
ctx                74 fs/erofs/dir.c 	unsigned int ofs = ctx->pos % EROFS_BLKSIZ;
ctx                78 fs/erofs/dir.c 	while (ctx->pos < dirsize) {
ctx               109 fs/erofs/dir.c 				dirsize - ctx->pos + ofs, PAGE_SIZE);
ctx               120 fs/erofs/dir.c 		err = erofs_fill_dentries(dir, ctx, de, &ofs,
ctx               127 fs/erofs/dir.c 		ctx->pos = blknr_to_addr(i) + ofs;
ctx                61 fs/eventfd.c   __u64 eventfd_signal(struct eventfd_ctx *ctx, __u64 n)
ctx                76 fs/eventfd.c   	spin_lock_irqsave(&ctx->wqh.lock, flags);
ctx                78 fs/eventfd.c   	if (ULLONG_MAX - ctx->count < n)
ctx                79 fs/eventfd.c   		n = ULLONG_MAX - ctx->count;
ctx                80 fs/eventfd.c   	ctx->count += n;
ctx                81 fs/eventfd.c   	if (waitqueue_active(&ctx->wqh))
ctx                82 fs/eventfd.c   		wake_up_locked_poll(&ctx->wqh, EPOLLIN);
ctx                84 fs/eventfd.c   	spin_unlock_irqrestore(&ctx->wqh.lock, flags);
ctx                90 fs/eventfd.c   static void eventfd_free_ctx(struct eventfd_ctx *ctx)
ctx                92 fs/eventfd.c   	if (ctx->id >= 0)
ctx                93 fs/eventfd.c   		ida_simple_remove(&eventfd_ida, ctx->id);
ctx                94 fs/eventfd.c   	kfree(ctx);
ctx                99 fs/eventfd.c   	struct eventfd_ctx *ctx = container_of(kref, struct eventfd_ctx, kref);
ctx               101 fs/eventfd.c   	eventfd_free_ctx(ctx);
ctx               111 fs/eventfd.c   void eventfd_ctx_put(struct eventfd_ctx *ctx)
ctx               113 fs/eventfd.c   	kref_put(&ctx->kref, eventfd_free);
ctx               119 fs/eventfd.c   	struct eventfd_ctx *ctx = file->private_data;
ctx               121 fs/eventfd.c   	wake_up_poll(&ctx->wqh, EPOLLHUP);
ctx               122 fs/eventfd.c   	eventfd_ctx_put(ctx);
ctx               128 fs/eventfd.c   	struct eventfd_ctx *ctx = file->private_data;
ctx               132 fs/eventfd.c   	poll_wait(file, &ctx->wqh, wait);
ctx               172 fs/eventfd.c   	count = READ_ONCE(ctx->count);
ctx               184 fs/eventfd.c   static void eventfd_ctx_do_read(struct eventfd_ctx *ctx, __u64 *cnt)
ctx               186 fs/eventfd.c   	*cnt = (ctx->flags & EFD_SEMAPHORE) ? 1 : ctx->count;
ctx               187 fs/eventfd.c   	ctx->count -= *cnt;
ctx               203 fs/eventfd.c   int eventfd_ctx_remove_wait_queue(struct eventfd_ctx *ctx, wait_queue_entry_t *wait,
ctx               208 fs/eventfd.c   	spin_lock_irqsave(&ctx->wqh.lock, flags);
ctx               209 fs/eventfd.c   	eventfd_ctx_do_read(ctx, cnt);
ctx               210 fs/eventfd.c   	__remove_wait_queue(&ctx->wqh, wait);
ctx               211 fs/eventfd.c   	if (*cnt != 0 && waitqueue_active(&ctx->wqh))
ctx               212 fs/eventfd.c   		wake_up_locked_poll(&ctx->wqh, EPOLLOUT);
ctx               213 fs/eventfd.c   	spin_unlock_irqrestore(&ctx->wqh.lock, flags);
ctx               222 fs/eventfd.c   	struct eventfd_ctx *ctx = file->private_data;
ctx               230 fs/eventfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               232 fs/eventfd.c   	if (ctx->count > 0)
ctx               235 fs/eventfd.c   		__add_wait_queue(&ctx->wqh, &wait);
ctx               238 fs/eventfd.c   			if (ctx->count > 0) {
ctx               246 fs/eventfd.c   			spin_unlock_irq(&ctx->wqh.lock);
ctx               248 fs/eventfd.c   			spin_lock_irq(&ctx->wqh.lock);
ctx               250 fs/eventfd.c   		__remove_wait_queue(&ctx->wqh, &wait);
ctx               254 fs/eventfd.c   		eventfd_ctx_do_read(ctx, &ucnt);
ctx               255 fs/eventfd.c   		if (waitqueue_active(&ctx->wqh))
ctx               256 fs/eventfd.c   			wake_up_locked_poll(&ctx->wqh, EPOLLOUT);
ctx               258 fs/eventfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               269 fs/eventfd.c   	struct eventfd_ctx *ctx = file->private_data;
ctx               280 fs/eventfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               282 fs/eventfd.c   	if (ULLONG_MAX - ctx->count > ucnt)
ctx               285 fs/eventfd.c   		__add_wait_queue(&ctx->wqh, &wait);
ctx               288 fs/eventfd.c   			if (ULLONG_MAX - ctx->count > ucnt) {
ctx               296 fs/eventfd.c   			spin_unlock_irq(&ctx->wqh.lock);
ctx               298 fs/eventfd.c   			spin_lock_irq(&ctx->wqh.lock);
ctx               300 fs/eventfd.c   		__remove_wait_queue(&ctx->wqh, &wait);
ctx               304 fs/eventfd.c   		ctx->count += ucnt;
ctx               305 fs/eventfd.c   		if (waitqueue_active(&ctx->wqh))
ctx               306 fs/eventfd.c   			wake_up_locked_poll(&ctx->wqh, EPOLLIN);
ctx               308 fs/eventfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               316 fs/eventfd.c   	struct eventfd_ctx *ctx = f->private_data;
ctx               318 fs/eventfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               320 fs/eventfd.c   		   (unsigned long long)ctx->count);
ctx               321 fs/eventfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               322 fs/eventfd.c   	seq_printf(m, "eventfd-id: %d\n", ctx->id);
ctx               374 fs/eventfd.c   	struct eventfd_ctx *ctx;
ctx               378 fs/eventfd.c   	ctx = eventfd_ctx_fileget(f.file);
ctx               380 fs/eventfd.c   	return ctx;
ctx               395 fs/eventfd.c   	struct eventfd_ctx *ctx;
ctx               400 fs/eventfd.c   	ctx = file->private_data;
ctx               401 fs/eventfd.c   	kref_get(&ctx->kref);
ctx               402 fs/eventfd.c   	return ctx;
ctx               408 fs/eventfd.c   	struct eventfd_ctx *ctx;
ctx               418 fs/eventfd.c   	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               419 fs/eventfd.c   	if (!ctx)
ctx               422 fs/eventfd.c   	kref_init(&ctx->kref);
ctx               423 fs/eventfd.c   	init_waitqueue_head(&ctx->wqh);
ctx               424 fs/eventfd.c   	ctx->count = count;
ctx               425 fs/eventfd.c   	ctx->flags = flags;
ctx               426 fs/eventfd.c   	ctx->id = ida_simple_get(&eventfd_ida, 0, 0, GFP_KERNEL);
ctx               428 fs/eventfd.c   	fd = anon_inode_getfd("[eventfd]", &eventfd_fops, ctx,
ctx               431 fs/eventfd.c   		eventfd_free_ctx(ctx);
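
The read and write paths above implement a saturating 64-bit counter: eventfd_write() adds to ctx->count (never letting it reach ULLONG_MAX), while eventfd_ctx_do_read() returns either the whole count or, with EFD_SEMAPHORE, exactly 1, and decrements by whatever it returned; both sides block and wake the opposite waitqueue. The same semantics are visible from userspace through eventfd(2):

#include <sys/eventfd.h>
#include <stdint.h>
#include <unistd.h>
#include <stdio.h>

int main(void)
{
	uint64_t val;

	/* Counter mode: a read returns the whole count and resets it to 0. */
	int efd = eventfd(0, 0);
	val = 3; write(efd, &val, sizeof(val));	/* ctx->count += 3 */
	val = 4; write(efd, &val, sizeof(val));	/* ctx->count += 4 */
	read(efd, &val, sizeof(val));
	printf("counter read: %llu\n", (unsigned long long)val);	/* 7 */
	close(efd);

	/* EFD_SEMAPHORE: each read returns 1 and decrements the count. */
	int sfd = eventfd(2, EFD_SEMAPHORE);
	read(sfd, &val, sizeof(val));
	printf("semaphore read: %llu\n", (unsigned long long)val);	/* 1 */
	close(sfd);
	return 0;
}
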
ctx               119 fs/eventpoll.c 	void *ctx;
ctx               481 fs/eventpoll.c 			  void *cookie, void *ctx)
ctx               497 fs/eventpoll.c 		if (tncur->ctx == ctx &&
ctx               509 fs/eventpoll.c 	tnode.ctx = ctx;
ctx               239 fs/exportfs/expfs.c 	struct dir_context ctx;
ctx               251 fs/exportfs/expfs.c static int filldir_one(struct dir_context *ctx, const char *name, int len,
ctx               255 fs/exportfs/expfs.c 		container_of(ctx, struct getdents_callback, ctx);
ctx               289 fs/exportfs/expfs.c 		.ctx.actor = filldir_one,
ctx               326 fs/exportfs/expfs.c 		error = iterate_dir(file, &buffer.ctx);
ctx               264 fs/ext2/dir.c  ext2_readdir(struct file *file, struct dir_context *ctx)
ctx               266 fs/ext2/dir.c  	loff_t pos = ctx->pos;
ctx               291 fs/ext2/dir.c  			ctx->pos += PAGE_SIZE - offset;
ctx               298 fs/ext2/dir.c  				ctx->pos = (n<<PAGE_SHIFT) + offset;
ctx               318 fs/ext2/dir.c  				if (!dir_emit(ctx, de->name, de->name_len,
ctx               325 fs/ext2/dir.c  			ctx->pos += ext2_rec_len_from_disk(de->rec_len);
ctx               111 fs/ext4/dir.c  static int ext4_readdir(struct file *file, struct dir_context *ctx)
ctx               129 fs/ext4/dir.c  		err = ext4_dx_readdir(file, ctx);
ctx               145 fs/ext4/dir.c  		err = ext4_read_inline_dir(file, ctx,
ctx               157 fs/ext4/dir.c  	while (ctx->pos < inode->i_size) {
ctx               165 fs/ext4/dir.c  		offset = ctx->pos & (sb->s_blocksize - 1);
ctx               166 fs/ext4/dir.c  		map.m_lblk = ctx->pos >> EXT4_BLOCK_SIZE_BITS(sb);
ctx               174 fs/ext4/dir.c  			ctx->pos += map.m_len * sb->s_blocksize;
ctx               196 fs/ext4/dir.c  			if (ctx->pos > inode->i_blocks << 9)
ctx               198 fs/ext4/dir.c  			ctx->pos += sb->s_blocksize - offset;
ctx               207 fs/ext4/dir.c  					(unsigned long long)ctx->pos);
ctx               208 fs/ext4/dir.c  			ctx->pos += sb->s_blocksize - offset;
ctx               236 fs/ext4/dir.c  			ctx->pos = (ctx->pos & ~(sb->s_blocksize - 1))
ctx               241 fs/ext4/dir.c  		while (ctx->pos < inode->i_size
ctx               250 fs/ext4/dir.c  				ctx->pos = (ctx->pos |
ctx               258 fs/ext4/dir.c  					if (!dir_emit(ctx, de->name,
ctx               276 fs/ext4/dir.c  					if (!dir_emit(ctx,
ctx               283 fs/ext4/dir.c  			ctx->pos += ext4_rec_len_from_disk(de->rec_len,
ctx               286 fs/ext4/dir.c  		if ((ctx->pos < inode->i_size) && !dir_relax_shared(inode))
ctx               507 fs/ext4/dir.c  static int call_filldir(struct file *file, struct dir_context *ctx,
ctx               520 fs/ext4/dir.c  	ctx->pos = hash2pos(file, fname->hash, fname->minor_hash);
ctx               522 fs/ext4/dir.c  		if (!dir_emit(ctx, fname->name,
ctx               534 fs/ext4/dir.c  static int ext4_dx_readdir(struct file *file, struct dir_context *ctx)
ctx               542 fs/ext4/dir.c  		info = ext4_htree_create_dir_info(file, ctx->pos);
ctx               548 fs/ext4/dir.c  	if (ctx->pos == ext4_get_htree_eof(file))
ctx               552 fs/ext4/dir.c  	if (info->last_pos != ctx->pos) {
ctx               556 fs/ext4/dir.c  		info->curr_hash = pos2maj_hash(file, ctx->pos);
ctx               557 fs/ext4/dir.c  		info->curr_minor_hash = pos2min_hash(file, ctx->pos);
ctx               565 fs/ext4/dir.c  		if (call_filldir(file, ctx, info->extra_fname))
ctx               589 fs/ext4/dir.c  				ctx->pos = ext4_get_htree_eof(file);
ctx               598 fs/ext4/dir.c  		if (call_filldir(file, ctx, fname))
ctx               609 fs/ext4/dir.c  				ctx->pos = ext4_get_htree_eof(file);
ctx               617 fs/ext4/dir.c  	info->last_pos = ctx->pos;
ctx              2097 fs/ext4/ext4.h 		char ctx[4];
ctx              2100 fs/ext4/ext4.h 	BUG_ON(crypto_shash_descsize(sbi->s_chksum_driver)!=sizeof(desc.ctx));
ctx              2103 fs/ext4/ext4.h 	*(u32 *)desc.ctx = crc;
ctx              2107 fs/ext4/ext4.h 	return *(u32 *)desc.ctx;
ctx              3167 fs/ext4/ext4.h 				struct dir_context *ctx,
ctx              1440 fs/ext4/inline.c 			 struct dir_context *ctx,
ctx              1480 fs/ext4/inline.c 	offset = ctx->pos;
ctx              1531 fs/ext4/inline.c 		ctx->pos = offset;
ctx              1535 fs/ext4/inline.c 	while (ctx->pos < extra_size) {
ctx              1536 fs/ext4/inline.c 		if (ctx->pos == 0) {
ctx              1537 fs/ext4/inline.c 			if (!dir_emit(ctx, ".", 1, inode->i_ino, DT_DIR))
ctx              1539 fs/ext4/inline.c 			ctx->pos = dotdot_offset;
ctx              1543 fs/ext4/inline.c 		if (ctx->pos == dotdot_offset) {
ctx              1544 fs/ext4/inline.c 			if (!dir_emit(ctx, "..", 2, parent_ino, DT_DIR))
ctx              1546 fs/ext4/inline.c 			ctx->pos = dotdot_size;
ctx              1551 fs/ext4/inline.c 			(dir_buf + ctx->pos - extra_offset);
ctx              1553 fs/ext4/inline.c 					 extra_size, ctx->pos))
ctx              1556 fs/ext4/inline.c 			if (!dir_emit(ctx, de->name, de->name_len,
ctx              1561 fs/ext4/inline.c 		ctx->pos += ext4_rec_len_from_disk(de->rec_len, extra_size);
ctx                94 fs/ext4/readpage.c static void bio_post_read_processing(struct bio_post_read_ctx *ctx);
ctx                98 fs/ext4/readpage.c 	struct bio_post_read_ctx *ctx =
ctx               101 fs/ext4/readpage.c 	fscrypt_decrypt_bio(ctx->bio);
ctx               103 fs/ext4/readpage.c 	bio_post_read_processing(ctx);
ctx               108 fs/ext4/readpage.c 	struct bio_post_read_ctx *ctx =
ctx               110 fs/ext4/readpage.c 	struct bio *bio = ctx->bio;
ctx               120 fs/ext4/readpage.c 	mempool_free(ctx, bio_post_read_ctx_pool);
ctx               128 fs/ext4/readpage.c static void bio_post_read_processing(struct bio_post_read_ctx *ctx)
ctx               135 fs/ext4/readpage.c 	switch (++ctx->cur_step) {
ctx               137 fs/ext4/readpage.c 		if (ctx->enabled_steps & (1 << STEP_DECRYPT)) {
ctx               138 fs/ext4/readpage.c 			INIT_WORK(&ctx->work, decrypt_work);
ctx               139 fs/ext4/readpage.c 			fscrypt_enqueue_decrypt_work(&ctx->work);
ctx               142 fs/ext4/readpage.c 		ctx->cur_step++;
ctx               145 fs/ext4/readpage.c 		if (ctx->enabled_steps & (1 << STEP_VERITY)) {
ctx               146 fs/ext4/readpage.c 			INIT_WORK(&ctx->work, verity_work);
ctx               147 fs/ext4/readpage.c 			fsverity_enqueue_verify_work(&ctx->work);
ctx               150 fs/ext4/readpage.c 		ctx->cur_step++;
ctx               153 fs/ext4/readpage.c 		__read_end_io(ctx->bio);
ctx               177 fs/ext4/readpage.c 		struct bio_post_read_ctx *ctx = bio->bi_private;
ctx               179 fs/ext4/readpage.c 		ctx->cur_step = STEP_INITIAL;
ctx               180 fs/ext4/readpage.c 		bio_post_read_processing(ctx);
ctx               197 fs/ext4/readpage.c 	struct bio_post_read_ctx *ctx = NULL;
ctx               206 fs/ext4/readpage.c 		ctx = mempool_alloc(bio_post_read_ctx_pool, GFP_NOFS);
ctx               207 fs/ext4/readpage.c 		if (!ctx)
ctx               209 fs/ext4/readpage.c 		ctx->bio = bio;
ctx               210 fs/ext4/readpage.c 		ctx->enabled_steps = post_read_steps;
ctx               211 fs/ext4/readpage.c 		bio->bi_private = ctx;
ctx               213 fs/ext4/readpage.c 	return ctx;
ctx               374 fs/ext4/readpage.c 			struct bio_post_read_ctx *ctx;
ctx               380 fs/ext4/readpage.c 			ctx = get_bio_post_read_ctx(inode, bio, page->index);
ctx               381 fs/ext4/readpage.c 			if (IS_ERR(ctx)) {
ctx               389 fs/ext4/readpage.c 			bio->bi_private = ctx;
ctx              1264 fs/ext4/super.c static int ext4_get_context(struct inode *inode, void *ctx, size_t len)
ctx              1267 fs/ext4/super.c 				 EXT4_XATTR_NAME_ENCRYPTION_CONTEXT, ctx, len);
ctx              1270 fs/ext4/super.c static int ext4_set_context(struct inode *inode, const void *ctx, size_t len,
ctx              1304 fs/ext4/super.c 					    ctx, len, 0);
ctx              1333 fs/ext4/super.c 				    ctx, len, 0);
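
ext4 (above) and f2fs (below) share the same post-read pipeline: the per-bio ctx carries a bitmask of enabled steps plus a cur_step counter, bio_post_read_processing() advances the counter and hands the next step (decrypt, then verity) to a workqueue, and the work function re-enters the state machine when it finishes, falling through to completion once no further step is enabled. A compilable model of that control flow, with direct recursion standing in for the workqueue hand-off:

#include <stdio.h>

enum { STEP_INITIAL, STEP_DECRYPT, STEP_VERITY };

struct post_read_ctx {
	int cur_step;
	unsigned int enabled_steps;	/* bitmask of 1 << STEP_x */
};

static void process(struct post_read_ctx *ctx)
{
	switch (++ctx->cur_step) {
	case STEP_DECRYPT:
		if (ctx->enabled_steps & (1 << STEP_DECRYPT)) {
			printf("decrypt, then re-enter\n");
			process(ctx);	/* the kernel defers this to a workqueue */
			return;
		}
		ctx->cur_step++;
		/* fall through */
	case STEP_VERITY:
		if (ctx->enabled_steps & (1 << STEP_VERITY)) {
			printf("verify, then re-enter\n");
			process(ctx);
			return;
		}
		ctx->cur_step++;
		/* fall through */
	default:
		printf("done\n");	/* __read_end_io(ctx->bio) */
	}
}

int main(void)
{
	struct post_read_ctx ctx = { STEP_INITIAL, 1 << STEP_DECRYPT };

	process(&ctx);	/* prints: decrypt, then re-enter / done */
	return 0;
}
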
ctx               112 fs/f2fs/data.c static void bio_post_read_processing(struct bio_post_read_ctx *ctx);
ctx               116 fs/f2fs/data.c 	struct bio_post_read_ctx *ctx =
ctx               119 fs/f2fs/data.c 	fscrypt_decrypt_bio(ctx->bio);
ctx               121 fs/f2fs/data.c 	bio_post_read_processing(ctx);
ctx               126 fs/f2fs/data.c 	struct bio_post_read_ctx *ctx =
ctx               129 fs/f2fs/data.c 	fsverity_verify_bio(ctx->bio);
ctx               131 fs/f2fs/data.c 	bio_post_read_processing(ctx);
ctx               134 fs/f2fs/data.c static void bio_post_read_processing(struct bio_post_read_ctx *ctx)
ctx               141 fs/f2fs/data.c 	switch (++ctx->cur_step) {
ctx               143 fs/f2fs/data.c 		if (ctx->enabled_steps & (1 << STEP_DECRYPT)) {
ctx               144 fs/f2fs/data.c 			INIT_WORK(&ctx->work, decrypt_work);
ctx               145 fs/f2fs/data.c 			fscrypt_enqueue_decrypt_work(&ctx->work);
ctx               148 fs/f2fs/data.c 		ctx->cur_step++;
ctx               151 fs/f2fs/data.c 		if (ctx->enabled_steps & (1 << STEP_VERITY)) {
ctx               152 fs/f2fs/data.c 			INIT_WORK(&ctx->work, verity_work);
ctx               153 fs/f2fs/data.c 			fsverity_enqueue_verify_work(&ctx->work);
ctx               156 fs/f2fs/data.c 		ctx->cur_step++;
ctx               159 fs/f2fs/data.c 		__read_end_io(ctx->bio);
ctx               177 fs/f2fs/data.c 		struct bio_post_read_ctx *ctx = bio->bi_private;
ctx               179 fs/f2fs/data.c 		ctx->cur_step = STEP_INITIAL;
ctx               180 fs/f2fs/data.c 		bio_post_read_processing(ctx);
ctx               682 fs/f2fs/data.c 	struct bio_post_read_ctx *ctx;
ctx               699 fs/f2fs/data.c 		ctx = mempool_alloc(bio_post_read_ctx_pool, GFP_NOFS);
ctx               700 fs/f2fs/data.c 		if (!ctx) {
ctx               704 fs/f2fs/data.c 		ctx->bio = bio;
ctx               705 fs/f2fs/data.c 		ctx->enabled_steps = post_read_steps;
ctx               706 fs/f2fs/data.c 		bio->bi_private = ctx;
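
Both ext4 and f2fs allocate this context lazily: a read that needs neither
decryption nor verification never touches the mempool, and the context
rides along in bio->bi_private.  A hedged userspace model, with malloc()
standing in for mempool_alloc(..., GFP_NOFS):

#include <stdio.h>
#include <stdlib.h>

struct bio { void *bi_private; };

struct bio_post_read_ctx {
	struct bio *bio;
	unsigned int enabled_steps;
};

static struct bio_post_read_ctx *get_post_read_ctx(struct bio *bio,
						   unsigned int steps)
{
	struct bio_post_read_ctx *ctx = NULL;

	if (steps) {			/* plain reads skip the ctx entirely */
		ctx = malloc(sizeof(*ctx));
		if (!ctx)
			return NULL;	/* ERR_PTR(-ENOMEM) in the kernel */
		ctx->bio = bio;
		ctx->enabled_steps = steps;
		bio->bi_private = ctx;
	}
	return ctx;
}

int main(void)
{
	struct bio bio = { 0 };

	printf("plain read ctx:  %p\n", (void *)get_post_read_ctx(&bio, 0));
	printf("verity read ctx: %p\n", (void *)get_post_read_ctx(&bio, 1 << 1));
	free(bio.bi_private);
	return 0;
}
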
ctx               895 fs/f2fs/dir.c  int f2fs_fill_dentries(struct dir_context *ctx, struct f2fs_dentry_ptr *d,
ctx               907 fs/f2fs/dir.c  	bit_pos = ((unsigned long)ctx->pos % d->max);
ctx               920 fs/f2fs/dir.c  			ctx->pos = start_pos + bit_pos;
ctx               957 fs/f2fs/dir.c  		if (!dir_emit(ctx, de_name.name, de_name.len,
ctx               966 fs/f2fs/dir.c  		ctx->pos = start_pos + bit_pos;
ctx               974 fs/f2fs/dir.c  static int f2fs_readdir(struct file *file, struct dir_context *ctx)
ctx               981 fs/f2fs/dir.c  	loff_t start_pos = ctx->pos;
ctx               982 fs/f2fs/dir.c  	unsigned int n = ((unsigned long)ctx->pos / NR_DENTRY_IN_BLOCK);
ctx               998 fs/f2fs/dir.c  		err = f2fs_read_inline_dir(file, ctx, &fstr);
ctx              1002 fs/f2fs/dir.c  	for (; n < npages; n++, ctx->pos = n * NR_DENTRY_IN_BLOCK) {
ctx              1031 fs/f2fs/dir.c  		err = f2fs_fill_dentries(ctx, &d,
ctx              1043 fs/f2fs/dir.c  	trace_f2fs_readdir(inode, start_pos, ctx->pos, err);
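
The f2fs_fill_dentries()/f2fs_readdir() lines show the dir_context
contract every readdir implementation in this index follows: keep
ctx->pos pointing at the next entry to emit, and stop the moment the
actor (dir_emit) declines one, so the next call resumes at exactly that
offset.  A small illustrative model:

#include <stdbool.h>
#include <stdio.h>

struct dir_context { long pos; };

/* stand-in for dir_emit(): refuses entries once the "buffer" is full */
static bool dir_emit(struct dir_context *ctx, const char *name)
{
	if (ctx->pos >= 2)
		return false;		/* room for two entries only */
	printf("emitted %s at pos %ld\n", name, ctx->pos);
	return true;
}

int main(void)
{
	static const char *names[] = { "a", "b", "c", "d" };
	struct dir_context ctx = { .pos = 0 };

	for (; ctx.pos < 4; ctx.pos++)
		if (!dir_emit(&ctx, names[ctx.pos]))
			break;		/* pos still names the refused entry */
	printf("resume at pos %ld\n", ctx.pos);
	return 0;
}
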
ctx              1459 fs/f2fs/f2fs.h 		char ctx[4];
ctx              1463 fs/f2fs/f2fs.h 	BUG_ON(crypto_shash_descsize(sbi->s_chksum_driver) != sizeof(desc.ctx));
ctx              1466 fs/f2fs/f2fs.h 	*(u32 *)desc.ctx = crc;
ctx              1471 fs/f2fs/f2fs.h 	return *(u32 *)desc.ctx;
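
The f2fs.h checksum helper keeps the running CRC in the shash
descriptor's opaque ctx[] bytes, seeding and reading it through a u32
cast.  A toy userspace analogue of that storage pattern (the mixing step
is a placeholder rather than the kernel's crc32 driver, and the cast
mirrors the kernel's; a strictly portable version would use memcpy):

#include <stdio.h>
#include <stdint.h>

struct desc { char ctx[4]; };	/* opaque per-request state, as in shash */

static void crc_step(struct desc *d, unsigned char byte)
{
	uint32_t crc = *(uint32_t *)d->ctx;	/* read the running state */

	crc = (crc << 1) ^ byte;		/* placeholder mixing */
	*(uint32_t *)d->ctx = crc;		/* write it back */
}

int main(void)
{
	struct desc d;

	*(uint32_t *)d.ctx = 0xF2F5u;	/* seed, like *(u32 *)desc.ctx = crc */
	crc_step(&d, 0x42);
	printf("state: %#x\n", (unsigned)*(uint32_t *)d.ctx);
	return 0;
}
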
ctx              2974 fs/f2fs/f2fs.h int f2fs_fill_dentries(struct dir_context *ctx, struct f2fs_dentry_ptr *d,
ctx              3512 fs/f2fs/f2fs.h int f2fs_read_inline_dir(struct file *file, struct dir_context *ctx,
ctx               663 fs/f2fs/inline.c int f2fs_read_inline_dir(struct file *file, struct dir_context *ctx,
ctx               674 fs/f2fs/inline.c 	if (ctx->pos == d.max)
ctx               691 fs/f2fs/inline.c 	err = f2fs_fill_dentries(ctx, &d, 0, fstr);
ctx               693 fs/f2fs/inline.c 		ctx->pos = d.max;
ctx              2284 fs/f2fs/super.c static int f2fs_get_context(struct inode *inode, void *ctx, size_t len)
ctx              2288 fs/f2fs/super.c 				ctx, len, NULL);
ctx              2291 fs/f2fs/super.c static int f2fs_set_context(struct inode *inode, const void *ctx, size_t len,
ctx              2308 fs/f2fs/super.c 				ctx, len, fs_data, XATTR_CREATE);
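
In both ext4_get/set_context and f2fs_get/set_context the fscrypt
context is an opaque blob persisted in a dedicated xattr.  A hedged
userspace analogue using the generic xattr syscalls (the file
./testfile and the "user.crypto_context" name are hypothetical, and an
xattr-capable filesystem is assumed):

#include <stdio.h>
#include <sys/xattr.h>

int main(void)
{
	const char blob[] = { 1, 2, 3, 4 };	/* opaque context payload */
	char out[16];
	ssize_t n;

	if (setxattr("testfile", "user.crypto_context",
		     blob, sizeof(blob), 0) != 0)
		perror("setxattr");
	n = getxattr("testfile", "user.crypto_context", out, sizeof(out));
	printf("read back %zd bytes\n", n);
	return 0;
}
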
ctx               543 fs/fat/dir.c   	struct dir_context ctx;
ctx               554 fs/fat/dir.c   			 struct dir_context *ctx, int short_only,
ctx               573 fs/fat/dir.c   	cpos = ctx->pos;
ctx               576 fs/fat/dir.c   		if (!dir_emit_dots(file, ctx))
ctx               578 fs/fat/dir.c   		if (ctx->pos == 2) {
ctx               658 fs/fat/dir.c   	ctx->pos = cpos - (nr_slots + 1) * sizeof(struct msdos_dir_entry);
ctx               659 fs/fat/dir.c   	if (fake_offset && ctx->pos < 2)
ctx               660 fs/fat/dir.c   		ctx->pos = 2;
ctx               663 fs/fat/dir.c   		if (!dir_emit_dot(file, ctx))
ctx               666 fs/fat/dir.c   		if (!dir_emit_dotdot(file, ctx))
ctx               677 fs/fat/dir.c   		if (!dir_emit(ctx, fill_name, fill_len, inum,
ctx               684 fs/fat/dir.c   	ctx->pos = cpos;
ctx               689 fs/fat/dir.c   		ctx->pos = 2;
ctx               691 fs/fat/dir.c   		ctx->pos = cpos;
ctx               702 fs/fat/dir.c   static int fat_readdir(struct file *file, struct dir_context *ctx)
ctx               704 fs/fat/dir.c   	return __fat_readdir(file_inode(file), file, ctx, 0, NULL);
ctx               708 fs/fat/dir.c   static int func(struct dir_context *ctx, const char *name, int name_len,   \
ctx               712 fs/fat/dir.c   		container_of(ctx, struct fat_ioctl_filldir_callback, ctx); \
ctx               766 fs/fat/dir.c   		.ctx.actor = filldir,
ctx               774 fs/fat/dir.c   	buf.ctx.pos = file->f_pos;
ctx               777 fs/fat/dir.c   		ret = __fat_readdir(inode, file, &buf.ctx,
ctx               779 fs/fat/dir.c   		file->f_pos = buf.ctx.pos;
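
fat_ioctl_filldir_callback (like osf_dirent_callback and gfs2's
get_name_filldir elsewhere in this index) embeds the dir_context as a
member so the actor can recover its private wrapper with
container_of().  A self-contained illustration of that trick:

#include <stdio.h>
#include <stddef.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dir_context { long pos; };

struct filldir_callback {
	struct dir_context ctx;	/* embedded handle the core code sees */
	int entries_seen;	/* private state the actor wants back */
};

static void actor(struct dir_context *ctx)
{
	struct filldir_callback *buf =
		container_of(ctx, struct filldir_callback, ctx);

	buf->entries_seen++;
}

int main(void)
{
	struct filldir_callback buf = { .ctx = { .pos = 0 } };

	actor(&buf.ctx);	/* only the embedded ctx crosses the API */
	printf("entries seen: %d\n", buf.entries_seen);
	return 0;
}
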
ctx               215 fs/freevxfs/vxfs_lookup.c vxfs_readdir(struct file *fp, struct dir_context *ctx)
ctx               223 fs/freevxfs/vxfs_lookup.c 	if (ctx->pos == 0) {
ctx               224 fs/freevxfs/vxfs_lookup.c 		if (!dir_emit_dot(fp, ctx))
ctx               226 fs/freevxfs/vxfs_lookup.c 		ctx->pos++;
ctx               228 fs/freevxfs/vxfs_lookup.c 	if (ctx->pos == 1) {
ctx               229 fs/freevxfs/vxfs_lookup.c 		if (!dir_emit(ctx, "..", 2, VXFS_INO(ip)->vii_dotdot, DT_DIR))
ctx               231 fs/freevxfs/vxfs_lookup.c 		ctx->pos++;
ctx               235 fs/freevxfs/vxfs_lookup.c 	if (ctx->pos > limit)
ctx               238 fs/freevxfs/vxfs_lookup.c 	pos = ctx->pos & ~3L;
ctx               277 fs/freevxfs/vxfs_lookup.c 			rc = dir_emit(ctx, de->d_name,
ctx               292 fs/freevxfs/vxfs_lookup.c 	ctx->pos = pos | 2;
ctx               519 fs/fs_context.c 	struct legacy_fs_context *ctx = fc->fs_private;
ctx               521 fs/fs_context.c 	if (ctx) {
ctx               522 fs/fs_context.c 		if (ctx->param_type == LEGACY_FS_INDIVIDUAL_PARAMS)
ctx               523 fs/fs_context.c 			kfree(ctx->legacy_data);
ctx               524 fs/fs_context.c 		kfree(ctx);
ctx               533 fs/fs_context.c 	struct legacy_fs_context *ctx;
ctx               536 fs/fs_context.c 	ctx = kmemdup(src_ctx, sizeof(*src_ctx), GFP_KERNEL);
ctx               537 fs/fs_context.c 	if (!ctx)
ctx               540 fs/fs_context.c 	if (ctx->param_type == LEGACY_FS_INDIVIDUAL_PARAMS) {
ctx               541 fs/fs_context.c 		ctx->legacy_data = kmemdup(src_ctx->legacy_data,
ctx               543 fs/fs_context.c 		if (!ctx->legacy_data) {
ctx               544 fs/fs_context.c 			kfree(ctx);
ctx               549 fs/fs_context.c 	fc->fs_private = ctx;
ctx               559 fs/fs_context.c 	struct legacy_fs_context *ctx = fc->fs_private;
ctx               560 fs/fs_context.c 	unsigned int size = ctx->data_size;
ctx               573 fs/fs_context.c 	if (ctx->param_type == LEGACY_FS_MONOLITHIC_PARAMS)
ctx               595 fs/fs_context.c 	if (!ctx->legacy_data) {
ctx               596 fs/fs_context.c 		ctx->legacy_data = kmalloc(PAGE_SIZE, GFP_KERNEL);
ctx               597 fs/fs_context.c 		if (!ctx->legacy_data)
ctx               601 fs/fs_context.c 	ctx->legacy_data[size++] = ',';
ctx               603 fs/fs_context.c 	memcpy(ctx->legacy_data + size, param->key, len);
ctx               606 fs/fs_context.c 		ctx->legacy_data[size++] = '=';
ctx               607 fs/fs_context.c 		memcpy(ctx->legacy_data + size, param->string, param->size);
ctx               610 fs/fs_context.c 	ctx->legacy_data[size] = '\0';
ctx               611 fs/fs_context.c 	ctx->data_size = size;
ctx               612 fs/fs_context.c 	ctx->param_type = LEGACY_FS_INDIVIDUAL_PARAMS;
ctx               621 fs/fs_context.c 	struct legacy_fs_context *ctx = fc->fs_private;
ctx               623 fs/fs_context.c 	if (ctx->param_type != LEGACY_FS_UNSET_PARAMS) {
ctx               628 fs/fs_context.c 	ctx->legacy_data = data;
ctx               629 fs/fs_context.c 	ctx->param_type = LEGACY_FS_MONOLITHIC_PARAMS;
ctx               630 fs/fs_context.c 	if (!ctx->legacy_data)
ctx               635 fs/fs_context.c 	return security_sb_eat_lsm_opts(ctx->legacy_data, &fc->security);
ctx               643 fs/fs_context.c 	struct legacy_fs_context *ctx = fc->fs_private;
ctx               648 fs/fs_context.c 				      fc->source, ctx->legacy_data);
ctx               664 fs/fs_context.c 	struct legacy_fs_context *ctx = fc->fs_private;
ctx               671 fs/fs_context.c 				    ctx ? ctx->legacy_data : NULL);
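
legacy_parse_param() flattens each key/value pair back into one
page-sized, comma-separated string so pre-fs_context filesystems still
receive a classic mount-options blob.  A simplified sketch of that
packing (bounds handling and names are illustrative):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096

struct legacy_fs_context {
	char *legacy_data;
	size_t data_size;
};

static int legacy_add_param(struct legacy_fs_context *ctx,
			    const char *key, const char *val)
{
	size_t len = strlen(key) + (val ? strlen(val) + 1 : 0) + 2;

	if (!ctx->legacy_data) {
		ctx->legacy_data = calloc(1, PAGE_SIZE);
		if (!ctx->legacy_data)
			return -1;
	}
	if (ctx->data_size + len >= PAGE_SIZE)
		return -1;			/* -ENOSPC in the kernel */
	if (ctx->data_size)
		ctx->legacy_data[ctx->data_size++] = ',';
	ctx->data_size += sprintf(ctx->legacy_data + ctx->data_size,
				  val ? "%s=%s" : "%s", key, val);
	return 0;
}

int main(void)
{
	struct legacy_fs_context ctx = { 0 };

	legacy_add_param(&ctx, "uid", "1000");
	legacy_add_param(&ctx, "ro", NULL);
	printf("options: %s\n", ctx.legacy_data);
	free(ctx.legacy_data);
	return 0;
}
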
ctx               970 fs/fuse/fuse_i.h int fuse_fill_super_common(struct super_block *sb, struct fuse_fs_context *ctx);
ctx              1082 fs/fuse/fuse_i.h int fuse_readdir(struct file *file, struct dir_context *ctx);
ctx               473 fs/fuse/inode.c 	struct fuse_fs_context *ctx = fc->fs_private;
ctx               489 fs/fuse/inode.c 		if (ctx->subtype)
ctx               491 fs/fuse/inode.c 		ctx->subtype = param->string;
ctx               496 fs/fuse/inode.c 		ctx->fd = result.uint_32;
ctx               497 fs/fuse/inode.c 		ctx->fd_present = 1;
ctx               503 fs/fuse/inode.c 		ctx->rootmode = result.uint_32;
ctx               504 fs/fuse/inode.c 		ctx->rootmode_present = 1;
ctx               508 fs/fuse/inode.c 		ctx->user_id = make_kuid(fc->user_ns, result.uint_32);
ctx               509 fs/fuse/inode.c 		if (!uid_valid(ctx->user_id))
ctx               511 fs/fuse/inode.c 		ctx->user_id_present = 1;
ctx               515 fs/fuse/inode.c 		ctx->group_id = make_kgid(fc->user_ns, result.uint_32);
ctx               516 fs/fuse/inode.c 		if (!gid_valid(ctx->group_id))
ctx               518 fs/fuse/inode.c 		ctx->group_id_present = 1;
ctx               522 fs/fuse/inode.c 		ctx->default_permissions = 1;
ctx               526 fs/fuse/inode.c 		ctx->allow_other = 1;
ctx               530 fs/fuse/inode.c 		ctx->max_read = result.uint_32;
ctx               534 fs/fuse/inode.c 		if (!ctx->is_bdev)
ctx               536 fs/fuse/inode.c 		ctx->blksize = result.uint_32;
ctx               548 fs/fuse/inode.c 	struct fuse_fs_context *ctx = fc->fs_private;
ctx               550 fs/fuse/inode.c 	if (ctx) {
ctx               551 fs/fuse/inode.c 		kfree(ctx->subtype);
ctx               552 fs/fuse/inode.c 		kfree(ctx);
ctx              1119 fs/fuse/inode.c int fuse_fill_super_common(struct super_block *sb, struct fuse_fs_context *ctx)
ctx              1133 fs/fuse/inode.c 	if (ctx->is_bdev) {
ctx              1136 fs/fuse/inode.c 		if (!sb_set_blocksize(sb, ctx->blksize))
ctx              1144 fs/fuse/inode.c 	sb->s_subtype = ctx->subtype;
ctx              1145 fs/fuse/inode.c 	ctx->subtype = NULL;
ctx              1178 fs/fuse/inode.c 	fc->default_permissions = ctx->default_permissions;
ctx              1179 fs/fuse/inode.c 	fc->allow_other = ctx->allow_other;
ctx              1180 fs/fuse/inode.c 	fc->user_id = ctx->user_id;
ctx              1181 fs/fuse/inode.c 	fc->group_id = ctx->group_id;
ctx              1182 fs/fuse/inode.c 	fc->max_read = max_t(unsigned, 4096, ctx->max_read);
ctx              1183 fs/fuse/inode.c 	fc->destroy = ctx->destroy;
ctx              1184 fs/fuse/inode.c 	fc->no_control = ctx->no_control;
ctx              1185 fs/fuse/inode.c 	fc->no_force_umount = ctx->no_force_umount;
ctx              1186 fs/fuse/inode.c 	fc->no_mount_options = ctx->no_mount_options;
ctx              1189 fs/fuse/inode.c 	root = fuse_get_root_inode(sb, ctx->rootmode);
ctx              1199 fs/fuse/inode.c 	if (*ctx->fudptr)
ctx              1208 fs/fuse/inode.c 	*ctx->fudptr = fud;
ctx              1224 fs/fuse/inode.c 	struct fuse_fs_context *ctx = fsc->fs_private;
ctx              1230 fs/fuse/inode.c 	file = fget(ctx->fd);
ctx              1241 fs/fuse/inode.c 	ctx->fudptr = &file->private_data;
ctx              1252 fs/fuse/inode.c 	err = fuse_fill_super_common(sb, ctx);
ctx              1275 fs/fuse/inode.c 	struct fuse_fs_context *ctx = fc->fs_private;
ctx              1277 fs/fuse/inode.c 	if (!ctx->fd_present || !ctx->rootmode_present ||
ctx              1278 fs/fuse/inode.c 	    !ctx->user_id_present || !ctx->group_id_present)
ctx              1282 fs/fuse/inode.c 	if (ctx->is_bdev)
ctx              1300 fs/fuse/inode.c 	struct fuse_fs_context *ctx;
ctx              1302 fs/fuse/inode.c 	ctx = kzalloc(sizeof(struct fuse_fs_context), GFP_KERNEL);
ctx              1303 fs/fuse/inode.c 	if (!ctx)
ctx              1306 fs/fuse/inode.c 	ctx->max_read = ~0;
ctx              1307 fs/fuse/inode.c 	ctx->blksize = FUSE_DEFAULT_BLKSIZE;
ctx              1311 fs/fuse/inode.c 		ctx->is_bdev = true;
ctx              1312 fs/fuse/inode.c 		ctx->destroy = true;
ctx              1316 fs/fuse/inode.c 	fc->fs_private = ctx;
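
The fuse lines trace the full fs_context life cycle: init_fs_context()
seeds defaults, parse_param() records each option together with a
*_present flag, and get_tree() refuses the mount unless the mandatory
ones (fd, rootmode, user_id, group_id) all arrived.  A stripped-down
model of that validate-at-the-end flow, assuming simplified fields:

#include <stdio.h>
#include <stdbool.h>

struct fuse_fs_context {
	int fd;			bool fd_present;
	unsigned int rootmode;	bool rootmode_present;
	unsigned int max_read;	/* optional, defaulted */
};

static void init_ctx(struct fuse_fs_context *ctx)
{
	*ctx = (struct fuse_fs_context){ .max_read = ~0u };
}

static int get_tree(const struct fuse_fs_context *ctx)
{
	if (!ctx->fd_present || !ctx->rootmode_present)
		return -1;	/* -EINVAL: both options are mandatory */
	return 0;
}

int main(void)
{
	struct fuse_fs_context ctx;

	init_ctx(&ctx);
	ctx.fd = 3;			/* as if parse_param saw "fd=3" */
	ctx.fd_present = true;
	printf("mount %s\n", get_tree(&ctx) ? "rejected" : "accepted");
	return 0;
}
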
ctx                16 fs/fuse/readdir.c static bool fuse_use_readdirplus(struct inode *dir, struct dir_context *ctx)
ctx                27 fs/fuse/readdir.c 	if (ctx->pos == 0)
ctx               112 fs/fuse/readdir.c static bool fuse_emit(struct file *file, struct dir_context *ctx,
ctx               118 fs/fuse/readdir.c 		fuse_add_dirent_to_cache(file, dirent, ctx->pos);
ctx               120 fs/fuse/readdir.c 	return dir_emit(ctx, dirent->name, dirent->namelen, dirent->ino,
ctx               125 fs/fuse/readdir.c 			 struct dir_context *ctx)
ctx               137 fs/fuse/readdir.c 		if (!fuse_emit(file, ctx, dirent))
ctx               142 fs/fuse/readdir.c 		ctx->pos = dirent->off;
ctx               274 fs/fuse/readdir.c 			     struct dir_context *ctx, u64 attr_version)
ctx               301 fs/fuse/readdir.c 			over = !fuse_emit(file, ctx, dirent);
ctx               303 fs/fuse/readdir.c 				ctx->pos = dirent->off;
ctx               317 fs/fuse/readdir.c static int fuse_readdir_uncached(struct file *file, struct dir_context *ctx)
ctx               334 fs/fuse/readdir.c 	plus = fuse_use_readdirplus(inode, ctx);
ctx               341 fs/fuse/readdir.c 		fuse_read_args_fill(&ia, file, ctx->pos, PAGE_SIZE,
ctx               344 fs/fuse/readdir.c 		fuse_read_args_fill(&ia, file, ctx->pos, PAGE_SIZE,
ctx               355 fs/fuse/readdir.c 				fuse_readdir_cache_end(file, ctx->pos);
ctx               358 fs/fuse/readdir.c 						file, ctx, attr_version);
ctx               361 fs/fuse/readdir.c 					    ctx);
ctx               379 fs/fuse/readdir.c 					       struct dir_context *ctx)
ctx               403 fs/fuse/readdir.c 		if (ff->readdir.pos == ctx->pos) {
ctx               405 fs/fuse/readdir.c 			if (!dir_emit(ctx, dirent->name, dirent->namelen,
ctx               408 fs/fuse/readdir.c 			ctx->pos = dirent->off;
ctx               431 fs/fuse/readdir.c static int fuse_readdir_cached(struct file *file, struct dir_context *ctx)
ctx               444 fs/fuse/readdir.c 	if (ff->readdir.pos != ctx->pos) {
ctx               453 fs/fuse/readdir.c 	if (!ctx->pos && fc->auto_inval_data) {
ctx               465 fs/fuse/readdir.c 		if (!ctx->pos && !fi->rdc.size) {
ctx               477 fs/fuse/readdir.c 	if (!ctx->pos) {
ctx               540 fs/fuse/readdir.c 	res = fuse_parse_cache(ff, addr, size, ctx);
ctx               565 fs/fuse/readdir.c int fuse_readdir(struct file *file, struct dir_context *ctx)
ctx               578 fs/fuse/readdir.c 		err = fuse_readdir_cached(file, ctx);
ctx               580 fs/fuse/readdir.c 		err = fuse_readdir_uncached(file, ctx);
ctx              1042 fs/fuse/virtio_fs.c 	struct fuse_fs_context ctx = {
ctx              1076 fs/fuse/virtio_fs.c 	ctx.fudptr = (void **)&fs->vqs[VQ_REQUEST].fud;
ctx              1077 fs/fuse/virtio_fs.c 	err = fuse_fill_super_common(sb, &ctx);
ctx              1268 fs/gfs2/dir.c  static int do_filldir_main(struct gfs2_inode *dip, struct dir_context *ctx,
ctx              1292 fs/gfs2/dir.c  			if (off < ctx->pos)
ctx              1294 fs/gfs2/dir.c  			ctx->pos = off;
ctx              1303 fs/gfs2/dir.c  			if (off < ctx->pos)
ctx              1305 fs/gfs2/dir.c  			ctx->pos = off;
ctx              1308 fs/gfs2/dir.c  		if (!dir_emit(ctx, (const char *)(dent + 1),
ctx              1321 fs/gfs2/dir.c  	ctx->pos++;
ctx              1371 fs/gfs2/dir.c  static int gfs2_dir_read_leaf(struct inode *inode, struct dir_context *ctx,
ctx              1462 fs/gfs2/dir.c  	error = do_filldir_main(ip, ctx, darr, entries, need_sort ?
ctx              1529 fs/gfs2/dir.c  static int dir_e_read(struct inode *inode, struct dir_context *ctx,
ctx              1541 fs/gfs2/dir.c  	hash = gfs2_dir_offset2hash(ctx->pos);
ctx              1553 fs/gfs2/dir.c  		error = gfs2_dir_read_leaf(inode, ctx,
ctx              1568 fs/gfs2/dir.c  int gfs2_dir_read(struct inode *inode, struct dir_context *ctx,
ctx              1583 fs/gfs2/dir.c  		return dir_e_read(inode, ctx, f_ra);
ctx              1617 fs/gfs2/dir.c  		error = do_filldir_main(dip, ctx, darr,
ctx                39 fs/gfs2/dir.h  extern int gfs2_dir_read(struct inode *inode, struct dir_context *ctx,
ctx                64 fs/gfs2/export.c 	struct dir_context ctx;
ctx                69 fs/gfs2/export.c static int get_name_filldir(struct dir_context *ctx, const char *name,
ctx                74 fs/gfs2/export.c 		container_of(ctx, struct get_name_filldir, ctx);
ctx                92 fs/gfs2/export.c 		.ctx.actor = get_name_filldir,
ctx               116 fs/gfs2/export.c 	error = gfs2_dir_read(dir, &gnfd.ctx, &f_ra);
ctx               103 fs/gfs2/file.c static int gfs2_readdir(struct file *file, struct dir_context *ctx)
ctx               114 fs/gfs2/file.c 	error = gfs2_dir_read(dir, ctx, &file->f_ra);
ctx                48 fs/hfs/dir.c   static int hfs_readdir(struct file *file, struct dir_context *ctx)
ctx                59 fs/hfs/dir.c   	if (ctx->pos >= inode->i_size)
ctx                70 fs/hfs/dir.c   	if (ctx->pos == 0) {
ctx                72 fs/hfs/dir.c   		if (!dir_emit_dot(file, ctx))
ctx                74 fs/hfs/dir.c   		ctx->pos = 1;
ctx                76 fs/hfs/dir.c   	if (ctx->pos == 1) {
ctx                93 fs/hfs/dir.c   		if (!dir_emit(ctx, "..", 2,
ctx                96 fs/hfs/dir.c   		ctx->pos = 2;
ctx                98 fs/hfs/dir.c   	if (ctx->pos >= inode->i_size)
ctx               100 fs/hfs/dir.c   	err = hfs_brec_goto(&fd, ctx->pos - 1);
ctx               125 fs/hfs/dir.c   			if (!dir_emit(ctx, strbuf, len,
ctx               134 fs/hfs/dir.c   			if (!dir_emit(ctx, strbuf, len,
ctx               142 fs/hfs/dir.c   		ctx->pos++;
ctx               143 fs/hfs/dir.c   		if (ctx->pos >= inode->i_size)
ctx               130 fs/hfsplus/dir.c static int hfsplus_readdir(struct file *file, struct dir_context *ctx)
ctx               157 fs/hfsplus/dir.c 	if (ctx->pos == 0) {
ctx               159 fs/hfsplus/dir.c 		if (!dir_emit_dot(file, ctx))
ctx               161 fs/hfsplus/dir.c 		ctx->pos = 1;
ctx               163 fs/hfsplus/dir.c 	if (ctx->pos == 1) {
ctx               181 fs/hfsplus/dir.c 		if (!dir_emit(ctx, "..", 2,
ctx               184 fs/hfsplus/dir.c 		ctx->pos = 2;
ctx               186 fs/hfsplus/dir.c 	if (ctx->pos >= inode->i_size)
ctx               188 fs/hfsplus/dir.c 	err = hfs_brec_goto(&fd, ctx->pos - 1);
ctx               221 fs/hfsplus/dir.c 			if (!dir_emit(ctx, strbuf, len,
ctx               248 fs/hfsplus/dir.c 			if (!dir_emit(ctx, strbuf, len,
ctx               257 fs/hfsplus/dir.c 		ctx->pos++;
ctx               258 fs/hfsplus/dir.c 		if (ctx->pos >= inode->i_size)
ctx               273 fs/hostfs/hostfs_kern.c static int hostfs_readdir(struct file *file, struct dir_context *ctx)
ctx               288 fs/hostfs/hostfs_kern.c 	next = ctx->pos;
ctx               291 fs/hostfs/hostfs_kern.c 		if (!dir_emit(ctx, name, len, ino, type))
ctx               293 fs/hostfs/hostfs_kern.c 		ctx->pos = next;
ctx                65 fs/hpfs/dir.c  static int hpfs_readdir(struct file *file, struct dir_context *ctx)
ctx               113 fs/hpfs/dir.c  	if (ctx->pos == 12) { /* diff -r requires this (note, that diff -r */
ctx               114 fs/hpfs/dir.c  		ctx->pos = 13; /* also fails on msdos filesystem in 2.0) */
ctx               117 fs/hpfs/dir.c  	if (ctx->pos == 13) {
ctx               128 fs/hpfs/dir.c  			if (hpfs_stop_cycles(inode->i_sb, ctx->pos, &c1, &c2, "hpfs_readdir")) {
ctx               132 fs/hpfs/dir.c  		if (ctx->pos == 12)
ctx               134 fs/hpfs/dir.c  		if (ctx->pos == 3 || ctx->pos == 4 || ctx->pos == 5) {
ctx               135 fs/hpfs/dir.c  			pr_err("pos==%d\n", (int)ctx->pos);
ctx               138 fs/hpfs/dir.c  		if (ctx->pos == 0) {
ctx               139 fs/hpfs/dir.c  			if (!dir_emit_dot(file, ctx))
ctx               141 fs/hpfs/dir.c  			ctx->pos = 11;
ctx               143 fs/hpfs/dir.c  		if (ctx->pos == 11) {
ctx               144 fs/hpfs/dir.c  			if (!dir_emit(ctx, "..", 2, hpfs_inode->i_parent_dir, DT_DIR))
ctx               146 fs/hpfs/dir.c  			ctx->pos = 1;
ctx               148 fs/hpfs/dir.c  		if (ctx->pos == 1) {
ctx               152 fs/hpfs/dir.c  			ctx->pos = ((loff_t) hpfs_de_as_down_as_possible(inode->i_sb, hpfs_inode->i_dno) << 4) + 1;
ctx               154 fs/hpfs/dir.c  		next_pos = ctx->pos;
ctx               156 fs/hpfs/dir.c  			ctx->pos = next_pos;
ctx               164 fs/hpfs/dir.c  					hpfs_error(inode->i_sb, "hpfs_readdir: bad ^A^A entry; pos = %08lx", (unsigned long)ctx->pos);
ctx               166 fs/hpfs/dir.c  					hpfs_error(inode->i_sb, "hpfs_readdir: bad \\377 entry; pos = %08lx", (unsigned long)ctx->pos);
ctx               169 fs/hpfs/dir.c  			ctx->pos = next_pos;
ctx               173 fs/hpfs/dir.c  		if (!dir_emit(ctx, tempname, de->namelen, le32_to_cpu(de->fnode), DT_UNKNOWN)) {
ctx               178 fs/hpfs/dir.c  		ctx->pos = next_pos;
ctx               729 fs/hugetlbfs/inode.c 					struct hugetlbfs_fs_context *ctx)
ctx               736 fs/hugetlbfs/inode.c 		inode->i_mode = S_IFDIR | ctx->mode;
ctx               737 fs/hugetlbfs/inode.c 		inode->i_uid = ctx->uid;
ctx               738 fs/hugetlbfs/inode.c 		inode->i_gid = ctx->gid;
ctx              1148 fs/hugetlbfs/inode.c 	struct hugetlbfs_fs_context *ctx = fc->fs_private;
ctx              1160 fs/hugetlbfs/inode.c 		ctx->uid = make_kuid(current_user_ns(), result.uint_32);
ctx              1161 fs/hugetlbfs/inode.c 		if (!uid_valid(ctx->uid))
ctx              1166 fs/hugetlbfs/inode.c 		ctx->gid = make_kgid(current_user_ns(), result.uint_32);
ctx              1167 fs/hugetlbfs/inode.c 		if (!gid_valid(ctx->gid))
ctx              1172 fs/hugetlbfs/inode.c 		ctx->mode = result.uint_32 & 01777U;
ctx              1179 fs/hugetlbfs/inode.c 		ctx->max_size_opt = memparse(param->string, &rest);
ctx              1180 fs/hugetlbfs/inode.c 		ctx->max_val_type = SIZE_STD;
ctx              1182 fs/hugetlbfs/inode.c 			ctx->max_val_type = SIZE_PERCENT;
ctx              1189 fs/hugetlbfs/inode.c 		ctx->nr_inodes = memparse(param->string, &rest);
ctx              1194 fs/hugetlbfs/inode.c 		ctx->hstate = size_to_hstate(ps);
ctx              1195 fs/hugetlbfs/inode.c 		if (!ctx->hstate) {
ctx              1205 fs/hugetlbfs/inode.c 		ctx->min_size_opt = memparse(param->string, &rest);
ctx              1206 fs/hugetlbfs/inode.c 		ctx->min_val_type = SIZE_STD;
ctx              1208 fs/hugetlbfs/inode.c 			ctx->min_val_type = SIZE_PERCENT;
ctx              1225 fs/hugetlbfs/inode.c 	struct hugetlbfs_fs_context *ctx = fc->fs_private;
ctx              1231 fs/hugetlbfs/inode.c 	ctx->max_hpages = hugetlbfs_size_to_hpages(ctx->hstate,
ctx              1232 fs/hugetlbfs/inode.c 						   ctx->max_size_opt,
ctx              1233 fs/hugetlbfs/inode.c 						   ctx->max_val_type);
ctx              1234 fs/hugetlbfs/inode.c 	ctx->min_hpages = hugetlbfs_size_to_hpages(ctx->hstate,
ctx              1235 fs/hugetlbfs/inode.c 						   ctx->min_size_opt,
ctx              1236 fs/hugetlbfs/inode.c 						   ctx->min_val_type);
ctx              1241 fs/hugetlbfs/inode.c 	if (ctx->max_val_type > NO_SIZE &&
ctx              1242 fs/hugetlbfs/inode.c 	    ctx->min_hpages > ctx->max_hpages) {
ctx              1253 fs/hugetlbfs/inode.c 	struct hugetlbfs_fs_context *ctx = fc->fs_private;
ctx              1261 fs/hugetlbfs/inode.c 	sbinfo->hstate		= ctx->hstate;
ctx              1262 fs/hugetlbfs/inode.c 	sbinfo->max_inodes	= ctx->nr_inodes;
ctx              1263 fs/hugetlbfs/inode.c 	sbinfo->free_inodes	= ctx->nr_inodes;
ctx              1265 fs/hugetlbfs/inode.c 	sbinfo->uid		= ctx->uid;
ctx              1266 fs/hugetlbfs/inode.c 	sbinfo->gid		= ctx->gid;
ctx              1267 fs/hugetlbfs/inode.c 	sbinfo->mode		= ctx->mode;
ctx              1274 fs/hugetlbfs/inode.c 	if (ctx->max_hpages != -1 || ctx->min_hpages != -1) {
ctx              1275 fs/hugetlbfs/inode.c 		sbinfo->spool = hugepage_new_subpool(ctx->hstate,
ctx              1276 fs/hugetlbfs/inode.c 						     ctx->max_hpages,
ctx              1277 fs/hugetlbfs/inode.c 						     ctx->min_hpages);
ctx              1282 fs/hugetlbfs/inode.c 	sb->s_blocksize = huge_page_size(ctx->hstate);
ctx              1283 fs/hugetlbfs/inode.c 	sb->s_blocksize_bits = huge_page_shift(ctx->hstate);
ctx              1287 fs/hugetlbfs/inode.c 	sb->s_root = d_make_root(hugetlbfs_get_root(sb, ctx));
ctx              1318 fs/hugetlbfs/inode.c 	struct hugetlbfs_fs_context *ctx;
ctx              1320 fs/hugetlbfs/inode.c 	ctx = kzalloc(sizeof(struct hugetlbfs_fs_context), GFP_KERNEL);
ctx              1321 fs/hugetlbfs/inode.c 	if (!ctx)
ctx              1324 fs/hugetlbfs/inode.c 	ctx->max_hpages	= -1; /* No limit on size by default */
ctx              1325 fs/hugetlbfs/inode.c 	ctx->nr_inodes	= -1; /* No limit on number of inodes by default */
ctx              1326 fs/hugetlbfs/inode.c 	ctx->uid	= current_fsuid();
ctx              1327 fs/hugetlbfs/inode.c 	ctx->gid	= current_fsgid();
ctx              1328 fs/hugetlbfs/inode.c 	ctx->mode	= 0755;
ctx              1329 fs/hugetlbfs/inode.c 	ctx->hstate	= &default_hstate;
ctx              1330 fs/hugetlbfs/inode.c 	ctx->min_hpages	= -1; /* No default minimum size */
ctx              1331 fs/hugetlbfs/inode.c 	ctx->max_val_type = NO_SIZE;
ctx              1332 fs/hugetlbfs/inode.c 	ctx->min_val_type = NO_SIZE;
ctx              1333 fs/hugetlbfs/inode.c 	fc->fs_private = ctx;
ctx              1436 fs/hugetlbfs/inode.c 		struct hugetlbfs_fs_context *ctx = fc->fs_private;
ctx              1437 fs/hugetlbfs/inode.c 		ctx->hstate = h;
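
hugetlbfs accepts its size options either as absolute byte counts or as
percentages, recording which form was used (SIZE_STD vs SIZE_PERCENT) so
the conversion to huge pages can happen once the page size is known.  A
simplified memparse()-style reader (lowercase suffixes and other details
of the real helper are omitted):

#include <stdio.h>
#include <stdlib.h>

enum size_type { NO_SIZE, SIZE_STD, SIZE_PERCENT };

static unsigned long long parse_size(const char *s, enum size_type *type)
{
	char *rest;
	unsigned long long v = strtoull(s, &rest, 0);

	switch (*rest) {		/* memparse-like binary suffixes */
	case 'G': v <<= 10;	/* fall through */
	case 'M': v <<= 10;	/* fall through */
	case 'K': v <<= 10; rest++; break;
	}
	*type = (*rest == '%') ? SIZE_PERCENT : SIZE_STD;
	return v;
}

int main(void)
{
	enum size_type t;
	unsigned long long v = parse_size("50%", &t);

	printf("value=%llu type=%s\n", v,
	       t == SIZE_PERCENT ? "percent" : "bytes");
	return 0;
}
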
ctx               315 fs/io_uring.c  	struct io_ring_ctx	*ctx;
ctx               368 fs/io_uring.c  static void io_cqring_fill_event(struct io_ring_ctx *ctx, u64 ki_user_data,
ctx               380 fs/io_uring.c  		struct io_ring_ctx *ctx = file->private_data;
ctx               382 fs/io_uring.c  		return ctx->ring_sock->sk;
ctx               391 fs/io_uring.c  	struct io_ring_ctx *ctx = container_of(ref, struct io_ring_ctx, refs);
ctx               393 fs/io_uring.c  	complete(&ctx->ctx_done);
ctx               398 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx               401 fs/io_uring.c  	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               402 fs/io_uring.c  	if (!ctx)
ctx               405 fs/io_uring.c  	if (percpu_ref_init(&ctx->refs, io_ring_ctx_ref_free,
ctx               407 fs/io_uring.c  		kfree(ctx);
ctx               411 fs/io_uring.c  	ctx->flags = p->flags;
ctx               412 fs/io_uring.c  	init_waitqueue_head(&ctx->sqo_wait);
ctx               413 fs/io_uring.c  	init_waitqueue_head(&ctx->cq_wait);
ctx               414 fs/io_uring.c  	init_completion(&ctx->ctx_done);
ctx               415 fs/io_uring.c  	init_completion(&ctx->sqo_thread_started);
ctx               416 fs/io_uring.c  	mutex_init(&ctx->uring_lock);
ctx               417 fs/io_uring.c  	init_waitqueue_head(&ctx->wait);
ctx               418 fs/io_uring.c  	for (i = 0; i < ARRAY_SIZE(ctx->pending_async); i++) {
ctx               419 fs/io_uring.c  		spin_lock_init(&ctx->pending_async[i].lock);
ctx               420 fs/io_uring.c  		INIT_LIST_HEAD(&ctx->pending_async[i].list);
ctx               421 fs/io_uring.c  		atomic_set(&ctx->pending_async[i].cnt, 0);
ctx               423 fs/io_uring.c  	spin_lock_init(&ctx->completion_lock);
ctx               424 fs/io_uring.c  	INIT_LIST_HEAD(&ctx->poll_list);
ctx               425 fs/io_uring.c  	INIT_LIST_HEAD(&ctx->cancel_list);
ctx               426 fs/io_uring.c  	INIT_LIST_HEAD(&ctx->defer_list);
ctx               427 fs/io_uring.c  	INIT_LIST_HEAD(&ctx->timeout_list);
ctx               428 fs/io_uring.c  	return ctx;
ctx               431 fs/io_uring.c  static inline bool __io_sequence_defer(struct io_ring_ctx *ctx,
ctx               434 fs/io_uring.c  	return req->sequence != ctx->cached_cq_tail + ctx->cached_sq_dropped
ctx               435 fs/io_uring.c  					+ atomic_read(&ctx->cached_cq_overflow);
ctx               438 fs/io_uring.c  static inline bool io_sequence_defer(struct io_ring_ctx *ctx,
ctx               444 fs/io_uring.c  	return __io_sequence_defer(ctx, req);
ctx               447 fs/io_uring.c  static struct io_kiocb *io_get_deferred_req(struct io_ring_ctx *ctx)
ctx               451 fs/io_uring.c  	req = list_first_entry_or_null(&ctx->defer_list, struct io_kiocb, list);
ctx               452 fs/io_uring.c  	if (req && !io_sequence_defer(ctx, req)) {
ctx               460 fs/io_uring.c  static struct io_kiocb *io_get_timeout_req(struct io_ring_ctx *ctx)
ctx               464 fs/io_uring.c  	req = list_first_entry_or_null(&ctx->timeout_list, struct io_kiocb, list);
ctx               468 fs/io_uring.c  		if (!__io_sequence_defer(ctx, req)) {
ctx               477 fs/io_uring.c  static void __io_commit_cqring(struct io_ring_ctx *ctx)
ctx               479 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx               481 fs/io_uring.c  	if (ctx->cached_cq_tail != READ_ONCE(rings->cq.tail)) {
ctx               483 fs/io_uring.c  		smp_store_release(&rings->cq.tail, ctx->cached_cq_tail);
ctx               485 fs/io_uring.c  		if (wq_has_sleeper(&ctx->cq_wait)) {
ctx               486 fs/io_uring.c  			wake_up_interruptible(&ctx->cq_wait);
ctx               487 fs/io_uring.c  			kill_fasync(&ctx->cq_fasync, SIGIO, POLL_IN);
ctx               492 fs/io_uring.c  static inline void io_queue_async_work(struct io_ring_ctx *ctx,
ctx               506 fs/io_uring.c  	queue_work(ctx->sqo_wq[rw], &req->work);
ctx               515 fs/io_uring.c  		atomic_inc(&req->ctx->cq_timeouts);
ctx               517 fs/io_uring.c  		io_cqring_fill_event(req->ctx, req->user_data, 0);
ctx               522 fs/io_uring.c  static void io_kill_timeouts(struct io_ring_ctx *ctx)
ctx               526 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx               527 fs/io_uring.c  	list_for_each_entry_safe(req, tmp, &ctx->timeout_list, list)
ctx               529 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx               532 fs/io_uring.c  static void io_commit_cqring(struct io_ring_ctx *ctx)
ctx               536 fs/io_uring.c  	while ((req = io_get_timeout_req(ctx)) != NULL)
ctx               539 fs/io_uring.c  	__io_commit_cqring(ctx);
ctx               541 fs/io_uring.c  	while ((req = io_get_deferred_req(ctx)) != NULL) {
ctx               548 fs/io_uring.c  		io_queue_async_work(ctx, req);
ctx               552 fs/io_uring.c  static struct io_uring_cqe *io_get_cqring(struct io_ring_ctx *ctx)
ctx               554 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx               557 fs/io_uring.c  	tail = ctx->cached_cq_tail;
ctx               566 fs/io_uring.c  	ctx->cached_cq_tail++;
ctx               567 fs/io_uring.c  	return &rings->cqes[tail & ctx->cq_mask];
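
io_get_cqring() shows the ring discipline both queues use: head and tail
are free-running counters and a power-of-two mask maps them onto slots,
so full/empty tests survive 32-bit wraparound.  A plain single-threaded
model (the real rings add acquire/release ordering on top):

#include <stdio.h>

#define RING_ENTRIES 8			/* must be a power of two */
#define RING_MASK (RING_ENTRIES - 1)

struct ring {
	unsigned int head, tail;	/* never masked until indexing */
	int slots[RING_ENTRIES];
};

static int ring_push(struct ring *r, int v)
{
	if (r->tail - r->head == RING_ENTRIES)
		return -1;		/* full: would overwrite */
	r->slots[r->tail++ & RING_MASK] = v;
	return 0;
}

int main(void)
{
	struct ring r = { 0 };
	int i;

	for (i = 0; i < 10; i++)
		if (ring_push(&r, i) != 0)
			printf("entry %d dropped, ring full\n", i);
	printf("queued: %u\n", r.tail - r.head);
	return 0;
}
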
ctx               570 fs/io_uring.c  static void io_cqring_fill_event(struct io_ring_ctx *ctx, u64 ki_user_data,
ctx               580 fs/io_uring.c  	cqe = io_get_cqring(ctx);
ctx               586 fs/io_uring.c  		WRITE_ONCE(ctx->rings->cq_overflow,
ctx               587 fs/io_uring.c  				atomic_inc_return(&ctx->cached_cq_overflow));
ctx               591 fs/io_uring.c  static void io_cqring_ev_posted(struct io_ring_ctx *ctx)
ctx               593 fs/io_uring.c  	if (waitqueue_active(&ctx->wait))
ctx               594 fs/io_uring.c  		wake_up(&ctx->wait);
ctx               595 fs/io_uring.c  	if (waitqueue_active(&ctx->sqo_wait))
ctx               596 fs/io_uring.c  		wake_up(&ctx->sqo_wait);
ctx               597 fs/io_uring.c  	if (ctx->cq_ev_fd)
ctx               598 fs/io_uring.c  		eventfd_signal(ctx->cq_ev_fd, 1);
ctx               601 fs/io_uring.c  static void io_cqring_add_event(struct io_ring_ctx *ctx, u64 user_data,
ctx               606 fs/io_uring.c  	spin_lock_irqsave(&ctx->completion_lock, flags);
ctx               607 fs/io_uring.c  	io_cqring_fill_event(ctx, user_data, res);
ctx               608 fs/io_uring.c  	io_commit_cqring(ctx);
ctx               609 fs/io_uring.c  	spin_unlock_irqrestore(&ctx->completion_lock, flags);
ctx               611 fs/io_uring.c  	io_cqring_ev_posted(ctx);
ctx               614 fs/io_uring.c  static struct io_kiocb *io_get_req(struct io_ring_ctx *ctx,
ctx               620 fs/io_uring.c  	if (!percpu_ref_tryget(&ctx->refs))
ctx               654 fs/io_uring.c  	req->ctx = ctx;
ctx               662 fs/io_uring.c  	percpu_ref_put(&ctx->refs);
ctx               666 fs/io_uring.c  static void io_free_req_many(struct io_ring_ctx *ctx, void **reqs, int *nr)
ctx               670 fs/io_uring.c  		percpu_ref_put_many(&ctx->refs, *nr);
ctx               679 fs/io_uring.c  	percpu_ref_put(&req->ctx->refs);
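
io_get_req()/io_free_req() bracket every request with a reference on the
ring context: a tryget that fails once teardown has begun, and a put
whose final drop lets the waiter behind io_ring_ctx_ref_free() proceed.
Plain counters stand in for percpu_ref and struct completion in this
sketch:

#include <stdio.h>
#include <stdbool.h>

struct ring_ctx {
	int refs;
	bool dying;	/* teardown started: tryget must fail */
	bool done;	/* "completion" fired: free may proceed */
};

static bool ref_tryget(struct ring_ctx *ctx)
{
	if (ctx->dying)
		return false;
	ctx->refs++;
	return true;
}

static void ref_put(struct ring_ctx *ctx)
{
	if (--ctx->refs == 0 && ctx->dying)
		ctx->done = true;	/* complete(&ctx->ctx_done) */
}

int main(void)
{
	struct ring_ctx ctx = { .refs = 1 };	/* ring's own reference */

	ref_tryget(&ctx);	/* a request is submitted */
	ctx.dying = true;	/* ring teardown begins */
	ref_put(&ctx);		/* the request completes */
	ref_put(&ctx);		/* the ring drops its own ref */
	printf("teardown may proceed: %s\n", ctx.done ? "yes" : "no");
	return 0;
}
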
ctx               703 fs/io_uring.c  		io_queue_async_work(req->ctx, nxt);
ctx               718 fs/io_uring.c  		io_cqring_add_event(req->ctx, link->user_data, -ECANCELED);
ctx               754 fs/io_uring.c  static inline unsigned int io_sqring_entries(struct io_ring_ctx *ctx)
ctx               756 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx               759 fs/io_uring.c  	return smp_load_acquire(&rings->sq.tail) - ctx->cached_sq_head;
ctx               765 fs/io_uring.c  static void io_iopoll_complete(struct io_ring_ctx *ctx, unsigned int *nr_events,
ctx               777 fs/io_uring.c  		io_cqring_fill_event(ctx, req->user_data, req->result);
ctx               790 fs/io_uring.c  					io_free_req_many(ctx, reqs, &to_free);
ctx               797 fs/io_uring.c  	io_commit_cqring(ctx);
ctx               798 fs/io_uring.c  	io_free_req_many(ctx, reqs, &to_free);
ctx               801 fs/io_uring.c  static int io_do_iopoll(struct io_ring_ctx *ctx, unsigned int *nr_events,
ctx               813 fs/io_uring.c  	spin = !ctx->poll_multi_file && *nr_events < min;
ctx               816 fs/io_uring.c  	list_for_each_entry_safe(req, tmp, &ctx->poll_list, list) {
ctx               841 fs/io_uring.c  		io_iopoll_complete(ctx, nr_events, &done);
ctx               851 fs/io_uring.c  static int io_iopoll_getevents(struct io_ring_ctx *ctx, unsigned int *nr_events,
ctx               854 fs/io_uring.c  	while (!list_empty(&ctx->poll_list) && !need_resched()) {
ctx               857 fs/io_uring.c  		ret = io_do_iopoll(ctx, nr_events, min);
ctx               871 fs/io_uring.c  static void io_iopoll_reap_events(struct io_ring_ctx *ctx)
ctx               873 fs/io_uring.c  	if (!(ctx->flags & IORING_SETUP_IOPOLL))
ctx               876 fs/io_uring.c  	mutex_lock(&ctx->uring_lock);
ctx               877 fs/io_uring.c  	while (!list_empty(&ctx->poll_list)) {
ctx               880 fs/io_uring.c  		io_iopoll_getevents(ctx, &nr_events, 1);
ctx               888 fs/io_uring.c  	mutex_unlock(&ctx->uring_lock);
ctx               891 fs/io_uring.c  static int io_iopoll_check(struct io_ring_ctx *ctx, unsigned *nr_events,
ctx               901 fs/io_uring.c  	mutex_lock(&ctx->uring_lock);
ctx               910 fs/io_uring.c  		if (io_cqring_events(ctx->rings))
ctx               924 fs/io_uring.c  			mutex_unlock(&ctx->uring_lock);
ctx               925 fs/io_uring.c  			mutex_lock(&ctx->uring_lock);
ctx               931 fs/io_uring.c  		ret = io_iopoll_getevents(ctx, nr_events, tmin);
ctx               937 fs/io_uring.c  	mutex_unlock(&ctx->uring_lock);
ctx               964 fs/io_uring.c  	io_cqring_add_event(req->ctx, req->user_data, res);
ctx               990 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx               997 fs/io_uring.c  	if (list_empty(&ctx->poll_list)) {
ctx               998 fs/io_uring.c  		ctx->poll_multi_file = false;
ctx               999 fs/io_uring.c  	} else if (!ctx->poll_multi_file) {
ctx              1002 fs/io_uring.c  		list_req = list_first_entry(&ctx->poll_list, struct io_kiocb,
ctx              1005 fs/io_uring.c  			ctx->poll_multi_file = true;
ctx              1013 fs/io_uring.c  		list_add(&req->list, &ctx->poll_list);
ctx              1015 fs/io_uring.c  		list_add_tail(&req->list, &ctx->poll_list);
ctx              1079 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1128 fs/io_uring.c  	if (ctx->flags & IORING_SETUP_IOPOLL) {
ctx              1165 fs/io_uring.c  static int io_import_fixed(struct io_ring_ctx *ctx, int rw,
ctx              1176 fs/io_uring.c  	if (unlikely(!ctx->user_bufs))
ctx              1180 fs/io_uring.c  	if (unlikely(buf_index >= ctx->nr_user_bufs))
ctx              1183 fs/io_uring.c  	index = array_index_nospec(buf_index, ctx->nr_user_bufs);
ctx              1184 fs/io_uring.c  	imu = &ctx->user_bufs[index];
ctx              1239 fs/io_uring.c  static ssize_t io_import_iovec(struct io_ring_ctx *ctx, int rw,
ctx              1259 fs/io_uring.c  		ssize_t ret = io_import_fixed(ctx, rw, sqe, iter);
ctx              1268 fs/io_uring.c  	if (ctx->compat)
ctx              1306 fs/io_uring.c  	struct async_list *async_list = &req->ctx->pending_async[rw];
ctx              1411 fs/io_uring.c  	ret = io_import_iovec(req->ctx, READ, s, &iovec, &iter);
ctx              1476 fs/io_uring.c  	ret = io_import_iovec(req->ctx, WRITE, s, &iovec, &iter);
ctx              1545 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1548 fs/io_uring.c  	if (unlikely(ctx->flags & IORING_SETUP_IOPOLL))
ctx              1551 fs/io_uring.c  	io_cqring_add_event(ctx, user_data, err);
ctx              1558 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1563 fs/io_uring.c  	if (unlikely(ctx->flags & IORING_SETUP_IOPOLL))
ctx              1598 fs/io_uring.c  	io_cqring_add_event(req->ctx, sqe->user_data, ret);
ctx              1605 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1611 fs/io_uring.c  	if (unlikely(ctx->flags & IORING_SETUP_IOPOLL))
ctx              1644 fs/io_uring.c  	io_cqring_add_event(req->ctx, sqe->user_data, ret);
ctx              1658 fs/io_uring.c  	if (unlikely(req->ctx->flags & IORING_SETUP_IOPOLL))
ctx              1673 fs/io_uring.c  		if (req->ctx->compat)
ctx              1697 fs/io_uring.c  	io_cqring_add_event(req->ctx, sqe->user_data, ret);
ctx              1731 fs/io_uring.c  		io_queue_async_work(req->ctx, req);
ctx              1738 fs/io_uring.c  static void io_poll_remove_all(struct io_ring_ctx *ctx)
ctx              1742 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              1743 fs/io_uring.c  	while (!list_empty(&ctx->cancel_list)) {
ctx              1744 fs/io_uring.c  	req = list_first_entry(&ctx->cancel_list, struct io_kiocb, list);
ctx              1747 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              1756 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1760 fs/io_uring.c  	if (unlikely(req->ctx->flags & IORING_SETUP_IOPOLL))
ctx              1766 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              1767 fs/io_uring.c  	list_for_each_entry_safe(poll_req, next, &ctx->cancel_list, list) {
ctx              1774 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              1776 fs/io_uring.c  	io_cqring_add_event(req->ctx, sqe->user_data, ret);
ctx              1781 fs/io_uring.c  static void io_poll_complete(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              1785 fs/io_uring.c  	io_cqring_fill_event(ctx, req->user_data, mangle_poll(mask));
ctx              1786 fs/io_uring.c  	io_commit_cqring(ctx);
ctx              1794 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1798 fs/io_uring.c  	old_cred = override_creds(ctx->creds);
ctx              1810 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              1813 fs/io_uring.c  		spin_unlock_irq(&ctx->completion_lock);
ctx              1817 fs/io_uring.c  	io_poll_complete(ctx, req, mask);
ctx              1818 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              1820 fs/io_uring.c  	io_cqring_ev_posted(ctx);
ctx              1832 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1842 fs/io_uring.c  	if (mask && spin_trylock_irqsave(&ctx->completion_lock, flags)) {
ctx              1844 fs/io_uring.c  		io_poll_complete(ctx, req, mask);
ctx              1845 fs/io_uring.c  		spin_unlock_irqrestore(&ctx->completion_lock, flags);
ctx              1847 fs/io_uring.c  		io_cqring_ev_posted(ctx);
ctx              1850 fs/io_uring.c  		io_queue_async_work(ctx, req);
ctx              1880 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1886 fs/io_uring.c  	if (unlikely(req->ctx->flags & IORING_SETUP_IOPOLL))
ctx              1915 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              1929 fs/io_uring.c  			list_add_tail(&req->list, &ctx->cancel_list);
ctx              1934 fs/io_uring.c  		io_poll_complete(ctx, req, mask);
ctx              1936 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              1939 fs/io_uring.c  		io_cqring_ev_posted(ctx);
ctx              1947 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx              1952 fs/io_uring.c  	ctx = req->ctx;
ctx              1953 fs/io_uring.c  	atomic_inc(&ctx->cq_timeouts);
ctx              1955 fs/io_uring.c  	spin_lock_irqsave(&ctx->completion_lock, flags);
ctx              1963 fs/io_uring.c  	list_for_each_entry_continue_reverse(prev, &ctx->timeout_list, list)
ctx              1967 fs/io_uring.c  	io_cqring_fill_event(ctx, req->user_data, -ETIME);
ctx              1968 fs/io_uring.c  	io_commit_cqring(ctx);
ctx              1969 fs/io_uring.c  	spin_unlock_irqrestore(&ctx->completion_lock, flags);
ctx              1971 fs/io_uring.c  	io_cqring_ev_posted(ctx);
ctx              1980 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              1985 fs/io_uring.c  	if (unlikely(ctx->flags & IORING_SETUP_IOPOLL))
ctx              2004 fs/io_uring.c  		spin_lock_irq(&ctx->completion_lock);
ctx              2005 fs/io_uring.c  		entry = ctx->timeout_list.prev;
ctx              2009 fs/io_uring.c  	req->sequence = ctx->cached_sq_head + count - 1;
ctx              2017 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              2018 fs/io_uring.c  	list_for_each_prev(entry, &ctx->timeout_list) {
ctx              2030 fs/io_uring.c  		tmp = (long long)ctx->cached_sq_head + count - 1;
ctx              2038 fs/io_uring.c  		if (ctx->cached_sq_head < nxt_sq_head)
ctx              2054 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              2063 fs/io_uring.c  static int io_req_defer(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              2068 fs/io_uring.c  	if (!io_sequence_defer(ctx, req) && list_empty(&ctx->defer_list))
ctx              2075 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              2076 fs/io_uring.c  	if (!io_sequence_defer(ctx, req) && list_empty(&ctx->defer_list)) {
ctx              2077 fs/io_uring.c  		spin_unlock_irq(&ctx->completion_lock);
ctx              2087 fs/io_uring.c  	list_add_tail(&req->list, &ctx->defer_list);
ctx              2088 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              2092 fs/io_uring.c  static int __io_submit_sqe(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              2099 fs/io_uring.c  	if (unlikely(s->index >= ctx->sq_entries))
ctx              2152 fs/io_uring.c  	if (ctx->flags & IORING_SETUP_IOPOLL) {
ctx              2158 fs/io_uring.c  			mutex_lock(&ctx->uring_lock);
ctx              2161 fs/io_uring.c  			mutex_unlock(&ctx->uring_lock);
ctx              2167 fs/io_uring.c  static struct async_list *io_async_list_from_sqe(struct io_ring_ctx *ctx,
ctx              2173 fs/io_uring.c  		return &ctx->pending_async[READ];
ctx              2176 fs/io_uring.c  		return &ctx->pending_async[WRITE];
ctx              2194 fs/io_uring.c  	struct io_ring_ctx *ctx = req->ctx;
ctx              2202 fs/io_uring.c  	old_cred = override_creds(ctx->creds);
ctx              2203 fs/io_uring.c  	async_list = io_async_list_from_sqe(ctx, req->submit.sqe);
ctx              2224 fs/io_uring.c  			if (!mmget_not_zero(ctx->sqo_mm)) {
ctx              2227 fs/io_uring.c  				cur_mm = ctx->sqo_mm;
ctx              2238 fs/io_uring.c  				ret = __io_submit_sqe(ctx, req, s, false);
ctx              2255 fs/io_uring.c  			io_cqring_add_event(ctx, sqe->user_data, ret);
ctx              2372 fs/io_uring.c  static int io_req_set_file(struct io_ring_ctx *ctx, const struct sqe_submit *s,
ctx              2394 fs/io_uring.c  		if (unlikely(!ctx->user_files ||
ctx              2395 fs/io_uring.c  		    (unsigned) fd >= ctx->nr_user_files))
ctx              2397 fs/io_uring.c  		req->file = ctx->user_files[fd];
ctx              2410 fs/io_uring.c  static int __io_queue_sqe(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              2415 fs/io_uring.c  	ret = __io_submit_sqe(ctx, req, s, true);
ctx              2431 fs/io_uring.c  			list = io_async_list_from_sqe(ctx, s->sqe);
ctx              2436 fs/io_uring.c  				io_queue_async_work(ctx, req);
ctx              2452 fs/io_uring.c  		io_cqring_add_event(ctx, req->user_data, ret);
ctx              2461 fs/io_uring.c  static int io_queue_sqe(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              2466 fs/io_uring.c  	ret = io_req_defer(ctx, req, s);
ctx              2470 fs/io_uring.c  			io_cqring_add_event(ctx, s->sqe->user_data, ret);
ctx              2475 fs/io_uring.c  	return __io_queue_sqe(ctx, req, s);
ctx              2478 fs/io_uring.c  static int io_queue_link_head(struct io_ring_ctx *ctx, struct io_kiocb *req,
ctx              2485 fs/io_uring.c  		return io_queue_sqe(ctx, req, s);
ctx              2493 fs/io_uring.c  	ret = io_req_defer(ctx, req, s);
ctx              2498 fs/io_uring.c  			io_cqring_add_event(ctx, s->sqe->user_data, ret);
ctx              2510 fs/io_uring.c  	spin_lock_irq(&ctx->completion_lock);
ctx              2511 fs/io_uring.c  	list_add_tail(&shadow->list, &ctx->defer_list);
ctx              2512 fs/io_uring.c  	spin_unlock_irq(&ctx->completion_lock);
ctx              2515 fs/io_uring.c  		return __io_queue_sqe(ctx, req, s);
ctx              2522 fs/io_uring.c  static void io_submit_sqe(struct io_ring_ctx *ctx, struct sqe_submit *s,
ctx              2535 fs/io_uring.c  	req = io_get_req(ctx, state);
ctx              2541 fs/io_uring.c  	ret = io_req_set_file(ctx, s, state, req);
ctx              2546 fs/io_uring.c  		io_cqring_add_event(ctx, s->sqe->user_data, ret);
ctx              2595 fs/io_uring.c  		io_queue_sqe(ctx, req, s);
ctx              2615 fs/io_uring.c  				  struct io_ring_ctx *ctx, unsigned max_ios)
ctx              2623 fs/io_uring.c  static void io_commit_sqring(struct io_ring_ctx *ctx)
ctx              2625 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx              2627 fs/io_uring.c  	if (ctx->cached_sq_head != READ_ONCE(rings->sq.head)) {
ctx              2633 fs/io_uring.c  		smp_store_release(&rings->sq.head, ctx->cached_sq_head);
ctx              2645 fs/io_uring.c  static bool io_get_sqring(struct io_ring_ctx *ctx, struct sqe_submit *s)
ctx              2647 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx              2648 fs/io_uring.c  	u32 *sq_array = ctx->sq_array;
ctx              2659 fs/io_uring.c  	head = ctx->cached_sq_head;
ctx              2664 fs/io_uring.c  	head = READ_ONCE(sq_array[head & ctx->sq_mask]);
ctx              2665 fs/io_uring.c  	if (head < ctx->sq_entries) {
ctx              2667 fs/io_uring.c  		s->sqe = &ctx->sq_sqes[head];
ctx              2668 fs/io_uring.c  		s->sequence = ctx->cached_sq_head;
ctx              2669 fs/io_uring.c  		ctx->cached_sq_head++;
ctx              2674 fs/io_uring.c  	ctx->cached_sq_head++;
ctx              2675 fs/io_uring.c  	ctx->cached_sq_dropped++;
ctx              2676 fs/io_uring.c  	WRITE_ONCE(rings->sq_dropped, ctx->cached_sq_dropped);
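
io_get_sqring()/io_commit_sqring() implement the userspace handshake:
the kernel reads the published tail with an acquire load and, after
consuming entries through a private cached head, publishes the new head
with a release store so slot contents are observed before slots are
recycled.  A C11-atomics analogue of that smp_load_acquire()/
smp_store_release() pairing:

#include <stdatomic.h>
#include <stdio.h>

static _Atomic unsigned int sq_tail, sq_head;
static unsigned int cached_sq_head;

static int get_sqring(void)
{
	unsigned int tail =
		atomic_load_explicit(&sq_tail, memory_order_acquire);

	if (cached_sq_head == tail)
		return 0;		/* nothing new was submitted */
	cached_sq_head++;		/* consume one entry privately */
	return 1;
}

static void commit_sqring(void)
{
	/* publish only after the consumed entries were fully read */
	atomic_store_explicit(&sq_head, cached_sq_head,
			      memory_order_release);
}

int main(void)
{
	atomic_store(&sq_tail, 2);	/* as if userspace queued two SQEs */
	while (get_sqring())
		;
	commit_sqring();
	printf("head now %u\n", atomic_load(&sq_head));
	return 0;
}
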
ctx              2680 fs/io_uring.c  static int io_submit_sqes(struct io_ring_ctx *ctx, unsigned int nr,
ctx              2690 fs/io_uring.c  		io_submit_state_start(&state, ctx, nr);
ctx              2697 fs/io_uring.c  		if (!io_get_sqring(ctx, &s))
ctx              2705 fs/io_uring.c  			io_queue_link_head(ctx, link, &link->submit, shadow_req);
ctx              2713 fs/io_uring.c  				shadow_req = io_get_req(ctx, NULL);
ctx              2724 fs/io_uring.c  			io_cqring_add_event(ctx, s.sqe->user_data,
ctx              2730 fs/io_uring.c  			io_submit_sqe(ctx, &s, statep, &link);
ctx              2736 fs/io_uring.c  		io_queue_link_head(ctx, link, &link->submit, shadow_req);
ctx              2745 fs/io_uring.c  	struct io_ring_ctx *ctx = data;
ctx              2753 fs/io_uring.c  	complete(&ctx->sqo_thread_started);
ctx              2757 fs/io_uring.c  	old_cred = override_creds(ctx->creds);
ctx              2767 fs/io_uring.c  			if (ctx->flags & IORING_SETUP_IOPOLL) {
ctx              2776 fs/io_uring.c  				mutex_lock(&ctx->uring_lock);
ctx              2777 fs/io_uring.c  				if (!list_empty(&ctx->poll_list))
ctx              2778 fs/io_uring.c  					io_iopoll_getevents(ctx, &nr_events, 0);
ctx              2781 fs/io_uring.c  				mutex_unlock(&ctx->uring_lock);
ctx              2792 fs/io_uring.c  				timeout = jiffies + ctx->sq_thread_idle;
ctx              2795 fs/io_uring.c  		to_submit = io_sqring_entries(ctx);
ctx              2819 fs/io_uring.c  			prepare_to_wait(&ctx->sqo_wait, &wait,
ctx              2823 fs/io_uring.c  			ctx->rings->sq_flags |= IORING_SQ_NEED_WAKEUP;
ctx              2827 fs/io_uring.c  			to_submit = io_sqring_entries(ctx);
ctx              2830 fs/io_uring.c  					finish_wait(&ctx->sqo_wait, &wait);
ctx              2836 fs/io_uring.c  				finish_wait(&ctx->sqo_wait, &wait);
ctx              2838 fs/io_uring.c  				ctx->rings->sq_flags &= ~IORING_SQ_NEED_WAKEUP;
ctx              2841 fs/io_uring.c  			finish_wait(&ctx->sqo_wait, &wait);
ctx              2843 fs/io_uring.c  			ctx->rings->sq_flags &= ~IORING_SQ_NEED_WAKEUP;
ctx              2848 fs/io_uring.c  			mm_fault = !mmget_not_zero(ctx->sqo_mm);
ctx              2850 fs/io_uring.c  				use_mm(ctx->sqo_mm);
ctx              2851 fs/io_uring.c  				cur_mm = ctx->sqo_mm;
ctx              2855 fs/io_uring.c  		to_submit = min(to_submit, ctx->sq_entries);
ctx              2856 fs/io_uring.c  		inflight += io_submit_sqes(ctx, to_submit, cur_mm != NULL,
ctx              2860 fs/io_uring.c  		io_commit_sqring(ctx);
ctx              2875 fs/io_uring.c  static int io_ring_submit(struct io_ring_ctx *ctx, unsigned int to_submit)
ctx              2884 fs/io_uring.c  		io_submit_state_start(&state, ctx, to_submit);
ctx              2891 fs/io_uring.c  		if (!io_get_sqring(ctx, &s))
ctx              2899 fs/io_uring.c  			io_queue_link_head(ctx, link, &link->submit, shadow_req);
ctx              2907 fs/io_uring.c  				shadow_req = io_get_req(ctx, NULL);
ctx              2921 fs/io_uring.c  		io_submit_sqe(ctx, &s, statep, &link);
ctx              2925 fs/io_uring.c  		io_queue_link_head(ctx, link, &link->submit, shadow_req);
ctx              2929 fs/io_uring.c  	io_commit_sqring(ctx);
ctx              2936 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx              2943 fs/io_uring.c  	struct io_ring_ctx *ctx = iowq->ctx;
ctx              2950 fs/io_uring.c  	return io_cqring_events(ctx->rings) >= iowq->to_wait ||
ctx              2951 fs/io_uring.c  			atomic_read(&ctx->cq_timeouts) != iowq->nr_timeouts;
ctx              2970 fs/io_uring.c  static int io_cqring_wait(struct io_ring_ctx *ctx, int min_events,
ctx              2979 fs/io_uring.c  		.ctx		= ctx,
ctx              2982 fs/io_uring.c  	struct io_rings *rings = ctx->rings;
ctx              3002 fs/io_uring.c  	iowq.nr_timeouts = atomic_read(&ctx->cq_timeouts);
ctx              3004 fs/io_uring.c  		prepare_to_wait_exclusive(&ctx->wait, &iowq.wq,
ctx              3014 fs/io_uring.c  	finish_wait(&ctx->wait, &iowq.wq);
ctx              3023 fs/io_uring.c  static void __io_sqe_files_unregister(struct io_ring_ctx *ctx)
ctx              3026 fs/io_uring.c  	if (ctx->ring_sock) {
ctx              3027 fs/io_uring.c  		struct sock *sock = ctx->ring_sock->sk;
ctx              3036 fs/io_uring.c  	for (i = 0; i < ctx->nr_user_files; i++)
ctx              3037 fs/io_uring.c  		fput(ctx->user_files[i]);
ctx              3041 fs/io_uring.c  static int io_sqe_files_unregister(struct io_ring_ctx *ctx)
ctx              3043 fs/io_uring.c  	if (!ctx->user_files)
ctx              3046 fs/io_uring.c  	__io_sqe_files_unregister(ctx);
ctx              3047 fs/io_uring.c  	kfree(ctx->user_files);
ctx              3048 fs/io_uring.c  	ctx->user_files = NULL;
ctx              3049 fs/io_uring.c  	ctx->nr_user_files = 0;
ctx              3053 fs/io_uring.c  static void io_sq_thread_stop(struct io_ring_ctx *ctx)
ctx              3055 fs/io_uring.c  	if (ctx->sqo_thread) {
ctx              3056 fs/io_uring.c  		wait_for_completion(&ctx->sqo_thread_started);
ctx              3062 fs/io_uring.c  		kthread_park(ctx->sqo_thread);
ctx              3063 fs/io_uring.c  		kthread_stop(ctx->sqo_thread);
ctx              3064 fs/io_uring.c  		ctx->sqo_thread = NULL;
ctx              3068 fs/io_uring.c  static void io_finish_async(struct io_ring_ctx *ctx)
ctx              3072 fs/io_uring.c  	io_sq_thread_stop(ctx);
ctx              3074 fs/io_uring.c  	for (i = 0; i < ARRAY_SIZE(ctx->sqo_wq); i++) {
ctx              3075 fs/io_uring.c  		if (ctx->sqo_wq[i]) {
ctx              3076 fs/io_uring.c  			destroy_workqueue(ctx->sqo_wq[i]);
ctx              3077 fs/io_uring.c  			ctx->sqo_wq[i] = NULL;
ctx              3085 fs/io_uring.c  	struct io_ring_ctx *ctx = skb->sk->sk_user_data;
ctx              3088 fs/io_uring.c  	for (i = 0; i < ARRAY_SIZE(ctx->sqo_wq); i++)
ctx              3089 fs/io_uring.c  		if (ctx->sqo_wq[i])
ctx              3090 fs/io_uring.c  			flush_workqueue(ctx->sqo_wq[i]);
ctx              3100 fs/io_uring.c  static int __io_sqe_files_scm(struct io_ring_ctx *ctx, int nr, int offset)
ctx              3102 fs/io_uring.c  	struct sock *sk = ctx->ring_sock->sk;
ctx              3120 fs/io_uring.c  	fpl->user = get_uid(ctx->user);
ctx              3122 fs/io_uring.c  		fpl->fp[i] = get_file(ctx->user_files[i + offset]);
ctx              3142 fs/io_uring.c  static int io_sqe_files_scm(struct io_ring_ctx *ctx)
ctx              3148 fs/io_uring.c  	left = ctx->nr_user_files;
ctx              3152 fs/io_uring.c  		ret = __io_sqe_files_scm(ctx, this_files, total);
ctx              3162 fs/io_uring.c  	while (total < ctx->nr_user_files) {
ctx              3163 fs/io_uring.c  		fput(ctx->user_files[total]);
ctx              3170 fs/io_uring.c  static int io_sqe_files_scm(struct io_ring_ctx *ctx)
ctx              3176 fs/io_uring.c  static int io_sqe_files_register(struct io_ring_ctx *ctx, void __user *arg,
ctx              3183 fs/io_uring.c  	if (ctx->user_files)
ctx              3190 fs/io_uring.c  	ctx->user_files = kcalloc(nr_args, sizeof(struct file *), GFP_KERNEL);
ctx              3191 fs/io_uring.c  	if (!ctx->user_files)
ctx              3199 fs/io_uring.c  		ctx->user_files[i] = fget(fd);
ctx              3202 fs/io_uring.c  		if (!ctx->user_files[i])
ctx              3211 fs/io_uring.c  		if (ctx->user_files[i]->f_op == &io_uring_fops) {
ctx              3212 fs/io_uring.c  			fput(ctx->user_files[i]);
ctx              3215 fs/io_uring.c  		ctx->nr_user_files++;
ctx              3220 fs/io_uring.c  		for (i = 0; i < ctx->nr_user_files; i++)
ctx              3221 fs/io_uring.c  			fput(ctx->user_files[i]);
ctx              3223 fs/io_uring.c  		kfree(ctx->user_files);
ctx              3224 fs/io_uring.c  		ctx->user_files = NULL;
ctx              3225 fs/io_uring.c  		ctx->nr_user_files = 0;
ctx              3229 fs/io_uring.c  	ret = io_sqe_files_scm(ctx);
ctx              3231 fs/io_uring.c  		io_sqe_files_unregister(ctx);
ctx              3236 fs/io_uring.c  static int io_sq_offload_start(struct io_ring_ctx *ctx,
ctx              3242 fs/io_uring.c  	ctx->sqo_mm = current->mm;
ctx              3244 fs/io_uring.c  	if (ctx->flags & IORING_SETUP_SQPOLL) {
ctx              3249 fs/io_uring.c  		ctx->sq_thread_idle = msecs_to_jiffies(p->sq_thread_idle);
ctx              3250 fs/io_uring.c  		if (!ctx->sq_thread_idle)
ctx              3251 fs/io_uring.c  			ctx->sq_thread_idle = HZ;
ctx              3262 fs/io_uring.c  			ctx->sqo_thread = kthread_create_on_cpu(io_sq_thread,
ctx              3263 fs/io_uring.c  							ctx, cpu,
ctx              3266 fs/io_uring.c  			ctx->sqo_thread = kthread_create(io_sq_thread, ctx,
ctx              3269 fs/io_uring.c  		if (IS_ERR(ctx->sqo_thread)) {
ctx              3270 fs/io_uring.c  			ret = PTR_ERR(ctx->sqo_thread);
ctx              3271 fs/io_uring.c  			ctx->sqo_thread = NULL;
ctx              3274 fs/io_uring.c  		wake_up_process(ctx->sqo_thread);
ctx              3282 fs/io_uring.c  	ctx->sqo_wq[0] = alloc_workqueue("io_ring-wq",
ctx              3284 fs/io_uring.c  			min(ctx->sq_entries - 1, 2 * num_online_cpus()));
ctx              3285 fs/io_uring.c  	if (!ctx->sqo_wq[0]) {
ctx              3297 fs/io_uring.c  	ctx->sqo_wq[1] = alloc_workqueue("io_ring-write-wq",
ctx              3299 fs/io_uring.c  	if (!ctx->sqo_wq[1]) {
ctx              3306 fs/io_uring.c  	io_finish_async(ctx);
ctx              3307 fs/io_uring.c  	mmdrop(ctx->sqo_mm);
ctx              3308 fs/io_uring.c  	ctx->sqo_mm = NULL;
ctx              3396 fs/io_uring.c  static int io_sqe_buffer_unregister(struct io_ring_ctx *ctx)
ctx              3400 fs/io_uring.c  	if (!ctx->user_bufs)
ctx              3403 fs/io_uring.c  	for (i = 0; i < ctx->nr_user_bufs; i++) {
ctx              3404 fs/io_uring.c  		struct io_mapped_ubuf *imu = &ctx->user_bufs[i];
ctx              3409 fs/io_uring.c  		if (ctx->account_mem)
ctx              3410 fs/io_uring.c  			io_unaccount_mem(ctx->user, imu->nr_bvecs);
ctx              3415 fs/io_uring.c  	kfree(ctx->user_bufs);
ctx              3416 fs/io_uring.c  	ctx->user_bufs = NULL;
ctx              3417 fs/io_uring.c  	ctx->nr_user_bufs = 0;
ctx              3421 fs/io_uring.c  static int io_copy_iov(struct io_ring_ctx *ctx, struct iovec *dst,
ctx              3427 fs/io_uring.c  	if (ctx->compat) {
ctx              3446 fs/io_uring.c  static int io_sqe_buffer_register(struct io_ring_ctx *ctx, void __user *arg,
ctx              3454 fs/io_uring.c  	if (ctx->user_bufs)
ctx              3459 fs/io_uring.c  	ctx->user_bufs = kcalloc(nr_args, sizeof(struct io_mapped_ubuf),
ctx              3461 fs/io_uring.c  	if (!ctx->user_bufs)
ctx              3465 fs/io_uring.c  		struct io_mapped_ubuf *imu = &ctx->user_bufs[i];
ctx              3471 fs/io_uring.c  		ret = io_copy_iov(ctx, &iov, arg, i);
ctx              3493 fs/io_uring.c  		if (ctx->account_mem) {
ctx              3494 fs/io_uring.c  			ret = io_account_mem(ctx->user, nr_pages);
ctx              3510 fs/io_uring.c  				if (ctx->account_mem)
ctx              3511 fs/io_uring.c  					io_unaccount_mem(ctx->user, nr_pages);
ctx              3521 fs/io_uring.c  			if (ctx->account_mem)
ctx              3522 fs/io_uring.c  				io_unaccount_mem(ctx->user, nr_pages);
ctx              3553 fs/io_uring.c  			if (ctx->account_mem)
ctx              3554 fs/io_uring.c  				io_unaccount_mem(ctx->user, nr_pages);
ctx              3576 fs/io_uring.c  		ctx->nr_user_bufs++;
ctx              3584 fs/io_uring.c  	io_sqe_buffer_unregister(ctx);
ctx              3588 fs/io_uring.c  static int io_eventfd_register(struct io_ring_ctx *ctx, void __user *arg)
ctx              3593 fs/io_uring.c  	if (ctx->cq_ev_fd)
ctx              3599 fs/io_uring.c  	ctx->cq_ev_fd = eventfd_ctx_fdget(fd);
ctx              3600 fs/io_uring.c  	if (IS_ERR(ctx->cq_ev_fd)) {
ctx              3601 fs/io_uring.c  		int ret = PTR_ERR(ctx->cq_ev_fd);
ctx              3602 fs/io_uring.c  		ctx->cq_ev_fd = NULL;
ctx              3609 fs/io_uring.c  static int io_eventfd_unregister(struct io_ring_ctx *ctx)
ctx              3611 fs/io_uring.c  	if (ctx->cq_ev_fd) {
ctx              3612 fs/io_uring.c  		eventfd_ctx_put(ctx->cq_ev_fd);
ctx              3613 fs/io_uring.c  		ctx->cq_ev_fd = NULL;
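
The cq_ev_fd registration at fs/io_uring.c 3588-3613 stores an eventfd context so completions can be signalled to userspace. The underlying eventfd(2) counter behaviour is easy to demonstrate standalone: each 8-byte write adds to the counter, a read drains it.

	#include <stdint.h>
	#include <stdio.h>
	#include <unistd.h>
	#include <sys/eventfd.h>

	int main(void)
	{
		uint64_t n = 1, got;
		int efd = eventfd(0, EFD_CLOEXEC);

		if (efd < 0)
			return 1;
		if (write(efd, &n, sizeof(n)) != sizeof(n))	/* completion posted */
			return 1;
		if (write(efd, &n, sizeof(n)) != sizeof(n))	/* second completion */
			return 1;
		if (read(efd, &got, sizeof(got)) != sizeof(got))
			return 1;
		printf("completions: %llu\n", (unsigned long long)got);	/* prints 2 */
		close(efd);
		return 0;
	}
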
ctx              3620 fs/io_uring.c  static void io_ring_ctx_free(struct io_ring_ctx *ctx)
ctx              3622 fs/io_uring.c  	io_finish_async(ctx);
ctx              3623 fs/io_uring.c  	if (ctx->sqo_mm)
ctx              3624 fs/io_uring.c  		mmdrop(ctx->sqo_mm);
ctx              3626 fs/io_uring.c  	io_iopoll_reap_events(ctx);
ctx              3627 fs/io_uring.c  	io_sqe_buffer_unregister(ctx);
ctx              3628 fs/io_uring.c  	io_sqe_files_unregister(ctx);
ctx              3629 fs/io_uring.c  	io_eventfd_unregister(ctx);
ctx              3632 fs/io_uring.c  	if (ctx->ring_sock) {
ctx              3633 fs/io_uring.c  		ctx->ring_sock->file = NULL; /* so that iput() is called */
ctx              3634 fs/io_uring.c  		sock_release(ctx->ring_sock);
ctx              3638 fs/io_uring.c  	io_mem_free(ctx->rings);
ctx              3639 fs/io_uring.c  	io_mem_free(ctx->sq_sqes);
ctx              3641 fs/io_uring.c  	percpu_ref_exit(&ctx->refs);
ctx              3642 fs/io_uring.c  	if (ctx->account_mem)
ctx              3643 fs/io_uring.c  		io_unaccount_mem(ctx->user,
ctx              3644 fs/io_uring.c  				ring_pages(ctx->sq_entries, ctx->cq_entries));
ctx              3645 fs/io_uring.c  	free_uid(ctx->user);
ctx              3646 fs/io_uring.c  	if (ctx->creds)
ctx              3647 fs/io_uring.c  		put_cred(ctx->creds);
ctx              3648 fs/io_uring.c  	kfree(ctx);
ctx              3653 fs/io_uring.c  	struct io_ring_ctx *ctx = file->private_data;
ctx              3656 fs/io_uring.c  	poll_wait(file, &ctx->cq_wait, wait);
ctx              3662 fs/io_uring.c  	if (READ_ONCE(ctx->rings->sq.tail) - ctx->cached_sq_head !=
ctx              3663 fs/io_uring.c  	    ctx->rings->sq_ring_entries)
ctx              3665 fs/io_uring.c  	if (READ_ONCE(ctx->rings->cq.head) != ctx->cached_cq_tail)
ctx              3673 fs/io_uring.c  	struct io_ring_ctx *ctx = file->private_data;
ctx              3675 fs/io_uring.c  	return fasync_helper(fd, file, on, &ctx->cq_fasync);
ctx              3678 fs/io_uring.c  static void io_ring_ctx_wait_and_kill(struct io_ring_ctx *ctx)
ctx              3680 fs/io_uring.c  	mutex_lock(&ctx->uring_lock);
ctx              3681 fs/io_uring.c  	percpu_ref_kill(&ctx->refs);
ctx              3682 fs/io_uring.c  	mutex_unlock(&ctx->uring_lock);
ctx              3684 fs/io_uring.c  	io_kill_timeouts(ctx);
ctx              3685 fs/io_uring.c  	io_poll_remove_all(ctx);
ctx              3686 fs/io_uring.c  	io_iopoll_reap_events(ctx);
ctx              3687 fs/io_uring.c  	wait_for_completion(&ctx->ctx_done);
ctx              3688 fs/io_uring.c  	io_ring_ctx_free(ctx);
ctx              3693 fs/io_uring.c  	struct io_ring_ctx *ctx = file->private_data;
ctx              3696 fs/io_uring.c  	io_ring_ctx_wait_and_kill(ctx);
ctx              3704 fs/io_uring.c  	struct io_ring_ctx *ctx = file->private_data;
ctx              3712 fs/io_uring.c  		ptr = ctx->rings;
ctx              3715 fs/io_uring.c  		ptr = ctx->sq_sqes;
ctx              3733 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx              3750 fs/io_uring.c  	ctx = f.file->private_data;
ctx              3751 fs/io_uring.c  	if (!percpu_ref_tryget(&ctx->refs))
ctx              3760 fs/io_uring.c  	if (ctx->flags & IORING_SETUP_SQPOLL) {
ctx              3762 fs/io_uring.c  			wake_up(&ctx->sqo_wait);
ctx              3765 fs/io_uring.c  		to_submit = min(to_submit, ctx->sq_entries);
ctx              3767 fs/io_uring.c  		mutex_lock(&ctx->uring_lock);
ctx              3768 fs/io_uring.c  		submitted = io_ring_submit(ctx, to_submit);
ctx              3769 fs/io_uring.c  		mutex_unlock(&ctx->uring_lock);
ctx              3777 fs/io_uring.c  		min_complete = min(min_complete, ctx->cq_entries);
ctx              3779 fs/io_uring.c  		if (ctx->flags & IORING_SETUP_IOPOLL) {
ctx              3780 fs/io_uring.c  			ret = io_iopoll_check(ctx, &nr_events, min_complete);
ctx              3782 fs/io_uring.c  			ret = io_cqring_wait(ctx, min_complete, sig, sigsz);
ctx              3787 fs/io_uring.c  	percpu_ref_put(&ctx->refs);
ctx              3800 fs/io_uring.c  static int io_allocate_scq_urings(struct io_ring_ctx *ctx,
ctx              3814 fs/io_uring.c  	ctx->rings = rings;
ctx              3815 fs/io_uring.c  	ctx->sq_array = (u32 *)((char *)rings + sq_array_offset);
ctx              3820 fs/io_uring.c  	ctx->sq_mask = rings->sq_ring_mask;
ctx              3821 fs/io_uring.c  	ctx->cq_mask = rings->cq_ring_mask;
ctx              3822 fs/io_uring.c  	ctx->sq_entries = rings->sq_ring_entries;
ctx              3823 fs/io_uring.c  	ctx->cq_entries = rings->cq_ring_entries;
ctx              3827 fs/io_uring.c  		io_mem_free(ctx->rings);
ctx              3828 fs/io_uring.c  		ctx->rings = NULL;
ctx              3832 fs/io_uring.c  	ctx->sq_sqes = io_mem_alloc(size);
ctx              3833 fs/io_uring.c  	if (!ctx->sq_sqes) {
ctx              3834 fs/io_uring.c  		io_mem_free(ctx->rings);
ctx              3835 fs/io_uring.c  		ctx->rings = NULL;
ctx              3848 fs/io_uring.c  static int io_uring_get_fd(struct io_ring_ctx *ctx)
ctx              3855 fs/io_uring.c  				&ctx->ring_sock);
ctx              3864 fs/io_uring.c  	file = anon_inode_getfile("[io_uring]", &io_uring_fops, ctx,
ctx              3873 fs/io_uring.c  	ctx->ring_sock->file = file;
ctx              3874 fs/io_uring.c  	ctx->ring_sock->sk->sk_user_data = ctx;
ctx              3880 fs/io_uring.c  	sock_release(ctx->ring_sock);
ctx              3881 fs/io_uring.c  	ctx->ring_sock = NULL;
ctx              3889 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx              3917 fs/io_uring.c  	ctx = io_ring_ctx_alloc(p);
ctx              3918 fs/io_uring.c  	if (!ctx) {
ctx              3925 fs/io_uring.c  	ctx->compat = in_compat_syscall();
ctx              3926 fs/io_uring.c  	ctx->account_mem = account_mem;
ctx              3927 fs/io_uring.c  	ctx->user = user;
ctx              3929 fs/io_uring.c  	ctx->creds = get_current_cred();
ctx              3930 fs/io_uring.c  	if (!ctx->creds) {
ctx              3935 fs/io_uring.c  	ret = io_allocate_scq_urings(ctx, p);
ctx              3939 fs/io_uring.c  	ret = io_sq_offload_start(ctx, p);
ctx              3950 fs/io_uring.c  	p->sq_off.array = (char *)ctx->sq_array - (char *)ctx->rings;
ctx              3964 fs/io_uring.c  	ret = io_uring_get_fd(ctx);
ctx              3971 fs/io_uring.c  	io_ring_ctx_wait_and_kill(ctx);
ctx              4013 fs/io_uring.c  static int __io_uring_register(struct io_ring_ctx *ctx, unsigned opcode,
ctx              4015 fs/io_uring.c  	__releases(ctx->uring_lock)
ctx              4016 fs/io_uring.c  	__acquires(ctx->uring_lock)
ctx              4025 fs/io_uring.c  	if (percpu_ref_is_dying(&ctx->refs))
ctx              4028 fs/io_uring.c  	percpu_ref_kill(&ctx->refs);
ctx              4037 fs/io_uring.c  	mutex_unlock(&ctx->uring_lock);
ctx              4038 fs/io_uring.c  	wait_for_completion(&ctx->ctx_done);
ctx              4039 fs/io_uring.c  	mutex_lock(&ctx->uring_lock);
ctx              4043 fs/io_uring.c  		ret = io_sqe_buffer_register(ctx, arg, nr_args);
ctx              4049 fs/io_uring.c  		ret = io_sqe_buffer_unregister(ctx);
ctx              4052 fs/io_uring.c  		ret = io_sqe_files_register(ctx, arg, nr_args);
ctx              4058 fs/io_uring.c  		ret = io_sqe_files_unregister(ctx);
ctx              4064 fs/io_uring.c  		ret = io_eventfd_register(ctx, arg);
ctx              4070 fs/io_uring.c  		ret = io_eventfd_unregister(ctx);
ctx              4078 fs/io_uring.c  	reinit_completion(&ctx->ctx_done);
ctx              4079 fs/io_uring.c  	percpu_ref_reinit(&ctx->refs);
ctx              4086 fs/io_uring.c  	struct io_ring_ctx *ctx;
ctx              4098 fs/io_uring.c  	ctx = f.file->private_data;
ctx              4100 fs/io_uring.c  	mutex_lock(&ctx->uring_lock);
ctx              4101 fs/io_uring.c  	ret = __io_uring_register(ctx, opcode, arg, nr_args);
ctx              4102 fs/io_uring.c  	mutex_unlock(&ctx->uring_lock);
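
__io_uring_register() (fs/io_uring.c 4013-4102 above) quiesces the ring before touching registered resources: kill the percpu ref, wait for in-flight users to drain, mutate, then reinit. Below is a rough pthread model of that lifecycle, a sketch of the idea only; percpu_ref has no userspace equivalent, and the names are illustrative.

	#include <pthread.h>
	#include <stdbool.h>

	struct qctx {
		pthread_mutex_t lock;
		pthread_cond_t  idle;
		long		users;
		bool		dying;
	};

	static bool qctx_get(struct qctx *c)	/* like percpu_ref_tryget(): fails while dying */
	{
		bool ok;

		pthread_mutex_lock(&c->lock);
		ok = !c->dying;
		if (ok)
			c->users++;
		pthread_mutex_unlock(&c->lock);
		return ok;
	}

	static void qctx_put(struct qctx *c)
	{
		pthread_mutex_lock(&c->lock);
		if (--c->users == 0)
			pthread_cond_broadcast(&c->idle);	/* "complete(&ctx_done)" */
		pthread_mutex_unlock(&c->lock);
	}

	static void qctx_quiesce_and(struct qctx *c, void (*mutate)(struct qctx *))
	{
		pthread_mutex_lock(&c->lock);
		c->dying = true;		/* "percpu_ref_kill" */
		while (c->users)		/* "wait_for_completion" */
			pthread_cond_wait(&c->idle, &c->lock);
		mutate(c);			/* registration opcode runs alone */
		c->dying = false;		/* "percpu_ref_reinit" */
		pthread_mutex_unlock(&c->lock);
	}
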
ctx               210 fs/iomap/buffered-io.c 	struct iomap_readpage_ctx *ctx = data;
ctx               211 fs/iomap/buffered-io.c 	struct page *page = ctx->cur_page;
ctx               235 fs/iomap/buffered-io.c 	ctx->cur_page_in_bio = true;
ctx               241 fs/iomap/buffered-io.c 	if (ctx->bio && bio_end_sector(ctx->bio) == sector)
ctx               245 fs/iomap/buffered-io.c 	    __bio_try_merge_page(ctx->bio, page, plen, poff, &same_page)) {
ctx               259 fs/iomap/buffered-io.c 	if (!ctx->bio || !is_contig || bio_full(ctx->bio, plen)) {
ctx               263 fs/iomap/buffered-io.c 		if (ctx->bio)
ctx               264 fs/iomap/buffered-io.c 			submit_bio(ctx->bio);
ctx               266 fs/iomap/buffered-io.c 		if (ctx->is_readahead) /* same as readahead_gfp_mask */
ctx               268 fs/iomap/buffered-io.c 		ctx->bio = bio_alloc(gfp, min(BIO_MAX_PAGES, nr_vecs));
ctx               269 fs/iomap/buffered-io.c 		ctx->bio->bi_opf = REQ_OP_READ;
ctx               270 fs/iomap/buffered-io.c 		if (ctx->is_readahead)
ctx               271 fs/iomap/buffered-io.c 			ctx->bio->bi_opf |= REQ_RAHEAD;
ctx               272 fs/iomap/buffered-io.c 		ctx->bio->bi_iter.bi_sector = sector;
ctx               273 fs/iomap/buffered-io.c 		bio_set_dev(ctx->bio, iomap->bdev);
ctx               274 fs/iomap/buffered-io.c 		ctx->bio->bi_end_io = iomap_read_end_io;
ctx               277 fs/iomap/buffered-io.c 	bio_add_page(ctx->bio, page, plen, poff);
ctx               291 fs/iomap/buffered-io.c 	struct iomap_readpage_ctx ctx = { .cur_page = page };
ctx               298 fs/iomap/buffered-io.c 				PAGE_SIZE - poff, 0, ops, &ctx,
ctx               307 fs/iomap/buffered-io.c 	if (ctx.bio) {
ctx               308 fs/iomap/buffered-io.c 		submit_bio(ctx.bio);
ctx               309 fs/iomap/buffered-io.c 		WARN_ON_ONCE(!ctx.cur_page_in_bio);
ctx               311 fs/iomap/buffered-io.c 		WARN_ON_ONCE(ctx.cur_page_in_bio);
ctx               356 fs/iomap/buffered-io.c 	struct iomap_readpage_ctx *ctx = data;
ctx               360 fs/iomap/buffered-io.c 		if (ctx->cur_page && offset_in_page(pos + done) == 0) {
ctx               361 fs/iomap/buffered-io.c 			if (!ctx->cur_page_in_bio)
ctx               362 fs/iomap/buffered-io.c 				unlock_page(ctx->cur_page);
ctx               363 fs/iomap/buffered-io.c 			put_page(ctx->cur_page);
ctx               364 fs/iomap/buffered-io.c 			ctx->cur_page = NULL;
ctx               366 fs/iomap/buffered-io.c 		if (!ctx->cur_page) {
ctx               367 fs/iomap/buffered-io.c 			ctx->cur_page = iomap_next_page(inode, ctx->pages,
ctx               369 fs/iomap/buffered-io.c 			if (!ctx->cur_page)
ctx               371 fs/iomap/buffered-io.c 			ctx->cur_page_in_bio = false;
ctx               374 fs/iomap/buffered-io.c 				ctx, iomap);
ctx               384 fs/iomap/buffered-io.c 	struct iomap_readpage_ctx ctx = {
ctx               394 fs/iomap/buffered-io.c 				&ctx, iomap_readpages_actor);
ctx               404 fs/iomap/buffered-io.c 	if (ctx.bio)
ctx               405 fs/iomap/buffered-io.c 		submit_bio(ctx.bio);
ctx               406 fs/iomap/buffered-io.c 	if (ctx.cur_page) {
ctx               407 fs/iomap/buffered-io.c 		if (!ctx.cur_page_in_bio)
ctx               408 fs/iomap/buffered-io.c 			unlock_page(ctx.cur_page);
ctx               409 fs/iomap/buffered-io.c 		put_page(ctx.cur_page);
ctx               416 fs/iomap/buffered-io.c 	WARN_ON_ONCE(!ret && !list_empty(ctx.pages));
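
The iomap readpage context above keeps one bio open and submits it only when the next page is not disk-contiguous with it. A userspace model of that coalescing rule; sector/len are abstract units and submit() is a stand-in for submit_bio().

	#include <stdio.h>

	struct batch {
		long start, len;	/* current open extent; len == 0 means none */
	};

	static void submit(struct batch *b)
	{
		if (b->len)
			printf("submit [%ld, +%ld)\n", b->start, b->len);
		b->len = 0;
	}

	static void add_page(struct batch *b, long sector, long len)
	{
		if (b->len && b->start + b->len == sector) {
			b->len += len;		/* contiguous: merge into open extent */
			return;
		}
		submit(b);			/* discontiguous: flush what we have */
		b->start = sector;
		b->len = len;
	}

	int main(void)
	{
		struct batch b = { 0, 0 };

		add_page(&b, 0, 8);
		add_page(&b, 8, 8);	/* merged with the first */
		add_page(&b, 100, 8);	/* forces a submit */
		submit(&b);		/* drain, like the tail submit_bio() */
		return 0;
	}
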
ctx                49 fs/iomap/fiemap.c 	struct fiemap_ctx *ctx = data;
ctx                55 fs/iomap/fiemap.c 	ret = iomap_to_fiemap(ctx->fi, &ctx->prev, 0);
ctx                56 fs/iomap/fiemap.c 	ctx->prev = *iomap;
ctx                70 fs/iomap/fiemap.c 	struct fiemap_ctx ctx;
ctx                73 fs/iomap/fiemap.c 	memset(&ctx, 0, sizeof(ctx));
ctx                74 fs/iomap/fiemap.c 	ctx.fi = fi;
ctx                75 fs/iomap/fiemap.c 	ctx.prev.type = IOMAP_HOLE;
ctx                88 fs/iomap/fiemap.c 		ret = iomap_apply(inode, start, len, IOMAP_REPORT, ops, &ctx,
ctx               102 fs/iomap/fiemap.c 	if (ctx.prev.type != IOMAP_HOLE) {
ctx               103 fs/iomap/fiemap.c 		ret = iomap_to_fiemap(fi, &ctx.prev, FIEMAP_EXTENT_LAST);
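
iomap_fiemap() cannot know an extent is the last until the walk produces the next one, so fiemap_ctx holds the previous extent and reports it one step late, flushing the leftover in ctx.prev with FIEMAP_EXTENT_LAST after the loop. The same one-behind scheme in miniature:

	#include <stdio.h>

	static void emit(long e, int last)
	{
		printf("extent %ld%s\n", e, last ? " (LAST)" : "");
	}

	int main(void)
	{
		long extents[] = { 10, 20, 30 };
		long prev = -1;		/* -1 plays the role of IOMAP_HOLE */
		unsigned i;

		for (i = 0; i < 3; i++) {
			if (prev != -1)
				emit(prev, 0);	/* safe: provably not the last */
			prev = extents[i];
		}
		if (prev != -1)
			emit(prev, 1);		/* the held-back final extent */
		return 0;
	}
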
ctx                83 fs/isofs/dir.c 		struct dir_context *ctx,
ctx                98 fs/isofs/dir.c 	offset = ctx->pos & (bufsize - 1);
ctx                99 fs/isofs/dir.c 	block = ctx->pos >> bufbits;
ctx               101 fs/isofs/dir.c 	while (ctx->pos < inode->i_size) {
ctx               123 fs/isofs/dir.c 			ctx->pos = (ctx->pos + ISOFS_BLOCK_SIZE) & ~(ISOFS_BLOCK_SIZE - 1);
ctx               124 fs/isofs/dir.c 			block = ctx->pos >> bufbits;
ctx               168 fs/isofs/dir.c 			ctx->pos += de_len;
ctx               175 fs/isofs/dir.c 			if (!dir_emit_dot(file, ctx))
ctx               177 fs/isofs/dir.c 			ctx->pos += de_len;
ctx               185 fs/isofs/dir.c 			if (!dir_emit_dotdot(file, ctx))
ctx               187 fs/isofs/dir.c 			ctx->pos += de_len;
ctx               201 fs/isofs/dir.c 			ctx->pos += de_len;
ctx               233 fs/isofs/dir.c 			if (!dir_emit(ctx, p, len, inode_number, DT_UNKNOWN))
ctx               236 fs/isofs/dir.c 		ctx->pos += de_len;
ctx               250 fs/isofs/dir.c static int isofs_readdir(struct file *file, struct dir_context *ctx)
ctx               263 fs/isofs/dir.c 	result = do_isofs_readdir(inode, file, ctx, tmpname, tmpde);
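
Every readdir in this listing follows one contract: advance ctx->pos per emitted entry and stop as soon as the actor (dir_emit()) refuses an entry because the user buffer is full, so the next call resumes from the same position. A freestanding model of that cursor-plus-callback shape; the names mirror the kernel's for readability, but this is a model, not kernel code.

	#include <stdbool.h>
	#include <stdio.h>

	struct dir_ctx {
		long pos;		/* persistent cursor, like ctx->pos */
		int  space, count;	/* per-call buffer capacity / usage */
	};

	static bool dir_emit(struct dir_ctx *ctx, const char *name)
	{
		if (ctx->count == ctx->space)
			return false;	/* entry rejected; pos must not advance */
		printf("%ld: %s\n", ctx->pos, name);
		ctx->count++;
		return true;
	}

	static void model_readdir(struct dir_ctx *ctx)
	{
		static const char *names[] = { ".", "..", "a", "b", "c" };

		ctx->count = 0;
		for (; ctx->pos < 5; ctx->pos++)
			if (!dir_emit(ctx, names[ctx->pos]))
				return;		/* resume from same pos next call */
	}

	int main(void)
	{
		struct dir_ctx ctx = { .pos = 0, .space = 3 };

		model_readdir(&ctx);	/* emits ".", "..", "a" */
		model_readdir(&ctx);	/* resumes: emits "b", "c" */
		return 0;
	}
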
ctx               120 fs/jffs2/dir.c static int jffs2_readdir(struct file *file, struct dir_context *ctx)
ctx               129 fs/jffs2/dir.c 	if (!dir_emit_dots(file, ctx))
ctx               136 fs/jffs2/dir.c 		if (curofs < ctx->pos) {
ctx               138 fs/jffs2/dir.c 				  fd->name, fd->ino, fd->type, curofs, (unsigned long)ctx->pos);
ctx               144 fs/jffs2/dir.c 			ctx->pos++;
ctx               148 fs/jffs2/dir.c 			  (unsigned long)ctx->pos, fd->name, fd->ino, fd->type);
ctx               149 fs/jffs2/dir.c 		if (!dir_emit(ctx, fd->name, strlen(fd->name), fd->ino, fd->type))
ctx               151 fs/jffs2/dir.c 		ctx->pos++;
ctx               296 fs/jffs2/super.c 	struct jffs2_sb_info *ctx;
ctx               298 fs/jffs2/super.c 	ctx = kzalloc(sizeof(struct jffs2_sb_info), GFP_KERNEL);
ctx               299 fs/jffs2/super.c 	if (!ctx)
ctx               302 fs/jffs2/super.c 	fc->s_fs_info = ctx;
ctx              2993 fs/jfs/jfs_dtree.c int jfs_readdir(struct file *file, struct dir_context *ctx)
ctx              3024 fs/jfs/jfs_dtree.c 	if (ctx->pos == DIREND)
ctx              3036 fs/jfs/jfs_dtree.c 		dir_index = (u32) ctx->pos;
ctx              3052 fs/jfs/jfs_dtree.c 				ctx->pos = DIREND;
ctx              3058 fs/jfs/jfs_dtree.c 				ctx->pos = DIREND;
ctx              3064 fs/jfs/jfs_dtree.c 					ctx->pos = DIREND;
ctx              3069 fs/jfs/jfs_dtree.c 					ctx->pos = DIREND;
ctx              3078 fs/jfs/jfs_dtree.c 				ctx->pos = DIREND;
ctx              3084 fs/jfs/jfs_dtree.c 				ctx->pos = DIREND;
ctx              3092 fs/jfs/jfs_dtree.c 				ctx->pos = 1;
ctx              3093 fs/jfs/jfs_dtree.c 				if (!dir_emit(ctx, ".", 1, ip->i_ino, DT_DIR))
ctx              3099 fs/jfs/jfs_dtree.c 			ctx->pos = 2;
ctx              3100 fs/jfs/jfs_dtree.c 			if (!dir_emit(ctx, "..", 2, PARENT(ip), DT_DIR))
ctx              3107 fs/jfs/jfs_dtree.c 				ctx->pos = DIREND;
ctx              3125 fs/jfs/jfs_dtree.c 		dtpos = ctx->pos;
ctx              3128 fs/jfs/jfs_dtree.c 			ctx->pos = 1;
ctx              3129 fs/jfs/jfs_dtree.c 			if (!dir_emit(ctx, ".", 1, ip->i_ino, DT_DIR))
ctx              3132 fs/jfs/jfs_dtree.c 			ctx->pos = dtpos;
ctx              3138 fs/jfs/jfs_dtree.c 				if (!dir_emit(ctx, "..", 2, PARENT(ip), DT_DIR))
ctx              3145 fs/jfs/jfs_dtree.c 			ctx->pos = dtpos;
ctx              3149 fs/jfs/jfs_dtree.c 			ctx->pos = DIREND;
ctx              3153 fs/jfs/jfs_dtree.c 		if ((rc = dtReadNext(ip, &ctx->pos, &btstack))) {
ctx              3156 fs/jfs/jfs_dtree.c 			ctx->pos = DIREND;
ctx              3164 fs/jfs/jfs_dtree.c 			ctx->pos = DIREND;
ctx              3173 fs/jfs/jfs_dtree.c 		ctx->pos = DIREND;
ctx              3293 fs/jfs/jfs_dtree.c 			ctx->pos = jfs_dirent->position;
ctx              3294 fs/jfs/jfs_dtree.c 			if (!dir_emit(ctx, jfs_dirent->name,
ctx              3307 fs/jfs/jfs_dtree.c 			ctx->pos = DIREND;
ctx               255 fs/jfs/jfs_dtree.h extern int jfs_readdir(struct file *file, struct dir_context *ctx);
ctx              1660 fs/kernfs/dir.c static int kernfs_fop_readdir(struct file *file, struct dir_context *ctx)
ctx              1667 fs/kernfs/dir.c 	if (!dir_emit_dots(file, ctx))
ctx              1674 fs/kernfs/dir.c 	for (pos = kernfs_dir_pos(ns, parent, ctx->pos, pos);
ctx              1676 fs/kernfs/dir.c 	     pos = kernfs_dir_next_pos(ns, parent, ctx->pos, pos)) {
ctx              1682 fs/kernfs/dir.c 		ctx->pos = pos->hash;
ctx              1687 fs/kernfs/dir.c 		if (!dir_emit(ctx, name, len, ino, type))
ctx              1693 fs/kernfs/dir.c 	ctx->pos = INT_MAX;
ctx               184 fs/libfs.c     int dcache_readdir(struct file *file, struct dir_context *ctx)
ctx               192 fs/libfs.c     	if (!dir_emit_dots(file, ctx))
ctx               195 fs/libfs.c     	if (ctx->pos == 2)
ctx               203 fs/libfs.c     		if (!dir_emit(ctx, next->d_name.name, next->d_name.len,
ctx               206 fs/libfs.c     		ctx->pos++;
ctx               248 fs/libfs.c     	struct pseudo_fs_context *ctx = fc->fs_private;
ctx               254 fs/libfs.c     	s->s_magic = ctx->magic;
ctx               255 fs/libfs.c     	s->s_op = ctx->ops ?: &simple_super_operations;
ctx               256 fs/libfs.c     	s->s_xattr = ctx->xattr;
ctx               273 fs/libfs.c     	s->s_d_op = ctx->dops;
ctx               299 fs/libfs.c     	struct pseudo_fs_context *ctx;
ctx               301 fs/libfs.c     	ctx = kzalloc(sizeof(struct pseudo_fs_context), GFP_KERNEL);
ctx               302 fs/libfs.c     	if (likely(ctx)) {
ctx               303 fs/libfs.c     		ctx->magic = magic;
ctx               304 fs/libfs.c     		fc->fs_private = ctx;
ctx               309 fs/libfs.c     	return ctx;
ctx              1261 fs/libfs.c     static int empty_dir_readdir(struct file *file, struct dir_context *ctx)
ctx              1263 fs/libfs.c     	dir_emit_dots(file, ctx);
ctx               251 fs/locks.c     	struct file_lock_context *ctx;
ctx               254 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx               255 fs/locks.c     	if (likely(ctx) || type == F_UNLCK)
ctx               258 fs/locks.c     	ctx = kmem_cache_alloc(flctx_cache, GFP_KERNEL);
ctx               259 fs/locks.c     	if (!ctx)
ctx               262 fs/locks.c     	spin_lock_init(&ctx->flc_lock);
ctx               263 fs/locks.c     	INIT_LIST_HEAD(&ctx->flc_flock);
ctx               264 fs/locks.c     	INIT_LIST_HEAD(&ctx->flc_posix);
ctx               265 fs/locks.c     	INIT_LIST_HEAD(&ctx->flc_lease);
ctx               271 fs/locks.c     	if (cmpxchg(&inode->i_flctx, NULL, ctx)) {
ctx               272 fs/locks.c     		kmem_cache_free(flctx_cache, ctx);
ctx               273 fs/locks.c     		ctx = smp_load_acquire(&inode->i_flctx);
ctx               276 fs/locks.c     	trace_locks_get_lock_context(inode, type, ctx);
ctx               277 fs/locks.c     	return ctx;
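
locks_get_lock_context() (fs/locks.c 251-277 above) allocates inode->i_flctx lazily: an acquire load, an unlocked allocation, then publication with a single cmpxchg(); the loser frees its copy and adopts the winner's. The same shape in C11 atomics, with struct flctx as a stand-in for the real context:

	#include <stdatomic.h>
	#include <stdlib.h>

	struct flctx { int dummy; };

	struct inode_model {
		_Atomic(struct flctx *) i_flctx;
	};

	static struct flctx *get_lock_context(struct inode_model *inode)
	{
		struct flctx *ctx, *expect = NULL;

		ctx = atomic_load_explicit(&inode->i_flctx, memory_order_acquire);
		if (ctx)
			return ctx;

		ctx = calloc(1, sizeof(*ctx));	/* fully initialize before publishing */
		if (!ctx)
			return NULL;

		/* release on success so readers see the initialized fields */
		if (!atomic_compare_exchange_strong_explicit(&inode->i_flctx,
				&expect, ctx,
				memory_order_acq_rel, memory_order_acquire)) {
			free(ctx);		/* lost the race */
			ctx = expect;		/* the CAS loaded the winner for us */
		}
		return ctx;
	}
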
ctx               293 fs/locks.c     	struct file_lock_context *ctx = inode->i_flctx;
ctx               295 fs/locks.c     	if (unlikely(!list_empty(&ctx->flc_flock) ||
ctx               296 fs/locks.c     		     !list_empty(&ctx->flc_posix) ||
ctx               297 fs/locks.c     		     !list_empty(&ctx->flc_lease))) {
ctx               301 fs/locks.c     		locks_dump_ctx_list(&ctx->flc_flock, "FLOCK");
ctx               302 fs/locks.c     		locks_dump_ctx_list(&ctx->flc_posix, "POSIX");
ctx               303 fs/locks.c     		locks_dump_ctx_list(&ctx->flc_lease, "LEASE");
ctx               326 fs/locks.c     	struct file_lock_context *ctx = inode->i_flctx;
ctx               328 fs/locks.c     	if (unlikely(ctx)) {
ctx               330 fs/locks.c     		kmem_cache_free(flctx_cache, ctx);
ctx               955 fs/locks.c     	struct file_lock_context *ctx;
ctx               958 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx               959 fs/locks.c     	if (!ctx || list_empty_careful(&ctx->flc_posix)) {
ctx               964 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx               965 fs/locks.c     	list_for_each_entry(cfl, &ctx->flc_posix, fl_list) {
ctx               973 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1063 fs/locks.c     	struct file_lock_context *ctx;
ctx              1068 fs/locks.c     	ctx = locks_get_lock_context(inode, request->fl_type);
ctx              1069 fs/locks.c     	if (!ctx) {
ctx              1082 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1086 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_flock, fl_list) {
ctx              1103 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_flock, fl_list) {
ctx              1117 fs/locks.c     	locks_insert_lock_ctx(new_fl, &ctx->flc_flock);
ctx              1122 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1139 fs/locks.c     	struct file_lock_context *ctx;
ctx              1144 fs/locks.c     	ctx = locks_get_lock_context(inode, request->fl_type);
ctx              1145 fs/locks.c     	if (!ctx)
ctx              1162 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1169 fs/locks.c     		list_for_each_entry(fl, &ctx->flc_posix, fl_list) {
ctx              1204 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_posix, fl_list) {
ctx              1210 fs/locks.c     	list_for_each_entry_safe_from(fl, tmp, &ctx->flc_posix, fl_list) {
ctx              1339 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1411 fs/locks.c     	struct file_lock_context *ctx;
ctx              1414 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              1415 fs/locks.c     	if (!ctx || list_empty_careful(&ctx->flc_posix))
ctx              1421 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1423 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_posix, fl_list) {
ctx              1430 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1542 fs/locks.c     	struct file_lock_context *ctx = inode->i_flctx;
ctx              1545 fs/locks.c     	lockdep_assert_held(&ctx->flc_lock);
ctx              1547 fs/locks.c     	list_for_each_entry_safe(fl, tmp, &ctx->flc_lease, fl_list) {
ctx              1578 fs/locks.c     	struct file_lock_context *ctx = inode->i_flctx;
ctx              1581 fs/locks.c     	lockdep_assert_held(&ctx->flc_lock);
ctx              1583 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_lease, fl_list) {
ctx              1606 fs/locks.c     	struct file_lock_context *ctx;
ctx              1618 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              1619 fs/locks.c     	if (!ctx) {
ctx              1625 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1639 fs/locks.c     	list_for_each_entry_safe(fl, tmp, &ctx->flc_lease, fl_list) {
ctx              1657 fs/locks.c     	if (list_empty(&ctx->flc_lease))
ctx              1667 fs/locks.c     	fl = list_first_entry(&ctx->flc_lease, struct file_lock, fl_list);
ctx              1675 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1684 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1699 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1720 fs/locks.c     	struct file_lock_context *ctx;
ctx              1723 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              1724 fs/locks.c     	if (ctx && !list_empty_careful(&ctx->flc_lease)) {
ctx              1725 fs/locks.c     		spin_lock(&ctx->flc_lock);
ctx              1726 fs/locks.c     		fl = list_first_entry_or_null(&ctx->flc_lease,
ctx              1730 fs/locks.c     		spin_unlock(&ctx->flc_lock);
ctx              1765 fs/locks.c     	struct file_lock_context *ctx;
ctx              1769 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              1770 fs/locks.c     	if (ctx && !list_empty_careful(&ctx->flc_lease)) {
ctx              1772 fs/locks.c     		spin_lock(&ctx->flc_lock);
ctx              1774 fs/locks.c     		list_for_each_entry(fl, &ctx->flc_lease, fl_list) {
ctx              1780 fs/locks.c     		spin_unlock(&ctx->flc_lock);
ctx              1836 fs/locks.c     	struct file_lock_context *ctx;
ctx              1845 fs/locks.c     	ctx = locks_get_lock_context(inode, arg);
ctx              1846 fs/locks.c     	if (!ctx)
ctx              1868 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1883 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_lease, fl_list) {
ctx              1916 fs/locks.c     	locks_insert_lock_ctx(lease, &ctx->flc_lease);
ctx              1937 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              1952 fs/locks.c     	struct file_lock_context *ctx;
ctx              1955 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              1956 fs/locks.c     	if (!ctx) {
ctx              1962 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              1963 fs/locks.c     	list_for_each_entry(fl, &ctx->flc_lease, fl_list) {
ctx              1973 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              2697 fs/locks.c     	struct file_lock_context *ctx;
ctx              2704 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              2705 fs/locks.c     	if (!ctx || list_empty(&ctx->flc_posix))
ctx              2751 fs/locks.c     locks_remove_lease(struct file *filp, struct file_lock_context *ctx)
ctx              2756 fs/locks.c     	if (list_empty(&ctx->flc_lease))
ctx              2760 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              2761 fs/locks.c     	list_for_each_entry_safe(fl, tmp, &ctx->flc_lease, fl_list)
ctx              2764 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              2775 fs/locks.c     	struct file_lock_context *ctx;
ctx              2777 fs/locks.c     	ctx = smp_load_acquire(&locks_inode(filp)->i_flctx);
ctx              2778 fs/locks.c     	if (!ctx)
ctx              2785 fs/locks.c     	locks_remove_flock(filp, ctx);
ctx              2788 fs/locks.c     	locks_remove_lease(filp, ctx);
ctx              2790 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              2791 fs/locks.c     	locks_check_ctx_file_list(filp, &ctx->flc_posix, "POSIX");
ctx              2792 fs/locks.c     	locks_check_ctx_file_list(filp, &ctx->flc_flock, "FLOCK");
ctx              2793 fs/locks.c     	locks_check_ctx_file_list(filp, &ctx->flc_lease, "LEASE");
ctx              2794 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx              2943 fs/locks.c     	struct file_lock_context *ctx;
ctx              2946 fs/locks.c     	ctx = smp_load_acquire(&inode->i_flctx);
ctx              2947 fs/locks.c     	if (!ctx)
ctx              2950 fs/locks.c     	spin_lock(&ctx->flc_lock);
ctx              2951 fs/locks.c     	__show_fd_locks(f, &ctx->flc_flock, &id, filp, files);
ctx              2952 fs/locks.c     	__show_fd_locks(f, &ctx->flc_posix, &id, filp, files);
ctx              2953 fs/locks.c     	__show_fd_locks(f, &ctx->flc_lease, &id, filp, files);
ctx              2954 fs/locks.c     	spin_unlock(&ctx->flc_lock);
ctx                81 fs/minix/dir.c static int minix_readdir(struct file *file, struct dir_context *ctx)
ctx                88 fs/minix/dir.c 	unsigned long pos = ctx->pos;
ctx                92 fs/minix/dir.c 	ctx->pos = pos = ALIGN(pos, chunk_size);
ctx               122 fs/minix/dir.c 				if (!dir_emit(ctx, name, l,
ctx               128 fs/minix/dir.c 			ctx->pos += chunk_size;
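
minix_readdir() rounds the file position up to a dirent boundary with ALIGN(pos, chunk_size). For the power-of-two sizes minix uses, that is plain round-up-then-mask, as this self-checking snippet shows (ALIGN_UP is a local stand-in for the kernel macro):

	#include <assert.h>

	#define ALIGN_UP(x, a)	(((x) + (a) - 1) & ~((a) - 1))

	int main(void)
	{
		assert(ALIGN_UP(0, 32) == 0);
		assert(ALIGN_UP(1, 32) == 32);	/* mid-entry pos snaps forward */
		assert(ALIGN_UP(32, 32) == 32);	/* already aligned: unchanged */
		assert(ALIGN_UP(33, 32) == 64);
		return 0;
	}
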
ctx               141 fs/nfs/delegation.c 	struct nfs_open_context *ctx;
ctx               149 fs/nfs/delegation.c 	list_for_each_entry_rcu(ctx, &nfsi->open_files, list) {
ctx               150 fs/nfs/delegation.c 		state = ctx->state;
ctx               159 fs/nfs/delegation.c 		if (!get_nfs_open_context(ctx))
ctx               166 fs/nfs/delegation.c 		err = nfs4_open_delegation_recall(ctx, state, stateid);
ctx               172 fs/nfs/delegation.c 		put_nfs_open_context(ctx);
ctx                66 fs/nfs/delegation.h int nfs4_open_delegation_recall(struct nfs_open_context *ctx, struct nfs4_state *state, const nfs4_stateid *stateid);
ctx                74 fs/nfs/dir.c   	struct nfs_open_dir_context *ctx;
ctx                75 fs/nfs/dir.c   	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx                76 fs/nfs/dir.c   	if (ctx != NULL) {
ctx                77 fs/nfs/dir.c   		ctx->duped = 0;
ctx                78 fs/nfs/dir.c   		ctx->attr_gencount = nfsi->attr_gencount;
ctx                79 fs/nfs/dir.c   		ctx->dir_cookie = 0;
ctx                80 fs/nfs/dir.c   		ctx->dup_cookie = 0;
ctx                81 fs/nfs/dir.c   		ctx->cred = get_cred(cred);
ctx                87 fs/nfs/dir.c   		list_add(&ctx->list, &nfsi->open_files);
ctx                89 fs/nfs/dir.c   		return ctx;
ctx                94 fs/nfs/dir.c   static void put_nfs_open_dir_context(struct inode *dir, struct nfs_open_dir_context *ctx)
ctx                97 fs/nfs/dir.c   	list_del(&ctx->list);
ctx                99 fs/nfs/dir.c   	put_cred(ctx->cred);
ctx               100 fs/nfs/dir.c   	kfree(ctx);
ctx               110 fs/nfs/dir.c   	struct nfs_open_dir_context *ctx;
ctx               116 fs/nfs/dir.c   	ctx = alloc_nfs_open_dir_context(inode, current_cred());
ctx               117 fs/nfs/dir.c   	if (IS_ERR(ctx)) {
ctx               118 fs/nfs/dir.c   		res = PTR_ERR(ctx);
ctx               121 fs/nfs/dir.c   	filp->private_data = ctx;
ctx               151 fs/nfs/dir.c   	struct dir_context *ctx;
ctx               245 fs/nfs/dir.c   	loff_t diff = desc->ctx->pos - desc->current_index;
ctx               284 fs/nfs/dir.c   			struct nfs_open_dir_context *ctx = desc->file->private_data;
ctx               287 fs/nfs/dir.c   			if (ctx->attr_gencount != nfsi->attr_gencount ||
ctx               289 fs/nfs/dir.c   				ctx->duped = 0;
ctx               290 fs/nfs/dir.c   				ctx->attr_gencount = nfsi->attr_gencount;
ctx               291 fs/nfs/dir.c   			} else if (new_pos < desc->ctx->pos) {
ctx               292 fs/nfs/dir.c   				if (ctx->duped > 0
ctx               293 fs/nfs/dir.c   				    && ctx->dup_cookie == *desc->dir_cookie) {
ctx               304 fs/nfs/dir.c   				ctx->dup_cookie = *desc->dir_cookie;
ctx               305 fs/nfs/dir.c   				ctx->duped = -1;
ctx               307 fs/nfs/dir.c   			desc->ctx->pos = new_pos;
ctx               348 fs/nfs/dir.c   	struct nfs_open_dir_context *ctx = file->private_data;
ctx               349 fs/nfs/dir.c   	const struct cred *cred = ctx->cred;
ctx               412 fs/nfs/dir.c   bool nfs_use_readdirplus(struct inode *dir, struct dir_context *ctx)
ctx               418 fs/nfs/dir.c   	if (ctx->pos == 0)
ctx               769 fs/nfs/dir.c   	struct nfs_open_dir_context *ctx = file->private_data;
ctx               776 fs/nfs/dir.c   		if (!dir_emit(desc->ctx, ent->string.name, ent->string.len,
ctx               781 fs/nfs/dir.c   		desc->ctx->pos++;
ctx               786 fs/nfs/dir.c   		if (ctx->duped != 0)
ctx               787 fs/nfs/dir.c   			ctx->duped = 1;
ctx               816 fs/nfs/dir.c   	struct nfs_open_dir_context *ctx = desc->file->private_data;
ctx               830 fs/nfs/dir.c   	ctx->duped = 0;
ctx               851 fs/nfs/dir.c   static int nfs_readdir(struct file *file, struct dir_context *ctx)
ctx               861 fs/nfs/dir.c   			file, (long long)ctx->pos);
ctx               873 fs/nfs/dir.c   	desc->ctx = ctx;
ctx               876 fs/nfs/dir.c   	desc->plus = nfs_use_readdirplus(inode, ctx);
ctx               878 fs/nfs/dir.c   	if (ctx->pos == 0 || nfs_attribute_cache_expired(inode))
ctx              1499 fs/nfs/dir.c   static int nfs_finish_open(struct nfs_open_context *ctx,
ctx              1509 fs/nfs/dir.c   		nfs_file_set_open_context(file, ctx);
ctx              1521 fs/nfs/dir.c   	struct nfs_open_context *ctx;
ctx              1582 fs/nfs/dir.c   	ctx = create_nfs_open_context(dentry, open_flags, file);
ctx              1583 fs/nfs/dir.c   	err = PTR_ERR(ctx);
ctx              1584 fs/nfs/dir.c   	if (IS_ERR(ctx))
ctx              1587 fs/nfs/dir.c   	trace_nfs_atomic_open_enter(dir, ctx, open_flags);
ctx              1588 fs/nfs/dir.c   	inode = NFS_PROTO(dir)->open_context(dir, ctx, open_flags, &attr, &created);
ctx              1593 fs/nfs/dir.c   		trace_nfs_atomic_open_exit(dir, ctx, open_flags, err);
ctx              1594 fs/nfs/dir.c   		put_nfs_open_context(ctx);
ctx              1615 fs/nfs/dir.c   	err = nfs_finish_open(ctx, ctx->dentry, file, open_flags);
ctx              1616 fs/nfs/dir.c   	trace_nfs_atomic_open_exit(dir, ctx, open_flags, err);
ctx              1617 fs/nfs/dir.c   	put_nfs_open_context(ctx);
ctx                71 fs/nfs/direct.c 	struct nfs_open_context	*ctx;		/* file open context info */
ctx               322 fs/nfs/direct.c 	if (dreq->ctx != NULL)
ctx               323 fs/nfs/direct.c 		put_nfs_open_context(dreq->ctx);
ctx               481 fs/nfs/direct.c 			req = nfs_create_request(dreq->ctx, pagevec[i],
ctx               570 fs/nfs/direct.c 	dreq->ctx = get_nfs_open_context(nfs_file_open_context(iocb->ki_filp));
ctx               571 fs/nfs/direct.c 	l_ctx = nfs_get_lock_context(dreq->ctx);
ctx               887 fs/nfs/direct.c 			req = nfs_create_request(dreq->ctx, pagevec[i],
ctx               989 fs/nfs/direct.c 	dreq->ctx = get_nfs_open_context(nfs_file_open_context(iocb->ki_filp));
ctx               990 fs/nfs/direct.c 	l_ctx = nfs_get_lock_context(dreq->ctx);
ctx               207 fs/nfs/file.c  	struct nfs_open_context *ctx = nfs_file_open_context(file);
ctx               215 fs/nfs/file.c  	do_resend = test_and_clear_bit(NFS_CONTEXT_RESEND_WRITES, &ctx->flags);
ctx               223 fs/nfs/file.c  	do_resend |= test_bit(NFS_CONTEXT_RESEND_WRITES, &ctx->flags);
ctx               359 fs/nfs/file.c  	struct nfs_open_context *ctx = nfs_file_open_context(file);
ctx               394 fs/nfs/file.c  	if (nfs_ctx_key_to_expire(ctx, mapping->host)) {
ctx               585 fs/nfs/file.c  	struct nfs_open_context *ctx;
ctx               587 fs/nfs/file.c  	ctx = nfs_file_open_context(filp);
ctx               588 fs/nfs/file.c  	if (nfs_ctx_key_to_expire(ctx, inode))
ctx               884 fs/nfs/filelayout/filelayout.c 		      struct nfs_open_context *ctx,
ctx               896 fs/nfs/filelayout/filelayout.c 	lseg = pnfs_update_layout(ino, ctx, pos, count, iomode, strict_iomode,
ctx               405 fs/nfs/fscache.c int __nfs_readpage_from_fscache(struct nfs_open_context *ctx,
ctx               417 fs/nfs/fscache.c 					 ctx,
ctx               444 fs/nfs/fscache.c int __nfs_readpages_from_fscache(struct nfs_open_context *ctx,
ctx               459 fs/nfs/fscache.c 					  ctx,
ctx               130 fs/nfs/fscache.h static inline int nfs_readpage_from_fscache(struct nfs_open_context *ctx,
ctx               135 fs/nfs/fscache.h 		return __nfs_readpage_from_fscache(ctx, inode, page);
ctx               142 fs/nfs/fscache.h static inline int nfs_readpages_from_fscache(struct nfs_open_context *ctx,
ctx               149 fs/nfs/fscache.h 		return __nfs_readpages_from_fscache(ctx, inode, mapping, pages,
ctx               215 fs/nfs/fscache.h static inline int nfs_readpage_from_fscache(struct nfs_open_context *ctx,
ctx               221 fs/nfs/fscache.h static inline int nfs_readpages_from_fscache(struct nfs_open_context *ctx,
ctx               862 fs/nfs/inode.c static struct nfs_lock_context *__nfs_find_lock_context(struct nfs_open_context *ctx)
ctx               866 fs/nfs/inode.c 	list_for_each_entry_rcu(pos, &ctx->lock_context.list, list) {
ctx               875 fs/nfs/inode.c struct nfs_lock_context *nfs_get_lock_context(struct nfs_open_context *ctx)
ctx               878 fs/nfs/inode.c 	struct inode *inode = d_inode(ctx->dentry);
ctx               881 fs/nfs/inode.c 	res = __nfs_find_lock_context(ctx);
ctx               889 fs/nfs/inode.c 		res = __nfs_find_lock_context(ctx);
ctx               891 fs/nfs/inode.c 			new->open_context = get_nfs_open_context(ctx);
ctx               894 fs/nfs/inode.c 						&ctx->lock_context.list);
ctx               909 fs/nfs/inode.c 	struct nfs_open_context *ctx = l_ctx->open_context;
ctx               910 fs/nfs/inode.c 	struct inode *inode = d_inode(ctx->dentry);
ctx               916 fs/nfs/inode.c 	put_nfs_open_context(ctx);
ctx               930 fs/nfs/inode.c void nfs_close_context(struct nfs_open_context *ctx, int is_sync)
ctx               936 fs/nfs/inode.c 	if (!(ctx->mode & FMODE_WRITE))
ctx               940 fs/nfs/inode.c 	inode = d_inode(ctx->dentry);
ctx               961 fs/nfs/inode.c 	struct nfs_open_context *ctx;
ctx               963 fs/nfs/inode.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               964 fs/nfs/inode.c 	if (!ctx)
ctx               967 fs/nfs/inode.c 	ctx->dentry = dget(dentry);
ctx               969 fs/nfs/inode.c 		ctx->cred = get_cred(filp->f_cred);
ctx               971 fs/nfs/inode.c 		ctx->cred = get_current_cred();
ctx               972 fs/nfs/inode.c 	ctx->ll_cred = NULL;
ctx               973 fs/nfs/inode.c 	ctx->state = NULL;
ctx               974 fs/nfs/inode.c 	ctx->mode = f_mode;
ctx               975 fs/nfs/inode.c 	ctx->flags = 0;
ctx               976 fs/nfs/inode.c 	ctx->error = 0;
ctx               977 fs/nfs/inode.c 	ctx->flock_owner = (fl_owner_t)filp;
ctx               978 fs/nfs/inode.c 	nfs_init_lock_context(&ctx->lock_context);
ctx               979 fs/nfs/inode.c 	ctx->lock_context.open_context = ctx;
ctx               980 fs/nfs/inode.c 	INIT_LIST_HEAD(&ctx->list);
ctx               981 fs/nfs/inode.c 	ctx->mdsthreshold = NULL;
ctx               982 fs/nfs/inode.c 	return ctx;
ctx               986 fs/nfs/inode.c struct nfs_open_context *get_nfs_open_context(struct nfs_open_context *ctx)
ctx               988 fs/nfs/inode.c 	if (ctx != NULL && refcount_inc_not_zero(&ctx->lock_context.count))
ctx               989 fs/nfs/inode.c 		return ctx;
ctx               994 fs/nfs/inode.c static void __put_nfs_open_context(struct nfs_open_context *ctx, int is_sync)
ctx               996 fs/nfs/inode.c 	struct inode *inode = d_inode(ctx->dentry);
ctx               997 fs/nfs/inode.c 	struct super_block *sb = ctx->dentry->d_sb;
ctx               999 fs/nfs/inode.c 	if (!refcount_dec_and_test(&ctx->lock_context.count))
ctx              1001 fs/nfs/inode.c 	if (!list_empty(&ctx->list)) {
ctx              1003 fs/nfs/inode.c 		list_del_rcu(&ctx->list);
ctx              1007 fs/nfs/inode.c 		NFS_PROTO(inode)->close_context(ctx, is_sync);
ctx              1008 fs/nfs/inode.c 	put_cred(ctx->cred);
ctx              1009 fs/nfs/inode.c 	dput(ctx->dentry);
ctx              1011 fs/nfs/inode.c 	put_rpccred(ctx->ll_cred);
ctx              1012 fs/nfs/inode.c 	kfree(ctx->mdsthreshold);
ctx              1013 fs/nfs/inode.c 	kfree_rcu(ctx, rcu_head);
ctx              1016 fs/nfs/inode.c void put_nfs_open_context(struct nfs_open_context *ctx)
ctx              1018 fs/nfs/inode.c 	__put_nfs_open_context(ctx, 0);
ctx              1022 fs/nfs/inode.c static void put_nfs_open_context_sync(struct nfs_open_context *ctx)
ctx              1024 fs/nfs/inode.c 	__put_nfs_open_context(ctx, 1);
ctx              1031 fs/nfs/inode.c void nfs_inode_attach_open_context(struct nfs_open_context *ctx)
ctx              1033 fs/nfs/inode.c 	struct inode *inode = d_inode(ctx->dentry);
ctx              1041 fs/nfs/inode.c 	list_add_tail_rcu(&ctx->list, &nfsi->open_files);
ctx              1046 fs/nfs/inode.c void nfs_file_set_open_context(struct file *filp, struct nfs_open_context *ctx)
ctx              1048 fs/nfs/inode.c 	filp->private_data = get_nfs_open_context(ctx);
ctx              1049 fs/nfs/inode.c 	if (list_empty(&ctx->list))
ctx              1050 fs/nfs/inode.c 		nfs_inode_attach_open_context(ctx);
ctx              1060 fs/nfs/inode.c 	struct nfs_open_context *pos, *ctx = NULL;
ctx              1068 fs/nfs/inode.c 		ctx = get_nfs_open_context(pos);
ctx              1069 fs/nfs/inode.c 		if (ctx)
ctx              1073 fs/nfs/inode.c 	return ctx;
ctx              1078 fs/nfs/inode.c 	struct nfs_open_context *ctx = nfs_file_open_context(filp);
ctx              1080 fs/nfs/inode.c 	if (ctx) {
ctx              1081 fs/nfs/inode.c 		struct inode *inode = d_inode(ctx->dentry);
ctx              1087 fs/nfs/inode.c 		if (ctx->error < 0)
ctx              1090 fs/nfs/inode.c 		put_nfs_open_context_sync(ctx);
ctx              1099 fs/nfs/inode.c 	struct nfs_open_context *ctx;
ctx              1101 fs/nfs/inode.c 	ctx = alloc_nfs_open_context(file_dentry(filp), filp->f_mode, filp);
ctx              1102 fs/nfs/inode.c 	if (IS_ERR(ctx))
ctx              1103 fs/nfs/inode.c 		return PTR_ERR(ctx);
ctx              1104 fs/nfs/inode.c 	nfs_file_set_open_context(filp, ctx);
ctx              1105 fs/nfs/inode.c 	put_nfs_open_context(ctx);
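
get_nfs_open_context()/put_nfs_open_context() (fs/nfs/inode.c 986-1018 above) are a refcount_inc_not_zero() lifecycle: a get succeeds only while the count is nonzero, so a dying context is never resurrected, and the final put tears down. A sketch with C11 atomics; the NFS-specific teardown and RCU deferral are elided, and octx is an illustrative name.

	#include <stdatomic.h>
	#include <stdlib.h>

	struct octx {
		atomic_long count;
	};

	static struct octx *octx_alloc(void)
	{
		struct octx *ctx = calloc(1, sizeof(*ctx));

		if (ctx)
			atomic_store(&ctx->count, 1);
		return ctx;
	}

	static struct octx *octx_get(struct octx *ctx)
	{
		long c = atomic_load(&ctx->count);

		/* inc-not-zero: never take a reference on a dying object */
		while (c != 0)
			if (atomic_compare_exchange_weak(&ctx->count, &c, c + 1))
				return ctx;
		return NULL;
	}

	static void octx_put(struct octx *ctx)
	{
		if (atomic_fetch_sub(&ctx->count, 1) == 1)
			free(ctx);	/* last reference: tear down */
	}
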
ctx               342 fs/nfs/internal.h void nfs_close_context(struct nfs_open_context *ctx, int is_sync);
ctx               511 fs/nfs/internal.h bool nfs_ctx_key_to_expire(struct nfs_open_context *ctx, struct inode *inode);
ctx               910 fs/nfs/nfs3proc.c 	struct nfs_open_context *ctx;
ctx               912 fs/nfs/nfs3proc.c 		ctx = l_ctx->open_context;
ctx               914 fs/nfs/nfs3proc.c 		put_nfs_open_context(ctx);
ctx               929 fs/nfs/nfs3proc.c 	struct nfs_open_context *ctx = nfs_file_open_context(filp);
ctx               933 fs/nfs/nfs3proc.c 		l_ctx = nfs_get_lock_context(ctx);
ctx               937 fs/nfs/nfs3proc.c 			set_bit(NFS_CONTEXT_UNLOCK, &ctx->flags);
ctx               143 fs/nfs/nfs42proc.c 	struct nfs_open_context *ctx = nfs_file_open_context(dst);
ctx               168 fs/nfs/nfs42proc.c 	copy->parent_state = ctx->state;
ctx               421 fs/nfs/nfs42proc.c 	struct nfs_open_context *ctx = nfs_file_open_context(dst);
ctx               425 fs/nfs/nfs42proc.c 		.rpc_cred = ctx->cred,
ctx               311 fs/nfs/nfs4_fs.h 		const struct nfs_open_context *ctx,
ctx                28 fs/nfs/nfs4file.c 	struct nfs_open_context *ctx;
ctx                60 fs/nfs/nfs4file.c 	ctx = alloc_nfs_open_context(file_dentry(filp), filp->f_mode, filp);
ctx                61 fs/nfs/nfs4file.c 	err = PTR_ERR(ctx);
ctx                62 fs/nfs/nfs4file.c 	if (IS_ERR(ctx))
ctx                72 fs/nfs/nfs4file.c 	inode = NFS_PROTO(dir)->open_context(dir, ctx, openflags, &attr, NULL);
ctx                89 fs/nfs/nfs4file.c 	nfs_file_set_open_context(filp, ctx);
ctx                94 fs/nfs/nfs4file.c 	put_nfs_open_context(ctx);
ctx                98 fs/nfs/nfs4proc.c 			    struct nfs_open_context *ctx, struct nfs4_label *ilabel,
ctx              1200 fs/nfs/nfs4proc.c static fmode_t _nfs4_ctx_to_accessmode(const struct nfs_open_context *ctx)
ctx              1202 fs/nfs/nfs4proc.c 	 return ctx->mode & (FMODE_READ|FMODE_WRITE|FMODE_EXEC);
ctx              1205 fs/nfs/nfs4proc.c static fmode_t _nfs4_ctx_to_openmode(const struct nfs_open_context *ctx)
ctx              1207 fs/nfs/nfs4proc.c 	fmode_t ret = ctx->mode & (FMODE_READ|FMODE_WRITE);
ctx              1209 fs/nfs/nfs4proc.c 	return (ctx->mode & FMODE_EXEC) ? FMODE_READ | ret : ret;
ctx              2024 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              2027 fs/nfs/nfs4proc.c 	list_for_each_entry_rcu(ctx, &nfsi->open_files, list) {
ctx              2028 fs/nfs/nfs4proc.c 		if (ctx->state != state)
ctx              2030 fs/nfs/nfs4proc.c 		if ((ctx->mode & mode) != mode)
ctx              2032 fs/nfs/nfs4proc.c 		if (!get_nfs_open_context(ctx))
ctx              2035 fs/nfs/nfs4proc.c 		return ctx;
ctx              2044 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              2046 fs/nfs/nfs4proc.c 	ctx = nfs4_state_find_open_context_mode(state, FMODE_READ|FMODE_WRITE);
ctx              2047 fs/nfs/nfs4proc.c 	if (!IS_ERR(ctx))
ctx              2048 fs/nfs/nfs4proc.c 		return ctx;
ctx              2049 fs/nfs/nfs4proc.c 	ctx = nfs4_state_find_open_context_mode(state, FMODE_WRITE);
ctx              2050 fs/nfs/nfs4proc.c 	if (!IS_ERR(ctx))
ctx              2051 fs/nfs/nfs4proc.c 		return ctx;
ctx              2055 fs/nfs/nfs4proc.c static struct nfs4_opendata *nfs4_open_recoverdata_alloc(struct nfs_open_context *ctx,
ctx              2060 fs/nfs/nfs4proc.c 	opendata = nfs4_opendata_alloc(ctx->dentry, state->owner, 0, 0,
ctx              2130 fs/nfs/nfs4proc.c static int _nfs4_do_open_reclaim(struct nfs_open_context *ctx, struct nfs4_state *state)
ctx              2137 fs/nfs/nfs4proc.c 	opendata = nfs4_open_recoverdata_alloc(ctx, state,
ctx              2152 fs/nfs/nfs4proc.c static int nfs4_do_open_reclaim(struct nfs_open_context *ctx, struct nfs4_state *state)
ctx              2158 fs/nfs/nfs4proc.c 		err = _nfs4_do_open_reclaim(ctx, state);
ctx              2159 fs/nfs/nfs4proc.c 		trace_nfs4_open_reclaim(ctx, 0, err);
ctx              2171 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              2174 fs/nfs/nfs4proc.c 	ctx = nfs4_state_find_open_context(state);
ctx              2175 fs/nfs/nfs4proc.c 	if (IS_ERR(ctx))
ctx              2179 fs/nfs/nfs4proc.c 	ret = nfs4_do_open_reclaim(ctx, state);
ctx              2180 fs/nfs/nfs4proc.c 	put_nfs_open_context(ctx);
ctx              2238 fs/nfs/nfs4proc.c int nfs4_open_delegation_recall(struct nfs_open_context *ctx,
ctx              2245 fs/nfs/nfs4proc.c 	opendata = nfs4_open_recoverdata_alloc(ctx, state,
ctx              2483 fs/nfs/nfs4proc.c 			      struct nfs_open_context *ctx)
ctx              2511 fs/nfs/nfs4proc.c 	if (!ctx) {
ctx              2517 fs/nfs/nfs4proc.c 		pnfs_lgopen_prepare(data, ctx);
ctx              2601 fs/nfs/nfs4proc.c 			   struct nfs_open_context *ctx)
ctx              2609 fs/nfs/nfs4proc.c 	status = nfs4_run_open_task(data, ctx);
ctx              2651 fs/nfs/nfs4proc.c static int _nfs4_open_expired(struct nfs_open_context *ctx, struct nfs4_state *state)
ctx              2656 fs/nfs/nfs4proc.c 	opendata = nfs4_open_recoverdata_alloc(ctx, state,
ctx              2662 fs/nfs/nfs4proc.c 		d_drop(ctx->dentry);
ctx              2667 fs/nfs/nfs4proc.c static int nfs4_do_open_expired(struct nfs_open_context *ctx, struct nfs4_state *state)
ctx              2674 fs/nfs/nfs4proc.c 		err = _nfs4_open_expired(ctx, state);
ctx              2675 fs/nfs/nfs4proc.c 		trace_nfs4_open_expired(ctx, 0, err);
ctx              2693 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              2696 fs/nfs/nfs4proc.c 	ctx = nfs4_state_find_open_context(state);
ctx              2697 fs/nfs/nfs4proc.c 	if (IS_ERR(ctx))
ctx              2699 fs/nfs/nfs4proc.c 	ret = nfs4_do_open_expired(ctx, state);
ctx              2700 fs/nfs/nfs4proc.c 	put_nfs_open_context(ctx);
ctx              2958 fs/nfs/nfs4proc.c 		int flags, struct nfs_open_context *ctx)
ctx              2964 fs/nfs/nfs4proc.c 	fmode_t acc_mode = _nfs4_ctx_to_accessmode(ctx);
ctx              2973 fs/nfs/nfs4proc.c 	ret = _nfs4_proc_open(opendata, ctx);
ctx              2981 fs/nfs/nfs4proc.c 	ctx->state = state;
ctx              2996 fs/nfs/nfs4proc.c 			dput(ctx->dentry);
ctx              2997 fs/nfs/nfs4proc.c 			ctx->dentry = dentry = alias;
ctx              3015 fs/nfs/nfs4proc.c 	pnfs_parse_lgopen(state->inode, opendata->lgp, ctx);
ctx              3023 fs/nfs/nfs4proc.c 		nfs_inode_attach_open_context(ctx);
ctx              3038 fs/nfs/nfs4proc.c 			struct nfs_open_context *ctx,
ctx              3047 fs/nfs/nfs4proc.c 	struct dentry *dentry = ctx->dentry;
ctx              3048 fs/nfs/nfs4proc.c 	const struct cred *cred = ctx->cred;
ctx              3049 fs/nfs/nfs4proc.c 	struct nfs4_threshold **ctx_th = &ctx->mdsthreshold;
ctx              3050 fs/nfs/nfs4proc.c 	fmode_t fmode = _nfs4_ctx_to_openmode(ctx);
ctx              3096 fs/nfs/nfs4proc.c 	status = _nfs4_open_and_get_state(opendata, flags, ctx);
ctx              3099 fs/nfs/nfs4proc.c 	state = ctx->state;
ctx              3115 fs/nfs/nfs4proc.c 					ctx, label, olabel);
ctx              3149 fs/nfs/nfs4proc.c 					struct nfs_open_context *ctx,
ctx              3171 fs/nfs/nfs4proc.c 		status = _nfs4_do_open(dir, ctx, flags, &c, opened);
ctx              3172 fs/nfs/nfs4proc.c 		res = ctx->state;
ctx              3173 fs/nfs/nfs4proc.c 		trace_nfs4_open_file(ctx, flags, status);
ctx              3226 fs/nfs/nfs4proc.c 			    struct nfs_open_context *ctx)
ctx              3249 fs/nfs/nfs4proc.c 	} else if (ctx != NULL && ctx->state) {
ctx              3251 fs/nfs/nfs4proc.c 		if (!nfs4_valid_open_stateid(ctx->state))
ctx              3253 fs/nfs/nfs4proc.c 		l_ctx = nfs_get_lock_context(ctx);
ctx              3256 fs/nfs/nfs4proc.c 		status = nfs4_select_rw_stateid(ctx->state, FMODE_WRITE, l_ctx,
ctx              3271 fs/nfs/nfs4proc.c 	if (status == 0 && ctx != NULL)
ctx              3279 fs/nfs/nfs4proc.c 			   struct nfs_open_context *ctx, struct nfs4_label *ilabel,
ctx              3284 fs/nfs/nfs4proc.c 	struct nfs4_state *state = ctx ? ctx->state : NULL;
ctx              3309 fs/nfs/nfs4proc.c 		err = _nfs4_do_setattr(inode, &arg, &res, cred, ctx);
ctx              3696 fs/nfs/nfs4proc.c nfs4_atomic_open(struct inode *dir, struct nfs_open_context *ctx,
ctx              3702 fs/nfs/nfs4proc.c 	label = nfs4_label_init_security(dir, ctx->dentry, attr, &l);
ctx              3705 fs/nfs/nfs4proc.c 	state = nfs4_do_open(dir, ctx, open_flags, attr, label, opened);
ctx              3714 fs/nfs/nfs4proc.c static void nfs4_close_context(struct nfs_open_context *ctx, int is_sync)
ctx              3716 fs/nfs/nfs4proc.c 	if (ctx->state == NULL)
ctx              3719 fs/nfs/nfs4proc.c 		nfs4_close_sync(ctx->state, _nfs4_ctx_to_openmode(ctx));
ctx              3721 fs/nfs/nfs4proc.c 		nfs4_close_state(ctx->state, _nfs4_ctx_to_openmode(ctx));
ctx              4133 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx = NULL;
ctx              4155 fs/nfs/nfs4proc.c 		ctx = nfs_file_open_context(sattr->ia_file);
ctx              4156 fs/nfs/nfs4proc.c 		if (ctx)
ctx              4157 fs/nfs/nfs4proc.c 			cred = ctx->cred;
ctx              4168 fs/nfs/nfs4proc.c 	status = nfs4_do_setattr(inode, cred, fattr, sattr, ctx, NULL, label);
ctx              4456 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              4460 fs/nfs/nfs4proc.c 	ctx = alloc_nfs_open_context(dentry, FMODE_READ, NULL);
ctx              4461 fs/nfs/nfs4proc.c 	if (IS_ERR(ctx))
ctx              4462 fs/nfs/nfs4proc.c 		return PTR_ERR(ctx);
ctx              4468 fs/nfs/nfs4proc.c 	state = nfs4_do_open(dir, ctx, flags, sattr, ilabel, NULL);
ctx              4475 fs/nfs/nfs4proc.c 	put_nfs_open_context(ctx);
ctx              5113 fs/nfs/nfs4proc.c 		const struct nfs_open_context *ctx,
ctx              5117 fs/nfs/nfs4proc.c 	return nfs4_select_rw_stateid(ctx->state, fmode, l_ctx, stateid, NULL);
ctx              5122 fs/nfs/nfs4proc.c 		const struct nfs_open_context *ctx,
ctx              5129 fs/nfs/nfs4proc.c 	if (nfs4_set_rw_stateid(&current_stateid, ctx, l_ctx, fmode) == -EIO)
ctx              6482 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              6490 fs/nfs/nfs4proc.c 		struct nfs_open_context *ctx,
ctx              6507 fs/nfs/nfs4proc.c 	p->ctx = get_nfs_open_context(ctx);
ctx              6508 fs/nfs/nfs4proc.c 	p->l_ctx = nfs_get_lock_context(ctx);
ctx              6524 fs/nfs/nfs4proc.c 	put_nfs_open_context(calldata->ctx);
ctx              6607 fs/nfs/nfs4proc.c 		struct nfs_open_context *ctx,
ctx              6614 fs/nfs/nfs4proc.c 		.rpc_cred = ctx->cred,
ctx              6632 fs/nfs/nfs4proc.c 		set_bit(NFS_CONTEXT_UNLOCK, &ctx->flags);
ctx              6634 fs/nfs/nfs4proc.c 	data = nfs4_alloc_unlockdata(fl, ctx, lsp, seqid);
ctx              6700 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              6709 fs/nfs/nfs4proc.c 		struct nfs_open_context *ctx, struct nfs4_lock_state *lsp,
ctx              6736 fs/nfs/nfs4proc.c 	p->ctx = get_nfs_open_context(ctx);
ctx              6802 fs/nfs/nfs4proc.c 		renew_lease(NFS_SERVER(d_inode(data->ctx->dentry)),
ctx              6845 fs/nfs/nfs4proc.c 		task = nfs4_do_unlck(&data->fl, data->ctx, data->lsp,
ctx              6853 fs/nfs/nfs4proc.c 	put_nfs_open_context(data->ctx);
ctx              7162 fs/nfs/nfs4proc.c 	struct nfs_open_context *ctx;
ctx              7167 fs/nfs/nfs4proc.c 	ctx = nfs_file_open_context(filp);
ctx              7168 fs/nfs/nfs4proc.c 	state = ctx->state;
ctx              9079 fs/nfs/nfs4proc.c 			exception->state = lgp->args.ctx->state;
ctx              9178 fs/nfs/nfs4proc.c 	trace_nfs4_layoutget(lgp->args.ctx,
ctx              1433 fs/nfs/nfs4state.c 	struct nfs_open_context *ctx;
ctx              1438 fs/nfs/nfs4state.c 	list_for_each_entry_rcu(ctx, &nfsi->open_files, list) {
ctx              1439 fs/nfs/nfs4state.c 		state = ctx->state;
ctx              1467 fs/nfs/nfs4state.c 	struct nfs_open_context *ctx;
ctx              1470 fs/nfs/nfs4state.c 	list_for_each_entry_rcu(ctx, &nfsi->open_files, list) {
ctx              1471 fs/nfs/nfs4state.c 		if (ctx->state != state)
ctx              1473 fs/nfs/nfs4state.c 		set_bit(NFS_CONTEXT_BAD, &ctx->flags);
ctx              1475 fs/nfs/nfs4state.c 				"error = %d\n", ctx->dentry, err);
ctx               604 fs/nfs/nfs4trace.h 			const struct nfs_open_context *ctx,
ctx               609 fs/nfs/nfs4trace.h 		TP_ARGS(ctx, flags, error),
ctx               619 fs/nfs/nfs4trace.h 			__string(name, ctx->dentry->d_name.name)
ctx               627 fs/nfs/nfs4trace.h 			const struct nfs4_state *state = ctx->state;
ctx               632 fs/nfs/nfs4trace.h 			__entry->fmode = (__force unsigned int)ctx->mode;
ctx               633 fs/nfs/nfs4trace.h 			__entry->dev = ctx->dentry->d_sb->s_dev;
ctx               657 fs/nfs/nfs4trace.h 			__entry->dir = NFS_FILEID(d_inode(ctx->dentry->d_parent));
ctx               658 fs/nfs/nfs4trace.h 			__assign_str(name, ctx->dentry->d_name.name);
ctx               685 fs/nfs/nfs4trace.h 				const struct nfs_open_context *ctx, \
ctx               689 fs/nfs/nfs4trace.h 			TP_ARGS(ctx, flags, error))
ctx              1693 fs/nfs/nfs4trace.h 			const struct nfs_open_context *ctx,
ctx              1700 fs/nfs/nfs4trace.h 		TP_ARGS(ctx, args, res, layout_stateid, error),
ctx              1717 fs/nfs/nfs4trace.h 			const struct inode *inode = d_inode(ctx->dentry);
ctx              1718 fs/nfs/nfs4trace.h 			const struct nfs4_state *state = ctx->state;
ctx               378 fs/nfs/nfstrace.h 			const struct nfs_open_context *ctx,
ctx               382 fs/nfs/nfstrace.h 		TP_ARGS(dir, ctx, flags),
ctx               389 fs/nfs/nfstrace.h 			__string(name, ctx->dentry->d_name.name)
ctx               396 fs/nfs/nfstrace.h 			__entry->fmode = (__force unsigned int)ctx->mode;
ctx               397 fs/nfs/nfstrace.h 			__assign_str(name, ctx->dentry->d_name.name);
ctx               414 fs/nfs/nfstrace.h 			const struct nfs_open_context *ctx,
ctx               419 fs/nfs/nfstrace.h 		TP_ARGS(dir, ctx, flags, error),
ctx               427 fs/nfs/nfstrace.h 			__string(name, ctx->dentry->d_name.name)
ctx               435 fs/nfs/nfstrace.h 			__entry->fmode = (__force unsigned int)ctx->mode;
ctx               436 fs/nfs/nfstrace.h 			__assign_str(name, ctx->dentry->d_name.name);
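The nfs4trace.h and nfstrace.h entries are fragments of TRACE_EVENT() definitions that capture fields out of an nfs_open_context. A minimal sketch of the shape they come from; the event name myfs_open and the printk format are illustrative, while the __string()/__assign_str() pairing mirrors the entries above:

	TRACE_EVENT(myfs_open,
		TP_PROTO(const struct nfs_open_context *ctx, unsigned int flags),
		TP_ARGS(ctx, flags),
		TP_STRUCT__entry(
			__field(unsigned int, fmode)
			__string(name, ctx->dentry->d_name.name)
		),
		TP_fast_assign(
			__entry->fmode = (__force unsigned int)ctx->mode;
			__assign_str(name, ctx->dentry->d_name.name);
		),
		TP_printk("fmode=%u name=%s", __entry->fmode, __get_str(name))
	);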
ctx               305 fs/nfs/pagelist.c 	struct nfs_open_context *ctx = l_ctx->open_context;
ctx               307 fs/nfs/pagelist.c 	if (test_bit(NFS_CONTEXT_BAD, &ctx->flags))
ctx               346 fs/nfs/pagelist.c nfs_create_request(struct nfs_open_context *ctx, struct page *page,
ctx               349 fs/nfs/pagelist.c 	struct nfs_lock_context *l_ctx = nfs_get_lock_context(ctx);
ctx               418 fs/nfs/pagelist.c 	struct nfs_open_context *ctx;
ctx               427 fs/nfs/pagelist.c 			ctx = l_ctx->open_context;
ctx               428 fs/nfs/pagelist.c 			if (test_bit(NFS_CONTEXT_UNLOCK, &ctx->flags))
ctx               429 fs/nfs/pagelist.c 				rpc_wake_up(&NFS_SERVER(d_inode(ctx->dentry))->uoc_rpcwaitq);
ctx               957 fs/nfs/pnfs.c  pnfs_find_server(struct inode *inode, struct nfs_open_context *ctx)
ctx               964 fs/nfs/pnfs.c  		struct dentry *parent_dir = dget_parent(ctx->dentry);
ctx              1011 fs/nfs/pnfs.c  	   struct nfs_open_context *ctx,
ctx              1016 fs/nfs/pnfs.c  	struct nfs_server *server = pnfs_find_server(ino, ctx);
ctx              1061 fs/nfs/pnfs.c  	lgp->args.ctx = get_nfs_open_context(ctx);
ctx              1064 fs/nfs/pnfs.c  	lgp->cred = get_cred(ctx->cred);
ctx              1076 fs/nfs/pnfs.c  	put_nfs_open_context(lgp->args.ctx);
ctx              1353 fs/nfs/pnfs.c  	struct nfs_open_context *ctx;
ctx              1390 fs/nfs/pnfs.c  	list_for_each_entry_rcu(ctx, &nfsi->open_files, list) {
ctx              1391 fs/nfs/pnfs.c  		state = ctx->state;
ctx              1646 fs/nfs/pnfs.c  		      struct nfs_open_context *ctx,
ctx              1660 fs/nfs/pnfs.c  	lo->plh_lc_cred = get_cred(ctx->cred);
ctx              1667 fs/nfs/pnfs.c  		       struct nfs_open_context *ctx,
ctx              1680 fs/nfs/pnfs.c  	new = alloc_init_layout_hdr(ino, ctx, gfp_flags);
ctx              1771 fs/nfs/pnfs.c  static bool pnfs_within_mdsthreshold(struct nfs_open_context *ctx,
ctx              1774 fs/nfs/pnfs.c  	struct nfs4_threshold *t = ctx->mdsthreshold;
ctx              1881 fs/nfs/pnfs.c  		   struct nfs_open_context *ctx,
ctx              1910 fs/nfs/pnfs.c  	if (pnfs_within_mdsthreshold(ctx, ino, iomode)) {
ctx              1922 fs/nfs/pnfs.c  	lo = pnfs_find_alloc_layout(ino, ctx, gfp_flags);
ctx              1993 fs/nfs/pnfs.c  		status = nfs4_select_rw_stateid(ctx->state,
ctx              2004 fs/nfs/pnfs.c  			nfs4_schedule_stateid_recovery(server, ctx->state);
ctx              2053 fs/nfs/pnfs.c  	lgp = pnfs_alloc_init_layoutget_args(ino, ctx, &stateid, &arg, gfp_flags);
ctx              2136 fs/nfs/pnfs.c  _pnfs_grab_empty_layout(struct inode *ino, struct nfs_open_context *ctx)
ctx              2141 fs/nfs/pnfs.c  	lo = pnfs_find_alloc_layout(ino, ctx, GFP_KERNEL);
ctx              2166 fs/nfs/pnfs.c  				     struct nfs_open_context *ctx)
ctx              2183 fs/nfs/pnfs.c  	lo = _pnfs_grab_empty_layout(ino, ctx);
ctx              2186 fs/nfs/pnfs.c  	lgp = pnfs_alloc_init_layoutget_args(ino, ctx, &current_stateid,
ctx              2199 fs/nfs/pnfs.c  				     struct nfs_open_context *ctx)
ctx              2209 fs/nfs/pnfs.c  	lgp = pnfs_alloc_init_layoutget_args(NULL, ctx, &current_stateid,
ctx              2219 fs/nfs/pnfs.c  			 struct nfs_open_context *ctx)
ctx              2230 fs/nfs/pnfs.c  		_lgopen_prepare_attached(data, ctx);
ctx              2232 fs/nfs/pnfs.c  		_lgopen_prepare_floating(data, ctx);
ctx              2236 fs/nfs/pnfs.c  		       struct nfs_open_context *ctx)
ctx              2269 fs/nfs/pnfs.c  		lo = _pnfs_grab_empty_layout(ino, ctx);
ctx               304 fs/nfs/pnfs.h  					       struct nfs_open_context *ctx,
ctx               393 fs/nfs/pnfs.h  			 struct nfs_open_context *ctx);
ctx               395 fs/nfs/pnfs.h  		       struct nfs_open_context *ctx);
ctx               809 fs/nfs/pnfs.h  		struct nfs_open_context *ctx)
ctx               815 fs/nfs/pnfs.h  		struct nfs_open_context *ctx)
ctx               117 fs/nfs/read.c  int nfs_readpage_async(struct nfs_open_context *ctx, struct inode *inode,
ctx               128 fs/nfs/read.c  	new = nfs_create_request(ctx, page, 0, len);
ctx               312 fs/nfs/read.c  	struct nfs_open_context *ctx;
ctx               340 fs/nfs/read.c  		ctx = nfs_find_open_context(inode, NULL, FMODE_READ);
ctx               341 fs/nfs/read.c  		if (ctx == NULL)
ctx               344 fs/nfs/read.c  		ctx = get_nfs_open_context(nfs_file_open_context(file));
ctx               347 fs/nfs/read.c  		error = nfs_readpage_from_fscache(ctx, inode, page);
ctx               352 fs/nfs/read.c  	xchg(&ctx->error, 0);
ctx               353 fs/nfs/read.c  	error = nfs_readpage_async(ctx, inode, page);
ctx               357 fs/nfs/read.c  			error = xchg(&ctx->error, 0);
ctx               360 fs/nfs/read.c  	put_nfs_open_context(ctx);
ctx               369 fs/nfs/read.c  	struct nfs_open_context *ctx;
ctx               384 fs/nfs/read.c  	new = nfs_create_request(desc->ctx, page, 0, len);
ctx               426 fs/nfs/read.c  		desc.ctx = nfs_find_open_context(inode, NULL, FMODE_READ);
ctx               427 fs/nfs/read.c  		if (desc.ctx == NULL)
ctx               430 fs/nfs/read.c  		desc.ctx = get_nfs_open_context(nfs_file_open_context(filp));
ctx               435 fs/nfs/read.c  	ret = nfs_readpages_from_fscache(desc.ctx, inode, mapping,
ctx               455 fs/nfs/read.c  	put_nfs_open_context(desc.ctx);
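The fs/nfs/read.c entries show the two ways a reader obtains its open context: borrow (and reference) the one attached to an open struct file, or search the inode for any context opened with FMODE_READ. A hedged sketch of that choice, paired with the put that both paths require:

	struct nfs_open_context *ctx;

	if (file == NULL) {
		/* No struct file, e.g. readahead: find any read context. */
		ctx = nfs_find_open_context(inode, NULL, FMODE_READ);
		if (ctx == NULL)
			return -EBADF;
	} else {
		/* Take our own reference on the file's context. */
		ctx = get_nfs_open_context(nfs_file_open_context(file));
	}
	/* ... issue the read through ctx ... */
	put_nfs_open_context(ctx);	/* both paths returned a reference */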
ctx               969 fs/nfs/write.c 		struct nfs_open_context *ctx = nfs_req_openctx(req);
ctx               970 fs/nfs/write.c 		struct inode *inode = d_inode(ctx->dentry);
ctx              1171 fs/nfs/write.c static struct nfs_page * nfs_setup_write_request(struct nfs_open_context* ctx,
ctx              1180 fs/nfs/write.c 	req = nfs_create_request(ctx, page, offset, bytes);
ctx              1188 fs/nfs/write.c static int nfs_writepage_setup(struct nfs_open_context *ctx, struct page *page,
ctx              1193 fs/nfs/write.c 	req = nfs_setup_write_request(ctx, page, offset, count);
ctx              1206 fs/nfs/write.c 	struct nfs_open_context *ctx = nfs_file_open_context(file);
ctx              1225 fs/nfs/write.c 			!nfs_match_open_context(nfs_req_openctx(req), ctx);
ctx              1252 fs/nfs/write.c 	struct nfs_open_context *ctx = nfs_file_open_context(filp);
ctx              1254 fs/nfs/write.c 	if (nfs_ctx_key_to_expire(ctx, inode) &&
ctx              1255 fs/nfs/write.c 	    !ctx->ll_cred)
ctx              1264 fs/nfs/write.c bool nfs_ctx_key_to_expire(struct nfs_open_context *ctx, struct inode *inode)
ctx              1267 fs/nfs/write.c 	struct rpc_cred *cred = ctx->ll_cred;
ctx              1269 fs/nfs/write.c 		.cred = ctx->cred,
ctx              1274 fs/nfs/write.c 		ctx->ll_cred = NULL;
ctx              1281 fs/nfs/write.c 	ctx->ll_cred = cred;
ctx              1366 fs/nfs/write.c 	struct nfs_open_context *ctx = nfs_file_open_context(file);
ctx              1384 fs/nfs/write.c 	status = nfs_writepage_setup(ctx, page, offset, count);
ctx              1748 fs/nfs/write.c 	struct nfs_open_context *ctx = nfs_req_openctx(first);
ctx              1749 fs/nfs/write.c 	struct inode *inode = d_inode(ctx->dentry);
ctx              1757 fs/nfs/write.c 	data->cred	  = ctx->cred;
ctx              1770 fs/nfs/write.c 	data->context     = get_nfs_open_context(ctx);
ctx               273 fs/nfsd/nfs4recover.c 	struct dir_context ctx;
ctx               281 fs/nfsd/nfs4recover.c 	struct nfs4_dir_ctx *ctx =
ctx               282 fs/nfsd/nfs4recover.c 		container_of(__ctx, struct nfs4_dir_ctx, ctx);
ctx               292 fs/nfsd/nfs4recover.c 	list_add(&entry->list, &ctx->names);
ctx               301 fs/nfsd/nfs4recover.c 	struct nfs4_dir_ctx ctx = {
ctx               302 fs/nfsd/nfs4recover.c 		.ctx.actor = nfsd4_build_namelist,
ctx               303 fs/nfsd/nfs4recover.c 		.names = LIST_HEAD_INIT(ctx.names)
ctx               318 fs/nfsd/nfs4recover.c 	status = iterate_dir(nn->rec_file, &ctx.ctx);
ctx               321 fs/nfsd/nfs4recover.c 	list_for_each_entry_safe(entry, tmp, &ctx.names, list) {
ctx               338 fs/nfsd/nfs4recover.c 	list_for_each_entry_safe(entry, tmp, &ctx.names, list) {
ctx              1849 fs/nfsd/vfs.c  	struct dir_context ctx;
ctx              1855 fs/nfsd/vfs.c  static int nfsd_buffered_filldir(struct dir_context *ctx, const char *name,
ctx              1860 fs/nfsd/vfs.c  		container_of(ctx, struct readdir_data, ctx);
ctx              1888 fs/nfsd/vfs.c  		.ctx.actor = nfsd_buffered_filldir,
ctx              1904 fs/nfsd/vfs.c  		host_err = iterate_dir(file, &buf.ctx);
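The fs/nfsd entries, like the ocfs2 and overlayfs ones further down, all follow one idiom: embed a struct dir_context in a private struct, point .ctx.actor at a callback, hand the embedded member to iterate_dir(), and recover the private struct inside the callback with container_of(). A self-contained sketch of the idiom; struct my_dir_ctx and my_filldir are illustrative names:

	struct my_dir_ctx {
		struct dir_context ctx;		/* embedded VFS cursor */
		int count;			/* private per-walk state */
	};

	static int my_filldir(struct dir_context *ctx, const char *name,
			      int namlen, loff_t offset, u64 ino,
			      unsigned int d_type)
	{
		struct my_dir_ctx *buf =
			container_of(ctx, struct my_dir_ctx, ctx);

		buf->count++;			/* consume one entry */
		return 0;			/* non-zero stops the walk */
	}

	/* caller: */
	struct my_dir_ctx buf = { .ctx.actor = my_filldir };
	int err = iterate_dir(file, &buf.ctx);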
ctx               263 fs/nilfs2/dir.c static int nilfs_readdir(struct file *file, struct dir_context *ctx)
ctx               265 fs/nilfs2/dir.c 	loff_t pos = ctx->pos;
ctx               282 fs/nilfs2/dir.c 			ctx->pos += PAGE_SIZE - offset;
ctx               303 fs/nilfs2/dir.c 				if (!dir_emit(ctx, de->name, de->name_len,
ctx               309 fs/nilfs2/dir.c 			ctx->pos += nilfs_rec_len_from_disk(de->rec_len);
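The nilfs2 entries show the producing side of the same interface: a filesystem's readdir pushes entries out through dir_emit() and advances ctx->pos itself, so the VFS can restart the walk at the same cursor on the next call. A sketch under stated assumptions; struct my_dirent and my_next_entry() are hypothetical helpers standing in for the on-disk format:

	static int my_readdir(struct file *file, struct dir_context *ctx)
	{
		struct my_dirent *de;		/* hypothetical on-disk entry */

		if (!dir_emit_dots(file, ctx))	/* "." and ".." first */
			return 0;
		while ((de = my_next_entry(file, ctx->pos)) != NULL) {
			if (!dir_emit(ctx, de->name, de->name_len,
				      de->ino, DT_UNKNOWN))
				return 0;	/* consumer full: leave pos on
						 * this entry, resume later */
			ctx->pos += de->rec_len;	/* cursor = byte offset */
		}
		return 0;
	}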
ctx               265 fs/nsfs.c      	struct pseudo_fs_context *ctx = init_pseudo(fc, NSFS_MAGIC);
ctx               266 fs/nsfs.c      	if (!ctx)
ctx               268 fs/nsfs.c      	ctx->ops = &nsfs_ops;
ctx               269 fs/nsfs.c      	ctx->dops = &ns_dentry_operations;
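fs/nsfs.c, like fs/pipe.c near the end of this section, uses the pseudo-filesystem helper: init_pseudo() builds the whole fs_context for a nodev in-kernel mount, and the caller only overrides the operations it cares about. A minimal sketch; MYFS_MAGIC, myfs_sops, and myfs_dentry_operations are illustrative:

	static int myfs_init_fs_context(struct fs_context *fc)
	{
		struct pseudo_fs_context *ctx = init_pseudo(fc, MYFS_MAGIC);

		if (!ctx)
			return -ENOMEM;	/* init_pseudo() returns NULL on OOM */
		ctx->ops = &myfs_sops;			/* super_operations */
		ctx->dops = &myfs_dentry_operations;	/* dentry_operations */
		return 0;
	}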
ctx               387 fs/ntfs/aops.c 	ntfs_attr_search_ctx *ctx;
ctx               470 fs/ntfs/aops.c 	ctx = ntfs_attr_get_search_ctx(base_ni, mrec);
ctx               471 fs/ntfs/aops.c 	if (unlikely(!ctx)) {
ctx               476 fs/ntfs/aops.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx               479 fs/ntfs/aops.c 	attr_len = le32_to_cpu(ctx->attr->data.resident.value_length);
ctx               491 fs/ntfs/aops.c 	memcpy(addr, (u8*)ctx->attr +
ctx               492 fs/ntfs/aops.c 			le16_to_cpu(ctx->attr->data.resident.value_offset),
ctx               499 fs/ntfs/aops.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1345 fs/ntfs/aops.c 	ntfs_attr_search_ctx *ctx = NULL;
ctx              1442 fs/ntfs/aops.c 		ctx = NULL;
ctx              1453 fs/ntfs/aops.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1454 fs/ntfs/aops.c 	if (unlikely(!ctx)) {
ctx              1459 fs/ntfs/aops.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1469 fs/ntfs/aops.c 	attr_len = le32_to_cpu(ctx->attr->data.resident.value_length);
ctx              1478 fs/ntfs/aops.c 		err = ntfs_resident_attr_value_resize(ctx->mrec, ctx->attr,
ctx              1485 fs/ntfs/aops.c 	memcpy((u8*)ctx->attr +
ctx              1486 fs/ntfs/aops.c 			le16_to_cpu(ctx->attr->data.resident.value_offset),
ctx              1492 fs/ntfs/aops.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1496 fs/ntfs/aops.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1497 fs/ntfs/aops.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1517 fs/ntfs/aops.c 	if (ctx)
ctx              1518 fs/ntfs/aops.c 		ntfs_attr_put_search_ctx(ctx);
ctx                70 fs/ntfs/attrib.c int ntfs_map_runlist_nolock(ntfs_inode *ni, VCN vcn, ntfs_attr_search_ctx *ctx)
ctx                89 fs/ntfs/attrib.c 	if (!ctx) {
ctx                94 fs/ntfs/attrib.c 		ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx                95 fs/ntfs/attrib.c 		if (unlikely(!ctx)) {
ctx               102 fs/ntfs/attrib.c 		BUG_ON(IS_ERR(ctx->mrec));
ctx               103 fs/ntfs/attrib.c 		a = ctx->attr;
ctx               129 fs/ntfs/attrib.c 			old_ctx = *ctx;
ctx               147 fs/ntfs/attrib.c 			ntfs_attr_reinit_search_ctx(ctx);
ctx               153 fs/ntfs/attrib.c 				CASE_SENSITIVE, vcn, NULL, 0, ctx);
ctx               159 fs/ntfs/attrib.c 		BUG_ON(!ctx->attr->non_resident);
ctx               161 fs/ntfs/attrib.c 	a = ctx->attr;
ctx               180 fs/ntfs/attrib.c 		if (likely(ctx))
ctx               181 fs/ntfs/attrib.c 			ntfs_attr_put_search_ctx(ctx);
ctx               197 fs/ntfs/attrib.c 			if (ctx->ntfs_ino != old_ctx.ntfs_ino) {
ctx               202 fs/ntfs/attrib.c 				if (ctx->base_ntfs_ino && ctx->ntfs_ino !=
ctx               203 fs/ntfs/attrib.c 						ctx->base_ntfs_ino) {
ctx               204 fs/ntfs/attrib.c 					unmap_extent_mft_record(ctx->ntfs_ino);
ctx               205 fs/ntfs/attrib.c 					ctx->mrec = ctx->base_mrec;
ctx               206 fs/ntfs/attrib.c 					BUG_ON(!ctx->mrec);
ctx               216 fs/ntfs/attrib.c 					ctx->mrec = map_mft_record(
ctx               228 fs/ntfs/attrib.c 					if (IS_ERR(ctx->mrec)) {
ctx               229 fs/ntfs/attrib.c 						if (PTR_ERR(ctx->mrec) ==
ctx               241 fs/ntfs/attrib.c 			if (ctx->mrec != old_ctx.mrec) {
ctx               242 fs/ntfs/attrib.c 				if (!IS_ERR(ctx->mrec))
ctx               244 fs/ntfs/attrib.c 							(u8*)ctx->mrec +
ctx               247 fs/ntfs/attrib.c 				old_ctx.mrec = ctx->mrec;
ctx               251 fs/ntfs/attrib.c 		*ctx = old_ctx;
ctx               451 fs/ntfs/attrib.c 		ntfs_attr_search_ctx *ctx)
ctx               460 fs/ntfs/attrib.c 			ni->mft_no, (unsigned long long)vcn, ctx ? "" : "out");
ctx               496 fs/ntfs/attrib.c 		if (IS_ERR(ctx->mrec))
ctx               497 fs/ntfs/attrib.c 			err = PTR_ERR(ctx->mrec);
ctx               503 fs/ntfs/attrib.c 			err = ntfs_map_runlist_nolock(ni, vcn, ctx);
ctx               577 fs/ntfs/attrib.c 		const u8 *val, const u32 val_len, ntfs_attr_search_ctx *ctx)
ctx               580 fs/ntfs/attrib.c 	ntfs_volume *vol = ctx->ntfs_ino->vol;
ctx               588 fs/ntfs/attrib.c 	if (ctx->is_first) {
ctx               589 fs/ntfs/attrib.c 		a = ctx->attr;
ctx               590 fs/ntfs/attrib.c 		ctx->is_first = false;
ctx               592 fs/ntfs/attrib.c 		a = (ATTR_RECORD*)((u8*)ctx->attr +
ctx               593 fs/ntfs/attrib.c 				le32_to_cpu(ctx->attr->length));
ctx               595 fs/ntfs/attrib.c 		if ((u8*)a < (u8*)ctx->mrec || (u8*)a > (u8*)ctx->mrec +
ctx               596 fs/ntfs/attrib.c 				le32_to_cpu(ctx->mrec->bytes_allocated))
ctx               598 fs/ntfs/attrib.c 		ctx->attr = a;
ctx               846 fs/ntfs/attrib.c 		const u8 *val, const u32 val_len, ntfs_attr_search_ctx *ctx)
ctx               858 fs/ntfs/attrib.c 	ni = ctx->ntfs_ino;
ctx               859 fs/ntfs/attrib.c 	base_ni = ctx->base_ntfs_ino;
ctx               863 fs/ntfs/attrib.c 		base_ni = ctx->base_ntfs_ino = ctx->ntfs_ino;
ctx               864 fs/ntfs/attrib.c 		ctx->base_mrec = ctx->mrec;
ctx               867 fs/ntfs/attrib.c 		ctx->base_attr = ctx->attr;
ctx               873 fs/ntfs/attrib.c 	if (!ctx->al_entry)
ctx               874 fs/ntfs/attrib.c 		ctx->al_entry = (ATTR_LIST_ENTRY*)al_start;
ctx               879 fs/ntfs/attrib.c 	if (ctx->is_first) {
ctx               880 fs/ntfs/attrib.c 		al_entry = ctx->al_entry;
ctx               881 fs/ntfs/attrib.c 		ctx->is_first = false;
ctx               883 fs/ntfs/attrib.c 		al_entry = (ATTR_LIST_ENTRY*)((u8*)ctx->al_entry +
ctx               884 fs/ntfs/attrib.c 				le16_to_cpu(ctx->al_entry->length));
ctx               890 fs/ntfs/attrib.c 		ctx->al_entry = al_entry;
ctx               983 fs/ntfs/attrib.c 				ni = ctx->ntfs_ino = base_ni;
ctx               984 fs/ntfs/attrib.c 				ctx->mrec = ctx->base_mrec;
ctx               987 fs/ntfs/attrib.c 				ctx->mrec = map_extent_mft_record(base_ni,
ctx               990 fs/ntfs/attrib.c 				if (IS_ERR(ctx->mrec)) {
ctx               998 fs/ntfs/attrib.c 					err = PTR_ERR(ctx->mrec);
ctx              1005 fs/ntfs/attrib.c 				ctx->ntfs_ino = ni;
ctx              1007 fs/ntfs/attrib.c 			ctx->attr = (ATTR_RECORD*)((u8*)ctx->mrec +
ctx              1008 fs/ntfs/attrib.c 					le16_to_cpu(ctx->mrec->attrs_offset));
ctx              1025 fs/ntfs/attrib.c 		a = ctx->attr;
ctx              1031 fs/ntfs/attrib.c 		if ((u8*)a < (u8*)ctx->mrec || (u8*)a > (u8*)ctx->mrec +
ctx              1032 fs/ntfs/attrib.c 				le32_to_cpu(ctx->mrec->bytes_allocated))
ctx              1052 fs/ntfs/attrib.c 		ctx->attr = a;
ctx              1079 fs/ntfs/attrib.c 		ctx->ntfs_ino = base_ni;
ctx              1080 fs/ntfs/attrib.c 		ctx->mrec = ctx->base_mrec;
ctx              1081 fs/ntfs/attrib.c 		ctx->attr = ctx->base_attr;
ctx              1092 fs/ntfs/attrib.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx              1094 fs/ntfs/attrib.c 				ctx);
ctx              1111 fs/ntfs/attrib.c 	ctx->mrec = ctx->base_mrec;
ctx              1112 fs/ntfs/attrib.c 	ctx->attr = (ATTR_RECORD*)((u8*)ctx->mrec +
ctx              1113 fs/ntfs/attrib.c 			le16_to_cpu(ctx->mrec->attrs_offset));
ctx              1114 fs/ntfs/attrib.c 	ctx->is_first = true;
ctx              1115 fs/ntfs/attrib.c 	ctx->ntfs_ino = base_ni;
ctx              1116 fs/ntfs/attrib.c 	ctx->base_ntfs_ino = NULL;
ctx              1117 fs/ntfs/attrib.c 	ctx->base_mrec = NULL;
ctx              1118 fs/ntfs/attrib.c 	ctx->base_attr = NULL;
ctx              1128 fs/ntfs/attrib.c 				ctx);
ctx              1176 fs/ntfs/attrib.c 		ntfs_attr_search_ctx *ctx)
ctx              1181 fs/ntfs/attrib.c 	BUG_ON(IS_ERR(ctx->mrec));
ctx              1182 fs/ntfs/attrib.c 	if (ctx->base_ntfs_ino)
ctx              1183 fs/ntfs/attrib.c 		base_ni = ctx->base_ntfs_ino;
ctx              1185 fs/ntfs/attrib.c 		base_ni = ctx->ntfs_ino;
ctx              1190 fs/ntfs/attrib.c 				ctx);
ctx              1192 fs/ntfs/attrib.c 			val, val_len, ctx);
ctx              1203 fs/ntfs/attrib.c static inline void ntfs_attr_init_search_ctx(ntfs_attr_search_ctx *ctx,
ctx              1206 fs/ntfs/attrib.c 	*ctx = (ntfs_attr_search_ctx) {
ctx              1226 fs/ntfs/attrib.c void ntfs_attr_reinit_search_ctx(ntfs_attr_search_ctx *ctx)
ctx              1228 fs/ntfs/attrib.c 	if (likely(!ctx->base_ntfs_ino)) {
ctx              1230 fs/ntfs/attrib.c 		ctx->is_first = true;
ctx              1232 fs/ntfs/attrib.c 		ctx->attr = (ATTR_RECORD*)((u8*)ctx->mrec +
ctx              1233 fs/ntfs/attrib.c 				le16_to_cpu(ctx->mrec->attrs_offset));
ctx              1238 fs/ntfs/attrib.c 		ctx->al_entry = NULL;
ctx              1241 fs/ntfs/attrib.c 	if (ctx->ntfs_ino != ctx->base_ntfs_ino)
ctx              1242 fs/ntfs/attrib.c 		unmap_extent_mft_record(ctx->ntfs_ino);
ctx              1243 fs/ntfs/attrib.c 	ntfs_attr_init_search_ctx(ctx, ctx->base_ntfs_ino, ctx->base_mrec);
ctx              1257 fs/ntfs/attrib.c 	ntfs_attr_search_ctx *ctx;
ctx              1259 fs/ntfs/attrib.c 	ctx = kmem_cache_alloc(ntfs_attr_ctx_cache, GFP_NOFS);
ctx              1260 fs/ntfs/attrib.c 	if (ctx)
ctx              1261 fs/ntfs/attrib.c 		ntfs_attr_init_search_ctx(ctx, ni, mrec);
ctx              1262 fs/ntfs/attrib.c 	return ctx;
ctx              1272 fs/ntfs/attrib.c void ntfs_attr_put_search_ctx(ntfs_attr_search_ctx *ctx)
ctx              1274 fs/ntfs/attrib.c 	if (ctx->base_ntfs_ino && ctx->ntfs_ino != ctx->base_ntfs_ino)
ctx              1275 fs/ntfs/attrib.c 		unmap_extent_mft_record(ctx->ntfs_ino);
ctx              1276 fs/ntfs/attrib.c 	kmem_cache_free(ntfs_attr_ctx_cache, ctx);
ctx              1529 fs/ntfs/attrib.c 	ntfs_attr_search_ctx *ctx;
ctx              1602 fs/ntfs/attrib.c 		ctx = NULL;
ctx              1605 fs/ntfs/attrib.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1606 fs/ntfs/attrib.c 	if (unlikely(!ctx)) {
ctx              1611 fs/ntfs/attrib.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1617 fs/ntfs/attrib.c 	m = ctx->mrec;
ctx              1618 fs/ntfs/attrib.c 	a = ctx->attr;
ctx              1729 fs/ntfs/attrib.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1730 fs/ntfs/attrib.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1731 fs/ntfs/attrib.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1803 fs/ntfs/attrib.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1804 fs/ntfs/attrib.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1806 fs/ntfs/attrib.c 	if (ctx)
ctx              1807 fs/ntfs/attrib.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1900 fs/ntfs/attrib.c 	ntfs_attr_search_ctx *ctx;
ctx              1982 fs/ntfs/attrib.c 		ctx = NULL;
ctx              1985 fs/ntfs/attrib.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1986 fs/ntfs/attrib.c 	if (unlikely(!ctx)) {
ctx              2017 fs/ntfs/attrib.c 			CASE_SENSITIVE, vcn, NULL, 0, ctx);
ctx              2023 fs/ntfs/attrib.c 	m = ctx->mrec;
ctx              2024 fs/ntfs/attrib.c 	a = ctx->attr;
ctx              2060 fs/ntfs/attrib.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2313 fs/ntfs/attrib.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2314 fs/ntfs/attrib.c 		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2315 fs/ntfs/attrib.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx              2317 fs/ntfs/attrib.c 				CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              2321 fs/ntfs/attrib.c 		a = ctx->attr;
ctx              2352 fs/ntfs/attrib.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2353 fs/ntfs/attrib.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2355 fs/ntfs/attrib.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2370 fs/ntfs/attrib.c 	ntfs_attr_reinit_search_ctx(ctx);
ctx              2373 fs/ntfs/attrib.c 			ctx)) {
ctx              2391 fs/ntfs/attrib.c 		ntfs_attr_put_search_ctx(ctx);
ctx              2401 fs/ntfs/attrib.c 	ctx->attr->data.non_resident.highest_vcn = cpu_to_sle64(
ctx              2405 fs/ntfs/attrib.c 	if (ntfs_cluster_free(ni, ll, -1, ctx) < 0) {
ctx              2411 fs/ntfs/attrib.c 	m = ctx->mrec;
ctx              2412 fs/ntfs/attrib.c 	a = ctx->attr;
ctx              2444 fs/ntfs/attrib.c 			flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2445 fs/ntfs/attrib.c 			mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2449 fs/ntfs/attrib.c 	if (ctx)
ctx              2450 fs/ntfs/attrib.c 		ntfs_attr_put_search_ctx(ctx);
ctx                50 fs/ntfs/attrib.h 		ntfs_attr_search_ctx *ctx);
ctx                57 fs/ntfs/attrib.h 		const VCN vcn, ntfs_attr_search_ctx *ctx);
ctx                62 fs/ntfs/attrib.h 		ntfs_attr_search_ctx *ctx);
ctx                74 fs/ntfs/attrib.h extern void ntfs_attr_reinit_search_ctx(ntfs_attr_search_ctx *ctx);
ctx                77 fs/ntfs/attrib.h extern void ntfs_attr_put_search_ctx(ntfs_attr_search_ctx *ctx);
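The attrib.h declarations above, and the long attrib.c run before them, revolve around one lifecycle: allocate a search context over a mapped MFT record, look attributes up through it, optionally rewind it with ntfs_attr_reinit_search_ctx() for another search, and always put it so any mapped extent record is unmapped. A condensed, hedged sketch of the pattern that repeats through the ntfs entries below (error handling trimmed; the AT_DATA lookup arguments mirror the call sites listed above):

	ntfs_attr_search_ctx *ctx;
	int err;

	ctx = ntfs_attr_get_search_ctx(base_ni, m);
	if (unlikely(!ctx))
		return -ENOMEM;
	err = ntfs_attr_lookup(AT_DATA, NULL, 0, CASE_SENSITIVE, 0,
			       NULL, 0, ctx);
	if (!err) {
		/* ctx->attr and ctx->mrec now address the found record;
		 * after modifying it, write it back: */
		flush_dcache_mft_record_page(ctx->ntfs_ino);
		mark_mft_record_dirty(ctx->ntfs_ino);
	}
	ntfs_attr_put_search_ctx(ctx);	/* unmaps extents, frees to cache */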
ctx                77 fs/ntfs/dir.c  	ntfs_attr_search_ctx *ctx;
ctx                94 fs/ntfs/dir.c  	ctx = ntfs_attr_get_search_ctx(dir_ni, m);
ctx                95 fs/ntfs/dir.c  	if (unlikely(!ctx)) {
ctx               101 fs/ntfs/dir.c  			0, ctx);
ctx               112 fs/ntfs/dir.c  	ir = (INDEX_ROOT*)((u8*)ctx->attr +
ctx               113 fs/ntfs/dir.c  			le16_to_cpu(ctx->attr->data.resident.value_offset));
ctx               124 fs/ntfs/dir.c  		if ((u8*)ie < (u8*)ctx->mrec || (u8*)ie +
ctx               176 fs/ntfs/dir.c  			ntfs_attr_put_search_ctx(ctx);
ctx               275 fs/ntfs/dir.c  			ntfs_attr_put_search_ctx(ctx);
ctx               297 fs/ntfs/dir.c  	ntfs_attr_put_search_ctx(ctx);
ctx               300 fs/ntfs/dir.c  	ctx = NULL;
ctx               578 fs/ntfs/dir.c  	if (ctx)
ctx               579 fs/ntfs/dir.c  		ntfs_attr_put_search_ctx(ctx);
ctx               630 fs/ntfs/dir.c  	ntfs_attr_search_ctx *ctx;
ctx               645 fs/ntfs/dir.c  	ctx = ntfs_attr_get_search_ctx(dir_ni, m);
ctx               646 fs/ntfs/dir.c  	if (!ctx) {
ctx               652 fs/ntfs/dir.c  			0, ctx);
ctx               663 fs/ntfs/dir.c  	ir = (INDEX_ROOT*)((u8*)ctx->attr +
ctx               664 fs/ntfs/dir.c  			le16_to_cpu(ctx->attr->data.resident.value_offset));
ctx               675 fs/ntfs/dir.c  		if ((u8*)ie < (u8*)ctx->mrec || (u8*)ie +
ctx               710 fs/ntfs/dir.c  			ntfs_attr_put_search_ctx(ctx);
ctx               775 fs/ntfs/dir.c  	ntfs_attr_put_search_ctx(ctx);
ctx               778 fs/ntfs/dir.c  	ctx = NULL;
ctx               978 fs/ntfs/dir.c  	if (ctx)
ctx               979 fs/ntfs/dir.c  		ntfs_attr_put_search_ctx(ctx);
ctx              1102 fs/ntfs/dir.c  	ntfs_attr_search_ctx *ctx;
ctx              1115 fs/ntfs/dir.c  	ctx = NULL;
ctx              1135 fs/ntfs/dir.c  	ctx = ntfs_attr_get_search_ctx(ndir, m);
ctx              1136 fs/ntfs/dir.c  	if (unlikely(!ctx)) {
ctx              1144 fs/ntfs/dir.c  			0, ctx);
ctx              1160 fs/ntfs/dir.c  	rc = le32_to_cpu(ctx->attr->data.resident.value_length);
ctx              1167 fs/ntfs/dir.c  	memcpy(ir, (u8*)ctx->attr +
ctx              1168 fs/ntfs/dir.c  			le16_to_cpu(ctx->attr->data.resident.value_offset), rc);
ctx              1169 fs/ntfs/dir.c  	ntfs_attr_put_search_ctx(ctx);
ctx              1171 fs/ntfs/dir.c  	ctx = NULL;
ctx              1424 fs/ntfs/dir.c  	if (ctx)
ctx              1425 fs/ntfs/dir.c  		ntfs_attr_put_search_ctx(ctx);
ctx               105 fs/ntfs/file.c 	ntfs_attr_search_ctx *ctx = NULL;
ctx               137 fs/ntfs/file.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx               138 fs/ntfs/file.c 	if (unlikely(!ctx)) {
ctx               143 fs/ntfs/file.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx               149 fs/ntfs/file.c 	m = ctx->mrec;
ctx               150 fs/ntfs/file.c 	a = ctx->attr;
ctx               181 fs/ntfs/file.c 		ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx               182 fs/ntfs/file.c 		if (unlikely(!ctx)) {
ctx               187 fs/ntfs/file.c 				CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx               193 fs/ntfs/file.c 		m = ctx->mrec;
ctx               194 fs/ntfs/file.c 		a = ctx->attr;
ctx               199 fs/ntfs/file.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx               200 fs/ntfs/file.c 		mark_mft_record_dirty(ctx->ntfs_ino);
ctx               203 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx               204 fs/ntfs/file.c 		ctx = NULL;
ctx               278 fs/ntfs/file.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx               279 fs/ntfs/file.c 	if (unlikely(!ctx)) {
ctx               284 fs/ntfs/file.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx               290 fs/ntfs/file.c 	m = ctx->mrec;
ctx               291 fs/ntfs/file.c 	a = ctx->attr;
ctx               295 fs/ntfs/file.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx               296 fs/ntfs/file.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx               297 fs/ntfs/file.c 	if (ctx)
ctx               298 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx               309 fs/ntfs/file.c 	if (ctx)
ctx               310 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx               583 fs/ntfs/file.c 	ntfs_attr_search_ctx *ctx = NULL;
ctx              1019 fs/ntfs/file.c 		ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1020 fs/ntfs/file.c 		if (unlikely(!ctx)) {
ctx              1027 fs/ntfs/file.c 				CASE_SENSITIVE, bh_cpos, NULL, 0, ctx);
ctx              1033 fs/ntfs/file.c 		m = ctx->mrec;
ctx              1034 fs/ntfs/file.c 		a = ctx->attr;
ctx              1129 fs/ntfs/file.c 				flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1130 fs/ntfs/file.c 				mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1131 fs/ntfs/file.c 				ntfs_attr_reinit_search_ctx(ctx);
ctx              1134 fs/ntfs/file.c 						0, NULL, 0, ctx);
ctx              1140 fs/ntfs/file.c 				a = ctx->attr;
ctx              1149 fs/ntfs/file.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1150 fs/ntfs/file.c 		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1151 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1228 fs/ntfs/file.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx              1230 fs/ntfs/file.c 				CASE_SENSITIVE, bh_cpos, NULL, 0, ctx)) {
ctx              1238 fs/ntfs/file.c 			flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1239 fs/ntfs/file.c 			mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1247 fs/ntfs/file.c 			m = ctx->mrec;
ctx              1248 fs/ntfs/file.c 			a = ctx->attr;
ctx              1311 fs/ntfs/file.c 			flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1312 fs/ntfs/file.c 			mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1317 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1392 fs/ntfs/file.c 	ntfs_attr_search_ctx *ctx;
ctx              1456 fs/ntfs/file.c 		ctx = NULL;
ctx              1460 fs/ntfs/file.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1461 fs/ntfs/file.c 	if (unlikely(!ctx)) {
ctx              1466 fs/ntfs/file.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1472 fs/ntfs/file.c 	a = ctx->attr;
ctx              1485 fs/ntfs/file.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1486 fs/ntfs/file.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1487 fs/ntfs/file.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1492 fs/ntfs/file.c 	if (ctx)
ctx              1493 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1547 fs/ntfs/file.c 	ntfs_attr_search_ctx *ctx;
ctx              1583 fs/ntfs/file.c 		ctx = NULL;
ctx              1586 fs/ntfs/file.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1587 fs/ntfs/file.c 	if (unlikely(!ctx)) {
ctx              1592 fs/ntfs/file.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1598 fs/ntfs/file.c 	a = ctx->attr;
ctx              1645 fs/ntfs/file.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1646 fs/ntfs/file.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1647 fs/ntfs/file.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1673 fs/ntfs/file.c 	if (ctx)
ctx              1674 fs/ntfs/file.c 		ntfs_attr_put_search_ctx(ctx);
ctx               431 fs/ntfs/inode.c static int ntfs_is_extended_system_file(ntfs_attr_search_ctx *ctx)
ctx               436 fs/ntfs/inode.c 	ntfs_attr_reinit_search_ctx(ctx);
ctx               439 fs/ntfs/inode.c 	nr_links = le16_to_cpu(ctx->mrec->link_count);
ctx               443 fs/ntfs/inode.c 			ctx))) {
ctx               445 fs/ntfs/inode.c 		ATTR_RECORD *attr = ctx->attr;
ctx               454 fs/ntfs/inode.c 		if (p < (u8*)ctx->mrec || (u8*)p > (u8*)ctx->mrec +
ctx               455 fs/ntfs/inode.c 				le32_to_cpu(ctx->mrec->bytes_in_use)) {
ctx               457 fs/ntfs/inode.c 			ntfs_error(ctx->ntfs_ino->vol->sb, "Corrupt file name "
ctx               462 fs/ntfs/inode.c 			ntfs_error(ctx->ntfs_ino->vol->sb, "Non-resident file "
ctx               467 fs/ntfs/inode.c 			ntfs_error(ctx->ntfs_ino->vol->sb, "File name with "
ctx               473 fs/ntfs/inode.c 			ntfs_error(ctx->ntfs_ino->vol->sb, "Unindexed file "
ctx               489 fs/ntfs/inode.c 		ntfs_error(ctx->ntfs_ino->vol->sb, "Inode hard link count "
ctx               531 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx               554 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(ni, m);
ctx               555 fs/ntfs/inode.c 	if (!ctx) {
ctx               616 fs/ntfs/inode.c 			ctx);
ctx               629 fs/ntfs/inode.c 	a = ctx->attr;
ctx               658 fs/ntfs/inode.c 	ntfs_attr_reinit_search_ctx(ctx);
ctx               659 fs/ntfs/inode.c 	err = ntfs_attr_lookup(AT_ATTRIBUTE_LIST, NULL, 0, 0, 0, NULL, 0, ctx);
ctx               671 fs/ntfs/inode.c 		a = ctx->attr;
ctx               736 fs/ntfs/inode.c 					(u8*)ctx->mrec + vol->mft_record_size) {
ctx               760 fs/ntfs/inode.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx               762 fs/ntfs/inode.c 				0, NULL, 0, ctx);
ctx               773 fs/ntfs/inode.c 		a = ctx->attr;
ctx               808 fs/ntfs/inode.c 		if (ir_end > (u8*)ctx->mrec + vol->mft_record_size) {
ctx               878 fs/ntfs/inode.c 			ntfs_attr_put_search_ctx(ctx);
ctx               881 fs/ntfs/inode.c 			ctx = NULL;
ctx               886 fs/ntfs/inode.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx               888 fs/ntfs/inode.c 				CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx               900 fs/ntfs/inode.c 		a = ctx->attr;
ctx               948 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx               951 fs/ntfs/inode.c 		ctx = NULL;
ctx               984 fs/ntfs/inode.c 		ntfs_attr_reinit_search_ctx(ctx);
ctx               992 fs/ntfs/inode.c 		err = ntfs_attr_lookup(AT_DATA, NULL, 0, 0, 0, NULL, 0, ctx);
ctx              1016 fs/ntfs/inode.c 			if (ntfs_is_extended_system_file(ctx) > 0)
ctx              1023 fs/ntfs/inode.c 		a = ctx->attr;
ctx              1123 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1126 fs/ntfs/inode.c 		ctx = NULL;
ctx              1158 fs/ntfs/inode.c 	if (ctx)
ctx              1159 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1199 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx              1226 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1227 fs/ntfs/inode.c 	if (!ctx) {
ctx              1233 fs/ntfs/inode.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1236 fs/ntfs/inode.c 	a = ctx->attr;
ctx              1401 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1410 fs/ntfs/inode.c 	if (ctx)
ctx              1411 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1466 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx              1491 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              1492 fs/ntfs/inode.c 	if (!ctx) {
ctx              1498 fs/ntfs/inode.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1505 fs/ntfs/inode.c 	a = ctx->attr;
ctx              1530 fs/ntfs/inode.c 	if (ir_end > (u8*)ctx->mrec + vol->mft_record_size) {
ctx              1581 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1584 fs/ntfs/inode.c 		ctx = NULL;
ctx              1589 fs/ntfs/inode.c 	ntfs_attr_reinit_search_ctx(ctx);
ctx              1591 fs/ntfs/inode.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1602 fs/ntfs/inode.c 	a = ctx->attr;
ctx              1645 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1648 fs/ntfs/inode.c 	ctx = NULL;
ctx              1691 fs/ntfs/inode.c 	if (ctx)
ctx              1692 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1750 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx              1825 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(ni, m);
ctx              1826 fs/ntfs/inode.c 	if (!ctx) {
ctx              1832 fs/ntfs/inode.c 	err = ntfs_attr_lookup(AT_ATTRIBUTE_LIST, NULL, 0, 0, 0, NULL, 0, ctx);
ctx              1847 fs/ntfs/inode.c 		a = ctx->attr;
ctx              1911 fs/ntfs/inode.c 					(u8*)ctx->mrec + vol->mft_record_size) {
ctx              1982 fs/ntfs/inode.c 	ntfs_attr_reinit_search_ctx(ctx);
ctx              1988 fs/ntfs/inode.c 			ctx))) {
ctx              1992 fs/ntfs/inode.c 		a = ctx->attr;
ctx              2083 fs/ntfs/inode.c 				ntfs_attr_put_search_ctx(ctx);
ctx              2137 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2154 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2338 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx              2365 fs/ntfs/inode.c 		ctx = NULL;
ctx              2369 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(base_ni, m);
ctx              2370 fs/ntfs/inode.c 	if (unlikely(!ctx)) {
ctx              2378 fs/ntfs/inode.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              2391 fs/ntfs/inode.c 	m = ctx->mrec;
ctx              2392 fs/ntfs/inode.c 	a = ctx->attr;
ctx              2474 fs/ntfs/inode.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2475 fs/ntfs/inode.c 		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2523 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2625 fs/ntfs/inode.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2626 fs/ntfs/inode.c 		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2656 fs/ntfs/inode.c 			ntfs_attr_put_search_ctx(ctx);
ctx              2673 fs/ntfs/inode.c 			vol->cluster_size_bits, -1, ctx);
ctx              2674 fs/ntfs/inode.c 	m = ctx->mrec;
ctx              2675 fs/ntfs/inode.c 	a = ctx->attr;
ctx              2770 fs/ntfs/inode.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2771 fs/ntfs/inode.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2773 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx              2812 fs/ntfs/inode.c 	if (ctx)
ctx              2813 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              2935 fs/ntfs/inode.c 	ntfs_attr_search_ctx *ctx;
ctx              2960 fs/ntfs/inode.c 	ctx = ntfs_attr_get_search_ctx(ni, m);
ctx              2961 fs/ntfs/inode.c 	if (unlikely(!ctx)) {
ctx              2966 fs/ntfs/inode.c 			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              2968 fs/ntfs/inode.c 		ntfs_attr_put_search_ctx(ctx);
ctx              2971 fs/ntfs/inode.c 	si = (STANDARD_INFORMATION*)((u8*)ctx->attr +
ctx              2972 fs/ntfs/inode.c 			le16_to_cpu(ctx->attr->data.resident.value_offset));
ctx              3018 fs/ntfs/inode.c 		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              3019 fs/ntfs/inode.c 		if (!NInoTestSetDirty(ctx->ntfs_ino))
ctx              3020 fs/ntfs/inode.c 			mark_ntfs_record_dirty(ctx->ntfs_ino->page,
ctx              3021 fs/ntfs/inode.c 					ctx->ntfs_ino->page_ofs);
ctx              3023 fs/ntfs/inode.c 	ntfs_attr_put_search_ctx(ctx);
ctx               836 fs/ntfs/lcnalloc.c 		ntfs_attr_search_ctx *ctx, const bool is_rollback)
ctx               866 fs/ntfs/lcnalloc.c 	rl = ntfs_attr_find_vcn_nolock(ni, start_vcn, ctx);
ctx               920 fs/ntfs/lcnalloc.c 			rl = ntfs_attr_find_vcn_nolock(ni, vcn, ctx);
ctx               988 fs/ntfs/lcnalloc.c 	delta = __ntfs_cluster_free(ni, start_vcn, total_freed, ctx, true);
ctx                35 fs/ntfs/lcnalloc.h 		s64 count, ntfs_attr_search_ctx *ctx, const bool is_rollback);
ctx                94 fs/ntfs/lcnalloc.h 		s64 count, ntfs_attr_search_ctx *ctx)
ctx                96 fs/ntfs/lcnalloc.h 	return __ntfs_cluster_free(ni, start_vcn, count, ctx, false);
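lcnalloc.h wraps the freeing primitive so that only the internal rollback path ever passes is_rollback == true; normal callers go through the inline and cannot trigger a recursive rollback. The shape, as the entries above show it (the s64 return type is an assumption inferred from the count parameter, not quoted by the index):

	static inline s64 ntfs_cluster_free(ntfs_inode *ni, const VCN start_vcn,
			s64 count, ntfs_attr_search_ctx *ctx)
	{
		/* External interface: never a rollback. */
		return __ntfs_cluster_free(ni, start_vcn, count, ctx, false);
	}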
ctx              1281 fs/ntfs/mft.c  	ntfs_attr_search_ctx *ctx = NULL;
ctx              1389 fs/ntfs/mft.c  	ctx = ntfs_attr_get_search_ctx(mft_ni, mrec);
ctx              1390 fs/ntfs/mft.c  	if (unlikely(!ctx)) {
ctx              1397 fs/ntfs/mft.c  			0, ctx);
ctx              1405 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1426 fs/ntfs/mft.c  	ret = ntfs_attr_record_resize(ctx->mrec, a, mp_size +
ctx              1466 fs/ntfs/mft.c  		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1467 fs/ntfs/mft.c  		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1468 fs/ntfs/mft.c  		ntfs_attr_reinit_search_ctx(ctx);
ctx              1471 fs/ntfs/mft.c  				0, ctx);
ctx              1477 fs/ntfs/mft.c  		a = ctx->attr;
ctx              1485 fs/ntfs/mft.c  	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1486 fs/ntfs/mft.c  	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1487 fs/ntfs/mft.c  	ntfs_attr_put_search_ctx(ctx);
ctx              1493 fs/ntfs/mft.c  	ntfs_attr_reinit_search_ctx(ctx);
ctx              1496 fs/ntfs/mft.c  			0, ctx)) {
ctx              1502 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              1512 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1542 fs/ntfs/mft.c  		if (ntfs_attr_record_resize(ctx->mrec, a, old_alen)) {
ctx              1547 fs/ntfs/mft.c  		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1548 fs/ntfs/mft.c  		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1550 fs/ntfs/mft.c  	if (ctx)
ctx              1551 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              1578 fs/ntfs/mft.c  	ntfs_attr_search_ctx *ctx;
ctx              1593 fs/ntfs/mft.c  	ctx = ntfs_attr_get_search_ctx(mft_ni, mrec);
ctx              1594 fs/ntfs/mft.c  	if (unlikely(!ctx)) {
ctx              1600 fs/ntfs/mft.c  			mftbmp_ni->name_len, CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              1608 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1627 fs/ntfs/mft.c  	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1628 fs/ntfs/mft.c  	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1629 fs/ntfs/mft.c  	ntfs_attr_put_search_ctx(ctx);
ctx              1646 fs/ntfs/mft.c  	ctx = ntfs_attr_get_search_ctx(mft_ni, mrec);
ctx              1647 fs/ntfs/mft.c  	if (unlikely(!ctx)) {
ctx              1653 fs/ntfs/mft.c  			mftbmp_ni->name_len, CASE_SENSITIVE, 0, NULL, 0, ctx)) {
ctx              1658 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              1663 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1673 fs/ntfs/mft.c  	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1674 fs/ntfs/mft.c  	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1675 fs/ntfs/mft.c  	ntfs_attr_put_search_ctx(ctx);
ctx              1717 fs/ntfs/mft.c  	ntfs_attr_search_ctx *ctx = NULL;
ctx              1822 fs/ntfs/mft.c  	ctx = ntfs_attr_get_search_ctx(mft_ni, mrec);
ctx              1823 fs/ntfs/mft.c  	if (unlikely(!ctx)) {
ctx              1829 fs/ntfs/mft.c  			CASE_SENSITIVE, rl[1].vcn, NULL, 0, ctx);
ctx              1837 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1858 fs/ntfs/mft.c  	ret = ntfs_attr_record_resize(ctx->mrec, a, mp_size +
ctx              1905 fs/ntfs/mft.c  		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1906 fs/ntfs/mft.c  		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1907 fs/ntfs/mft.c  		ntfs_attr_reinit_search_ctx(ctx);
ctx              1910 fs/ntfs/mft.c  				ctx);
ctx              1916 fs/ntfs/mft.c  		a = ctx->attr;
ctx              1924 fs/ntfs/mft.c  	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1925 fs/ntfs/mft.c  	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1926 fs/ntfs/mft.c  	ntfs_attr_put_search_ctx(ctx);
ctx              1932 fs/ntfs/mft.c  	ntfs_attr_reinit_search_ctx(ctx);
ctx              1934 fs/ntfs/mft.c  			CASE_SENSITIVE, rl[1].vcn, NULL, 0, ctx)) {
ctx              1940 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              1950 fs/ntfs/mft.c  	ctx->attr->data.non_resident.highest_vcn =
ctx              1953 fs/ntfs/mft.c  	if (ntfs_cluster_free(mft_ni, old_last_vcn, -1, ctx) < 0) {
ctx              1958 fs/ntfs/mft.c  	a = ctx->attr;
ctx              1964 fs/ntfs/mft.c  	if (mp_rebuilt && !IS_ERR(ctx->mrec)) {
ctx              1974 fs/ntfs/mft.c  		if (ntfs_attr_record_resize(ctx->mrec, a, old_alen)) {
ctx              1979 fs/ntfs/mft.c  		flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              1980 fs/ntfs/mft.c  		mark_mft_record_dirty(ctx->ntfs_ino);
ctx              1981 fs/ntfs/mft.c  	} else if (IS_ERR(ctx->mrec)) {
ctx              1986 fs/ntfs/mft.c  	if (ctx)
ctx              1987 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              2246 fs/ntfs/mft.c  	ntfs_attr_search_ctx *ctx;
ctx              2460 fs/ntfs/mft.c  	ctx = ntfs_attr_get_search_ctx(mft_ni, m);
ctx              2461 fs/ntfs/mft.c  	if (unlikely(!ctx)) {
ctx              2468 fs/ntfs/mft.c  			CASE_SENSITIVE, 0, NULL, 0, ctx);
ctx              2472 fs/ntfs/mft.c  		ntfs_attr_put_search_ctx(ctx);
ctx              2476 fs/ntfs/mft.c  	a = ctx->attr;
ctx              2484 fs/ntfs/mft.c  	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx              2485 fs/ntfs/mft.c  	mark_mft_record_dirty(ctx->ntfs_ino);
ctx              2486 fs/ntfs/mft.c  	ntfs_attr_put_search_ctx(ctx);
ctx               165 fs/ntfs/namei.c 	ntfs_attr_search_ctx *ctx;
ctx               189 fs/ntfs/namei.c 			ctx = NULL;
ctx               192 fs/ntfs/namei.c 		ctx = ntfs_attr_get_search_ctx(ni, m);
ctx               193 fs/ntfs/namei.c 		if (unlikely(!ctx)) {
ctx               202 fs/ntfs/namei.c 					NULL, 0, ctx);
ctx               212 fs/ntfs/namei.c 			a = ctx->attr;
ctx               219 fs/ntfs/namei.c 			fn = (FILE_NAME_ATTR*)((u8*)ctx->attr + le16_to_cpu(
ctx               220 fs/ntfs/namei.c 					ctx->attr->data.resident.value_offset));
ctx               231 fs/ntfs/namei.c 		ntfs_attr_put_search_ctx(ctx);
ctx               235 fs/ntfs/namei.c 	ctx = NULL;
ctx               252 fs/ntfs/namei.c 	if (ctx)
ctx               253 fs/ntfs/namei.c 		ntfs_attr_put_search_ctx(ctx);
ctx               291 fs/ntfs/namei.c 	ntfs_attr_search_ctx *ctx;
ctx               303 fs/ntfs/namei.c 	ctx = ntfs_attr_get_search_ctx(ni, mrec);
ctx               304 fs/ntfs/namei.c 	if (unlikely(!ctx)) {
ctx               310 fs/ntfs/namei.c 			0, ctx);
ctx               312 fs/ntfs/namei.c 		ntfs_attr_put_search_ctx(ctx);
ctx               320 fs/ntfs/namei.c 	attr = ctx->attr;
ctx               331 fs/ntfs/namei.c 	ntfs_attr_put_search_ctx(ctx);
ctx               366 fs/ntfs/super.c 	ntfs_attr_search_ctx *ctx;
ctx               379 fs/ntfs/super.c 	ctx = ntfs_attr_get_search_ctx(ni, m);
ctx               380 fs/ntfs/super.c 	if (!ctx) {
ctx               385 fs/ntfs/super.c 			ctx);
ctx               388 fs/ntfs/super.c 	vi = (VOLUME_INFORMATION*)((u8*)ctx->attr +
ctx               389 fs/ntfs/super.c 			le16_to_cpu(ctx->attr->data.resident.value_offset));
ctx               391 fs/ntfs/super.c 	flush_dcache_mft_record_page(ctx->ntfs_ino);
ctx               392 fs/ntfs/super.c 	mark_mft_record_dirty(ctx->ntfs_ino);
ctx               393 fs/ntfs/super.c 	ntfs_attr_put_search_ctx(ctx);
ctx               399 fs/ntfs/super.c 	if (ctx)
ctx               400 fs/ntfs/super.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1763 fs/ntfs/super.c 	ntfs_attr_search_ctx *ctx;
ctx              1861 fs/ntfs/super.c 	if (!(ctx = ntfs_attr_get_search_ctx(NTFS_I(vol->vol_ino), m))) {
ctx              1866 fs/ntfs/super.c 			ctx) || ctx->attr->non_resident || ctx->attr->flags) {
ctx              1868 fs/ntfs/super.c 		ntfs_attr_put_search_ctx(ctx);
ctx              1873 fs/ntfs/super.c 	vi = (VOLUME_INFORMATION*)((char*)ctx->attr +
ctx              1874 fs/ntfs/super.c 			le16_to_cpu(ctx->attr->data.resident.value_offset));
ctx              1876 fs/ntfs/super.c 	if ((u8*)vi < (u8*)ctx->attr || (u8*)vi +
ctx              1877 fs/ntfs/super.c 			le32_to_cpu(ctx->attr->data.resident.value_length) >
ctx              1878 fs/ntfs/super.c 			(u8*)ctx->attr + le32_to_cpu(ctx->attr->length))
ctx              1884 fs/ntfs/super.c 	ntfs_attr_put_search_ctx(ctx);
ctx              1737 fs/ocfs2/dir.c 				    struct dir_context *ctx)
ctx              1740 fs/ocfs2/dir.c 	unsigned long offset = ctx->pos;
ctx              1756 fs/ocfs2/dir.c 	while (ctx->pos < i_size_read(inode)) {
ctx              1776 fs/ocfs2/dir.c 			ctx->pos = offset = i;
ctx              1780 fs/ocfs2/dir.c 		de = (struct ocfs2_dir_entry *) (data->id_data + ctx->pos);
ctx              1781 fs/ocfs2/dir.c 		if (!ocfs2_check_dir_entry(inode, de, di_bh, ctx->pos)) {
ctx              1783 fs/ocfs2/dir.c 			ctx->pos = i_size_read(inode);
ctx              1788 fs/ocfs2/dir.c 			if (!dir_emit(ctx, de->name, de->name_len,
ctx              1793 fs/ocfs2/dir.c 		ctx->pos += le16_to_cpu(de->rec_len);
ctx              1806 fs/ocfs2/dir.c 				    struct dir_context *ctx,
ctx              1819 fs/ocfs2/dir.c 	offset = ctx->pos & (sb->s_blocksize - 1);
ctx              1821 fs/ocfs2/dir.c 	while (ctx->pos < i_size_read(inode)) {
ctx              1822 fs/ocfs2/dir.c 		blk = ctx->pos >> sb->s_blocksize_bits;
ctx              1825 fs/ocfs2/dir.c 			ctx->pos += sb->s_blocksize - offset;
ctx              1866 fs/ocfs2/dir.c 			ctx->pos = (ctx->pos & ~(sb->s_blocksize - 1))
ctx              1871 fs/ocfs2/dir.c 		while (ctx->pos < i_size_read(inode)
ctx              1877 fs/ocfs2/dir.c 				ctx->pos = (ctx->pos | (sb->s_blocksize - 1)) + 1;
ctx              1881 fs/ocfs2/dir.c 				if (!dir_emit(ctx, de->name,
ctx              1891 fs/ocfs2/dir.c 			ctx->pos += le16_to_cpu(de->rec_len);
ctx              1903 fs/ocfs2/dir.c 				 struct dir_context *ctx,
ctx              1907 fs/ocfs2/dir.c 		return ocfs2_dir_foreach_blk_id(inode, f_version, ctx);
ctx              1908 fs/ocfs2/dir.c 	return ocfs2_dir_foreach_blk_el(inode, f_version, ctx, persist);
ctx              1915 fs/ocfs2/dir.c int ocfs2_dir_foreach(struct inode *inode, struct dir_context *ctx)
ctx              1918 fs/ocfs2/dir.c 	ocfs2_dir_foreach_blk(inode, &version, ctx, true);
ctx              1926 fs/ocfs2/dir.c int ocfs2_readdir(struct file *file, struct dir_context *ctx)
ctx              1950 fs/ocfs2/dir.c 	error = ocfs2_dir_foreach_blk(inode, &file->f_version, ctx, false);
ctx              2031 fs/ocfs2/dir.c 	struct dir_context ctx;
ctx              2037 fs/ocfs2/dir.c static int ocfs2_empty_dir_filldir(struct dir_context *ctx, const char *name,
ctx              2042 fs/ocfs2/dir.c 		container_of(ctx, struct ocfs2_empty_dir_priv, ctx);
ctx              2119 fs/ocfs2/dir.c 		.ctx.actor = ocfs2_empty_dir_filldir,
ctx              2132 fs/ocfs2/dir.c 	ret = ocfs2_dir_foreach(inode, &priv.ctx);
ctx                81 fs/ocfs2/dir.h int ocfs2_readdir(struct file *file, struct dir_context *ctx);
ctx                82 fs/ocfs2/dir.h int ocfs2_dir_foreach(struct inode *inode, struct dir_context *ctx);
ctx              2028 fs/ocfs2/journal.c 	struct dir_context	ctx;
ctx              2034 fs/ocfs2/journal.c static int ocfs2_orphan_filldir(struct dir_context *ctx, const char *name,
ctx              2039 fs/ocfs2/journal.c 		container_of(ctx, struct ocfs2_orphan_filldir_priv, ctx);
ctx              2087 fs/ocfs2/journal.c 		.ctx.actor = ocfs2_orphan_filldir,
ctx              2109 fs/ocfs2/journal.c 	status = ocfs2_dir_foreach(orphan_dir_inode, &priv.ctx);
ctx               329 fs/omfs/dir.c  static bool omfs_fill_chain(struct inode *dir, struct dir_context *ctx,
ctx               360 fs/omfs/dir.c  		if (!dir_emit(ctx, oi->i_name,
ctx               367 fs/omfs/dir.c  		ctx->pos++;
ctx               407 fs/omfs/dir.c  static int omfs_readdir(struct file *file, struct dir_context *ctx)
ctx               415 fs/omfs/dir.c  	if (ctx->pos >> 32)
ctx               418 fs/omfs/dir.c  	if (ctx->pos < 1 << 20) {
ctx               419 fs/omfs/dir.c  		if (!dir_emit_dots(file, ctx))
ctx               421 fs/omfs/dir.c  		ctx->pos = 1 << 20;
ctx               427 fs/omfs/dir.c  	hchain = (ctx->pos >> 20) - 1;
ctx               428 fs/omfs/dir.c  	hindex = ctx->pos & 0xfffff;
ctx               438 fs/omfs/dir.c  		if (!omfs_fill_chain(dir, ctx, fsblock, hindex))
ctx               441 fs/omfs/dir.c  		ctx->pos = (hchain+2) << 20;
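omfs_readdir() packs a two-level cursor into the single loff_t ctx->pos: values below 1 << 20 mean the dot entries are still pending, and above that the upper bits select the hash chain while the low 20 bits index into it (positions at or above 2^32 are rejected outright). A small sketch of the decode, with omfs_unpack_pos() as a hypothetical helper name:

	/* pos < (1 << 20): still emitting "." and "..";
	 * otherwise chain = (pos >> 20) - 1, index = pos & 0xfffff. */
	static void omfs_unpack_pos(loff_t pos, unsigned int *hchain,
				    unsigned int *hindex)
	{
		*hchain = (pos >> 20) - 1;
		*hindex = pos & 0xfffff;
	}

	/* Moving on re-packs the cursor at the start of the next chain: */
	ctx->pos = (loff_t)(hchain + 2) << 20;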
ctx               265 fs/openpromfs/inode.c static int openpromfs_readdir(struct file *file, struct dir_context *ctx)
ctx               276 fs/openpromfs/inode.c 	if (ctx->pos == 0) {
ctx               277 fs/openpromfs/inode.c 		if (!dir_emit(ctx, ".", 1, inode->i_ino, DT_DIR))
ctx               279 fs/openpromfs/inode.c 		ctx->pos = 1;
ctx               281 fs/openpromfs/inode.c 	if (ctx->pos == 1) {
ctx               282 fs/openpromfs/inode.c 		if (!dir_emit(ctx, "..", 2,
ctx               287 fs/openpromfs/inode.c 		ctx->pos = 2;
ctx               289 fs/openpromfs/inode.c 	i = ctx->pos - 2;
ctx               298 fs/openpromfs/inode.c 		if (!dir_emit(ctx,
ctx               304 fs/openpromfs/inode.c 		ctx->pos++;
ctx               315 fs/openpromfs/inode.c 		if (!dir_emit(ctx, prop->name, strlen(prop->name),
ctx               319 fs/openpromfs/inode.c 		ctx->pos++;
ctx               190 fs/orangefs/dir.c     struct dir_context *ctx)
ctx               197 fs/orangefs/dir.c 	i = ctx->pos & ~PART_MASK;
ctx               226 fs/orangefs/dir.c 		if (!dir_emit(ctx, s, *len,
ctx               233 fs/orangefs/dir.c 		ctx->pos = (ctx->pos & PART_MASK) | i;
ctx               243 fs/orangefs/dir.c     struct dir_context *ctx)
ctx               248 fs/orangefs/dir.c 	count = ((ctx->pos & PART_MASK) >> PART_SHIFT) - 1;
ctx               263 fs/orangefs/dir.c 		r = fill_from_part(part, ctx);
ctx               274 fs/orangefs/dir.c 			ctx->pos = (ctx->pos & PART_MASK) +
ctx               305 fs/orangefs/dir.c     struct dir_context *ctx)
ctx               319 fs/orangefs/dir.c 	if (ctx->pos == 0) {
ctx               320 fs/orangefs/dir.c 		if (!dir_emit_dot(file, ctx))
ctx               322 fs/orangefs/dir.c 		ctx->pos++;
ctx               324 fs/orangefs/dir.c 	if (ctx->pos == 1) {
ctx               325 fs/orangefs/dir.c 		if (!dir_emit_dotdot(file, ctx))
ctx               327 fs/orangefs/dir.c 		ctx->pos = 1 << PART_SHIFT;
ctx               334 fs/orangefs/dir.c 	if ((ctx->pos & PART_MASK) == 0)
ctx               344 fs/orangefs/dir.c 	    ctx->pos > od->end) {
ctx               349 fs/orangefs/dir.c 	if (od->token == ORANGEFS_ITERATE_END && ctx->pos > od->end)
ctx               353 fs/orangefs/dir.c 	if (ctx->pos < od->end) {
ctx               354 fs/orangefs/dir.c 		r = orangefs_dir_fill(oi, od, dentry, ctx);
ctx               364 fs/orangefs/dir.c 		r = orangefs_dir_fill(oi, od, dentry, ctx);
ctx               793 fs/overlayfs/copy_up.c 	struct ovl_copy_up_ctx ctx = {
ctx               799 fs/overlayfs/copy_up.c 	if (WARN_ON(!ctx.workdir))
ctx               802 fs/overlayfs/copy_up.c 	ovl_path_lower(dentry, &ctx.lowerpath);
ctx               803 fs/overlayfs/copy_up.c 	err = vfs_getattr(&ctx.lowerpath, &ctx.stat,
ctx               808 fs/overlayfs/copy_up.c 	ctx.metacopy = ovl_need_meta_copy_up(dentry, ctx.stat.mode, flags);
ctx               812 fs/overlayfs/copy_up.c 		ctx.destdir = parentpath.dentry;
ctx               813 fs/overlayfs/copy_up.c 		ctx.destname = dentry->d_name;
ctx               815 fs/overlayfs/copy_up.c 		err = vfs_getattr(&parentpath, &ctx.pstat,
ctx               824 fs/overlayfs/copy_up.c 		ctx.stat.size = 0;
ctx               826 fs/overlayfs/copy_up.c 	if (S_ISLNK(ctx.stat.mode)) {
ctx               827 fs/overlayfs/copy_up.c 		ctx.link = vfs_get_link(ctx.lowerpath.dentry, &done);
ctx               828 fs/overlayfs/copy_up.c 		if (IS_ERR(ctx.link))
ctx               829 fs/overlayfs/copy_up.c 			return PTR_ERR(ctx.link);
ctx               839 fs/overlayfs/copy_up.c 			err = ovl_do_copy_up(&ctx);
ctx               841 fs/overlayfs/copy_up.c 			err = ovl_link_up(&ctx);
ctx               843 fs/overlayfs/copy_up.c 			err = ovl_copy_up_meta_inode_data(&ctx);
ctx                63 fs/overlayfs/namei.c static int ovl_acceptable(void *ctx, struct dentry *dentry)
ctx                77 fs/overlayfs/namei.c 	return is_subdir(dentry, ((struct vfsmount *)ctx)->mnt_root);
ctx                39 fs/overlayfs/readdir.c 	struct dir_context ctx;
ctx               253 fs/overlayfs/readdir.c static int ovl_fill_merge(struct dir_context *ctx, const char *name,
ctx               258 fs/overlayfs/readdir.c 		container_of(ctx, struct ovl_readdir_data, ctx);
ctx               305 fs/overlayfs/readdir.c 	rdd->ctx.pos = 0;
ctx               309 fs/overlayfs/readdir.c 		err = iterate_dir(realfile, &rdd->ctx);
ctx               361 fs/overlayfs/readdir.c 		.ctx.actor = ovl_fill_merge,
ctx               532 fs/overlayfs/readdir.c static int ovl_fill_plain(struct dir_context *ctx, const char *name,
ctx               538 fs/overlayfs/readdir.c 		container_of(ctx, struct ovl_readdir_data, ctx);
ctx               558 fs/overlayfs/readdir.c 		.ctx.actor = ovl_fill_plain,
ctx               644 fs/overlayfs/readdir.c 	struct dir_context ctx;
ctx               650 fs/overlayfs/readdir.c static int ovl_fill_real(struct dir_context *ctx, const char *name,
ctx               655 fs/overlayfs/readdir.c 		container_of(ctx, struct ovl_readdir_translate, ctx);
ctx               689 fs/overlayfs/readdir.c static int ovl_iterate_real(struct file *file, struct dir_context *ctx)
ctx               696 fs/overlayfs/readdir.c 		.ctx.actor = ovl_fill_real,
ctx               697 fs/overlayfs/readdir.c 		.orig_ctx = ctx,
ctx               723 fs/overlayfs/readdir.c 	err = iterate_dir(od->realfile, &rdt.ctx);
ctx               724 fs/overlayfs/readdir.c 	ctx->pos = rdt.ctx.pos;
ctx               730 fs/overlayfs/readdir.c static int ovl_iterate(struct file *file, struct dir_context *ctx)
ctx               737 fs/overlayfs/readdir.c 	if (!ctx->pos)
ctx               750 fs/overlayfs/readdir.c 			return ovl_iterate_real(file, ctx);
ctx               752 fs/overlayfs/readdir.c 		return iterate_dir(od->realfile, ctx);
ctx               763 fs/overlayfs/readdir.c 		ovl_seek_cursor(od, ctx->pos);
ctx               774 fs/overlayfs/readdir.c 			if (!dir_emit(ctx, p->name, p->len, p->ino, p->type))
ctx               778 fs/overlayfs/readdir.c 		ctx->pos++;
ctx               986 fs/overlayfs/readdir.c static int ovl_check_d_type(struct dir_context *ctx, const char *name,
ctx               991 fs/overlayfs/readdir.c 		container_of(ctx, struct ovl_readdir_data, ctx);
ctx              1011 fs/overlayfs/readdir.c 		.ctx.actor = ovl_check_d_type,
ctx              1030 fs/overlayfs/readdir.c 		.ctx.actor = ovl_fill_merge,
ctx              1095 fs/overlayfs/readdir.c 		.ctx.actor = ovl_fill_merge,
ctx              1189 fs/pipe.c      	struct pseudo_fs_context *ctx = init_pseudo(fc, PIPEFS_MAGIC);
ctx              1190 fs/pipe.c      	if (!ctx)
ctx              1192 fs/pipe.c      	ctx->ops = &pipefs_ops;
ctx              1193 fs/pipe.c      	ctx->dops = &pipefs_dentry_operations;
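
fs/pipe.c above brings up pipefs with the pseudo-filesystem helper from include/linux/pseudo_fs.h. A minimal sketch of the same init_fs_context pattern for a hypothetical pseudo filesystem (the myfs names and magic value are assumptions):

#include <linux/fs.h>
#include <linux/fs_context.h>
#include <linux/pseudo_fs.h>

#define MYFS_MAGIC 0x4d594653	/* hypothetical magic number */

static const struct super_operations myfs_ops = { };	/* defaults, for illustration */

/* Fill a pseudo-fs context the way pipefs does: init_pseudo() allocates
 * the context and wires up a default fill_super keyed on the magic. */
static int myfs_init_fs_context(struct fs_context *fc)
{
	struct pseudo_fs_context *ctx = init_pseudo(fc, MYFS_MAGIC);

	if (!ctx)
		return -ENOMEM;
	ctx->ops = &myfs_ops;
	return 0;
}

static struct file_system_type myfs_type = {
	.name			= "myfs",
	.init_fs_context	= myfs_init_fs_context,
	.kill_sb		= kill_anon_super,
};
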
ctx              1882 fs/proc/base.c bool proc_fill_cache(struct file *file, struct dir_context *ctx,
ctx              1915 fs/proc/base.c 	return dir_emit(ctx, name, len, ino, type);
ctx              2163 fs/proc/base.c proc_map_files_readdir(struct file *file, struct dir_context *ctx)
ctx              2185 fs/proc/base.c 	if (!dir_emit_dots(file, ctx))
ctx              2213 fs/proc/base.c 		if (++pos <= ctx->pos)
ctx              2237 fs/proc/base.c 		if (!proc_fill_cache(file, ctx,
ctx              2243 fs/proc/base.c 		ctx->pos++;
ctx              2504 fs/proc/base.c static int proc_pident_readdir(struct file *file, struct dir_context *ctx,
ctx              2513 fs/proc/base.c 	if (!dir_emit_dots(file, ctx))
ctx              2516 fs/proc/base.c 	if (ctx->pos >= nents + 2)
ctx              2519 fs/proc/base.c 	for (p = ents + (ctx->pos - 2); p < ents + nents; p++) {
ctx              2520 fs/proc/base.c 		if (!proc_fill_cache(file, ctx, p->name, p->len,
ctx              2523 fs/proc/base.c 		ctx->pos++;
ctx              2614 fs/proc/base.c 			     struct dir_context *ctx) \
ctx              2616 fs/proc/base.c 	return proc_pident_readdir(filp, ctx, \
ctx              2661 fs/proc/base.c static int proc_attr_dir_readdir(struct file *file, struct dir_context *ctx)
ctx              2663 fs/proc/base.c 	return proc_pident_readdir(file, ctx, 
ctx              3102 fs/proc/base.c static int proc_tgid_base_readdir(struct file *file, struct dir_context *ctx)
ctx              3104 fs/proc/base.c 	return proc_pident_readdir(file, ctx,
ctx              3314 fs/proc/base.c int proc_pid_readdir(struct file *file, struct dir_context *ctx)
ctx              3318 fs/proc/base.c 	loff_t pos = ctx->pos;
ctx              3325 fs/proc/base.c 		if (!dir_emit(ctx, "self", 4, inode->i_ino, DT_LNK))
ctx              3327 fs/proc/base.c 		ctx->pos = pos = pos + 1;
ctx              3331 fs/proc/base.c 		if (!dir_emit(ctx, "thread-self", 11, inode->i_ino, DT_LNK))
ctx              3333 fs/proc/base.c 		ctx->pos = pos = pos + 1;
ctx              3348 fs/proc/base.c 		ctx->pos = iter.tgid + TGID_OFFSET;
ctx              3349 fs/proc/base.c 		if (!proc_fill_cache(file, ctx, name, len,
ctx              3355 fs/proc/base.c 	ctx->pos = PID_MAX_LIMIT + TGID_OFFSET;
ctx              3492 fs/proc/base.c static int proc_tid_base_readdir(struct file *file, struct dir_context *ctx)
ctx              3494 fs/proc/base.c 	return proc_pident_readdir(file, ctx,
ctx              3649 fs/proc/base.c static int proc_task_readdir(struct file *file, struct dir_context *ctx)
ctx              3659 fs/proc/base.c 	if (!dir_emit_dots(file, ctx))
ctx              3668 fs/proc/base.c 	for (task = first_tid(proc_pid(inode), tid, ctx->pos - 2, ns);
ctx              3670 fs/proc/base.c 	     task = next_tid(task), ctx->pos++) {
ctx              3675 fs/proc/base.c 		if (!proc_fill_cache(file, ctx, name, len,
ctx               228 fs/proc/fd.c   static int proc_readfd_common(struct file *file, struct dir_context *ctx,
ctx               238 fs/proc/fd.c   	if (!dir_emit_dots(file, ctx))
ctx               245 fs/proc/fd.c   	for (fd = ctx->pos - 2;
ctx               247 fs/proc/fd.c   	     fd++, ctx->pos++) {
ctx               261 fs/proc/fd.c   		if (!proc_fill_cache(file, ctx,
ctx               276 fs/proc/fd.c   static int proc_readfd(struct file *file, struct dir_context *ctx)
ctx               278 fs/proc/fd.c   	return proc_readfd_common(file, ctx, proc_fd_instantiate);
ctx               348 fs/proc/fd.c   static int proc_readfdinfo(struct file *file, struct dir_context *ctx)
ctx               350 fs/proc/fd.c   	return proc_readfd_common(file, ctx,
ctx               282 fs/proc/generic.c int proc_readdir_de(struct file *file, struct dir_context *ctx,
ctx               287 fs/proc/generic.c 	if (!dir_emit_dots(file, ctx))
ctx               290 fs/proc/generic.c 	i = ctx->pos - 2;
ctx               308 fs/proc/generic.c 		if (!dir_emit(ctx, de->name, de->namelen,
ctx               313 fs/proc/generic.c 		ctx->pos++;
ctx               323 fs/proc/generic.c int proc_readdir(struct file *file, struct dir_context *ctx)
ctx               327 fs/proc/generic.c 	return proc_readdir_de(file, ctx, PDE(inode));
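
The proc readdir implementations above share one shape: dir_emit_dots() handles "." and "..", then a table walk maps ctx->pos - 2 to an index and advances ctx->pos per emitted entry so a partial read can resume. A minimal sketch of that shape over a static name table (myfs_readdir and myfs_names are hypothetical):

#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/string.h>

static const char *const myfs_names[] = { "alpha", "beta", "gamma" };

/* Hypothetical .iterate_shared implementation following the pattern of
 * proc_pident_readdir()/proc_readdir_de() above. */
static int myfs_readdir(struct file *file, struct dir_context *ctx)
{
	unsigned int i;

	if (!dir_emit_dots(file, ctx))	/* emits "." and "..", bumps ctx->pos to 2 */
		return 0;

	for (i = ctx->pos - 2; i < ARRAY_SIZE(myfs_names); i++) {
		if (!dir_emit(ctx, myfs_names[i], strlen(myfs_names[i]),
			      i + 100 /* illustrative inode number */, DT_REG))
			return 0;	/* buffer full; ctx->pos resumes here next call */
		ctx->pos++;
	}
	return 0;
}
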
ctx               110 fs/proc/namespaces.c static int proc_ns_dir_readdir(struct file *file, struct dir_context *ctx)
ctx               118 fs/proc/namespaces.c 	if (!dir_emit_dots(file, ctx))
ctx               120 fs/proc/namespaces.c 	if (ctx->pos >= 2 + ARRAY_SIZE(ns_entries))
ctx               122 fs/proc/namespaces.c 	entry = ns_entries + (ctx->pos - 2);
ctx               126 fs/proc/namespaces.c 		if (!proc_fill_cache(file, ctx, ops->name, strlen(ops->name),
ctx               129 fs/proc/namespaces.c 		ctx->pos++;
ctx               312 fs/proc/proc_net.c static int proc_tgid_net_readdir(struct file *file, struct dir_context *ctx)
ctx               320 fs/proc/proc_net.c 		ret = proc_readdir_de(file, ctx, net->proc_net);
ctx               689 fs/proc/proc_sysctl.c 				struct dir_context *ctx,
ctx               734 fs/proc/proc_sysctl.c 	return dir_emit(ctx, qname.name, qname.len, ino, type);
ctx               738 fs/proc/proc_sysctl.c 				    struct dir_context *ctx,
ctx               752 fs/proc/proc_sysctl.c 	ret = proc_sys_fill_cache(file, ctx, head, table);
ctx               760 fs/proc/proc_sysctl.c 		struct dir_context *ctx)
ctx               764 fs/proc/proc_sysctl.c 	if ((*pos)++ < ctx->pos)
ctx               768 fs/proc/proc_sysctl.c 		res = proc_sys_link_fill_cache(file, ctx, head, table);
ctx               770 fs/proc/proc_sysctl.c 		res = proc_sys_fill_cache(file, ctx, head, table);
ctx               773 fs/proc/proc_sysctl.c 		ctx->pos = *pos;
ctx               778 fs/proc/proc_sysctl.c static int proc_sys_readdir(struct file *file, struct dir_context *ctx)
ctx               791 fs/proc/proc_sysctl.c 	if (!dir_emit_dots(file, ctx))
ctx               797 fs/proc/proc_sysctl.c 		if (!scan(h, entry, &pos, file, ctx)) {
ctx                57 fs/proc/root.c 	struct proc_fs_context *ctx = fc->fs_private;
ctx                67 fs/proc/root.c 		ctx->gid = result.uint_32;
ctx                71 fs/proc/root.c 		ctx->hidepid = result.uint_32;
ctx                72 fs/proc/root.c 		if (ctx->hidepid < HIDEPID_OFF ||
ctx                73 fs/proc/root.c 		    ctx->hidepid > HIDEPID_INVISIBLE)
ctx                81 fs/proc/root.c 	ctx->mask |= 1 << opt;
ctx                90 fs/proc/root.c 	struct proc_fs_context *ctx = fc->fs_private;
ctx                92 fs/proc/root.c 	if (ctx->mask & (1 << Opt_gid))
ctx                93 fs/proc/root.c 		pid_ns->pid_gid = make_kgid(user_ns, ctx->gid);
ctx                94 fs/proc/root.c 	if (ctx->mask & (1 << Opt_hidepid))
ctx                95 fs/proc/root.c 		pid_ns->hide_pid = ctx->hidepid;
ctx               158 fs/proc/root.c 	struct proc_fs_context *ctx = fc->fs_private;
ctx               160 fs/proc/root.c 	return get_tree_keyed(fc, proc_fill_super, ctx->pid_ns);
ctx               165 fs/proc/root.c 	struct proc_fs_context *ctx = fc->fs_private;
ctx               167 fs/proc/root.c 	put_pid_ns(ctx->pid_ns);
ctx               168 fs/proc/root.c 	kfree(ctx);
ctx               180 fs/proc/root.c 	struct proc_fs_context *ctx;
ctx               182 fs/proc/root.c 	ctx = kzalloc(sizeof(struct proc_fs_context), GFP_KERNEL);
ctx               183 fs/proc/root.c 	if (!ctx)
ctx               186 fs/proc/root.c 	ctx->pid_ns = get_pid_ns(task_active_pid_ns(current));
ctx               188 fs/proc/root.c 	fc->user_ns = get_user_ns(ctx->pid_ns->user_ns);
ctx               189 fs/proc/root.c 	fc->fs_private = ctx;
ctx               254 fs/proc/root.c static int proc_root_readdir(struct file *file, struct dir_context *ctx)
ctx               256 fs/proc/root.c 	if (ctx->pos < FIRST_PROCESS_ENTRY) {
ctx               257 fs/proc/root.c 		int error = proc_readdir(file, ctx);
ctx               260 fs/proc/root.c 		ctx->pos = FIRST_PROCESS_ENTRY;
ctx               263 fs/proc/root.c 	return proc_pid_readdir(file, ctx);
ctx               303 fs/proc/root.c 	struct proc_fs_context *ctx;
ctx               316 fs/proc/root.c 	ctx = fc->fs_private;
ctx               317 fs/proc/root.c 	if (ctx->pid_ns != ns) {
ctx               318 fs/proc/root.c 		put_pid_ns(ctx->pid_ns);
ctx               320 fs/proc/root.c 		ctx->pid_ns = ns;
ctx               289 fs/pstore/platform.c 	struct crypto_comp *ctx;
ctx               320 fs/pstore/platform.c 	ctx = crypto_alloc_comp(zbackend->name, 0, 0);
ctx               321 fs/pstore/platform.c 	if (IS_ERR_OR_NULL(ctx)) {
ctx               324 fs/pstore/platform.c 		       PTR_ERR(ctx));
ctx               329 fs/pstore/platform.c 	tfm = ctx;
ctx                18 fs/qnx4/dir.c  static int qnx4_readdir(struct file *file, struct dir_context *ctx)
ctx                30 fs/qnx4/dir.c  	QNX4DEBUG((KERN_INFO "pos                 = %ld\n", (long) ctx->pos));
ctx                32 fs/qnx4/dir.c  	while (ctx->pos < inode->i_size) {
ctx                33 fs/qnx4/dir.c  		blknum = qnx4_block_map(inode, ctx->pos >> QNX4_BLOCK_SIZE_BITS);
ctx                39 fs/qnx4/dir.c  		ix = (ctx->pos >> QNX4_DIR_ENTRY_SIZE_BITS) % QNX4_INODES_PER_BLOCK;
ctx                40 fs/qnx4/dir.c  		for (; ix < QNX4_INODES_PER_BLOCK; ix++, ctx->pos += QNX4_DIR_ENTRY_SIZE) {
ctx                61 fs/qnx4/dir.c  			if (!dir_emit(ctx, de->di_fname, size, ino, DT_UNKNOWN)) {
ctx                64 fs/qnx6/dir.c  			struct dir_context *ctx,
ctx               102 fs/qnx6/dir.c  	if (!dir_emit(ctx, lf->lf_fname, lf_size, de_inode, DT_UNKNOWN)) {
ctx               112 fs/qnx6/dir.c  static int qnx6_readdir(struct file *file, struct dir_context *ctx)
ctx               117 fs/qnx6/dir.c  	loff_t pos = ctx->pos & ~(QNX6_DIR_ENTRY_SIZE - 1);
ctx               123 fs/qnx6/dir.c  	ctx->pos = pos;
ctx               124 fs/qnx6/dir.c  	if (ctx->pos >= inode->i_size)
ctx               135 fs/qnx6/dir.c  			ctx->pos = (n + 1) << PAGE_SHIFT;
ctx               139 fs/qnx6/dir.c  		for (; i < limit; i++, de++, ctx->pos += QNX6_DIR_ENTRY_SIZE) {
ctx               152 fs/qnx6/dir.c  					ctx, no_inode)) {
ctx               160 fs/qnx6/dir.c  				if (!dir_emit(ctx, de->de_fname, size,
ctx                40 fs/readdir.c   int iterate_dir(struct file *file, struct dir_context *ctx)
ctx                63 fs/readdir.c   		ctx->pos = file->f_pos;
ctx                65 fs/readdir.c   			res = file->f_op->iterate_shared(file, ctx);
ctx                67 fs/readdir.c   			res = file->f_op->iterate(file, ctx);
ctx                68 fs/readdir.c   		file->f_pos = ctx->pos;
ctx               138 fs/readdir.c   	struct dir_context ctx;
ctx               143 fs/readdir.c   static int fillonedir(struct dir_context *ctx, const char *name, int namlen,
ctx               147 fs/readdir.c   		container_of(ctx, struct readdir_callback, ctx);
ctx               182 fs/readdir.c   		.ctx.actor = fillonedir,
ctx               189 fs/readdir.c   	error = iterate_dir(f.file, &buf.ctx);
ctx               211 fs/readdir.c   	struct dir_context ctx;
ctx               218 fs/readdir.c   static int filldir(struct dir_context *ctx, const char *name, int namlen,
ctx               223 fs/readdir.c   		container_of(ctx, struct getdents_callback, ctx);
ctx               272 fs/readdir.c   		.ctx.actor = filldir,
ctx               285 fs/readdir.c   	error = iterate_dir(f.file, &buf.ctx);
ctx               292 fs/readdir.c   		if (put_user(buf.ctx.pos, &lastdirent->d_off))
ctx               302 fs/readdir.c   	struct dir_context ctx;
ctx               309 fs/readdir.c   static int filldir64(struct dir_context *ctx, const char *name, int namlen,
ctx               314 fs/readdir.c   		container_of(ctx, struct getdents_callback64, ctx);
ctx               358 fs/readdir.c   		.ctx.actor = filldir64,
ctx               371 fs/readdir.c   	error = iterate_dir(f.file, &buf.ctx);
ctx               376 fs/readdir.c   		typeof(lastdirent->d_off) d_off = buf.ctx.pos;
ctx               404 fs/readdir.c   	struct dir_context ctx;
ctx               409 fs/readdir.c   static int compat_fillonedir(struct dir_context *ctx, const char *name,
ctx               414 fs/readdir.c   		container_of(ctx, struct compat_readdir_callback, ctx);
ctx               449 fs/readdir.c   		.ctx.actor = compat_fillonedir,
ctx               456 fs/readdir.c   	error = iterate_dir(f.file, &buf.ctx);
ctx               472 fs/readdir.c   	struct dir_context ctx;
ctx               479 fs/readdir.c   static int compat_filldir(struct dir_context *ctx, const char *name, int namlen,
ctx               484 fs/readdir.c   		container_of(ctx, struct compat_getdents_callback, ctx);
ctx               531 fs/readdir.c   		.ctx.actor = compat_filldir,
ctx               544 fs/readdir.c   	error = iterate_dir(f.file, &buf.ctx);
ctx               549 fs/readdir.c   		if (put_user(buf.ctx.pos, &lastdirent->d_off))
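
fs/readdir.c above is the caller side of the dir_context contract: embed the context in a private struct, point .actor at a callback, and recover the wrapper with container_of() inside it. A minimal in-kernel sketch of that pattern, assuming the int-returning actor signature used throughout this tree (count_callback and count_dir_entries are hypothetical names):

#include <linux/fs.h>
#include <linux/kernel.h>

struct count_callback {
	struct dir_context ctx;	/* embedded, as in the structs above */
	int count;
};

/* Actor invoked once per directory entry; returning 0 continues
 * iteration (this kernel generation uses int actors). */
static int count_actor(struct dir_context *ctx, const char *name, int namlen,
		       loff_t offset, u64 ino, unsigned int d_type)
{
	struct count_callback *buf =
		container_of(ctx, struct count_callback, ctx);

	buf->count++;
	return 0;
}

/* Count the entries of an already-open directory. */
static int count_dir_entries(struct file *file)
{
	struct count_callback buf = {
		.ctx.actor = count_actor,
	};
	int error = iterate_dir(file, &buf.ctx);

	return error ?: buf.count;
}
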
ctx                60 fs/reiserfs/dir.c int reiserfs_readdir_inode(struct inode *inode, struct dir_context *ctx)
ctx                87 fs/reiserfs/dir.c 	make_cpu_key(&pos_key, inode, ctx->pos ?: DOT_OFFSET, TYPE_DIRENTRY, 3);
ctx               177 fs/reiserfs/dir.c 				ctx->pos = deh_offset(deh);
ctx               210 fs/reiserfs/dir.c 				    (ctx, local_buf, d_reclen, d_ino,
ctx               264 fs/reiserfs/dir.c 	ctx->pos = next_pos;
ctx               272 fs/reiserfs/dir.c static int reiserfs_readdir(struct file *file, struct dir_context *ctx)
ctx               274 fs/reiserfs/dir.c 	return reiserfs_readdir_inode(file_inode(file), ctx);
ctx               185 fs/reiserfs/xattr.c 	struct dir_context ctx;
ctx               193 fs/reiserfs/xattr.c fill_with_dentries(struct dir_context *ctx, const char *name, int namelen,
ctx               197 fs/reiserfs/xattr.c 		container_of(ctx, struct reiserfs_dentry_buf, ctx);
ctx               245 fs/reiserfs/xattr.c 		.ctx.actor = fill_with_dentries,
ctx               265 fs/reiserfs/xattr.c 		err = reiserfs_readdir_inode(d_inode(dir), &buf.ctx);
ctx               793 fs/reiserfs/xattr.c 	struct dir_context ctx;
ctx               800 fs/reiserfs/xattr.c static int listxattr_filler(struct dir_context *ctx, const char *name,
ctx               805 fs/reiserfs/xattr.c 		container_of(ctx, struct listxattr_buf, ctx);
ctx               843 fs/reiserfs/xattr.c 		.ctx.actor = listxattr_filler,
ctx               864 fs/reiserfs/xattr.c 	err = reiserfs_readdir_inode(d_inode(dir), &buf.ctx);
ctx               151 fs/romfs/super.c static int romfs_readdir(struct file *file, struct dir_context *ctx)
ctx               162 fs/romfs/super.c 	offset = ctx->pos;
ctx               175 fs/romfs/super.c 			ctx->pos = offset;
ctx               178 fs/romfs/super.c 		ctx->pos = offset;
ctx               199 fs/romfs/super.c 		if (!dir_emit(ctx, fsname, j, ino,
ctx                63 fs/signalfd.c  	struct signalfd_ctx *ctx = file->private_data;
ctx                69 fs/signalfd.c  	if (next_signal(&current->pending, &ctx->sigmask) ||
ctx                71 fs/signalfd.c  			&ctx->sigmask))
ctx               166 fs/signalfd.c  static ssize_t signalfd_dequeue(struct signalfd_ctx *ctx, kernel_siginfo_t *info,
ctx               173 fs/signalfd.c  	ret = dequeue_signal(current, &ctx->sigmask, info);
ctx               188 fs/signalfd.c  		ret = dequeue_signal(current, &ctx->sigmask, info);
ctx               215 fs/signalfd.c  	struct signalfd_ctx *ctx = file->private_data;
ctx               227 fs/signalfd.c  		ret = signalfd_dequeue(ctx, &info, nonblock);
ctx               244 fs/signalfd.c  	struct signalfd_ctx *ctx = f->private_data;
ctx               247 fs/signalfd.c  	sigmask = ctx->sigmask;
ctx               265 fs/signalfd.c  	struct signalfd_ctx *ctx;
ctx               278 fs/signalfd.c  		ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx               279 fs/signalfd.c  		if (!ctx)
ctx               282 fs/signalfd.c  		ctx->sigmask = *mask;
ctx               288 fs/signalfd.c  		ufd = anon_inode_getfd("[signalfd]", &signalfd_fops, ctx,
ctx               291 fs/signalfd.c  			kfree(ctx);
ctx               296 fs/signalfd.c  		ctx = f.file->private_data;
ctx               302 fs/signalfd.c  		ctx->sigmask = *mask;
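
On the userspace side of the signalfd entries above, a blocked signal becomes readable data; each read() corresponds to one signalfd_dequeue(). A minimal usage sketch:

/* Userspace sketch: block SIGINT, then receive it as data on a signalfd. */
#include <sys/signalfd.h>
#include <signal.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	sigset_t mask;
	struct signalfd_siginfo si;
	int fd;

	sigemptyset(&mask);
	sigaddset(&mask, SIGINT);
	/* The signal must be blocked, or it is delivered the normal way
	 * instead of being queued for the descriptor. */
	if (sigprocmask(SIG_BLOCK, &mask, NULL) < 0)
		exit(1);

	fd = signalfd(-1, &mask, SFD_CLOEXEC);
	if (fd < 0)
		exit(1);

	/* Each read dequeues one pending signal from ctx->sigmask. */
	if (read(fd, &si, sizeof(si)) == sizeof(si))
		printf("got signal %u from pid %u\n", si.ssi_signo, si.ssi_pid);
	return 0;
}
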
ctx                97 fs/squashfs/dir.c static int squashfs_readdir(struct file *file, struct dir_context *ctx)
ctx               123 fs/squashfs/dir.c 	while (ctx->pos < 3) {
ctx               127 fs/squashfs/dir.c 		if (ctx->pos == 0) {
ctx               137 fs/squashfs/dir.c 		if (!dir_emit(ctx, name, size, i_ino,
ctx               141 fs/squashfs/dir.c 		ctx->pos += size;
ctx               148 fs/squashfs/dir.c 				ctx->pos);
ctx               188 fs/squashfs/dir.c 			if (ctx->pos >= length)
ctx               199 fs/squashfs/dir.c 			if (!dir_emit(ctx, dire->name, size,
ctx               204 fs/squashfs/dir.c 			ctx->pos = length;
ctx                64 fs/sysv/dir.c  static int sysv_readdir(struct file *file, struct dir_context *ctx)
ctx                66 fs/sysv/dir.c  	unsigned long pos = ctx->pos;
ctx                73 fs/sysv/dir.c  	ctx->pos = pos = (pos + SYSV_DIRSIZE-1) & ~(SYSV_DIRSIZE-1);
ctx                90 fs/sysv/dir.c  		for ( ;(char*)de <= limit; de++, ctx->pos += sizeof(*de)) {
ctx                96 fs/sysv/dir.c  			if (!dir_emit(ctx, name, strnlen(name,SYSV_NAMELEN),
ctx                51 fs/timerfd.c   static inline bool isalarm(struct timerfd_ctx *ctx)
ctx                53 fs/timerfd.c   	return ctx->clockid == CLOCK_REALTIME_ALARM ||
ctx                54 fs/timerfd.c   		ctx->clockid == CLOCK_BOOTTIME_ALARM;
ctx                62 fs/timerfd.c   static void timerfd_triggered(struct timerfd_ctx *ctx)
ctx                66 fs/timerfd.c   	spin_lock_irqsave(&ctx->wqh.lock, flags);
ctx                67 fs/timerfd.c   	ctx->expired = 1;
ctx                68 fs/timerfd.c   	ctx->ticks++;
ctx                69 fs/timerfd.c   	wake_up_locked_poll(&ctx->wqh, EPOLLIN);
ctx                70 fs/timerfd.c   	spin_unlock_irqrestore(&ctx->wqh.lock, flags);
ctx                75 fs/timerfd.c   	struct timerfd_ctx *ctx = container_of(htmr, struct timerfd_ctx,
ctx                77 fs/timerfd.c   	timerfd_triggered(ctx);
ctx                84 fs/timerfd.c   	struct timerfd_ctx *ctx = container_of(alarm, struct timerfd_ctx,
ctx                86 fs/timerfd.c   	timerfd_triggered(ctx);
ctx                99 fs/timerfd.c   	struct timerfd_ctx *ctx;
ctx               103 fs/timerfd.c   	list_for_each_entry_rcu(ctx, &cancel_list, clist) {
ctx               104 fs/timerfd.c   		if (!ctx->might_cancel)
ctx               106 fs/timerfd.c   		spin_lock_irqsave(&ctx->wqh.lock, flags);
ctx               107 fs/timerfd.c   		if (ctx->moffs != moffs) {
ctx               108 fs/timerfd.c   			ctx->moffs = KTIME_MAX;
ctx               109 fs/timerfd.c   			ctx->ticks++;
ctx               110 fs/timerfd.c   			wake_up_locked_poll(&ctx->wqh, EPOLLIN);
ctx               112 fs/timerfd.c   		spin_unlock_irqrestore(&ctx->wqh.lock, flags);
ctx               117 fs/timerfd.c   static void __timerfd_remove_cancel(struct timerfd_ctx *ctx)
ctx               119 fs/timerfd.c   	if (ctx->might_cancel) {
ctx               120 fs/timerfd.c   		ctx->might_cancel = false;
ctx               122 fs/timerfd.c   		list_del_rcu(&ctx->clist);
ctx               127 fs/timerfd.c   static void timerfd_remove_cancel(struct timerfd_ctx *ctx)
ctx               129 fs/timerfd.c   	spin_lock(&ctx->cancel_lock);
ctx               130 fs/timerfd.c   	__timerfd_remove_cancel(ctx);
ctx               131 fs/timerfd.c   	spin_unlock(&ctx->cancel_lock);
ctx               134 fs/timerfd.c   static bool timerfd_canceled(struct timerfd_ctx *ctx)
ctx               136 fs/timerfd.c   	if (!ctx->might_cancel || ctx->moffs != KTIME_MAX)
ctx               138 fs/timerfd.c   	ctx->moffs = ktime_mono_to_real(0);
ctx               142 fs/timerfd.c   static void timerfd_setup_cancel(struct timerfd_ctx *ctx, int flags)
ctx               144 fs/timerfd.c   	spin_lock(&ctx->cancel_lock);
ctx               145 fs/timerfd.c   	if ((ctx->clockid == CLOCK_REALTIME ||
ctx               146 fs/timerfd.c   	     ctx->clockid == CLOCK_REALTIME_ALARM) &&
ctx               148 fs/timerfd.c   		if (!ctx->might_cancel) {
ctx               149 fs/timerfd.c   			ctx->might_cancel = true;
ctx               151 fs/timerfd.c   			list_add_rcu(&ctx->clist, &cancel_list);
ctx               155 fs/timerfd.c   		__timerfd_remove_cancel(ctx);
ctx               157 fs/timerfd.c   	spin_unlock(&ctx->cancel_lock);
ctx               160 fs/timerfd.c   static ktime_t timerfd_get_remaining(struct timerfd_ctx *ctx)
ctx               164 fs/timerfd.c   	if (isalarm(ctx))
ctx               165 fs/timerfd.c   		remaining = alarm_expires_remaining(&ctx->t.alarm);
ctx               167 fs/timerfd.c   		remaining = hrtimer_expires_remaining_adjusted(&ctx->t.tmr);
ctx               172 fs/timerfd.c   static int timerfd_setup(struct timerfd_ctx *ctx, int flags,
ctx               177 fs/timerfd.c   	int clockid = ctx->clockid;
ctx               183 fs/timerfd.c   	ctx->expired = 0;
ctx               184 fs/timerfd.c   	ctx->ticks = 0;
ctx               185 fs/timerfd.c   	ctx->tintv = timespec64_to_ktime(ktmr->it_interval);
ctx               187 fs/timerfd.c   	if (isalarm(ctx)) {
ctx               188 fs/timerfd.c   		alarm_init(&ctx->t.alarm,
ctx               189 fs/timerfd.c   			   ctx->clockid == CLOCK_REALTIME_ALARM ?
ctx               193 fs/timerfd.c   		hrtimer_init(&ctx->t.tmr, clockid, htmode);
ctx               194 fs/timerfd.c   		hrtimer_set_expires(&ctx->t.tmr, texp);
ctx               195 fs/timerfd.c   		ctx->t.tmr.function = timerfd_tmrproc;
ctx               199 fs/timerfd.c   		if (isalarm(ctx)) {
ctx               201 fs/timerfd.c   				alarm_start(&ctx->t.alarm, texp);
ctx               203 fs/timerfd.c   				alarm_start_relative(&ctx->t.alarm, texp);
ctx               205 fs/timerfd.c   			hrtimer_start(&ctx->t.tmr, texp, htmode);
ctx               208 fs/timerfd.c   		if (timerfd_canceled(ctx))
ctx               212 fs/timerfd.c   	ctx->settime_flags = flags & TFD_SETTIME_FLAGS;
ctx               218 fs/timerfd.c   	struct timerfd_ctx *ctx = file->private_data;
ctx               220 fs/timerfd.c   	timerfd_remove_cancel(ctx);
ctx               222 fs/timerfd.c   	if (isalarm(ctx))
ctx               223 fs/timerfd.c   		alarm_cancel(&ctx->t.alarm);
ctx               225 fs/timerfd.c   		hrtimer_cancel(&ctx->t.tmr);
ctx               226 fs/timerfd.c   	kfree_rcu(ctx, rcu);
ctx               232 fs/timerfd.c   	struct timerfd_ctx *ctx = file->private_data;
ctx               236 fs/timerfd.c   	poll_wait(file, &ctx->wqh, wait);
ctx               238 fs/timerfd.c   	spin_lock_irqsave(&ctx->wqh.lock, flags);
ctx               239 fs/timerfd.c   	if (ctx->ticks)
ctx               241 fs/timerfd.c   	spin_unlock_irqrestore(&ctx->wqh.lock, flags);
ctx               249 fs/timerfd.c   	struct timerfd_ctx *ctx = file->private_data;
ctx               255 fs/timerfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               259 fs/timerfd.c   		res = wait_event_interruptible_locked_irq(ctx->wqh, ctx->ticks);
ctx               266 fs/timerfd.c   	if (timerfd_canceled(ctx)) {
ctx               267 fs/timerfd.c   		ctx->ticks = 0;
ctx               268 fs/timerfd.c   		ctx->expired = 0;
ctx               272 fs/timerfd.c   	if (ctx->ticks) {
ctx               273 fs/timerfd.c   		ticks = ctx->ticks;
ctx               275 fs/timerfd.c   		if (ctx->expired && ctx->tintv) {
ctx               282 fs/timerfd.c   			if (isalarm(ctx)) {
ctx               284 fs/timerfd.c   					&ctx->t.alarm, ctx->tintv) - 1;
ctx               285 fs/timerfd.c   				alarm_restart(&ctx->t.alarm);
ctx               287 fs/timerfd.c   				ticks += hrtimer_forward_now(&ctx->t.tmr,
ctx               288 fs/timerfd.c   							     ctx->tintv) - 1;
ctx               289 fs/timerfd.c   				hrtimer_restart(&ctx->t.tmr);
ctx               292 fs/timerfd.c   		ctx->expired = 0;
ctx               293 fs/timerfd.c   		ctx->ticks = 0;
ctx               295 fs/timerfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               304 fs/timerfd.c   	struct timerfd_ctx *ctx = file->private_data;
ctx               307 fs/timerfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               308 fs/timerfd.c   	t.it_value = ktime_to_timespec(timerfd_get_remaining(ctx));
ctx               309 fs/timerfd.c   	t.it_interval = ktime_to_timespec(ctx->tintv);
ctx               310 fs/timerfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               318 fs/timerfd.c   		   ctx->clockid,
ctx               319 fs/timerfd.c   		   (unsigned long long)ctx->ticks,
ctx               320 fs/timerfd.c   		   ctx->settime_flags,
ctx               333 fs/timerfd.c   	struct timerfd_ctx *ctx = file->private_data;
ctx               345 fs/timerfd.c   		spin_lock_irq(&ctx->wqh.lock);
ctx               346 fs/timerfd.c   		if (!timerfd_canceled(ctx)) {
ctx               347 fs/timerfd.c   			ctx->ticks = ticks;
ctx               348 fs/timerfd.c   			wake_up_locked_poll(&ctx->wqh, EPOLLIN);
ctx               351 fs/timerfd.c   		spin_unlock_irq(&ctx->wqh.lock);
ctx               390 fs/timerfd.c   	struct timerfd_ctx *ctx;
ctx               409 fs/timerfd.c   	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
ctx               410 fs/timerfd.c   	if (!ctx)
ctx               413 fs/timerfd.c   	init_waitqueue_head(&ctx->wqh);
ctx               414 fs/timerfd.c   	spin_lock_init(&ctx->cancel_lock);
ctx               415 fs/timerfd.c   	ctx->clockid = clockid;
ctx               417 fs/timerfd.c   	if (isalarm(ctx))
ctx               418 fs/timerfd.c   		alarm_init(&ctx->t.alarm,
ctx               419 fs/timerfd.c   			   ctx->clockid == CLOCK_REALTIME_ALARM ?
ctx               423 fs/timerfd.c   		hrtimer_init(&ctx->t.tmr, clockid, HRTIMER_MODE_ABS);
ctx               425 fs/timerfd.c   	ctx->moffs = ktime_mono_to_real(0);
ctx               427 fs/timerfd.c   	ufd = anon_inode_getfd("[timerfd]", &timerfd_fops, ctx,
ctx               430 fs/timerfd.c   		kfree(ctx);
ctx               440 fs/timerfd.c   	struct timerfd_ctx *ctx;
ctx               450 fs/timerfd.c   	ctx = f.file->private_data;
ctx               452 fs/timerfd.c   	if (isalarm(ctx) && !capable(CAP_WAKE_ALARM)) {
ctx               457 fs/timerfd.c   	timerfd_setup_cancel(ctx, flags);
ctx               464 fs/timerfd.c   		spin_lock_irq(&ctx->wqh.lock);
ctx               466 fs/timerfd.c   		if (isalarm(ctx)) {
ctx               467 fs/timerfd.c   			if (alarm_try_to_cancel(&ctx->t.alarm) >= 0)
ctx               470 fs/timerfd.c   			if (hrtimer_try_to_cancel(&ctx->t.tmr) >= 0)
ctx               473 fs/timerfd.c   		spin_unlock_irq(&ctx->wqh.lock);
ctx               475 fs/timerfd.c   		if (isalarm(ctx))
ctx               476 fs/timerfd.c   			hrtimer_cancel_wait_running(&ctx->t.alarm.timer);
ctx               478 fs/timerfd.c   			hrtimer_cancel_wait_running(&ctx->t.tmr);
ctx               487 fs/timerfd.c   	if (ctx->expired && ctx->tintv) {
ctx               488 fs/timerfd.c   		if (isalarm(ctx))
ctx               489 fs/timerfd.c   			alarm_forward_now(&ctx->t.alarm, ctx->tintv);
ctx               491 fs/timerfd.c   			hrtimer_forward_now(&ctx->t.tmr, ctx->tintv);
ctx               494 fs/timerfd.c   	old->it_value = ktime_to_timespec64(timerfd_get_remaining(ctx));
ctx               495 fs/timerfd.c   	old->it_interval = ktime_to_timespec64(ctx->tintv);
ctx               500 fs/timerfd.c   	ret = timerfd_setup(ctx, flags, new);
ctx               502 fs/timerfd.c   	spin_unlock_irq(&ctx->wqh.lock);
ctx               510 fs/timerfd.c   	struct timerfd_ctx *ctx;
ctx               514 fs/timerfd.c   	ctx = f.file->private_data;
ctx               516 fs/timerfd.c   	spin_lock_irq(&ctx->wqh.lock);
ctx               517 fs/timerfd.c   	if (ctx->expired && ctx->tintv) {
ctx               518 fs/timerfd.c   		ctx->expired = 0;
ctx               520 fs/timerfd.c   		if (isalarm(ctx)) {
ctx               521 fs/timerfd.c   			ctx->ticks +=
ctx               523 fs/timerfd.c   					&ctx->t.alarm, ctx->tintv) - 1;
ctx               524 fs/timerfd.c   			alarm_restart(&ctx->t.alarm);
ctx               526 fs/timerfd.c   			ctx->ticks +=
ctx               527 fs/timerfd.c   				hrtimer_forward_now(&ctx->t.tmr, ctx->tintv)
ctx               529 fs/timerfd.c   			hrtimer_restart(&ctx->t.tmr);
ctx               532 fs/timerfd.c   	t->it_value = ktime_to_timespec64(timerfd_get_remaining(ctx));
ctx               533 fs/timerfd.c   	t->it_interval = ktime_to_timespec64(ctx->tintv);
ctx               534 fs/timerfd.c   	spin_unlock_irq(&ctx->wqh.lock);
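
The timerfd entries above accumulate expirations in ctx->ticks and hand them to userspace as one u64 per read(). A minimal userspace sketch of a periodic timer:

#include <sys/timerfd.h>
#include <stdint.h>
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	struct itimerspec its = {
		.it_value    = { .tv_sec = 1 },	/* first expiry after 1s */
		.it_interval = { .tv_sec = 1 },	/* then every 1s */
	};
	uint64_t ticks;
	int i, fd = timerfd_create(CLOCK_MONOTONIC, TFD_CLOEXEC);

	if (fd < 0 || timerfd_settime(fd, 0, &its, NULL) < 0)
		exit(1);

	for (i = 0; i < 3; i++) {
		/* Blocks until at least one expiration is pending; the
		 * value read is the expiration count since the last read. */
		if (read(fd, &ticks, sizeof(ticks)) == (ssize_t)sizeof(ticks))
			printf("expirations: %llu\n", (unsigned long long)ticks);
	}
	return 0;
}
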
ctx                 4 fs/ubifs/crypto.c static int ubifs_crypt_get_context(struct inode *inode, void *ctx, size_t len)
ctx                 7 fs/ubifs/crypto.c 			       ctx, len);
ctx                10 fs/ubifs/crypto.c static int ubifs_crypt_set_context(struct inode *inode, const void *ctx,
ctx                19 fs/ubifs/crypto.c 			       ctx, len, 0, false);
ctx               495 fs/ubifs/dir.c static int ubifs_readdir(struct file *file, struct dir_context *ctx)
ctx               506 fs/ubifs/dir.c 	dbg_gen("dir ino %lu, f_pos %#llx", dir->i_ino, ctx->pos);
ctx               508 fs/ubifs/dir.c 	if (ctx->pos > UBIFS_S_KEY_HASH_MASK || ctx->pos == 2)
ctx               546 fs/ubifs/dir.c 	if (ctx->pos < 2) {
ctx               548 fs/ubifs/dir.c 		if (!dir_emit_dots(file, ctx)) {
ctx               563 fs/ubifs/dir.c 		ctx->pos = key_hash_flash(c, &dent->key);
ctx               573 fs/ubifs/dir.c 		dent_key_init_hash(c, &key, dir->i_ino, ctx->pos);
ctx               580 fs/ubifs/dir.c 		ctx->pos = key_hash_flash(c, &dent->key);
ctx               608 fs/ubifs/dir.c 		if (!dir_emit(ctx, fstr.name, fstr.len,
ctx               625 fs/ubifs/dir.c 		ctx->pos = key_hash_flash(c, &dent->key);
ctx               649 fs/ubifs/dir.c 	ctx->pos = 2;
ctx                39 fs/udf/dir.c   static int udf_readdir(struct file *file, struct dir_context *ctx)
ctx                62 fs/udf/dir.c   	if (ctx->pos == 0) {
ctx                63 fs/udf/dir.c   		if (!dir_emit_dot(file, ctx))
ctx                65 fs/udf/dir.c   		ctx->pos = 1;
ctx                67 fs/udf/dir.c   	nf_pos = (ctx->pos - 1) << 2;
ctx               127 fs/udf/dir.c   		ctx->pos = (nf_pos >> 2) + 1;
ctx               174 fs/udf/dir.c   			if (!dir_emit_dotdot(file, ctx))
ctx               185 fs/udf/dir.c   		if (!dir_emit(ctx, fname, flen, iblock, DT_UNKNOWN))
ctx               189 fs/udf/dir.c   	ctx->pos = (nf_pos >> 2) + 1;
ctx               423 fs/ufs/dir.c   ufs_readdir(struct file *file, struct dir_context *ctx)
ctx               425 fs/ufs/dir.c   	loff_t pos = ctx->pos;
ctx               450 fs/ufs/dir.c   			ctx->pos += PAGE_SIZE - offset;
ctx               457 fs/ufs/dir.c   				ctx->pos = (n<<PAGE_SHIFT) + offset;
ctx               475 fs/ufs/dir.c   				if (!dir_emit(ctx, de->d_name,
ctx               483 fs/ufs/dir.c   			ctx->pos += fs16_to_cpu(sb, de->d_reclen);
ctx               876 fs/ufs/inode.c static inline void free_data(struct to_free *ctx, u64 from, unsigned count)
ctx               878 fs/ufs/inode.c 	if (ctx->count && ctx->to != from) {
ctx               879 fs/ufs/inode.c 		ufs_free_blocks(ctx->inode, ctx->to - ctx->count, ctx->count);
ctx               880 fs/ufs/inode.c 		ctx->count = 0;
ctx               882 fs/ufs/inode.c 	ctx->count += count;
ctx               883 fs/ufs/inode.c 	ctx->to = from + count;
ctx               895 fs/ufs/inode.c 	struct to_free ctx = {.inode = inode};
ctx               950 fs/ufs/inode.c 		free_data(&ctx, tmp, uspi->s_fpb);
ctx               953 fs/ufs/inode.c 	free_data(&ctx, 0, 0);
ctx               994 fs/ufs/inode.c 		struct to_free ctx = {.inode = inode};
ctx              1000 fs/ufs/inode.c 				free_data(&ctx, block, uspi->s_fpb);
ctx              1002 fs/ufs/inode.c 		free_data(&ctx, 0, 0);
ctx              1028 fs/ufs/inode.c 		struct to_free ctx = {.inode = inode};
ctx              1038 fs/ufs/inode.c 				free_data(&ctx, block, uspi->s_fpb);
ctx              1041 fs/ufs/inode.c 		free_data(&ctx, 0, 0);
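
free_data() above coalesces contiguous block runs so ufs_free_blocks() fires once per run rather than once per block, with a final free_data(&ctx, 0, 0) flushing the tail. The same pattern as a standalone sketch (flush_range() is a hypothetical stand-in for ufs_free_blocks()):

#include <stdint.h>
#include <stdio.h>

struct to_free {
	uint64_t to;	/* one past the end of the pending run */
	unsigned count;	/* length of the pending run */
};

static void flush_range(uint64_t start, unsigned count)
{
	printf("free [%llu, +%u)\n", (unsigned long long)start, count);
}

/* Extend the pending run if 'from' is contiguous with it; otherwise
 * flush the run and start a new one.  A (0, 0) call flushes the tail. */
static void free_data(struct to_free *ctx, uint64_t from, unsigned count)
{
	if (ctx->count && ctx->to != from) {
		flush_range(ctx->to - ctx->count, ctx->count);
		ctx->count = 0;
	}
	ctx->count += count;
	ctx->to = from + count;
}

int main(void)
{
	struct to_free ctx = { 0 };

	free_data(&ctx, 8, 4);		/* run starts at block 8 */
	free_data(&ctx, 12, 4);		/* contiguous: run grows to [8, 16) */
	free_data(&ctx, 100, 4);	/* gap: flushes [8, +8), starts new run */
	free_data(&ctx, 0, 0);		/* flush tail: [100, +4) */
	return 0;
}
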
ctx                88 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx                97 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx               151 fs/userfaultfd.c static void userfaultfd_ctx_get(struct userfaultfd_ctx *ctx)
ctx               153 fs/userfaultfd.c 	refcount_inc(&ctx->refcount);
ctx               164 fs/userfaultfd.c static void userfaultfd_ctx_put(struct userfaultfd_ctx *ctx)
ctx               166 fs/userfaultfd.c 	if (refcount_dec_and_test(&ctx->refcount)) {
ctx               167 fs/userfaultfd.c 		VM_BUG_ON(spin_is_locked(&ctx->fault_pending_wqh.lock));
ctx               168 fs/userfaultfd.c 		VM_BUG_ON(waitqueue_active(&ctx->fault_pending_wqh));
ctx               169 fs/userfaultfd.c 		VM_BUG_ON(spin_is_locked(&ctx->fault_wqh.lock));
ctx               170 fs/userfaultfd.c 		VM_BUG_ON(waitqueue_active(&ctx->fault_wqh));
ctx               171 fs/userfaultfd.c 		VM_BUG_ON(spin_is_locked(&ctx->event_wqh.lock));
ctx               172 fs/userfaultfd.c 		VM_BUG_ON(waitqueue_active(&ctx->event_wqh));
ctx               173 fs/userfaultfd.c 		VM_BUG_ON(spin_is_locked(&ctx->fd_wqh.lock));
ctx               174 fs/userfaultfd.c 		VM_BUG_ON(waitqueue_active(&ctx->fd_wqh));
ctx               175 fs/userfaultfd.c 		mmdrop(ctx->mm);
ctx               176 fs/userfaultfd.c 		kmem_cache_free(userfaultfd_ctx_cachep, ctx);
ctx               227 fs/userfaultfd.c static inline bool userfaultfd_huge_must_wait(struct userfaultfd_ctx *ctx,
ctx               233 fs/userfaultfd.c 	struct mm_struct *mm = ctx->mm;
ctx               259 fs/userfaultfd.c static inline bool userfaultfd_huge_must_wait(struct userfaultfd_ctx *ctx,
ctx               276 fs/userfaultfd.c static inline bool userfaultfd_must_wait(struct userfaultfd_ctx *ctx,
ctx               281 fs/userfaultfd.c 	struct mm_struct *mm = ctx->mm;
ctx               355 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx               381 fs/userfaultfd.c 	ctx = vmf->vma->vm_userfaultfd_ctx.ctx;
ctx               382 fs/userfaultfd.c 	if (!ctx)
ctx               385 fs/userfaultfd.c 	BUG_ON(ctx->mm != mm);
ctx               390 fs/userfaultfd.c 	if (ctx->features & UFFD_FEATURE_SIGBUS)
ctx               398 fs/userfaultfd.c 	if (unlikely(READ_ONCE(ctx->released))) {
ctx               456 fs/userfaultfd.c 	userfaultfd_ctx_get(ctx);
ctx               461 fs/userfaultfd.c 			ctx->features);
ctx               462 fs/userfaultfd.c 	uwq.ctx = ctx;
ctx               471 fs/userfaultfd.c 	spin_lock_irq(&ctx->fault_pending_wqh.lock);
ctx               476 fs/userfaultfd.c 	__add_wait_queue(&ctx->fault_pending_wqh, &uwq.wq);
ctx               483 fs/userfaultfd.c 	spin_unlock_irq(&ctx->fault_pending_wqh.lock);
ctx               486 fs/userfaultfd.c 		must_wait = userfaultfd_must_wait(ctx, vmf->address, vmf->flags,
ctx               489 fs/userfaultfd.c 		must_wait = userfaultfd_huge_must_wait(ctx, vmf->vma,
ctx               494 fs/userfaultfd.c 	if (likely(must_wait && !READ_ONCE(ctx->released) &&
ctx               497 fs/userfaultfd.c 		wake_up_poll(&ctx->fd_wqh, EPOLLIN);
ctx               517 fs/userfaultfd.c 			    READ_ONCE(ctx->released) ||
ctx               565 fs/userfaultfd.c 		spin_lock_irq(&ctx->fault_pending_wqh.lock);
ctx               571 fs/userfaultfd.c 		spin_unlock_irq(&ctx->fault_pending_wqh.lock);
ctx               578 fs/userfaultfd.c 	userfaultfd_ctx_put(ctx);
ctx               584 fs/userfaultfd.c static void userfaultfd_event_wait_completion(struct userfaultfd_ctx *ctx,
ctx               592 fs/userfaultfd.c 	ewq->ctx = ctx;
ctx               596 fs/userfaultfd.c 	spin_lock_irq(&ctx->event_wqh.lock);
ctx               601 fs/userfaultfd.c 	__add_wait_queue(&ctx->event_wqh, &ewq->wq);
ctx               606 fs/userfaultfd.c 		if (READ_ONCE(ctx->released) ||
ctx               614 fs/userfaultfd.c 			__remove_wait_queue(&ctx->event_wqh, &ewq->wq);
ctx               626 fs/userfaultfd.c 		spin_unlock_irq(&ctx->event_wqh.lock);
ctx               628 fs/userfaultfd.c 		wake_up_poll(&ctx->fd_wqh, EPOLLIN);
ctx               631 fs/userfaultfd.c 		spin_lock_irq(&ctx->event_wqh.lock);
ctx               634 fs/userfaultfd.c 	spin_unlock_irq(&ctx->event_wqh.lock);
ctx               645 fs/userfaultfd.c 			if (vma->vm_userfaultfd_ctx.ctx == release_new_ctx) {
ctx               659 fs/userfaultfd.c 	WRITE_ONCE(ctx->mmap_changing, false);
ctx               660 fs/userfaultfd.c 	userfaultfd_ctx_put(ctx);
ctx               663 fs/userfaultfd.c static void userfaultfd_event_complete(struct userfaultfd_ctx *ctx,
ctx               667 fs/userfaultfd.c 	wake_up_locked(&ctx->event_wqh);
ctx               668 fs/userfaultfd.c 	__remove_wait_queue(&ctx->event_wqh, &ewq->wq);
ctx               673 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = NULL, *octx;
ctx               676 fs/userfaultfd.c 	octx = vma->vm_userfaultfd_ctx.ctx;
ctx               685 fs/userfaultfd.c 			ctx = fctx->new;
ctx               689 fs/userfaultfd.c 	if (!ctx) {
ctx               694 fs/userfaultfd.c 		ctx = kmem_cache_alloc(userfaultfd_ctx_cachep, GFP_KERNEL);
ctx               695 fs/userfaultfd.c 		if (!ctx) {
ctx               700 fs/userfaultfd.c 		refcount_set(&ctx->refcount, 1);
ctx               701 fs/userfaultfd.c 		ctx->flags = octx->flags;
ctx               702 fs/userfaultfd.c 		ctx->state = UFFD_STATE_RUNNING;
ctx               703 fs/userfaultfd.c 		ctx->features = octx->features;
ctx               704 fs/userfaultfd.c 		ctx->released = false;
ctx               705 fs/userfaultfd.c 		ctx->mmap_changing = false;
ctx               706 fs/userfaultfd.c 		ctx->mm = vma->vm_mm;
ctx               707 fs/userfaultfd.c 		mmgrab(ctx->mm);
ctx               712 fs/userfaultfd.c 		fctx->new = ctx;
ctx               716 fs/userfaultfd.c 	vma->vm_userfaultfd_ctx.ctx = ctx;
ctx               722 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = fctx->orig;
ctx               730 fs/userfaultfd.c 	userfaultfd_event_wait_completion(ctx, &ewq);
ctx               747 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx               749 fs/userfaultfd.c 	ctx = vma->vm_userfaultfd_ctx.ctx;
ctx               751 fs/userfaultfd.c 	if (!ctx)
ctx               754 fs/userfaultfd.c 	if (ctx->features & UFFD_FEATURE_EVENT_REMAP) {
ctx               755 fs/userfaultfd.c 		vm_ctx->ctx = ctx;
ctx               756 fs/userfaultfd.c 		userfaultfd_ctx_get(ctx);
ctx               757 fs/userfaultfd.c 		WRITE_ONCE(ctx->mmap_changing, true);
ctx               769 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = vm_ctx->ctx;
ctx               772 fs/userfaultfd.c 	if (!ctx)
ctx               776 fs/userfaultfd.c 		userfaultfd_ctx_put(ctx);
ctx               787 fs/userfaultfd.c 	userfaultfd_event_wait_completion(ctx, &ewq);
ctx               794 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx               797 fs/userfaultfd.c 	ctx = vma->vm_userfaultfd_ctx.ctx;
ctx               798 fs/userfaultfd.c 	if (!ctx || !(ctx->features & UFFD_FEATURE_EVENT_REMOVE))
ctx               801 fs/userfaultfd.c 	userfaultfd_ctx_get(ctx);
ctx               802 fs/userfaultfd.c 	WRITE_ONCE(ctx->mmap_changing, true);
ctx               811 fs/userfaultfd.c 	userfaultfd_event_wait_completion(ctx, &ewq);
ctx               816 fs/userfaultfd.c static bool has_unmap_ctx(struct userfaultfd_ctx *ctx, struct list_head *unmaps,
ctx               822 fs/userfaultfd.c 		if (unmap_ctx->ctx == ctx && unmap_ctx->start == start &&
ctx               835 fs/userfaultfd.c 		struct userfaultfd_ctx *ctx = vma->vm_userfaultfd_ctx.ctx;
ctx               837 fs/userfaultfd.c 		if (!ctx || !(ctx->features & UFFD_FEATURE_EVENT_UNMAP) ||
ctx               838 fs/userfaultfd.c 		    has_unmap_ctx(ctx, unmaps, start, end))
ctx               845 fs/userfaultfd.c 		userfaultfd_ctx_get(ctx);
ctx               846 fs/userfaultfd.c 		WRITE_ONCE(ctx->mmap_changing, true);
ctx               847 fs/userfaultfd.c 		unmap_ctx->ctx = ctx;
ctx               858 fs/userfaultfd.c 	struct userfaultfd_unmap_ctx *ctx, *n;
ctx               861 fs/userfaultfd.c 	list_for_each_entry_safe(ctx, n, uf, list) {
ctx               865 fs/userfaultfd.c 		ewq.msg.arg.remove.start = ctx->start;
ctx               866 fs/userfaultfd.c 		ewq.msg.arg.remove.end = ctx->end;
ctx               868 fs/userfaultfd.c 		userfaultfd_event_wait_completion(ctx->ctx, &ewq);
ctx               870 fs/userfaultfd.c 		list_del(&ctx->list);
ctx               871 fs/userfaultfd.c 		kfree(ctx);
ctx               877 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = file->private_data;
ctx               878 fs/userfaultfd.c 	struct mm_struct *mm = ctx->mm;
ctx               885 fs/userfaultfd.c 	WRITE_ONCE(ctx->released, true);
ctx               903 fs/userfaultfd.c 		BUG_ON(!!vma->vm_userfaultfd_ctx.ctx ^
ctx               905 fs/userfaultfd.c 		if (vma->vm_userfaultfd_ctx.ctx != ctx) {
ctx               932 fs/userfaultfd.c 	spin_lock_irq(&ctx->fault_pending_wqh.lock);
ctx               933 fs/userfaultfd.c 	__wake_up_locked_key(&ctx->fault_pending_wqh, TASK_NORMAL, &range);
ctx               934 fs/userfaultfd.c 	__wake_up(&ctx->fault_wqh, TASK_NORMAL, 1, &range);
ctx               935 fs/userfaultfd.c 	spin_unlock_irq(&ctx->fault_pending_wqh.lock);
ctx               938 fs/userfaultfd.c 	wake_up_all(&ctx->event_wqh);
ctx               940 fs/userfaultfd.c 	wake_up_poll(&ctx->fd_wqh, EPOLLHUP);
ctx               941 fs/userfaultfd.c 	userfaultfd_ctx_put(ctx);
ctx               965 fs/userfaultfd.c 		struct userfaultfd_ctx *ctx)
ctx               967 fs/userfaultfd.c 	return find_userfault_in(&ctx->fault_pending_wqh);
ctx               971 fs/userfaultfd.c 		struct userfaultfd_ctx *ctx)
ctx               973 fs/userfaultfd.c 	return find_userfault_in(&ctx->event_wqh);
ctx               978 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = file->private_data;
ctx               981 fs/userfaultfd.c 	poll_wait(file, &ctx->fd_wqh, wait);
ctx               983 fs/userfaultfd.c 	switch (ctx->state) {
ctx              1005 fs/userfaultfd.c 		if (waitqueue_active(&ctx->fault_pending_wqh))
ctx              1007 fs/userfaultfd.c 		else if (waitqueue_active(&ctx->event_wqh))
ctx              1019 fs/userfaultfd.c static int resolve_userfault_fork(struct userfaultfd_ctx *ctx,
ctx              1035 fs/userfaultfd.c static ssize_t userfaultfd_ctx_read(struct userfaultfd_ctx *ctx, int no_wait,
ctx              1052 fs/userfaultfd.c 	spin_lock_irq(&ctx->fd_wqh.lock);
ctx              1053 fs/userfaultfd.c 	__add_wait_queue(&ctx->fd_wqh, &wait);
ctx              1056 fs/userfaultfd.c 		spin_lock(&ctx->fault_pending_wqh.lock);
ctx              1057 fs/userfaultfd.c 		uwq = find_userfault(ctx);
ctx              1066 fs/userfaultfd.c 			write_seqcount_begin(&ctx->refile_seq);
ctx              1090 fs/userfaultfd.c 			add_wait_queue(&ctx->fault_wqh, &uwq->wq);
ctx              1092 fs/userfaultfd.c 			write_seqcount_end(&ctx->refile_seq);
ctx              1096 fs/userfaultfd.c 			spin_unlock(&ctx->fault_pending_wqh.lock);
ctx              1100 fs/userfaultfd.c 		spin_unlock(&ctx->fault_pending_wqh.lock);
ctx              1102 fs/userfaultfd.c 		spin_lock(&ctx->event_wqh.lock);
ctx              1103 fs/userfaultfd.c 		uwq = find_userfault_evt(ctx);
ctx              1118 fs/userfaultfd.c 				spin_unlock(&ctx->event_wqh.lock);
ctx              1123 fs/userfaultfd.c 			userfaultfd_event_complete(ctx, uwq);
ctx              1124 fs/userfaultfd.c 			spin_unlock(&ctx->event_wqh.lock);
ctx              1128 fs/userfaultfd.c 		spin_unlock(&ctx->event_wqh.lock);
ctx              1138 fs/userfaultfd.c 		spin_unlock_irq(&ctx->fd_wqh.lock);
ctx              1140 fs/userfaultfd.c 		spin_lock_irq(&ctx->fd_wqh.lock);
ctx              1142 fs/userfaultfd.c 	__remove_wait_queue(&ctx->fd_wqh, &wait);
ctx              1144 fs/userfaultfd.c 	spin_unlock_irq(&ctx->fd_wqh.lock);
ctx              1147 fs/userfaultfd.c 		ret = resolve_userfault_fork(ctx, fork_nctx, msg);
ctx              1148 fs/userfaultfd.c 		spin_lock_irq(&ctx->event_wqh.lock);
ctx              1170 fs/userfaultfd.c 			__add_wait_queue(&ctx->event_wqh, &uwq->wq);
ctx              1178 fs/userfaultfd.c 				userfaultfd_event_complete(ctx, uwq);
ctx              1194 fs/userfaultfd.c 		spin_unlock_irq(&ctx->event_wqh.lock);
ctx              1203 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = file->private_data;
ctx              1208 fs/userfaultfd.c 	if (ctx->state == UFFD_STATE_WAIT_API)
ctx              1214 fs/userfaultfd.c 		_ret = userfaultfd_ctx_read(ctx, no_wait, &msg);
ctx              1230 fs/userfaultfd.c static void __wake_userfault(struct userfaultfd_ctx *ctx,
ctx              1233 fs/userfaultfd.c 	spin_lock_irq(&ctx->fault_pending_wqh.lock);
ctx              1235 fs/userfaultfd.c 	if (waitqueue_active(&ctx->fault_pending_wqh))
ctx              1236 fs/userfaultfd.c 		__wake_up_locked_key(&ctx->fault_pending_wqh, TASK_NORMAL,
ctx              1238 fs/userfaultfd.c 	if (waitqueue_active(&ctx->fault_wqh))
ctx              1239 fs/userfaultfd.c 		__wake_up(&ctx->fault_wqh, TASK_NORMAL, 1, range);
ctx              1240 fs/userfaultfd.c 	spin_unlock_irq(&ctx->fault_pending_wqh.lock);
ctx              1243 fs/userfaultfd.c static __always_inline void wake_userfault(struct userfaultfd_ctx *ctx,
ctx              1265 fs/userfaultfd.c 		seq = read_seqcount_begin(&ctx->refile_seq);
ctx              1266 fs/userfaultfd.c 		need_wakeup = waitqueue_active(&ctx->fault_pending_wqh) ||
ctx              1267 fs/userfaultfd.c 			waitqueue_active(&ctx->fault_wqh);
ctx              1269 fs/userfaultfd.c 	} while (read_seqcount_retry(&ctx->refile_seq, seq));
ctx              1271 fs/userfaultfd.c 		__wake_userfault(ctx, range);
ctx              1302 fs/userfaultfd.c static int userfaultfd_register(struct userfaultfd_ctx *ctx,
ctx              1305 fs/userfaultfd.c 	struct mm_struct *mm = ctx->mm;
ctx              1384 fs/userfaultfd.c 		BUG_ON(!!cur->vm_userfaultfd_ctx.ctx ^
ctx              1425 fs/userfaultfd.c 		if (cur->vm_userfaultfd_ctx.ctx &&
ctx              1426 fs/userfaultfd.c 		    cur->vm_userfaultfd_ctx.ctx != ctx)
ctx              1447 fs/userfaultfd.c 		BUG_ON(vma->vm_userfaultfd_ctx.ctx &&
ctx              1448 fs/userfaultfd.c 		       vma->vm_userfaultfd_ctx.ctx != ctx);
ctx              1455 fs/userfaultfd.c 		if (vma->vm_userfaultfd_ctx.ctx == ctx &&
ctx              1467 fs/userfaultfd.c 				 ((struct vm_userfaultfd_ctx){ ctx }));
ctx              1489 fs/userfaultfd.c 		vma->vm_userfaultfd_ctx.ctx = ctx;
ctx              1514 fs/userfaultfd.c static int userfaultfd_unregister(struct userfaultfd_ctx *ctx,
ctx              1517 fs/userfaultfd.c 	struct mm_struct *mm = ctx->mm;
ctx              1573 fs/userfaultfd.c 		BUG_ON(!!cur->vm_userfaultfd_ctx.ctx ^
ctx              1603 fs/userfaultfd.c 		if (!vma->vm_userfaultfd_ctx.ctx)
ctx              1622 fs/userfaultfd.c 			wake_userfault(vma->vm_userfaultfd_ctx.ctx, &range);
ctx              1669 fs/userfaultfd.c static int userfaultfd_wake(struct userfaultfd_ctx *ctx,
ctx              1681 fs/userfaultfd.c 	ret = validate_range(ctx->mm, &uffdio_wake.start, uffdio_wake.len);
ctx              1694 fs/userfaultfd.c 	wake_userfault(ctx, &range);
ctx              1701 fs/userfaultfd.c static int userfaultfd_copy(struct userfaultfd_ctx *ctx,
ctx              1712 fs/userfaultfd.c 	if (READ_ONCE(ctx->mmap_changing))
ctx              1721 fs/userfaultfd.c 	ret = validate_range(ctx->mm, &uffdio_copy.dst, uffdio_copy.len);
ctx              1734 fs/userfaultfd.c 	if (mmget_not_zero(ctx->mm)) {
ctx              1735 fs/userfaultfd.c 		ret = mcopy_atomic(ctx->mm, uffdio_copy.dst, uffdio_copy.src,
ctx              1736 fs/userfaultfd.c 				   uffdio_copy.len, &ctx->mmap_changing);
ctx              1737 fs/userfaultfd.c 		mmput(ctx->mm);
ctx              1750 fs/userfaultfd.c 		wake_userfault(ctx, &range);
ctx              1757 fs/userfaultfd.c static int userfaultfd_zeropage(struct userfaultfd_ctx *ctx,
ctx              1768 fs/userfaultfd.c 	if (READ_ONCE(ctx->mmap_changing))
ctx              1777 fs/userfaultfd.c 	ret = validate_range(ctx->mm, &uffdio_zeropage.range.start,
ctx              1785 fs/userfaultfd.c 	if (mmget_not_zero(ctx->mm)) {
ctx              1786 fs/userfaultfd.c 		ret = mfill_zeropage(ctx->mm, uffdio_zeropage.range.start,
ctx              1788 fs/userfaultfd.c 				     &ctx->mmap_changing);
ctx              1789 fs/userfaultfd.c 		mmput(ctx->mm);
ctx              1802 fs/userfaultfd.c 		wake_userfault(ctx, &range);
ctx              1822 fs/userfaultfd.c static int userfaultfd_api(struct userfaultfd_ctx *ctx,
ctx              1831 fs/userfaultfd.c 	if (ctx->state != UFFD_STATE_WAIT_API)
ctx              1849 fs/userfaultfd.c 	ctx->state = UFFD_STATE_RUNNING;
ctx              1851 fs/userfaultfd.c 	ctx->features = uffd_ctx_features(features);
ctx              1866 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = file->private_data;
ctx              1868 fs/userfaultfd.c 	if (cmd != UFFDIO_API && ctx->state == UFFD_STATE_WAIT_API)
ctx              1873 fs/userfaultfd.c 		ret = userfaultfd_api(ctx, arg);
ctx              1876 fs/userfaultfd.c 		ret = userfaultfd_register(ctx, arg);
ctx              1879 fs/userfaultfd.c 		ret = userfaultfd_unregister(ctx, arg);
ctx              1882 fs/userfaultfd.c 		ret = userfaultfd_wake(ctx, arg);
ctx              1885 fs/userfaultfd.c 		ret = userfaultfd_copy(ctx, arg);
ctx              1888 fs/userfaultfd.c 		ret = userfaultfd_zeropage(ctx, arg);
ctx              1897 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = f->private_data;
ctx              1901 fs/userfaultfd.c 	spin_lock_irq(&ctx->fault_pending_wqh.lock);
ctx              1902 fs/userfaultfd.c 	list_for_each_entry(wq, &ctx->fault_pending_wqh.head, entry) {
ctx              1906 fs/userfaultfd.c 	list_for_each_entry(wq, &ctx->fault_wqh.head, entry) {
ctx              1909 fs/userfaultfd.c 	spin_unlock_irq(&ctx->fault_pending_wqh.lock);
ctx              1917 fs/userfaultfd.c 		   pending, total, UFFD_API, ctx->features,
ctx              1936 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx = (struct userfaultfd_ctx *) mem;
ctx              1938 fs/userfaultfd.c 	init_waitqueue_head(&ctx->fault_pending_wqh);
ctx              1939 fs/userfaultfd.c 	init_waitqueue_head(&ctx->fault_wqh);
ctx              1940 fs/userfaultfd.c 	init_waitqueue_head(&ctx->event_wqh);
ctx              1941 fs/userfaultfd.c 	init_waitqueue_head(&ctx->fd_wqh);
ctx              1942 fs/userfaultfd.c 	seqcount_init(&ctx->refile_seq);
ctx              1947 fs/userfaultfd.c 	struct userfaultfd_ctx *ctx;
ctx              1962 fs/userfaultfd.c 	ctx = kmem_cache_alloc(userfaultfd_ctx_cachep, GFP_KERNEL);
ctx              1963 fs/userfaultfd.c 	if (!ctx)
ctx              1966 fs/userfaultfd.c 	refcount_set(&ctx->refcount, 1);
ctx              1967 fs/userfaultfd.c 	ctx->flags = flags;
ctx              1968 fs/userfaultfd.c 	ctx->features = 0;
ctx              1969 fs/userfaultfd.c 	ctx->state = UFFD_STATE_WAIT_API;
ctx              1970 fs/userfaultfd.c 	ctx->released = false;
ctx              1971 fs/userfaultfd.c 	ctx->mmap_changing = false;
ctx              1972 fs/userfaultfd.c 	ctx->mm = current->mm;
ctx              1974 fs/userfaultfd.c 	mmgrab(ctx->mm);
ctx              1976 fs/userfaultfd.c 	fd = anon_inode_getfd("[userfaultfd]", &userfaultfd_fops, ctx,
ctx              1979 fs/userfaultfd.c 		mmdrop(ctx->mm);
ctx              1980 fs/userfaultfd.c 		kmem_cache_free(userfaultfd_ctx_cachep, ctx);
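
From userspace, the userfaultfd entries above amount to: open the descriptor, perform the UFFDIO_API handshake (mandatory while the context is still in UFFD_STATE_WAIT_API), register a range, then resolve faults read off the descriptor with UFFDIO_COPY. A compact sketch of the setup path, with the resolve step shown in a comment (error handling trimmed):

#include <linux/userfaultfd.h>
#include <sys/syscall.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdlib.h>

int main(void)
{
	long page = sysconf(_SC_PAGESIZE);
	struct uffdio_api api = { .api = UFFD_API };
	struct uffdio_register reg = { 0 };
	char *area;
	int uffd;

	uffd = syscall(__NR_userfaultfd, O_CLOEXEC | O_NONBLOCK);
	if (uffd < 0)
		exit(1);

	/* Handshake first: userfaultfd_api() above rejects every other
	 * ioctl until the context leaves UFFD_STATE_WAIT_API. */
	if (ioctl(uffd, UFFDIO_API, &api) < 0)
		exit(1);

	area = mmap(NULL, page, PROT_READ | PROT_WRITE,
		    MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	if (area == MAP_FAILED)
		exit(1);

	reg.range.start = (unsigned long)area;
	reg.range.len = page;
	reg.mode = UFFDIO_REGISTER_MODE_MISSING;
	if (ioctl(uffd, UFFDIO_REGISTER, &reg) < 0)
		exit(1);

	/*
	 * A monitor thread would now read() a struct uffd_msg from uffd
	 * when the region faults, then resolve it with something like:
	 *
	 *	struct uffdio_copy copy = {
	 *		.dst = msg.arg.pagefault.address & ~(page - 1),
	 *		.src = (unsigned long)prefilled_page,
	 *		.len = page,
	 *	};
	 *	ioctl(uffd, UFFDIO_COPY, &copy);
	 *
	 * which lands in userfaultfd_copy()/mcopy_atomic() above and
	 * wakes the faulting thread.
	 */
	return 0;
}
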
ctx               124 fs/xfs/libxfs/xfs_dir2_priv.h 		       struct dir_context *ctx, size_t bufsize);
ctx                46 fs/xfs/xfs_dir2_readdir.c 	struct dir_context	*ctx)
ctx                67 fs/xfs/xfs_dir2_readdir.c 	if (xfs_dir2_dataptr_to_db(geo, ctx->pos) > geo->datablk)
ctx                84 fs/xfs/xfs_dir2_readdir.c 	if (ctx->pos <= dot_offset) {
ctx                85 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = dot_offset & 0x7fffffff;
ctx                86 fs/xfs/xfs_dir2_readdir.c 		if (!dir_emit(ctx, ".", 1, dp->i_ino, DT_DIR))
ctx                93 fs/xfs/xfs_dir2_readdir.c 	if (ctx->pos <= dotdot_offset) {
ctx                95 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = dotdot_offset & 0x7fffffff;
ctx                96 fs/xfs/xfs_dir2_readdir.c 		if (!dir_emit(ctx, "..", 2, ino, DT_DIR))
ctx               110 fs/xfs/xfs_dir2_readdir.c 		if (ctx->pos > off) {
ctx               117 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = off & 0x7fffffff;
ctx               118 fs/xfs/xfs_dir2_readdir.c 		if (!dir_emit(ctx, (char *)sfep->name, sfep->namelen, ino,
ctx               124 fs/xfs/xfs_dir2_readdir.c 	ctx->pos = xfs_dir2_db_off_to_dataptr(geo, geo->datablk + 1, 0) &
ctx               135 fs/xfs/xfs_dir2_readdir.c 	struct dir_context	*ctx)
ctx               153 fs/xfs/xfs_dir2_readdir.c 	if (xfs_dir2_dataptr_to_db(geo, ctx->pos) > geo->datablk)
ctx               166 fs/xfs/xfs_dir2_readdir.c 	wantoff = xfs_dir2_dataptr_to_off(geo, ctx->pos);
ctx               206 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = cook & 0x7fffffff;
ctx               211 fs/xfs/xfs_dir2_readdir.c 		if (!dir_emit(ctx, (char *)dep->name, dep->namelen,
ctx               223 fs/xfs/xfs_dir2_readdir.c 	ctx->pos = xfs_dir2_db_off_to_dataptr(geo, geo->datablk + 1, 0) &
ctx               342 fs/xfs/xfs_dir2_readdir.c 	struct dir_context	*ctx,
ctx               363 fs/xfs/xfs_dir2_readdir.c 	if (ctx->pos >= XFS_DIR2_MAX_DATAPTR)
ctx               370 fs/xfs/xfs_dir2_readdir.c 	curoff = xfs_dir2_dataptr_to_byte(ctx->pos);
ctx               458 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = xfs_dir2_byte_to_dataptr(curoff) & 0x7fffffff;
ctx               459 fs/xfs/xfs_dir2_readdir.c 		if (!dir_emit(ctx, (char *)dep->name, dep->namelen,
ctx               477 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = XFS_DIR2_MAX_DATAPTR & 0x7fffffff;
ctx               479 fs/xfs/xfs_dir2_readdir.c 		ctx->pos = xfs_dir2_byte_to_dataptr(curoff) & 0x7fffffff;
ctx               497 fs/xfs/xfs_dir2_readdir.c 	struct dir_context	*ctx,
ctx               517 fs/xfs/xfs_dir2_readdir.c 		rval = xfs_dir2_sf_getdents(&args, ctx);
ctx               521 fs/xfs/xfs_dir2_readdir.c 		rval = xfs_dir2_block_getdents(&args, ctx);
ctx               523 fs/xfs/xfs_dir2_readdir.c 		rval = xfs_dir2_leaf_getdents(&args, ctx, bufsize);
ctx              1078 fs/xfs/xfs_file.c 	struct dir_context *ctx)
ctx              1098 fs/xfs/xfs_file.c 	return xfs_readdir(NULL, ip, ctx, bufsize);
ctx               392 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx = cil->xc_ctx;
ctx               412 fs/xfs/xfs_log_cil.c 	ctx->nvecs += diff_iovecs;
ctx               416 fs/xfs/xfs_log_cil.c 		list_splice_init(&tp->t_busy, &ctx->busy_extents);
ctx               425 fs/xfs/xfs_log_cil.c 	if (ctx->ticket->t_curr_res == 0) {
ctx               426 fs/xfs/xfs_log_cil.c 		ctx_res = ctx->ticket->t_unit_res;
ctx               427 fs/xfs/xfs_log_cil.c 		ctx->ticket->t_curr_res = ctx_res;
ctx               433 fs/xfs/xfs_log_cil.c 	if (len > 0 && (ctx->space_used / iclog_space !=
ctx               434 fs/xfs/xfs_log_cil.c 				(ctx->space_used + len) / iclog_space)) {
ctx               438 fs/xfs/xfs_log_cil.c 		ctx->ticket->t_unit_res += split_res;
ctx               439 fs/xfs/xfs_log_cil.c 		ctx->ticket->t_curr_res += split_res;
ctx               444 fs/xfs/xfs_log_cil.c 	ctx->space_used += len;
ctx               504 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx =
ctx               506 fs/xfs/xfs_log_cil.c 	struct xfs_mount	*mp = ctx->cil->xc_log->l_mp;
ctx               508 fs/xfs/xfs_log_cil.c 	xfs_extent_busy_clear(mp, &ctx->busy_extents, false);
ctx               509 fs/xfs/xfs_log_cil.c 	kmem_free(ctx);
ctx               521 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx = bio->bi_private;
ctx               523 fs/xfs/xfs_log_cil.c 	INIT_WORK(&ctx->discard_endio_work, xlog_discard_endio_work);
ctx               524 fs/xfs/xfs_log_cil.c 	queue_work(xfs_discard_wq, &ctx->discard_endio_work);
ctx               531 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx)
ctx               533 fs/xfs/xfs_log_cil.c 	struct list_head	*list = &ctx->busy_extents;
ctx               561 fs/xfs/xfs_log_cil.c 		bio->bi_private = ctx;
ctx               565 fs/xfs/xfs_log_cil.c 		xlog_discard_endio_work(&ctx->discard_endio_work);
ctx               577 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx,
ctx               580 fs/xfs/xfs_log_cil.c 	struct xfs_mount	*mp = ctx->cil->xc_log->l_mp;
ctx               590 fs/xfs/xfs_log_cil.c 		spin_lock(&ctx->cil->xc_push_lock);
ctx               591 fs/xfs/xfs_log_cil.c 		wake_up_all(&ctx->cil->xc_commit_wait);
ctx               592 fs/xfs/xfs_log_cil.c 		spin_unlock(&ctx->cil->xc_push_lock);
ctx               595 fs/xfs/xfs_log_cil.c 	xfs_trans_committed_bulk(ctx->cil->xc_log->l_ailp, ctx->lv_chain,
ctx               596 fs/xfs/xfs_log_cil.c 					ctx->start_lsn, abort);
ctx               598 fs/xfs/xfs_log_cil.c 	xfs_extent_busy_sort(&ctx->busy_extents);
ctx               599 fs/xfs/xfs_log_cil.c 	xfs_extent_busy_clear(mp, &ctx->busy_extents,
ctx               602 fs/xfs/xfs_log_cil.c 	spin_lock(&ctx->cil->xc_push_lock);
ctx               603 fs/xfs/xfs_log_cil.c 	list_del(&ctx->committing);
ctx               604 fs/xfs/xfs_log_cil.c 	spin_unlock(&ctx->cil->xc_push_lock);
ctx               606 fs/xfs/xfs_log_cil.c 	xlog_cil_free_logvec(ctx->lv_chain);
ctx               608 fs/xfs/xfs_log_cil.c 	if (!list_empty(&ctx->busy_extents))
ctx               609 fs/xfs/xfs_log_cil.c 		xlog_discard_busy_extents(mp, ctx);
ctx               611 fs/xfs/xfs_log_cil.c 		kmem_free(ctx);
ctx               619 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx;
ctx               621 fs/xfs/xfs_log_cil.c 	while ((ctx = list_first_entry_or_null(list,
ctx               623 fs/xfs/xfs_log_cil.c 		list_del(&ctx->iclog_entry);
ctx               624 fs/xfs/xfs_log_cil.c 		xlog_cil_committed(ctx, aborted);
ctx               648 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx;
ctx               667 fs/xfs/xfs_log_cil.c 	ctx = cil->xc_ctx;
ctx               671 fs/xfs/xfs_log_cil.c 	ASSERT(push_seq <= ctx->sequence);
ctx               715 fs/xfs/xfs_log_cil.c 	list_add(&ctx->committing, &cil->xc_committing);
ctx               732 fs/xfs/xfs_log_cil.c 		if (!ctx->lv_chain)
ctx               733 fs/xfs/xfs_log_cil.c 			ctx->lv_chain = item->li_lv;
ctx               749 fs/xfs/xfs_log_cil.c 	new_ctx->sequence = ctx->sequence + 1;
ctx               792 fs/xfs/xfs_log_cil.c 	tic = ctx->ticket;
ctx               804 fs/xfs/xfs_log_cil.c 	lvhdr.lv_next = ctx->lv_chain;
ctx               806 fs/xfs/xfs_log_cil.c 	error = xlog_write(log, &lvhdr, tic, &ctx->start_lsn, NULL, 0);
ctx               831 fs/xfs/xfs_log_cil.c 		if (new_ctx->sequence >= ctx->sequence)
ctx               856 fs/xfs/xfs_log_cil.c 	list_add_tail(&ctx->iclog_entry, &commit_iclog->ic_callbacks);
ctx               865 fs/xfs/xfs_log_cil.c 	ctx->commit_lsn = commit_lsn;
ctx               881 fs/xfs/xfs_log_cil.c 	xlog_cil_committed(ctx, true);
ctx              1062 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx	*ctx;
ctx              1082 fs/xfs/xfs_log_cil.c 	list_for_each_entry(ctx, &cil->xc_committing, committing) {
ctx              1090 fs/xfs/xfs_log_cil.c 		if (ctx->sequence > sequence)
ctx              1092 fs/xfs/xfs_log_cil.c 		if (!ctx->commit_lsn) {
ctx              1100 fs/xfs/xfs_log_cil.c 		if (ctx->sequence != sequence)
ctx              1103 fs/xfs/xfs_log_cil.c 		commit_lsn = ctx->commit_lsn;
ctx              1155 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx *ctx;
ctx              1160 fs/xfs/xfs_log_cil.c 	ctx = lip->li_mountp->m_log->l_cilp->xc_ctx;
ctx              1167 fs/xfs/xfs_log_cil.c 	if (XFS_LSN_CMP(lip->li_seq, ctx->sequence) != 0)
ctx              1180 fs/xfs/xfs_log_cil.c 	struct xfs_cil_ctx *ctx;
ctx              1186 fs/xfs/xfs_log_cil.c 	ctx = kmem_zalloc(sizeof(*ctx), KM_MAYFAIL);
ctx              1187 fs/xfs/xfs_log_cil.c 	if (!ctx) {
ctx              1200 fs/xfs/xfs_log_cil.c 	INIT_LIST_HEAD(&ctx->committing);
ctx              1201 fs/xfs/xfs_log_cil.c 	INIT_LIST_HEAD(&ctx->busy_extents);
ctx              1202 fs/xfs/xfs_log_cil.c 	ctx->sequence = 1;
ctx              1203 fs/xfs/xfs_log_cil.c 	ctx->cil = cil;
ctx              1204 fs/xfs/xfs_log_cil.c 	cil->xc_ctx = ctx;
ctx              1205 fs/xfs/xfs_log_cil.c 	cil->xc_current_sequence = ctx->sequence;
ctx                40 fs/xfs/xfs_trace.h 	TP_PROTO(struct xfs_attr_list_context *ctx),
ctx                41 fs/xfs/xfs_trace.h 	TP_ARGS(ctx),
ctx                56 fs/xfs/xfs_trace.h 		__entry->dev = VFS_I(ctx->dp)->i_sb->s_dev;
ctx                57 fs/xfs/xfs_trace.h 		__entry->ino = ctx->dp->i_ino;
ctx                58 fs/xfs/xfs_trace.h 		__entry->hashval = ctx->cursor->hashval;
ctx                59 fs/xfs/xfs_trace.h 		__entry->blkno = ctx->cursor->blkno;
ctx                60 fs/xfs/xfs_trace.h 		__entry->offset = ctx->cursor->offset;
ctx                61 fs/xfs/xfs_trace.h 		__entry->alist = ctx->alist;
ctx                62 fs/xfs/xfs_trace.h 		__entry->bufsize = ctx->bufsize;
ctx                63 fs/xfs/xfs_trace.h 		__entry->count = ctx->count;
ctx                64 fs/xfs/xfs_trace.h 		__entry->firstu = ctx->firstu;
ctx                65 fs/xfs/xfs_trace.h 		__entry->flags = ctx->flags;
ctx                86 fs/xfs/xfs_trace.h 	TP_PROTO(struct xfs_attr_list_context *ctx), \
ctx                87 fs/xfs/xfs_trace.h 	TP_ARGS(ctx))
ctx               163 fs/xfs/xfs_trace.h 	TP_PROTO(struct xfs_attr_list_context *ctx,
ctx               165 fs/xfs/xfs_trace.h 	TP_ARGS(ctx, btree),
ctx               182 fs/xfs/xfs_trace.h 		__entry->dev = VFS_I(ctx->dp)->i_sb->s_dev;
ctx               183 fs/xfs/xfs_trace.h 		__entry->ino = ctx->dp->i_ino;
ctx               184 fs/xfs/xfs_trace.h 		__entry->hashval = ctx->cursor->hashval;
ctx               185 fs/xfs/xfs_trace.h 		__entry->blkno = ctx->cursor->blkno;
ctx               186 fs/xfs/xfs_trace.h 		__entry->offset = ctx->cursor->offset;
ctx               187 fs/xfs/xfs_trace.h 		__entry->alist = ctx->alist;
ctx               188 fs/xfs/xfs_trace.h 		__entry->bufsize = ctx->bufsize;
ctx               189 fs/xfs/xfs_trace.h 		__entry->count = ctx->count;
ctx               190 fs/xfs/xfs_trace.h 		__entry->firstu = ctx->firstu;
ctx               191 fs/xfs/xfs_trace.h 		__entry->flags = ctx->flags;
ctx                68 include/crypto/aes.h int aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx                77 include/crypto/aes.h void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
ctx                85 include/crypto/aes.h void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in);
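
The three include/crypto/aes.h prototypes above are the generic AES library interface; a minimal sketch of how a caller might chain them (key, in, and the surrounding function are hypothetical, error handling abbreviated):

	#include <crypto/aes.h>

	struct crypto_aes_ctx aes;
	u8 out[AES_BLOCK_SIZE];

	/* expand a 128-bit key, then encrypt a single 16-byte block */
	if (aes_expandkey(&aes, key, AES_KEYSIZE_128) == 0)
		aes_encrypt(&aes, out, in);
	memzero_explicit(&aes, sizeof(aes));	/* scrub the round keys */
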
ctx                20 include/crypto/arc4.h int arc4_setkey(struct arc4_ctx *ctx, const u8 *in_key, unsigned int key_len);
ctx                21 include/crypto/arc4.h void arc4_crypt(struct arc4_ctx *ctx, u8 *out, const u8 *in, unsigned int len);
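
Those two prototypes are the entire arc4 library API; a hedged usage sketch (key, src, dst, and the lengths are placeholders):

	#include <crypto/arc4.h>

	struct arc4_ctx rc4;

	arc4_setkey(&rc4, key, key_len);	/* schedule the RC4 state */
	arc4_crypt(&rc4, dst, src, len);	/* XOR keystream: same call encrypts and decrypts */
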
ctx                21 include/crypto/cast5.h void __cast5_encrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src);
ctx                22 include/crypto/cast5.h void __cast5_decrypt(struct cast5_ctx *ctx, u8 *dst, const u8 *src);
ctx                18 include/crypto/cast6.h int __cast6_setkey(struct cast6_ctx *ctx, const u8 *key,
ctx                22 include/crypto/cast6.h void __cast6_encrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src);
ctx                23 include/crypto/cast6.h void __cast6_decrypt(struct cast6_ctx *ctx, u8 *dst, const u8 *src);
ctx                44 include/crypto/chacha.h void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
ctx                27 include/crypto/des.h void des_encrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src);
ctx                28 include/crypto/des.h void des_decrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src);
ctx                42 include/crypto/des.h int des_expand_key(struct des_ctx *ctx, const u8 *key, unsigned int keylen);
ctx                54 include/crypto/des.h int des3_ede_expand_key(struct des3_ede_ctx *ctx, const u8 *key,
ctx               157 include/crypto/hash.h #define SHASH_DESC_ON_STACK(shash, ctx)				  \
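
SHASH_DESC_ON_STACK sizes a struct shash_desc plus the transform's request context on the stack; a sketch of the usual one-shot digest pattern around it ("sha256" and the data/len buffers are assumptions, error handling elided):

	#include <crypto/hash.h>

	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
	SHASH_DESC_ON_STACK(desc, tfm);
	u8 digest[32];

	if (!IS_ERR(tfm)) {
		desc->tfm = tfm;
		crypto_shash_digest(desc, data, len, digest);	/* init + update + final */
		shash_desc_zero(desc);
		crypto_free_shash(tfm);
	}
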
ctx               182 include/crypto/if_alg.h 	struct af_alg_ctx *ctx = ask->private;
ctx               185 include/crypto/if_alg.h 			  ctx->used, 0);
ctx               208 include/crypto/if_alg.h 	struct af_alg_ctx *ctx = ask->private;
ctx               211 include/crypto/if_alg.h 		     atomic_read(&ctx->rcvused), 0);
ctx                30 include/crypto/internal/scompress.h 	void (*free_ctx)(struct crypto_scomp *tfm, void *ctx);
ctx                33 include/crypto/internal/scompress.h 			void *ctx);
ctx                36 include/crypto/internal/scompress.h 			  void *ctx);
ctx                71 include/crypto/internal/scompress.h 					 void *ctx)
ctx                73 include/crypto/internal/scompress.h 	return crypto_scomp_alg(tfm)->free_ctx(tfm, ctx);
ctx                78 include/crypto/internal/scompress.h 					u8 *dst, unsigned int *dlen, void *ctx)
ctx                80 include/crypto/internal/scompress.h 	return crypto_scomp_alg(tfm)->compress(tfm, src, slen, dst, dlen, ctx);
ctx                86 include/crypto/internal/scompress.h 					  void *ctx)
ctx                89 include/crypto/internal/scompress.h 						 ctx);
ctx               245 include/crypto/internal/skcipher.h 	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
ctx               247 include/crypto/internal/skcipher.h 	return ctx->cipher;
ctx                21 include/crypto/serpent.h int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
ctx                25 include/crypto/serpent.h void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src);
ctx                26 include/crypto/serpent.h void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src);
ctx                25 include/crypto/sm4.h int crypto_sm4_expand_key(struct crypto_sm4_ctx *ctx, const u8 *in_key,
ctx                21 include/crypto/twofish.h int __twofish_setkey(struct twofish_ctx *ctx, const u8 *key,
ctx               117 include/drm/drm_atomic_helper.h 				   struct drm_modeset_acquire_ctx *ctx);
ctx               119 include/drm/drm_atomic_helper.h 				    struct drm_modeset_acquire_ctx *ctx);
ctx               121 include/drm/drm_atomic_helper.h 				 struct drm_modeset_acquire_ctx *ctx);
ctx               124 include/drm/drm_atomic_helper.h 				  struct drm_modeset_acquire_ctx *ctx);
ctx               128 include/drm/drm_atomic_helper.h 				  struct drm_modeset_acquire_ctx *ctx);
ctx               131 include/drm/drm_atomic_helper.h 					      struct drm_modeset_acquire_ctx *ctx);
ctx               139 include/drm/drm_atomic_helper.h 				struct drm_modeset_acquire_ctx *ctx);
ctx               146 include/drm/drm_atomic_helper.h 				struct drm_modeset_acquire_ctx *ctx);
ctx               150 include/drm/drm_atomic_helper.h 				       struct drm_modeset_acquire_ctx *ctx);
ctx               483 include/drm/drm_crtc.h 			 struct drm_modeset_acquire_ctx *ctx);
ctx               509 include/drm/drm_crtc.h 			  struct drm_modeset_acquire_ctx *ctx);
ctx               568 include/drm/drm_crtc.h 			 struct drm_modeset_acquire_ctx *ctx);
ctx               587 include/drm/drm_crtc.h 				struct drm_modeset_acquire_ctx *ctx);
ctx                48 include/drm/drm_crtc_helper.h 			       struct drm_modeset_acquire_ctx *ctx);
ctx               901 include/drm/drm_modeset_helper_vtables.h 			  struct drm_modeset_acquire_ctx *ctx,
ctx                91 include/drm/drm_modeset_lock.h void drm_modeset_acquire_init(struct drm_modeset_acquire_ctx *ctx,
ctx                93 include/drm/drm_modeset_lock.h void drm_modeset_acquire_fini(struct drm_modeset_acquire_ctx *ctx);
ctx                94 include/drm/drm_modeset_lock.h void drm_modeset_drop_locks(struct drm_modeset_acquire_ctx *ctx);
ctx                95 include/drm/drm_modeset_lock.h int drm_modeset_backoff(struct drm_modeset_acquire_ctx *ctx);
ctx               118 include/drm/drm_modeset_lock.h 		struct drm_modeset_acquire_ctx *ctx);
ctx               131 include/drm/drm_modeset_lock.h 			     struct drm_modeset_acquire_ctx *ctx);
ctx               157 include/drm/drm_modeset_lock.h #define DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, flags, ret)		\
ctx               158 include/drm/drm_modeset_lock.h 	drm_modeset_acquire_init(&ctx, flags);				\
ctx               160 include/drm/drm_modeset_lock.h 	ret = drm_modeset_lock_all_ctx(dev, &ctx);			\
ctx               182 include/drm/drm_modeset_lock.h #define DRM_MODESET_LOCK_ALL_END(ctx, ret)				\
ctx               185 include/drm/drm_modeset_lock.h 		ret = drm_modeset_backoff(&ctx);			\
ctx               189 include/drm/drm_modeset_lock.h 	drm_modeset_drop_locks(&ctx);					\
ctx               190 include/drm/drm_modeset_lock.h 	drm_modeset_acquire_fini(&ctx);
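
The BEGIN/END pair above hides the drm_modeset_backoff() retry loop behind hidden labels; a sketch of the intended call site (per the macro bodies listed here, END takes only ctx and ret; dev is a hypothetical struct drm_device pointer):

	struct drm_modeset_acquire_ctx ctx;
	int ret;

	DRM_MODESET_LOCK_ALL_BEGIN(dev, ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE, ret);
	/* all modeset locks held here; -EDEADLK backoff is retried transparently */
	DRM_MODESET_LOCK_ALL_END(ctx, ret);
	if (ret)
		return ret;
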
ctx               270 include/drm/drm_plane.h 			    struct drm_modeset_acquire_ctx *ctx);
ctx               288 include/drm/drm_plane.h 			     struct drm_modeset_acquire_ctx *ctx);
ctx                16 include/drm/drm_probe_helper.h 			    struct drm_modeset_acquire_ctx *ctx,
ctx               359 include/drm/ttm/ttm_bo_api.h 		    struct ttm_operation_ctx *ctx);
ctx               526 include/drm/ttm/ttm_bo_api.h 			 struct ttm_operation_ctx *ctx,
ctx               766 include/drm/ttm/ttm_bo_api.h 			struct ttm_operation_ctx *ctx);
ctx               248 include/drm/ttm/ttm_bo_driver.h 			struct ttm_operation_ctx *ctx);
ctx               311 include/drm/ttm/ttm_bo_driver.h 		    struct ttm_operation_ctx *ctx,
ctx               583 include/drm/ttm/ttm_bo_driver.h 		     struct ttm_operation_ctx *ctx);
ctx               816 include/drm/ttm/ttm_bo_driver.h 		    struct ttm_operation_ctx *ctx,
ctx               838 include/drm/ttm/ttm_bo_driver.h 		       struct ttm_operation_ctx *ctx,
ctx                86 include/drm/ttm/ttm_memory.h 				struct ttm_operation_ctx *ctx);
ctx                91 include/drm/ttm/ttm_memory.h 				     struct ttm_operation_ctx *ctx);
ctx                97 include/drm/ttm/ttm_memory.h 			uint64_t num_pages, struct ttm_operation_ctx *ctx);
ctx                50 include/drm/ttm/ttm_page_alloc.h int ttm_pool_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ctx                65 include/drm/ttm/ttm_page_alloc.h 				struct ttm_operation_ctx *ctx);
ctx                94 include/drm/ttm/ttm_page_alloc.h 			struct ttm_operation_ctx *ctx);
ctx               112 include/drm/ttm/ttm_page_alloc.h 				struct ttm_operation_ctx *ctx)
ctx               187 include/drm/ttm/ttm_tt.h 		struct ttm_operation_ctx *ctx);
ctx               239 include/drm/ttm/ttm_tt.h int ttm_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ctx               268 include/drm/ttm/ttm_tt.h int ttm_agp_tt_populate(struct ttm_tt *ttm, struct ttm_operation_ctx *ctx);
ctx               102 include/kvm/arm_arch_timer.h #define arch_timer_ctx_index(ctx)	((ctx) - vcpu_timer((ctx)->vcpu)->timers)
ctx                22 include/linux/audit.h 	char		ctx[0];
ctx               134 include/linux/audit.h void audit_log(struct audit_context *ctx, gfp_t gfp_mask, int type,
ctx               137 include/linux/audit.h extern struct audit_buffer *audit_log_start(struct audit_context *ctx, gfp_t gfp_mask, int type);
ctx               189 include/linux/audit.h void audit_log(struct audit_context *ctx, gfp_t gfp_mask, int type,
ctx               192 include/linux/audit.h static inline struct audit_buffer *audit_log_start(struct audit_context *ctx,
ctx               282 include/linux/audit.h static inline void audit_set_context(struct task_struct *task, struct audit_context *ctx)
ctx               284 include/linux/audit.h 	task->audit_context = ctx;
ctx               535 include/linux/audit.h static inline void audit_set_context(struct task_struct *task, struct audit_context *ctx)
ctx               239 include/linux/blk-cgroup.h 		   char *input, struct blkg_conf_ctx *ctx);
ctx               240 include/linux/blk-cgroup.h void blkg_conf_finish(struct blkg_conf_ctx *ctx);
ctx               363 include/linux/blk-mq.h #define hctx_for_each_ctx(hctx, ctx, i)					\
ctx               365 include/linux/blk-mq.h 	     ({ ctx = (hctx)->ctxs[(i)]; 1; }); (i)++)
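
hctx_for_each_ctx() walks the software (per-CPU) contexts mapped onto one hardware queue context; a sketch of its use (hctx is assumed to be a valid struct blk_mq_hw_ctx pointer):

	struct blk_mq_ctx *ctx;
	unsigned int i;

	hctx_for_each_ctx(hctx, ctx, i) {
		/* inspect each software queue feeding this hardware queue */
	}
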
ctx               497 include/linux/bpf.h 		     void *ctx, u64 ctx_size, bpf_ctx_copy_t ctx_copy);
ctx               538 include/linux/bpf.h #define __BPF_PROG_RUN_ARRAY(array, ctx, func, check_non_null)	\
ctx               552 include/linux/bpf.h 			_ret &= func(_prog, ctx);	\
ctx               583 include/linux/bpf.h #define BPF_PROG_CGROUP_INET_EGRESS_RUN_ARRAY(array, ctx, func)		\
ctx               597 include/linux/bpf.h 			ret = func(_prog, ctx);		\
ctx               611 include/linux/bpf.h #define BPF_PROG_RUN_ARRAY(array, ctx, func)		\
ctx               612 include/linux/bpf.h 	__BPF_PROG_RUN_ARRAY(array, ctx, func, false)
ctx               614 include/linux/bpf.h #define BPF_PROG_RUN_ARRAY_CHECK(array, ctx, func)	\
ctx               615 include/linux/bpf.h 	__BPF_PROG_RUN_ARRAY(array, ctx, func, true)
ctx               287 include/linux/ccp.h 	struct scatterlist *ctx;
ctx               113 include/linux/dma-resv.h 				struct ww_acquire_ctx *ctx)
ctx               115 include/linux/dma-resv.h 	return ww_mutex_lock(&obj->lock, ctx);
ctx               134 include/linux/dma-resv.h 					      struct ww_acquire_ctx *ctx)
ctx               136 include/linux/dma-resv.h 	return ww_mutex_lock_interruptible(&obj->lock, ctx);
ctx               149 include/linux/dma-resv.h 				      struct ww_acquire_ctx *ctx)
ctx               151 include/linux/dma-resv.h 	ww_mutex_lock_slow(&obj->lock, ctx);
ctx               165 include/linux/dma-resv.h 						   struct ww_acquire_ctx *ctx)
ctx               167 include/linux/dma-resv.h 	return ww_mutex_lock_slow_interruptible(&obj->lock, ctx);
ctx               209 include/linux/dma-resv.h 	return READ_ONCE(obj->lock.ctx);
ctx                37 include/linux/eventfd.h void eventfd_ctx_put(struct eventfd_ctx *ctx);
ctx                41 include/linux/eventfd.h __u64 eventfd_signal(struct eventfd_ctx *ctx, __u64 n);
ctx                42 include/linux/eventfd.h int eventfd_ctx_remove_wait_queue(struct eventfd_ctx *ctx, wait_queue_entry_t *wait,
ctx                64 include/linux/eventfd.h static inline int eventfd_signal(struct eventfd_ctx *ctx, int n)
ctx                69 include/linux/eventfd.h static inline void eventfd_ctx_put(struct eventfd_ctx *ctx)
ctx                74 include/linux/eventfd.h static inline int eventfd_ctx_remove_wait_queue(struct eventfd_ctx *ctx,
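
A hedged kernel-side sketch of signalling an eventfd handed in from userspace; eventfd_ctx_fdget() lives in the same header but is not excerpted above, and fd is a hypothetical file descriptor supplied by the user:

	#include <linux/eventfd.h>

	struct eventfd_ctx *trigger = eventfd_ctx_fdget(fd);

	if (!IS_ERR(trigger)) {
		eventfd_signal(trigger, 1);	/* bump the counter, wake poll/read waiters */
		eventfd_ctx_put(trigger);
	}
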
ctx               539 include/linux/filter.h 	unsigned int		(*bpf_func)(const void *ctx,
ctx               556 include/linux/filter.h #define BPF_PROG_RUN(prog, ctx)	({				\
ctx               562 include/linux/filter.h 		ret = (*(prog)->bpf_func)(ctx, (prog)->insnsi);	\
ctx               569 include/linux/filter.h 		ret = (*(prog)->bpf_func)(ctx, (prog)->insnsi);	\
ctx               456 include/linux/firewire.h int fw_iso_context_set_channels(struct fw_iso_context *ctx, u64 *channels);
ctx               457 include/linux/firewire.h int fw_iso_context_queue(struct fw_iso_context *ctx,
ctx               461 include/linux/firewire.h void fw_iso_context_queue_flush(struct fw_iso_context *ctx);
ctx               462 include/linux/firewire.h int fw_iso_context_flush_completions(struct fw_iso_context *ctx);
ctx               463 include/linux/firewire.h int fw_iso_context_start(struct fw_iso_context *ctx,
ctx               465 include/linux/firewire.h int fw_iso_context_stop(struct fw_iso_context *ctx);
ctx               466 include/linux/firewire.h void fw_iso_context_destroy(struct fw_iso_context *ctx);
ctx              3546 include/linux/fs.h static inline bool dir_emit(struct dir_context *ctx,
ctx              3550 include/linux/fs.h 	return ctx->actor(ctx, name, namelen, ctx->pos, ino, type) == 0;
ctx              3552 include/linux/fs.h static inline bool dir_emit_dot(struct file *file, struct dir_context *ctx)
ctx              3554 include/linux/fs.h 	return ctx->actor(ctx, ".", 1, ctx->pos,
ctx              3557 include/linux/fs.h static inline bool dir_emit_dotdot(struct file *file, struct dir_context *ctx)
ctx              3559 include/linux/fs.h 	return ctx->actor(ctx, "..", 2, ctx->pos,
ctx              3562 include/linux/fs.h static inline bool dir_emit_dots(struct file *file, struct dir_context *ctx)
ctx              3564 include/linux/fs.h 	if (ctx->pos == 0) {
ctx              3565 include/linux/fs.h 		if (!dir_emit_dot(file, ctx))
ctx              3567 include/linux/fs.h 		ctx->pos = 1;
ctx              3569 include/linux/fs.h 	if (ctx->pos == 1) {
ctx              3570 include/linux/fs.h 		if (!dir_emit_dotdot(file, ctx))
ctx              3572 include/linux/fs.h 		ctx->pos = 2;
ctx               247 include/linux/fscrypt.h extern void fscrypt_enqueue_decrypt_bio(struct fscrypt_ctx *ctx,
ctx               303 include/linux/fscrypt.h static inline void fscrypt_release_ctx(struct fscrypt_ctx *ctx)
ctx               487 include/linux/fscrypt.h static inline void fscrypt_enqueue_decrypt_bio(struct fscrypt_ctx *ctx,
ctx                74 include/linux/fsl/bestcomm/bestcomm_priv.h 	u32				*ctx;
ctx               169 include/linux/if_team.h 	int (*getter)(struct team *team, struct team_gsetter_ctx *ctx);
ctx               170 include/linux/if_team.h 	int (*setter)(struct team *team, struct team_gsetter_ctx *ctx);
ctx              1619 include/linux/jbd2.h 		char ctx[JBD_MAX_CHECKSUM_SIZE];
ctx              1627 include/linux/jbd2.h 	*(u32 *)desc.ctx = crc;
ctx              1632 include/linux/jbd2.h 	return *(u32 *)desc.ctx;
ctx              1519 include/linux/lsm_hooks.h 					const struct qstr *name, void **ctx,
ctx              1698 include/linux/lsm_hooks.h 	int (*inode_notifysecctx)(struct inode *inode, void *ctx, u32 ctxlen);
ctx              1699 include/linux/lsm_hooks.h 	int (*inode_setsecctx)(struct dentry *dentry, void *ctx, u32 ctxlen);
ctx              1700 include/linux/lsm_hooks.h 	int (*inode_getsecctx)(struct inode *inode, void **ctx, u32 *ctxlen);
ctx              1775 include/linux/lsm_hooks.h 	void (*xfrm_policy_free_security)(struct xfrm_sec_ctx *ctx);
ctx              1776 include/linux/lsm_hooks.h 	int (*xfrm_policy_delete_security)(struct xfrm_sec_ctx *ctx);
ctx              1784 include/linux/lsm_hooks.h 	int (*xfrm_policy_lookup)(struct xfrm_sec_ctx *ctx, u32 fl_secid,
ctx                55 include/linux/mailbox/brcm-message.h 	void *ctx;
ctx               907 include/linux/mlx5/driver.h 	struct mlx5_async_ctx *ctx;
ctx               912 include/linux/mlx5/driver.h 			     struct mlx5_async_ctx *ctx);
ctx               913 include/linux/mlx5/driver.h void mlx5_cmd_cleanup_async_ctx(struct mlx5_async_ctx *ctx);
ctx               914 include/linux/mlx5/driver.h int mlx5_cmd_exec_cb(struct mlx5_async_ctx *ctx, void *in, int in_size,
ctx              5894 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_tisc_bits ctx;
ctx              5933 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_tirc_bits ctx;
ctx              5962 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_sqc_bits ctx;
ctx              6034 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_rqtc_bits ctx;
ctx              6069 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_rqc_bits ctx;
ctx              6105 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_rmpc_bits ctx;
ctx              7279 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_tisc_bits ctx;
ctx              7303 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_tirc_bits ctx;
ctx              7355 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_sqc_bits ctx;
ctx              7433 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_rqc_bits ctx;
ctx              7457 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_rmpc_bits ctx;
ctx              9764 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_lagc_bits ctx;
ctx              9786 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_lagc_bits ctx;
ctx              9795 include/linux/mlx5/mlx5_ifc.h 	struct mlx5_ifc_lagc_bits ctx;
ctx               279 include/linux/mm_types.h 	struct userfaultfd_ctx *ctx;
ctx               602 include/linux/mtd/rawnand.h 	} ctx;
ctx               630 include/linux/mtd/rawnand.h 		.ctx.cmd.opcode = id,					\
ctx               637 include/linux/mtd/rawnand.h 		.ctx.addr = {						\
ctx               647 include/linux/mtd/rawnand.h 		.ctx.data = {						\
ctx               658 include/linux/mtd/rawnand.h 		.ctx.data = {						\
ctx               669 include/linux/mtd/rawnand.h 		.ctx.data = {						\
ctx               680 include/linux/mtd/rawnand.h 		.ctx.data = {						\
ctx               691 include/linux/mtd/rawnand.h 		.ctx.waitrdy.timeout_ms = tout_ms,			\
ctx               758 include/linux/mtd/rawnand.h 	} ctx;
ctx               771 include/linux/mtd/rawnand.h 		.ctx.addr.maxcycles = _maxcycles,		\
ctx               778 include/linux/mtd/rawnand.h 		.ctx.data.maxlen = _maxlen,			\
ctx               785 include/linux/mtd/rawnand.h 		.ctx.data.maxlen = _maxlen,			\
ctx               884 include/linux/mtd/rawnand.h 			 instr->ctx.cmd.opcode);
ctx               888 include/linux/mtd/rawnand.h 			 instr->ctx.addr.naddrs,
ctx               889 include/linux/mtd/rawnand.h 			 instr->ctx.addr.naddrs < 64 ?
ctx               890 include/linux/mtd/rawnand.h 			 instr->ctx.addr.naddrs : 64,
ctx               891 include/linux/mtd/rawnand.h 			 instr->ctx.addr.addrs);
ctx               895 include/linux/mtd/rawnand.h 			 instr->ctx.data.len,
ctx               896 include/linux/mtd/rawnand.h 			 instr->ctx.data.force_8bit ?
ctx               901 include/linux/mtd/rawnand.h 			 instr->ctx.data.len,
ctx               902 include/linux/mtd/rawnand.h 			 instr->ctx.data.force_8bit ?
ctx               907 include/linux/mtd/rawnand.h 			 instr->ctx.waitrdy.timeout_ms);
ctx               196 include/linux/netlink.h 		u8		ctx[48];
ctx               393 include/linux/nfs_fs.h extern struct nfs_open_context *get_nfs_open_context(struct nfs_open_context *ctx);
ctx               394 include/linux/nfs_fs.h extern void put_nfs_open_context(struct nfs_open_context *ctx);
ctx               397 include/linux/nfs_fs.h extern void nfs_inode_attach_open_context(struct nfs_open_context *ctx);
ctx               398 include/linux/nfs_fs.h extern void nfs_file_set_open_context(struct file *filp, struct nfs_open_context *ctx);
ctx               400 include/linux/nfs_fs.h extern struct nfs_lock_context *nfs_get_lock_context(struct nfs_open_context *ctx);
ctx               469 include/linux/nfs_fs.h 		struct nfs_open_context *ctx =
ctx               471 include/linux/nfs_fs.h 		if (ctx)
ctx               472 include/linux/nfs_fs.h 			return ctx->cred;
ctx               116 include/linux/nfs_page.h extern	struct nfs_page *nfs_create_request(struct nfs_open_context *ctx,
ctx               255 include/linux/nfs_xdr.h 	struct nfs_open_context *ctx;
ctx              1700 include/linux/nfs_xdr.h 	void	(*close_context)(struct nfs_open_context *ctx, int);
ctx              1702 include/linux/nfs_xdr.h 				struct nfs_open_context *ctx,
ctx               194 include/linux/ntb.h 	void (*link_event)(void *ctx);
ctx               195 include/linux/ntb.h 	void (*db_event)(void *ctx, int db_vector);
ctx               196 include/linux/ntb.h 	void (*msg_event)(void *ctx);
ctx               422 include/linux/ntb.h 	void				*ctx;
ctx               502 include/linux/ntb.h int ntb_set_ctx(struct ntb_dev *ntb, void *ctx,
ctx              1643 include/linux/ntb.h int ntb_msi_init(struct ntb_dev *ntb, void (*desc_changed)(void *ctx));
ctx              1660 include/linux/ntb.h 			       void (*desc_changed)(void *ctx))
ctx               405 include/linux/perf_event.h 	void (*sched_task)		(struct perf_event_context *ctx,
ctx               645 include/linux/perf_event.h 	struct perf_event_context	*ctx;
ctx               806 include/linux/perf_event.h 	struct perf_event_context	ctx;
ctx               868 include/linux/perf_event.h perf_cgroup_from_task(struct task_struct *task, struct perf_event_context *ctx)
ctx               871 include/linux/perf_event.h 					   ctx ? lockdep_is_held(&ctx->lock)
ctx              1069 include/linux/perf_event.h 	return event->ctx->pmu->task_ctx_nr == perf_sw_context;
ctx              1202 include/linux/perf_event.h static inline int perf_callchain_store_context(struct perf_callchain_entry_ctx *ctx, u64 ip)
ctx              1204 include/linux/perf_event.h 	if (ctx->contexts < sysctl_perf_event_max_contexts_per_stack) {
ctx              1205 include/linux/perf_event.h 		struct perf_callchain_entry *entry = ctx->entry;
ctx              1207 include/linux/perf_event.h 		++ctx->contexts;
ctx              1210 include/linux/perf_event.h 		ctx->contexts_maxed = true;
ctx              1215 include/linux/perf_event.h static inline int perf_callchain_store(struct perf_callchain_entry_ctx *ctx, u64 ip)
ctx              1217 include/linux/perf_event.h 	if (ctx->nr < ctx->max_stack && !ctx->contexts_maxed) {
ctx              1218 include/linux/perf_event.h 		struct perf_callchain_entry *entry = ctx->entry;
ctx              1220 include/linux/perf_event.h 		++ctx->nr;
ctx               308 include/linux/security.h 					const struct qstr *name, void **ctx,
ctx               444 include/linux/security.h int security_inode_notifysecctx(struct inode *inode, void *ctx, u32 ctxlen);
ctx               445 include/linux/security.h int security_inode_setsecctx(struct dentry *dentry, void *ctx, u32 ctxlen);
ctx               446 include/linux/security.h int security_inode_getsecctx(struct inode *inode, void **ctx, u32 *ctxlen);
ctx               699 include/linux/security.h 						 void **ctx,
ctx              1259 include/linux/security.h static inline int security_inode_notifysecctx(struct inode *inode, void *ctx, u32 ctxlen)
ctx              1263 include/linux/security.h static inline int security_inode_setsecctx(struct dentry *dentry, void *ctx, u32 ctxlen)
ctx              1267 include/linux/security.h static inline int security_inode_getsecctx(struct inode *inode, void **ctx, u32 *ctxlen)
ctx              1577 include/linux/security.h void security_xfrm_policy_free(struct xfrm_sec_ctx *ctx);
ctx              1578 include/linux/security.h int security_xfrm_policy_delete(struct xfrm_sec_ctx *ctx);
ctx              1584 include/linux/security.h int security_xfrm_policy_lookup(struct xfrm_sec_ctx *ctx, u32 fl_secid, u8 dir);
ctx              1605 include/linux/security.h static inline void security_xfrm_policy_free(struct xfrm_sec_ctx *ctx)
ctx              1609 include/linux/security.h static inline int security_xfrm_policy_delete(struct xfrm_sec_ctx *ctx)
ctx              1635 include/linux/security.h static inline int security_xfrm_policy_lookup(struct xfrm_sec_ctx *ctx, u32 fl_secid, u8 dir)
ctx               466 include/linux/skbuff.h 			void *ctx;
ctx              1281 include/linux/skbuff.h bool bpf_flow_dissect(struct bpf_prog *prog, struct bpf_flow_dissector *ctx,
ctx               289 include/linux/syscalls.h asmlinkage long sys_io_setup(unsigned nr_reqs, aio_context_t __user *ctx);
ctx               290 include/linux/syscalls.h asmlinkage long sys_io_destroy(aio_context_t ctx);
ctx                96 include/linux/tee_drv.h 	int (*open)(struct tee_context *ctx);
ctx                97 include/linux/tee_drv.h 	void (*release)(struct tee_context *ctx);
ctx                98 include/linux/tee_drv.h 	int (*open_session)(struct tee_context *ctx,
ctx               101 include/linux/tee_drv.h 	int (*close_session)(struct tee_context *ctx, u32 session);
ctx               102 include/linux/tee_drv.h 	int (*invoke_func)(struct tee_context *ctx,
ctx               105 include/linux/tee_drv.h 	int (*cancel_req)(struct tee_context *ctx, u32 cancel_id, u32 session);
ctx               106 include/linux/tee_drv.h 	int (*supp_recv)(struct tee_context *ctx, u32 *func, u32 *num_params,
ctx               108 include/linux/tee_drv.h 	int (*supp_send)(struct tee_context *ctx, u32 ret, u32 num_params,
ctx               110 include/linux/tee_drv.h 	int (*shm_register)(struct tee_context *ctx, struct tee_shm *shm,
ctx               113 include/linux/tee_drv.h 	int (*shm_unregister)(struct tee_context *ctx, struct tee_shm *shm);
ctx               189 include/linux/tee_drv.h 	struct tee_context *ctx;
ctx               319 include/linux/tee_drv.h struct tee_shm *tee_shm_alloc(struct tee_context *ctx, size_t size, u32 flags);
ctx               342 include/linux/tee_drv.h struct tee_shm *tee_shm_register(struct tee_context *ctx, unsigned long addr,
ctx               454 include/linux/tee_drv.h struct tee_shm *tee_shm_get_from_id(struct tee_context *ctx, int id);
ctx               482 include/linux/tee_drv.h void tee_client_close_context(struct tee_context *ctx);
ctx               489 include/linux/tee_drv.h void tee_client_get_version(struct tee_context *ctx,
ctx               502 include/linux/tee_drv.h int tee_client_open_session(struct tee_context *ctx,
ctx               514 include/linux/tee_drv.h int tee_client_close_session(struct tee_context *ctx, u32 session);
ctx               525 include/linux/tee_drv.h int tee_client_invoke_func(struct tee_context *ctx,
ctx               538 include/linux/tee_drv.h int tee_client_cancel_req(struct tee_context *ctx,
ctx               273 include/linux/timekeeping.h 				void *ctx),
ctx               274 include/linux/timekeeping.h 			void *ctx,
ctx               477 include/linux/trace_events.h unsigned int trace_call_bpf(struct trace_event_call *call, void *ctx);
ctx               489 include/linux/trace_events.h static inline unsigned int trace_call_bpf(struct trace_event_call *call, void *ctx)
ctx                42 include/linux/uprobes.h 				enum uprobe_filter_ctx ctx,
ctx               137 include/linux/uprobes.h extern bool arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx, struct pt_regs *regs);
ctx               151 include/linux/usb/cdc_ncm.h int cdc_ncm_rx_verify_nth16(struct cdc_ncm_ctx *ctx, struct sk_buff *skb_in);
ctx                47 include/linux/userfaultfd_k.h 	return vma->vm_userfaultfd_ctx.ctx == vm_ctx.ctx;
ctx               115 include/linux/userfaultfd_k.h 					   struct vm_userfaultfd_ctx *ctx)
ctx               119 include/linux/userfaultfd_k.h static inline void mremap_userfaultfd_complete(struct vm_userfaultfd_ctx *ctx,
ctx                41 include/linux/verification.h 				  int (*view_content)(void *ctx,
ctx                44 include/linux/verification.h 				  void *ctx);
ctx                49 include/linux/verification.h 				    int (*view_content)(void *ctx,
ctx                53 include/linux/verification.h 				    void *ctx);
ctx                51 include/linux/virtio.h 			    void *ctx,
ctx                70 include/linux/virtio.h 			    void **ctx);
ctx                81 include/linux/virtio_config.h 			const char * const names[], const bool *ctx,
ctx               203 include/linux/virtio_config.h 			const char * const names[], const bool *ctx,
ctx               206 include/linux/virtio_config.h 	return vdev->config->find_vqs(vdev, nvqs, vqs, callbacks, names, ctx,
ctx                75 include/linux/virtio_ring.h 					 bool ctx,
ctx                85 include/linux/virtio_ring.h 					bool ctx,
ctx                99 include/linux/virtio_ring.h 				      bool ctx,
ctx                53 include/linux/ww_mutex.h 	struct ww_acquire_ctx *ctx;
ctx                99 include/linux/ww_mutex.h 	lock->ctx = NULL;
ctx               129 include/linux/ww_mutex.h static inline void ww_acquire_init(struct ww_acquire_ctx *ctx,
ctx               132 include/linux/ww_mutex.h 	ctx->task = current;
ctx               133 include/linux/ww_mutex.h 	ctx->stamp = atomic_long_inc_return_relaxed(&ww_class->stamp);
ctx               134 include/linux/ww_mutex.h 	ctx->acquired = 0;
ctx               135 include/linux/ww_mutex.h 	ctx->wounded = false;
ctx               136 include/linux/ww_mutex.h 	ctx->is_wait_die = ww_class->is_wait_die;
ctx               138 include/linux/ww_mutex.h 	ctx->ww_class = ww_class;
ctx               139 include/linux/ww_mutex.h 	ctx->done_acquire = 0;
ctx               140 include/linux/ww_mutex.h 	ctx->contending_lock = NULL;
ctx               143 include/linux/ww_mutex.h 	debug_check_no_locks_freed((void *)ctx, sizeof(*ctx));
ctx               144 include/linux/ww_mutex.h 	lockdep_init_map(&ctx->dep_map, ww_class->acquire_name,
ctx               146 include/linux/ww_mutex.h 	mutex_acquire(&ctx->dep_map, 0, 0, _RET_IP_);
ctx               149 include/linux/ww_mutex.h 	ctx->deadlock_inject_interval = 1;
ctx               150 include/linux/ww_mutex.h 	ctx->deadlock_inject_countdown = ctx->stamp & 0xf;
ctx               165 include/linux/ww_mutex.h static inline void ww_acquire_done(struct ww_acquire_ctx *ctx)
ctx               168 include/linux/ww_mutex.h 	lockdep_assert_held(ctx);
ctx               170 include/linux/ww_mutex.h 	DEBUG_LOCKS_WARN_ON(ctx->done_acquire);
ctx               171 include/linux/ww_mutex.h 	ctx->done_acquire = 1;
ctx               182 include/linux/ww_mutex.h static inline void ww_acquire_fini(struct ww_acquire_ctx *ctx)
ctx               185 include/linux/ww_mutex.h 	mutex_release(&ctx->dep_map, 0, _THIS_IP_);
ctx               187 include/linux/ww_mutex.h 	DEBUG_LOCKS_WARN_ON(ctx->acquired);
ctx               193 include/linux/ww_mutex.h 		ctx->done_acquire = 1;
ctx               197 include/linux/ww_mutex.h 		ctx->acquired = ~0U;
ctx               230 include/linux/ww_mutex.h extern int /* __must_check */ ww_mutex_lock(struct ww_mutex *lock, struct ww_acquire_ctx *ctx);
ctx               263 include/linux/ww_mutex.h 						    struct ww_acquire_ctx *ctx);
ctx               289 include/linux/ww_mutex.h ww_mutex_lock_slow(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx               293 include/linux/ww_mutex.h 	DEBUG_LOCKS_WARN_ON(!ctx->contending_lock);
ctx               295 include/linux/ww_mutex.h 	ret = ww_mutex_lock(lock, ctx);
ctx               326 include/linux/ww_mutex.h 				 struct ww_acquire_ctx *ctx)
ctx               329 include/linux/ww_mutex.h 	DEBUG_LOCKS_WARN_ON(!ctx->contending_lock);
ctx               331 include/linux/ww_mutex.h 	return ww_mutex_lock_interruptible(lock, ctx);
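
ww_mutex_lock_slow() is only legal after a -EDEADLK backoff with everything else dropped (note the contending_lock assertions above); a condensed sketch of the acquire/backoff discipline these helpers assume, following the pattern in Documentation/locking/ww-mutex-design.rst (demo_ww_class, struct obj, and lock_both are hypothetical):

	static int lock_both(struct obj *a, struct obj *b,
			     struct ww_acquire_ctx *actx)
	{
		struct ww_mutex *locks[] = { &a->lock, &b->lock };
		struct ww_mutex *contended = NULL, *busy;
		int i, ret;

		ww_acquire_init(actx, &demo_ww_class);
	retry:
		for (i = 0; i < ARRAY_SIZE(locks); i++) {
			if (locks[i] == contended)	/* taken via the slow path below */
				continue;
			ret = ww_mutex_lock(locks[i], actx);
			if (ret)
				goto unwind;
		}
		ww_acquire_done(actx);	/* no further locks will be taken */
		/* on success the caller drops both locks, then ww_acquire_fini(actx) */
		return 0;

	unwind:
		busy = locks[i];
		while (i--)
			if (locks[i] != contended)
				ww_mutex_unlock(locks[i]);
		if (contended)
			ww_mutex_unlock(contended);
		if (ret == -EDEADLK) {
			/* we lost the stamp race: sleep until the winner is done */
			ww_mutex_lock_slow(busy, actx);
			contended = busy;
			goto retry;
		}
		ww_acquire_fini(actx);
		return ret;
	}
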
ctx               256 include/linux/zstd.h size_t ZSTD_compressCCtx(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity,
ctx               299 include/linux/zstd.h size_t ZSTD_decompressDCtx(ZSTD_DCtx *ctx, void *dst, size_t dstCapacity,
ctx               325 include/linux/zstd.h size_t ZSTD_compress_usingDict(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity,
ctx               346 include/linux/zstd.h size_t ZSTD_decompress_usingDict(ZSTD_DCtx *ctx, void *dst, size_t dstCapacity,
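
A loose sketch of driving ZSTD_compressCCtx(), assuming the pre-v5.16 in-kernel zstd API these prototypes belong to; the workspace helpers (ZSTD_getParams, ZSTD_CCtxWorkspaceBound, ZSTD_initCCtx, ZSTD_isError) are in the same header era but not excerpted above, so treat the exact names as assumptions:

	ZSTD_parameters params = ZSTD_getParams(3, src_len, 0);
	size_t wksp_size = ZSTD_CCtxWorkspaceBound(params.cParams);
	void *wksp = vzalloc(wksp_size);
	ZSTD_CCtx *cctx = wksp ? ZSTD_initCCtx(wksp, wksp_size) : NULL;
	size_t out_len;

	if (cctx) {
		out_len = ZSTD_compressCCtx(cctx, dst, dst_capacity,
					    src, src_len, params);
		if (ZSTD_isError(out_len))
			out_len = 0;	/* caller falls back to storing uncompressed */
	}
	vfree(wksp);
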
ctx               117 include/media/dvb_vb2.h static inline int dvb_vb2_init(struct dvb_vb2_ctx *ctx,
ctx               122 include/media/dvb_vb2.h static inline int dvb_vb2_release(struct dvb_vb2_ctx *ctx)
ctx               126 include/media/dvb_vb2.h #define dvb_vb2_is_streaming(ctx) (0)
ctx               127 include/media/dvb_vb2.h #define dvb_vb2_fill_buffer(ctx, file, wait, flags) (0)
ctx               129 include/media/dvb_vb2.h static inline __poll_t dvb_vb2_poll(struct dvb_vb2_ctx *ctx,
ctx               144 include/media/dvb_vb2.h int dvb_vb2_init(struct dvb_vb2_ctx *ctx, const char *name, int non_blocking);
ctx               151 include/media/dvb_vb2.h int dvb_vb2_release(struct dvb_vb2_ctx *ctx);
ctx               159 include/media/dvb_vb2.h int dvb_vb2_is_streaming(struct dvb_vb2_ctx *ctx);
ctx               170 include/media/dvb_vb2.h int dvb_vb2_fill_buffer(struct dvb_vb2_ctx *ctx,
ctx               186 include/media/dvb_vb2.h __poll_t dvb_vb2_poll(struct dvb_vb2_ctx *ctx, struct file *file,
ctx               198 include/media/dvb_vb2.h int dvb_vb2_stream_on(struct dvb_vb2_ctx *ctx);
ctx               207 include/media/dvb_vb2.h int dvb_vb2_stream_off(struct dvb_vb2_ctx *ctx);
ctx               220 include/media/dvb_vb2.h int dvb_vb2_reqbufs(struct dvb_vb2_ctx *ctx, struct dmx_requestbuffers *req);
ctx               232 include/media/dvb_vb2.h int dvb_vb2_querybuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b);
ctx               244 include/media/dvb_vb2.h int dvb_vb2_expbuf(struct dvb_vb2_ctx *ctx, struct dmx_exportbuffer *exp);
ctx               255 include/media/dvb_vb2.h int dvb_vb2_qbuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b);
ctx               267 include/media/dvb_vb2.h int dvb_vb2_dqbuf(struct dvb_vb2_ctx *ctx, struct dmx_buffer *b);
ctx               278 include/media/dvb_vb2.h int dvb_vb2_mmap(struct dvb_vb2_ctx *ctx, struct vm_area_struct *vma);
ctx                82 include/misc/cxl.h int cxl_release_context(struct cxl_context *ctx);
ctx                88 include/misc/cxl.h int cxl_set_priv(struct cxl_context *ctx, void *priv);
ctx                89 include/misc/cxl.h void *cxl_get_priv(struct cxl_context *ctx);
ctx               115 include/misc/cxl.h int cxl_start_context(struct cxl_context *ctx, u64 wed,
ctx               120 include/misc/cxl.h int cxl_stop_context(struct cxl_context *ctx);
ctx               123 include/misc/cxl.h int cxl_afu_reset(struct cxl_context *ctx);
ctx               130 include/misc/cxl.h void cxl_set_master(struct cxl_context *ctx);
ctx               136 include/misc/cxl.h void __iomem *cxl_psa_map(struct cxl_context *ctx);
ctx               140 include/misc/cxl.h int cxl_process_element(struct cxl_context *ctx);
ctx               181 include/misc/cxl.h struct file *cxl_get_fd(struct cxl_context *ctx, struct file_operations *fops,
ctx               189 include/misc/cxl.h int cxl_start_work(struct cxl_context *ctx,
ctx               248 include/misc/cxl.h 						struct cxl_context *ctx);
ctx               249 include/misc/cxl.h 	void (*event_delivered) (struct cxl_context *ctx,
ctx               258 include/misc/cxl.h void cxl_set_driver_ops(struct cxl_context *ctx,
ctx               262 include/misc/cxl.h void cxl_context_events_pending(struct cxl_context *ctx,
ctx               146 include/misc/ocxl.h void ocxl_context_free(struct ocxl_context *ctx);
ctx               156 include/misc/ocxl.h int ocxl_context_attach(struct ocxl_context *ctx, u64 amr,
ctx               165 include/misc/ocxl.h int ocxl_context_detach(struct ocxl_context *ctx);
ctx               176 include/misc/ocxl.h extern int ocxl_afu_irq_alloc(struct ocxl_context *ctx, int *irq_id);
ctx               185 include/misc/ocxl.h extern int ocxl_afu_irq_free(struct ocxl_context *ctx, int irq_id);
ctx               196 include/misc/ocxl.h extern u64 ocxl_afu_irq_get_addr(struct ocxl_context *ctx, int irq_id);
ctx               208 include/misc/ocxl.h int ocxl_irq_set_handler(struct ocxl_context *ctx, int irq_id,
ctx               124 include/net/6lowpan.h static inline bool lowpan_iphc_ctx_is_active(const struct lowpan_iphc_ctx *ctx)
ctx               126 include/net/6lowpan.h 	return test_bit(LOWPAN_IPHC_CTX_FLAG_ACTIVE, &ctx->flags);
ctx               130 include/net/6lowpan.h lowpan_iphc_ctx_is_compression(const struct lowpan_iphc_ctx *ctx)
ctx               132 include/net/6lowpan.h 	return test_bit(LOWPAN_IPHC_CTX_FLAG_COMPRESSION, &ctx->flags);
ctx               138 include/net/6lowpan.h 	struct lowpan_iphc_ctx_table ctx;
ctx               160 include/net/codel.h typedef void (*codel_skb_drop_t)(struct sk_buff *skb, void *ctx);
ctx               101 include/net/codel_impl.h 			      void *ctx,
ctx               142 include/net/codel_impl.h static struct sk_buff *codel_dequeue(void *ctx,
ctx               152 include/net/codel_impl.h 	struct sk_buff *skb = dequeue_func(vars, ctx);
ctx               161 include/net/codel_impl.h 	drop = codel_should_drop(skb, ctx, vars, params, stats,
ctx               191 include/net/codel_impl.h 				drop_func(skb, ctx);
ctx               193 include/net/codel_impl.h 				skb = dequeue_func(vars, ctx);
ctx               194 include/net/codel_impl.h 				if (!codel_should_drop(skb, ctx,
ctx               217 include/net/codel_impl.h 			drop_func(skb, ctx);
ctx               220 include/net/codel_impl.h 			skb = dequeue_func(vars, ctx);
ctx               221 include/net/codel_impl.h 			drop = codel_should_drop(skb, ctx, vars, params,
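
codel_dequeue() is parameterised entirely through the opaque ctx plus the dequeue/drop callbacks shown above; a hedged sketch of the glue a qdisc would supply, mirroring what fq_codel does (demo_* names are hypothetical):

	static struct sk_buff *demo_dequeue(struct codel_vars *vars, void *ctx)
	{
		struct Qdisc *sch = ctx;

		return __qdisc_dequeue_head(&sch->q);	/* hand CoDel the head skb */
	}

	static void demo_drop(struct sk_buff *skb, void *ctx)
	{
		struct Qdisc *sch = ctx;

		kfree_skb(skb);
		qdisc_qstats_drop(sch);	/* account the CoDel-induced drop */
	}
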
ctx               378 include/net/devlink.h 		   struct devlink_param_gset_ctx *ctx);
ctx               380 include/net/devlink.h 		   struct devlink_param_gset_ctx *ctx);
ctx               182 include/net/flow_offload.h 			u32		ctx;
ctx              3969 include/net/mac80211.h 			   struct ieee80211_chanctx_conf *ctx);
ctx              3971 include/net/mac80211.h 			       struct ieee80211_chanctx_conf *ctx);
ctx              3973 include/net/mac80211.h 			       struct ieee80211_chanctx_conf *ctx,
ctx              3977 include/net/mac80211.h 				  struct ieee80211_chanctx_conf *ctx);
ctx              3980 include/net/mac80211.h 				     struct ieee80211_chanctx_conf *ctx);
ctx                13 include/net/netfilter/nf_dup_netdev.h int nft_fwd_dup_netdev_offload(struct nft_offload_ctx *ctx,
ctx               187 include/net/netfilter/nf_tables.h int nft_data_init(const struct nft_ctx *ctx,
ctx               210 include/net/netfilter/nf_tables.h int nft_validate_register_store(const struct nft_ctx *ctx,
ctx               250 include/net/netfilter/nf_tables.h 	int		(*fn)(const struct nft_ctx *ctx,
ctx               353 include/net/netfilter/nf_tables.h 	void				(*walk)(const struct nft_ctx *ctx,
ctx               494 include/net/netfilter/nf_tables.h void nf_tables_deactivate_set(const struct nft_ctx *ctx, struct nft_set *set,
ctx               497 include/net/netfilter/nf_tables.h int nf_tables_bind_set(const struct nft_ctx *ctx, struct nft_set *set,
ctx               499 include/net/netfilter/nf_tables.h void nf_tables_destroy_set(const struct nft_ctx *ctx, struct nft_set *set);
ctx               784 include/net/netfilter/nf_tables.h 	int				(*init)(const struct nft_ctx *ctx,
ctx               787 include/net/netfilter/nf_tables.h 	void				(*activate)(const struct nft_ctx *ctx,
ctx               789 include/net/netfilter/nf_tables.h 	void				(*deactivate)(const struct nft_ctx *ctx,
ctx               792 include/net/netfilter/nf_tables.h 	void				(*destroy)(const struct nft_ctx *ctx,
ctx               794 include/net/netfilter/nf_tables.h 	void				(*destroy_clone)(const struct nft_ctx *ctx,
ctx               798 include/net/netfilter/nf_tables.h 	int				(*validate)(const struct nft_ctx *ctx,
ctx               803 include/net/netfilter/nf_tables.h 	int				(*offload)(struct nft_offload_ctx *ctx,
ctx               832 include/net/netfilter/nf_tables.h struct nft_expr *nft_expr_init(const struct nft_ctx *ctx,
ctx               834 include/net/netfilter/nf_tables.h void nft_expr_destroy(const struct nft_ctx *ctx, struct nft_expr *expr);
ctx               924 include/net/netfilter/nf_tables.h int nft_chain_validate(const struct nft_ctx *ctx, const struct nft_chain *chain);
ctx               999 include/net/netfilter/nf_tables.h int __nft_release_basechain(struct nft_ctx *ctx);
ctx              1134 include/net/netfilter/nf_tables.h 	int				(*init)(const struct nft_ctx *ctx,
ctx              1137 include/net/netfilter/nf_tables.h 	void				(*destroy)(const struct nft_ctx *ctx,
ctx              1187 include/net/netfilter/nf_tables.h void nf_tables_deactivate_flowtable(const struct nft_ctx *ctx,
ctx              1363 include/net/netfilter/nf_tables.h 	struct nft_ctx			ctx;
ctx                33 include/net/netfilter/nf_tables_offload.h void nft_offload_set_dependency(struct nft_offload_ctx *ctx,
ctx                35 include/net/netfilter/nf_tables_offload.h void nft_offload_update_dependency(struct nft_offload_ctx *ctx,
ctx                22 include/net/netfilter/nft_fib.h int nft_fib_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx                24 include/net/netfilter/nft_fib.h int nft_fib_validate(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx                17 include/net/netfilter/nft_meta.h int nft_meta_get_init(const struct nft_ctx *ctx,
ctx                21 include/net/netfilter/nft_meta.h int nft_meta_set_init(const struct nft_ctx *ctx,
ctx                39 include/net/netfilter/nft_meta.h void nft_meta_set_destroy(const struct nft_ctx *ctx,
ctx                42 include/net/netfilter/nft_meta.h int nft_meta_set_validate(const struct nft_ctx *ctx,
ctx                17 include/net/netfilter/nft_reject.h int nft_reject_validate(const struct nft_ctx *ctx,
ctx                21 include/net/netfilter/nft_reject.h int nft_reject_init(const struct nft_ctx *ctx,
ctx               156 include/net/request_sock.h 	struct tcp_fastopen_context __rcu *ctx; /* cipher context for cookie */
ctx              1684 include/net/tcp.h 	struct tcp_fastopen_context *ctx;
ctx              1686 include/net/tcp.h 	ctx = rcu_dereference(inet_csk(sk)->icsk_accept_queue.fastopenq.ctx);
ctx              1687 include/net/tcp.h 	if (!ctx)
ctx              1688 include/net/tcp.h 		ctx = rcu_dereference(sock_net(sk)->ipv4.tcp_fastopen_ctx);
ctx              1689 include/net/tcp.h 	return ctx;
ctx              1704 include/net/tcp.h int tcp_fastopen_context_len(const struct tcp_fastopen_context *ctx)
ctx              1706 include/net/tcp.h 	return ctx->num;
ctx               306 include/net/tls.h 			    struct tls_context *ctx,
ctx               351 include/net/tls.h void tls_ctx_free(struct sock *sk, struct tls_context *ctx);
ctx               358 include/net/tls.h int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx);
ctx               359 include/net/tls.h void tls_sw_strparser_arm(struct sock *sk, struct tls_context *ctx);
ctx               397 include/net/tls.h int tls_push_sg(struct sock *sk, struct tls_context *ctx,
ctx               400 include/net/tls.h int tls_push_partial_record(struct sock *sk, struct tls_context *ctx,
ctx               402 include/net/tls.h void tls_free_partial_record(struct sock *sk, struct tls_context *ctx);
ctx               409 include/net/tls.h static inline bool tls_is_partially_sent_record(struct tls_context *ctx)
ctx               411 include/net/tls.h 	return !!ctx->partially_sent_record;
ctx               419 include/net/tls.h static inline bool is_tx_ready(struct tls_sw_context_tx *ctx)
ctx               423 include/net/tls.h 	rec = list_first_entry(&ctx->tx_list, struct tls_rec, list);
ctx               430 include/net/tls.h static inline u16 tls_user_config(struct tls_context *ctx, bool tx)
ctx               432 include/net/tls.h 	u16 config = tx ? ctx->tx_conf : ctx->rx_conf;
ctx               493 include/net/tls.h 					 struct cipher_context *ctx)
ctx               495 include/net/tls.h 	if (tls_bigint_increment(ctx->rec_seq, prot->rec_seq_size))
ctx               499 include/net/tls.h 		tls_bigint_increment(ctx->iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
ctx               503 include/net/tls.h static inline void tls_fill_prepend(struct tls_context *ctx,
ctx               509 include/net/tls.h 	struct tls_prot_info *prot = &ctx->prot_info;
ctx               517 include/net/tls.h 		       ctx->tx.iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE, iv_size);
ctx               586 include/net/tls.h 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               588 include/net/tls.h 	if (!ctx)
ctx               590 include/net/tls.h 	return !!tls_sw_ctx_tx(ctx);
ctx               593 include/net/tls.h void tls_sw_write_space(struct sock *sk, struct tls_context *ctx);
ctx               594 include/net/tls.h void tls_device_write_space(struct sock *sk, struct tls_context *ctx);
ctx               673 include/net/tls.h int tls_set_device_offload(struct sock *sk, struct tls_context *ctx);
ctx               675 include/net/tls.h int tls_set_device_offload_rx(struct sock *sk, struct tls_context *ctx);
ctx               684 include/net/tls.h tls_set_device_offload(struct sock *sk, struct tls_context *ctx)
ctx               692 include/net/tls.h tls_set_device_offload_rx(struct sock *sk, struct tls_context *ctx)
ctx              1641 include/net/xfrm.h 					  struct xfrm_sec_ctx *ctx, int delete,
ctx                45 include/rdma/rw.h int rdma_rw_ctx_init(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx                48 include/rdma/rw.h void rdma_rw_ctx_destroy(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx                52 include/rdma/rw.h int rdma_rw_ctx_signature_init(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx                57 include/rdma/rw.h void rdma_rw_ctx_destroy_signature(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx                62 include/rdma/rw.h struct ib_send_wr *rdma_rw_ctx_wrs(struct rdma_rw_ctx *ctx, struct ib_qp *qp,
ctx                64 include/rdma/rw.h int rdma_rw_ctx_post(struct rdma_rw_ctx *ctx, struct ib_qp *qp, u8 port_num,
ctx                40 include/soc/fsl/dpaa2-global.h 			__le64 ctx;
ctx                84 include/soc/fsl/dpaa2-io.h 	void (*cb)(struct dpaa2_io_notification_ctx *ctx);
ctx                97 include/soc/fsl/dpaa2-io.h 			      struct dpaa2_io_notification_ctx *ctx,
ctx               100 include/soc/fsl/dpaa2-io.h 				 struct dpaa2_io_notification_ctx *ctx,
ctx               103 include/soc/fsl/dpaa2-io.h 			   struct dpaa2_io_notification_ctx *ctx);
ctx                30 include/sound/soc-acpi.h 				    struct snd_soc_acpi_package_context *ctx);
ctx                45 include/sound/soc-acpi.h 				   struct snd_soc_acpi_package_context *ctx)
ctx                39 include/trace/events/filelock.h 	TP_PROTO(struct inode *inode, int type, struct file_lock_context *ctx),
ctx                41 include/trace/events/filelock.h 	TP_ARGS(inode, type, ctx),
ctx                47 include/trace/events/filelock.h 		__field(struct file_lock_context *, ctx)
ctx                54 include/trace/events/filelock.h 		__entry->ctx = ctx;
ctx                59 include/trace/events/filelock.h 		  __entry->i_ino, show_fl_type(__entry->type), __entry->ctx)
ctx               110 include/uapi/drm/lima_drm.h 	__u32 ctx;         /* in, context handle task is submitted to */
ctx                23 include/video/imx-ipu-image-convert.h 	struct ipu_image_convert_ctx *ctx;
ctx                41 include/video/imx-ipu-image-convert.h 				       void *ctx);
ctx               119 include/video/imx-ipu-image-convert.h void ipu_image_convert_unprepare(struct ipu_image_convert_ctx *ctx);
ctx               150 include/video/imx-ipu-image-convert.h void ipu_image_convert_abort(struct ipu_image_convert_ctx *ctx);
ctx               366 ipc/mqueue.c   	struct mqueue_fs_context *ctx = fc->fs_private;
ctx               368 ipc/mqueue.c   	return get_tree_keyed(fc, mqueue_fill_super, ctx->ipc_ns);
ctx               373 ipc/mqueue.c   	struct mqueue_fs_context *ctx = fc->fs_private;
ctx               375 ipc/mqueue.c   	put_ipc_ns(ctx->ipc_ns);
ctx               376 ipc/mqueue.c   	kfree(ctx);
ctx               381 ipc/mqueue.c   	struct mqueue_fs_context *ctx;
ctx               383 ipc/mqueue.c   	ctx = kzalloc(sizeof(struct mqueue_fs_context), GFP_KERNEL);
ctx               384 ipc/mqueue.c   	if (!ctx)
ctx               387 ipc/mqueue.c   	ctx->ipc_ns = get_ipc_ns(current->nsproxy->ipc_ns);
ctx               389 ipc/mqueue.c   	fc->user_ns = get_user_ns(ctx->ipc_ns->user_ns);
ctx               390 ipc/mqueue.c   	fc->fs_private = ctx;
ctx               397 ipc/mqueue.c   	struct mqueue_fs_context *ctx;
ctx               405 ipc/mqueue.c   	ctx = fc->fs_private;
ctx               406 ipc/mqueue.c   	put_ipc_ns(ctx->ipc_ns);
ctx               407 ipc/mqueue.c   	ctx->ipc_ns = get_ipc_ns(ns);
ctx               409 ipc/mqueue.c   	fc->user_ns = get_user_ns(ctx->ipc_ns->user_ns);
ctx               195 kernel/audit.c 	struct audit_context *ctx;	/* NULL or associated context */
ctx              1180 kernel/audit.c 	char			*ctx = NULL;
ctx              1425 kernel/audit.c 			err = security_secid_to_secctx(audit_sig_sid, &ctx, &len);
ctx              1432 kernel/audit.c 				security_release_secctx(ctx, len);
ctx              1438 kernel/audit.c 			memcpy(sig_data->ctx, ctx, len);
ctx              1439 kernel/audit.c 			security_release_secctx(ctx, len);
ctx              1676 kernel/audit.c static struct audit_buffer *audit_buffer_alloc(struct audit_context *ctx,
ctx              1691 kernel/audit.c 	ab->ctx = ctx;
ctx              1725 kernel/audit.c static inline void audit_get_stamp(struct audit_context *ctx,
ctx              1728 kernel/audit.c 	if (!ctx || !auditsc_get_stamp(ctx, t, serial)) {
ctx              1749 kernel/audit.c struct audit_buffer *audit_log_start(struct audit_context *ctx, gfp_t gfp_mask,
ctx              1798 kernel/audit.c 	ab = audit_buffer_alloc(ctx, gfp_mask, type);
ctx              1804 kernel/audit.c 	audit_get_stamp(ab->ctx, &t, &serial);
ctx              2064 kernel/audit.c 	char *ctx = NULL;
ctx              2073 kernel/audit.c 	error = security_secid_to_secctx(sid, &ctx, &len);
ctx              2080 kernel/audit.c 	audit_log_format(ab, " subj=%s", ctx);
ctx              2081 kernel/audit.c 	security_release_secctx(ctx, len);
ctx              2342 kernel/audit.c void audit_log(struct audit_context *ctx, gfp_t gfp_mask, int type,
ctx              2348 kernel/audit.c 	ab = audit_log_start(ctx, gfp_mask, type);
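audit_log_start()/audit_log_format()/audit_log_end() is the three-step record emission that audit_log() wraps for callers with a plain format string. A hedged usage sketch; AUDIT_KERNEL is merely a convenient record type for the example:

	static void example_audit_note(struct audit_context *ctx, int err)
	{
		struct audit_buffer *ab;

		ab = audit_log_start(ctx, GFP_KERNEL, AUDIT_KERNEL);
		if (!ab)
			return;		/* auditing disabled or alloc failed */
		audit_log_format(ab, "example res=%d", err);
		audit_log_end(ab);	/* queues the record to the daemon */
	}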
ctx               252 kernel/audit.h extern int auditsc_get_stamp(struct audit_context *ctx,
ctx               291 kernel/audit.h 				struct audit_context *ctx);
ctx               133 kernel/auditsc.c static int audit_match_perm(struct audit_context *ctx, int mask)
ctx               136 kernel/auditsc.c 	if (unlikely(!ctx))
ctx               138 kernel/auditsc.c 	n = ctx->major;
ctx               140 kernel/auditsc.c 	switch (audit_classify_syscall(ctx->arch, n)) {
ctx               164 kernel/auditsc.c 		return mask & ACC_MODE(ctx->argv[1]);
ctx               166 kernel/auditsc.c 		return mask & ACC_MODE(ctx->argv[2]);
ctx               168 kernel/auditsc.c 		return ((mask & AUDIT_PERM_WRITE) && ctx->argv[0] == SYS_BIND);
ctx               176 kernel/auditsc.c static int audit_match_filetype(struct audit_context *ctx, int val)
ctx               181 kernel/auditsc.c 	if (unlikely(!ctx))
ctx               184 kernel/auditsc.c 	list_for_each_entry(n, &ctx->names_list, list) {
ctx               203 kernel/auditsc.c static void audit_set_auditable(struct audit_context *ctx)
ctx               205 kernel/auditsc.c 	if (!ctx->prio) {
ctx               206 kernel/auditsc.c 		ctx->prio = 1;
ctx               207 kernel/auditsc.c 		ctx->current_state = AUDIT_RECORD_CONTEXT;
ctx               211 kernel/auditsc.c static int put_tree_ref(struct audit_context *ctx, struct audit_chunk *chunk)
ctx               213 kernel/auditsc.c 	struct audit_tree_refs *p = ctx->trees;
ctx               214 kernel/auditsc.c 	int left = ctx->tree_count;
ctx               217 kernel/auditsc.c 		ctx->tree_count = left;
ctx               225 kernel/auditsc.c 		ctx->trees = p;
ctx               226 kernel/auditsc.c 		ctx->tree_count = 30;
ctx               232 kernel/auditsc.c static int grow_tree_refs(struct audit_context *ctx)
ctx               234 kernel/auditsc.c 	struct audit_tree_refs *p = ctx->trees;
ctx               235 kernel/auditsc.c 	ctx->trees = kzalloc(sizeof(struct audit_tree_refs), GFP_KERNEL);
ctx               236 kernel/auditsc.c 	if (!ctx->trees) {
ctx               237 kernel/auditsc.c 		ctx->trees = p;
ctx               241 kernel/auditsc.c 		p->next = ctx->trees;
ctx               243 kernel/auditsc.c 		ctx->first_trees = ctx->trees;
ctx               244 kernel/auditsc.c 	ctx->tree_count = 31;
ctx               248 kernel/auditsc.c static void unroll_tree_refs(struct audit_context *ctx,
ctx               255 kernel/auditsc.c 		p = ctx->first_trees;
ctx               262 kernel/auditsc.c 	for (q = p; q != ctx->trees; q = q->next, n = 31) {
ctx               268 kernel/auditsc.c 	while (n-- > ctx->tree_count) {
ctx               272 kernel/auditsc.c 	ctx->trees = p;
ctx               273 kernel/auditsc.c 	ctx->tree_count = count;
ctx               276 kernel/auditsc.c static void free_tree_refs(struct audit_context *ctx)
ctx               279 kernel/auditsc.c 	for (p = ctx->first_trees; p; p = q) {
ctx               285 kernel/auditsc.c static int match_tree_refs(struct audit_context *ctx, struct audit_tree *tree)
ctx               292 kernel/auditsc.c 	for (p = ctx->first_trees; p != ctx->trees; p = p->next) {
ctx               299 kernel/auditsc.c 		for (n = ctx->tree_count; n < 31; n++)
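put_tree_ref()/grow_tree_refs()/unroll_tree_refs()/free_tree_refs() manage a singly linked chain of fixed-size blocks of chunk pointers, with ctx->tree_count tracking the free slots in the newest block (31 for a freshly grown empty block, 30 once put_tree_ref() has consumed the first slot of a new one). The backing structure, reconstructed from that accounting as a hedged sketch:

	struct audit_tree_refs {
		struct audit_tree_refs *next;	/* older block in the chain */
		struct audit_chunk *c[31];	/* one slot per pinned chunk */
	};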
ctx               309 kernel/auditsc.c 			     struct audit_context *ctx)
ctx               320 kernel/auditsc.c 	if (ctx) {
ctx               321 kernel/auditsc.c 		list_for_each_entry(n, &ctx->names_list, list) {
ctx               333 kernel/auditsc.c 			     struct audit_context *ctx)
ctx               344 kernel/auditsc.c 	if (ctx) {
ctx               345 kernel/auditsc.c 		list_for_each_entry(n, &ctx->names_list, list) {
ctx               357 kernel/auditsc.c 			       struct audit_context *ctx,
ctx               363 kernel/auditsc.c 		return audit_compare_uid(cred->uid, name, f, ctx);
ctx               365 kernel/auditsc.c 		return audit_compare_gid(cred->gid, name, f, ctx);
ctx               367 kernel/auditsc.c 		return audit_compare_uid(cred->euid, name, f, ctx);
ctx               369 kernel/auditsc.c 		return audit_compare_gid(cred->egid, name, f, ctx);
ctx               371 kernel/auditsc.c 		return audit_compare_uid(audit_get_loginuid(tsk), name, f, ctx);
ctx               373 kernel/auditsc.c 		return audit_compare_uid(cred->suid, name, f, ctx);
ctx               375 kernel/auditsc.c 		return audit_compare_gid(cred->sgid, name, f, ctx);
ctx               377 kernel/auditsc.c 		return audit_compare_uid(cred->fsuid, name, f, ctx);
ctx               379 kernel/auditsc.c 		return audit_compare_gid(cred->fsgid, name, f, ctx);
ctx               440 kernel/auditsc.c 			      struct audit_context *ctx,
ctx               464 kernel/auditsc.c 			if (ctx) {
ctx               465 kernel/auditsc.c 				if (!ctx->ppid)
ctx               466 kernel/auditsc.c 					ctx->ppid = task_ppid_nr(tsk);
ctx               467 kernel/auditsc.c 				result = audit_comparator(ctx->ppid, f->op, f->val);
ctx               521 kernel/auditsc.c 			if (ctx)
ctx               522 kernel/auditsc.c 				result = audit_comparator(ctx->arch, f->op, f->val);
ctx               526 kernel/auditsc.c 			if (ctx && ctx->return_valid)
ctx               527 kernel/auditsc.c 				result = audit_comparator(ctx->return_code, f->op, f->val);
ctx               530 kernel/auditsc.c 			if (ctx && ctx->return_valid) {
ctx               532 kernel/auditsc.c 					result = audit_comparator(ctx->return_valid, f->op, AUDITSC_SUCCESS);
ctx               534 kernel/auditsc.c 					result = audit_comparator(ctx->return_valid, f->op, AUDITSC_FAILURE);
ctx               542 kernel/auditsc.c 			} else if (ctx) {
ctx               543 kernel/auditsc.c 				list_for_each_entry(n, &ctx->names_list, list) {
ctx               557 kernel/auditsc.c 			} else if (ctx) {
ctx               558 kernel/auditsc.c 				list_for_each_entry(n, &ctx->names_list, list) {
ctx               570 kernel/auditsc.c 			else if (ctx) {
ctx               571 kernel/auditsc.c 				list_for_each_entry(n, &ctx->names_list, list) {
ctx               582 kernel/auditsc.c 			} else if (ctx) {
ctx               583 kernel/auditsc.c 				list_for_each_entry(n, &ctx->names_list, list) {
ctx               594 kernel/auditsc.c 			} else if (ctx) {
ctx               595 kernel/auditsc.c 				list_for_each_entry(n, &ctx->names_list, list) {
ctx               613 kernel/auditsc.c 			if (ctx) {
ctx               614 kernel/auditsc.c 				result = match_tree_refs(ctx, rule->tree);
ctx               627 kernel/auditsc.c 			if (ctx->sockaddr)
ctx               628 kernel/auditsc.c 				result = audit_comparator(ctx->sockaddr->ss_family,
ctx               666 kernel/auditsc.c 				} else if (ctx) {
ctx               667 kernel/auditsc.c 					list_for_each_entry(n, &ctx->names_list, list) {
ctx               679 kernel/auditsc.c 				if (!ctx || ctx->type != AUDIT_IPC)
ctx               681 kernel/auditsc.c 				if (security_audit_rule_match(ctx->ipc.osid,
ctx               691 kernel/auditsc.c 			if (ctx)
ctx               692 kernel/auditsc.c 				result = audit_comparator(ctx->argv[f->type-AUDIT_ARG0], f->op, f->val);
ctx               699 kernel/auditsc.c 			result = audit_match_perm(ctx, f->val);
ctx               704 kernel/auditsc.c 			result = audit_match_filetype(ctx, f->val);
ctx               709 kernel/auditsc.c 			result = audit_field_compare(tsk, cred, f, ctx, name);
ctx               716 kernel/auditsc.c 	if (ctx) {
ctx               717 kernel/auditsc.c 		if (rule->prio <= ctx->prio)
ctx               720 kernel/auditsc.c 			kfree(ctx->filterkey);
ctx               721 kernel/auditsc.c 			ctx->filterkey = kstrdup(rule->filterkey, GFP_ATOMIC);
ctx               723 kernel/auditsc.c 		ctx->prio = rule->prio;
ctx               781 kernel/auditsc.c 					     struct audit_context *ctx,
ctx               792 kernel/auditsc.c 		if (audit_in_mask(&e->rule, ctx->major) &&
ctx               793 kernel/auditsc.c 		    audit_filter_rules(tsk, &e->rule, ctx, NULL,
ctx               796 kernel/auditsc.c 			ctx->current_state = state;
ctx               810 kernel/auditsc.c 				   struct audit_context *ctx) {
ctx               817 kernel/auditsc.c 		if (audit_in_mask(&e->rule, ctx->major) &&
ctx               818 kernel/auditsc.c 		    audit_filter_rules(tsk, &e->rule, ctx, n, &state, false)) {
ctx               819 kernel/auditsc.c 			ctx->current_state = state;
ctx               831 kernel/auditsc.c void audit_filter_inodes(struct task_struct *tsk, struct audit_context *ctx)
ctx               840 kernel/auditsc.c 	list_for_each_entry(n, &ctx->names_list, list) {
ctx               841 kernel/auditsc.c 		if (audit_filter_inode_name(tsk, n, ctx))
ctx               960 kernel/auditsc.c 	char *ctx = NULL;
ctx               972 kernel/auditsc.c 		if (security_secid_to_secctx(sid, &ctx, &len)) {
ctx               976 kernel/auditsc.c 			audit_log_format(ab, " obj=%s", ctx);
ctx               977 kernel/auditsc.c 			security_release_secctx(ctx, len);
ctx              1213 kernel/auditsc.c 			char *ctx = NULL;
ctx              1215 kernel/auditsc.c 			if (security_secid_to_secctx(osid, &ctx, &len)) {
ctx              1219 kernel/auditsc.c 				audit_log_format(ab, " obj=%s", ctx);
ctx              1220 kernel/auditsc.c 				security_release_secctx(ctx, len);
ctx              1363 kernel/auditsc.c 		char *ctx = NULL;
ctx              1367 kernel/auditsc.c 			n->osid, &ctx, &len)) {
ctx              1372 kernel/auditsc.c 			audit_log_format(ab, " obj=%s", ctx);
ctx              1373 kernel/auditsc.c 			security_release_secctx(ctx, len);
ctx              2169 kernel/auditsc.c int auditsc_get_stamp(struct audit_context *ctx,
ctx              2172 kernel/auditsc.c 	if (!ctx->in_syscall)
ctx              2174 kernel/auditsc.c 	if (!ctx->serial)
ctx              2175 kernel/auditsc.c 		ctx->serial = audit_serial();
ctx              2176 kernel/auditsc.c 	t->tv_sec  = ctx->ctime.tv_sec;
ctx              2177 kernel/auditsc.c 	t->tv_nsec = ctx->ctime.tv_nsec;
ctx              2178 kernel/auditsc.c 	*serial    = ctx->serial;
ctx              2179 kernel/auditsc.c 	if (!ctx->prio) {
ctx              2180 kernel/auditsc.c 		ctx->prio = 1;
ctx              2181 kernel/auditsc.c 		ctx->current_state = AUDIT_RECORD_CONTEXT;
ctx              2389 kernel/auditsc.c 	struct audit_context *ctx = audit_context();
ctx              2397 kernel/auditsc.c 	if (!ctx->target_pid) {
ctx              2398 kernel/auditsc.c 		ctx->target_pid = task_tgid_nr(t);
ctx              2399 kernel/auditsc.c 		ctx->target_auid = audit_get_loginuid(t);
ctx              2400 kernel/auditsc.c 		ctx->target_uid = t_uid;
ctx              2401 kernel/auditsc.c 		ctx->target_sessionid = audit_get_sessionid(t);
ctx              2402 kernel/auditsc.c 		security_task_getsecid(t, &ctx->target_sid);
ctx              2403 kernel/auditsc.c 		memcpy(ctx->target_comm, t->comm, TASK_COMM_LEN);
ctx              2407 kernel/auditsc.c 	axp = (void *)ctx->aux_pids;
ctx              2414 kernel/auditsc.c 		axp->d.next = ctx->aux_pids;
ctx              2415 kernel/auditsc.c 		ctx->aux_pids = (void *)axp;
ctx              2642 kernel/auditsc.c 	struct audit_context *ctx = audit_context();
ctx              2643 kernel/auditsc.c 	if (likely(!ctx || !ctx->in_syscall))
ctx              2645 kernel/auditsc.c 	return &ctx->killed_trees;
ctx               708 kernel/bpf/cgroup.c 	struct bpf_sock_addr_kern ctx = {
ctx               723 kernel/bpf/cgroup.c 	if (!ctx.uaddr) {
ctx               725 kernel/bpf/cgroup.c 		ctx.uaddr = (struct sockaddr *)&unspec;
ctx               729 kernel/bpf/cgroup.c 	ret = BPF_PROG_RUN_ARRAY(cgrp->bpf.effective[type], &ctx, BPF_PROG_RUN);
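BPF_PROG_RUN_ARRAY() here runs every program attached to the cgroup against the bpf_sock_addr_kern context; on the BPF side the same data is exposed as struct bpf_sock_addr. A hedged sketch of a matching cgroup/bind4 program, where the port-111 policy is purely illustrative; returning 1 permits the bind and 0 rejects it:

	#include <linux/bpf.h>
	#include <bpf/bpf_helpers.h>
	#include <bpf/bpf_endian.h>

	SEC("cgroup/bind4")
	int example_bind4(struct bpf_sock_addr *ctx)
	{
		/* user_port is in network byte order */
		if (ctx->user_port == bpf_htons(111))
			return 0;	/* deny */
		return 1;		/* allow */
	}

	char _license[] SEC("license") = "GPL";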
ctx               768 kernel/bpf/cgroup.c 	struct bpf_cgroup_dev_ctx ctx = {
ctx               777 kernel/bpf/cgroup.c 	allow = BPF_PROG_RUN_ARRAY(cgrp->bpf.effective[type], &ctx,
ctx               890 kernel/bpf/cgroup.c 	struct bpf_sysctl_kern ctx = {
ctx               904 kernel/bpf/cgroup.c 	ctx.cur_val = kmalloc_track_caller(ctx.cur_len, GFP_KERNEL);
ctx               905 kernel/bpf/cgroup.c 	if (ctx.cur_val) {
ctx               911 kernel/bpf/cgroup.c 		if (table->proc_handler(table, 0, (void __user *)ctx.cur_val,
ctx               912 kernel/bpf/cgroup.c 					&ctx.cur_len, &pos)) {
ctx               914 kernel/bpf/cgroup.c 			ctx.cur_len = 0;
ctx               919 kernel/bpf/cgroup.c 		ctx.cur_len = 0;
ctx               926 kernel/bpf/cgroup.c 		ctx.new_val = kmalloc_track_caller(PAGE_SIZE, GFP_KERNEL);
ctx               927 kernel/bpf/cgroup.c 		ctx.new_len = min_t(size_t, PAGE_SIZE, *pcount);
ctx               928 kernel/bpf/cgroup.c 		if (!ctx.new_val ||
ctx               929 kernel/bpf/cgroup.c 		    copy_from_user(ctx.new_val, buf, ctx.new_len))
ctx               931 kernel/bpf/cgroup.c 			ctx.new_len = 0;
ctx               936 kernel/bpf/cgroup.c 	ret = BPF_PROG_RUN_ARRAY(cgrp->bpf.effective[type], &ctx, BPF_PROG_RUN);
ctx               939 kernel/bpf/cgroup.c 	kfree(ctx.cur_val);
ctx               941 kernel/bpf/cgroup.c 	if (ret == 1 && ctx.new_updated) {
ctx               942 kernel/bpf/cgroup.c 		*new_buf = ctx.new_val;
ctx               943 kernel/bpf/cgroup.c 		*pcount = ctx.new_len;
ctx               945 kernel/bpf/cgroup.c 		kfree(ctx.new_val);
ctx               967 kernel/bpf/cgroup.c static int sockopt_alloc_buf(struct bpf_sockopt_kern *ctx, int max_optlen)
ctx               972 kernel/bpf/cgroup.c 	ctx->optval = kzalloc(max_optlen, GFP_USER);
ctx               973 kernel/bpf/cgroup.c 	if (!ctx->optval)
ctx               976 kernel/bpf/cgroup.c 	ctx->optval_end = ctx->optval + max_optlen;
ctx               981 kernel/bpf/cgroup.c static void sockopt_free_buf(struct bpf_sockopt_kern *ctx)
ctx               983 kernel/bpf/cgroup.c 	kfree(ctx->optval);
ctx               991 kernel/bpf/cgroup.c 	struct bpf_sockopt_kern ctx = {
ctx              1012 kernel/bpf/cgroup.c 	ret = sockopt_alloc_buf(&ctx, max_optlen);
ctx              1016 kernel/bpf/cgroup.c 	ctx.optlen = *optlen;
ctx              1018 kernel/bpf/cgroup.c 	if (copy_from_user(ctx.optval, optval, *optlen) != 0) {
ctx              1025 kernel/bpf/cgroup.c 				 &ctx, BPF_PROG_RUN);
ctx              1033 kernel/bpf/cgroup.c 	if (ctx.optlen == -1) {
ctx              1036 kernel/bpf/cgroup.c 	} else if (ctx.optlen > max_optlen || ctx.optlen < -1) {
ctx              1044 kernel/bpf/cgroup.c 		*level = ctx.level;
ctx              1045 kernel/bpf/cgroup.c 		*optname = ctx.optname;
ctx              1046 kernel/bpf/cgroup.c 		*optlen = ctx.optlen;
ctx              1047 kernel/bpf/cgroup.c 		*kernel_optval = ctx.optval;
ctx              1052 kernel/bpf/cgroup.c 		sockopt_free_buf(&ctx);
ctx              1063 kernel/bpf/cgroup.c 	struct bpf_sockopt_kern ctx = {
ctx              1079 kernel/bpf/cgroup.c 	ret = sockopt_alloc_buf(&ctx, max_optlen);
ctx              1083 kernel/bpf/cgroup.c 	ctx.optlen = max_optlen;
ctx              1093 kernel/bpf/cgroup.c 		if (get_user(ctx.optlen, optlen)) {
ctx              1098 kernel/bpf/cgroup.c 		if (ctx.optlen > max_optlen)
ctx              1099 kernel/bpf/cgroup.c 			ctx.optlen = max_optlen;
ctx              1101 kernel/bpf/cgroup.c 		if (copy_from_user(ctx.optval, optval, ctx.optlen) != 0) {
ctx              1109 kernel/bpf/cgroup.c 				 &ctx, BPF_PROG_RUN);
ctx              1117 kernel/bpf/cgroup.c 	if (ctx.optlen > max_optlen) {
ctx              1125 kernel/bpf/cgroup.c 	if (ctx.retval != 0 && ctx.retval != retval) {
ctx              1130 kernel/bpf/cgroup.c 	if (copy_to_user(optval, ctx.optval, ctx.optlen) ||
ctx              1131 kernel/bpf/cgroup.c 	    put_user(ctx.optlen, optlen)) {
ctx              1136 kernel/bpf/cgroup.c 	ret = ctx.retval;
ctx              1139 kernel/bpf/cgroup.c 	sockopt_free_buf(&ctx);
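__cgroup_bpf_run_filter_getsockopt() above copies the user's optval into a kernel buffer, runs the program array over it, and copies the possibly rewritten buffer back. A hedged sketch of the BPF side; SOL_CUSTOM is a made-up level for illustration, and the optval/optval_end bounds check is what the verifier demands before any access:

	#include <linux/bpf.h>
	#include <bpf/bpf_helpers.h>

	#define SOL_CUSTOM 0xdeadbeef	/* illustrative, not a real level */

	SEC("cgroup/getsockopt")
	int example_getsockopt(struct bpf_sockopt *ctx)
	{
		__u8 *optval = ctx->optval;
		__u8 *optval_end = ctx->optval_end;

		if (optval + 1 > optval_end)
			return 1;	/* nothing to inspect; pass through */

		/* rewrite the first result byte for our private level */
		if (ctx->level == SOL_CUSTOM && ctx->retval == 0)
			optval[0] = 0x5a;

		return 1;
	}

	char _license[] SEC("license") = "GPL";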
ctx              1176 kernel/bpf/cgroup.c BPF_CALL_4(bpf_sysctl_get_name, struct bpf_sysctl_kern *, ctx, char *, buf,
ctx              1185 kernel/bpf/cgroup.c 		if (!ctx->head)
ctx              1187 kernel/bpf/cgroup.c 		tmp_ret = sysctl_cpy_dir(ctx->head->parent, &buf, &buf_len);
ctx              1192 kernel/bpf/cgroup.c 	ret = strscpy(buf, ctx->table->procname, buf_len);
ctx              1233 kernel/bpf/cgroup.c BPF_CALL_3(bpf_sysctl_get_current_value, struct bpf_sysctl_kern *, ctx,
ctx              1236 kernel/bpf/cgroup.c 	return copy_sysctl_value(buf, buf_len, ctx->cur_val, ctx->cur_len);
ctx              1248 kernel/bpf/cgroup.c BPF_CALL_3(bpf_sysctl_get_new_value, struct bpf_sysctl_kern *, ctx, char *, buf,
ctx              1251 kernel/bpf/cgroup.c 	if (!ctx->write) {
ctx              1256 kernel/bpf/cgroup.c 	return copy_sysctl_value(buf, buf_len, ctx->new_val, ctx->new_len);
ctx              1268 kernel/bpf/cgroup.c BPF_CALL_3(bpf_sysctl_set_new_value, struct bpf_sysctl_kern *, ctx,
ctx              1271 kernel/bpf/cgroup.c 	if (!ctx->write || !ctx->new_val || !ctx->new_len || !buf || !buf_len)
ctx              1277 kernel/bpf/cgroup.c 	memcpy(ctx->new_val, buf, buf_len);
ctx              1278 kernel/bpf/cgroup.c 	ctx->new_len = buf_len;
ctx              1279 kernel/bpf/cgroup.c 	ctx->new_updated = 1;
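The BPF_CALL_* definitions above implement the bpf_sysctl_get_name()/get_current_value()/get_new_value()/set_new_value() helpers available to cgroup/sysctl programs. A hedged sketch of a program using the first of them; BPF_F_SYSCTL_BASE_NAME requests the leaf name only, and a real policy would act on the fetched name:

	#include <linux/bpf.h>
	#include <bpf/bpf_helpers.h>

	SEC("cgroup/sysctl")
	int example_sysctl(struct bpf_sysctl *ctx)
	{
		char name[32];

		if (bpf_sysctl_get_name(ctx, name, sizeof(name),
					BPF_F_SYSCTL_BASE_NAME) < 0)
			return 0;	/* could not identify it: reject */

		if (!ctx->write)
			return 1;	/* reads always allowed here */

		return 1;		/* a real policy would vet 'name' */
	}

	char _license[] SEC("license") = "GPL";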
ctx              1568 kernel/bpf/core.c static unsigned int PROG_NAME(stack_size)(const void *ctx, const struct bpf_insn *insn) \
ctx              1574 kernel/bpf/core.c 	ARG1 = (u64) (unsigned long) ctx; \
ctx              1612 kernel/bpf/core.c static unsigned int (*interpreters[])(const void *ctx,
ctx              1638 kernel/bpf/core.c static unsigned int __bpf_prog_ret0_warn(const void *ctx,
ctx              1759 kernel/bpf/core.c static unsigned int __bpf_prog_ret1(const void *ctx,
ctx              2053 kernel/bpf/core.c 		 void *ctx, u64 ctx_size, bpf_ctx_copy_t ctx_copy)
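PROG_NAME(stack_size) stamps out one interpreter body per 32-byte stack-size step, and interpreters[] collects them. A hedged reconstruction of the dispatch that selects the variant for a verified program, where stack_depth is the verifier's computed maximum:

	static void example_select_interpreter(struct bpf_prog *fp)
	{
		u32 stack_depth = max_t(u32, fp->aux->stack_depth, 1);

		/* one slot per 32 bytes of BPF stack: 32, 64, ..., 512 */
		fp->bpf_func = interpreters[(round_up(stack_depth, 32) / 32) - 1];
	}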
ctx               249 kernel/bpf/offload.c static unsigned int bpf_prog_warn_on_exec(const void *ctx,
ctx               219 kernel/cgroup/cgroup-internal.h void init_cgroup_root(struct cgroup_fs_context *ctx);
ctx               276 kernel/cgroup/cgroup-internal.h int cgroup1_reconfigure(struct fs_context *ctx);
ctx               909 kernel/cgroup/cgroup-v1.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx               924 kernel/cgroup/cgroup-v1.c 			ctx->subsys_mask |= (1 << i);
ctx               935 kernel/cgroup/cgroup-v1.c 		ctx->none = true;
ctx               938 kernel/cgroup/cgroup-v1.c 		ctx->all_ss = true;
ctx               941 kernel/cgroup/cgroup-v1.c 		ctx->flags |= CGRP_ROOT_NOPREFIX;
ctx               944 kernel/cgroup/cgroup-v1.c 		ctx->cpuset_clone_children = true;
ctx               947 kernel/cgroup/cgroup-v1.c 		ctx->flags |= CGRP_ROOT_CPUSET_V2_MODE;
ctx               950 kernel/cgroup/cgroup-v1.c 		ctx->flags |= CGRP_ROOT_XATTR;
ctx               954 kernel/cgroup/cgroup-v1.c 		if (ctx->release_agent)
ctx               956 kernel/cgroup/cgroup-v1.c 		ctx->release_agent = param->string;
ctx               978 kernel/cgroup/cgroup-v1.c 		if (ctx->name)
ctx               980 kernel/cgroup/cgroup-v1.c 		ctx->name = param->string;
ctx               989 kernel/cgroup/cgroup-v1.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1002 kernel/cgroup/cgroup-v1.c 	ctx->subsys_mask &= enabled;
ctx              1008 kernel/cgroup/cgroup-v1.c 	if (!ctx->subsys_mask && !ctx->none && !ctx->name)
ctx              1009 kernel/cgroup/cgroup-v1.c 		ctx->all_ss = true;
ctx              1011 kernel/cgroup/cgroup-v1.c 	if (ctx->all_ss) {
ctx              1013 kernel/cgroup/cgroup-v1.c 		if (ctx->subsys_mask)
ctx              1016 kernel/cgroup/cgroup-v1.c 		ctx->subsys_mask = enabled;
ctx              1023 kernel/cgroup/cgroup-v1.c 	if (!ctx->subsys_mask && !ctx->name)
ctx              1031 kernel/cgroup/cgroup-v1.c 	if ((ctx->flags & CGRP_ROOT_NOPREFIX) && (ctx->subsys_mask & mask))
ctx              1035 kernel/cgroup/cgroup-v1.c 	if (ctx->subsys_mask && ctx->none)
ctx              1043 kernel/cgroup/cgroup-v1.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1056 kernel/cgroup/cgroup-v1.c 	if (ctx->subsys_mask != root->subsys_mask || ctx->release_agent)
ctx              1060 kernel/cgroup/cgroup-v1.c 	added_mask = ctx->subsys_mask & ~root->subsys_mask;
ctx              1061 kernel/cgroup/cgroup-v1.c 	removed_mask = root->subsys_mask & ~ctx->subsys_mask;
ctx              1064 kernel/cgroup/cgroup-v1.c 	if ((ctx->flags ^ root->flags) ||
ctx              1065 kernel/cgroup/cgroup-v1.c 	    (ctx->name && strcmp(ctx->name, root->name))) {
ctx              1067 kernel/cgroup/cgroup-v1.c 		       ctx->flags, ctx->name ?: "", root->flags, root->name);
ctx              1084 kernel/cgroup/cgroup-v1.c 	if (ctx->release_agent) {
ctx              1086 kernel/cgroup/cgroup-v1.c 		strcpy(root->release_agent_path, ctx->release_agent);
ctx              1115 kernel/cgroup/cgroup-v1.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1133 kernel/cgroup/cgroup-v1.c 		if (!(ctx->subsys_mask & (1 << i)) ||
ctx              1153 kernel/cgroup/cgroup-v1.c 		if (ctx->name) {
ctx              1154 kernel/cgroup/cgroup-v1.c 			if (strcmp(ctx->name, root->name))
ctx              1163 kernel/cgroup/cgroup-v1.c 		if ((ctx->subsys_mask || ctx->none) &&
ctx              1164 kernel/cgroup/cgroup-v1.c 		    (ctx->subsys_mask != root->subsys_mask)) {
ctx              1170 kernel/cgroup/cgroup-v1.c 		if (root->flags ^ ctx->flags)
ctx              1173 kernel/cgroup/cgroup-v1.c 		ctx->root = root;
ctx              1182 kernel/cgroup/cgroup-v1.c 	if (!ctx->subsys_mask && !ctx->none)
ctx              1186 kernel/cgroup/cgroup-v1.c 	if (ctx->ns != &init_cgroup_ns)
ctx              1193 kernel/cgroup/cgroup-v1.c 	ctx->root = root;
ctx              1194 kernel/cgroup/cgroup-v1.c 	init_cgroup_root(ctx);
ctx              1196 kernel/cgroup/cgroup-v1.c 	ret = cgroup_setup_root(root, ctx->subsys_mask);
ctx              1204 kernel/cgroup/cgroup-v1.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1208 kernel/cgroup/cgroup-v1.c 	if (!ns_capable(ctx->ns->user_ns, CAP_SYS_ADMIN))
ctx              1214 kernel/cgroup/cgroup-v1.c 	if (!ret && !percpu_ref_tryget_live(&ctx->root->cgrp.self.refcnt))
ctx              1222 kernel/cgroup/cgroup-v1.c 	if (!ret && percpu_ref_is_dying(&ctx->root->cgrp.self.refcnt)) {
ctx              1835 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1845 kernel/cgroup/cgroup.c 		ctx->flags |= CGRP_ROOT_NS_DELEGATE;
ctx              1848 kernel/cgroup/cgroup.c 		ctx->flags |= CGRP_ROOT_MEMORY_LOCAL_EVENTS;
ctx              1880 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              1882 kernel/cgroup/cgroup.c 	apply_cgroup_root_flags(ctx->flags);
ctx              1970 kernel/cgroup/cgroup.c void init_cgroup_root(struct cgroup_fs_context *ctx)
ctx              1972 kernel/cgroup/cgroup.c 	struct cgroup_root *root = ctx->root;
ctx              1981 kernel/cgroup/cgroup.c 	root->flags = ctx->flags;
ctx              1982 kernel/cgroup/cgroup.c 	if (ctx->release_agent)
ctx              1983 kernel/cgroup/cgroup.c 		strscpy(root->release_agent_path, ctx->release_agent, PATH_MAX);
ctx              1984 kernel/cgroup/cgroup.c 	if (ctx->name)
ctx              1985 kernel/cgroup/cgroup.c 		strscpy(root->name, ctx->name, MAX_CGROUP_ROOT_NAMELEN);
ctx              1986 kernel/cgroup/cgroup.c 	if (ctx->cpuset_clone_children)
ctx              2093 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              2096 kernel/cgroup/cgroup.c 	ctx->kfc.root = ctx->root->kf_root;
ctx              2098 kernel/cgroup/cgroup.c 		ctx->kfc.magic = CGROUP2_SUPER_MAGIC;
ctx              2100 kernel/cgroup/cgroup.c 		ctx->kfc.magic = CGROUP_SUPER_MAGIC;
ctx              2107 kernel/cgroup/cgroup.c 	if (!ret && ctx->ns != &init_cgroup_ns) {
ctx              2115 kernel/cgroup/cgroup.c 		cgrp = cset_cgroup_from_root(ctx->ns->root_cset, ctx->root);
ctx              2130 kernel/cgroup/cgroup.c 	if (!ctx->kfc.new_sb_created)
ctx              2131 kernel/cgroup/cgroup.c 		cgroup_put(&ctx->root->cgrp);
ctx              2141 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              2143 kernel/cgroup/cgroup.c 	kfree(ctx->name);
ctx              2144 kernel/cgroup/cgroup.c 	kfree(ctx->release_agent);
ctx              2145 kernel/cgroup/cgroup.c 	put_cgroup_ns(ctx->ns);
ctx              2147 kernel/cgroup/cgroup.c 	kfree(ctx);
ctx              2152 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx = cgroup_fc2context(fc);
ctx              2157 kernel/cgroup/cgroup.c 	ctx->root = &cgrp_dfl_root;
ctx              2161 kernel/cgroup/cgroup.c 		apply_cgroup_root_flags(ctx->flags);
ctx              2185 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx;
ctx              2187 kernel/cgroup/cgroup.c 	ctx = kzalloc(sizeof(struct cgroup_fs_context), GFP_KERNEL);
ctx              2188 kernel/cgroup/cgroup.c 	if (!ctx)
ctx              2198 kernel/cgroup/cgroup.c 	ctx->ns = current->nsproxy->cgroup_ns;
ctx              2199 kernel/cgroup/cgroup.c 	get_cgroup_ns(ctx->ns);
ctx              2200 kernel/cgroup/cgroup.c 	fc->fs_private = &ctx->kfc;
ctx              2206 kernel/cgroup/cgroup.c 	fc->user_ns = get_user_ns(ctx->ns->user_ns);
ctx              2260 kernel/cgroup/cgroup.c 	struct cgroup_fs_context *ctx;
ctx              2271 kernel/cgroup/cgroup.c 	ctx = cgroup_fc2context(fc);
ctx              2272 kernel/cgroup/cgroup.c 	ctx->subsys_mask = 1 << cpuset_cgrp_id;
ctx              2273 kernel/cgroup/cgroup.c 	ctx->flags |= CGRP_ROOT_NOPREFIX;
ctx              2274 kernel/cgroup/cgroup.c 	ctx->release_agent = agent;
ctx              5693 kernel/cgroup/cgroup.c 	static struct cgroup_fs_context __initdata ctx;
ctx              5697 kernel/cgroup/cgroup.c 	ctx.root = &cgrp_dfl_root;
ctx              5698 kernel/cgroup/cgroup.c 	init_cgroup_root(&ctx);
ctx               182 kernel/events/callchain.c 	struct perf_callchain_entry_ctx ctx;
ctx               192 kernel/events/callchain.c 	ctx.entry     = entry;
ctx               193 kernel/events/callchain.c 	ctx.max_stack = max_stack;
ctx               194 kernel/events/callchain.c 	ctx.nr	      = entry->nr = init_nr;
ctx               195 kernel/events/callchain.c 	ctx.contexts       = 0;
ctx               196 kernel/events/callchain.c 	ctx.contexts_maxed = false;
ctx               200 kernel/events/callchain.c 			perf_callchain_store_context(&ctx, PERF_CONTEXT_KERNEL);
ctx               201 kernel/events/callchain.c 		perf_callchain_kernel(&ctx, regs);
ctx               219 kernel/events/callchain.c 				perf_callchain_store_context(&ctx, PERF_CONTEXT_USER);
ctx               223 kernel/events/callchain.c 			perf_callchain_user(&ctx, regs);
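get_perf_callchain() seeds the perf_callchain_entry_ctx shown above and delegates to the per-architecture walkers, which append frames through perf_callchain_store() until max_stack is reached. A hedged sketch of the arch side; struct example_frame, example_start() and example_unwind_next() are placeholders for whatever the architecture's unwinder provides:

	struct example_frame {
		unsigned long ip;	/* placeholder frame cursor */
	};
	extern struct example_frame example_start(struct pt_regs *regs);
	extern bool example_unwind_next(struct example_frame *frame);

	void perf_callchain_kernel(struct perf_callchain_entry_ctx *entry,
				   struct pt_regs *regs)
	{
		struct example_frame frame = example_start(regs);

		do {
			/* returns nonzero once max_stack entries are stored */
			if (perf_callchain_store(entry, frame.ip))
				break;
		} while (example_unwind_next(&frame));
	}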
ctx               151 kernel/events/core.c __get_cpu_context(struct perf_event_context *ctx)
ctx               153 kernel/events/core.c 	return this_cpu_ptr(ctx->pmu->pmu_cpu_context);
ctx               157 kernel/events/core.c 			  struct perf_event_context *ctx)
ctx               159 kernel/events/core.c 	raw_spin_lock(&cpuctx->ctx.lock);
ctx               160 kernel/events/core.c 	if (ctx)
ctx               161 kernel/events/core.c 		raw_spin_lock(&ctx->lock);
ctx               165 kernel/events/core.c 			    struct perf_event_context *ctx)
ctx               167 kernel/events/core.c 	if (ctx)
ctx               168 kernel/events/core.c 		raw_spin_unlock(&ctx->lock);
ctx               169 kernel/events/core.c 	raw_spin_unlock(&cpuctx->ctx.lock);
ctx               211 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx               212 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx               223 kernel/events/core.c 	if (ctx->task) {
ctx               224 kernel/events/core.c 		if (ctx->task != current) {
ctx               236 kernel/events/core.c 		WARN_ON_ONCE(!ctx->is_active);
ctx               241 kernel/events/core.c 		WARN_ON_ONCE(task_ctx != ctx);
ctx               243 kernel/events/core.c 		WARN_ON_ONCE(&cpuctx->ctx != ctx);
ctx               246 kernel/events/core.c 	efs->func(event, cpuctx, ctx, efs->data);
ctx               255 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx               256 kernel/events/core.c 	struct task_struct *task = READ_ONCE(ctx->task); /* verified in event_function */
ctx               269 kernel/events/core.c 		lockdep_assert_held(&ctx->mutex);
ctx               284 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx               289 kernel/events/core.c 	task = ctx->task;
ctx               291 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx               294 kernel/events/core.c 	if (ctx->is_active) {
ctx               295 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx               298 kernel/events/core.c 	func(event, NULL, ctx, data);
ctx               299 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx               308 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx               309 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx               310 kernel/events/core.c 	struct task_struct *task = READ_ONCE(ctx->task);
ctx               319 kernel/events/core.c 		task_ctx = ctx;
ctx               324 kernel/events/core.c 	task = ctx->task;
ctx               334 kernel/events/core.c 		if (ctx->is_active) {
ctx               338 kernel/events/core.c 			if (WARN_ON_ONCE(cpuctx->task_ctx != ctx))
ctx               342 kernel/events/core.c 		WARN_ON_ONCE(&cpuctx->ctx != ctx);
ctx               345 kernel/events/core.c 	func(event, cpuctx, ctx, data);
ctx               571 kernel/events/core.c static void update_context_time(struct perf_event_context *ctx);
ctx               678 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx               679 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx               755 kernel/events/core.c 	cgrp = perf_cgroup_from_task(current, event->ctx);
ctx               765 kernel/events/core.c 			  struct perf_event_context *ctx)
ctx               776 kernel/events/core.c 	if (!task || !ctx->nr_cgroups)
ctx               779 kernel/events/core.c 	cgrp = perf_cgroup_from_task(task, ctx);
ctx               784 kernel/events/core.c 		info->timestamp = ctx->timestamp;
ctx               813 kernel/events/core.c 		WARN_ON_ONCE(cpuctx->ctx.nr_cgroups == 0);
ctx               816 kernel/events/core.c 		perf_pmu_disable(cpuctx->ctx.pmu);
ctx               837 kernel/events/core.c 							     &cpuctx->ctx);
ctx               840 kernel/events/core.c 		perf_pmu_enable(cpuctx->ctx.pmu);
ctx               949 kernel/events/core.c 			 struct perf_event_context *ctx, bool add)
ctx               961 kernel/events/core.c 	cpuctx = __get_cpu_context(ctx);
ctx               970 kernel/events/core.c 		struct perf_cgroup *cgrp = perf_cgroup_from_task(current, ctx);
ctx               976 kernel/events/core.c 	if (add && ctx->nr_cgroups++)
ctx               978 kernel/events/core.c 	else if (!add && --ctx->nr_cgroups)
ctx              1035 kernel/events/core.c 			  struct perf_event_context *ctx)
ctx              1056 kernel/events/core.c 			 struct perf_event_context *ctx, bool add)
ctx              1093 kernel/events/core.c 	struct pmu *pmu = cpuctx->ctx.pmu;
ctx              1118 kernel/events/core.c 	struct pmu *pmu = cpuctx->ctx.pmu;
ctx              1158 kernel/events/core.c static void perf_event_ctx_activate(struct perf_event_context *ctx)
ctx              1164 kernel/events/core.c 	WARN_ON(!list_empty(&ctx->active_ctx_list));
ctx              1166 kernel/events/core.c 	list_add(&ctx->active_ctx_list, head);
ctx              1169 kernel/events/core.c static void perf_event_ctx_deactivate(struct perf_event_context *ctx)
ctx              1173 kernel/events/core.c 	WARN_ON(list_empty(&ctx->active_ctx_list));
ctx              1175 kernel/events/core.c 	list_del_init(&ctx->active_ctx_list);
ctx              1178 kernel/events/core.c static void get_ctx(struct perf_event_context *ctx)
ctx              1180 kernel/events/core.c 	refcount_inc(&ctx->refcount);
ctx              1185 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              1187 kernel/events/core.c 	ctx = container_of(head, struct perf_event_context, rcu_head);
ctx              1188 kernel/events/core.c 	kfree(ctx->task_ctx_data);
ctx              1189 kernel/events/core.c 	kfree(ctx);
ctx              1192 kernel/events/core.c static void put_ctx(struct perf_event_context *ctx)
ctx              1194 kernel/events/core.c 	if (refcount_dec_and_test(&ctx->refcount)) {
ctx              1195 kernel/events/core.c 		if (ctx->parent_ctx)
ctx              1196 kernel/events/core.c 			put_ctx(ctx->parent_ctx);
ctx              1197 kernel/events/core.c 		if (ctx->task && ctx->task != TASK_TOMBSTONE)
ctx              1198 kernel/events/core.c 			put_task_struct(ctx->task);
ctx              1199 kernel/events/core.c 		call_rcu(&ctx->rcu_head, free_ctx);
ctx              1272 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              1276 kernel/events/core.c 	ctx = READ_ONCE(event->ctx);
ctx              1277 kernel/events/core.c 	if (!refcount_inc_not_zero(&ctx->refcount)) {
ctx              1283 kernel/events/core.c 	mutex_lock_nested(&ctx->mutex, nesting);
ctx              1284 kernel/events/core.c 	if (event->ctx != ctx) {
ctx              1285 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              1286 kernel/events/core.c 		put_ctx(ctx);
ctx              1290 kernel/events/core.c 	return ctx;
ctx              1300 kernel/events/core.c 				  struct perf_event_context *ctx)
ctx              1302 kernel/events/core.c 	mutex_unlock(&ctx->mutex);
ctx              1303 kernel/events/core.c 	put_ctx(ctx);
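perf_event_ctx_lock_nested()/perf_event_ctx_unlock() above implement a stabilize-and-revalidate idiom: pin the context with a refcount taken under RCU, sleep on its mutex, then re-check that event->ctx still points at the pinned context, since the event may have been moved to another context while we slept. A condensed, hedged sketch of the same loop:

	static struct perf_event_context *
	example_lock_event_ctx(struct perf_event *event)
	{
		struct perf_event_context *ctx;

	again:
		rcu_read_lock();
		ctx = READ_ONCE(event->ctx);
		if (!refcount_inc_not_zero(&ctx->refcount)) {
			rcu_read_unlock();	/* ctx being freed; reload */
			goto again;
		}
		rcu_read_unlock();

		mutex_lock(&ctx->mutex);
		if (event->ctx != ctx) {	/* moved while we slept */
			mutex_unlock(&ctx->mutex);
			put_ctx(ctx);
			goto again;
		}
		return ctx;	/* stable until mutex_unlock() + put_ctx() */
	}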
ctx              1312 kernel/events/core.c unclone_ctx(struct perf_event_context *ctx)
ctx              1314 kernel/events/core.c 	struct perf_event_context *parent_ctx = ctx->parent_ctx;
ctx              1316 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              1319 kernel/events/core.c 		ctx->parent_ctx = NULL;
ctx              1320 kernel/events/core.c 	ctx->generation++;
ctx              1375 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              1389 kernel/events/core.c 	ctx = rcu_dereference(task->perf_event_ctxp[ctxn]);
ctx              1390 kernel/events/core.c 	if (ctx) {
ctx              1401 kernel/events/core.c 		raw_spin_lock(&ctx->lock);
ctx              1402 kernel/events/core.c 		if (ctx != rcu_dereference(task->perf_event_ctxp[ctxn])) {
ctx              1403 kernel/events/core.c 			raw_spin_unlock(&ctx->lock);
ctx              1409 kernel/events/core.c 		if (ctx->task == TASK_TOMBSTONE ||
ctx              1410 kernel/events/core.c 		    !refcount_inc_not_zero(&ctx->refcount)) {
ctx              1411 kernel/events/core.c 			raw_spin_unlock(&ctx->lock);
ctx              1412 kernel/events/core.c 			ctx = NULL;
ctx              1414 kernel/events/core.c 			WARN_ON_ONCE(ctx->task != task);
ctx              1418 kernel/events/core.c 	if (!ctx)
ctx              1420 kernel/events/core.c 	return ctx;
ctx              1431 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              1434 kernel/events/core.c 	ctx = perf_lock_task_context(task, ctxn, &flags);
ctx              1435 kernel/events/core.c 	if (ctx) {
ctx              1436 kernel/events/core.c 		++ctx->pin_count;
ctx              1437 kernel/events/core.c 		raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              1439 kernel/events/core.c 	return ctx;
ctx              1442 kernel/events/core.c static void perf_unpin_context(struct perf_event_context *ctx)
ctx              1446 kernel/events/core.c 	raw_spin_lock_irqsave(&ctx->lock, flags);
ctx              1447 kernel/events/core.c 	--ctx->pin_count;
ctx              1448 kernel/events/core.c 	raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              1454 kernel/events/core.c static void update_context_time(struct perf_event_context *ctx)
ctx              1458 kernel/events/core.c 	ctx->time += now - ctx->timestamp;
ctx              1459 kernel/events/core.c 	ctx->timestamp = now;
ctx              1464 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              1469 kernel/events/core.c 	return ctx ? ctx->time : 0;
ctx              1474 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              1477 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              1487 kernel/events/core.c 	if (!ctx->task)
ctx              1507 kernel/events/core.c get_event_groups(struct perf_event *event, struct perf_event_context *ctx)
ctx              1510 kernel/events/core.c 		return &ctx->pinned_groups;
ctx              1512 kernel/events/core.c 		return &ctx->flexible_groups;
ctx              1582 kernel/events/core.c add_event_to_groups(struct perf_event *event, struct perf_event_context *ctx)
ctx              1586 kernel/events/core.c 	groups = get_event_groups(event, ctx);
ctx              1608 kernel/events/core.c del_event_from_groups(struct perf_event *event, struct perf_event_context *ctx)
ctx              1612 kernel/events/core.c 	groups = get_event_groups(event, ctx);
ctx              1670 kernel/events/core.c list_add_event(struct perf_event *event, struct perf_event_context *ctx)
ctx              1672 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              1686 kernel/events/core.c 		add_event_to_groups(event, ctx);
ctx              1689 kernel/events/core.c 	list_update_cgroup_event(event, ctx, true);
ctx              1691 kernel/events/core.c 	list_add_rcu(&event->event_entry, &ctx->event_list);
ctx              1692 kernel/events/core.c 	ctx->nr_events++;
ctx              1694 kernel/events/core.c 		ctx->nr_stat++;
ctx              1696 kernel/events/core.c 	ctx->generation++;
ctx              1827 kernel/events/core.c 	lockdep_assert_held(&event->ctx->lock);
ctx              1840 kernel/events/core.c 	WARN_ON_ONCE(group_leader->ctx != event->ctx);
ctx              1858 kernel/events/core.c list_del_event(struct perf_event *event, struct perf_event_context *ctx)
ctx              1860 kernel/events/core.c 	WARN_ON_ONCE(event->ctx != ctx);
ctx              1861 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              1871 kernel/events/core.c 	list_update_cgroup_event(event, ctx, false);
ctx              1873 kernel/events/core.c 	ctx->nr_events--;
ctx              1875 kernel/events/core.c 		ctx->nr_stat--;
ctx              1880 kernel/events/core.c 		del_event_from_groups(event, ctx);
ctx              1892 kernel/events/core.c 	ctx->generation++;
ctx              1910 kernel/events/core.c 			    struct perf_event_context *ctx);
ctx              1914 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              1915 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx              1944 kernel/events/core.c 		event_sched_out(iter, cpuctx, ctx);
ctx              1981 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              1983 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              2018 kernel/events/core.c 			add_event_to_groups(sibling, event->ctx);
ctx              2022 kernel/events/core.c 					&ctx->pinned_active : &ctx->flexible_active;
ctx              2028 kernel/events/core.c 		WARN_ON_ONCE(sibling->ctx != event->ctx);
ctx              2080 kernel/events/core.c 		  struct perf_event_context *ctx)
ctx              2084 kernel/events/core.c 	WARN_ON_ONCE(event->ctx != ctx);
ctx              2085 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              2110 kernel/events/core.c 	if (!--ctx->nr_active)
ctx              2111 kernel/events/core.c 		perf_event_ctx_deactivate(ctx);
ctx              2113 kernel/events/core.c 		ctx->nr_freq--;
ctx              2123 kernel/events/core.c 		struct perf_event_context *ctx)
ctx              2130 kernel/events/core.c 	perf_pmu_disable(ctx->pmu);
ctx              2132 kernel/events/core.c 	event_sched_out(group_event, cpuctx, ctx);
ctx              2138 kernel/events/core.c 		event_sched_out(event, cpuctx, ctx);
ctx              2140 kernel/events/core.c 	perf_pmu_enable(ctx->pmu);
ctx              2157 kernel/events/core.c 			   struct perf_event_context *ctx,
ctx              2162 kernel/events/core.c 	if (ctx->is_active & EVENT_TIME) {
ctx              2163 kernel/events/core.c 		update_context_time(ctx);
ctx              2167 kernel/events/core.c 	event_sched_out(event, cpuctx, ctx);
ctx              2170 kernel/events/core.c 	list_del_event(event, ctx);
ctx              2172 kernel/events/core.c 	if (!ctx->nr_events && ctx->is_active) {
ctx              2173 kernel/events/core.c 		ctx->is_active = 0;
ctx              2174 kernel/events/core.c 		if (ctx->task) {
ctx              2175 kernel/events/core.c 			WARN_ON_ONCE(cpuctx->task_ctx != ctx);
ctx              2193 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              2195 kernel/events/core.c 	lockdep_assert_held(&ctx->mutex);
ctx              2212 kernel/events/core.c 		raw_spin_lock_irq(&ctx->lock);
ctx              2214 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              2223 kernel/events/core.c 				 struct perf_event_context *ctx,
ctx              2229 kernel/events/core.c 	if (ctx->is_active & EVENT_TIME) {
ctx              2230 kernel/events/core.c 		update_context_time(ctx);
ctx              2235 kernel/events/core.c 		group_sched_out(event, cpuctx, ctx);
ctx              2237 kernel/events/core.c 		event_sched_out(event, cpuctx, ctx);
ctx              2258 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              2260 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx              2262 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              2265 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx              2281 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              2283 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              2285 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              2297 kernel/events/core.c 				 struct perf_event_context *ctx)
ctx              2327 kernel/events/core.c 		event->shadow_ctx_time = event->tstamp - ctx->timestamp;
ctx              2338 kernel/events/core.c 		 struct perf_event_context *ctx)
ctx              2342 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              2368 kernel/events/core.c 	perf_set_shadow_time(event, ctx);
ctx              2381 kernel/events/core.c 	if (!ctx->nr_active++)
ctx              2382 kernel/events/core.c 		perf_event_ctx_activate(ctx);
ctx              2384 kernel/events/core.c 		ctx->nr_freq++;
ctx              2398 kernel/events/core.c 	       struct perf_event_context *ctx)
ctx              2401 kernel/events/core.c 	struct pmu *pmu = ctx->pmu;
ctx              2408 kernel/events/core.c 	if (event_sched_in(group_event, cpuctx, ctx)) {
ctx              2418 kernel/events/core.c 		if (event_sched_in(event, cpuctx, ctx)) {
ctx              2437 kernel/events/core.c 		event_sched_out(event, cpuctx, ctx);
ctx              2439 kernel/events/core.c 	event_sched_out(group_event, cpuctx, ctx);
ctx              2480 kernel/events/core.c 			       struct perf_event_context *ctx)
ctx              2482 kernel/events/core.c 	list_add_event(event, ctx);
ctx              2486 kernel/events/core.c static void ctx_sched_out(struct perf_event_context *ctx,
ctx              2490 kernel/events/core.c ctx_sched_in(struct perf_event_context *ctx,
ctx              2496 kernel/events/core.c 			       struct perf_event_context *ctx,
ctx              2502 kernel/events/core.c 	if (WARN_ON_ONCE(ctx != cpuctx->task_ctx))
ctx              2505 kernel/events/core.c 	ctx_sched_out(ctx, cpuctx, event_type);
ctx              2509 kernel/events/core.c 				struct perf_event_context *ctx,
ctx              2513 kernel/events/core.c 	if (ctx)
ctx              2514 kernel/events/core.c 		ctx_sched_in(ctx, cpuctx, EVENT_PINNED, task);
ctx              2516 kernel/events/core.c 	if (ctx)
ctx              2517 kernel/events/core.c 		ctx_sched_in(ctx, cpuctx, EVENT_FLEXIBLE, task);
ctx              2551 kernel/events/core.c 	perf_pmu_disable(cpuctx->ctx.pmu);
ctx              2568 kernel/events/core.c 	perf_pmu_enable(cpuctx->ctx.pmu);
ctx              2590 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              2591 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx              2596 kernel/events/core.c 	raw_spin_lock(&cpuctx->ctx.lock);
ctx              2597 kernel/events/core.c 	if (ctx->task) {
ctx              2598 kernel/events/core.c 		raw_spin_lock(&ctx->lock);
ctx              2599 kernel/events/core.c 		task_ctx = ctx;
ctx              2601 kernel/events/core.c 		reprogram = (ctx->task == current);
ctx              2610 kernel/events/core.c 		if (task_curr(ctx->task) && !reprogram) {
ctx              2615 kernel/events/core.c 		WARN_ON_ONCE(reprogram && cpuctx->task_ctx && cpuctx->task_ctx != ctx);
ctx              2626 kernel/events/core.c 		struct perf_cgroup *cgrp = perf_cgroup_from_task(current, ctx);
ctx              2633 kernel/events/core.c 		ctx_sched_out(ctx, cpuctx, EVENT_TIME);
ctx              2634 kernel/events/core.c 		add_event_to_ctx(event, ctx);
ctx              2637 kernel/events/core.c 		add_event_to_ctx(event, ctx);
ctx              2647 kernel/events/core.c 					struct perf_event_context *ctx);
ctx              2655 kernel/events/core.c perf_install_in_context(struct perf_event_context *ctx,
ctx              2659 kernel/events/core.c 	struct task_struct *task = READ_ONCE(ctx->task);
ctx              2661 kernel/events/core.c 	lockdep_assert_held(&ctx->mutex);
ctx              2663 kernel/events/core.c 	WARN_ON_ONCE(!exclusive_event_installable(event, ctx));
ctx              2672 kernel/events/core.c 	smp_store_release(&event->ctx, ctx);
ctx              2720 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx              2721 kernel/events/core.c 	task = ctx->task;
ctx              2728 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              2736 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              2739 kernel/events/core.c 	add_event_to_ctx(event, ctx);
ctx              2740 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx              2748 kernel/events/core.c 				struct perf_event_context *ctx,
ctx              2758 kernel/events/core.c 	if (ctx->is_active)
ctx              2759 kernel/events/core.c 		ctx_sched_out(ctx, cpuctx, EVENT_TIME);
ctx              2763 kernel/events/core.c 	if (!ctx->is_active)
ctx              2767 kernel/events/core.c 		ctx_sched_in(ctx, cpuctx, EVENT_TIME, current);
ctx              2776 kernel/events/core.c 		ctx_sched_in(ctx, cpuctx, EVENT_TIME, current);
ctx              2781 kernel/events/core.c 	if (ctx->task)
ctx              2782 kernel/events/core.c 		WARN_ON_ONCE(task_ctx != ctx);
ctx              2798 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              2800 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx              2803 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              2816 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx              2826 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              2828 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              2830 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              2959 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              2962 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              2964 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              3000 kernel/events/core.c static void ctx_sched_out(struct perf_event_context *ctx,
ctx              3005 kernel/events/core.c 	int is_active = ctx->is_active;
ctx              3007 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              3009 kernel/events/core.c 	if (likely(!ctx->nr_events)) {
ctx              3013 kernel/events/core.c 		WARN_ON_ONCE(ctx->is_active);
ctx              3014 kernel/events/core.c 		if (ctx->task)
ctx              3019 kernel/events/core.c 	ctx->is_active &= ~event_type;
ctx              3020 kernel/events/core.c 	if (!(ctx->is_active & EVENT_ALL))
ctx              3021 kernel/events/core.c 		ctx->is_active = 0;
ctx              3023 kernel/events/core.c 	if (ctx->task) {
ctx              3024 kernel/events/core.c 		WARN_ON_ONCE(cpuctx->task_ctx != ctx);
ctx              3025 kernel/events/core.c 		if (!ctx->is_active)
ctx              3041 kernel/events/core.c 		update_context_time(ctx);
ctx              3045 kernel/events/core.c 	is_active ^= ctx->is_active; /* changed bits */
ctx              3047 kernel/events/core.c 	if (!ctx->nr_active || !(is_active & EVENT_ALL))
ctx              3054 kernel/events/core.c 	ctx->rotate_necessary = 0;
ctx              3056 kernel/events/core.c 	perf_pmu_disable(ctx->pmu);
ctx              3058 kernel/events/core.c 		list_for_each_entry_safe(event, tmp, &ctx->pinned_active, active_list)
ctx              3059 kernel/events/core.c 			group_sched_out(event, cpuctx, ctx);
ctx              3063 kernel/events/core.c 		list_for_each_entry_safe(event, tmp, &ctx->flexible_active, active_list)
ctx              3064 kernel/events/core.c 			group_sched_out(event, cpuctx, ctx);
ctx              3066 kernel/events/core.c 	perf_pmu_enable(ctx->pmu);
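Both ctx_sched_out() and ctx_sched_in() use is_active ^= ctx->is_active to isolate the event classes whose state actually flipped, so only those group lists get walked. A worked fragment with hedged, made-up values:

	int is_active = EVENT_PINNED | EVENT_FLEXIBLE;	/* state on entry */

	ctx->is_active &= ~EVENT_FLEXIBLE;	/* caller clears one class */
	is_active ^= ctx->is_active;		/* == EVENT_FLEXIBLE: only the
						 * flexible_active list is
						 * scheduled out */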
ctx              3145 kernel/events/core.c static void perf_event_sync_stat(struct perf_event_context *ctx,
ctx              3150 kernel/events/core.c 	if (!ctx->nr_stat)
ctx              3153 kernel/events/core.c 	update_context_time(ctx);
ctx              3155 kernel/events/core.c 	event = list_first_entry(&ctx->event_list,
ctx              3161 kernel/events/core.c 	while (&event->event_entry != &ctx->event_list &&
ctx              3174 kernel/events/core.c 	struct perf_event_context *ctx = task->perf_event_ctxp[ctxn];
ctx              3180 kernel/events/core.c 	if (likely(!ctx))
ctx              3183 kernel/events/core.c 	cpuctx = __get_cpu_context(ctx);
ctx              3192 kernel/events/core.c 	parent = rcu_dereference(ctx->parent_ctx);
ctx              3199 kernel/events/core.c 	if (next_parent == ctx || next_ctx == parent || next_parent == parent) {
ctx              3209 kernel/events/core.c 		raw_spin_lock(&ctx->lock);
ctx              3211 kernel/events/core.c 		if (context_equiv(ctx, next_ctx)) {
ctx              3212 kernel/events/core.c 			WRITE_ONCE(ctx->task, next);
ctx              3215 kernel/events/core.c 			swap(ctx->task_ctx_data, next_ctx->task_ctx_data);
ctx              3225 kernel/events/core.c 			RCU_INIT_POINTER(next->perf_event_ctxp[ctxn], ctx);
ctx              3229 kernel/events/core.c 			perf_event_sync_stat(ctx, next_ctx);
ctx              3232 kernel/events/core.c 		raw_spin_unlock(&ctx->lock);
ctx              3238 kernel/events/core.c 		raw_spin_lock(&ctx->lock);
ctx              3239 kernel/events/core.c 		task_ctx_sched_out(cpuctx, ctx, EVENT_ALL);
ctx              3240 kernel/events/core.c 		raw_spin_unlock(&ctx->lock);
ctx              3286 kernel/events/core.c 		pmu = cpuctx->ctx.pmu; /* software PMUs will not have sched_task */
ctx              3347 kernel/events/core.c 	ctx_sched_out(&cpuctx->ctx, cpuctx, event_type);
ctx              3382 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              3398 kernel/events/core.c 		if (!group_sched_in(event, sid->cpuctx, sid->ctx))
ctx              3399 kernel/events/core.c 			list_add_tail(&event->active_list, &sid->ctx->pinned_active);
ctx              3423 kernel/events/core.c 		int ret = group_sched_in(event, sid->cpuctx, sid->ctx);
ctx              3426 kernel/events/core.c 			sid->ctx->rotate_necessary = 1;
ctx              3429 kernel/events/core.c 		list_add_tail(&event->active_list, &sid->ctx->flexible_active);
ctx              3436 kernel/events/core.c ctx_pinned_sched_in(struct perf_event_context *ctx,
ctx              3440 kernel/events/core.c 		.ctx = ctx,
ctx              3445 kernel/events/core.c 	visit_groups_merge(&ctx->pinned_groups,
ctx              3451 kernel/events/core.c ctx_flexible_sched_in(struct perf_event_context *ctx,
ctx              3455 kernel/events/core.c 		.ctx = ctx,
ctx              3460 kernel/events/core.c 	visit_groups_merge(&ctx->flexible_groups,
ctx              3466 kernel/events/core.c ctx_sched_in(struct perf_event_context *ctx,
ctx              3471 kernel/events/core.c 	int is_active = ctx->is_active;
ctx              3474 kernel/events/core.c 	lockdep_assert_held(&ctx->lock);
ctx              3476 kernel/events/core.c 	if (likely(!ctx->nr_events))
ctx              3479 kernel/events/core.c 	ctx->is_active |= (event_type | EVENT_TIME);
ctx              3480 kernel/events/core.c 	if (ctx->task) {
ctx              3482 kernel/events/core.c 			cpuctx->task_ctx = ctx;
ctx              3484 kernel/events/core.c 			WARN_ON_ONCE(cpuctx->task_ctx != ctx);
ctx              3487 kernel/events/core.c 	is_active ^= ctx->is_active; /* changed bits */
ctx              3492 kernel/events/core.c 		ctx->timestamp = now;
ctx              3493 kernel/events/core.c 		perf_cgroup_set_timestamp(task, ctx);
ctx              3501 kernel/events/core.c 		ctx_pinned_sched_in(ctx, cpuctx);
ctx              3505 kernel/events/core.c 		ctx_flexible_sched_in(ctx, cpuctx);
ctx              3512 kernel/events/core.c 	struct perf_event_context *ctx = &cpuctx->ctx;
ctx              3514 kernel/events/core.c 	ctx_sched_in(ctx, cpuctx, event_type, task);
ctx              3517 kernel/events/core.c static void perf_event_context_sched_in(struct perf_event_context *ctx,
ctx              3522 kernel/events/core.c 	cpuctx = __get_cpu_context(ctx);
ctx              3523 kernel/events/core.c 	if (cpuctx->task_ctx == ctx)
ctx              3526 kernel/events/core.c 	perf_ctx_lock(cpuctx, ctx);
ctx              3531 kernel/events/core.c 	if (!ctx->nr_events)
ctx              3534 kernel/events/core.c 	perf_pmu_disable(ctx->pmu);
ctx              3543 kernel/events/core.c 	if (!RB_EMPTY_ROOT(&ctx->pinned_groups.tree))
ctx              3545 kernel/events/core.c 	perf_event_sched_in(cpuctx, ctx, task);
ctx              3546 kernel/events/core.c 	perf_pmu_enable(ctx->pmu);
ctx              3549 kernel/events/core.c 	perf_ctx_unlock(cpuctx, ctx);
ctx              3566 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              3580 kernel/events/core.c 		ctx = task->perf_event_ctxp[ctxn];
ctx              3581 kernel/events/core.c 		if (likely(!ctx))
ctx              3584 kernel/events/core.c 		perf_event_context_sched_in(ctx, task);
ctx              3704 kernel/events/core.c static void perf_adjust_freq_unthr_context(struct perf_event_context *ctx,
ctx              3717 kernel/events/core.c 	if (!(ctx->nr_freq || needs_unthr))
ctx              3720 kernel/events/core.c 	raw_spin_lock(&ctx->lock);
ctx              3721 kernel/events/core.c 	perf_pmu_disable(ctx->pmu);
ctx              3723 kernel/events/core.c 	list_for_each_entry_rcu(event, &ctx->event_list, event_entry) {
ctx              3767 kernel/events/core.c 	perf_pmu_enable(ctx->pmu);
ctx              3768 kernel/events/core.c 	raw_spin_unlock(&ctx->lock);
ctx              3774 kernel/events/core.c static void rotate_ctx(struct perf_event_context *ctx, struct perf_event *event)
ctx              3780 kernel/events/core.c 	if (ctx->rotate_disable)
ctx              3783 kernel/events/core.c 	perf_event_groups_delete(&ctx->flexible_groups, event);
ctx              3784 kernel/events/core.c 	perf_event_groups_insert(&ctx->flexible_groups, event);
ctx              3789 kernel/events/core.c ctx_event_to_rotate(struct perf_event_context *ctx)
ctx              3794 kernel/events/core.c 	event = list_first_entry_or_null(&ctx->flexible_active,
ctx              3799 kernel/events/core.c 		event = rb_entry_safe(rb_first(&ctx->flexible_groups.tree),
ctx              3817 kernel/events/core.c 	cpu_rotate = cpuctx->ctx.rotate_necessary;
ctx              3825 kernel/events/core.c 	perf_pmu_disable(cpuctx->ctx.pmu);
ctx              3830 kernel/events/core.c 		cpu_event = ctx_event_to_rotate(&cpuctx->ctx);
ctx              3844 kernel/events/core.c 		rotate_ctx(&cpuctx->ctx, cpu_event);
ctx              3848 kernel/events/core.c 	perf_pmu_enable(cpuctx->ctx.pmu);
ctx              3857 kernel/events/core.c 	struct perf_event_context *ctx, *tmp;
ctx              3866 kernel/events/core.c 	list_for_each_entry_safe(ctx, tmp, head, active_ctx_list)
ctx              3867 kernel/events/core.c 		perf_adjust_freq_unthr_context(ctx, throttled);
ctx              3871 kernel/events/core.c 				struct perf_event_context *ctx)
ctx              3891 kernel/events/core.c 	struct perf_event_context *ctx, *clone_ctx = NULL;
ctx              3899 kernel/events/core.c 	ctx = current->perf_event_ctxp[ctxn];
ctx              3900 kernel/events/core.c 	if (!ctx || !ctx->nr_events)
ctx              3903 kernel/events/core.c 	cpuctx = __get_cpu_context(ctx);
ctx              3904 kernel/events/core.c 	perf_ctx_lock(cpuctx, ctx);
ctx              3905 kernel/events/core.c 	ctx_sched_out(ctx, cpuctx, EVENT_TIME);
ctx              3906 kernel/events/core.c 	list_for_each_entry(event, &ctx->event_list, event_entry) {
ctx              3907 kernel/events/core.c 		enabled |= event_enable_on_exec(event, ctx);
ctx              3915 kernel/events/core.c 		clone_ctx = unclone_ctx(ctx);
ctx              3916 kernel/events/core.c 		ctx_resched(cpuctx, ctx, event_type);
ctx              3918 kernel/events/core.c 		ctx_sched_in(ctx, cpuctx, EVENT_TIME, current);
ctx              3920 kernel/events/core.c 	perf_ctx_unlock(cpuctx, ctx);
ctx              3959 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              3960 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx              3970 kernel/events/core.c 	if (ctx->task && cpuctx->task_ctx != ctx)
ctx              3973 kernel/events/core.c 	raw_spin_lock(&ctx->lock);
ctx              3974 kernel/events/core.c 	if (ctx->is_active & EVENT_TIME) {
ctx              3975 kernel/events/core.c 		update_context_time(ctx);
ctx              4009 kernel/events/core.c 	raw_spin_unlock(&ctx->lock);
ctx              4140 kernel/events/core.c 		struct perf_event_context *ctx = event->ctx;
ctx              4143 kernel/events/core.c 		raw_spin_lock_irqsave(&ctx->lock, flags);
ctx              4146 kernel/events/core.c 			raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              4154 kernel/events/core.c 		if (ctx->is_active & EVENT_TIME) {
ctx              4155 kernel/events/core.c 			update_context_time(ctx);
ctx              4162 kernel/events/core.c 		raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              4171 kernel/events/core.c static void __perf_event_init_context(struct perf_event_context *ctx)
ctx              4173 kernel/events/core.c 	raw_spin_lock_init(&ctx->lock);
ctx              4174 kernel/events/core.c 	mutex_init(&ctx->mutex);
ctx              4175 kernel/events/core.c 	INIT_LIST_HEAD(&ctx->active_ctx_list);
ctx              4176 kernel/events/core.c 	perf_event_groups_init(&ctx->pinned_groups);
ctx              4177 kernel/events/core.c 	perf_event_groups_init(&ctx->flexible_groups);
ctx              4178 kernel/events/core.c 	INIT_LIST_HEAD(&ctx->event_list);
ctx              4179 kernel/events/core.c 	INIT_LIST_HEAD(&ctx->pinned_active);
ctx              4180 kernel/events/core.c 	INIT_LIST_HEAD(&ctx->flexible_active);
ctx              4181 kernel/events/core.c 	refcount_set(&ctx->refcount, 1);
ctx              4187 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              4189 kernel/events/core.c 	ctx = kzalloc(sizeof(struct perf_event_context), GFP_KERNEL);
ctx              4190 kernel/events/core.c 	if (!ctx)
ctx              4193 kernel/events/core.c 	__perf_event_init_context(ctx);
ctx              4195 kernel/events/core.c 		ctx->task = get_task_struct(task);
ctx              4196 kernel/events/core.c 	ctx->pmu = pmu;
ctx              4198 kernel/events/core.c 	return ctx;
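
The __perf_event_init_context()/alloc_perf_context() pair above follows a common kernel idiom: zeroed allocation, lock and list initialisation, then a refcount published at 1 before anyone else can see the object. A minimal self-contained sketch of that idiom; struct my_ctx and its helpers are invented names for illustration, not perf API:

#include <linux/slab.h>
#include <linux/refcount.h>
#include <linux/spinlock.h>
#include <linux/list.h>

/* Invented example type; not part of the perf code above. */
struct my_ctx {
	raw_spinlock_t		lock;
	struct list_head	event_list;
	refcount_t		refcount;
};

static struct my_ctx *alloc_my_ctx(void)
{
	struct my_ctx *ctx;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);	/* zeroed, may sleep */
	if (!ctx)
		return NULL;

	raw_spin_lock_init(&ctx->lock);
	INIT_LIST_HEAD(&ctx->event_list);
	refcount_set(&ctx->refcount, 1);	/* caller owns the first reference */
	return ctx;
}

static void put_my_ctx(struct my_ctx *ctx)
{
	if (refcount_dec_and_test(&ctx->refcount))
		kfree(ctx);
}
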
ctx              4228 kernel/events/core.c 	struct perf_event_context *ctx, *clone_ctx = NULL;
ctx              4241 kernel/events/core.c 		ctx = &cpuctx->ctx;
ctx              4242 kernel/events/core.c 		get_ctx(ctx);
ctx              4243 kernel/events/core.c 		++ctx->pin_count;
ctx              4245 kernel/events/core.c 		return ctx;
ctx              4262 kernel/events/core.c 	ctx = perf_lock_task_context(task, ctxn, &flags);
ctx              4263 kernel/events/core.c 	if (ctx) {
ctx              4264 kernel/events/core.c 		clone_ctx = unclone_ctx(ctx);
ctx              4265 kernel/events/core.c 		++ctx->pin_count;
ctx              4267 kernel/events/core.c 		if (task_ctx_data && !ctx->task_ctx_data) {
ctx              4268 kernel/events/core.c 			ctx->task_ctx_data = task_ctx_data;
ctx              4271 kernel/events/core.c 		raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              4276 kernel/events/core.c 		ctx = alloc_perf_context(pmu, task);
ctx              4278 kernel/events/core.c 		if (!ctx)
ctx              4282 kernel/events/core.c 			ctx->task_ctx_data = task_ctx_data;
ctx              4297 kernel/events/core.c 			get_ctx(ctx);
ctx              4298 kernel/events/core.c 			++ctx->pin_count;
ctx              4299 kernel/events/core.c 			rcu_assign_pointer(task->perf_event_ctxp[ctxn], ctx);
ctx              4304 kernel/events/core.c 			put_ctx(ctx);
ctx              4313 kernel/events/core.c 	return ctx;
ctx              4520 kernel/events/core.c 					struct perf_event_context *ctx)
ctx              4525 kernel/events/core.c 	lockdep_assert_held(&ctx->mutex);
ctx              4530 kernel/events/core.c 	list_for_each_entry(iter_event, &ctx->event_list, event_entry) {
ctx              4585 kernel/events/core.c 	if (event->ctx)
ctx              4586 kernel/events/core.c 		put_ctx(event->ctx);
ctx              4676 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              4684 kernel/events/core.c 	if (!ctx) {
ctx              4693 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              4694 kernel/events/core.c 	WARN_ON_ONCE(ctx->parent_ctx);
ctx              4697 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx              4710 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx              4712 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              4722 kernel/events/core.c 		ctx = READ_ONCE(child->ctx);
ctx              4731 kernel/events/core.c 		get_ctx(ctx);
ctx              4739 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              4760 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              4761 kernel/events/core.c 		put_ctx(ctx);
ctx              4767 kernel/events/core.c 		void *var = &child->ctx->refcount;
ctx              4826 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              4829 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              4831 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              4840 kernel/events/core.c 	struct perf_event_context *ctx = leader->ctx;
ctx              4850 kernel/events/core.c 	raw_spin_lock_irqsave(&ctx->lock, flags);
ctx              4880 kernel/events/core.c 	raw_spin_unlock_irqrestore(&ctx->lock, flags);
ctx              4888 kernel/events/core.c 	struct perf_event_context *ctx = leader->ctx;
ctx              4892 kernel/events/core.c 	lockdep_assert_held(&ctx->mutex);
ctx              4984 kernel/events/core.c 	WARN_ON_ONCE(event->ctx->parent_ctx);
ctx              4997 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              5000 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              5002 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              5048 kernel/events/core.c 	WARN_ON_ONCE(event->ctx->parent_ctx);
ctx              5060 kernel/events/core.c 	struct perf_event_context *ctx = event->ctx;
ctx              5063 kernel/events/core.c 	lockdep_assert_held(&ctx->mutex);
ctx              5074 kernel/events/core.c 				struct perf_event_context *ctx,
ctx              5089 kernel/events/core.c 		perf_pmu_disable(ctx->pmu);
ctx              5105 kernel/events/core.c 		perf_pmu_enable(ctx->pmu);
ctx              5261 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              5264 kernel/events/core.c 	ctx = perf_event_ctx_lock(event);
ctx              5266 kernel/events/core.c 	perf_event_ctx_unlock(event, ctx);
ctx              5295 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              5300 kernel/events/core.c 		ctx = perf_event_ctx_lock(event);
ctx              5302 kernel/events/core.c 		perf_event_ctx_unlock(event, ctx);
ctx              5311 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              5316 kernel/events/core.c 		ctx = perf_event_ctx_lock(event);
ctx              5318 kernel/events/core.c 		perf_event_ctx_unlock(event, ctx);
ctx              5799 kernel/events/core.c 	WARN_ON_ONCE(event->ctx->parent_ctx);
ctx              6567 kernel/events/core.c 	bool crosstask = event->ctx->task && event->ctx->task != current;
ctx              6799 kernel/events/core.c perf_iterate_ctx(struct perf_event_context *ctx,
ctx              6805 kernel/events/core.c 	list_for_each_entry_rcu(event, &ctx->event_list, event_entry) {
ctx              6828 kernel/events/core.c 		if (!smp_load_acquire(&event->ctx))
ctx              6849 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              6868 kernel/events/core.c 		ctx = rcu_dereference(current->perf_event_ctxp[ctxn]);
ctx              6869 kernel/events/core.c 		if (ctx)
ctx              6870 kernel/events/core.c 			perf_iterate_ctx(ctx, output, data, false);
ctx              6912 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              6917 kernel/events/core.c 		ctx = current->perf_event_ctxp[ctxn];
ctx              6918 kernel/events/core.c 		if (!ctx)
ctx              6923 kernel/events/core.c 		perf_iterate_ctx(ctx, perf_event_addr_filters_exec, NULL,
ctx              6966 kernel/events/core.c 	struct pmu *pmu = event->ctx->pmu;
ctx              6973 kernel/events/core.c 	perf_iterate_ctx(&cpuctx->ctx, __perf_event_output_stop, &ro, false);
ctx              7642 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              7654 kernel/events/core.c 		ctx = rcu_dereference(current->perf_event_ctxp[ctxn]);
ctx              7655 kernel/events/core.c 		if (!ctx)
ctx              7658 kernel/events/core.c 		perf_iterate_ctx(ctx, __perf_addr_filters_adjust, vma, true);
ctx              7796 kernel/events/core.c 	if (event->ctx->task) {
ctx              7814 kernel/events/core.c 	if (event->ctx->task)
ctx              8405 kernel/events/core.c 					  lockdep_is_held(&event->ctx->lock));
ctx              8751 kernel/events/core.c 		struct perf_event_context *ctx;
ctx              8755 kernel/events/core.c 		ctx = rcu_dereference(task->perf_event_ctxp[perf_sw_context]);
ctx              8756 kernel/events/core.c 		if (!ctx)
ctx              8759 kernel/events/core.c 		list_for_each_entry_rcu(event, &ctx->event_list, event_entry) {
ctx              8977 kernel/events/core.c 	struct bpf_perf_event_data_kern ctx = {
ctx              8983 kernel/events/core.c 	ctx.regs = perf_arch_bpf_user_pt_regs(regs);
ctx              8988 kernel/events/core.c 	ret = BPF_PROG_RUN(event->prog, &ctx);
ctx              9236 kernel/events/core.c 	struct task_struct *task = READ_ONCE(event->ctx->task);
ctx              9250 kernel/events/core.c 		mm = get_task_mm(event->ctx->task);
ctx              9454 kernel/events/core.c 				if (!event->ctx->task)
ctx              9507 kernel/events/core.c 	lockdep_assert_held(&event->ctx->mutex);
ctx              9548 kernel/events/core.c 		struct perf_event_context *ctx = event->ctx;
ctx              9561 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              9563 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              9758 kernel/events/core.c 	local64_set(&event->hw.prev_count, event->ctx->time);
ctx              9765 kernel/events/core.c 	task_clock_event_update(event, event->ctx->time);
ctx              9785 kernel/events/core.c 	u64 delta = now - event->ctx->timestamp;
ctx              9786 kernel/events/core.c 	u64 time = event->ctx->time + delta;
ctx              10115 kernel/events/core.c 		__perf_event_init_context(&cpuctx->ctx);
ctx              10116 kernel/events/core.c 		lockdep_set_class(&cpuctx->ctx.mutex, &cpuctx_mutex);
ctx              10117 kernel/events/core.c 		lockdep_set_class(&cpuctx->ctx.lock, &cpuctx_lock);
ctx              10118 kernel/events/core.c 		cpuctx->ctx.pmu = pmu;
ctx              10209 kernel/events/core.c 	struct perf_event_context *ctx = NULL;
ctx              10226 kernel/events/core.c 		ctx = perf_event_ctx_lock_nested(event->group_leader,
ctx              10228 kernel/events/core.c 		BUG_ON(!ctx);
ctx              10234 kernel/events/core.c 	if (ctx)
ctx              10235 kernel/events/core.c 		perf_event_ctx_unlock(event->group_leader, ctx);
ctx              10768 kernel/events/core.c 	if (output_event->cpu == -1 && output_event->ctx != event->ctx)
ctx              10866 kernel/events/core.c 			     struct perf_event_context *ctx)
ctx              10872 kernel/events/core.c 	gctx = READ_ONCE(group_leader->ctx);
ctx              10879 kernel/events/core.c 	mutex_lock_double(&gctx->mutex, &ctx->mutex);
ctx              10881 kernel/events/core.c 	if (group_leader->ctx != gctx) {
ctx              10882 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              10906 kernel/events/core.c 	struct perf_event_context *ctx, *uninitialized_var(gctx);
ctx              11057 kernel/events/core.c 			pmu = group_leader->ctx->pmu;
ctx              11073 kernel/events/core.c 	ctx = find_get_context(pmu, task, event);
ctx              11074 kernel/events/core.c 	if (IS_ERR(ctx)) {
ctx              11075 kernel/events/core.c 		err = PTR_ERR(ctx);
ctx              11108 kernel/events/core.c 		if (group_leader->ctx->task != ctx->task)
ctx              11116 kernel/events/core.c 		if (!move_group && group_leader->ctx != ctx)
ctx              11141 kernel/events/core.c 		gctx = __perf_event_ctx_lock_double(group_leader, ctx);
ctx              11158 kernel/events/core.c 			if (gctx != ctx) {
ctx              11171 kernel/events/core.c 		if (!exclusive_event_installable(group_leader, ctx))
ctx              11175 kernel/events/core.c 			if (!exclusive_event_installable(sibling, ctx))
ctx              11179 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              11182 kernel/events/core.c 	if (ctx->task == TASK_TOMBSTONE) {
ctx              11200 kernel/events/core.c 			container_of(ctx, struct perf_cpu_context, ctx);
ctx              11217 kernel/events/core.c 	if (!exclusive_event_installable(event, ctx)) {
ctx              11222 kernel/events/core.c 	WARN_ON_ONCE(ctx->parent_ctx);
ctx              11260 kernel/events/core.c 			perf_install_in_context(ctx, sibling, sibling->cpu);
ctx              11261 kernel/events/core.c 			get_ctx(ctx);
ctx              11270 kernel/events/core.c 		perf_install_in_context(ctx, group_leader, group_leader->cpu);
ctx              11271 kernel/events/core.c 		get_ctx(ctx);
ctx              11285 kernel/events/core.c 	perf_install_in_context(ctx, event, event->cpu);
ctx              11286 kernel/events/core.c 	perf_unpin_context(ctx);
ctx              11290 kernel/events/core.c 	mutex_unlock(&ctx->mutex);
ctx              11314 kernel/events/core.c 	mutex_unlock(&ctx->mutex);
ctx              11318 kernel/events/core.c 	perf_unpin_context(ctx);
ctx              11319 kernel/events/core.c 	put_ctx(ctx);
ctx              11353 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              11377 kernel/events/core.c 	ctx = find_get_context(event->pmu, task, event);
ctx              11378 kernel/events/core.c 	if (IS_ERR(ctx)) {
ctx              11379 kernel/events/core.c 		err = PTR_ERR(ctx);
ctx              11383 kernel/events/core.c 	WARN_ON_ONCE(ctx->parent_ctx);
ctx              11384 kernel/events/core.c 	mutex_lock(&ctx->mutex);
ctx              11385 kernel/events/core.c 	if (ctx->task == TASK_TOMBSTONE) {
ctx              11398 kernel/events/core.c 			container_of(ctx, struct perf_cpu_context, ctx);
ctx              11405 kernel/events/core.c 	if (!exclusive_event_installable(event, ctx)) {
ctx              11410 kernel/events/core.c 	perf_install_in_context(ctx, event, event->cpu);
ctx              11411 kernel/events/core.c 	perf_unpin_context(ctx);
ctx              11412 kernel/events/core.c 	mutex_unlock(&ctx->mutex);
ctx              11417 kernel/events/core.c 	mutex_unlock(&ctx->mutex);
ctx              11418 kernel/events/core.c 	perf_unpin_context(ctx);
ctx              11419 kernel/events/core.c 	put_ctx(ctx);
ctx              11434 kernel/events/core.c 	src_ctx = &per_cpu_ptr(pmu->pmu_cpu_context, src_cpu)->ctx;
ctx              11435 kernel/events/core.c 	dst_ctx = &per_cpu_ptr(pmu->pmu_cpu_context, dst_cpu)->ctx;
ctx              11557 kernel/events/core.c 	WARN_ON_ONCE(parent_event->ctx->parent_ctx);
ctx              11669 kernel/events/core.c 			    struct perf_event_context *ctx)
ctx              11682 kernel/events/core.c 	raw_spin_lock_irq(&ctx->lock);
ctx              11684 kernel/events/core.c 	list_del_event(event, ctx);
ctx              11685 kernel/events/core.c 	raw_spin_unlock_irq(&ctx->lock);
ctx              11698 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              11703 kernel/events/core.c 		ctx = task->perf_event_ctxp[ctxn];
ctx              11704 kernel/events/core.c 		if (!ctx)
ctx              11707 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              11708 kernel/events/core.c 		raw_spin_lock_irq(&ctx->lock);
ctx              11716 kernel/events/core.c 		WRITE_ONCE(ctx->task, TASK_TOMBSTONE);
ctx              11718 kernel/events/core.c 		raw_spin_unlock_irq(&ctx->lock);
ctx              11720 kernel/events/core.c 		list_for_each_entry_safe(event, tmp, &ctx->event_list, event_entry)
ctx              11721 kernel/events/core.c 			perf_free_event(event, ctx);
ctx              11723 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              11739 kernel/events/core.c 		wait_var_event(&ctx->refcount, refcount_read(&ctx->refcount) == 1);
ctx              11740 kernel/events/core.c 		put_ctx(ctx); /* must be last */
ctx              11869 kernel/events/core.c 	child_event->ctx = child_ctx;
ctx              12147 kernel/events/core.c 	struct perf_event_context *ctx = __info;
ctx              12148 kernel/events/core.c 	struct perf_cpu_context *cpuctx = __get_cpu_context(ctx);
ctx              12151 kernel/events/core.c 	raw_spin_lock(&ctx->lock);
ctx              12152 kernel/events/core.c 	ctx_sched_out(ctx, cpuctx, EVENT_TIME);
ctx              12153 kernel/events/core.c 	list_for_each_entry(event, &ctx->event_list, event_entry)
ctx              12154 kernel/events/core.c 		__perf_remove_from_context(event, cpuctx, ctx, (void *)DETACH_GROUP);
ctx              12155 kernel/events/core.c 	raw_spin_unlock(&ctx->lock);
ctx              12161 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              12167 kernel/events/core.c 		ctx = &cpuctx->ctx;
ctx              12169 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              12170 kernel/events/core.c 		smp_call_function_single(cpu, __perf_event_exit_context, ctx, 1);
ctx              12172 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx              12186 kernel/events/core.c 	struct perf_event_context *ctx;
ctx              12195 kernel/events/core.c 		ctx = &cpuctx->ctx;
ctx              12197 kernel/events/core.c 		mutex_lock(&ctx->mutex);
ctx              12199 kernel/events/core.c 		mutex_unlock(&ctx->mutex);
ctx               507 kernel/events/hw_breakpoint.c 	if (irqs_disabled() && bp->ctx && bp->ctx->task == current)
ctx               880 kernel/events/uprobes.c 				   enum uprobe_filter_ctx ctx, struct mm_struct *mm)
ctx               882 kernel/events/uprobes.c 	return !uc->filter || uc->filter(uc, ctx, mm);
ctx               886 kernel/events/uprobes.c 			 enum uprobe_filter_ctx ctx, struct mm_struct *mm)
ctx               893 kernel/events/uprobes.c 		ret = consumer_filter(uc, ctx, mm);
ctx              1858 kernel/events/uprobes.c 	enum rp_check ctx = chained ? RP_CHECK_CHAIN_CALL : RP_CHECK_CALL;
ctx              1860 kernel/events/uprobes.c 	while (ri && !arch_uretprobe_is_alive(ri, ctx, regs)) {
ctx              2184 kernel/events/uprobes.c bool __weak arch_uretprobe_is_alive(struct return_instance *ret, enum rp_check ctx,
ctx               373 kernel/locking/locktorture.c 	struct ww_acquire_ctx ctx;
ctx               384 kernel/locking/locktorture.c 	ww_acquire_init(&ctx, &torture_ww_class);
ctx               389 kernel/locking/locktorture.c 		err = ww_mutex_lock(ll->lock, &ctx);
ctx               400 kernel/locking/locktorture.c 		ww_mutex_lock_slow(ll->lock, &ctx);
ctx               404 kernel/locking/locktorture.c 	ww_acquire_fini(&ctx);
ctx               315 kernel/locking/mutex.c 	DEBUG_LOCKS_WARN_ON(ww->ctx);
ctx               343 kernel/locking/mutex.c 	ww->ctx = ww_ctx;
ctx               465 kernel/locking/mutex.c ww_mutex_set_context_fastpath(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx               467 kernel/locking/mutex.c 	ww_mutex_lock_acquired(lock, ctx);
ctx               495 kernel/locking/mutex.c 	__ww_mutex_check_waiters(&lock->base, ctx);
ctx               520 kernel/locking/mutex.c 	if (ww_ctx->acquired > 0 && READ_ONCE(ww->ctx))
ctx               761 kernel/locking/mutex.c 	if (lock->ctx) {
ctx               763 kernel/locking/mutex.c 		DEBUG_LOCKS_WARN_ON(!lock->ctx->acquired);
ctx               765 kernel/locking/mutex.c 		if (lock->ctx->acquired > 0)
ctx               766 kernel/locking/mutex.c 			lock->ctx->acquired--;
ctx               767 kernel/locking/mutex.c 		lock->ctx = NULL;
ctx               806 kernel/locking/mutex.c 		      struct ww_acquire_ctx *ctx)
ctx               809 kernel/locking/mutex.c 	struct ww_acquire_ctx *hold_ctx = READ_ONCE(ww->ctx);
ctx               812 kernel/locking/mutex.c 	if (ctx->acquired == 0)
ctx               815 kernel/locking/mutex.c 	if (!ctx->is_wait_die) {
ctx               816 kernel/locking/mutex.c 		if (ctx->wounded)
ctx               817 kernel/locking/mutex.c 			return __ww_mutex_kill(lock, ctx);
ctx               822 kernel/locking/mutex.c 	if (hold_ctx && __ww_ctx_stamp_after(ctx, hold_ctx))
ctx               823 kernel/locking/mutex.c 		return __ww_mutex_kill(lock, ctx);
ctx               834 kernel/locking/mutex.c 		return __ww_mutex_kill(lock, ctx);
ctx               916 kernel/locking/mutex.c 		__ww_mutex_wound(lock, ww_ctx, ww->ctx);
ctx               943 kernel/locking/mutex.c 		if (unlikely(ww_ctx == READ_ONCE(ww->ctx)))
ctx              1159 kernel/locking/mutex.c ww_mutex_deadlock_injection(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1164 kernel/locking/mutex.c 	if (ctx->deadlock_inject_countdown-- == 0) {
ctx              1165 kernel/locking/mutex.c 		tmp = ctx->deadlock_inject_interval;
ctx              1171 kernel/locking/mutex.c 		ctx->deadlock_inject_interval = tmp;
ctx              1172 kernel/locking/mutex.c 		ctx->deadlock_inject_countdown = tmp;
ctx              1173 kernel/locking/mutex.c 		ctx->contending_lock = lock;
ctx              1185 kernel/locking/mutex.c ww_mutex_lock(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1191 kernel/locking/mutex.c 			       0, ctx ? &ctx->dep_map : NULL, _RET_IP_,
ctx              1192 kernel/locking/mutex.c 			       ctx);
ctx              1193 kernel/locking/mutex.c 	if (!ret && ctx && ctx->acquired > 1)
ctx              1194 kernel/locking/mutex.c 		return ww_mutex_deadlock_injection(lock, ctx);
ctx              1201 kernel/locking/mutex.c ww_mutex_lock_interruptible(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1207 kernel/locking/mutex.c 			      0, ctx ? &ctx->dep_map : NULL, _RET_IP_,
ctx              1208 kernel/locking/mutex.c 			      ctx);
ctx              1210 kernel/locking/mutex.c 	if (!ret && ctx && ctx->acquired > 1)
ctx              1211 kernel/locking/mutex.c 		return ww_mutex_deadlock_injection(lock, ctx);
ctx              1380 kernel/locking/mutex.c __ww_mutex_lock_slowpath(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1383 kernel/locking/mutex.c 			       _RET_IP_, ctx);
ctx              1388 kernel/locking/mutex.c 					    struct ww_acquire_ctx *ctx)
ctx              1391 kernel/locking/mutex.c 			       _RET_IP_, ctx);
ctx              1428 kernel/locking/mutex.c ww_mutex_lock(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1433 kernel/locking/mutex.c 		if (ctx)
ctx              1434 kernel/locking/mutex.c 			ww_mutex_set_context_fastpath(lock, ctx);
ctx              1438 kernel/locking/mutex.c 	return __ww_mutex_lock_slowpath(lock, ctx);
ctx              1443 kernel/locking/mutex.c ww_mutex_lock_interruptible(struct ww_mutex *lock, struct ww_acquire_ctx *ctx)
ctx              1448 kernel/locking/mutex.c 		if (ctx)
ctx              1449 kernel/locking/mutex.c 			ww_mutex_set_context_fastpath(lock, ctx);
ctx              1453 kernel/locking/mutex.c 	return __ww_mutex_lock_interruptible_slowpath(lock, ctx);
ctx                52 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx                56 kernel/locking/test-ww_mutex.c 	ww_acquire_init(&ctx, &ww_class);
ctx                67 kernel/locking/test-ww_mutex.c 	ww_mutex_lock(&mtx.mutex, (flags & TEST_MTX_CTX) ? &ctx : NULL);
ctx                84 kernel/locking/test-ww_mutex.c 	ww_acquire_fini(&ctx);
ctx               115 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               119 kernel/locking/test-ww_mutex.c 	ww_acquire_init(&ctx, &ww_class);
ctx               121 kernel/locking/test-ww_mutex.c 	ww_mutex_lock(&mutex, &ctx);
ctx               130 kernel/locking/test-ww_mutex.c 	ret = ww_mutex_lock(&mutex, &ctx);
ctx               143 kernel/locking/test-ww_mutex.c 	ww_acquire_fini(&ctx);
ctx               160 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               163 kernel/locking/test-ww_mutex.c 	ww_acquire_init(&ctx, &ww_class);
ctx               164 kernel/locking/test-ww_mutex.c 	ww_mutex_lock(&abba->b_mutex, &ctx);
ctx               169 kernel/locking/test-ww_mutex.c 	err = ww_mutex_lock(&abba->a_mutex, &ctx);
ctx               172 kernel/locking/test-ww_mutex.c 		ww_mutex_lock_slow(&abba->a_mutex, &ctx);
ctx               173 kernel/locking/test-ww_mutex.c 		err = ww_mutex_lock(&abba->b_mutex, &ctx);
ctx               179 kernel/locking/test-ww_mutex.c 	ww_acquire_fini(&ctx);
ctx               187 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               199 kernel/locking/test-ww_mutex.c 	ww_acquire_init(&ctx, &ww_class);
ctx               200 kernel/locking/test-ww_mutex.c 	ww_mutex_lock(&abba.a_mutex, &ctx);
ctx               205 kernel/locking/test-ww_mutex.c 	err = ww_mutex_lock(&abba.b_mutex, &ctx);
ctx               208 kernel/locking/test-ww_mutex.c 		ww_mutex_lock_slow(&abba.b_mutex, &ctx);
ctx               209 kernel/locking/test-ww_mutex.c 		err = ww_mutex_lock(&abba.a_mutex, &ctx);
ctx               215 kernel/locking/test-ww_mutex.c 	ww_acquire_fini(&ctx);
ctx               249 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               252 kernel/locking/test-ww_mutex.c 	ww_acquire_init(&ctx, &ww_class);
ctx               253 kernel/locking/test-ww_mutex.c 	ww_mutex_lock(&cycle->a_mutex, &ctx);
ctx               258 kernel/locking/test-ww_mutex.c 	err = ww_mutex_lock(cycle->b_mutex, &ctx);
ctx               262 kernel/locking/test-ww_mutex.c 		ww_mutex_lock_slow(cycle->b_mutex, &ctx);
ctx               263 kernel/locking/test-ww_mutex.c 		erra = ww_mutex_lock(&cycle->a_mutex, &ctx);
ctx               270 kernel/locking/test-ww_mutex.c 	ww_acquire_fini(&ctx);
ctx               383 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               394 kernel/locking/test-ww_mutex.c 		ww_acquire_init(&ctx, &ww_class);
ctx               401 kernel/locking/test-ww_mutex.c 			err = ww_mutex_lock(&locks[order[n]], &ctx);
ctx               415 kernel/locking/test-ww_mutex.c 			ww_mutex_lock_slow(&locks[order[contended]], &ctx);
ctx               425 kernel/locking/test-ww_mutex.c 		ww_acquire_fini(&ctx);
ctx               441 kernel/locking/test-ww_mutex.c 	struct ww_acquire_ctx ctx;
ctx               462 kernel/locking/test-ww_mutex.c 		ww_acquire_init(&ctx, &ww_class);
ctx               465 kernel/locking/test-ww_mutex.c 			err = ww_mutex_lock(ll->lock, &ctx);
ctx               479 kernel/locking/test-ww_mutex.c 			ww_mutex_lock_slow(ll->lock, &ctx);
ctx               487 kernel/locking/test-ww_mutex.c 		ww_acquire_fini(&ctx);
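
Both locktorture and test-ww_mutex above exercise the documented wound/wait protocol: on -EDEADLK, release everything held, block on the contended lock with ww_mutex_lock_slow(), and reacquire in the new order. A minimal sketch of that retry loop for two locks, assuming the mutexes were set up elsewhere with ww_mutex_init() against the same class:

#include <linux/kernel.h>
#include <linux/ww_mutex.h>

static DEFINE_WW_CLASS(demo_class);	/* one class shared by both locks */

static void lock_two(struct ww_mutex *a, struct ww_mutex *b)
{
	struct ww_acquire_ctx ctx;
	int err;

	ww_acquire_init(&ctx, &demo_class);

	err = ww_mutex_lock(a, &ctx);	/* cannot be wounded: nothing held yet */
	if (!err)
		err = ww_mutex_lock(b, &ctx);
	while (err == -EDEADLK) {
		/* Wounded: drop what we hold, sleep on the lock we lost,
		 * then try the remaining one again. */
		ww_mutex_unlock(a);
		ww_mutex_lock_slow(b, &ctx);
		swap(a, b);
		err = ww_mutex_lock(b, &ctx);
	}
	ww_acquire_done(&ctx);

	/* ... both locks held ... */

	ww_mutex_unlock(a);
	ww_mutex_unlock(b);
	ww_acquire_fini(&ctx);
}
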
ctx              1120 kernel/time/timekeeping.c 				   void *ctx),
ctx              1121 kernel/time/timekeeping.c 				  void *ctx,
ctx              1142 kernel/time/timekeeping.c 		ret = get_time_fn(&xtstamp->device, &system_counterval, ctx);
ctx                79 kernel/trace/bpf_trace.c unsigned int trace_call_bpf(struct trace_event_call *call, void *ctx)
ctx               114 kernel/trace/bpf_trace.c 	ret = BPF_PROG_RUN_ARRAY_CHECK(call->prog_array, ctx, BPF_PROG_RUN);
ctx               514 kernel/trace/bpf_trace.c 		     void *ctx, u64 ctx_size, bpf_ctx_copy_t ctx_copy)
ctx               520 kernel/trace/bpf_trace.c 		.data		= ctx,
ctx               895 kernel/trace/bpf_trace.c BPF_CALL_3(bpf_perf_prog_read_value, struct bpf_perf_event_data_kern *, ctx,
ctx               902 kernel/trace/bpf_trace.c 	err = perf_event_read_local(ctx->event, &buf->counter, &buf->enabled,
ctx              1036 kernel/trace/trace_uprobe.c 				enum uprobe_filter_ctx ctx,
ctx              1309 kernel/trace/trace_uprobe.c 				enum uprobe_filter_ctx ctx, struct mm_struct *mm)
ctx              3879 kernel/workqueue.c static void apply_wqattrs_cleanup(struct apply_wqattrs_ctx *ctx)
ctx              3881 kernel/workqueue.c 	if (ctx) {
ctx              3885 kernel/workqueue.c 			put_pwq_unlocked(ctx->pwq_tbl[node]);
ctx              3886 kernel/workqueue.c 		put_pwq_unlocked(ctx->dfl_pwq);
ctx              3888 kernel/workqueue.c 		free_workqueue_attrs(ctx->attrs);
ctx              3890 kernel/workqueue.c 		kfree(ctx);
ctx              3899 kernel/workqueue.c 	struct apply_wqattrs_ctx *ctx;
ctx              3905 kernel/workqueue.c 	ctx = kzalloc(struct_size(ctx, pwq_tbl, nr_node_ids), GFP_KERNEL);
ctx              3909 kernel/workqueue.c 	if (!ctx || !new_attrs || !tmp_attrs)
ctx              3934 kernel/workqueue.c 	ctx->dfl_pwq = alloc_unbound_pwq(wq, new_attrs);
ctx              3935 kernel/workqueue.c 	if (!ctx->dfl_pwq)
ctx              3940 kernel/workqueue.c 			ctx->pwq_tbl[node] = alloc_unbound_pwq(wq, tmp_attrs);
ctx              3941 kernel/workqueue.c 			if (!ctx->pwq_tbl[node])
ctx              3944 kernel/workqueue.c 			ctx->dfl_pwq->refcnt++;
ctx              3945 kernel/workqueue.c 			ctx->pwq_tbl[node] = ctx->dfl_pwq;
ctx              3952 kernel/workqueue.c 	ctx->attrs = new_attrs;
ctx              3954 kernel/workqueue.c 	ctx->wq = wq;
ctx              3956 kernel/workqueue.c 	return ctx;
ctx              3961 kernel/workqueue.c 	apply_wqattrs_cleanup(ctx);
ctx              3966 kernel/workqueue.c static void apply_wqattrs_commit(struct apply_wqattrs_ctx *ctx)
ctx              3971 kernel/workqueue.c 	mutex_lock(&ctx->wq->mutex);
ctx              3973 kernel/workqueue.c 	copy_workqueue_attrs(ctx->wq->unbound_attrs, ctx->attrs);
ctx              3977 kernel/workqueue.c 		ctx->pwq_tbl[node] = numa_pwq_tbl_install(ctx->wq, node,
ctx              3978 kernel/workqueue.c 							  ctx->pwq_tbl[node]);
ctx              3981 kernel/workqueue.c 	link_pwq(ctx->dfl_pwq);
ctx              3982 kernel/workqueue.c 	swap(ctx->wq->dfl_pwq, ctx->dfl_pwq);
ctx              3984 kernel/workqueue.c 	mutex_unlock(&ctx->wq->mutex);
ctx              4003 kernel/workqueue.c 	struct apply_wqattrs_ctx *ctx;
ctx              4017 kernel/workqueue.c 	ctx = apply_wqattrs_prepare(wq, attrs);
ctx              4018 kernel/workqueue.c 	if (!ctx)
ctx              4022 kernel/workqueue.c 	apply_wqattrs_commit(ctx);
ctx              4023 kernel/workqueue.c 	apply_wqattrs_cleanup(ctx);
ctx              5246 kernel/workqueue.c 	struct apply_wqattrs_ctx *ctx, *n;
ctx              5257 kernel/workqueue.c 		ctx = apply_wqattrs_prepare(wq, wq->unbound_attrs);
ctx              5258 kernel/workqueue.c 		if (!ctx) {
ctx              5263 kernel/workqueue.c 		list_add_tail(&ctx->list, &ctxs);
ctx              5266 kernel/workqueue.c 	list_for_each_entry_safe(ctx, n, &ctxs, list) {
ctx              5268 kernel/workqueue.c 			apply_wqattrs_commit(ctx);
ctx              5269 kernel/workqueue.c 		apply_wqattrs_cleanup(ctx);
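
apply_wqattrs_prepare()/commit()/cleanup() above split the update into a phase that can fail (all allocation) and a phase that cannot (pointer swaps under wq->mutex). A generic, compilable sketch of that two-phase idiom; struct demo_ctx and everything around it are hypothetical, not workqueue API, and "state" here is just a heap integer:

#include <linux/kernel.h>
#include <linux/slab.h>

static int *current_state;	/* hypothetical live state */

struct demo_ctx {
	int *staged;
};

static struct demo_ctx *demo_prepare(int value)
{
	struct demo_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

	if (!ctx)
		return NULL;
	ctx->staged = kmalloc(sizeof(int), GFP_KERNEL);	/* everything that can fail */
	if (!ctx->staged) {
		kfree(ctx);
		return NULL;
	}
	*ctx->staged = value;
	return ctx;
}

static void demo_commit(struct demo_ctx *ctx)
{
	swap(current_state, ctx->staged);	/* pointer swap only; cannot fail */
}

static void demo_cleanup(struct demo_ctx *ctx)
{
	if (ctx) {
		kfree(ctx->staged);	/* staged-but-uncommitted, or displaced old */
		kfree(ctx);
	}
}
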
ctx                61 lib/crc-t10dif.c 		char ctx[2];
ctx                70 lib/crc-t10dif.c 	*(__u16 *)desc.ctx = crc;
ctx                77 lib/crc-t10dif.c 	return *(__u16 *)desc.ctx;
ctx               185 lib/crypto/aes.c int aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
ctx               196 lib/crypto/aes.c 	ctx->key_length = key_len;
ctx               199 lib/crypto/aes.c 		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));
ctx               202 lib/crypto/aes.c 		u32 *rki = ctx->key_enc + (i * kwords);
ctx               231 lib/crypto/aes.c 	ctx->key_dec[0] = ctx->key_enc[key_len + 24];
ctx               232 lib/crypto/aes.c 	ctx->key_dec[1] = ctx->key_enc[key_len + 25];
ctx               233 lib/crypto/aes.c 	ctx->key_dec[2] = ctx->key_enc[key_len + 26];
ctx               234 lib/crypto/aes.c 	ctx->key_dec[3] = ctx->key_enc[key_len + 27];
ctx               237 lib/crypto/aes.c 		ctx->key_dec[i]     = inv_mix_columns(ctx->key_enc[j]);
ctx               238 lib/crypto/aes.c 		ctx->key_dec[i + 1] = inv_mix_columns(ctx->key_enc[j + 1]);
ctx               239 lib/crypto/aes.c 		ctx->key_dec[i + 2] = inv_mix_columns(ctx->key_enc[j + 2]);
ctx               240 lib/crypto/aes.c 		ctx->key_dec[i + 3] = inv_mix_columns(ctx->key_enc[j + 3]);
ctx               243 lib/crypto/aes.c 	ctx->key_dec[i]     = ctx->key_enc[0];
ctx               244 lib/crypto/aes.c 	ctx->key_dec[i + 1] = ctx->key_enc[1];
ctx               245 lib/crypto/aes.c 	ctx->key_dec[i + 2] = ctx->key_enc[2];
ctx               246 lib/crypto/aes.c 	ctx->key_dec[i + 3] = ctx->key_enc[3];
ctx               258 lib/crypto/aes.c void aes_encrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
ctx               260 lib/crypto/aes.c 	const u32 *rkp = ctx->key_enc + 4;
ctx               261 lib/crypto/aes.c 	int rounds = 6 + ctx->key_length / 4;
ctx               265 lib/crypto/aes.c 	st0[0] = ctx->key_enc[0] ^ get_unaligned_le32(in);
ctx               266 lib/crypto/aes.c 	st0[1] = ctx->key_enc[1] ^ get_unaligned_le32(in + 4);
ctx               267 lib/crypto/aes.c 	st0[2] = ctx->key_enc[2] ^ get_unaligned_le32(in + 8);
ctx               268 lib/crypto/aes.c 	st0[3] = ctx->key_enc[3] ^ get_unaligned_le32(in + 12);
ctx               309 lib/crypto/aes.c void aes_decrypt(const struct crypto_aes_ctx *ctx, u8 *out, const u8 *in)
ctx               311 lib/crypto/aes.c 	const u32 *rkp = ctx->key_dec + 4;
ctx               312 lib/crypto/aes.c 	int rounds = 6 + ctx->key_length / 4;
ctx               316 lib/crypto/aes.c 	st0[0] = ctx->key_dec[0] ^ get_unaligned_le32(in);
ctx               317 lib/crypto/aes.c 	st0[1] = ctx->key_dec[1] ^ get_unaligned_le32(in + 4);
ctx               318 lib/crypto/aes.c 	st0[2] = ctx->key_dec[2] ^ get_unaligned_le32(in + 8);
ctx               319 lib/crypto/aes.c 	st0[3] = ctx->key_dec[3] ^ get_unaligned_le32(in + 12);
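
aes_expandkey() above derives key_dec from key_enc by running inv_mix_columns() over the encryption schedule in reverse, so one expansion serves both directions. A minimal in-kernel round-trip sketch using the library calls shown above; the key and plaintext bytes are arbitrary, not test vectors:

#include <crypto/aes.h>
#include <linux/errno.h>
#include <linux/string.h>

static int aes_roundtrip_demo(void)
{
	struct crypto_aes_ctx ctx;
	static const u8 key[AES_KEYSIZE_128] = { 0xaa };	/* arbitrary */
	u8 pt[AES_BLOCK_SIZE] = "0123456789abcde";
	u8 ct[AES_BLOCK_SIZE], out[AES_BLOCK_SIZE];
	int err;

	err = aes_expandkey(&ctx, key, sizeof(key));	/* fills key_enc and key_dec */
	if (err)
		return err;	/* -EINVAL for an unsupported key length */

	aes_encrypt(&ctx, ct, pt);
	aes_decrypt(&ctx, out, ct);

	return memcmp(out, pt, AES_BLOCK_SIZE) ? -EIO : 0;
}
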
ctx                13 lib/crypto/arc4.c int arc4_setkey(struct arc4_ctx *ctx, const u8 *in_key, unsigned int key_len)
ctx                17 lib/crypto/arc4.c 	ctx->x = 1;
ctx                18 lib/crypto/arc4.c 	ctx->y = 0;
ctx                21 lib/crypto/arc4.c 		ctx->S[i] = i;
ctx                24 lib/crypto/arc4.c 		u32 a = ctx->S[i];
ctx                27 lib/crypto/arc4.c 		ctx->S[i] = ctx->S[j];
ctx                28 lib/crypto/arc4.c 		ctx->S[j] = a;
ctx                37 lib/crypto/arc4.c void arc4_crypt(struct arc4_ctx *ctx, u8 *out, const u8 *in, unsigned int len)
ctx                39 lib/crypto/arc4.c 	u32 *const S = ctx->S;
ctx                46 lib/crypto/arc4.c 	x = ctx->x;
ctx                47 lib/crypto/arc4.c 	y = ctx->y;
ctx                69 lib/crypto/arc4.c 	ctx->x = x;
ctx                70 lib/crypto/arc4.c 	ctx->y = y;
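
arc4_crypt() above is plain RC4: the output is the input XORed with a keystream, so running the same keystream over the ciphertext restores the plaintext. A short usage sketch of the two calls shown (key bytes arbitrary):

#include <crypto/arc4.h>
#include <linux/errno.h>
#include <linux/string.h>

static int arc4_demo(void)
{
	struct arc4_ctx ctx;
	static const u8 key[16] = { 0x01, 0x02, 0x03 };	/* arbitrary */
	u8 buf[6] = "hello";

	arc4_setkey(&ctx, key, sizeof(key));		/* KSA: permute S, reset x/y */
	arc4_crypt(&ctx, buf, buf, sizeof(buf));	/* in-place encrypt */

	arc4_setkey(&ctx, key, sizeof(key));		/* fresh keystream ... */
	arc4_crypt(&ctx, buf, buf, sizeof(buf));	/* ... decrypts it again */

	return memcmp(buf, "hello", sizeof(buf)) ? -EIO : 0;
}
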
ctx               699 lib/crypto/des.c int des_expand_key(struct des_ctx *ctx, const u8 *key, unsigned int keylen)
ctx               704 lib/crypto/des.c 	return des_ekey(ctx->expkey, key) ? 0 : -ENOKEY;
ctx               781 lib/crypto/des.c void des_encrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src)
ctx               783 lib/crypto/des.c 	const u32 *K = ctx->expkey;
ctx               802 lib/crypto/des.c void des_decrypt(const struct des_ctx *ctx, u8 *dst, const u8 *src)
ctx               804 lib/crypto/des.c 	const u32 *K = ctx->expkey + DES_EXPKEY_WORDS - 2;
ctx               823 lib/crypto/des.c int des3_ede_expand_key(struct des3_ede_ctx *ctx, const u8 *key,
ctx               826 lib/crypto/des.c 	u32 *pe = ctx->expkey;
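
des_expand_key() above returns -ENOKEY when des_ekey() rejects a weak key; otherwise the expanded schedule drives des_encrypt()/des_decrypt(). A minimal sketch of that flow (key bytes arbitrary):

#include <crypto/des.h>
#include <linux/errno.h>

static int des_demo(void)
{
	struct des_ctx ctx;
	static const u8 key[DES_KEY_SIZE] = { 0x13, 0x34, 0x57, 0x79,
					      0x9b, 0xbc, 0xdf, 0xf1 };
	u8 in[DES_BLOCK_SIZE] = "8bytes!";
	u8 out[DES_BLOCK_SIZE];
	int err;

	err = des_expand_key(&ctx, key, sizeof(key));
	if (err)
		return err;	/* -ENOKEY on a weak key */

	des_encrypt(&ctx, out, in);
	des_decrypt(&ctx, in, out);	/* restores the plaintext */
	return 0;
}
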
ctx                41 lib/libcrc32c.c 	u32 ret, *ctx = (u32 *)shash_desc_ctx(shash);
ctx                45 lib/libcrc32c.c 	*ctx = crc;
ctx                50 lib/libcrc32c.c 	ret = *ctx;
ctx                51 lib/libcrc32c.c 	barrier_data(ctx);
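
libcrc32c above keeps the running CRC directly in the shash descriptor context. A hedged sketch of computing crc32c through the generic shash API; error handling is abbreviated on the assumption (true for crc32c) that init/update/final cannot fail, though the transform allocation can:

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/string.h>

static u32 crc32c_demo(const void *data, unsigned int len)
{
	struct crypto_shash *tfm;
	u8 out[4];
	u32 crc;

	tfm = crypto_alloc_shash("crc32c", 0, 0);
	if (IS_ERR(tfm))
		return 0;

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		crypto_shash_init(desc);
		crypto_shash_update(desc, data, len);
		crypto_shash_final(desc, out);
	}

	crypto_free_shash(tfm);
	memcpy(&crc, out, sizeof(crc));
	return crc;
}
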
ctx              1317 lib/locking-selftest.c 	if (WARN_ON(!o.ctx) ||
ctx              1355 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1358 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1361 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1367 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1370 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1376 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1379 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1386 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1389 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1394 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1397 lib/locking-selftest.c 	o.ctx = (void *)~0UL;
ctx              1400 lib/locking-selftest.c 	WARN_ON(o.ctx != (void *)~0UL);
ctx              1468 lib/locking-selftest.c 	o.ctx = &t2;
ctx              1477 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1490 lib/locking-selftest.c 	o2.ctx = NULL;
ctx              1504 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1516 lib/locking-selftest.c 	o2.ctx = NULL;
ctx              1529 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1542 lib/locking-selftest.c 	o2.ctx = NULL;
ctx              1555 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1567 lib/locking-selftest.c 	o2.ctx = NULL;
ctx              1580 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1601 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1622 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1626 lib/locking-selftest.c 	o3.ctx = &t2;
ctx              1648 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1652 lib/locking-selftest.c 	o3.ctx = &t2;
ctx              1673 lib/locking-selftest.c 	o2.ctx = &t2;
ctx              1698 lib/locking-selftest.c 	o2.ctx = &t2;
ctx               466 lib/lz4/lz4_compress.c 	LZ4_stream_t_internal *ctx = &((LZ4_stream_t *)state)->internal_donotuse;
ctx               480 lib/lz4/lz4_compress.c 			return LZ4_compress_generic(ctx, source,
ctx               485 lib/lz4/lz4_compress.c 			return LZ4_compress_generic(ctx, source,
ctx               491 lib/lz4/lz4_compress.c 			return LZ4_compress_generic(ctx, source,
ctx               496 lib/lz4/lz4_compress.c 			return LZ4_compress_generic(ctx, source,
ctx               523 lib/lz4/lz4_compress.c 	LZ4_stream_t_internal * const ctx,
ctx               564 lib/lz4/lz4_compress.c 	LZ4_putPosition(ip, ctx->hashTable, tableType, base);
ctx               588 lib/lz4/lz4_compress.c 				match = LZ4_getPositionOnHash(h, ctx->hashTable,
ctx               593 lib/lz4/lz4_compress.c 					ctx->hashTable, tableType,
ctx               671 lib/lz4/lz4_compress.c 		LZ4_putPosition(ip - 2, ctx->hashTable, tableType, base);
ctx               674 lib/lz4/lz4_compress.c 		match = LZ4_getPosition(ip, ctx->hashTable, tableType, base);
ctx               675 lib/lz4/lz4_compress.c 		LZ4_putPosition(ip, ctx->hashTable, tableType, base);
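
The LZ4_compress_generic() paths above take caller-provided state for the hash table, and the public kernel wrapper does the same. A sketch assuming the <linux/lz4.h> one-shot API, i.e. LZ4_compress_default() with an LZ4_MEM_COMPRESS-sized scratch buffer:

#include <linux/errno.h>
#include <linux/lz4.h>
#include <linux/vmalloc.h>

static int lz4_demo(const char *src, int src_len, char *dst, int dst_cap)
{
	void *wrkmem = vmalloc(LZ4_MEM_COMPRESS);	/* hash table scratch */
	int out_len;

	if (!wrkmem)
		return -ENOMEM;

	out_len = LZ4_compress_default(src, dst, src_len, dst_cap, wrkmem);
	vfree(wrkmem);

	return out_len ? out_len : -E2BIG;	/* 0 means dst was too small */
}
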
ctx               339 lib/lz4/lz4hc_compress.c 	LZ4HC_CCtx_internal *const ctx,
ctx               373 lib/lz4/lz4hc_compress.c 	ctx->end += inputSize;
ctx               379 lib/lz4/lz4hc_compress.c 		ml = LZ4HC_InsertAndFindBestMatch(ctx, ip,
ctx               393 lib/lz4/lz4hc_compress.c 			ml2 = LZ4HC_InsertAndGetWiderMatch(ctx,
ctx               455 lib/lz4/lz4hc_compress.c 			ml3 = LZ4HC_InsertAndGetWiderMatch(ctx,
ctx               589 lib/lz4/lz4hc_compress.c 	LZ4HC_CCtx_internal *ctx = &((LZ4_streamHC_t *)state)->internal_donotuse;
ctx               598 lib/lz4/lz4hc_compress.c 	LZ4HC_init(ctx, (const BYTE *)src);
ctx               601 lib/lz4/lz4hc_compress.c 		return LZ4HC_compress_generic(ctx, src, dst,
ctx               604 lib/lz4/lz4hc_compress.c 		return LZ4HC_compress_generic(ctx, src, dst,
ctx               127 lib/mpi/mpi-internal.h void mpihelp_release_karatsuba_ctx(struct karatsuba_ctx *ctx);
ctx               142 lib/mpi/mpi-internal.h 			       struct karatsuba_ctx *ctx);
ctx               324 lib/mpi/mpih-mul.c 			   struct karatsuba_ctx *ctx)
ctx               328 lib/mpi/mpih-mul.c 	if (!ctx->tspace || ctx->tspace_size < vsize) {
ctx               329 lib/mpi/mpih-mul.c 		if (ctx->tspace)
ctx               330 lib/mpi/mpih-mul.c 			mpi_free_limb_space(ctx->tspace);
ctx               331 lib/mpi/mpih-mul.c 		ctx->tspace = mpi_alloc_limb_space(2 * vsize);
ctx               332 lib/mpi/mpih-mul.c 		if (!ctx->tspace)
ctx               334 lib/mpi/mpih-mul.c 		ctx->tspace_size = vsize;
ctx               337 lib/mpi/mpih-mul.c 	MPN_MUL_N_RECURSE(prodp, up, vp, vsize, ctx->tspace);
ctx               343 lib/mpi/mpih-mul.c 		if (!ctx->tp || ctx->tp_size < vsize) {
ctx               344 lib/mpi/mpih-mul.c 			if (ctx->tp)
ctx               345 lib/mpi/mpih-mul.c 				mpi_free_limb_space(ctx->tp);
ctx               346 lib/mpi/mpih-mul.c 			ctx->tp = mpi_alloc_limb_space(2 * vsize);
ctx               347 lib/mpi/mpih-mul.c 			if (!ctx->tp) {
ctx               348 lib/mpi/mpih-mul.c 				if (ctx->tspace)
ctx               349 lib/mpi/mpih-mul.c 					mpi_free_limb_space(ctx->tspace);
ctx               350 lib/mpi/mpih-mul.c 				ctx->tspace = NULL;
ctx               353 lib/mpi/mpih-mul.c 			ctx->tp_size = vsize;
ctx               357 lib/mpi/mpih-mul.c 			MPN_MUL_N_RECURSE(ctx->tp, up, vp, vsize, ctx->tspace);
ctx               358 lib/mpi/mpih-mul.c 			cy = mpihelp_add_n(prodp, prodp, ctx->tp, vsize);
ctx               359 lib/mpi/mpih-mul.c 			mpihelp_add_1(prodp + vsize, ctx->tp + vsize, vsize,
ctx               370 lib/mpi/mpih-mul.c 			if (mpihelp_mul(ctx->tspace, vp, vsize, up, usize, &tmp)
ctx               374 lib/mpi/mpih-mul.c 			if (!ctx->next) {
ctx               375 lib/mpi/mpih-mul.c 				ctx->next = kzalloc(sizeof *ctx, GFP_KERNEL);
ctx               376 lib/mpi/mpih-mul.c 				if (!ctx->next)
ctx               379 lib/mpi/mpih-mul.c 			if (mpihelp_mul_karatsuba_case(ctx->tspace,
ctx               382 lib/mpi/mpih-mul.c 						       ctx->next) < 0)
ctx               386 lib/mpi/mpih-mul.c 		cy = mpihelp_add_n(prodp, prodp, ctx->tspace, vsize);
ctx               387 lib/mpi/mpih-mul.c 		mpihelp_add_1(prodp + vsize, ctx->tspace + vsize, usize, cy);
ctx               393 lib/mpi/mpih-mul.c void mpihelp_release_karatsuba_ctx(struct karatsuba_ctx *ctx)
ctx               397 lib/mpi/mpih-mul.c 	if (ctx->tp)
ctx               398 lib/mpi/mpih-mul.c 		mpi_free_limb_space(ctx->tp);
ctx               399 lib/mpi/mpih-mul.c 	if (ctx->tspace)
ctx               400 lib/mpi/mpih-mul.c 		mpi_free_limb_space(ctx->tspace);
ctx               401 lib/mpi/mpih-mul.c 	for (ctx = ctx->next; ctx; ctx = ctx2) {
ctx               402 lib/mpi/mpih-mul.c 		ctx2 = ctx->next;
ctx               403 lib/mpi/mpih-mul.c 		if (ctx->tp)
ctx               404 lib/mpi/mpih-mul.c 			mpi_free_limb_space(ctx->tp);
ctx               405 lib/mpi/mpih-mul.c 		if (ctx->tspace)
ctx               406 lib/mpi/mpih-mul.c 			mpi_free_limb_space(ctx->tspace);
ctx               407 lib/mpi/mpih-mul.c 		kfree(ctx);
ctx               432 lib/mpi/mpih-mul.c 	struct karatsuba_ctx ctx;
ctx               478 lib/mpi/mpih-mul.c 	memset(&ctx, 0, sizeof ctx);
ctx               479 lib/mpi/mpih-mul.c 	if (mpihelp_mul_karatsuba_case(prodp, up, usize, vp, vsize, &ctx) < 0)
ctx               481 lib/mpi/mpih-mul.c 	mpihelp_release_karatsuba_ctx(&ctx);
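
mpihelp_mul_karatsuba_case() above lazily grows tspace/tp and reallocates only when a recursion level needs more limbs than the last. A generic sketch of that grow-on-demand scratch idiom; struct scratch and its helpers are illustrative only, not MPI API:

#include <linux/slab.h>

struct scratch {
	u8 *buf;
	size_t size;
};

static u8 *scratch_get(struct scratch *s, size_t need)
{
	if (!s->buf || s->size < need) {	/* reallocate only on growth */
		kfree(s->buf);
		s->buf = kmalloc(need, GFP_KERNEL);
		s->size = s->buf ? need : 0;
	}
	return s->buf;
}

static void scratch_release(struct scratch *s)
{
	kfree(s->buf);
	s->buf = NULL;
	s->size = 0;
}
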
ctx               141 lib/zstd/compress.c const seqStore_t *ZSTD_getSeqStore(const ZSTD_CCtx *ctx) /* hidden interface */ { return &(ctx->seqStore); }
ctx              1104 lib/zstd/compress.c static void ZSTD_compressBlock_fast(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              1106 lib/zstd/compress.c 	const U32 mls = ctx->params.cParams.searchLength;
ctx              1109 lib/zstd/compress.c 	case 4: ZSTD_compressBlock_fast_generic(ctx, src, srcSize, 4); return;
ctx              1110 lib/zstd/compress.c 	case 5: ZSTD_compressBlock_fast_generic(ctx, src, srcSize, 5); return;
ctx              1111 lib/zstd/compress.c 	case 6: ZSTD_compressBlock_fast_generic(ctx, src, srcSize, 6); return;
ctx              1112 lib/zstd/compress.c 	case 7: ZSTD_compressBlock_fast_generic(ctx, src, srcSize, 7); return;
ctx              1116 lib/zstd/compress.c static void ZSTD_compressBlock_fast_extDict_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const U32 mls)
ctx              1118 lib/zstd/compress.c 	U32 *hashTable = ctx->hashTable;
ctx              1119 lib/zstd/compress.c 	const U32 hBits = ctx->params.cParams.hashLog;
ctx              1120 lib/zstd/compress.c 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx              1121 lib/zstd/compress.c 	const BYTE *const base = ctx->base;
ctx              1122 lib/zstd/compress.c 	const BYTE *const dictBase = ctx->dictBase;
ctx              1126 lib/zstd/compress.c 	const U32 lowestIndex = ctx->lowLimit;
ctx              1128 lib/zstd/compress.c 	const U32 dictLimit = ctx->dictLimit;
ctx              1133 lib/zstd/compress.c 	U32 offset_1 = ctx->rep[0], offset_2 = ctx->rep[1];
ctx              1209 lib/zstd/compress.c 	ctx->repToConfirm[0] = offset_1;
ctx              1210 lib/zstd/compress.c 	ctx->repToConfirm[1] = offset_2;
ctx              1220 lib/zstd/compress.c static void ZSTD_compressBlock_fast_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              1222 lib/zstd/compress.c 	U32 const mls = ctx->params.cParams.searchLength;
ctx              1225 lib/zstd/compress.c 	case 4: ZSTD_compressBlock_fast_extDict_generic(ctx, src, srcSize, 4); return;
ctx              1226 lib/zstd/compress.c 	case 5: ZSTD_compressBlock_fast_extDict_generic(ctx, src, srcSize, 5); return;
ctx              1227 lib/zstd/compress.c 	case 6: ZSTD_compressBlock_fast_extDict_generic(ctx, src, srcSize, 6); return;
ctx              1228 lib/zstd/compress.c 	case 7: ZSTD_compressBlock_fast_extDict_generic(ctx, src, srcSize, 7); return;
ctx              1383 lib/zstd/compress.c static void ZSTD_compressBlock_doubleFast(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              1385 lib/zstd/compress.c 	const U32 mls = ctx->params.cParams.searchLength;
ctx              1388 lib/zstd/compress.c 	case 4: ZSTD_compressBlock_doubleFast_generic(ctx, src, srcSize, 4); return;
ctx              1389 lib/zstd/compress.c 	case 5: ZSTD_compressBlock_doubleFast_generic(ctx, src, srcSize, 5); return;
ctx              1390 lib/zstd/compress.c 	case 6: ZSTD_compressBlock_doubleFast_generic(ctx, src, srcSize, 6); return;
ctx              1391 lib/zstd/compress.c 	case 7: ZSTD_compressBlock_doubleFast_generic(ctx, src, srcSize, 7); return;
ctx              1395 lib/zstd/compress.c static void ZSTD_compressBlock_doubleFast_extDict_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const U32 mls)
ctx              1397 lib/zstd/compress.c 	U32 *const hashLong = ctx->hashTable;
ctx              1398 lib/zstd/compress.c 	U32 const hBitsL = ctx->params.cParams.hashLog;
ctx              1399 lib/zstd/compress.c 	U32 *const hashSmall = ctx->chainTable;
ctx              1400 lib/zstd/compress.c 	U32 const hBitsS = ctx->params.cParams.chainLog;
ctx              1401 lib/zstd/compress.c 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx              1402 lib/zstd/compress.c 	const BYTE *const base = ctx->base;
ctx              1403 lib/zstd/compress.c 	const BYTE *const dictBase = ctx->dictBase;
ctx              1407 lib/zstd/compress.c 	const U32 lowestIndex = ctx->lowLimit;
ctx              1409 lib/zstd/compress.c 	const U32 dictLimit = ctx->dictLimit;
ctx              1414 lib/zstd/compress.c 	U32 offset_1 = ctx->rep[0], offset_2 = ctx->rep[1];
ctx              1532 lib/zstd/compress.c 	ctx->repToConfirm[0] = offset_1;
ctx              1533 lib/zstd/compress.c 	ctx->repToConfirm[1] = offset_2;
ctx              1543 lib/zstd/compress.c static void ZSTD_compressBlock_doubleFast_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              1545 lib/zstd/compress.c 	U32 const mls = ctx->params.cParams.searchLength;
ctx              1548 lib/zstd/compress.c 	case 4: ZSTD_compressBlock_doubleFast_extDict_generic(ctx, src, srcSize, 4); return;
ctx              1549 lib/zstd/compress.c 	case 5: ZSTD_compressBlock_doubleFast_extDict_generic(ctx, src, srcSize, 5); return;
ctx              1550 lib/zstd/compress.c 	case 6: ZSTD_compressBlock_doubleFast_extDict_generic(ctx, src, srcSize, 6); return;
ctx              1551 lib/zstd/compress.c 	case 7: ZSTD_compressBlock_doubleFast_extDict_generic(ctx, src, srcSize, 7); return;
ctx              1897 lib/zstd/compress.c void ZSTD_compressBlock_lazy_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const U32 searchMethod, const U32 depth)
ctx              1899 lib/zstd/compress.c 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx              1905 lib/zstd/compress.c 	const BYTE *const base = ctx->base + ctx->dictLimit;
ctx              1907 lib/zstd/compress.c 	U32 const maxSearches = 1 << ctx->params.cParams.searchLog;
ctx              1908 lib/zstd/compress.c 	U32 const mls = ctx->params.cParams.searchLength;
ctx              1912 lib/zstd/compress.c 	U32 offset_1 = ctx->rep[0], offset_2 = ctx->rep[1], savedOffset = 0;
ctx              1916 lib/zstd/compress.c 	ctx->nextToUpdate3 = ctx->nextToUpdate;
ctx              1942 lib/zstd/compress.c 			size_t const ml2 = searchMax(ctx, ip, iend, &offsetFound, maxSearches, mls);
ctx              1965 lib/zstd/compress.c 					size_t const ml2 = searchMax(ctx, ip, iend, &offset2, maxSearches, mls);
ctx              1986 lib/zstd/compress.c 						size_t const ml2 = searchMax(ctx, ip, iend, &offset2, maxSearches, mls);
ctx              2038 lib/zstd/compress.c 	ctx->repToConfirm[0] = offset_1 ? offset_1 : savedOffset;
ctx              2039 lib/zstd/compress.c 	ctx->repToConfirm[1] = offset_2 ? offset_2 : savedOffset;
ctx              2049 lib/zstd/compress.c static void ZSTD_compressBlock_btlazy2(ZSTD_CCtx *ctx, const void *src, size_t srcSize) { ZSTD_compressBlock_lazy_generic(ctx, src, srcSize, 1, 2); }
ctx              2051 lib/zstd/compress.c static void ZSTD_compressBlock_lazy2(ZSTD_CCtx *ctx, const void *src, size_t srcSize) { ZSTD_compressBlock_lazy_generic(ctx, src, srcSize, 0, 2); }
ctx              2053 lib/zstd/compress.c static void ZSTD_compressBlock_lazy(ZSTD_CCtx *ctx, const void *src, size_t srcSize) { ZSTD_compressBlock_lazy_generic(ctx, src, srcSize, 0, 1); }
ctx              2055 lib/zstd/compress.c static void ZSTD_compressBlock_greedy(ZSTD_CCtx *ctx, const void *src, size_t srcSize) { ZSTD_compressBlock_lazy_generic(ctx, src, srcSize, 0, 0); }
ctx              2058 lib/zstd/compress.c void ZSTD_compressBlock_lazy_extDict_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const U32 searchMethod, const U32 depth)
ctx              2060 lib/zstd/compress.c 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx              2066 lib/zstd/compress.c 	const BYTE *const base = ctx->base;
ctx              2067 lib/zstd/compress.c 	const U32 dictLimit = ctx->dictLimit;
ctx              2068 lib/zstd/compress.c 	const U32 lowestIndex = ctx->lowLimit;
ctx              2070 lib/zstd/compress.c 	const BYTE *const dictBase = ctx->dictBase;
ctx              2072 lib/zstd/compress.c 	const BYTE *const dictStart = dictBase + ctx->lowLimit;
ctx              2074 lib/zstd/compress.c 	const U32 maxSearches = 1 << ctx->params.cParams.searchLog;
ctx              2075 lib/zstd/compress.c 	const U32 mls = ctx->params.cParams.searchLength;
ctx              2080 lib/zstd/compress.c 	U32 offset_1 = ctx->rep[0], offset_2 = ctx->rep[1];
ctx              2083 lib/zstd/compress.c 	ctx->nextToUpdate3 = ctx->nextToUpdate;
ctx              2112 lib/zstd/compress.c 			size_t const ml2 = searchMax(ctx, ip, iend, &offsetFound, maxSearches, mls);
ctx              2149 lib/zstd/compress.c 					size_t const ml2 = searchMax(ctx, ip, iend, &offset2, maxSearches, mls);
ctx              2184 lib/zstd/compress.c 						size_t const ml2 = searchMax(ctx, ip, iend, &offset2, maxSearches, mls);
ctx              2241 lib/zstd/compress.c 	ctx->repToConfirm[0] = offset_1;
ctx              2242 lib/zstd/compress.c 	ctx->repToConfirm[1] = offset_2;
ctx              2252 lib/zstd/compress.c void ZSTD_compressBlock_greedy_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize) { ZSTD_compressBlock_lazy_extDict_generic(ctx, src, srcSize, 0, 0); }
ctx              2254 lib/zstd/compress.c static void ZSTD_compressBlock_lazy_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2256 lib/zstd/compress.c 	ZSTD_compressBlock_lazy_extDict_generic(ctx, src, srcSize, 0, 1);
ctx              2259 lib/zstd/compress.c static void ZSTD_compressBlock_lazy2_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2261 lib/zstd/compress.c 	ZSTD_compressBlock_lazy_extDict_generic(ctx, src, srcSize, 0, 2);
ctx              2264 lib/zstd/compress.c static void ZSTD_compressBlock_btlazy2_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2266 lib/zstd/compress.c 	ZSTD_compressBlock_lazy_extDict_generic(ctx, src, srcSize, 1, 2);
ctx              2272 lib/zstd/compress.c static void ZSTD_compressBlock_btopt(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2275 lib/zstd/compress.c 	ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 0);
ctx              2277 lib/zstd/compress.c 	(void)ctx;
ctx              2284 lib/zstd/compress.c static void ZSTD_compressBlock_btopt2(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2287 lib/zstd/compress.c 	ZSTD_compressBlock_opt_generic(ctx, src, srcSize, 1);
ctx              2289 lib/zstd/compress.c 	(void)ctx;
ctx              2296 lib/zstd/compress.c static void ZSTD_compressBlock_btopt_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2299 lib/zstd/compress.c 	ZSTD_compressBlock_opt_extDict_generic(ctx, src, srcSize, 0);
ctx              2301 lib/zstd/compress.c 	(void)ctx;
ctx              2308 lib/zstd/compress.c static void ZSTD_compressBlock_btopt2_extDict(ZSTD_CCtx *ctx, const void *src, size_t srcSize)
ctx              2311 lib/zstd/compress.c 	ZSTD_compressBlock_opt_extDict_generic(ctx, src, srcSize, 1);
ctx              2313 lib/zstd/compress.c 	(void)ctx;
ctx              2320 lib/zstd/compress.c typedef void (*ZSTD_blockCompressor)(ZSTD_CCtx *ctx, const void *src, size_t srcSize);
ctx              2825 lib/zstd/compress.c size_t ZSTD_compress_usingDict(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, const void *dict, size_t dictSize,
ctx              2828 lib/zstd/compress.c 	return ZSTD_compress_internal(ctx, dst, dstCapacity, src, srcSize, dict, dictSize, params);
ctx              2831 lib/zstd/compress.c size_t ZSTD_compressCCtx(ZSTD_CCtx *ctx, void *dst, size_t dstCapacity, const void *src, size_t srcSize, ZSTD_parameters params)
ctx              2833 lib/zstd/compress.c 	return ZSTD_compress_internal(ctx, dst, dstCapacity, src, srcSize, NULL, 0, params);
ctx               212 lib/zstd/zstd_internal.h const seqStore_t *ZSTD_getSeqStore(const ZSTD_CCtx *ctx);
ctx               407 lib/zstd/zstd_opt.h void ZSTD_compressBlock_opt_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const int ultra)
ctx               409 lib/zstd/zstd_opt.h 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx               415 lib/zstd/zstd_opt.h 	const BYTE *const base = ctx->base;
ctx               416 lib/zstd/zstd_opt.h 	const BYTE *const prefixStart = base + ctx->dictLimit;
ctx               418 lib/zstd/zstd_opt.h 	const U32 maxSearches = 1U << ctx->params.cParams.searchLog;
ctx               419 lib/zstd/zstd_opt.h 	const U32 sufficient_len = ctx->params.cParams.targetLength;
ctx               420 lib/zstd/zstd_opt.h 	const U32 mls = ctx->params.cParams.searchLength;
ctx               421 lib/zstd/zstd_opt.h 	const U32 minMatch = (ctx->params.cParams.searchLength == 3) ? 3 : 4;
ctx               429 lib/zstd/zstd_opt.h 	ctx->nextToUpdate3 = ctx->nextToUpdate;
ctx               435 lib/zstd/zstd_opt.h 			rep[i] = ctx->rep[i];
ctx               472 lib/zstd/zstd_opt.h 		match_num = ZSTD_BtGetAllMatches_selectMLS(ctx, ip, iend, maxSearches, mls, matches, minMatch);
ctx               591 lib/zstd/zstd_opt.h 			match_num = ZSTD_BtGetAllMatches_selectMLS(ctx, inr, iend, maxSearches, mls, matches, best_mlen);
ctx               688 lib/zstd/zstd_opt.h 			ctx->repToConfirm[i] = rep[i];
ctx               700 lib/zstd/zstd_opt.h void ZSTD_compressBlock_opt_extDict_generic(ZSTD_CCtx *ctx, const void *src, size_t srcSize, const int ultra)
ctx               702 lib/zstd/zstd_opt.h 	seqStore_t *seqStorePtr = &(ctx->seqStore);
ctx               708 lib/zstd/zstd_opt.h 	const BYTE *const base = ctx->base;
ctx               709 lib/zstd/zstd_opt.h 	const U32 lowestIndex = ctx->lowLimit;
ctx               710 lib/zstd/zstd_opt.h 	const U32 dictLimit = ctx->dictLimit;
ctx               712 lib/zstd/zstd_opt.h 	const BYTE *const dictBase = ctx->dictBase;
ctx               715 lib/zstd/zstd_opt.h 	const U32 maxSearches = 1U << ctx->params.cParams.searchLog;
ctx               716 lib/zstd/zstd_opt.h 	const U32 sufficient_len = ctx->params.cParams.targetLength;
ctx               717 lib/zstd/zstd_opt.h 	const U32 mls = ctx->params.cParams.searchLength;
ctx               718 lib/zstd/zstd_opt.h 	const U32 minMatch = (ctx->params.cParams.searchLength == 3) ? 3 : 4;
ctx               729 lib/zstd/zstd_opt.h 			rep[i] = ctx->rep[i];
ctx               732 lib/zstd/zstd_opt.h 	ctx->nextToUpdate3 = ctx->nextToUpdate;
ctx               780 lib/zstd/zstd_opt.h 		match_num = ZSTD_BtGetAllMatches_selectMLS_extDict(ctx, ip, iend, maxSearches, mls, matches, minMatch); /* first search (depth 0) */
ctx               905 lib/zstd/zstd_opt.h 			match_num = ZSTD_BtGetAllMatches_selectMLS_extDict(ctx, inr, iend, maxSearches, mls, matches, minMatch);
ctx              1003 lib/zstd/zstd_opt.h 			ctx->repToConfirm[i] = rep[i];
ctx               320 mm/gup.c       				    struct follow_page_context *ctx)
ctx               368 mm/gup.c       		page = follow_devmap_pmd(vma, address, pmd, flags, &ctx->pgmap);
ctx               374 mm/gup.c       		return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);
ctx               394 mm/gup.c       		return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);
ctx               424 mm/gup.c       			follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);
ctx               428 mm/gup.c       	ctx->page_mask = HPAGE_PMD_NR - 1;
ctx               435 mm/gup.c       				    struct follow_page_context *ctx)
ctx               461 mm/gup.c       		page = follow_devmap_pud(vma, address, pud, flags, &ctx->pgmap);
ctx               469 mm/gup.c       	return follow_pmd_mask(vma, address, pud, flags, ctx);
ctx               475 mm/gup.c       				    struct follow_page_context *ctx)
ctx               495 mm/gup.c       	return follow_pud_mask(vma, address, p4d, flags, ctx);
ctx               519 mm/gup.c       			      struct follow_page_context *ctx)
ctx               525 mm/gup.c       	ctx->page_mask = 0;
ctx               554 mm/gup.c       	return follow_p4d_mask(vma, address, pgd, flags, ctx);
ctx               560 mm/gup.c       	struct follow_page_context ctx = { NULL };
ctx               563 mm/gup.c       	page = follow_page_mask(vma, address, foll_flags, &ctx);
ctx               564 mm/gup.c       	if (ctx.pgmap)
ctx               565 mm/gup.c       		put_dev_pagemap(ctx.pgmap);
ctx               795 mm/gup.c       	struct follow_page_context ctx = { NULL };
ctx               826 mm/gup.c       				ctx.page_mask = 0;
ctx               858 mm/gup.c       		page = follow_page_mask(vma, start, foll_flags, &ctx);
ctx               890 mm/gup.c       			ctx.page_mask = 0;
ctx               895 mm/gup.c       			ctx.page_mask = 0;
ctx               897 mm/gup.c       		page_increm = 1 + (~(start >> PAGE_SHIFT) & ctx.page_mask);
ctx               905 mm/gup.c       	if (ctx.pgmap)
ctx               906 mm/gup.c       		put_dev_pagemap(ctx.pgmap);
ctx              3414 mm/shmem.c     	struct shmem_options *ctx = fc->fs_private;
ctx              3435 mm/shmem.c     		ctx->blocks = DIV_ROUND_UP(size, PAGE_SIZE);
ctx              3436 mm/shmem.c     		ctx->seen |= SHMEM_SEEN_BLOCKS;
ctx              3439 mm/shmem.c     		ctx->blocks = memparse(param->string, &rest);
ctx              3442 mm/shmem.c     		ctx->seen |= SHMEM_SEEN_BLOCKS;
ctx              3445 mm/shmem.c     		ctx->inodes = memparse(param->string, &rest);
ctx              3448 mm/shmem.c     		ctx->seen |= SHMEM_SEEN_INODES;
ctx              3451 mm/shmem.c     		ctx->mode = result.uint_32 & 07777;
ctx              3454 mm/shmem.c     		ctx->uid = make_kuid(current_user_ns(), result.uint_32);
ctx              3455 mm/shmem.c     		if (!uid_valid(ctx->uid))
ctx              3459 mm/shmem.c     		ctx->gid = make_kgid(current_user_ns(), result.uint_32);
ctx              3460 mm/shmem.c     		if (!gid_valid(ctx->gid))
ctx              3464 mm/shmem.c     		ctx->huge = result.uint_32;
ctx              3465 mm/shmem.c     		if (ctx->huge != SHMEM_HUGE_NEVER &&
ctx              3469 mm/shmem.c     		ctx->seen |= SHMEM_SEEN_HUGE;
ctx              3473 mm/shmem.c     			mpol_put(ctx->mpol);
ctx              3474 mm/shmem.c     			ctx->mpol = NULL;
ctx              3475 mm/shmem.c     			if (mpol_parse_str(param->string, &ctx->mpol))
ctx              3542 mm/shmem.c     	struct shmem_options *ctx = fc->fs_private;
ctx              3549 mm/shmem.c     	if ((ctx->seen & SHMEM_SEEN_BLOCKS) && ctx->blocks) {
ctx              3555 mm/shmem.c     					   ctx->blocks) > 0) {
ctx              3560 mm/shmem.c     	if ((ctx->seen & SHMEM_SEEN_INODES) && ctx->inodes) {
ctx              3565 mm/shmem.c     		if (ctx->inodes < inodes) {
ctx              3571 mm/shmem.c     	if (ctx->seen & SHMEM_SEEN_HUGE)
ctx              3572 mm/shmem.c     		sbinfo->huge = ctx->huge;
ctx              3573 mm/shmem.c     	if (ctx->seen & SHMEM_SEEN_BLOCKS)
ctx              3574 mm/shmem.c     		sbinfo->max_blocks  = ctx->blocks;
ctx              3575 mm/shmem.c     	if (ctx->seen & SHMEM_SEEN_INODES) {
ctx              3576 mm/shmem.c     		sbinfo->max_inodes  = ctx->inodes;
ctx              3577 mm/shmem.c     		sbinfo->free_inodes = ctx->inodes - inodes;
ctx              3583 mm/shmem.c     	if (ctx->mpol) {
ctx              3585 mm/shmem.c     		sbinfo->mpol = ctx->mpol;	/* transfers initial ref */
ctx              3586 mm/shmem.c     		ctx->mpol = NULL;
ctx              3635 mm/shmem.c     	struct shmem_options *ctx = fc->fs_private;
ctx              3655 mm/shmem.c     		if (!(ctx->seen & SHMEM_SEEN_BLOCKS))
ctx              3656 mm/shmem.c     			ctx->blocks = shmem_default_max_blocks();
ctx              3657 mm/shmem.c     		if (!(ctx->seen & SHMEM_SEEN_INODES))
ctx              3658 mm/shmem.c     			ctx->inodes = shmem_default_max_inodes();
ctx              3667 mm/shmem.c     	sbinfo->max_blocks = ctx->blocks;
ctx              3668 mm/shmem.c     	sbinfo->free_inodes = sbinfo->max_inodes = ctx->inodes;
ctx              3669 mm/shmem.c     	sbinfo->uid = ctx->uid;
ctx              3670 mm/shmem.c     	sbinfo->gid = ctx->gid;
ctx              3671 mm/shmem.c     	sbinfo->mode = ctx->mode;
ctx              3672 mm/shmem.c     	sbinfo->huge = ctx->huge;
ctx              3673 mm/shmem.c     	sbinfo->mpol = ctx->mpol;
ctx              3674 mm/shmem.c     	ctx->mpol = NULL;
ctx              3718 mm/shmem.c     	struct shmem_options *ctx = fc->fs_private;
ctx              3720 mm/shmem.c     	if (ctx) {
ctx              3721 mm/shmem.c     		mpol_put(ctx->mpol);
ctx              3722 mm/shmem.c     		kfree(ctx);
ctx              3874 mm/shmem.c     	struct shmem_options *ctx;
ctx              3876 mm/shmem.c     	ctx = kzalloc(sizeof(struct shmem_options), GFP_KERNEL);
ctx              3877 mm/shmem.c     	if (!ctx)
ctx              3880 mm/shmem.c     	ctx->mode = 0777 | S_ISVTX;
ctx              3881 mm/shmem.c     	ctx->uid = current_fsuid();
ctx              3882 mm/shmem.c     	ctx->gid = current_fsgid();
ctx              3884 mm/shmem.c     	fc->fs_private = ctx;
ctx               232 mm/userfaultfd.c 		if (!dst_vma->vm_userfaultfd_ctx.ctx)
ctx               486 mm/userfaultfd.c 	if (!dst_vma->vm_userfaultfd_ctx.ctx)
ctx                35 net/6lowpan/core.c 	spin_lock_init(&lowpan_dev(dev)->ctx.lock);
ctx                37 net/6lowpan/core.c 		lowpan_dev(dev)->ctx.table[i].id = i;
ctx               136 net/6lowpan/core.c 				  &lowpan_dev(dev)->ctx.table[i].flags);
ctx                19 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = data;
ctx                25 net/6lowpan/debugfs.c 		set_bit(LOWPAN_IPHC_CTX_FLAG_ACTIVE, &ctx->flags);
ctx                27 net/6lowpan/debugfs.c 		clear_bit(LOWPAN_IPHC_CTX_FLAG_ACTIVE, &ctx->flags);
ctx                44 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = data;
ctx                50 net/6lowpan/debugfs.c 		set_bit(LOWPAN_IPHC_CTX_FLAG_COMPRESSION, &ctx->flags);
ctx                52 net/6lowpan/debugfs.c 		clear_bit(LOWPAN_IPHC_CTX_FLAG_COMPRESSION, &ctx->flags);
ctx                68 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = data;
ctx                70 net/6lowpan/debugfs.c 		container_of(ctx, struct lowpan_iphc_ctx_table, table[ctx->id]);
ctx                76 net/6lowpan/debugfs.c 	ctx->plen = val;
ctx                84 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = data;
ctx                86 net/6lowpan/debugfs.c 		container_of(ctx, struct lowpan_iphc_ctx_table, table[ctx->id]);
ctx                89 net/6lowpan/debugfs.c 	*val = ctx->plen;
ctx                99 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = file->private;
ctx               101 net/6lowpan/debugfs.c 		container_of(ctx, struct lowpan_iphc_ctx_table, table[ctx->id]);
ctx               105 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[0]),
ctx               106 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[1]),
ctx               107 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[2]),
ctx               108 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[3]),
ctx               109 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[4]),
ctx               110 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[5]),
ctx               111 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[6]),
ctx               112 net/6lowpan/debugfs.c 		   be16_to_cpu(ctx->pfx.s6_addr16[7]));
ctx               129 net/6lowpan/debugfs.c 	struct lowpan_iphc_ctx *ctx = file->private;
ctx               131 net/6lowpan/debugfs.c 		container_of(ctx, struct lowpan_iphc_ctx_table, table[ctx->id]);
ctx               151 net/6lowpan/debugfs.c 		ctx->pfx.s6_addr16[i] = cpu_to_be16(addr[i] & 0xffff);
ctx               167 net/6lowpan/debugfs.c 					struct dentry *ctx, u8 id)
ctx               177 net/6lowpan/debugfs.c 	root = debugfs_create_dir(buf, ctx);
ctx               179 net/6lowpan/debugfs.c 	debugfs_create_file("active", 0644, root, &ldev->ctx.table[id],
ctx               182 net/6lowpan/debugfs.c 	debugfs_create_file("compression", 0644, root, &ldev->ctx.table[id],
ctx               185 net/6lowpan/debugfs.c 	debugfs_create_file("prefix", 0644, root, &ldev->ctx.table[id],
ctx               188 net/6lowpan/debugfs.c 	debugfs_create_file("prefix_len", 0644, root, &ldev->ctx.table[id],
ctx               255 net/6lowpan/debugfs.c 	debugfs_create_file("show", 0644, contexts, &lowpan_dev(dev)->ctx,
ctx               192 net/6lowpan/iphc.c 	struct lowpan_iphc_ctx *ret = &lowpan_dev(dev)->ctx.table[id];
ctx               204 net/6lowpan/iphc.c 	struct lowpan_iphc_ctx *table = lowpan_dev(dev)->ctx.table;
ctx               248 net/6lowpan/iphc.c 	struct lowpan_iphc_ctx *table = lowpan_dev(dev)->ctx.table;
ctx               365 net/6lowpan/iphc.c 					   const struct lowpan_iphc_ctx *ctx,
ctx               384 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(ipaddr, &ctx->pfx, ctx->plen);
ctx               391 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(ipaddr, &ctx->pfx, ctx->plen);
ctx               404 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(ipaddr, &ctx->pfx, ctx->plen);
ctx               480 net/6lowpan/iphc.c 						 struct lowpan_iphc_ctx *ctx,
ctx               494 net/6lowpan/iphc.c 	ipaddr->s6_addr[3] = ctx->plen;
ctx               496 net/6lowpan/iphc.c 	ipv6_addr_prefix(&network_pfx, &ctx->pfx, ctx->plen);
ctx               660 net/6lowpan/iphc.c 		spin_lock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               663 net/6lowpan/iphc.c 			spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               671 net/6lowpan/iphc.c 		spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               688 net/6lowpan/iphc.c 		spin_lock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               691 net/6lowpan/iphc.c 			spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               700 net/6lowpan/iphc.c 		spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               712 net/6lowpan/iphc.c 		spin_lock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               715 net/6lowpan/iphc.c 			spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               724 net/6lowpan/iphc.c 		spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               792 net/6lowpan/iphc.c 				       const struct lowpan_iphc_ctx *ctx,
ctx               808 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(&tmp, &ctx->pfx, ctx->plen);
ctx               818 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(&tmp, &ctx->pfx, ctx->plen);
ctx               832 net/6lowpan/iphc.c 				   const struct lowpan_iphc_ctx *ctx,
ctx               840 net/6lowpan/iphc.c 	if (ctx)
ctx               841 net/6lowpan/iphc.c 		ipv6_addr_prefix_copy(&tmp, &ctx->pfx, ctx->plen);
ctx               848 net/6lowpan/iphc.c 				   const struct lowpan_iphc_ctx *ctx,
ctx               856 net/6lowpan/iphc.c 		if (lowpan_iphc_compress_ctx_802154_lladdr(ipaddr, ctx,
ctx               863 net/6lowpan/iphc.c 		if (lowpan_iphc_addr_equal(dev, ctx, ipaddr, lladdr)) {
ctx               876 net/6lowpan/iphc.c 	ipv6_addr_prefix_copy(&tmp, &ctx->pfx, ctx->plen);
ctx               887 net/6lowpan/iphc.c 	ipv6_addr_prefix_copy(&tmp, &ctx->pfx, ctx->plen);
ctx              1086 net/6lowpan/iphc.c 					      const struct lowpan_iphc_ctx *ctx,
ctx              1166 net/6lowpan/iphc.c 	spin_lock_bh(&lowpan_dev(dev)->ctx.lock);
ctx              1175 net/6lowpan/iphc.c 	spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx              1177 net/6lowpan/iphc.c 	spin_lock_bh(&lowpan_dev(dev)->ctx.lock);
ctx              1183 net/6lowpan/iphc.c 	spin_unlock_bh(&lowpan_dev(dev)->ctx.lock);
ctx               381 net/bluetooth/smp.c 	struct crypto_aes_ctx ctx;
ctx               390 net/bluetooth/smp.c 	err = aes_expandkey(&ctx, tmp, 16);
ctx               399 net/bluetooth/smp.c 	aes_encrypt(&ctx, data, data);
ctx               406 net/bluetooth/smp.c 	memzero_explicit(&ctx, sizeof (ctx));
ctx                17 net/bpf/test_run.c static int bpf_test_run(struct bpf_prog *prog, void *ctx, u32 repeat,
ctx                44 net/bpf/test_run.c 		*retval = BPF_PROG_RUN(prog, ctx);
ctx               247 net/bpf/test_run.c 	struct __sk_buff *ctx = NULL;
ctx               260 net/bpf/test_run.c 	ctx = bpf_ctx_init(kattr, sizeof(struct __sk_buff));
ctx               261 net/bpf/test_run.c 	if (IS_ERR(ctx)) {
ctx               263 net/bpf/test_run.c 		return PTR_ERR(ctx);
ctx               283 net/bpf/test_run.c 		kfree(ctx);
ctx               292 net/bpf/test_run.c 		kfree(ctx);
ctx               307 net/bpf/test_run.c 	ret = convert___skb_to_skb(skb, ctx);
ctx               324 net/bpf/test_run.c 	convert_skb_to___skb(skb, ctx);
ctx               332 net/bpf/test_run.c 		ret = bpf_ctx_finish(kattr, uattr, ctx,
ctx               338 net/bpf/test_run.c 	kfree(ctx);
ctx               380 net/bpf/test_run.c static int verify_user_bpf_flow_keys(struct bpf_flow_keys *ctx)
ctx               383 net/bpf/test_run.c 	if (!range_is_zero(ctx, 0, offsetof(struct bpf_flow_keys, flags)))
ctx               388 net/bpf/test_run.c 	if (!range_is_zero(ctx, offsetof(struct bpf_flow_keys, flags) +
ctx               401 net/bpf/test_run.c 	struct bpf_flow_dissector ctx = {};
ctx               440 net/bpf/test_run.c 	ctx.flow_keys = &flow_keys;
ctx               441 net/bpf/test_run.c 	ctx.data = data;
ctx               442 net/bpf/test_run.c 	ctx.data_end = (__u8 *)data + size;
ctx               448 net/bpf/test_run.c 		retval = bpf_flow_dissect(prog, &ctx, eth->h_proto, ETH_HLEN,
ctx                69 net/bridge/netfilter/nft_meta_bridge.c static int nft_meta_bridge_get_init(const struct nft_ctx *ctx,
ctx                87 net/bridge/netfilter/nft_meta_bridge.c 		return nft_meta_get_init(ctx, expr, tb);
ctx                91 net/bridge/netfilter/nft_meta_bridge.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               115 net/bridge/netfilter/nft_meta_bridge.c nft_meta_bridge_select_ops(const struct nft_ctx *ctx,
ctx               359 net/bridge/netfilter/nft_reject_bridge.c static int nft_reject_bridge_validate(const struct nft_ctx *ctx,
ctx               363 net/bridge/netfilter/nft_reject_bridge.c 	return nft_chain_validate_hooks(ctx->chain, (1 << NF_BR_PRE_ROUTING) |
ctx               367 net/bridge/netfilter/nft_reject_bridge.c static int nft_reject_bridge_init(const struct nft_ctx *ctx,
ctx              2948 net/core/devlink.c 			     struct devlink_param_gset_ctx *ctx)
ctx              2952 net/core/devlink.c 	return param->get(devlink, param->id, ctx);
ctx              2957 net/core/devlink.c 			     struct devlink_param_gset_ctx *ctx)
ctx              2961 net/core/devlink.c 	return param->set(devlink, param->id, ctx);
ctx              3042 net/core/devlink.c 	struct devlink_param_gset_ctx ctx;
ctx              3061 net/core/devlink.c 			ctx.cmode = i;
ctx              3062 net/core/devlink.c 			err = devlink_param_get(devlink, param, &ctx);
ctx              3065 net/core/devlink.c 			param_value[i] = ctx.val;
ctx              3312 net/core/devlink.c 	struct devlink_param_gset_ctx ctx;
ctx              3352 net/core/devlink.c 		ctx.val = value;
ctx              3353 net/core/devlink.c 		ctx.cmode = cmode;
ctx              3354 net/core/devlink.c 		err = devlink_param_set(devlink, param, &ctx);
ctx              3100 net/core/ethtool.c 			act->queue.ctx = input->rss_ctx;
ctx              1732 net/core/filter.c 	   const struct bpf_flow_dissector *, ctx, u32, offset,
ctx              1740 net/core/filter.c 	if (unlikely(!ctx->skb))
ctx              1743 net/core/filter.c 	ptr = skb_header_pointer(ctx->skb, offset, len, to);
ctx              4175 net/core/filter.c BPF_CALL_1(bpf_get_socket_cookie_sock_addr, struct bpf_sock_addr_kern *, ctx)
ctx              4177 net/core/filter.c 	return sock_gen_cookie(ctx->sk);
ctx              4187 net/core/filter.c BPF_CALL_1(bpf_get_socket_cookie_sock_ops, struct bpf_sock_ops_kern *, ctx)
ctx              4189 net/core/filter.c 	return sock_gen_cookie(ctx->sk);
ctx              4507 net/core/filter.c BPF_CALL_3(bpf_bind, struct bpf_sock_addr_kern *, ctx, struct sockaddr *, addr,
ctx              4511 net/core/filter.c 	struct sock *sk = ctx->sk;
ctx              4840 net/core/filter.c BPF_CALL_4(bpf_xdp_fib_lookup, struct xdp_buff *, ctx,
ctx              4852 net/core/filter.c 		return bpf_ipv4_fib_lookup(dev_net(ctx->rxq->dev), params,
ctx              4857 net/core/filter.c 		return bpf_ipv6_fib_lookup(dev_net(ctx->rxq->dev), params,
ctx              5426 net/core/filter.c BPF_CALL_5(bpf_xdp_sk_lookup_udp, struct xdp_buff *, ctx,
ctx              5429 net/core/filter.c 	struct net *caller_net = dev_net(ctx->rxq->dev);
ctx              5430 net/core/filter.c 	int ifindex = ctx->rxq->dev->ifindex;
ctx              5449 net/core/filter.c BPF_CALL_5(bpf_xdp_skc_lookup_tcp, struct xdp_buff *, ctx,
ctx              5452 net/core/filter.c 	struct net *caller_net = dev_net(ctx->rxq->dev);
ctx              5453 net/core/filter.c 	int ifindex = ctx->rxq->dev->ifindex;
ctx              5472 net/core/filter.c BPF_CALL_5(bpf_xdp_sk_lookup_tcp, struct xdp_buff *, ctx,
ctx              5475 net/core/filter.c 	struct net *caller_net = dev_net(ctx->rxq->dev);
ctx              5476 net/core/filter.c 	int ifindex = ctx->rxq->dev->ifindex;
ctx              5495 net/core/filter.c BPF_CALL_5(bpf_sock_addr_skc_lookup_tcp, struct bpf_sock_addr_kern *, ctx,
ctx              5499 net/core/filter.c 					       sock_net(ctx->sk), 0,
ctx              5514 net/core/filter.c BPF_CALL_5(bpf_sock_addr_sk_lookup_tcp, struct bpf_sock_addr_kern *, ctx,
ctx              5518 net/core/filter.c 					      sock_net(ctx->sk), 0, IPPROTO_TCP,
ctx              5533 net/core/filter.c BPF_CALL_5(bpf_sock_addr_sk_lookup_udp, struct bpf_sock_addr_kern *, ctx,
ctx              5537 net/core/filter.c 					      sock_net(ctx->sk), 0, IPPROTO_UDP,
ctx               843 net/core/flow_dissector.c bool bpf_flow_dissect(struct bpf_prog *prog, struct bpf_flow_dissector *ctx,
ctx               846 net/core/flow_dissector.c 	struct bpf_flow_keys *flow_keys = ctx->flow_keys;
ctx               864 net/core/flow_dissector.c 	result = BPF_PROG_RUN(prog, ctx);
ctx               965 net/core/flow_dissector.c 			struct bpf_flow_dissector ctx = {
ctx               973 net/core/flow_dissector.c 				ctx.skb = skb;
ctx               981 net/core/flow_dissector.c 			ret = bpf_flow_dissect(attached, &ctx, n_proto, nhoff,
ctx                72 net/core/netclassid_cgroup.c 	struct update_classid_context *ctx = (void *)v;
ctx                77 net/core/netclassid_cgroup.c 		sock_cgroup_set_classid(&sock->sk->sk_cgrp_data, ctx->classid);
ctx                80 net/core/netclassid_cgroup.c 	if (--ctx->batch == 0) {
ctx                81 net/core/netclassid_cgroup.c 		ctx->batch = UPDATE_CLASSID_BATCH;
ctx                89 net/core/netclassid_cgroup.c 	struct update_classid_context ctx = {
ctx                97 net/core/netclassid_cgroup.c 		fd = iterate_fd(p->files, fd, update_classid_sock, &ctx);
ctx                67 net/ipv4/netfilter/nf_nat_snmp_basic_main.c static void fast_csum(struct snmp_ctx *ctx, unsigned char offset)
ctx                73 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		memcpy(&s[1], &ctx->from, 4);
ctx                74 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		memcpy(&s[7], &ctx->to, 4);
ctx                83 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		memcpy(&s[0], &ctx->from, 4);
ctx                84 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		memcpy(&s[4], &ctx->to, 4);
ctx                91 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	*ctx->check = csum_fold(csum_partial(s, size,
ctx                92 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 					     ~csum_unfold(*ctx->check)));
ctx               108 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	struct snmp_ctx *ctx = (struct snmp_ctx *)context;
ctx               114 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	if (*pdata == ctx->from) {
ctx               116 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 			 (void *)&ctx->from, (void *)&ctx->to);
ctx               118 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		if (*ctx->check)
ctx               119 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 			fast_csum(ctx, (unsigned char *)data - ctx->begin);
ctx               120 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		*pdata = ctx->to;
ctx               132 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	struct snmp_ctx ctx;
ctx               136 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		ctx.from = ct->tuplehash[dir].tuple.src.u3.ip;
ctx               137 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		ctx.to = ct->tuplehash[!dir].tuple.dst.u3.ip;
ctx               139 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		ctx.from = ct->tuplehash[!dir].tuple.src.u3.ip;
ctx               140 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 		ctx.to = ct->tuplehash[dir].tuple.dst.u3.ip;
ctx               143 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	if (ctx.from == ctx.to)
ctx               146 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	ctx.begin = (unsigned char *)udph + sizeof(struct udphdr);
ctx               147 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	ctx.check = &udph->check;
ctx               148 net/ipv4/netfilter/nf_nat_snmp_basic_main.c 	ret = asn1_ber_decoder(&nf_nat_snmp_basic_decoder, &ctx, data, datalen);
ctx                33 net/ipv4/netfilter/nft_dup_ipv4.c static int nft_dup_ipv4_init(const struct nft_ctx *ctx,
ctx               167 net/ipv4/netfilter/nft_fib_ipv4.c nft_fib4_select_ops(const struct nft_ctx *ctx,
ctx               310 net/ipv4/sysctl_net_ipv4.c 	struct tcp_fastopen_context *ctx;
ctx               321 net/ipv4/sysctl_net_ipv4.c 	ctx = rcu_dereference(net->ipv4.tcp_fastopen_ctx);
ctx               322 net/ipv4/sysctl_net_ipv4.c 	if (ctx) {
ctx               323 net/ipv4/sysctl_net_ipv4.c 		n_keys = tcp_fastopen_context_len(ctx);
ctx               324 net/ipv4/sysctl_net_ipv4.c 		memcpy(&key[0], &ctx->key[0], TCP_FASTOPEN_KEY_LENGTH * n_keys);
ctx              3531 net/ipv4/tcp.c 		struct tcp_fastopen_context *ctx;
ctx              3538 net/ipv4/tcp.c 		ctx = rcu_dereference(icsk->icsk_accept_queue.fastopenq.ctx);
ctx              3539 net/ipv4/tcp.c 		if (ctx) {
ctx              3540 net/ipv4/tcp.c 			key_len = tcp_fastopen_context_len(ctx) *
ctx              3542 net/ipv4/tcp.c 			memcpy(&key[0], &ctx->key[0], key_len);
ctx                38 net/ipv4/tcp_fastopen.c 	struct tcp_fastopen_context *ctx =
ctx                41 net/ipv4/tcp_fastopen.c 	kzfree(ctx);
ctx                46 net/ipv4/tcp_fastopen.c 	struct tcp_fastopen_context *ctx;
ctx                48 net/ipv4/tcp_fastopen.c 	ctx = rcu_dereference_protected(
ctx                49 net/ipv4/tcp_fastopen.c 			inet_csk(sk)->icsk_accept_queue.fastopenq.ctx, 1);
ctx                50 net/ipv4/tcp_fastopen.c 	if (ctx)
ctx                51 net/ipv4/tcp_fastopen.c 		call_rcu(&ctx->rcu, tcp_fastopen_ctx_free);
ctx                72 net/ipv4/tcp_fastopen.c 	struct tcp_fastopen_context *ctx, *octx;
ctx                76 net/ipv4/tcp_fastopen.c 	ctx = kmalloc(sizeof(*ctx), GFP_KERNEL);
ctx                77 net/ipv4/tcp_fastopen.c 	if (!ctx) {
ctx                82 net/ipv4/tcp_fastopen.c 	ctx->key[0].key[0] = get_unaligned_le64(primary_key);
ctx                83 net/ipv4/tcp_fastopen.c 	ctx->key[0].key[1] = get_unaligned_le64(primary_key + 8);
ctx                85 net/ipv4/tcp_fastopen.c 		ctx->key[1].key[0] = get_unaligned_le64(backup_key);
ctx                86 net/ipv4/tcp_fastopen.c 		ctx->key[1].key[1] = get_unaligned_le64(backup_key + 8);
ctx                87 net/ipv4/tcp_fastopen.c 		ctx->num = 2;
ctx                89 net/ipv4/tcp_fastopen.c 		ctx->num = 1;
ctx                95 net/ipv4/tcp_fastopen.c 		octx = rcu_dereference_protected(q->ctx,
ctx                97 net/ipv4/tcp_fastopen.c 		rcu_assign_pointer(q->ctx, ctx);
ctx               101 net/ipv4/tcp_fastopen.c 		rcu_assign_pointer(net->ipv4.tcp_fastopen_ctx, ctx);
ctx               151 net/ipv4/tcp_fastopen.c 	struct tcp_fastopen_context *ctx;
ctx               154 net/ipv4/tcp_fastopen.c 	ctx = tcp_fastopen_get_ctx(sk);
ctx               155 net/ipv4/tcp_fastopen.c 	if (ctx)
ctx               156 net/ipv4/tcp_fastopen.c 		__tcp_fastopen_cookie_gen_cipher(req, syn, &ctx->key[0], foc);
ctx               212 net/ipv4/tcp_fastopen.c 	struct tcp_fastopen_context *ctx;
ctx               216 net/ipv4/tcp_fastopen.c 	ctx = tcp_fastopen_get_ctx(sk);
ctx               217 net/ipv4/tcp_fastopen.c 	if (!ctx)
ctx               219 net/ipv4/tcp_fastopen.c 	for (i = 0; i < tcp_fastopen_context_len(ctx); i++) {
ctx               220 net/ipv4/tcp_fastopen.c 		__tcp_fastopen_cookie_gen_cipher(req, syn, &ctx->key[i], foc);
ctx                31 net/ipv6/netfilter/nft_dup_ipv6.c static int nft_dup_ipv6_init(const struct nft_ctx *ctx,
ctx               212 net/ipv6/netfilter/nft_fib_ipv6.c nft_fib6_select_ops(const struct nft_ctx *ctx,
ctx                14 net/mac80211/chan.c 					  struct ieee80211_chanctx *ctx)
ctx                21 net/mac80211/chan.c 	list_for_each_entry(sdata, &ctx->assigned_vifs, assigned_chanctx_list)
ctx                28 net/mac80211/chan.c 					  struct ieee80211_chanctx *ctx)
ctx                35 net/mac80211/chan.c 	list_for_each_entry(sdata, &ctx->reserved_vifs, reserved_chanctx_list)
ctx                42 net/mac80211/chan.c 			       struct ieee80211_chanctx *ctx)
ctx                44 net/mac80211/chan.c 	return ieee80211_chanctx_num_assigned(local, ctx) +
ctx                45 net/mac80211/chan.c 	       ieee80211_chanctx_num_reserved(local, ctx);
ctx                50 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx                55 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list)
ctx                83 net/mac80211/chan.c 				   struct ieee80211_chanctx *ctx,
ctx                90 net/mac80211/chan.c 	list_for_each_entry(sdata, &ctx->reserved_vifs,
ctx               106 net/mac80211/chan.c 				       struct ieee80211_chanctx *ctx,
ctx               113 net/mac80211/chan.c 	list_for_each_entry(sdata, &ctx->assigned_vifs,
ctx               132 net/mac80211/chan.c 				   struct ieee80211_chanctx *ctx,
ctx               137 net/mac80211/chan.c 	compat = ieee80211_chanctx_reserved_chandef(local, ctx, compat);
ctx               141 net/mac80211/chan.c 	compat = ieee80211_chanctx_non_reserved_chandef(local, ctx, compat);
ctx               150 net/mac80211/chan.c 				      struct ieee80211_chanctx *ctx,
ctx               155 net/mac80211/chan.c 	if (ieee80211_chanctx_combined_chandef(local, ctx, def))
ctx               158 net/mac80211/chan.c 	if (!list_empty(&ctx->reserved_vifs) &&
ctx               159 net/mac80211/chan.c 	    ieee80211_chanctx_reserved_chandef(local, ctx, def))
ctx               170 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx               177 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx               178 net/mac80211/chan.c 		if (ctx->replace_state == IEEE80211_CHANCTX_WILL_BE_REPLACED)
ctx               181 net/mac80211/chan.c 		if (ctx->mode == IEEE80211_CHANCTX_EXCLUSIVE)
ctx               184 net/mac80211/chan.c 		if (!ieee80211_chanctx_can_reserve_chandef(local, ctx,
ctx               188 net/mac80211/chan.c 		return ctx;
ctx               309 net/mac80211/chan.c 				      struct ieee80211_chanctx *ctx)
ctx               317 net/mac80211/chan.c 	if (ctx->conf.def.width == NL80211_CHAN_WIDTH_5 ||
ctx               318 net/mac80211/chan.c 	    ctx->conf.def.width == NL80211_CHAN_WIDTH_10 ||
ctx               319 net/mac80211/chan.c 	    ctx->conf.radar_enabled) {
ctx               320 net/mac80211/chan.c 		ctx->conf.min_def = ctx->conf.def;
ctx               324 net/mac80211/chan.c 	max_bw = ieee80211_get_chanctx_max_required_bw(local, &ctx->conf);
ctx               327 net/mac80211/chan.c 	min_def = ctx->conf.def;
ctx               331 net/mac80211/chan.c 	if (cfg80211_chandef_identical(&ctx->conf.min_def, &min_def))
ctx               334 net/mac80211/chan.c 	ctx->conf.min_def = min_def;
ctx               335 net/mac80211/chan.c 	if (!ctx->driver_present)
ctx               338 net/mac80211/chan.c 	drv_change_chanctx(local, ctx, IEEE80211_CHANCTX_CHANGE_MIN_WIDTH);
ctx               342 net/mac80211/chan.c 				     struct ieee80211_chanctx *ctx,
ctx               345 net/mac80211/chan.c 	if (cfg80211_chandef_identical(&ctx->conf.def, chandef)) {
ctx               346 net/mac80211/chan.c 		ieee80211_recalc_chanctx_min_def(local, ctx);
ctx               350 net/mac80211/chan.c 	WARN_ON(!cfg80211_chandef_compatible(&ctx->conf.def, chandef));
ctx               352 net/mac80211/chan.c 	ctx->conf.def = *chandef;
ctx               353 net/mac80211/chan.c 	drv_change_chanctx(local, ctx, IEEE80211_CHANCTX_CHANGE_WIDTH);
ctx               354 net/mac80211/chan.c 	ieee80211_recalc_chanctx_min_def(local, ctx);
ctx               367 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx               374 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx               377 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACE_NONE)
ctx               380 net/mac80211/chan.c 		if (ctx->mode == IEEE80211_CHANCTX_EXCLUSIVE)
ctx               383 net/mac80211/chan.c 		compat = cfg80211_chandef_compatible(&ctx->conf.def, chandef);
ctx               387 net/mac80211/chan.c 		compat = ieee80211_chanctx_reserved_chandef(local, ctx,
ctx               392 net/mac80211/chan.c 		ieee80211_change_chanctx(local, ctx, compat);
ctx               394 net/mac80211/chan.c 		return ctx;
ctx               420 net/mac80211/chan.c 				 struct ieee80211_chanctx *ctx)
ctx               422 net/mac80211/chan.c 	struct ieee80211_chanctx_conf *conf = &ctx->conf;
ctx               451 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx               455 net/mac80211/chan.c 	ctx = kzalloc(sizeof(*ctx) + local->hw.chanctx_data_size, GFP_KERNEL);
ctx               456 net/mac80211/chan.c 	if (!ctx)
ctx               459 net/mac80211/chan.c 	INIT_LIST_HEAD(&ctx->assigned_vifs);
ctx               460 net/mac80211/chan.c 	INIT_LIST_HEAD(&ctx->reserved_vifs);
ctx               461 net/mac80211/chan.c 	ctx->conf.def = *chandef;
ctx               462 net/mac80211/chan.c 	ctx->conf.rx_chains_static = 1;
ctx               463 net/mac80211/chan.c 	ctx->conf.rx_chains_dynamic = 1;
ctx               464 net/mac80211/chan.c 	ctx->mode = mode;
ctx               465 net/mac80211/chan.c 	ctx->conf.radar_enabled = false;
ctx               466 net/mac80211/chan.c 	ieee80211_recalc_chanctx_min_def(local, ctx);
ctx               468 net/mac80211/chan.c 	return ctx;
ctx               472 net/mac80211/chan.c 				 struct ieee80211_chanctx *ctx)
ctx               481 net/mac80211/chan.c 		local->hw.conf.radar_enabled = ctx->conf.radar_enabled;
ctx               489 net/mac80211/chan.c 		local->_oper_chandef = ctx->conf.def;
ctx               492 net/mac80211/chan.c 		err = drv_add_chanctx(local, ctx);
ctx               507 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx               513 net/mac80211/chan.c 	ctx = ieee80211_alloc_chanctx(local, chandef, mode);
ctx               514 net/mac80211/chan.c 	if (!ctx)
ctx               517 net/mac80211/chan.c 	err = ieee80211_add_chanctx(local, ctx);
ctx               519 net/mac80211/chan.c 		kfree(ctx);
ctx               523 net/mac80211/chan.c 	list_add_rcu(&ctx->list, &local->chanctx_list);
ctx               524 net/mac80211/chan.c 	return ctx;
ctx               528 net/mac80211/chan.c 				  struct ieee80211_chanctx *ctx)
ctx               548 net/mac80211/chan.c 		drv_remove_chanctx(local, ctx);
ctx               555 net/mac80211/chan.c 				   struct ieee80211_chanctx *ctx)
ctx               559 net/mac80211/chan.c 	WARN_ON_ONCE(ieee80211_chanctx_refcount(local, ctx) != 0);
ctx               561 net/mac80211/chan.c 	list_del_rcu(&ctx->list);
ctx               562 net/mac80211/chan.c 	ieee80211_del_chanctx(local, ctx);
ctx               563 net/mac80211/chan.c 	kfree_rcu(ctx, rcu_head);
ctx               567 net/mac80211/chan.c 				       struct ieee80211_chanctx *ctx)
ctx               569 net/mac80211/chan.c 	struct ieee80211_chanctx_conf *conf = &ctx->conf;
ctx               613 net/mac80211/chan.c 	ieee80211_change_chanctx(local, ctx, compat);
ctx               835 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx = sdata->reserved_chanctx;
ctx               839 net/mac80211/chan.c 	if (WARN_ON(!ctx))
ctx               845 net/mac80211/chan.c 	if (ieee80211_chanctx_refcount(sdata->local, ctx) == 0) {
ctx               846 net/mac80211/chan.c 		if (ctx->replace_state == IEEE80211_CHANCTX_REPLACES_OTHER) {
ctx               847 net/mac80211/chan.c 			if (WARN_ON(!ctx->replace_ctx))
ctx               850 net/mac80211/chan.c 			WARN_ON(ctx->replace_ctx->replace_state !=
ctx               852 net/mac80211/chan.c 			WARN_ON(ctx->replace_ctx->replace_ctx != ctx);
ctx               854 net/mac80211/chan.c 			ctx->replace_ctx->replace_ctx = NULL;
ctx               855 net/mac80211/chan.c 			ctx->replace_ctx->replace_state =
ctx               858 net/mac80211/chan.c 			list_del_rcu(&ctx->list);
ctx               859 net/mac80211/chan.c 			kfree_rcu(ctx, rcu_head);
ctx               861 net/mac80211/chan.c 			ieee80211_free_chanctx(sdata->local, ctx);
ctx               874 net/mac80211/chan.c 	struct ieee80211_chanctx *new_ctx, *curr_ctx, *ctx;
ctx               915 net/mac80211/chan.c 				list_for_each_entry(ctx, &local->chanctx_list,
ctx               917 net/mac80211/chan.c 					if (ctx->replace_state !=
ctx               921 net/mac80211/chan.c 					if (!list_empty(&ctx->reserved_vifs))
ctx               924 net/mac80211/chan.c 					curr_ctx = ctx;
ctx              1184 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx, *old_ctx;
ctx              1195 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              1196 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1199 net/mac80211/chan.c 		if (WARN_ON(!ctx->replace_ctx)) {
ctx              1204 net/mac80211/chan.c 		list_for_each_entry(sdata, &ctx->reserved_vifs,
ctx              1213 net/mac80211/chan.c 			vif_chsw[i].new_ctx = &ctx->conf;
ctx              1229 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx              1235 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              1236 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1239 net/mac80211/chan.c 		if (!list_empty(&ctx->replace_ctx->assigned_vifs))
ctx              1242 net/mac80211/chan.c 		ieee80211_del_chanctx(local, ctx->replace_ctx);
ctx              1243 net/mac80211/chan.c 		err = ieee80211_add_chanctx(local, ctx);
ctx              1251 net/mac80211/chan.c 	WARN_ON(ieee80211_add_chanctx(local, ctx));
ctx              1252 net/mac80211/chan.c 	list_for_each_entry_continue_reverse(ctx, &local->chanctx_list, list) {
ctx              1253 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1256 net/mac80211/chan.c 		if (!list_empty(&ctx->replace_ctx->assigned_vifs))
ctx              1259 net/mac80211/chan.c 		ieee80211_del_chanctx(local, ctx);
ctx              1260 net/mac80211/chan.c 		WARN_ON(ieee80211_add_chanctx(local, ctx->replace_ctx));
ctx              1269 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx, *ctx_tmp, *old_ctx;
ctx              1293 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              1294 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1297 net/mac80211/chan.c 		if (WARN_ON(!ctx->replace_ctx)) {
ctx              1303 net/mac80211/chan.c 			new_ctx = ctx;
ctx              1311 net/mac80211/chan.c 		list_for_each_entry(sdata, &ctx->replace_ctx->assigned_vifs,
ctx              1332 net/mac80211/chan.c 		ctx->conf.radar_enabled = false;
ctx              1333 net/mac80211/chan.c 		list_for_each_entry(sdata, &ctx->reserved_vifs,
ctx              1351 net/mac80211/chan.c 				ctx->conf.radar_enabled = true;
ctx              1392 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              1393 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1396 net/mac80211/chan.c 		if (WARN_ON(!ctx->replace_ctx)) {
ctx              1401 net/mac80211/chan.c 		list_for_each_entry(sdata, &ctx->reserved_vifs,
ctx              1408 net/mac80211/chan.c 			rcu_assign_pointer(sdata->vif.chanctx_conf, &ctx->conf);
ctx              1430 net/mac80211/chan.c 		ieee80211_recalc_chanctx_chantype(local, ctx);
ctx              1431 net/mac80211/chan.c 		ieee80211_recalc_smps_chanctx(local, ctx);
ctx              1432 net/mac80211/chan.c 		ieee80211_recalc_radar_chanctx(local, ctx);
ctx              1433 net/mac80211/chan.c 		ieee80211_recalc_chanctx_min_def(local, ctx);
ctx              1435 net/mac80211/chan.c 		list_for_each_entry_safe(sdata, sdata_tmp, &ctx->reserved_vifs,
ctx              1437 net/mac80211/chan.c 			if (ieee80211_vif_get_chanctx(sdata) != ctx)
ctx              1442 net/mac80211/chan.c 				  &ctx->assigned_vifs);
ctx              1455 net/mac80211/chan.c 		list_for_each_entry_safe(sdata, sdata_tmp, &ctx->reserved_vifs,
ctx              1461 net/mac80211/chan.c 			if (WARN_ON(sdata->reserved_chanctx != ctx))
ctx              1489 net/mac80211/chan.c 	list_for_each_entry_safe(ctx, ctx_tmp, &local->chanctx_list, list) {
ctx              1490 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_WILL_BE_REPLACED)
ctx              1493 net/mac80211/chan.c 		ctx->replace_ctx->replace_ctx = NULL;
ctx              1494 net/mac80211/chan.c 		ctx->replace_ctx->replace_state =
ctx              1497 net/mac80211/chan.c 		list_del_rcu(&ctx->list);
ctx              1498 net/mac80211/chan.c 		kfree_rcu(ctx, rcu_head);
ctx              1504 net/mac80211/chan.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              1505 net/mac80211/chan.c 		if (ctx->replace_state != IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              1508 net/mac80211/chan.c 		list_for_each_entry_safe(sdata, sdata_tmp, &ctx->reserved_vifs,
ctx              1522 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx              1532 net/mac80211/chan.c 	ctx = container_of(conf, struct ieee80211_chanctx, conf);
ctx              1545 net/mac80211/chan.c 	if (ieee80211_chanctx_refcount(local, ctx) == 0)
ctx              1546 net/mac80211/chan.c 		ieee80211_free_chanctx(local, ctx);
ctx              1560 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx              1587 net/mac80211/chan.c 	ctx = ieee80211_find_chanctx(local, chandef, mode);
ctx              1588 net/mac80211/chan.c 	if (!ctx)
ctx              1589 net/mac80211/chan.c 		ctx = ieee80211_new_chanctx(local, chandef, mode);
ctx              1590 net/mac80211/chan.c 	if (IS_ERR(ctx)) {
ctx              1591 net/mac80211/chan.c 		ret = PTR_ERR(ctx);
ctx              1597 net/mac80211/chan.c 	ret = ieee80211_assign_vif_chanctx(sdata, ctx);
ctx              1600 net/mac80211/chan.c 		if (ieee80211_chanctx_refcount(local, ctx) == 0)
ctx              1601 net/mac80211/chan.c 			ieee80211_free_chanctx(local, ctx);
ctx              1605 net/mac80211/chan.c 	ieee80211_recalc_smps_chanctx(local, ctx);
ctx              1606 net/mac80211/chan.c 	ieee80211_recalc_radar_chanctx(local, ctx);
ctx              1685 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx              1712 net/mac80211/chan.c 	ctx = container_of(conf, struct ieee80211_chanctx, conf);
ctx              1720 net/mac80211/chan.c 	switch (ctx->replace_state) {
ctx              1722 net/mac80211/chan.c 		if (!ieee80211_chanctx_reserved_chandef(local, ctx, compat)) {
ctx              1742 net/mac80211/chan.c 	ieee80211_recalc_chanctx_chantype(local, ctx);
ctx              1789 net/mac80211/chan.c 	struct ieee80211_chanctx *ctx;
ctx              1792 net/mac80211/chan.c 	list_for_each_entry_rcu(ctx, &local->chanctx_list, list)
ctx              1793 net/mac80211/chan.c 		if (ctx->driver_present)
ctx              1794 net/mac80211/chan.c 			iter(hw, &ctx->conf, iter_data);
ctx               856 net/mac80211/driver-ops.h 				  struct ieee80211_chanctx *ctx)
ctx               862 net/mac80211/driver-ops.h 	trace_drv_add_chanctx(local, ctx);
ctx               864 net/mac80211/driver-ops.h 		ret = local->ops->add_chanctx(&local->hw, &ctx->conf);
ctx               867 net/mac80211/driver-ops.h 		ctx->driver_present = true;
ctx               873 net/mac80211/driver-ops.h 				      struct ieee80211_chanctx *ctx)
ctx               877 net/mac80211/driver-ops.h 	if (WARN_ON(!ctx->driver_present))
ctx               880 net/mac80211/driver-ops.h 	trace_drv_remove_chanctx(local, ctx);
ctx               882 net/mac80211/driver-ops.h 		local->ops->remove_chanctx(&local->hw, &ctx->conf);
ctx               884 net/mac80211/driver-ops.h 	ctx->driver_present = false;
ctx               888 net/mac80211/driver-ops.h 				      struct ieee80211_chanctx *ctx,
ctx               893 net/mac80211/driver-ops.h 	trace_drv_change_chanctx(local, ctx, changed);
ctx               895 net/mac80211/driver-ops.h 		WARN_ON_ONCE(!ctx->driver_present);
ctx               896 net/mac80211/driver-ops.h 		local->ops->change_chanctx(&local->hw, &ctx->conf, changed);
ctx               903 net/mac80211/driver-ops.h 					 struct ieee80211_chanctx *ctx)
ctx               910 net/mac80211/driver-ops.h 	trace_drv_assign_vif_chanctx(local, sdata, ctx);
ctx               912 net/mac80211/driver-ops.h 		WARN_ON_ONCE(!ctx->driver_present);
ctx               915 net/mac80211/driver-ops.h 						     &ctx->conf);
ctx               924 net/mac80211/driver-ops.h 					    struct ieee80211_chanctx *ctx)
ctx               931 net/mac80211/driver-ops.h 	trace_drv_unassign_vif_chanctx(local, sdata, ctx);
ctx               933 net/mac80211/driver-ops.h 		WARN_ON_ONCE(!ctx->driver_present);
ctx               936 net/mac80211/driver-ops.h 						 &ctx->conf);
ctx              2195 net/mac80211/ieee80211_i.h 			       struct ieee80211_chanctx *ctx);
ctx              2200 net/mac80211/ieee80211_i.h 				      struct ieee80211_chanctx *ctx);
ctx              2227 net/mac80211/ieee80211_i.h 				       struct ieee80211_chanctx *ctx);
ctx                53 net/mac80211/key.h 	struct tkip_ctx ctx;
ctx              1253 net/mac80211/tdls.c 	struct ieee80211_chanctx *ctx;
ctx              1263 net/mac80211/tdls.c 		ctx = container_of(conf, struct ieee80211_chanctx, conf);
ctx              1264 net/mac80211/tdls.c 		ieee80211_recalc_chanctx_chantype(local, ctx);
ctx              1282 net/mac80211/tdls.c 				ieee80211_recalc_chanctx_chantype(local, ctx);
ctx                82 net/mac80211/tkip.c static void tkip_mixing_phase1(const u8 *tk, struct tkip_ctx *ctx,
ctx                86 net/mac80211/tkip.c 	u16 *p1k = ctx->p1k;
ctx               102 net/mac80211/tkip.c 	ctx->state = TKIP_STATE_PHASE1_DONE;
ctx               103 net/mac80211/tkip.c 	ctx->p1k_iv32 = tsc_IV32;
ctx               106 net/mac80211/tkip.c static void tkip_mixing_phase2(const u8 *tk, struct tkip_ctx *ctx,
ctx               110 net/mac80211/tkip.c 	const u16 *p1k = ctx->p1k;
ctx               155 net/mac80211/tkip.c 	struct tkip_ctx *ctx = &key->u.tkip.tx;
ctx               167 net/mac80211/tkip.c 	if (ctx->p1k_iv32 != iv32 || ctx->state == TKIP_STATE_NOT_INIT)
ctx               168 net/mac80211/tkip.c 		tkip_mixing_phase1(tk, ctx, sdata->vif.addr, iv32);
ctx               176 net/mac80211/tkip.c 	struct tkip_ctx *ctx = &key->u.tkip.tx;
ctx               180 net/mac80211/tkip.c 	memcpy(p1k, ctx->p1k, sizeof(ctx->p1k));
ctx               189 net/mac80211/tkip.c 	struct tkip_ctx ctx;
ctx               191 net/mac80211/tkip.c 	tkip_mixing_phase1(tk, &ctx, ta, iv32);
ctx               192 net/mac80211/tkip.c 	memcpy(p1k, ctx.p1k, sizeof(ctx.p1k));
ctx               202 net/mac80211/tkip.c 	struct tkip_ctx *ctx = &key->u.tkip.tx;
ctx               210 net/mac80211/tkip.c 	tkip_mixing_phase2(tk, ctx, iv16, p2k);
ctx               222 net/mac80211/tkip.c int ieee80211_tkip_encrypt_data(struct arc4_ctx *ctx,
ctx               231 net/mac80211/tkip.c 	return ieee80211_wep_encrypt_data(ctx, rc4key, 16,
ctx               239 net/mac80211/tkip.c int ieee80211_tkip_decrypt_data(struct arc4_ctx *ctx,
ctx               280 net/mac80211/tkip.c 		rx_ctx->ctx.state != TKIP_STATE_NOT_INIT)))))
ctx               285 net/mac80211/tkip.c 		rx_ctx->ctx.state = TKIP_STATE_PHASE1_HW_UPLOADED;
ctx               289 net/mac80211/tkip.c 	if (rx_ctx->ctx.state == TKIP_STATE_NOT_INIT ||
ctx               292 net/mac80211/tkip.c 		tkip_mixing_phase1(tk, &rx_ctx->ctx, ta, iv32);
ctx               296 net/mac80211/tkip.c 	    rx_ctx->ctx.state != TKIP_STATE_PHASE1_HW_UPLOADED) {
ctx               303 net/mac80211/tkip.c 				iv32, rx_ctx->ctx.p1k);
ctx               304 net/mac80211/tkip.c 		rx_ctx->ctx.state = TKIP_STATE_PHASE1_HW_UPLOADED;
ctx               307 net/mac80211/tkip.c 	tkip_mixing_phase2(tk, &rx_ctx->ctx, iv16, rc4key);
ctx               309 net/mac80211/tkip.c 	res = ieee80211_wep_decrypt_data(ctx, rc4key, 16, pos, payload_len - 12);
ctx                13 net/mac80211/tkip.h int ieee80211_tkip_encrypt_data(struct arc4_ctx *ctx,
ctx                24 net/mac80211/tkip.h int ieee80211_tkip_decrypt_data(struct arc4_ctx *ctx,
ctx                71 net/mac80211/trace.h #define CHANCTX_ASSIGN	CHANDEF_ASSIGN(&ctx->conf.def)					\
ctx                72 net/mac80211/trace.h 			MIN_CHANDEF_ASSIGN(&ctx->conf.min_def)				\
ctx                73 net/mac80211/trace.h 			__entry->rx_chains_static = ctx->conf.rx_chains_static;		\
ctx                74 net/mac80211/trace.h 			__entry->rx_chains_dynamic = ctx->conf.rx_chains_dynamic
ctx              1482 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1484 net/mac80211/trace.h 	TP_ARGS(local, ctx),
ctx              1504 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1505 net/mac80211/trace.h 	TP_ARGS(local, ctx)
ctx              1510 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1511 net/mac80211/trace.h 	TP_ARGS(local, ctx)
ctx              1516 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx,
ctx              1519 net/mac80211/trace.h 	TP_ARGS(local, ctx, changed),
ctx              1627 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1629 net/mac80211/trace.h 	TP_ARGS(local, sdata, ctx),
ctx              1652 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1653 net/mac80211/trace.h 	TP_ARGS(local, sdata, ctx)
ctx              1659 net/mac80211/trace.h 		 struct ieee80211_chanctx *ctx),
ctx              1660 net/mac80211/trace.h 	TP_ARGS(local, sdata, ctx)
ctx              1306 net/mac80211/tx.c 					  void *ctx)
ctx              1313 net/mac80211/tx.c 	txqi = ctx;
ctx              1326 net/mac80211/tx.c 			    void *ctx)
ctx              1332 net/mac80211/tx.c 	txqi = ctx;
ctx              2016 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              2046 net/mac80211/util.c 	list_for_each_entry(ctx, &local->chanctx_list, list)
ctx              2047 net/mac80211/util.c 		ctx->driver_present = false;
ctx              2057 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              2066 net/mac80211/util.c 		ctx = container_of(conf, struct ieee80211_chanctx, conf);
ctx              2067 net/mac80211/util.c 		drv_assign_vif_chanctx(local, sdata, ctx);
ctx              2138 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              2263 net/mac80211/util.c 		list_for_each_entry(ctx, &local->chanctx_list, list)
ctx              2264 net/mac80211/util.c 			if (ctx->replace_state !=
ctx              2266 net/mac80211/util.c 				WARN_ON(drv_add_chanctx(local, ctx));
ctx              3349 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              3353 net/mac80211/util.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              3354 net/mac80211/util.c 		if (ctx->replace_state == IEEE80211_CHANCTX_REPLACES_OTHER)
ctx              3358 net/mac80211/util.c 		chandef = ctx->conf.def;
ctx              3812 net/mac80211/util.c 					 struct ieee80211_chanctx *ctx)
ctx              3819 net/mac80211/util.c 	if (WARN_ON(ctx->replace_state == IEEE80211_CHANCTX_WILL_BE_REPLACED))
ctx              3822 net/mac80211/util.c 	list_for_each_entry(sdata, &ctx->reserved_vifs, reserved_chanctx_list)
ctx              3830 net/mac80211/util.c 	WARN_ON(ctx->replace_state == IEEE80211_CHANCTX_REPLACES_OTHER &&
ctx              3831 net/mac80211/util.c 		!list_empty(&ctx->assigned_vifs));
ctx              3833 net/mac80211/util.c 	list_for_each_entry(sdata, &ctx->assigned_vifs, assigned_chanctx_list)
ctx              3848 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              3889 net/mac80211/util.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              3890 net/mac80211/util.c 		if (ctx->replace_state == IEEE80211_CHANCTX_WILL_BE_REPLACED)
ctx              3893 net/mac80211/util.c 			ieee80211_chanctx_radar_detect(local, ctx);
ctx              3894 net/mac80211/util.c 		if (ctx->mode == IEEE80211_CHANCTX_EXCLUSIVE) {
ctx              3900 net/mac80211/util.c 						&ctx->conf.def))
ctx              3939 net/mac80211/util.c 	struct ieee80211_chanctx *ctx;
ctx              3946 net/mac80211/util.c 	list_for_each_entry(ctx, &local->chanctx_list, list) {
ctx              3947 net/mac80211/util.c 		if (ctx->replace_state == IEEE80211_CHANCTX_WILL_BE_REPLACED)
ctx              3953 net/mac80211/util.c 			ieee80211_chanctx_radar_detect(local, ctx);
ctx               110 net/mac80211/wep.c int ieee80211_wep_encrypt_data(struct arc4_ctx *ctx, u8 *rc4key,
ctx               118 net/mac80211/wep.c 	arc4_setkey(ctx, rc4key, klen);
ctx               119 net/mac80211/wep.c 	arc4_crypt(ctx, data, data, data_len + IEEE80211_WEP_ICV_LEN);
ctx               120 net/mac80211/wep.c 	memzero_explicit(ctx, sizeof(*ctx));
ctx               167 net/mac80211/wep.c int ieee80211_wep_decrypt_data(struct arc4_ctx *ctx, u8 *rc4key,
ctx               172 net/mac80211/wep.c 	arc4_setkey(ctx, rc4key, klen);
ctx               173 net/mac80211/wep.c 	arc4_crypt(ctx, data, data, data_len + IEEE80211_WEP_ICV_LEN);
ctx               174 net/mac80211/wep.c 	memzero_explicit(ctx, sizeof(*ctx));
ctx                17 net/mac80211/wep.h int ieee80211_wep_encrypt_data(struct arc4_ctx *ctx, u8 *rc4key,
ctx                22 net/mac80211/wep.h int ieee80211_wep_decrypt_data(struct arc4_ctx *ctx, u8 *rc4key,
ctx                54 net/netfilter/nf_dup_netdev.c int nft_fwd_dup_netdev_offload(struct nft_offload_ctx *ctx,
ctx                62 net/netfilter/nf_dup_netdev.c 	dev = dev_get_by_index(ctx->net, oif);
ctx                66 net/netfilter/nf_dup_netdev.c 	entry = &flow->rule->action.entries[ctx->num_actions++];
ctx                86 net/netfilter/nf_tables_api.c static void nft_ctx_init(struct nft_ctx *ctx,
ctx                95 net/netfilter/nf_tables_api.c 	ctx->net	= net;
ctx                96 net/netfilter/nf_tables_api.c 	ctx->family	= family;
ctx                97 net/netfilter/nf_tables_api.c 	ctx->level	= 0;
ctx                98 net/netfilter/nf_tables_api.c 	ctx->table	= table;
ctx                99 net/netfilter/nf_tables_api.c 	ctx->chain	= chain;
ctx               100 net/netfilter/nf_tables_api.c 	ctx->nla   	= nla;
ctx               101 net/netfilter/nf_tables_api.c 	ctx->portid	= NETLINK_CB(skb).portid;
ctx               102 net/netfilter/nf_tables_api.c 	ctx->report	= nlmsg_report(nlh);
ctx               103 net/netfilter/nf_tables_api.c 	ctx->flags	= nlh->nlmsg_flags;
ctx               104 net/netfilter/nf_tables_api.c 	ctx->seq	= nlh->nlmsg_seq;
ctx               107 net/netfilter/nf_tables_api.c static struct nft_trans *nft_trans_alloc_gfp(const struct nft_ctx *ctx,
ctx               117 net/netfilter/nf_tables_api.c 	trans->ctx	= *ctx;
ctx               122 net/netfilter/nf_tables_api.c static struct nft_trans *nft_trans_alloc(const struct nft_ctx *ctx,
ctx               125 net/netfilter/nf_tables_api.c 	return nft_trans_alloc_gfp(ctx, msg_type, size, GFP_KERNEL);
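nft_ctx_init() fills a request context from netlink message metadata, and nft_trans_alloc_gfp() embeds a *copy* of that context in every transaction record so it stays valid after the request returns. A minimal userspace sketch of that pairing, with simplified stand-in fields:

    #include <stdlib.h>
    #include <stdio.h>

    struct ctx {
            int family;
            unsigned int portid, seq;
            const char *table, *chain;
    };

    struct trans {
            int msg_type;
            struct ctx ctx;         /* copied, not referenced */
            char data[];            /* type-specific payload */
    };

    static struct trans *trans_alloc(const struct ctx *ctx, int msg_type,
                                     size_t size)
    {
            struct trans *t = calloc(1, sizeof(*t) + size);

            if (!t)
                    return NULL;
            t->msg_type = msg_type;
            t->ctx = *ctx;          /* mirrors: trans->ctx = *ctx; */
            return t;
    }

    int main(void)
    {
            struct ctx ctx = { .family = 2, .portid = 42, .seq = 7,
                               .table = "filter", .chain = "input" };
            struct trans *t = trans_alloc(&ctx, /* NEWTABLE */ 0, 16);

            printf("trans for %s/%s, seq %u\n",
                   t->ctx.table, t->ctx.chain, t->ctx.seq);
            free(t);
            return 0;
    }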
ctx               134 net/netfilter/nf_tables_api.c static void nft_set_trans_bind(const struct nft_ctx *ctx, struct nft_set *set)
ctx               136 net/netfilter/nf_tables_api.c 	struct net *net = ctx->net;
ctx               195 net/netfilter/nf_tables_api.c static int nft_trans_table_add(struct nft_ctx *ctx, int msg_type)
ctx               199 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_table));
ctx               204 net/netfilter/nf_tables_api.c 		nft_activate_next(ctx->net, ctx->table);
ctx               206 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               210 net/netfilter/nf_tables_api.c static int nft_deltable(struct nft_ctx *ctx)
ctx               214 net/netfilter/nf_tables_api.c 	err = nft_trans_table_add(ctx, NFT_MSG_DELTABLE);
ctx               218 net/netfilter/nf_tables_api.c 	nft_deactivate_next(ctx->net, ctx->table);
ctx               222 net/netfilter/nf_tables_api.c static struct nft_trans *nft_trans_chain_add(struct nft_ctx *ctx, int msg_type)
ctx               226 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_chain));
ctx               231 net/netfilter/nf_tables_api.c 		nft_activate_next(ctx->net, ctx->chain);
ctx               233 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               237 net/netfilter/nf_tables_api.c static int nft_delchain(struct nft_ctx *ctx)
ctx               241 net/netfilter/nf_tables_api.c 	trans = nft_trans_chain_add(ctx, NFT_MSG_DELCHAIN);
ctx               245 net/netfilter/nf_tables_api.c 	ctx->table->use--;
ctx               246 net/netfilter/nf_tables_api.c 	nft_deactivate_next(ctx->net, ctx->chain);
ctx               251 net/netfilter/nf_tables_api.c static void nft_rule_expr_activate(const struct nft_ctx *ctx,
ctx               259 net/netfilter/nf_tables_api.c 			expr->ops->activate(ctx, expr);
ctx               265 net/netfilter/nf_tables_api.c static void nft_rule_expr_deactivate(const struct nft_ctx *ctx,
ctx               274 net/netfilter/nf_tables_api.c 			expr->ops->deactivate(ctx, expr, phase);
ctx               281 net/netfilter/nf_tables_api.c nf_tables_delrule_deactivate(struct nft_ctx *ctx, struct nft_rule *rule)
ctx               284 net/netfilter/nf_tables_api.c 	if (nft_is_active_next(ctx->net, rule)) {
ctx               285 net/netfilter/nf_tables_api.c 		nft_deactivate_next(ctx->net, rule);
ctx               286 net/netfilter/nf_tables_api.c 		ctx->chain->use--;
ctx               292 net/netfilter/nf_tables_api.c static struct nft_trans *nft_trans_rule_add(struct nft_ctx *ctx, int msg_type,
ctx               297 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_rule));
ctx               301 net/netfilter/nf_tables_api.c 	if (msg_type == NFT_MSG_NEWRULE && ctx->nla[NFTA_RULE_ID] != NULL) {
ctx               303 net/netfilter/nf_tables_api.c 			ntohl(nla_get_be32(ctx->nla[NFTA_RULE_ID]));
ctx               306 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               311 net/netfilter/nf_tables_api.c static int nft_delrule(struct nft_ctx *ctx, struct nft_rule *rule)
ctx               316 net/netfilter/nf_tables_api.c 	trans = nft_trans_rule_add(ctx, NFT_MSG_DELRULE, rule);
ctx               320 net/netfilter/nf_tables_api.c 	err = nf_tables_delrule_deactivate(ctx, rule);
ctx               325 net/netfilter/nf_tables_api.c 	nft_rule_expr_deactivate(ctx, rule, NFT_TRANS_PREPARE);
ctx               330 net/netfilter/nf_tables_api.c static int nft_delrule_by_chain(struct nft_ctx *ctx)
ctx               335 net/netfilter/nf_tables_api.c 	list_for_each_entry(rule, &ctx->chain->rules, list) {
ctx               336 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, rule))
ctx               339 net/netfilter/nf_tables_api.c 		err = nft_delrule(ctx, rule);
ctx               346 net/netfilter/nf_tables_api.c static int nft_trans_set_add(const struct nft_ctx *ctx, int msg_type,
ctx               351 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_set));
ctx               355 net/netfilter/nf_tables_api.c 	if (msg_type == NFT_MSG_NEWSET && ctx->nla[NFTA_SET_ID] != NULL) {
ctx               357 net/netfilter/nf_tables_api.c 			ntohl(nla_get_be32(ctx->nla[NFTA_SET_ID]));
ctx               358 net/netfilter/nf_tables_api.c 		nft_activate_next(ctx->net, set);
ctx               361 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               366 net/netfilter/nf_tables_api.c static int nft_delset(const struct nft_ctx *ctx, struct nft_set *set)
ctx               370 net/netfilter/nf_tables_api.c 	err = nft_trans_set_add(ctx, NFT_MSG_DELSET, set);
ctx               374 net/netfilter/nf_tables_api.c 	nft_deactivate_next(ctx->net, set);
ctx               375 net/netfilter/nf_tables_api.c 	ctx->table->use--;
ctx               380 net/netfilter/nf_tables_api.c static int nft_trans_obj_add(struct nft_ctx *ctx, int msg_type,
ctx               385 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_obj));
ctx               390 net/netfilter/nf_tables_api.c 		nft_activate_next(ctx->net, obj);
ctx               393 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               398 net/netfilter/nf_tables_api.c static int nft_delobj(struct nft_ctx *ctx, struct nft_object *obj)
ctx               402 net/netfilter/nf_tables_api.c 	err = nft_trans_obj_add(ctx, NFT_MSG_DELOBJ, obj);
ctx               406 net/netfilter/nf_tables_api.c 	nft_deactivate_next(ctx->net, obj);
ctx               407 net/netfilter/nf_tables_api.c 	ctx->table->use--;
ctx               412 net/netfilter/nf_tables_api.c static int nft_trans_flowtable_add(struct nft_ctx *ctx, int msg_type,
ctx               417 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type,
ctx               423 net/netfilter/nf_tables_api.c 		nft_activate_next(ctx->net, flowtable);
ctx               426 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               431 net/netfilter/nf_tables_api.c static int nft_delflowtable(struct nft_ctx *ctx,
ctx               436 net/netfilter/nf_tables_api.c 	err = nft_trans_flowtable_add(ctx, NFT_MSG_DELFLOWTABLE, flowtable);
ctx               440 net/netfilter/nf_tables_api.c 	nft_deactivate_next(ctx->net, flowtable);
ctx               441 net/netfilter/nf_tables_api.c 	ctx->table->use--;
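All the nft_del*() helpers above follow one two-phase pattern: queue a DEL transaction, drop the owner's use counter, and call nft_deactivate_next() so the object vanishes only in the *next* generation; nothing is freed until the batch commits. The sketch below models that with a two-bit visibility mask. It is a simplified model for illustration, not the kernel's exact genmask encoding.

    #include <stdio.h>

    #define GEN_CUR  0x1
    #define GEN_NEXT 0x2

    struct object {
            unsigned int genmask;   /* visibility in current/next generation */
    };

    static void deactivate_next(struct object *obj)
    {
            obj->genmask &= ~GEN_NEXT;      /* invisible after commit */
    }

    static void commit(struct object *obj)
    {
            /* the next generation becomes the current one */
            obj->genmask = (obj->genmask & GEN_NEXT) ? (GEN_CUR | GEN_NEXT) : 0;
    }

    static int is_active(const struct object *obj, unsigned int gen)
    {
            return !!(obj->genmask & gen);
    }

    int main(void)
    {
            struct object table = { GEN_CUR | GEN_NEXT };

            deactivate_next(&table);        /* like nft_deltable() */
            printf("before commit: cur=%d next=%d\n",
                   is_active(&table, GEN_CUR), is_active(&table, GEN_NEXT));
            commit(&table);
            printf("after commit:  cur=%d\n", is_active(&table, GEN_CUR));
            return 0;
    }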
ctx               627 net/netfilter/nf_tables_api.c static void nf_tables_table_notify(const struct nft_ctx *ctx, int event)
ctx               632 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx               633 net/netfilter/nf_tables_api.c 	    !nfnetlink_has_listeners(ctx->net, NFNLGRP_NFTABLES))
ctx               640 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_table_info(skb, ctx->net, ctx->portid, ctx->seq,
ctx               641 net/netfilter/nf_tables_api.c 					event, 0, ctx->family, ctx->table);
ctx               647 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, ctx->net, ctx->portid, NFNLGRP_NFTABLES,
ctx               648 net/netfilter/nf_tables_api.c 		       ctx->report, GFP_KERNEL);
ctx               651 net/netfilter/nf_tables_api.c 	nfnetlink_set_err(ctx->net, ctx->portid, NFNLGRP_NFTABLES, -ENOBUFS);
ctx               803 net/netfilter/nf_tables_api.c static int nf_tables_updtable(struct nft_ctx *ctx)
ctx               809 net/netfilter/nf_tables_api.c 	if (!ctx->nla[NFTA_TABLE_FLAGS])
ctx               812 net/netfilter/nf_tables_api.c 	flags = ntohl(nla_get_be32(ctx->nla[NFTA_TABLE_FLAGS]));
ctx               816 net/netfilter/nf_tables_api.c 	if (flags == ctx->table->flags)
ctx               819 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, NFT_MSG_NEWTABLE,
ctx               825 net/netfilter/nf_tables_api.c 	    !(ctx->table->flags & NFT_TABLE_F_DORMANT)) {
ctx               828 net/netfilter/nf_tables_api.c 		   ctx->table->flags & NFT_TABLE_F_DORMANT) {
ctx               829 net/netfilter/nf_tables_api.c 		ret = nf_tables_table_enable(ctx->net, ctx->table);
ctx               831 net/netfilter/nf_tables_api.c 			ctx->table->flags &= ~NFT_TABLE_F_DORMANT;
ctx               839 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx               908 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx               925 net/netfilter/nf_tables_api.c 		nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx               926 net/netfilter/nf_tables_api.c 		return nf_tables_updtable(&ctx);
ctx               956 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx               957 net/netfilter/nf_tables_api.c 	err = nft_trans_table_add(&ctx, NFT_MSG_NEWTABLE);
ctx               973 net/netfilter/nf_tables_api.c static int nft_flush_table(struct nft_ctx *ctx)
ctx               981 net/netfilter/nf_tables_api.c 	list_for_each_entry(chain, &ctx->table->chains, list) {
ctx               982 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, chain))
ctx               985 net/netfilter/nf_tables_api.c 		ctx->chain = chain;
ctx               987 net/netfilter/nf_tables_api.c 		err = nft_delrule_by_chain(ctx);
ctx               992 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(set, ns, &ctx->table->sets, list) {
ctx               993 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, set))
ctx              1000 net/netfilter/nf_tables_api.c 		err = nft_delset(ctx, set);
ctx              1005 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(flowtable, nft, &ctx->table->flowtables, list) {
ctx              1006 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, flowtable))
ctx              1009 net/netfilter/nf_tables_api.c 		err = nft_delflowtable(ctx, flowtable);
ctx              1014 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(obj, ne, &ctx->table->objects, list) {
ctx              1015 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, obj))
ctx              1018 net/netfilter/nf_tables_api.c 		err = nft_delobj(ctx, obj);
ctx              1023 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(chain, nc, &ctx->table->chains, list) {
ctx              1024 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, chain))
ctx              1027 net/netfilter/nf_tables_api.c 		ctx->chain = chain;
ctx              1029 net/netfilter/nf_tables_api.c 		err = nft_delchain(ctx);
ctx              1034 net/netfilter/nf_tables_api.c 	err = nft_deltable(ctx);
ctx              1039 net/netfilter/nf_tables_api.c static int nft_flush(struct nft_ctx *ctx, int family)
ctx              1042 net/netfilter/nf_tables_api.c 	const struct nlattr * const *nla = ctx->nla;
ctx              1045 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(table, nt, &ctx->net->nft.tables, list) {
ctx              1049 net/netfilter/nf_tables_api.c 		ctx->family = table->family;
ctx              1051 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, table))
ctx              1058 net/netfilter/nf_tables_api.c 		ctx->table = table;
ctx              1060 net/netfilter/nf_tables_api.c 		err = nft_flush_table(ctx);
ctx              1078 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              1080 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, 0, NULL, NULL, nla);
ctx              1083 net/netfilter/nf_tables_api.c 		return nft_flush(&ctx, family);
ctx              1102 net/netfilter/nf_tables_api.c 	ctx.family = family;
ctx              1103 net/netfilter/nf_tables_api.c 	ctx.table = table;
ctx              1105 net/netfilter/nf_tables_api.c 	return nft_flush_table(&ctx);
ctx              1108 net/netfilter/nf_tables_api.c static void nf_tables_table_destroy(struct nft_ctx *ctx)
ctx              1110 net/netfilter/nf_tables_api.c 	if (WARN_ON(ctx->table->use > 0))
ctx              1113 net/netfilter/nf_tables_api.c 	rhltable_destroy(&ctx->table->chains_ht);
ctx              1114 net/netfilter/nf_tables_api.c 	kfree(ctx->table->name);
ctx              1115 net/netfilter/nf_tables_api.c 	kfree(ctx->table);
ctx              1330 net/netfilter/nf_tables_api.c static void nf_tables_chain_notify(const struct nft_ctx *ctx, int event)
ctx              1335 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx              1336 net/netfilter/nf_tables_api.c 	    !nfnetlink_has_listeners(ctx->net, NFNLGRP_NFTABLES))
ctx              1343 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_chain_info(skb, ctx->net, ctx->portid, ctx->seq,
ctx              1344 net/netfilter/nf_tables_api.c 					event, 0, ctx->family, ctx->table,
ctx              1345 net/netfilter/nf_tables_api.c 					ctx->chain);
ctx              1351 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, ctx->net, ctx->portid, NFNLGRP_NFTABLES,
ctx              1352 net/netfilter/nf_tables_api.c 		       ctx->report, GFP_KERNEL);
ctx              1355 net/netfilter/nf_tables_api.c 	nfnetlink_set_err(ctx->net, ctx->portid, NFNLGRP_NFTABLES, -ENOBUFS);
ctx              1493 net/netfilter/nf_tables_api.c 	struct nft_base_chain *chain = nft_base_chain(trans->ctx.chain);
ctx              1499 net/netfilter/nf_tables_api.c 			   lockdep_commit_lock_is_held(trans->ctx.net));
ctx              1519 net/netfilter/nf_tables_api.c static void nf_tables_chain_destroy(struct nft_ctx *ctx)
ctx              1521 net/netfilter/nf_tables_api.c 	struct nft_chain *chain = ctx->chain;
ctx              1650 net/netfilter/nf_tables_api.c static int nf_tables_addchain(struct nft_ctx *ctx, u8 family, u8 genmask,
ctx              1653 net/netfilter/nf_tables_api.c 	const struct nlattr * const *nla = ctx->nla;
ctx              1654 net/netfilter/nf_tables_api.c 	struct nft_table *table = ctx->table;
ctx              1657 net/netfilter/nf_tables_api.c 	struct net *net = ctx->net;
ctx              1717 net/netfilter/nf_tables_api.c 	ctx->chain = chain;
ctx              1747 net/netfilter/nf_tables_api.c 	trans = nft_trans_chain_add(ctx, NFT_MSG_NEWCHAIN);
ctx              1766 net/netfilter/nf_tables_api.c 	nf_tables_chain_destroy(ctx);
ctx              1771 net/netfilter/nf_tables_api.c static int nf_tables_updchain(struct nft_ctx *ctx, u8 genmask, u8 policy,
ctx              1774 net/netfilter/nf_tables_api.c 	const struct nlattr * const *nla = ctx->nla;
ctx              1775 net/netfilter/nf_tables_api.c 	struct nft_table *table = ctx->table;
ctx              1776 net/netfilter/nf_tables_api.c 	struct nft_chain *chain = ctx->chain;
ctx              1791 net/netfilter/nf_tables_api.c 		err = nft_chain_parse_hook(ctx->net, nla, &hook, ctx->family,
ctx              1816 net/netfilter/nf_tables_api.c 		chain2 = nft_chain_lookup(ctx->net, table,
ctx              1832 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, NFT_MSG_NEWCHAIN,
ctx              1856 net/netfilter/nf_tables_api.c 		list_for_each_entry(tmp, &ctx->net->nft.commit_list, list) {
ctx              1858 net/netfilter/nf_tables_api.c 			    tmp->ctx.table == table &&
ctx              1869 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx              1890 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              1952 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, chain, nla);
ctx              1963 net/netfilter/nf_tables_api.c 		return nf_tables_updchain(&ctx, genmask, policy, flags);
ctx              1966 net/netfilter/nf_tables_api.c 	return nf_tables_addchain(&ctx, family, genmask, policy, flags);
ctx              1981 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              2009 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, chain, nla);
ctx              2017 net/netfilter/nf_tables_api.c 		err = nft_delrule(&ctx, rule);
ctx              2030 net/netfilter/nf_tables_api.c 	return nft_delchain(&ctx);
ctx              2176 net/netfilter/nf_tables_api.c static int nf_tables_expr_parse(const struct nft_ctx *ctx,
ctx              2190 net/netfilter/nf_tables_api.c 	type = nft_expr_type_get(ctx->net, ctx->family, tb[NFTA_EXPR_NAME]);
ctx              2204 net/netfilter/nf_tables_api.c 		ops = type->select_ops(ctx,
ctx              2210 net/netfilter/nf_tables_api.c 				if (nft_expr_type_request_module(ctx->net,
ctx              2211 net/netfilter/nf_tables_api.c 								 ctx->family,
ctx              2228 net/netfilter/nf_tables_api.c static int nf_tables_newexpr(const struct nft_ctx *ctx,
ctx              2237 net/netfilter/nf_tables_api.c 		err = ops->init(ctx, expr, (const struct nlattr **)info->tb);
ctx              2248 net/netfilter/nf_tables_api.c static void nf_tables_expr_destroy(const struct nft_ctx *ctx,
ctx              2254 net/netfilter/nf_tables_api.c 		expr->ops->destroy(ctx, expr);
ctx              2258 net/netfilter/nf_tables_api.c struct nft_expr *nft_expr_init(const struct nft_ctx *ctx,
ctx              2266 net/netfilter/nf_tables_api.c 	err = nf_tables_expr_parse(ctx, nla, &info);
ctx              2275 net/netfilter/nf_tables_api.c 	err = nf_tables_newexpr(ctx, &info, expr);
ctx              2292 net/netfilter/nf_tables_api.c void nft_expr_destroy(const struct nft_ctx *ctx, struct nft_expr *expr)
ctx              2294 net/netfilter/nf_tables_api.c 	nf_tables_expr_destroy(ctx, expr);
ctx              2402 net/netfilter/nf_tables_api.c static void nf_tables_rule_notify(const struct nft_ctx *ctx,
ctx              2408 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx              2409 net/netfilter/nf_tables_api.c 	    !nfnetlink_has_listeners(ctx->net, NFNLGRP_NFTABLES))
ctx              2416 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_rule_info(skb, ctx->net, ctx->portid, ctx->seq,
ctx              2417 net/netfilter/nf_tables_api.c 				       event, 0, ctx->family, ctx->table,
ctx              2418 net/netfilter/nf_tables_api.c 				       ctx->chain, rule, NULL);
ctx              2424 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, ctx->net, ctx->portid, NFNLGRP_NFTABLES,
ctx              2425 net/netfilter/nf_tables_api.c 		       ctx->report, GFP_KERNEL);
ctx              2428 net/netfilter/nf_tables_api.c 	nfnetlink_set_err(ctx->net, ctx->portid, NFNLGRP_NFTABLES, -ENOBUFS);
ctx              2477 net/netfilter/nf_tables_api.c 	const struct nft_rule_dump_ctx *ctx = cb->data;
ctx              2491 net/netfilter/nf_tables_api.c 		if (ctx && ctx->table && strcmp(ctx->table, table->name) != 0)
ctx              2494 net/netfilter/nf_tables_api.c 		if (ctx && ctx->table && ctx->chain) {
ctx              2497 net/netfilter/nf_tables_api.c 			list = rhltable_lookup(&table->chains_ht, ctx->chain,
ctx              2517 net/netfilter/nf_tables_api.c 		if (ctx && ctx->table)
ctx              2530 net/netfilter/nf_tables_api.c 	struct nft_rule_dump_ctx *ctx = NULL;
ctx              2533 net/netfilter/nf_tables_api.c 		ctx = kzalloc(sizeof(*ctx), GFP_ATOMIC);
ctx              2534 net/netfilter/nf_tables_api.c 		if (!ctx)
ctx              2538 net/netfilter/nf_tables_api.c 			ctx->table = nla_strdup(nla[NFTA_RULE_TABLE],
ctx              2540 net/netfilter/nf_tables_api.c 			if (!ctx->table) {
ctx              2541 net/netfilter/nf_tables_api.c 				kfree(ctx);
ctx              2546 net/netfilter/nf_tables_api.c 			ctx->chain = nla_strdup(nla[NFTA_RULE_CHAIN],
ctx              2548 net/netfilter/nf_tables_api.c 			if (!ctx->chain) {
ctx              2549 net/netfilter/nf_tables_api.c 				kfree(ctx->table);
ctx              2550 net/netfilter/nf_tables_api.c 				kfree(ctx);
ctx              2556 net/netfilter/nf_tables_api.c 	cb->data = ctx;
ctx              2562 net/netfilter/nf_tables_api.c 	struct nft_rule_dump_ctx *ctx = cb->data;
ctx              2564 net/netfilter/nf_tables_api.c 	if (ctx) {
ctx              2565 net/netfilter/nf_tables_api.c 		kfree(ctx->table);
ctx              2566 net/netfilter/nf_tables_api.c 		kfree(ctx->chain);
ctx              2567 net/netfilter/nf_tables_api.c 		kfree(ctx);
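nf_tables_dump_rules_start()/done() above show the netlink dump-context lifecycle: strdup the filter strings into a heap context hung off cb->data, unwind partial allocations on failure, and free everything in the done callback. A self-contained sketch of that lifecycle (names are stand-ins):

    #include <stdlib.h>
    #include <string.h>

    struct rule_dump_ctx {
            char *table;
            char *chain;
    };

    /* mirrors the ->start() shape; returns NULL on allocation failure */
    static struct rule_dump_ctx *dump_start(const char *table, const char *chain)
    {
            struct rule_dump_ctx *ctx = calloc(1, sizeof(*ctx));

            if (!ctx)
                    return NULL;
            if (table) {
                    ctx->table = strdup(table);
                    if (!ctx->table)
                            goto err_free_ctx;
            }
            if (chain) {
                    ctx->chain = strdup(chain);
                    if (!ctx->chain)
                            goto err_free_table;
            }
            return ctx;

    err_free_table:
            free(ctx->table);
    err_free_ctx:
            free(ctx);
            return NULL;
    }

    /* mirrors the ->done() shape */
    static void dump_done(struct rule_dump_ctx *ctx)
    {
            if (ctx) {
                    free(ctx->table);
                    free(ctx->chain);
                    free(ctx);
            }
    }

    int main(void)
    {
            struct rule_dump_ctx *ctx = dump_start("filter", "input");

            /* ... iterate rules, filtering on ctx->table / ctx->chain ... */
            dump_done(ctx);
            return 0;
    }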
ctx              2634 net/netfilter/nf_tables_api.c static void nf_tables_rule_destroy(const struct nft_ctx *ctx,
ctx              2646 net/netfilter/nf_tables_api.c 		nf_tables_expr_destroy(ctx, expr);
ctx              2652 net/netfilter/nf_tables_api.c static void nf_tables_rule_release(const struct nft_ctx *ctx,
ctx              2655 net/netfilter/nf_tables_api.c 	nft_rule_expr_deactivate(ctx, rule, NFT_TRANS_RELEASE);
ctx              2656 net/netfilter/nf_tables_api.c 	nf_tables_rule_destroy(ctx, rule);
ctx              2659 net/netfilter/nf_tables_api.c int nft_chain_validate(const struct nft_ctx *ctx, const struct nft_chain *chain)
ctx              2666 net/netfilter/nf_tables_api.c 	if (ctx->level == NFT_JUMP_STACK_SIZE)
ctx              2670 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, rule))
ctx              2677 net/netfilter/nf_tables_api.c 			err = expr->ops->validate(ctx, expr, &data);
ctx              2690 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx = {
ctx              2700 net/netfilter/nf_tables_api.c 		ctx.chain = chain;
ctx              2701 net/netfilter/nf_tables_api.c 		err = nft_chain_validate(&ctx, chain);
ctx              2730 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              2791 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, chain, nla);
ctx              2808 net/netfilter/nf_tables_api.c 			err = nf_tables_expr_parse(&ctx, tmp, &info[n]);
ctx              2845 net/netfilter/nf_tables_api.c 		err = nf_tables_newexpr(&ctx, &info[i], expr);
ctx              2857 net/netfilter/nf_tables_api.c 		trans = nft_trans_rule_add(&ctx, NFT_MSG_NEWRULE, rule);
ctx              2862 net/netfilter/nf_tables_api.c 		err = nft_delrule(&ctx, old_rule);
ctx              2870 net/netfilter/nf_tables_api.c 		trans = nft_trans_rule_add(&ctx, NFT_MSG_NEWRULE, rule);
ctx              2904 net/netfilter/nf_tables_api.c 	nf_tables_rule_release(&ctx, rule);
ctx              2944 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              2961 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, chain, nla);
ctx              2971 net/netfilter/nf_tables_api.c 			err = nft_delrule(&ctx, rule);
ctx              2979 net/netfilter/nf_tables_api.c 			err = nft_delrule(&ctx, rule);
ctx              2981 net/netfilter/nf_tables_api.c 			err = nft_delrule_by_chain(&ctx);
ctx              2988 net/netfilter/nf_tables_api.c 			ctx.chain = chain;
ctx              2989 net/netfilter/nf_tables_api.c 			err = nft_delrule_by_chain(&ctx);
ctx              3036 net/netfilter/nf_tables_api.c nft_select_set_ops(const struct nft_ctx *ctx,
ctx              3046 net/netfilter/nf_tables_api.c 	lockdep_assert_held(&ctx->net->nft.commit_mutex);
ctx              3050 net/netfilter/nf_tables_api.c 		if (nft_request_module(ctx->net, "nft-set") == -EAGAIN)
ctx              3133 net/netfilter/nf_tables_api.c static int nft_ctx_init_from_setattr(struct nft_ctx *ctx, struct net *net,
ctx              3153 net/netfilter/nf_tables_api.c 	nft_ctx_init(ctx, net, skb, nlh, family, table, NULL, nla);
ctx              3224 net/netfilter/nf_tables_api.c static int nf_tables_set_alloc_name(struct nft_ctx *ctx, struct nft_set *set,
ctx              3241 net/netfilter/nf_tables_api.c 		list_for_each_entry(i, &ctx->table->sets, list) {
ctx              3244 net/netfilter/nf_tables_api.c 			if (!nft_is_active_next(ctx->net, set))
ctx              3267 net/netfilter/nf_tables_api.c 	list_for_each_entry(i, &ctx->table->sets, list) {
ctx              3268 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, i))
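nf_tables_set_alloc_name() above supports printf-style name templates such as "set%d": it scans the table's existing set names for numbers already taken and formats the first free one, then separately rejects duplicate explicit names. A simplified stand-alone sketch, using a fixed bitmap instead of the kernel's allocation:

    #include <stdio.h>
    #include <string.h>

    #define MAX_AUTO 64

    static int alloc_name(char *out, size_t outlen, const char *tmpl,
                          const char *existing[], int n_existing)
    {
            unsigned char taken[MAX_AUTO] = { 0 };
            int i, num;

            if (strchr(tmpl, '%')) {  /* only handles the "%d" template case */
                    for (i = 0; i < n_existing; i++) {
                            if (sscanf(existing[i], tmpl, &num) == 1 &&
                                num >= 0 && num < MAX_AUTO)
                                    taken[num] = 1;
                    }
                    for (num = 0; num < MAX_AUTO; num++) {
                            if (!taken[num]) {
                                    snprintf(out, outlen, tmpl, num);
                                    return 0;
                            }
                    }
                    return -1;      /* template space exhausted */
            }
            /* explicit name: just reject duplicates */
            for (i = 0; i < n_existing; i++)
                    if (!strcmp(existing[i], tmpl))
                            return -1;
            snprintf(out, outlen, "%s", tmpl);
            return 0;
    }

    int main(void)
    {
            const char *existing[] = { "set0", "set2" };
            char name[32];

            alloc_name(name, sizeof(name), "set%d", existing, 2);
            printf("%s\n", name);   /* set1 */
            return 0;
    }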
ctx              3297 net/netfilter/nf_tables_api.c static int nf_tables_fill_set(struct sk_buff *skb, const struct nft_ctx *ctx,
ctx              3303 net/netfilter/nf_tables_api.c 	u32 portid = ctx->portid;
ctx              3304 net/netfilter/nf_tables_api.c 	u32 seq = ctx->seq;
ctx              3313 net/netfilter/nf_tables_api.c 	nfmsg->nfgen_family	= ctx->family;
ctx              3315 net/netfilter/nf_tables_api.c 	nfmsg->res_id		= htons(ctx->net->nft.base_seq & 0xffff);
ctx              3317 net/netfilter/nf_tables_api.c 	if (nla_put_string(skb, NFTA_SET_TABLE, ctx->table->name))
ctx              3375 net/netfilter/nf_tables_api.c static void nf_tables_set_notify(const struct nft_ctx *ctx,
ctx              3380 net/netfilter/nf_tables_api.c 	u32 portid = ctx->portid;
ctx              3383 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx              3384 net/netfilter/nf_tables_api.c 	    !nfnetlink_has_listeners(ctx->net, NFNLGRP_NFTABLES))
ctx              3391 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_set(skb, ctx, set, event, 0);
ctx              3397 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, ctx->net, portid, NFNLGRP_NFTABLES, ctx->report,
ctx              3401 net/netfilter/nf_tables_api.c 	nfnetlink_set_err(ctx->net, portid, NFNLGRP_NFTABLES, -ENOBUFS);
ctx              3410 net/netfilter/nf_tables_api.c 	struct nft_ctx *ctx = cb->data, ctx_set;
ctx              3419 net/netfilter/nf_tables_api.c 		if (ctx->family != NFPROTO_UNSPEC &&
ctx              3420 net/netfilter/nf_tables_api.c 		    ctx->family != table->family)
ctx              3423 net/netfilter/nf_tables_api.c 		if (ctx->table && ctx->table != table)
ctx              3439 net/netfilter/nf_tables_api.c 			ctx_set = *ctx;
ctx              3489 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              3495 net/netfilter/nf_tables_api.c 	err = nft_ctx_init_from_setattr(&ctx, net, skb, nlh, nla, extack,
ctx              3505 net/netfilter/nf_tables_api.c 			.data = &ctx,
ctx              3518 net/netfilter/nf_tables_api.c 	set = nft_set_lookup(ctx.table, nla[NFTA_SET_NAME], genmask);
ctx              3526 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_set(skb2, &ctx, set, NFT_MSG_NEWSET, 0);
ctx              3565 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              3677 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              3699 net/netfilter/nf_tables_api.c 	ops = nft_select_set_ops(&ctx, nla, &desc, policy);
ctx              3723 net/netfilter/nf_tables_api.c 	err = nf_tables_set_alloc_name(&ctx, set, name);
ctx              3756 net/netfilter/nf_tables_api.c 	err = nft_trans_set_add(&ctx, NFT_MSG_NEWSET, set);
ctx              3795 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              3803 net/netfilter/nf_tables_api.c 	err = nft_ctx_init_from_setattr(&ctx, net, skb, nlh, nla, extack,
ctx              3810 net/netfilter/nf_tables_api.c 		set = nft_set_lookup_byhandle(ctx.table, attr, genmask);
ctx              3813 net/netfilter/nf_tables_api.c 		set = nft_set_lookup(ctx.table, attr, genmask);
ctx              3826 net/netfilter/nf_tables_api.c 	return nft_delset(&ctx, set);
ctx              3829 net/netfilter/nf_tables_api.c static int nf_tables_bind_check_setelem(const struct nft_ctx *ctx,
ctx              3838 net/netfilter/nf_tables_api.c 	return nft_validate_register_store(ctx, dreg, nft_set_ext_data(ext),
ctx              3844 net/netfilter/nf_tables_api.c int nf_tables_bind_set(const struct nft_ctx *ctx, struct nft_set *set,
ctx              3866 net/netfilter/nf_tables_api.c 		iter.genmask	= nft_genmask_next(ctx->net);
ctx              3872 net/netfilter/nf_tables_api.c 		set->ops->walk(ctx, set, &iter);
ctx              3877 net/netfilter/nf_tables_api.c 	binding->chain = ctx->chain;
ctx              3879 net/netfilter/nf_tables_api.c 	nft_set_trans_bind(ctx, set);
ctx              3886 net/netfilter/nf_tables_api.c static void nf_tables_unbind_set(const struct nft_ctx *ctx, struct nft_set *set,
ctx              3894 net/netfilter/nf_tables_api.c 			nf_tables_set_notify(ctx, set, NFT_MSG_DELSET,
ctx              3899 net/netfilter/nf_tables_api.c void nf_tables_deactivate_set(const struct nft_ctx *ctx, struct nft_set *set,
ctx              3912 net/netfilter/nf_tables_api.c 		nf_tables_unbind_set(ctx, set, binding,
ctx              3918 net/netfilter/nf_tables_api.c void nf_tables_destroy_set(const struct nft_ctx *ctx, struct nft_set *set)
ctx              3984 net/netfilter/nf_tables_api.c static int nft_ctx_init_from_elemattr(struct nft_ctx *ctx, struct net *net,
ctx              4002 net/netfilter/nf_tables_api.c 	nft_ctx_init(ctx, net, skb, nlh, family, table, NULL, nla);
ctx              4086 net/netfilter/nf_tables_api.c static int nf_tables_dump_setelem(const struct nft_ctx *ctx,
ctx              4099 net/netfilter/nf_tables_api.c 	struct nft_ctx		ctx;
ctx              4118 net/netfilter/nf_tables_api.c 		if (dump_ctx->ctx.family != NFPROTO_UNSPEC &&
ctx              4119 net/netfilter/nf_tables_api.c 		    dump_ctx->ctx.family != table->family)
ctx              4122 net/netfilter/nf_tables_api.c 		if (table != dump_ctx->ctx.table)
ctx              4169 net/netfilter/nf_tables_api.c 	set->ops->walk(&dump_ctx->ctx, set, &args.iter);
ctx              4204 net/netfilter/nf_tables_api.c 				       const struct nft_ctx *ctx, u32 seq,
ctx              4221 net/netfilter/nf_tables_api.c 	nfmsg->nfgen_family	= ctx->family;
ctx              4223 net/netfilter/nf_tables_api.c 	nfmsg->res_id		= htons(ctx->net->nft.base_seq & 0xffff);
ctx              4225 net/netfilter/nf_tables_api.c 	if (nla_put_string(skb, NFTA_SET_TABLE, ctx->table->name))
ctx              4264 net/netfilter/nf_tables_api.c static int nft_get_set_elem(struct nft_ctx *ctx, struct nft_set *set,
ctx              4287 net/netfilter/nf_tables_api.c 	err = nft_data_init(ctx, &elem.key.val, sizeof(elem.key), &desc,
ctx              4298 net/netfilter/nf_tables_api.c 	priv = set->ops->get(ctx->net, set, &elem, flags);
ctx              4309 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_setelem_info(skb, ctx, ctx->seq, ctx->portid,
ctx              4314 net/netfilter/nf_tables_api.c 	err = nfnetlink_unicast(skb, ctx->net, ctx->portid, MSG_DONTWAIT);
ctx              4336 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              4339 net/netfilter/nf_tables_api.c 	err = nft_ctx_init_from_elemattr(&ctx, net, skb, nlh, nla, extack,
ctx              4344 net/netfilter/nf_tables_api.c 	set = nft_set_lookup(ctx.table, nla[NFTA_SET_ELEM_LIST_SET], genmask);
ctx              4357 net/netfilter/nf_tables_api.c 			.ctx = ctx,
ctx              4368 net/netfilter/nf_tables_api.c 		err = nft_get_set_elem(&ctx, set, attr);
ctx              4376 net/netfilter/nf_tables_api.c static void nf_tables_setelem_notify(const struct nft_ctx *ctx,
ctx              4381 net/netfilter/nf_tables_api.c 	struct net *net = ctx->net;
ctx              4382 net/netfilter/nf_tables_api.c 	u32 portid = ctx->portid;
ctx              4386 net/netfilter/nf_tables_api.c 	if (!ctx->report && !nfnetlink_has_listeners(net, NFNLGRP_NFTABLES))
ctx              4393 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_setelem_info(skb, ctx, 0, portid, event, flags,
ctx              4400 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, net, portid, NFNLGRP_NFTABLES, ctx->report,
ctx              4407 net/netfilter/nf_tables_api.c static struct nft_trans *nft_trans_elem_alloc(struct nft_ctx *ctx,
ctx              4413 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, msg_type, sizeof(struct nft_trans_elem));
ctx              4454 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx = {
ctx              4466 net/netfilter/nf_tables_api.c 			expr->ops->destroy_clone(&ctx, expr);
ctx              4469 net/netfilter/nf_tables_api.c 			nf_tables_expr_destroy(&ctx, expr);
ctx              4481 net/netfilter/nf_tables_api.c static void nf_tables_set_elem_destroy(const struct nft_ctx *ctx,
ctx              4487 net/netfilter/nf_tables_api.c 		nf_tables_expr_destroy(ctx, nft_set_ext_expr(ext));
ctx              4491 net/netfilter/nf_tables_api.c static int nft_add_set_elem(struct nft_ctx *ctx, struct nft_set *set,
ctx              4495 net/netfilter/nf_tables_api.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx              4568 net/netfilter/nf_tables_api.c 	err = nft_data_init(ctx, &elem.key.val, sizeof(elem.key), &d1,
ctx              4588 net/netfilter/nf_tables_api.c 		obj = nft_obj_lookup(ctx->net, ctx->table,
ctx              4599 net/netfilter/nf_tables_api.c 		err = nft_data_init(ctx, &data, sizeof(data), &d2,
ctx              4611 net/netfilter/nf_tables_api.c 				.net	= ctx->net,
ctx              4612 net/netfilter/nf_tables_api.c 				.family	= ctx->family,
ctx              4613 net/netfilter/nf_tables_api.c 				.table	= ctx->table,
ctx              4629 net/netfilter/nf_tables_api.c 				nft_validate_state_update(ctx->net,
ctx              4667 net/netfilter/nf_tables_api.c 	trans = nft_trans_elem_alloc(ctx, NFT_MSG_NEWSETELEM, set);
ctx              4671 net/netfilter/nf_tables_api.c 	ext->genmask = nft_genmask_cur(ctx->net) | NFT_SET_ELEM_BUSY_MASK;
ctx              4672 net/netfilter/nf_tables_api.c 	err = set->ops->insert(ctx->net, set, &elem, &ext2);
ctx              4703 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx              4707 net/netfilter/nf_tables_api.c 	set->ops->remove(ctx->net, set, &elem);
ctx              4731 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              4737 net/netfilter/nf_tables_api.c 	err = nft_ctx_init_from_elemattr(&ctx, net, skb, nlh, nla, extack,
ctx              4742 net/netfilter/nf_tables_api.c 	set = nft_set_lookup_global(net, ctx.table, nla[NFTA_SET_ELEM_LIST_SET],
ctx              4751 net/netfilter/nf_tables_api.c 		err = nft_add_set_elem(&ctx, set, attr, nlh->nlmsg_flags);
ctx              4757 net/netfilter/nf_tables_api.c 		return nft_table_validate(net, ctx.table);
ctx              4809 net/netfilter/nf_tables_api.c static int nft_del_setelem(struct nft_ctx *ctx, struct nft_set *set,
ctx              4839 net/netfilter/nf_tables_api.c 	err = nft_data_init(ctx, &elem.key.val, sizeof(elem.key), &desc,
ctx              4860 net/netfilter/nf_tables_api.c 	trans = nft_trans_elem_alloc(ctx, NFT_MSG_DELSETELEM, set);
ctx              4866 net/netfilter/nf_tables_api.c 	priv = set->ops->deactivate(ctx->net, set, &elem);
ctx              4874 net/netfilter/nf_tables_api.c 	nft_set_elem_deactivate(ctx->net, set, &elem);
ctx              4877 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx              4890 net/netfilter/nf_tables_api.c static int nft_flush_set(const struct nft_ctx *ctx,
ctx              4898 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc_gfp(ctx, NFT_MSG_DELSETELEM,
ctx              4903 net/netfilter/nf_tables_api.c 	if (!set->ops->flush(ctx->net, set, elem->priv)) {
ctx              4909 net/netfilter/nf_tables_api.c 	nft_set_elem_deactivate(ctx->net, set, elem);
ctx              4912 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx              4928 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              4931 net/netfilter/nf_tables_api.c 	err = nft_ctx_init_from_elemattr(&ctx, net, skb, nlh, nla, extack,
ctx              4936 net/netfilter/nf_tables_api.c 	set = nft_set_lookup(ctx.table, nla[NFTA_SET_ELEM_LIST_SET], genmask);
ctx              4947 net/netfilter/nf_tables_api.c 		set->ops->walk(&ctx, set, &iter);
ctx              4953 net/netfilter/nf_tables_api.c 		err = nft_del_setelem(&ctx, set, attr);
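nft_add_set_elem() and nft_del_setelem() above keep the backend set and the transaction log consistent: the element is inserted first, and any later failure removes it again via set->ops->remove() so neither side ends up with a phantom entry. A toy sketch of that insert-with-rollback shape (the array-backed "set" is purely illustrative):

    #include <stdio.h>

    #define SET_SIZE 8

    struct set { int keys[SET_SIZE]; int used; };

    static int set_insert(struct set *s, int key)
    {
            int i;

            for (i = 0; i < s->used; i++)
                    if (s->keys[i] == key)
                            return -1;      /* -EEXIST */
            if (s->used == SET_SIZE)
                    return -1;              /* set full */
            s->keys[s->used++] = key;
            return 0;
    }

    static void set_remove(struct set *s, int key)
    {
            int i;

            for (i = 0; i < s->used; i++) {
                    if (s->keys[i] == key) {
                            s->keys[i] = s->keys[--s->used];
                            return;
                    }
            }
    }

    /* mirrors the error path: a failure after insert undoes the insert */
    static int add_elem(struct set *s, int key, int fail_later)
    {
            if (set_insert(s, key) < 0)
                    return -1;
            if (fail_later) {       /* e.g. queuing the transaction failed */
                    set_remove(s, key);
                    return -1;
            }
            return 0;
    }

    int main(void)
    {
            struct set s = { { 0 }, 0 };

            add_elem(&s, 42, 0);
            add_elem(&s, 7, 1);     /* rolled back */
            printf("elements: %d\n", s.used);       /* 1 */
            return 0;
    }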
ctx              5083 net/netfilter/nf_tables_api.c static struct nft_object *nft_obj_init(const struct nft_ctx *ctx,
ctx              5106 net/netfilter/nf_tables_api.c 		ops = type->select_ops(ctx, (const struct nlattr * const *)tb);
ctx              5120 net/netfilter/nf_tables_api.c 	err = ops->init(ctx, (const struct nlattr * const *)tb, obj);
ctx              5183 net/netfilter/nf_tables_api.c static int nf_tables_updobj(const struct nft_ctx *ctx,
ctx              5192 net/netfilter/nf_tables_api.c 	trans = nft_trans_alloc(ctx, NFT_MSG_NEWOBJ,
ctx              5197 net/netfilter/nf_tables_api.c 	newobj = nft_obj_init(ctx, type, attr);
ctx              5206 net/netfilter/nf_tables_api.c 	list_add_tail(&trans->list, &ctx->net->nft.commit_list);
ctx              5226 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              5258 net/netfilter/nf_tables_api.c 		nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              5260 net/netfilter/nf_tables_api.c 		return nf_tables_updobj(&ctx, type, nla[NFTA_OBJ_DATA], obj);
ctx              5263 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              5269 net/netfilter/nf_tables_api.c 	obj = nft_obj_init(&ctx, type, nla[NFTA_OBJ_DATA]);
ctx              5283 net/netfilter/nf_tables_api.c 	err = nft_trans_obj_add(&ctx, NFT_MSG_NEWOBJ, obj);
ctx              5303 net/netfilter/nf_tables_api.c 		obj->ops->destroy(&ctx, obj);
ctx              5509 net/netfilter/nf_tables_api.c static void nft_obj_destroy(const struct nft_ctx *ctx, struct nft_object *obj)
ctx              5512 net/netfilter/nf_tables_api.c 		obj->ops->destroy(ctx, obj);
ctx              5530 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              5561 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              5563 net/netfilter/nf_tables_api.c 	return nft_delobj(&ctx, obj);
ctx              5595 net/netfilter/nf_tables_api.c static void nf_tables_obj_notify(const struct nft_ctx *ctx,
ctx              5598 net/netfilter/nf_tables_api.c 	nft_obj_notify(ctx->net, ctx->table, obj, ctx->portid, ctx->seq, event,
ctx              5599 net/netfilter/nf_tables_api.c 		       ctx->family, ctx->report, GFP_KERNEL);
ctx              5644 net/netfilter/nf_tables_api.c void nf_tables_deactivate_flowtable(const struct nft_ctx *ctx,
ctx              5674 net/netfilter/nf_tables_api.c static int nf_tables_parse_devices(const struct nft_ctx *ctx,
ctx              5690 net/netfilter/nf_tables_api.c 		dev = __dev_get_by_name(ctx->net, ifname);
ctx              5717 net/netfilter/nf_tables_api.c static int nf_tables_flowtable_parse_hook(const struct nft_ctx *ctx,
ctx              5743 net/netfilter/nf_tables_api.c 	err = nf_tables_parse_devices(ctx, tb[NFTA_FLOWTABLE_HOOK_DEVS],
ctx              5824 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              5856 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              5882 net/netfilter/nf_tables_api.c 	err = nf_tables_flowtable_parse_hook(&ctx, nla[NFTA_FLOWTABLE_HOOK],
ctx              5909 net/netfilter/nf_tables_api.c 	err = nft_trans_flowtable_add(&ctx, NFT_MSG_NEWFLOWTABLE, flowtable);
ctx              5947 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx;
ctx              5978 net/netfilter/nf_tables_api.c 	nft_ctx_init(&ctx, net, skb, nlh, family, table, NULL, nla);
ctx              5980 net/netfilter/nf_tables_api.c 	return nft_delflowtable(&ctx, flowtable);
ctx              6184 net/netfilter/nf_tables_api.c static void nf_tables_flowtable_notify(struct nft_ctx *ctx,
ctx              6191 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx              6192 net/netfilter/nf_tables_api.c 	if (!ctx->report &&
ctx              6192 net/netfilter/nf_tables_api.c 	    !nfnetlink_has_listeners(ctx->net, NFNLGRP_NFTABLES))
ctx              6199 net/netfilter/nf_tables_api.c 	err = nf_tables_fill_flowtable_info(skb, ctx->net, ctx->portid,
ctx              6200 net/netfilter/nf_tables_api.c 					    ctx->seq, event, 0,
ctx              6201 net/netfilter/nf_tables_api.c 					    ctx->family, flowtable);
ctx              6207 net/netfilter/nf_tables_api.c 	nfnetlink_send(skb, ctx->net, ctx->portid, NFNLGRP_NFTABLES,
ctx              6208 net/netfilter/nf_tables_api.c 		       ctx->report, GFP_KERNEL);
ctx              6211 net/netfilter/nf_tables_api.c 	nfnetlink_set_err(ctx->net, ctx->portid, NFNLGRP_NFTABLES, -ENOBUFS);
ctx              6499 net/netfilter/nf_tables_api.c 	if (!nft_is_base_chain(trans->ctx.chain))
ctx              6502 net/netfilter/nf_tables_api.c 	basechain = nft_base_chain(trans->ctx.chain);
ctx              6511 net/netfilter/nf_tables_api.c 		rhltable_remove(&trans->ctx.table->chains_ht,
ctx              6512 net/netfilter/nf_tables_api.c 				&trans->ctx.chain->rhlhead,
ctx              6514 net/netfilter/nf_tables_api.c 		swap(trans->ctx.chain->name, nft_trans_chain_name(trans));
ctx              6515 net/netfilter/nf_tables_api.c 		rhltable_insert_key(&trans->ctx.table->chains_ht,
ctx              6516 net/netfilter/nf_tables_api.c 				    trans->ctx.chain->name,
ctx              6517 net/netfilter/nf_tables_api.c 				    &trans->ctx.chain->rhlhead,
ctx              6521 net/netfilter/nf_tables_api.c 	if (!nft_is_base_chain(trans->ctx.chain))
ctx              6526 net/netfilter/nf_tables_api.c 	basechain = nft_base_chain(trans->ctx.chain);
ctx              6554 net/netfilter/nf_tables_api.c 		nf_tables_table_destroy(&trans->ctx);
ctx              6561 net/netfilter/nf_tables_api.c 		nf_tables_chain_destroy(&trans->ctx);
ctx              6564 net/netfilter/nf_tables_api.c 		nf_tables_rule_destroy(&trans->ctx, nft_trans_rule(trans));
ctx              6570 net/netfilter/nf_tables_api.c 		nf_tables_set_elem_destroy(&trans->ctx,
ctx              6575 net/netfilter/nf_tables_api.c 		nft_obj_destroy(&trans->ctx, nft_trans_obj(trans));
ctx              6583 net/netfilter/nf_tables_api.c 		put_net(trans->ctx.net);
ctx              6644 net/netfilter/nf_tables_api.c 		struct nft_chain *chain = trans->ctx.chain;
ctx              6770 net/netfilter/nf_tables_api.c 	get_net(trans->ctx.net);
ctx              6811 net/netfilter/nf_tables_api.c 			chain = trans->ctx.chain;
ctx              6842 net/netfilter/nf_tables_api.c 								trans->ctx.table);
ctx              6843 net/netfilter/nf_tables_api.c 					trans->ctx.table->flags |= NFT_TABLE_F_DORMANT;
ctx              6846 net/netfilter/nf_tables_api.c 				nft_clear(net, trans->ctx.table);
ctx              6848 net/netfilter/nf_tables_api.c 			nf_tables_table_notify(&trans->ctx, NFT_MSG_NEWTABLE);
ctx              6852 net/netfilter/nf_tables_api.c 			list_del_rcu(&trans->ctx.table->list);
ctx              6853 net/netfilter/nf_tables_api.c 			nf_tables_table_notify(&trans->ctx, NFT_MSG_DELTABLE);
ctx              6858 net/netfilter/nf_tables_api.c 				nf_tables_chain_notify(&trans->ctx, NFT_MSG_NEWCHAIN);
ctx              6862 net/netfilter/nf_tables_api.c 				nft_clear(net, trans->ctx.chain);
ctx              6863 net/netfilter/nf_tables_api.c 				nf_tables_chain_notify(&trans->ctx, NFT_MSG_NEWCHAIN);
ctx              6868 net/netfilter/nf_tables_api.c 			nft_chain_del(trans->ctx.chain);
ctx              6869 net/netfilter/nf_tables_api.c 			nf_tables_chain_notify(&trans->ctx, NFT_MSG_DELCHAIN);
ctx              6870 net/netfilter/nf_tables_api.c 			nf_tables_unregister_hook(trans->ctx.net,
ctx              6871 net/netfilter/nf_tables_api.c 						  trans->ctx.table,
ctx              6872 net/netfilter/nf_tables_api.c 						  trans->ctx.chain);
ctx              6875 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, nft_trans_rule(trans));
ctx              6876 net/netfilter/nf_tables_api.c 			nf_tables_rule_notify(&trans->ctx,
ctx              6883 net/netfilter/nf_tables_api.c 			nf_tables_rule_notify(&trans->ctx,
ctx              6886 net/netfilter/nf_tables_api.c 			nft_rule_expr_deactivate(&trans->ctx,
ctx              6897 net/netfilter/nf_tables_api.c 				trans->ctx.table->use--;
ctx              6899 net/netfilter/nf_tables_api.c 			nf_tables_set_notify(&trans->ctx, nft_trans_set(trans),
ctx              6905 net/netfilter/nf_tables_api.c 			nf_tables_set_notify(&trans->ctx, nft_trans_set(trans),
ctx              6912 net/netfilter/nf_tables_api.c 			nf_tables_setelem_notify(&trans->ctx, te->set,
ctx              6920 net/netfilter/nf_tables_api.c 			nf_tables_setelem_notify(&trans->ctx, te->set,
ctx              6930 net/netfilter/nf_tables_api.c 				nf_tables_obj_notify(&trans->ctx,
ctx              6935 net/netfilter/nf_tables_api.c 				nf_tables_obj_notify(&trans->ctx,
ctx              6943 net/netfilter/nf_tables_api.c 			nf_tables_obj_notify(&trans->ctx, nft_trans_obj(trans),
ctx              6948 net/netfilter/nf_tables_api.c 			nf_tables_flowtable_notify(&trans->ctx,
ctx              6955 net/netfilter/nf_tables_api.c 			nf_tables_flowtable_notify(&trans->ctx,
ctx              6989 net/netfilter/nf_tables_api.c 		nf_tables_table_destroy(&trans->ctx);
ctx              6992 net/netfilter/nf_tables_api.c 		nf_tables_chain_destroy(&trans->ctx);
ctx              6995 net/netfilter/nf_tables_api.c 		nf_tables_rule_destroy(&trans->ctx, nft_trans_rule(trans));
ctx              7005 net/netfilter/nf_tables_api.c 		nft_obj_destroy(&trans->ctx, nft_trans_obj(trans));
ctx              7026 net/netfilter/nf_tables_api.c 								trans->ctx.table);
ctx              7027 net/netfilter/nf_tables_api.c 					trans->ctx.table->flags |= NFT_TABLE_F_DORMANT;
ctx              7031 net/netfilter/nf_tables_api.c 				list_del_rcu(&trans->ctx.table->list);
ctx              7035 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, trans->ctx.table);
ctx              7044 net/netfilter/nf_tables_api.c 				trans->ctx.table->use--;
ctx              7045 net/netfilter/nf_tables_api.c 				nft_chain_del(trans->ctx.chain);
ctx              7046 net/netfilter/nf_tables_api.c 				nf_tables_unregister_hook(trans->ctx.net,
ctx              7047 net/netfilter/nf_tables_api.c 							  trans->ctx.table,
ctx              7048 net/netfilter/nf_tables_api.c 							  trans->ctx.chain);
ctx              7052 net/netfilter/nf_tables_api.c 			trans->ctx.table->use++;
ctx              7053 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, trans->ctx.chain);
ctx              7057 net/netfilter/nf_tables_api.c 			trans->ctx.chain->use--;
ctx              7059 net/netfilter/nf_tables_api.c 			nft_rule_expr_deactivate(&trans->ctx,
ctx              7064 net/netfilter/nf_tables_api.c 			trans->ctx.chain->use++;
ctx              7065 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, nft_trans_rule(trans));
ctx              7066 net/netfilter/nf_tables_api.c 			nft_rule_expr_activate(&trans->ctx, nft_trans_rule(trans));
ctx              7070 net/netfilter/nf_tables_api.c 			trans->ctx.table->use--;
ctx              7078 net/netfilter/nf_tables_api.c 			trans->ctx.table->use++;
ctx              7079 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, nft_trans_set(trans));
ctx              7105 net/netfilter/nf_tables_api.c 				trans->ctx.table->use--;
ctx              7110 net/netfilter/nf_tables_api.c 			trans->ctx.table->use++;
ctx              7111 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, nft_trans_obj(trans));
ctx              7115 net/netfilter/nf_tables_api.c 			trans->ctx.table->use--;
ctx              7121 net/netfilter/nf_tables_api.c 			trans->ctx.table->use++;
ctx              7122 net/netfilter/nf_tables_api.c 			nft_clear(trans->ctx.net, nft_trans_flowtable(trans));
ctx              7225 net/netfilter/nf_tables_api.c static int nf_tables_check_loops(const struct nft_ctx *ctx,
ctx              7228 net/netfilter/nf_tables_api.c static int nf_tables_loop_check_setelem(const struct nft_ctx *ctx,
ctx              7244 net/netfilter/nf_tables_api.c 		return nf_tables_check_loops(ctx, data->verdict.chain);
ctx              7250 net/netfilter/nf_tables_api.c static int nf_tables_check_loops(const struct nft_ctx *ctx,
ctx              7259 net/netfilter/nf_tables_api.c 	if (ctx->chain == chain)
ctx              7279 net/netfilter/nf_tables_api.c 				err = nf_tables_check_loops(ctx,
ctx              7289 net/netfilter/nf_tables_api.c 	list_for_each_entry(set, &ctx->table->sets, list) {
ctx              7290 net/netfilter/nf_tables_api.c 		if (!nft_is_active_next(ctx->net, set))
ctx              7301 net/netfilter/nf_tables_api.c 			iter.genmask	= nft_genmask_next(ctx->net);
ctx              7307 net/netfilter/nf_tables_api.c 			set->ops->walk(ctx, set, &iter);
ctx              7422 net/netfilter/nf_tables_api.c int nft_validate_register_store(const struct nft_ctx *ctx,
ctx              7437 net/netfilter/nf_tables_api.c 			err = nf_tables_check_loops(ctx, data->verdict.chain);
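nft_chain_validate() and nf_tables_check_loops() above bound and cycle-check the verdict graph: validation stops at NFT_JUMP_STACK_SIZE levels, and the loop check recurses through every reachable jump target (including set-element verdicts) looking for the originating chain. A compact sketch of that recursive check with stand-in types:

    #include <stdio.h>

    #define JUMP_STACK_SIZE 16
    #define MAX_TARGETS 4

    struct chain {
            const char *name;
            const struct chain *targets[MAX_TARGETS]; /* jump destinations */
            int n_targets;
    };

    static int check_loops(const struct chain *origin, const struct chain *chain,
                           int depth)
    {
            int i;

            if (chain == origin)
                    return -1;      /* loop detected (-ELOOP) */
            if (depth == JUMP_STACK_SIZE)
                    return -1;      /* jump stack exhausted */
            for (i = 0; i < chain->n_targets; i++)
                    if (check_loops(origin, chain->targets[i], depth + 1) < 0)
                            return -1;
            return 0;
    }

    int main(void)
    {
            struct chain a = { "a", { 0 }, 0 };
            struct chain b = { "b", { &a }, 1 };    /* b jumps back to a */

            printf("a -> b: %s\n",
                   check_loops(&a, &b, 0) ? "loop" : "ok");
            return 0;
    }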
ctx              7465 net/netfilter/nf_tables_api.c static int nft_verdict_init(const struct nft_ctx *ctx, struct nft_data *data,
ctx              7468 net/netfilter/nf_tables_api.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx              7501 net/netfilter/nf_tables_api.c 		chain = nft_chain_lookup(ctx->net, ctx->table,
ctx              7553 net/netfilter/nf_tables_api.c static int nft_value_init(const struct nft_ctx *ctx,
ctx              7597 net/netfilter/nf_tables_api.c int nft_data_init(const struct nft_ctx *ctx,
ctx              7610 net/netfilter/nf_tables_api.c 		return nft_value_init(ctx, data, size, desc,
ctx              7612 net/netfilter/nf_tables_api.c 	if (tb[NFTA_DATA_VERDICT] && ctx != NULL)
ctx              7613 net/netfilter/nf_tables_api.c 		return nft_verdict_init(ctx, data, desc, tb[NFTA_DATA_VERDICT]);
ctx              7667 net/netfilter/nf_tables_api.c int __nft_release_basechain(struct nft_ctx *ctx)
ctx              7671 net/netfilter/nf_tables_api.c 	if (WARN_ON(!nft_is_base_chain(ctx->chain)))
ctx              7674 net/netfilter/nf_tables_api.c 	nf_tables_unregister_hook(ctx->net, ctx->chain->table, ctx->chain);
ctx              7675 net/netfilter/nf_tables_api.c 	list_for_each_entry_safe(rule, nr, &ctx->chain->rules, list) {
ctx              7677 net/netfilter/nf_tables_api.c 		ctx->chain->use--;
ctx              7678 net/netfilter/nf_tables_api.c 		nf_tables_rule_release(ctx, rule);
ctx              7680 net/netfilter/nf_tables_api.c 	nft_chain_del(ctx->chain);
ctx              7681 net/netfilter/nf_tables_api.c 	ctx->table->use--;
ctx              7682 net/netfilter/nf_tables_api.c 	nf_tables_chain_destroy(ctx);
ctx              7696 net/netfilter/nf_tables_api.c 	struct nft_ctx ctx = {
ctx              7702 net/netfilter/nf_tables_api.c 		ctx.family = table->family;
ctx              7707 net/netfilter/nf_tables_api.c 		ctx.table = table;
ctx              7709 net/netfilter/nf_tables_api.c 			ctx.chain = chain;
ctx              7713 net/netfilter/nf_tables_api.c 				nf_tables_rule_release(&ctx, rule);
ctx              7729 net/netfilter/nf_tables_api.c 			nft_obj_destroy(&ctx, obj);
ctx              7732 net/netfilter/nf_tables_api.c 			ctx.chain = chain;
ctx              7735 net/netfilter/nf_tables_api.c 			nf_tables_chain_destroy(&ctx);
ctx              7738 net/netfilter/nf_tables_api.c 		nf_tables_table_destroy(&ctx);
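The long run of trans->ctx lines above is the commit/abort machinery: commit walks the transaction list forward, making each queued change visible and sending its notification; abort walks it in reverse, restoring use counters and destroying objects that never became visible. The sketch below models that two-way replay for a single object type; it is a deliberately tiny model, not the kernel's state machine.

    #include <stdio.h>

    enum { MSG_NEWOBJ, MSG_DELOBJ };

    struct trans {
            int msg_type;
            const char *name;
            struct trans *next;
    };

    static void commit(struct trans *list)
    {
            for (struct trans *t = list; t; t = t->next) {
                    switch (t->msg_type) {
                    case MSG_NEWOBJ:
                            printf("activate %s, notify NEWOBJ\n", t->name);
                            break;
                    case MSG_DELOBJ:
                            printf("unlink %s, notify DELOBJ\n", t->name);
                            break;
                    }
            }
    }

    static void abort_batch(struct trans *list)
    {
            /* the kernel walks the list in reverse; recursion does it here */
            if (!list)
                    return;
            abort_batch(list->next);
            switch (list->msg_type) {
            case MSG_NEWOBJ:
                    printf("destroy never-visible %s\n", list->name);
                    break;
            case MSG_DELOBJ:
                    printf("re-activate %s\n", list->name);
                    break;
            }
    }

    int main(void)
    {
            struct trans t2 = { MSG_DELOBJ, "old-counter", NULL };
            struct trans t1 = { MSG_NEWOBJ, "new-counter", &t2 };

            commit(&t1);    /* or: abort_batch(&t1); */
            return 0;
    }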
ctx                34 net/netfilter/nf_tables_offload.c 	struct nft_offload_ctx *ctx;
ctx                56 net/netfilter/nf_tables_offload.c 	ctx = kzalloc(sizeof(struct nft_offload_ctx), GFP_KERNEL);
ctx                57 net/netfilter/nf_tables_offload.c 	if (!ctx) {
ctx                61 net/netfilter/nf_tables_offload.c 	ctx->net = net;
ctx                62 net/netfilter/nf_tables_offload.c 	ctx->dep.type = NFT_OFFLOAD_DEP_UNSPEC;
ctx                69 net/netfilter/nf_tables_offload.c 		err = expr->ops->offload(ctx, flow, expr);
ctx                75 net/netfilter/nf_tables_offload.c 	flow->proto = ctx->dep.l3num;
ctx                76 net/netfilter/nf_tables_offload.c 	kfree(ctx);
ctx                80 net/netfilter/nf_tables_offload.c 	kfree(ctx);
ctx               105 net/netfilter/nf_tables_offload.c void nft_offload_set_dependency(struct nft_offload_ctx *ctx,
ctx               108 net/netfilter/nf_tables_offload.c 	ctx->dep.type = type;
ctx               111 net/netfilter/nf_tables_offload.c void nft_offload_update_dependency(struct nft_offload_ctx *ctx,
ctx               114 net/netfilter/nf_tables_offload.c 	switch (ctx->dep.type) {
ctx               117 net/netfilter/nf_tables_offload.c 		memcpy(&ctx->dep.l3num, data, sizeof(__u16));
ctx               121 net/netfilter/nf_tables_offload.c 		memcpy(&ctx->dep.protonum, data, sizeof(__u8));
ctx               126 net/netfilter/nf_tables_offload.c 	ctx->dep.type = NFT_OFFLOAD_DEP_UNSPEC;
ctx               335 net/netfilter/nf_tables_offload.c 		if (trans->ctx.family != NFPROTO_NETDEV)
ctx               340 net/netfilter/nf_tables_offload.c 			if (!(trans->ctx.chain->flags & NFT_CHAIN_HW_OFFLOAD) ||
ctx               345 net/netfilter/nf_tables_offload.c 			err = nft_flow_offload_chain(trans->ctx.chain, &policy,
ctx               349 net/netfilter/nf_tables_offload.c 			if (!(trans->ctx.chain->flags & NFT_CHAIN_HW_OFFLOAD))
ctx               353 net/netfilter/nf_tables_offload.c 			err = nft_flow_offload_chain(trans->ctx.chain, &policy,
ctx               357 net/netfilter/nf_tables_offload.c 			if (!(trans->ctx.chain->flags & NFT_CHAIN_HW_OFFLOAD))
ctx               360 net/netfilter/nf_tables_offload.c 			if (trans->ctx.flags & NLM_F_REPLACE ||
ctx               361 net/netfilter/nf_tables_offload.c 			    !(trans->ctx.flags & NLM_F_APPEND)) {
ctx               365 net/netfilter/nf_tables_offload.c 			err = nft_flow_offload_rule(trans->ctx.chain,
ctx               371 net/netfilter/nf_tables_offload.c 			if (!(trans->ctx.chain->flags & NFT_CHAIN_HW_OFFLOAD))
ctx               374 net/netfilter/nf_tables_offload.c 			err = nft_flow_offload_rule(trans->ctx.chain,
ctx               386 net/netfilter/nf_tables_offload.c 		if (trans->ctx.family != NFPROTO_NETDEV)
ctx               391 net/netfilter/nf_tables_offload.c 			if (!(trans->ctx.chain->flags & NFT_CHAIN_HW_OFFLOAD))
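nft_offload_set_dependency()/nft_offload_update_dependency() above implement a small protocol-dependency hand-off: one expression announces which kind of dependency it establishes (network or transport), and the next matched value resolves it into l3num or protonum and clears the pending state. A stand-alone sketch under the same assumptions, with simplified stand-in types:

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    enum dep_type {
            DEP_UNSPEC,
            DEP_NETWORK,    /* resolves to an L3 protocol number */
            DEP_TRANSPORT,  /* resolves to an L4 protocol number */
    };

    struct offload_ctx {
            struct {
                    enum dep_type type;
                    uint16_t l3num;
                    uint8_t protonum;
            } dep;
    };

    static void set_dependency(struct offload_ctx *ctx, enum dep_type type)
    {
            ctx->dep.type = type;
    }

    static void update_dependency(struct offload_ctx *ctx, const void *data)
    {
            switch (ctx->dep.type) {
            case DEP_NETWORK:
                    memcpy(&ctx->dep.l3num, data, sizeof(uint16_t));
                    break;
            case DEP_TRANSPORT:
                    memcpy(&ctx->dep.protonum, data, sizeof(uint8_t));
                    break;
            default:
                    break;
            }
            ctx->dep.type = DEP_UNSPEC;     /* dependency consumed */
    }

    int main(void)
    {
            struct offload_ctx ctx = { { DEP_UNSPEC, 0, 0 } };
            uint16_t eth_p_ip = 0x0800;

            set_dependency(&ctx, DEP_NETWORK);
            update_dependency(&ctx, &eth_p_ip);
            printf("l3num=0x%04x\n", ctx.dep.l3num);
            return 0;
    }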
ctx                67 net/netfilter/nfnetlink_osf.c 			     struct nf_osf_hdr_ctx *ctx)
ctx                69 net/netfilter/nfnetlink_osf.c 	const __u8 *optpinit = ctx->optp;
ctx                75 net/netfilter/nfnetlink_osf.c 	if (ctx->totlen != f->ss || !nf_osf_ttl(skb, ttl_check, f->ttl))
ctx                91 net/netfilter/nfnetlink_osf.c 	    ctx->optsize > MAX_IPOPTLEN ||
ctx                92 net/netfilter/nfnetlink_osf.c 	    ctx->optsize != foptsize)
ctx                98 net/netfilter/nfnetlink_osf.c 		if (f->opt[optnum].kind == *ctx->optp) {
ctx               100 net/netfilter/nfnetlink_osf.c 			const __u8 *optend = ctx->optp + len;
ctx               104 net/netfilter/nfnetlink_osf.c 			switch (*ctx->optp) {
ctx               106 net/netfilter/nfnetlink_osf.c 				mss = ctx->optp[3];
ctx               108 net/netfilter/nfnetlink_osf.c 				mss |= ctx->optp[2];
ctx               116 net/netfilter/nfnetlink_osf.c 			ctx->optp = optend;
ctx               129 net/netfilter/nfnetlink_osf.c 			if (f->wss.val == 0 || ctx->window == f->wss.val)
ctx               141 net/netfilter/nfnetlink_osf.c 			if (ctx->window == f->wss.val * mss ||
ctx               142 net/netfilter/nfnetlink_osf.c 			    ctx->window == f->wss.val * SMART_MSS_1 ||
ctx               143 net/netfilter/nfnetlink_osf.c 			    ctx->window == f->wss.val * SMART_MSS_2)
ctx               147 net/netfilter/nfnetlink_osf.c 			if (ctx->window == f->wss.val * (mss + 40) ||
ctx               148 net/netfilter/nfnetlink_osf.c 			    ctx->window == f->wss.val * (SMART_MSS_1 + 40) ||
ctx               149 net/netfilter/nfnetlink_osf.c 			    ctx->window == f->wss.val * (SMART_MSS_2 + 40))
ctx               153 net/netfilter/nfnetlink_osf.c 			if ((ctx->window % f->wss.val) == 0)
ctx               160 net/netfilter/nfnetlink_osf.c 		ctx->optp = optpinit;
ctx               165 net/netfilter/nfnetlink_osf.c static const struct tcphdr *nf_osf_hdr_ctx_init(struct nf_osf_hdr_ctx *ctx,
ctx               180 net/netfilter/nfnetlink_osf.c 	ctx->totlen = ntohs(ip->tot_len);
ctx               181 net/netfilter/nfnetlink_osf.c 	ctx->df = ntohs(ip->frag_off) & IP_DF;
ctx               182 net/netfilter/nfnetlink_osf.c 	ctx->window = ntohs(tcp->window);
ctx               185 net/netfilter/nfnetlink_osf.c 		ctx->optsize = tcp->doff * 4 - sizeof(struct tcphdr);
ctx               187 net/netfilter/nfnetlink_osf.c 		ctx->optp = skb_header_pointer(skb, ip_hdrlen(skb) +
ctx               188 net/netfilter/nfnetlink_osf.c 				sizeof(struct tcphdr), ctx->optsize, opts);
ctx               206 net/netfilter/nfnetlink_osf.c 	struct nf_osf_hdr_ctx ctx;
ctx               210 net/netfilter/nfnetlink_osf.c 	memset(&ctx, 0, sizeof(ctx));
ctx               212 net/netfilter/nfnetlink_osf.c 	tcp = nf_osf_hdr_ctx_init(&ctx, skb, ip, opts, &_tcph);
ctx               218 net/netfilter/nfnetlink_osf.c 	list_for_each_entry_rcu(kf, &nf_osf_fingers[ctx.df], finger_entry) {
ctx               225 net/netfilter/nfnetlink_osf.c 		if (!nf_osf_match_one(skb, f, ttl_check, &ctx))
ctx               267 net/netfilter/nfnetlink_osf.c 	struct nf_osf_hdr_ctx ctx;
ctx               271 net/netfilter/nfnetlink_osf.c 	memset(&ctx, 0, sizeof(ctx));
ctx               273 net/netfilter/nfnetlink_osf.c 	tcp = nf_osf_hdr_ctx_init(&ctx, skb, ip, opts, &_tcph);
ctx               277 net/netfilter/nfnetlink_osf.c 	list_for_each_entry_rcu(kf, &nf_osf_fingers[ctx.df], finger_entry) {
ctx               279 net/netfilter/nfnetlink_osf.c 		if (!nf_osf_match_one(skb, f, ttl_check, &ctx))
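nf_osf_match_one() above advances ctx->optp through the TCP option block, matching option kinds against the fingerprint and pulling the MSS out of its two value bytes. A self-contained sketch of that option walk, using only standard TCP option framing (kind, length, payload, with EOL and NOP as the one-byte exceptions); it does not reproduce the kernel's exact byte shuffling, just the walk itself.

```c
#include <stdint.h>
#include <stddef.h>

#define TCPOPT_EOL	0
#define TCPOPT_NOP	1
#define TCPOPT_MSS	2

/* Returns the MSS option value, or 0 if absent or malformed. */
static uint16_t tcp_opts_find_mss(const uint8_t *optp, size_t optsize)
{
	const uint8_t *end = optp + optsize;

	while (optp < end) {
		uint8_t kind = *optp;

		if (kind == TCPOPT_EOL)
			break;
		if (kind == TCPOPT_NOP) {	/* one-byte padding option */
			optp++;
			continue;
		}
		/* Every other option has a length byte covering itself. */
		if (optp + 1 >= end || optp[1] < 2 || optp + optp[1] > end)
			break;			/* truncated or bogus length */
		if (kind == TCPOPT_MSS && optp[1] == 4)
			return (uint16_t)(optp[2] << 8 | optp[3]); /* network order */
		optp += optp[1];		/* advance to the next option */
	}
	return 0;
}
```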
ctx                46 net/netfilter/nft_bitwise.c static int nft_bitwise_init(const struct nft_ctx *ctx,
ctx                74 net/netfilter/nft_bitwise.c 	err = nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               132 net/netfilter/nft_bitwise.c static int nft_bitwise_offload(struct nft_offload_ctx *ctx,
ctx               137 net/netfilter/nft_bitwise.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx                95 net/netfilter/nft_byteorder.c static int nft_byteorder_init(const struct nft_ctx *ctx,
ctx               146 net/netfilter/nft_byteorder.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               287 net/netfilter/nft_chain_filter.c 			     struct nft_ctx *ctx)
ctx               289 net/netfilter/nft_chain_filter.c 	struct nft_base_chain *basechain = nft_base_chain(ctx->chain);
ctx               303 net/netfilter/nft_chain_filter.c 		__nft_release_basechain(ctx);
ctx               320 net/netfilter/nft_chain_filter.c 	struct nft_ctx ctx = {
ctx               328 net/netfilter/nft_chain_filter.c 	mutex_lock(&ctx.net->nft.commit_mutex);
ctx               329 net/netfilter/nft_chain_filter.c 	list_for_each_entry(table, &ctx.net->nft.tables, list) {
ctx               333 net/netfilter/nft_chain_filter.c 		ctx.family = table->family;
ctx               334 net/netfilter/nft_chain_filter.c 		ctx.table = table;
ctx               339 net/netfilter/nft_chain_filter.c 			ctx.chain = chain;
ctx               340 net/netfilter/nft_chain_filter.c 			nft_netdev_event(event, dev, &ctx);
ctx               343 net/netfilter/nft_chain_filter.c 	mutex_unlock(&ctx.net->nft.commit_mutex);
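The nft_chain_filter.c lines show the netdevice-event pattern: build an on-stack ctx, take the commit mutex, then refresh ctx.family and ctx.table per table while walking chains, so every callee sees a consistent snapshot. A reduced userspace model of that shape, with a pthread mutex and a hand-rolled list standing in for the kernel plumbing:

```c
#include <pthread.h>
#include <stdio.h>

struct table { int family; const char *name; struct table *next; };
struct ctx   { int family; struct table *table; };

static pthread_mutex_t commit_mutex = PTHREAD_MUTEX_INITIALIZER;

static void on_netdev_event(int event, struct table *tables)
{
	struct ctx ctx = { 0 };

	pthread_mutex_lock(&commit_mutex);
	for (struct table *t = tables; t; t = t->next) {
		ctx.family = t->family;   /* refreshed per table, as above */
		ctx.table  = t;
		printf("event %d for table %s\n", event, t->name);
	}
	pthread_mutex_unlock(&commit_mutex);
}
```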
ctx                71 net/netfilter/nft_cmp.c static int nft_cmp_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               117 net/netfilter/nft_cmp.c static int __nft_cmp_offload(struct nft_offload_ctx *ctx,
ctx               121 net/netfilter/nft_cmp.c 	struct nft_offload_reg *reg = &ctx->regs[priv->sreg];
ctx               134 net/netfilter/nft_cmp.c 	nft_offload_update_dependency(ctx, &priv->data, priv->len);
ctx               139 net/netfilter/nft_cmp.c static int nft_cmp_offload(struct nft_offload_ctx *ctx,
ctx               145 net/netfilter/nft_cmp.c 	return __nft_cmp_offload(ctx, flow, priv);
ctx               157 net/netfilter/nft_cmp.c static int nft_cmp_fast_init(const struct nft_ctx *ctx,
ctx               185 net/netfilter/nft_cmp.c static int nft_cmp_fast_offload(struct nft_offload_ctx *ctx,
ctx               201 net/netfilter/nft_cmp.c 	return __nft_cmp_offload(ctx, flow, &cmp);
ctx               234 net/netfilter/nft_cmp.c nft_cmp_select_ops(const struct nft_ctx *ctx, const struct nlattr * const tb[])
ctx                30 net/netfilter/nft_compat.c static int nft_compat_chain_validate_dependency(const struct nft_ctx *ctx,
ctx                34 net/netfilter/nft_compat.c 	const struct nft_chain *chain = ctx->chain;
ctx                43 net/netfilter/nft_compat.c 		if (ctx->family != NFPROTO_BRIDGE)
ctx               136 net/netfilter/nft_compat.c 			   const struct nft_ctx *ctx,
ctx               140 net/netfilter/nft_compat.c 	par->net	= ctx->net;
ctx               141 net/netfilter/nft_compat.c 	par->table	= ctx->table->name;
ctx               142 net/netfilter/nft_compat.c 	switch (ctx->family) {
ctx               164 net/netfilter/nft_compat.c 	if (nft_is_base_chain(ctx->chain)) {
ctx               166 net/netfilter/nft_compat.c 						nft_base_chain(ctx->chain);
ctx               173 net/netfilter/nft_compat.c 	par->family	= ctx->family;
ctx               217 net/netfilter/nft_compat.c nft_target_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               231 net/netfilter/nft_compat.c 	if (ctx->nla[NFTA_RULE_COMPAT]) {
ctx               232 net/netfilter/nft_compat.c 		ret = nft_parse_compat(ctx->nla[NFTA_RULE_COMPAT], &proto, &inv);
ctx               237 net/netfilter/nft_compat.c 	nft_target_set_tgchk_param(&par, ctx, target, info, &e, proto, inv);
ctx               251 net/netfilter/nft_compat.c nft_target_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               258 net/netfilter/nft_compat.c 	par.net = ctx->net;
ctx               261 net/netfilter/nft_compat.c 	par.family = ctx->family;
ctx               304 net/netfilter/nft_compat.c static int nft_target_validate(const struct nft_ctx *ctx,
ctx               312 net/netfilter/nft_compat.c 	if (nft_is_base_chain(ctx->chain)) {
ctx               314 net/netfilter/nft_compat.c 						nft_base_chain(ctx->chain);
ctx               321 net/netfilter/nft_compat.c 		ret = nft_compat_chain_validate_dependency(ctx, target->table);
ctx               380 net/netfilter/nft_compat.c nft_match_set_mtchk_param(struct xt_mtchk_param *par, const struct nft_ctx *ctx,
ctx               384 net/netfilter/nft_compat.c 	par->net	= ctx->net;
ctx               385 net/netfilter/nft_compat.c 	par->table	= ctx->table->name;
ctx               386 net/netfilter/nft_compat.c 	switch (ctx->family) {
ctx               408 net/netfilter/nft_compat.c 	if (nft_is_base_chain(ctx->chain)) {
ctx               410 net/netfilter/nft_compat.c 						nft_base_chain(ctx->chain);
ctx               417 net/netfilter/nft_compat.c 	par->family	= ctx->family;
ctx               432 net/netfilter/nft_compat.c __nft_match_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               446 net/netfilter/nft_compat.c 	if (ctx->nla[NFTA_RULE_COMPAT]) {
ctx               447 net/netfilter/nft_compat.c 		ret = nft_parse_compat(ctx->nla[NFTA_RULE_COMPAT], &proto, &inv);
ctx               452 net/netfilter/nft_compat.c 	nft_match_set_mtchk_param(&par, ctx, match, info, &e, proto, inv);
ctx               458 net/netfilter/nft_compat.c nft_match_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               461 net/netfilter/nft_compat.c 	return __nft_match_init(ctx, expr, tb, nft_expr_priv(expr));
ctx               465 net/netfilter/nft_compat.c nft_match_large_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               476 net/netfilter/nft_compat.c 	ret = __nft_match_init(ctx, expr, tb, priv->info);
ctx               483 net/netfilter/nft_compat.c __nft_match_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               490 net/netfilter/nft_compat.c 	par.net = ctx->net;
ctx               493 net/netfilter/nft_compat.c 	par.family = ctx->family;
ctx               502 net/netfilter/nft_compat.c nft_match_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               504 net/netfilter/nft_compat.c 	__nft_match_destroy(ctx, expr, nft_expr_priv(expr));
ctx               508 net/netfilter/nft_compat.c nft_match_large_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               512 net/netfilter/nft_compat.c 	__nft_match_destroy(ctx, expr, priv->info);
ctx               545 net/netfilter/nft_compat.c static int nft_match_validate(const struct nft_ctx *ctx,
ctx               553 net/netfilter/nft_compat.c 	if (nft_is_base_chain(ctx->chain)) {
ctx               555 net/netfilter/nft_compat.c 						nft_base_chain(ctx->chain);
ctx               562 net/netfilter/nft_compat.c 		ret = nft_compat_chain_validate_dependency(ctx, match->table);
ctx               704 net/netfilter/nft_compat.c nft_match_select_ops(const struct nft_ctx *ctx,
ctx               721 net/netfilter/nft_compat.c 	family = ctx->family;
ctx               784 net/netfilter/nft_compat.c nft_target_select_ops(const struct nft_ctx *ctx,
ctx               800 net/netfilter/nft_compat.c 	family = ctx->family;
ctx                59 net/netfilter/nft_connlimit.c static int nft_connlimit_do_init(const struct nft_ctx *ctx,
ctx                83 net/netfilter/nft_connlimit.c 	return nf_ct_netns_get(ctx->net, ctx->family);
ctx                86 net/netfilter/nft_connlimit.c static void nft_connlimit_do_destroy(const struct nft_ctx *ctx,
ctx                89 net/netfilter/nft_connlimit.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               117 net/netfilter/nft_connlimit.c static int nft_connlimit_obj_init(const struct nft_ctx *ctx,
ctx               123 net/netfilter/nft_connlimit.c 	return nft_connlimit_do_init(ctx, tb, priv);
ctx               126 net/netfilter/nft_connlimit.c static void nft_connlimit_obj_destroy(const struct nft_ctx *ctx,
ctx               131 net/netfilter/nft_connlimit.c 	nft_connlimit_do_destroy(ctx, priv);
ctx               181 net/netfilter/nft_connlimit.c static int nft_connlimit_init(const struct nft_ctx *ctx,
ctx               187 net/netfilter/nft_connlimit.c 	return nft_connlimit_do_init(ctx, tb, priv);
ctx               190 net/netfilter/nft_connlimit.c static void nft_connlimit_destroy(const struct nft_ctx *ctx,
ctx               195 net/netfilter/nft_connlimit.c 	nft_connlimit_do_destroy(ctx, priv);
ctx               210 net/netfilter/nft_connlimit.c static void nft_connlimit_destroy_clone(const struct nft_ctx *ctx,
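nft_connlimit registers the same logic twice, as an expression and as a stateful object; both front ends are thin wrappers over one do_init()/do_destroy() pair, which keeps the nf_ct_netns_get()/nf_ct_netns_put() references balanced no matter which path created the instance. The shape of that pattern, with a plain counter standing in for the per-netns conntrack refcount:

```c
static int netns_refs;

static int  netns_get(void) { netns_refs++; return 0; }
static void netns_put(void) { netns_refs--; }

/* Shared core: all setup and teardown lives here exactly once. */
static int  do_init(void)    { return netns_get(); }
static void do_destroy(void) { netns_put(); }

/* Thin per-frontend wrappers, mirroring nft_connlimit_{obj_,}init. */
static int  expr_init(void)    { return do_init(); }
static void expr_destroy(void) { do_destroy(); }
static int  obj_init(void)     { return do_init(); }
static void obj_destroy(void)  { do_destroy(); }
```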
ctx                82 net/netfilter/nft_counter.c static int nft_counter_obj_init(const struct nft_ctx *ctx,
ctx                96 net/netfilter/nft_counter.c static void nft_counter_obj_destroy(const struct nft_ctx *ctx,
ctx               210 net/netfilter/nft_counter.c static int nft_counter_init(const struct nft_ctx *ctx,
ctx               219 net/netfilter/nft_counter.c static void nft_counter_destroy(const struct nft_ctx *ctx,
ctx               387 net/netfilter/nft_ct.c static int nft_ct_get_init(const struct nft_ctx *ctx,
ctx               441 net/netfilter/nft_ct.c 		switch (ctx->family) {
ctx               504 net/netfilter/nft_ct.c 	err = nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               509 net/netfilter/nft_ct.c 	err = nf_ct_netns_get(ctx->net, ctx->family);
ctx               516 net/netfilter/nft_ct.c 		nf_ct_set_acct(ctx->net, true);
ctx               521 net/netfilter/nft_ct.c static void __nft_ct_set_destroy(const struct nft_ctx *ctx, struct nft_ct *priv)
ctx               526 net/netfilter/nft_ct.c 		nf_connlabels_put(ctx->net);
ctx               539 net/netfilter/nft_ct.c static int nft_ct_set_init(const struct nft_ctx *ctx,
ctx               562 net/netfilter/nft_ct.c 		err = nf_connlabels_get(ctx->net, (len * BITS_PER_BYTE) - 1);
ctx               610 net/netfilter/nft_ct.c 	err = nf_ct_netns_get(ctx->net, ctx->family);
ctx               617 net/netfilter/nft_ct.c 	__nft_ct_set_destroy(ctx, priv);
ctx               621 net/netfilter/nft_ct.c static void nft_ct_get_destroy(const struct nft_ctx *ctx,
ctx               624 net/netfilter/nft_ct.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               627 net/netfilter/nft_ct.c static void nft_ct_set_destroy(const struct nft_ctx *ctx,
ctx               632 net/netfilter/nft_ct.c 	__nft_ct_set_destroy(ctx, priv);
ctx               633 net/netfilter/nft_ct.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               731 net/netfilter/nft_ct.c nft_ct_select_ops(const struct nft_ctx *ctx,
ctx               860 net/netfilter/nft_ct.c static int nft_ct_timeout_obj_init(const struct nft_ctx *ctx,
ctx               867 net/netfilter/nft_ct.c 	int l3num = ctx->family;
ctx               895 net/netfilter/nft_ct.c 	ret = nft_ct_timeout_parse_policy(&timeout->data, l4proto, ctx->net,
ctx               903 net/netfilter/nft_ct.c 	ret = nf_ct_netns_get(ctx->net, ctx->family);
ctx               916 net/netfilter/nft_ct.c static void nft_ct_timeout_obj_destroy(const struct nft_ctx *ctx,
ctx               922 net/netfilter/nft_ct.c 	nf_ct_untimeout(ctx->net, timeout);
ctx               923 net/netfilter/nft_ct.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               976 net/netfilter/nft_ct.c static int nft_ct_helper_obj_init(const struct nft_ctx *ctx,
ctx               983 net/netfilter/nft_ct.c 	int family = ctx->family;
ctx              1003 net/netfilter/nft_ct.c 		if (ctx->family == NFPROTO_IPV6)
ctx              1010 net/netfilter/nft_ct.c 		if (ctx->family == NFPROTO_IPV4)
ctx              1035 net/netfilter/nft_ct.c 	err = nf_ct_netns_get(ctx->net, ctx->family);
ctx              1049 net/netfilter/nft_ct.c static void nft_ct_helper_obj_destroy(const struct nft_ctx *ctx,
ctx              1059 net/netfilter/nft_ct.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx              1165 net/netfilter/nft_ct.c static int nft_ct_expect_obj_init(const struct nft_ctx *ctx,
ctx              1177 net/netfilter/nft_ct.c 	priv->l3num = ctx->family;
ctx              1186 net/netfilter/nft_ct.c 	return nf_ct_netns_get(ctx->net, ctx->family);
ctx              1189 net/netfilter/nft_ct.c static void nft_ct_expect_obj_destroy(const struct nft_ctx *ctx,
ctx              1192 net/netfilter/nft_ct.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx                34 net/netfilter/nft_dup_netdev.c static int nft_dup_netdev_init(const struct nft_ctx *ctx,
ctx                60 net/netfilter/nft_dup_netdev.c static int nft_dup_netdev_offload(struct nft_offload_ctx *ctx,
ctx                65 net/netfilter/nft_dup_netdev.c 	int oif = ctx->regs[priv->sreg_dev].data.data[0];
ctx                67 net/netfilter/nft_dup_netdev.c 	return nft_fwd_dup_netdev_offload(ctx, flow, FLOW_ACTION_MIRRED, oif);
ctx               128 net/netfilter/nft_dynset.c static int nft_dynset_init(const struct nft_ctx *ctx,
ctx               133 net/netfilter/nft_dynset.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx               138 net/netfilter/nft_dynset.c 	lockdep_assert_held(&ctx->net->nft.commit_mutex);
ctx               154 net/netfilter/nft_dynset.c 	set = nft_set_lookup_global(ctx->net, ctx->table,
ctx               209 net/netfilter/nft_dynset.c 		priv->expr = nft_expr_init(ctx, tb[NFTA_DYNSET_EXPR]);
ctx               240 net/netfilter/nft_dynset.c 	err = nf_tables_bind_set(ctx, set, &priv->binding);
ctx               252 net/netfilter/nft_dynset.c 		nft_expr_destroy(ctx, priv->expr);
ctx               256 net/netfilter/nft_dynset.c static void nft_dynset_deactivate(const struct nft_ctx *ctx,
ctx               262 net/netfilter/nft_dynset.c 	nf_tables_deactivate_set(ctx, priv->set, &priv->binding, phase);
ctx               265 net/netfilter/nft_dynset.c static void nft_dynset_activate(const struct nft_ctx *ctx,
ctx               273 net/netfilter/nft_dynset.c static void nft_dynset_destroy(const struct nft_ctx *ctx,
ctx               279 net/netfilter/nft_dynset.c 		nft_expr_destroy(ctx, priv->expr);
ctx               281 net/netfilter/nft_dynset.c 	nf_tables_destroy_set(ctx, priv->set);
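nft_dynset (like nft_lookup and nft_objref further down) follows the standard set-binding lifecycle: init() resolves the set against the next generation and binds to it, deactivate() drops the binding when the rule goes away, activate() retakes it if the transaction is rolled back, and destroy() finally releases the set. A minimal sketch of that state machine, with a bare counter in place of nf_tables_bind_set() and friends:

```c
#include <stdio.h>

struct set { int bindings; };

/* init: nf_tables_bind_set() analogue, taken while the rule exists */
static int  expr_init(struct set *s)       { s->bindings++; return 0; }
/* deactivate: dropped on rule removal (or transaction abort) */
static void expr_deactivate(struct set *s) { s->bindings--; }
/* activate: retaken if the removal is rolled back */
static void expr_activate(struct set *s)   { s->bindings++; }
/* destroy: the set may only go away once nothing is bound to it */
static void expr_destroy(struct set *s)
{
	if (s->bindings == 0)
		printf("set released\n");
}
```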
ctx               315 net/netfilter/nft_exthdr.c static int nft_exthdr_init(const struct nft_ctx *ctx,
ctx               359 net/netfilter/nft_exthdr.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               363 net/netfilter/nft_exthdr.c static int nft_exthdr_tcp_set_init(const struct nft_ctx *ctx,
ctx               412 net/netfilter/nft_exthdr.c static int nft_exthdr_ipv4_init(const struct nft_ctx *ctx,
ctx               417 net/netfilter/nft_exthdr.c 	int err = nft_exthdr_init(ctx, expr, tb);
ctx               505 net/netfilter/nft_exthdr.c nft_exthdr_select_ops(const struct nft_ctx *ctx,
ctx               529 net/netfilter/nft_exthdr.c 		if (ctx->family != NFPROTO_IPV6) {
ctx                28 net/netfilter/nft_fib.c int nft_fib_validate(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx                60 net/netfilter/nft_fib.c 	return nft_chain_validate_hooks(ctx->chain, hooks);
ctx                64 net/netfilter/nft_fib.c int nft_fib_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               109 net/netfilter/nft_fib.c 	err = nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               144 net/netfilter/nft_flow_offload.c static int nft_flow_offload_validate(const struct nft_ctx *ctx,
ctx               150 net/netfilter/nft_flow_offload.c 	return nft_chain_validate_hooks(ctx->chain, hook_mask);
ctx               158 net/netfilter/nft_flow_offload.c static int nft_flow_offload_init(const struct nft_ctx *ctx,
ctx               163 net/netfilter/nft_flow_offload.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx               169 net/netfilter/nft_flow_offload.c 	flowtable = nft_flowtable_lookup(ctx->table, tb[NFTA_FLOW_TABLE_NAME],
ctx               177 net/netfilter/nft_flow_offload.c 	return nf_ct_netns_get(ctx->net, ctx->family);
ctx               180 net/netfilter/nft_flow_offload.c static void nft_flow_offload_deactivate(const struct nft_ctx *ctx,
ctx               186 net/netfilter/nft_flow_offload.c 	nf_tables_deactivate_flowtable(ctx, priv->flowtable, phase);
ctx               189 net/netfilter/nft_flow_offload.c static void nft_flow_offload_activate(const struct nft_ctx *ctx,
ctx               197 net/netfilter/nft_flow_offload.c static void nft_flow_offload_destroy(const struct nft_ctx *ctx,
ctx               200 net/netfilter/nft_flow_offload.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx                44 net/netfilter/nft_fwd_netdev.c static int nft_fwd_netdev_init(const struct nft_ctx *ctx,
ctx                70 net/netfilter/nft_fwd_netdev.c static int nft_fwd_netdev_offload(struct nft_offload_ctx *ctx,
ctx                75 net/netfilter/nft_fwd_netdev.c 	int oif = ctx->regs[priv->sreg_dev].data.data[0];
ctx                77 net/netfilter/nft_fwd_netdev.c 	return nft_fwd_dup_netdev_offload(ctx, flow, FLOW_ACTION_REDIRECT, oif);
ctx               146 net/netfilter/nft_fwd_netdev.c static int nft_fwd_neigh_init(const struct nft_ctx *ctx,
ctx               196 net/netfilter/nft_fwd_netdev.c static int nft_fwd_validate(const struct nft_ctx *ctx,
ctx               200 net/netfilter/nft_fwd_netdev.c 	return nft_chain_validate_hooks(ctx->chain, (1 << NF_NETDEV_INGRESS));
ctx               224 net/netfilter/nft_fwd_netdev.c nft_fwd_select_ops(const struct nft_ctx *ctx,
ctx                69 net/netfilter/nft_hash.c static int nft_jhash_init(const struct nft_ctx *ctx,
ctx               112 net/netfilter/nft_hash.c 	       nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               116 net/netfilter/nft_hash.c static int nft_symhash_init(const struct nft_ctx *ctx,
ctx               138 net/netfilter/nft_hash.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               207 net/netfilter/nft_hash.c nft_hash_select_ops(const struct nft_ctx *ctx,
ctx                32 net/netfilter/nft_immediate.c static int nft_immediate_init(const struct nft_ctx *ctx,
ctx                44 net/netfilter/nft_immediate.c 	err = nft_data_init(ctx, &priv->data, sizeof(priv->data), &desc,
ctx                52 net/netfilter/nft_immediate.c 	err = nft_validate_register_store(ctx, priv->dreg, &priv->data,
ctx                64 net/netfilter/nft_immediate.c static void nft_immediate_activate(const struct nft_ctx *ctx,
ctx                72 net/netfilter/nft_immediate.c static void nft_immediate_deactivate(const struct nft_ctx *ctx,
ctx                98 net/netfilter/nft_immediate.c static int nft_immediate_validate(const struct nft_ctx *ctx,
ctx               103 net/netfilter/nft_immediate.c 	struct nft_ctx *pctx = (struct nft_ctx *)ctx;
ctx               116 net/netfilter/nft_immediate.c 		err = nft_chain_validate(ctx, data->verdict.chain);
ctx               128 net/netfilter/nft_immediate.c static int nft_immediate_offload_verdict(struct nft_offload_ctx *ctx,
ctx               135 net/netfilter/nft_immediate.c 	entry = &flow->rule->action.entries[ctx->num_actions++];
ctx               152 net/netfilter/nft_immediate.c static int nft_immediate_offload(struct nft_offload_ctx *ctx,
ctx               159 net/netfilter/nft_immediate.c 		return nft_immediate_offload_verdict(ctx, flow, priv);
ctx               161 net/netfilter/nft_immediate.c 	memcpy(&ctx->regs[priv->dreg].data, &priv->data, sizeof(priv->data));
ctx               147 net/netfilter/nft_limit.c static int nft_limit_pkts_init(const struct nft_ctx *ctx,
ctx               189 net/netfilter/nft_limit.c static int nft_limit_bytes_init(const struct nft_ctx *ctx,
ctx               215 net/netfilter/nft_limit.c nft_limit_select_ops(const struct nft_ctx *ctx,
ctx               249 net/netfilter/nft_limit.c static int nft_limit_obj_pkts_init(const struct nft_ctx *ctx,
ctx               293 net/netfilter/nft_limit.c static int nft_limit_obj_bytes_init(const struct nft_ctx *ctx,
ctx               321 net/netfilter/nft_limit.c nft_limit_obj_select_ops(const struct nft_ctx *ctx,
ctx               131 net/netfilter/nft_log.c static int nft_log_init(const struct nft_ctx *ctx,
ctx               199 net/netfilter/nft_log.c 	err = nf_logger_find_get(ctx->family, li->type);
ctx               211 net/netfilter/nft_log.c static void nft_log_destroy(const struct nft_ctx *ctx,
ctx               223 net/netfilter/nft_log.c 	nf_logger_put(ctx->family, li->type);
ctx                57 net/netfilter/nft_lookup.c static int nft_lookup_init(const struct nft_ctx *ctx,
ctx                62 net/netfilter/nft_lookup.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx                71 net/netfilter/nft_lookup.c 	set = nft_set_lookup_global(ctx->net, ctx->table, tb[NFTA_LOOKUP_SET],
ctx               101 net/netfilter/nft_lookup.c 		err = nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               110 net/netfilter/nft_lookup.c 	err = nf_tables_bind_set(ctx, set, &priv->binding);
ctx               118 net/netfilter/nft_lookup.c static void nft_lookup_deactivate(const struct nft_ctx *ctx,
ctx               124 net/netfilter/nft_lookup.c 	nf_tables_deactivate_set(ctx, priv->set, &priv->binding, phase);
ctx               127 net/netfilter/nft_lookup.c static void nft_lookup_activate(const struct nft_ctx *ctx,
ctx               135 net/netfilter/nft_lookup.c static void nft_lookup_destroy(const struct nft_ctx *ctx,
ctx               140 net/netfilter/nft_lookup.c 	nf_tables_destroy_set(ctx, priv->set);
ctx               163 net/netfilter/nft_lookup.c static int nft_lookup_validate_setelem(const struct nft_ctx *ctx,
ctx               169 net/netfilter/nft_lookup.c 	struct nft_ctx *pctx = (struct nft_ctx *)ctx;
ctx               182 net/netfilter/nft_lookup.c 		err = nft_chain_validate(ctx, data->verdict.chain);
ctx               194 net/netfilter/nft_lookup.c static int nft_lookup_validate(const struct nft_ctx *ctx,
ctx               205 net/netfilter/nft_lookup.c 	iter.genmask	= nft_genmask_next(ctx->net);
ctx               211 net/netfilter/nft_lookup.c 	priv->set->ops->walk(ctx, priv->set, &iter);
ctx                28 net/netfilter/nft_masq.c static int nft_masq_validate(const struct nft_ctx *ctx,
ctx                34 net/netfilter/nft_masq.c 	err = nft_chain_validate_dependency(ctx->chain, NFT_CHAIN_T_NAT);
ctx                38 net/netfilter/nft_masq.c 	return nft_chain_validate_hooks(ctx->chain,
ctx                42 net/netfilter/nft_masq.c static int nft_masq_init(const struct nft_ctx *ctx,
ctx                77 net/netfilter/nft_masq.c 	return nf_ct_netns_get(ctx->net, ctx->family);
ctx               122 net/netfilter/nft_masq.c nft_masq_ipv4_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               124 net/netfilter/nft_masq.c 	nf_ct_netns_put(ctx->net, NFPROTO_IPV4);
ctx               168 net/netfilter/nft_masq.c nft_masq_ipv6_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               170 net/netfilter/nft_masq.c 	nf_ct_netns_put(ctx->net, NFPROTO_IPV6);
ctx               223 net/netfilter/nft_masq.c nft_masq_inet_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               225 net/netfilter/nft_masq.c 	nf_ct_netns_put(ctx->net, NFPROTO_INET);
ctx               317 net/netfilter/nft_meta.c int nft_meta_get_init(const struct nft_ctx *ctx,
ctx               384 net/netfilter/nft_meta.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               389 net/netfilter/nft_meta.c static int nft_meta_get_validate(const struct nft_ctx *ctx,
ctx               400 net/netfilter/nft_meta.c 	switch (ctx->family) {
ctx               415 net/netfilter/nft_meta.c 	return nft_chain_validate_hooks(ctx->chain, hooks);
ctx               421 net/netfilter/nft_meta.c int nft_meta_set_validate(const struct nft_ctx *ctx,
ctx               431 net/netfilter/nft_meta.c 	switch (ctx->family) {
ctx               447 net/netfilter/nft_meta.c 	return nft_chain_validate_hooks(ctx->chain, hooks);
ctx               451 net/netfilter/nft_meta.c int nft_meta_set_init(const struct nft_ctx *ctx,
ctx               522 net/netfilter/nft_meta.c void nft_meta_set_destroy(const struct nft_ctx *ctx,
ctx               532 net/netfilter/nft_meta.c static int nft_meta_get_offload(struct nft_offload_ctx *ctx,
ctx               537 net/netfilter/nft_meta.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               543 net/netfilter/nft_meta.c 		nft_offload_set_dependency(ctx, NFT_OFFLOAD_DEP_NETWORK);
ctx               548 net/netfilter/nft_meta.c 		nft_offload_set_dependency(ctx, NFT_OFFLOAD_DEP_TRANSPORT);
ctx               578 net/netfilter/nft_meta.c nft_meta_select_ops(const struct nft_ctx *ctx,
ctx               588 net/netfilter/nft_meta.c 	if (ctx->family == NFPROTO_BRIDGE)
ctx               611 net/netfilter/nft_meta.c 	char *ctx;
ctx               623 net/netfilter/nft_meta.c 	err = security_secctx_to_secid(priv->ctx, strlen(priv->ctx), &tmp_secid);
ctx               647 net/netfilter/nft_meta.c static int nft_secmark_obj_init(const struct nft_ctx *ctx,
ctx               657 net/netfilter/nft_meta.c 	priv->ctx = nla_strdup(tb[NFTA_SECMARK_CTX], GFP_KERNEL);
ctx               658 net/netfilter/nft_meta.c 	if (!priv->ctx)
ctx               663 net/netfilter/nft_meta.c 		kfree(priv->ctx);
ctx               678 net/netfilter/nft_meta.c 	if (nla_put_string(skb, NFTA_SECMARK_CTX, priv->ctx))
ctx               690 net/netfilter/nft_meta.c static void nft_secmark_obj_destroy(const struct nft_ctx *ctx, struct nft_object *obj)
ctx               696 net/netfilter/nft_meta.c 	kfree(priv->ctx);
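The secmark object lines show the ctx string's lifetime: nla_strdup() duplicates the security context from the netlink attribute, security_secctx_to_secid() maps it on each update, and the string is freed both on the init error path and in destroy. A sketch of that handling; map_secctx() is a hypothetical stand-in for the LSM call.

```c
#include <stdlib.h>
#include <string.h>
#include <errno.h>

struct secmark { char *ctx; unsigned int secid; };

/* Stand-in for security_secctx_to_secid(). */
static int map_secctx(const char *ctx, size_t len, unsigned int *secid)
{
	(void)ctx; (void)len;
	*secid = 42;			/* placeholder mapping */
	return 0;
}

static int secmark_init(struct secmark *priv, const char *attr)
{
	int err;

	priv->ctx = strdup(attr);	/* nla_strdup() analogue */
	if (!priv->ctx)
		return -ENOMEM;

	err = map_secctx(priv->ctx, strlen(priv->ctx), &priv->secid);
	if (err) {
		free(priv->ctx);	/* mirror the kfree on the error path */
		priv->ctx = NULL;
	}
	return err;
}

static void secmark_destroy(struct secmark *priv)
{
	free(priv->ctx);
}
```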
ctx                84 net/netfilter/nft_nat.c static int nft_nat_validate(const struct nft_ctx *ctx,
ctx                91 net/netfilter/nft_nat.c 	err = nft_chain_validate_dependency(ctx->chain, NFT_CHAIN_T_NAT);
ctx                97 net/netfilter/nft_nat.c 		err = nft_chain_validate_hooks(ctx->chain,
ctx               102 net/netfilter/nft_nat.c 		err = nft_chain_validate_hooks(ctx->chain,
ctx               111 net/netfilter/nft_nat.c static int nft_nat_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               139 net/netfilter/nft_nat.c 	if (ctx->family != NFPROTO_INET && ctx->family != family)
ctx               202 net/netfilter/nft_nat.c 	return nf_ct_netns_get(ctx->net, family);
ctx               251 net/netfilter/nft_nat.c nft_nat_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               255 net/netfilter/nft_nat.c 	nf_ct_netns_put(ctx->net, priv->family);
ctx                53 net/netfilter/nft_numgen.c static int nft_ng_inc_init(const struct nft_ctx *ctx,
ctx                72 net/netfilter/nft_numgen.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               125 net/netfilter/nft_numgen.c static int nft_ng_random_init(const struct nft_ctx *ctx,
ctx               145 net/netfilter/nft_numgen.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               175 net/netfilter/nft_numgen.c nft_ng_select_ops(const struct nft_ctx *ctx, const struct nlattr * const tb[])
ctx                25 net/netfilter/nft_objref.c static int nft_objref_init(const struct nft_ctx *ctx,
ctx                30 net/netfilter/nft_objref.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx                38 net/netfilter/nft_objref.c 	obj = nft_obj_lookup(ctx->net, ctx->table,
ctx                65 net/netfilter/nft_objref.c static void nft_objref_deactivate(const struct nft_ctx *ctx,
ctx                77 net/netfilter/nft_objref.c static void nft_objref_activate(const struct nft_ctx *ctx,
ctx               122 net/netfilter/nft_objref.c static int nft_objref_map_init(const struct nft_ctx *ctx,
ctx               127 net/netfilter/nft_objref.c 	u8 genmask = nft_genmask_next(ctx->net);
ctx               131 net/netfilter/nft_objref.c 	set = nft_set_lookup_global(ctx->net, ctx->table,
ctx               147 net/netfilter/nft_objref.c 	err = nf_tables_bind_set(ctx, set, &priv->binding);
ctx               169 net/netfilter/nft_objref.c static void nft_objref_map_deactivate(const struct nft_ctx *ctx,
ctx               175 net/netfilter/nft_objref.c 	nf_tables_deactivate_set(ctx, priv->set, &priv->binding, phase);
ctx               178 net/netfilter/nft_objref.c static void nft_objref_map_activate(const struct nft_ctx *ctx,
ctx               186 net/netfilter/nft_objref.c static void nft_objref_map_destroy(const struct nft_ctx *ctx,
ctx               191 net/netfilter/nft_objref.c 	nf_tables_destroy_set(ctx, priv->set);
ctx               207 net/netfilter/nft_objref.c nft_objref_select_ops(const struct nft_ctx *ctx,
ctx                55 net/netfilter/nft_osf.c static int nft_osf_init(const struct nft_ctx *ctx,
ctx                82 net/netfilter/nft_osf.c 	err = nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               109 net/netfilter/nft_osf.c static int nft_osf_validate(const struct nft_ctx *ctx,
ctx               113 net/netfilter/nft_osf.c 	return nft_chain_validate_hooks(ctx->chain, (1 << NF_INET_LOCAL_IN) |
ctx               127 net/netfilter/nft_payload.c static int nft_payload_init(const struct nft_ctx *ctx,
ctx               138 net/netfilter/nft_payload.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               157 net/netfilter/nft_payload.c static int nft_payload_offload_ll(struct nft_offload_ctx *ctx,
ctx               161 net/netfilter/nft_payload.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               185 net/netfilter/nft_payload.c static int nft_payload_offload_ip(struct nft_offload_ctx *ctx,
ctx               189 net/netfilter/nft_payload.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               212 net/netfilter/nft_payload.c 		nft_offload_set_dependency(ctx, NFT_OFFLOAD_DEP_TRANSPORT);
ctx               221 net/netfilter/nft_payload.c static int nft_payload_offload_ip6(struct nft_offload_ctx *ctx,
ctx               225 net/netfilter/nft_payload.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               248 net/netfilter/nft_payload.c 		nft_offload_set_dependency(ctx, NFT_OFFLOAD_DEP_TRANSPORT);
ctx               257 net/netfilter/nft_payload.c static int nft_payload_offload_nh(struct nft_offload_ctx *ctx,
ctx               263 net/netfilter/nft_payload.c 	switch (ctx->dep.l3num) {
ctx               265 net/netfilter/nft_payload.c 		err = nft_payload_offload_ip(ctx, flow, priv);
ctx               268 net/netfilter/nft_payload.c 		err = nft_payload_offload_ip6(ctx, flow, priv);
ctx               277 net/netfilter/nft_payload.c static int nft_payload_offload_tcp(struct nft_offload_ctx *ctx,
ctx               281 net/netfilter/nft_payload.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               305 net/netfilter/nft_payload.c static int nft_payload_offload_udp(struct nft_offload_ctx *ctx,
ctx               309 net/netfilter/nft_payload.c 	struct nft_offload_reg *reg = &ctx->regs[priv->dreg];
ctx               333 net/netfilter/nft_payload.c static int nft_payload_offload_th(struct nft_offload_ctx *ctx,
ctx               339 net/netfilter/nft_payload.c 	switch (ctx->dep.protonum) {
ctx               341 net/netfilter/nft_payload.c 		err = nft_payload_offload_tcp(ctx, flow, priv);
ctx               344 net/netfilter/nft_payload.c 		err = nft_payload_offload_udp(ctx, flow, priv);
ctx               353 net/netfilter/nft_payload.c static int nft_payload_offload(struct nft_offload_ctx *ctx,
ctx               362 net/netfilter/nft_payload.c 		err = nft_payload_offload_ll(ctx, flow, priv);
ctx               365 net/netfilter/nft_payload.c 		err = nft_payload_offload_nh(ctx, flow, priv);
ctx               368 net/netfilter/nft_payload.c 		err = nft_payload_offload_th(ctx, flow, priv);
ctx               547 net/netfilter/nft_payload.c static int nft_payload_set_init(const struct nft_ctx *ctx,
ctx               613 net/netfilter/nft_payload.c nft_payload_select_ops(const struct nft_ctx *ctx,
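nft_payload_offload() above dispatches twice: first on the payload base (link, network, or transport header), then on the dependency the offload ctx has recorded, ctx->dep.l3num for the network layer and ctx->dep.protonum for the transport layer. A simplified rendering of that dispatch; the protocol constants are the real values, everything else is reduced.

```c
#include <stdint.h>
#include <errno.h>

#define ETH_P_IP	0x0800
#define ETH_P_IPV6	0x86DD
#define IPPROTO_TCP	6
#define IPPROTO_UDP	17

enum base { BASE_LL, BASE_NH, BASE_TH };

struct off_ctx { uint16_t l3num; uint8_t protonum; };

static int offload_nh(const struct off_ctx *c)
{
	switch (c->l3num) {
	case ETH_P_IP:   return 0;	/* fill IPv4 src/dst match */
	case ETH_P_IPV6: return 0;	/* fill IPv6 src/dst match */
	default:         return -EOPNOTSUPP;
	}
}

static int offload_th(const struct off_ctx *c)
{
	switch (c->protonum) {
	case IPPROTO_TCP: return 0;	/* fill TCP port match */
	case IPPROTO_UDP: return 0;	/* fill UDP port match */
	default:          return -EOPNOTSUPP;
	}
}

static int payload_offload(enum base b, const struct off_ctx *c)
{
	switch (b) {
	case BASE_LL: return 0;		/* link layer handled directly */
	case BASE_NH: return offload_nh(c);
	case BASE_TH: return offload_th(c);
	}
	return -EOPNOTSUPP;
}
```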
ctx                78 net/netfilter/nft_queue.c static int nft_queue_init(const struct nft_ctx *ctx,
ctx               107 net/netfilter/nft_queue.c static int nft_queue_sreg_init(const struct nft_ctx *ctx,
ctx               178 net/netfilter/nft_queue.c nft_queue_select_ops(const struct nft_ctx *ctx,
ctx               100 net/netfilter/nft_quota.c static int nft_quota_obj_init(const struct nft_ctx *ctx,
ctx               192 net/netfilter/nft_quota.c static int nft_quota_init(const struct nft_ctx *ctx,
ctx                50 net/netfilter/nft_range.c static int nft_range_init(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx                28 net/netfilter/nft_redir.c static int nft_redir_validate(const struct nft_ctx *ctx,
ctx                34 net/netfilter/nft_redir.c 	err = nft_chain_validate_dependency(ctx->chain, NFT_CHAIN_T_NAT);
ctx                38 net/netfilter/nft_redir.c 	return nft_chain_validate_hooks(ctx->chain,
ctx                43 net/netfilter/nft_redir.c static int nft_redir_init(const struct nft_ctx *ctx,
ctx                79 net/netfilter/nft_redir.c 	return nf_ct_netns_get(ctx->net, ctx->family);
ctx               127 net/netfilter/nft_redir.c nft_redir_ipv4_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               129 net/netfilter/nft_redir.c 	nf_ct_netns_put(ctx->net, NFPROTO_IPV4);
ctx               176 net/netfilter/nft_redir.c nft_redir_ipv6_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               178 net/netfilter/nft_redir.c 	nf_ct_netns_put(ctx->net, NFPROTO_IPV6);
ctx               218 net/netfilter/nft_redir.c nft_redir_inet_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
ctx               220 net/netfilter/nft_redir.c 	nf_ct_netns_put(ctx->net, NFPROTO_INET);
ctx                26 net/netfilter/nft_reject.c int nft_reject_validate(const struct nft_ctx *ctx,
ctx                30 net/netfilter/nft_reject.c 	return nft_chain_validate_hooks(ctx->chain,
ctx                37 net/netfilter/nft_reject.c int nft_reject_init(const struct nft_ctx *ctx,
ctx                61 net/netfilter/nft_reject_inet.c static int nft_reject_inet_init(const struct nft_ctx *ctx,
ctx               110 net/netfilter/nft_rt.c static int nft_rt_get_init(const struct nft_ctx *ctx,
ctx               145 net/netfilter/nft_rt.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               164 net/netfilter/nft_rt.c static int nft_rt_validate(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               185 net/netfilter/nft_rt.c 	return nft_chain_validate_hooks(ctx->chain, hooks);
ctx               214 net/netfilter/nft_set_bitmap.c static void nft_bitmap_walk(const struct nft_ctx *ctx,
ctx               230 net/netfilter/nft_set_bitmap.c 		iter->err = iter->fn(ctx, set, iter, &elem);
ctx               255 net/netfilter/nft_set_hash.c static void nft_rhash_walk(const struct nft_ctx *ctx, struct nft_set *set,
ctx               285 net/netfilter/nft_set_hash.c 		iter->err = iter->fn(ctx, set, iter, &elem);
ctx               569 net/netfilter/nft_set_hash.c static void nft_hash_walk(const struct nft_ctx *ctx, struct nft_set *set,
ctx               586 net/netfilter/nft_set_hash.c 			iter->err = iter->fn(ctx, set, iter, &elem);
ctx               352 net/netfilter/nft_set_rbtree.c static void nft_rbtree_walk(const struct nft_ctx *ctx,
ctx               374 net/netfilter/nft_set_rbtree.c 		iter->err = iter->fn(ctx, set, iter, &elem);
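All four set backends above (bitmap, rhash, hash, rbtree) implement the same walk contract: invoke iter->fn() once per element, record the callback's return value in iter->err, and stop at the first nonzero result. A simplified rendering of that contract with stand-in types:

```c
struct set;
struct iter;

typedef int (*iter_fn_t)(const struct set *s, struct iter *it, int elem);

struct iter {
	iter_fn_t fn;	/* per-element callback, e.g. a validator */
	int err;	/* first nonzero return aborts the walk */
};

struct set {
	int *elems;
	int nelems;
};

static void set_walk(const struct set *s, struct iter *it)
{
	for (int i = 0; i < s->nelems; i++) {
		it->err = it->fn(s, it, s->elems[i]);
		if (it->err)
			return;		/* first failing element stops us */
	}
}
```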
ctx                76 net/netfilter/nft_socket.c static int nft_socket_init(const struct nft_ctx *ctx,
ctx                86 net/netfilter/nft_socket.c 	switch(ctx->family) {
ctx               110 net/netfilter/nft_socket.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               152 net/netfilter/nft_synproxy.c static int nft_synproxy_do_init(const struct nft_ctx *ctx,
ctx               156 net/netfilter/nft_synproxy.c 	struct synproxy_net *snet = synproxy_pernet(ctx->net);
ctx               171 net/netfilter/nft_synproxy.c 	err = nf_ct_netns_get(ctx->net, ctx->family);
ctx               175 net/netfilter/nft_synproxy.c 	switch (ctx->family) {
ctx               177 net/netfilter/nft_synproxy.c 		err = nf_synproxy_ipv4_init(snet, ctx->net);
ctx               183 net/netfilter/nft_synproxy.c 		err = nf_synproxy_ipv6_init(snet, ctx->net);
ctx               190 net/netfilter/nft_synproxy.c 		err = nf_synproxy_ipv4_init(snet, ctx->net);
ctx               193 net/netfilter/nft_synproxy.c 		err = nf_synproxy_ipv6_init(snet, ctx->net);
ctx               202 net/netfilter/nft_synproxy.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               206 net/netfilter/nft_synproxy.c static void nft_synproxy_do_destroy(const struct nft_ctx *ctx)
ctx               208 net/netfilter/nft_synproxy.c 	struct synproxy_net *snet = synproxy_pernet(ctx->net);
ctx               210 net/netfilter/nft_synproxy.c 	switch (ctx->family) {
ctx               212 net/netfilter/nft_synproxy.c 		nf_synproxy_ipv4_fini(snet, ctx->net);
ctx               216 net/netfilter/nft_synproxy.c 		nf_synproxy_ipv6_fini(snet, ctx->net);
ctx               221 net/netfilter/nft_synproxy.c 		nf_synproxy_ipv4_fini(snet, ctx->net);
ctx               222 net/netfilter/nft_synproxy.c 		nf_synproxy_ipv6_fini(snet, ctx->net);
ctx               225 net/netfilter/nft_synproxy.c 	nf_ct_netns_put(ctx->net, ctx->family);
ctx               250 net/netfilter/nft_synproxy.c static int nft_synproxy_validate(const struct nft_ctx *ctx,
ctx               254 net/netfilter/nft_synproxy.c 	return nft_chain_validate_hooks(ctx->chain, (1 << NF_INET_LOCAL_IN) |
ctx               258 net/netfilter/nft_synproxy.c static int nft_synproxy_init(const struct nft_ctx *ctx,
ctx               264 net/netfilter/nft_synproxy.c 	return nft_synproxy_do_init(ctx, tb, priv);
ctx               267 net/netfilter/nft_synproxy.c static void nft_synproxy_destroy(const struct nft_ctx *ctx,
ctx               270 net/netfilter/nft_synproxy.c 	nft_synproxy_do_destroy(ctx);
ctx               299 net/netfilter/nft_synproxy.c static int nft_synproxy_obj_init(const struct nft_ctx *ctx,
ctx               305 net/netfilter/nft_synproxy.c 	return nft_synproxy_do_init(ctx, tb, priv);
ctx               308 net/netfilter/nft_synproxy.c static void nft_synproxy_obj_destroy(const struct nft_ctx *ctx,
ctx               311 net/netfilter/nft_synproxy.c 	nft_synproxy_do_destroy(ctx);
ctx               184 net/netfilter/nft_tproxy.c static int nft_tproxy_init(const struct nft_ctx *ctx,
ctx               198 net/netfilter/nft_tproxy.c 	switch (ctx->family) {
ctx               222 net/netfilter/nft_tproxy.c 		err = nf_defrag_ipv4_enable(ctx->net);
ctx               229 net/netfilter/nft_tproxy.c 		err = nf_defrag_ipv6_enable(ctx->net);
ctx               236 net/netfilter/nft_tproxy.c 		err = nf_defrag_ipv4_enable(ctx->net);
ctx               240 net/netfilter/nft_tproxy.c 		err = nf_defrag_ipv6_enable(ctx->net);
ctx                72 net/netfilter/nft_tunnel.c static int nft_tunnel_get_init(const struct nft_ctx *ctx,
ctx               105 net/netfilter/nft_tunnel.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               162 net/netfilter/nft_tunnel.c static int nft_tunnel_obj_ip_init(const struct nft_ctx *ctx,
ctx               191 net/netfilter/nft_tunnel.c static int nft_tunnel_obj_ip6_init(const struct nft_ctx *ctx,
ctx               309 net/netfilter/nft_tunnel.c static int nft_tunnel_obj_opts_init(const struct nft_ctx *ctx,
ctx               347 net/netfilter/nft_tunnel.c static int nft_tunnel_obj_init(const struct nft_ctx *ctx,
ctx               365 net/netfilter/nft_tunnel.c 		err = nft_tunnel_obj_ip_init(ctx, tb[NFTA_TUNNEL_KEY_IP], &info);
ctx               369 net/netfilter/nft_tunnel.c 		err = nft_tunnel_obj_ip6_init(ctx, tb[NFTA_TUNNEL_KEY_IP6], &info);
ctx               405 net/netfilter/nft_tunnel.c 		err = nft_tunnel_obj_opts_init(ctx, tb[NFTA_TUNNEL_KEY_OPTS],
ctx               557 net/netfilter/nft_tunnel.c static void nft_tunnel_obj_destroy(const struct nft_ctx *ctx,
ctx                32 net/netfilter/nft_xfrm.c static int nft_xfrm_get_init(const struct nft_ctx *ctx,
ctx                44 net/netfilter/nft_xfrm.c 	switch (ctx->family) {
ctx                90 net/netfilter/nft_xfrm.c 	return nft_validate_register_store(ctx, priv->dreg, NULL,
ctx               231 net/netfilter/nft_xfrm.c static int nft_xfrm_validate(const struct nft_ctx *ctx, const struct nft_expr *expr,
ctx               253 net/netfilter/nft_xfrm.c 	return nft_chain_validate_hooks(ctx->chain, hooks);
ctx              1460 net/nfc/netlink.c 	struct se_io_ctx *ctx = context;
ctx              1466 net/nfc/netlink.c 		kfree(ctx);
ctx              1475 net/nfc/netlink.c 	if (nla_put_u32(msg, NFC_ATTR_DEVICE_INDEX, ctx->dev_idx) ||
ctx              1476 net/nfc/netlink.c 	    nla_put_u32(msg, NFC_ATTR_SE_INDEX, ctx->se_idx) ||
ctx              1484 net/nfc/netlink.c 	kfree(ctx);
ctx              1491 net/nfc/netlink.c 	kfree(ctx);
ctx              1499 net/nfc/netlink.c 	struct se_io_ctx *ctx;
ctx              1527 net/nfc/netlink.c 	ctx = kzalloc(sizeof(struct se_io_ctx), GFP_KERNEL);
ctx              1528 net/nfc/netlink.c 	if (!ctx)
ctx              1531 net/nfc/netlink.c 	ctx->dev_idx = dev_idx;
ctx              1532 net/nfc/netlink.c 	ctx->se_idx = se_idx;
ctx              1534 net/nfc/netlink.c 	return nfc_se_io(dev, se_idx, apdu, apdu_len, se_io_cb, ctx);
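The net/nfc/netlink.c lines show a callback-context lifetime rule: the caller kzalloc()s the se_io ctx, fills the device and SE indices, and hands it to nfc_se_io(); from then on the completion callback owns it and kfree()s it on every exit path. A self-contained sketch of that ownership handoff; start_io() is a hypothetical stand-in for nfc_se_io() and completes synchronously only to keep the example runnable.

```c
#include <stdlib.h>
#include <errno.h>
#include <stdio.h>

struct se_io_ctx { unsigned int dev_idx, se_idx; };

typedef void (*se_io_cb_t)(void *context, int err);

/* Completion callback owns the context: every exit path frees it,
 * matching the kfree(ctx) sites in the excerpt above. */
static void se_io_cb(void *context, int err)
{
	struct se_io_ctx *ctx = context;

	printf("dev %u se %u -> %d\n", ctx->dev_idx, ctx->se_idx, err);
	free(ctx);
}

/* Stand-in for nfc_se_io(): must invoke cb exactly once. */
static int start_io(se_io_cb_t cb, void *context)
{
	cb(context, 0);
	return 0;
}

static int se_io(unsigned int dev_idx, unsigned int se_idx)
{
	struct se_io_ctx *ctx = calloc(1, sizeof(*ctx));

	if (!ctx)
		return -ENOMEM;
	ctx->dev_idx = dev_idx;
	ctx->se_idx  = se_idx;
	return start_io(se_io_cb, ctx);	/* ownership passes to the cb */
}
```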
ctx                69 net/sched/sch_codel.c static struct sk_buff *dequeue_func(struct codel_vars *vars, void *ctx)
ctx                71 net/sched/sch_codel.c 	struct Qdisc *sch = ctx;
ctx                81 net/sched/sch_codel.c static void drop_func(struct sk_buff *skb, void *ctx)
ctx                83 net/sched/sch_codel.c 	struct Qdisc *sch = ctx;
ctx               256 net/sched/sch_fq_codel.c static struct sk_buff *dequeue_func(struct codel_vars *vars, void *ctx)
ctx               258 net/sched/sch_fq_codel.c 	struct Qdisc *sch = ctx;
ctx               274 net/sched/sch_fq_codel.c static void drop_func(struct sk_buff *skb, void *ctx)
ctx               276 net/sched/sch_fq_codel.c 	struct Qdisc *sch = ctx;
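The codel and fq_codel glue above is the classic opaque-context callback: the generic algorithm takes a void *ctx, and the embedding qdisc's callback recovers its own type on the first line. A minimal runnable version of that shape:

```c
#include <stdio.h>

struct qdisc { const char *name; };

typedef void (*drop_fn_t)(int pkt, void *ctx);

/* Generic engine: knows nothing about struct qdisc. */
static void engine_run(drop_fn_t drop, void *ctx)
{
	drop(123, ctx);
}

/* Qdisc-side callback: first line recovers the typed pointer,
 * exactly as drop_func() does with struct Qdisc above. */
static void my_drop(int pkt, void *ctx)
{
	struct qdisc *sch = ctx;

	printf("%s dropped packet %d\n", sch->name, pkt);
}

int main(void)
{
	struct qdisc sch = { .name = "codel" };

	engine_run(my_drop, &sch);
	return 0;
}
```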
ctx               789 net/sctp/outqueue.c static void sctp_outq_select_transport(struct sctp_flush_ctx *ctx,
ctx               805 net/sctp/outqueue.c 			if (ctx->transport && sctp_cmp_addr_exact(&chunk->dest,
ctx               806 net/sctp/outqueue.c 							&ctx->transport->ipaddr))
ctx               807 net/sctp/outqueue.c 				new_transport = ctx->transport;
ctx               809 net/sctp/outqueue.c 				new_transport = sctp_assoc_lookup_paddr(ctx->asoc,
ctx               817 net/sctp/outqueue.c 			new_transport = ctx->asoc->peer.active_path;
ctx               842 net/sctp/outqueue.c 				new_transport = ctx->asoc->peer.active_path;
ctx               850 net/sctp/outqueue.c 	if (new_transport != ctx->transport) {
ctx               851 net/sctp/outqueue.c 		ctx->transport = new_transport;
ctx               852 net/sctp/outqueue.c 		ctx->packet = &ctx->transport->packet;
ctx               854 net/sctp/outqueue.c 		if (list_empty(&ctx->transport->send_ready))
ctx               855 net/sctp/outqueue.c 			list_add_tail(&ctx->transport->send_ready,
ctx               856 net/sctp/outqueue.c 				      &ctx->transport_list);
ctx               858 net/sctp/outqueue.c 		sctp_packet_config(ctx->packet,
ctx               859 net/sctp/outqueue.c 				   ctx->asoc->peer.i.init_tag,
ctx               860 net/sctp/outqueue.c 				   ctx->asoc->peer.ecn_capable);
ctx               864 net/sctp/outqueue.c 		sctp_transport_burst_limited(ctx->transport);
ctx               868 net/sctp/outqueue.c static void sctp_outq_flush_ctrl(struct sctp_flush_ctx *ctx)
ctx               874 net/sctp/outqueue.c 	list_for_each_entry_safe(chunk, tmp, &ctx->q->control_chunk_list, list) {
ctx               883 net/sctp/outqueue.c 		if (ctx->asoc->src_out_of_asoc_ok &&
ctx               892 net/sctp/outqueue.c 		sctp_outq_select_transport(ctx, chunk);
ctx               903 net/sctp/outqueue.c 			error = sctp_packet_singleton(ctx->transport, chunk,
ctx               904 net/sctp/outqueue.c 						      ctx->gfp);
ctx               906 net/sctp/outqueue.c 				ctx->asoc->base.sk->sk_err = -error;
ctx               913 net/sctp/outqueue.c 				ctx->packet->vtag = ctx->asoc->c.my_vtag;
ctx               939 net/sctp/outqueue.c 			status = sctp_packet_transmit_chunk(ctx->packet, chunk,
ctx               940 net/sctp/outqueue.c 							    one_packet, ctx->gfp);
ctx               943 net/sctp/outqueue.c 				list_add(&chunk->list, &ctx->q->control_chunk_list);
ctx               947 net/sctp/outqueue.c 			ctx->asoc->stats.octrlchunks++;
ctx               954 net/sctp/outqueue.c 				sctp_transport_reset_t3_rtx(ctx->transport);
ctx               955 net/sctp/outqueue.c 				ctx->transport->last_time_sent = jiffies;
ctx               958 net/sctp/outqueue.c 			if (chunk == ctx->asoc->strreset_chunk)
ctx               959 net/sctp/outqueue.c 				sctp_transport_reset_reconf_timer(ctx->transport);
ctx               971 net/sctp/outqueue.c static bool sctp_outq_flush_rtx(struct sctp_flush_ctx *ctx,
ctx               976 net/sctp/outqueue.c 	if (ctx->asoc->peer.retran_path->state == SCTP_UNCONFIRMED)
ctx               979 net/sctp/outqueue.c 	if (ctx->transport != ctx->asoc->peer.retran_path) {
ctx               981 net/sctp/outqueue.c 		ctx->transport = ctx->asoc->peer.retran_path;
ctx               982 net/sctp/outqueue.c 		ctx->packet = &ctx->transport->packet;
ctx               984 net/sctp/outqueue.c 		if (list_empty(&ctx->transport->send_ready))
ctx               985 net/sctp/outqueue.c 			list_add_tail(&ctx->transport->send_ready,
ctx               986 net/sctp/outqueue.c 				      &ctx->transport_list);
ctx               988 net/sctp/outqueue.c 		sctp_packet_config(ctx->packet, ctx->asoc->peer.i.init_tag,
ctx               989 net/sctp/outqueue.c 				   ctx->asoc->peer.ecn_capable);
ctx               992 net/sctp/outqueue.c 	error = __sctp_outq_flush_rtx(ctx->q, ctx->packet, rtx_timeout,
ctx               993 net/sctp/outqueue.c 				      &start_timer, ctx->gfp);
ctx               995 net/sctp/outqueue.c 		ctx->asoc->base.sk->sk_err = -error;
ctx               998 net/sctp/outqueue.c 		sctp_transport_reset_t3_rtx(ctx->transport);
ctx               999 net/sctp/outqueue.c 		ctx->transport->last_time_sent = jiffies;
ctx              1005 net/sctp/outqueue.c 	if (ctx->packet->has_cookie_echo)
ctx              1011 net/sctp/outqueue.c 	if (!list_empty(&ctx->q->retransmit))
ctx              1017 net/sctp/outqueue.c static void sctp_outq_flush_data(struct sctp_flush_ctx *ctx,
ctx              1024 net/sctp/outqueue.c 	switch (ctx->asoc->state) {
ctx              1029 net/sctp/outqueue.c 		if (!ctx->packet || !ctx->packet->has_cookie_echo)
ctx              1051 net/sctp/outqueue.c 	if (!list_empty(&ctx->q->retransmit) &&
ctx              1052 net/sctp/outqueue.c 	    !sctp_outq_flush_rtx(ctx, rtx_timeout))
ctx              1060 net/sctp/outqueue.c 	if (ctx->transport)
ctx              1061 net/sctp/outqueue.c 		sctp_transport_burst_limited(ctx->transport);
ctx              1064 net/sctp/outqueue.c 	while ((chunk = sctp_outq_dequeue_data(ctx->q)) != NULL) {
ctx              1066 net/sctp/outqueue.c 		__u8 stream_state = SCTP_SO(&ctx->asoc->stream, sid)->state;
ctx              1070 net/sctp/outqueue.c 			sctp_sched_dequeue_done(ctx->q, chunk);
ctx              1077 net/sctp/outqueue.c 			sctp_outq_head_data(ctx->q, chunk);
ctx              1081 net/sctp/outqueue.c 		sctp_outq_select_transport(ctx, chunk);
ctx              1084 net/sctp/outqueue.c 			 __func__, ctx->q, chunk, chunk && chunk->chunk_hdr ?
ctx              1091 net/sctp/outqueue.c 		status = sctp_packet_transmit_chunk(ctx->packet, chunk, 0,
ctx              1092 net/sctp/outqueue.c 						    ctx->gfp);
ctx              1101 net/sctp/outqueue.c 			sctp_outq_head_data(ctx->q, chunk);
ctx              1109 net/sctp/outqueue.c 		if (ctx->asoc->state == SCTP_STATE_SHUTDOWN_PENDING)
ctx              1112 net/sctp/outqueue.c 			ctx->asoc->stats.ouodchunks++;
ctx              1114 net/sctp/outqueue.c 			ctx->asoc->stats.oodchunks++;
ctx              1119 net/sctp/outqueue.c 		sctp_sched_dequeue_done(ctx->q, chunk);
ctx              1122 net/sctp/outqueue.c 			      &ctx->transport->transmitted);
ctx              1124 net/sctp/outqueue.c 		sctp_transport_reset_t3_rtx(ctx->transport);
ctx              1125 net/sctp/outqueue.c 		ctx->transport->last_time_sent = jiffies;
ctx              1130 net/sctp/outqueue.c 		if (ctx->packet->has_cookie_echo)
ctx              1135 net/sctp/outqueue.c static void sctp_outq_flush_transports(struct sctp_flush_ctx *ctx)
ctx              1142 net/sctp/outqueue.c 	while ((ltransport = sctp_list_dequeue(&ctx->transport_list)) != NULL) {
ctx              1146 net/sctp/outqueue.c 			error = sctp_packet_transmit(packet, ctx->gfp);
ctx              1148 net/sctp/outqueue.c 				ctx->q->asoc->base.sk->sk_err = -error;
ctx              1167 net/sctp/outqueue.c 	struct sctp_flush_ctx ctx = {
ctx              1170 net/sctp/outqueue.c 		.transport_list = LIST_HEAD_INIT(ctx.transport_list),
ctx              1185 net/sctp/outqueue.c 	sctp_outq_flush_ctrl(&ctx);
ctx              1190 net/sctp/outqueue.c 	sctp_outq_flush_data(&ctx, rtx_timeout);
ctx              1194 net/sctp/outqueue.c 	sctp_outq_flush_transports(&ctx);
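The sctp/outqueue.c lines show struct sctp_flush_ctx bundling the queue, association, current transport, pending-transport list, and gfp flags into one on-stack struct that sctp_outq_flush() initializes and then threads through the control, data, and transmit stages, instead of passing each as a separate parameter. A reduced sketch of that shape (the asoc, transport_list, and gfp fields are elided):

```c
/* Incomplete types suffice: the ctx only carries pointers. */
struct queue;
struct transport;

struct flush_ctx {
	struct queue     *q;          /* outqueue being flushed */
	struct transport *transport;  /* currently selected path */
};

static void flush_ctrl(struct flush_ctx *ctx) { (void)ctx; /* control chunks */ }
static void flush_data(struct flush_ctx *ctx) { (void)ctx; /* data chunks */ }
static void flush_xmit(struct flush_ctx *ctx) { (void)ctx; /* push packets */ }

/* One on-stack context threads state through every stage. */
static void outq_flush(struct queue *q)
{
	struct flush_ctx ctx = { .q = q };

	flush_ctrl(&ctx);
	flush_data(&ctx);
	flush_xmit(&ctx);
}
```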
ctx               344 net/socket.c   	struct pseudo_fs_context *ctx = init_pseudo(fc, SOCKFS_MAGIC);
ctx               345 net/socket.c   	if (!ctx)
ctx               347 net/socket.c   	ctx->ops = &sockfs_ops;
ctx               348 net/socket.c   	ctx->dops = &sockfs_dentry_operations;
ctx               349 net/socket.c   	ctx->xattr = sockfs_xattr_handlers;
ctx                96 net/sunrpc/auth_gss/auth_gss.c gss_get_ctx(struct gss_cl_ctx *ctx)
ctx                98 net/sunrpc/auth_gss/auth_gss.c 	refcount_inc(&ctx->count);
ctx                99 net/sunrpc/auth_gss/auth_gss.c 	return ctx;
ctx               103 net/sunrpc/auth_gss/auth_gss.c gss_put_ctx(struct gss_cl_ctx *ctx)
ctx               105 net/sunrpc/auth_gss/auth_gss.c 	if (refcount_dec_and_test(&ctx->count))
ctx               106 net/sunrpc/auth_gss/auth_gss.c 		gss_free_ctx(ctx);
ctx               115 net/sunrpc/auth_gss/auth_gss.c gss_cred_set_ctx(struct rpc_cred *cred, struct gss_cl_ctx *ctx)
ctx               121 net/sunrpc/auth_gss/auth_gss.c 	gss_get_ctx(ctx);
ctx               122 net/sunrpc/auth_gss/auth_gss.c 	rcu_assign_pointer(gss_cred->gc_ctx, ctx);
ctx               161 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = NULL;
ctx               164 net/sunrpc/auth_gss/auth_gss.c 	ctx = rcu_dereference(gss_cred->gc_ctx);
ctx               165 net/sunrpc/auth_gss/auth_gss.c 	if (ctx)
ctx               166 net/sunrpc/auth_gss/auth_gss.c 		gss_get_ctx(ctx);
ctx               168 net/sunrpc/auth_gss/auth_gss.c 	return ctx;
ctx               174 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx               176 net/sunrpc/auth_gss/auth_gss.c 	ctx = kzalloc(sizeof(*ctx), GFP_NOFS);
ctx               177 net/sunrpc/auth_gss/auth_gss.c 	if (ctx != NULL) {
ctx               178 net/sunrpc/auth_gss/auth_gss.c 		ctx->gc_proc = RPC_GSS_PROC_DATA;
ctx               179 net/sunrpc/auth_gss/auth_gss.c 		ctx->gc_seq = 1;	/* NetApp 6.4R1 doesn't accept seq. no. 0 */
ctx               180 net/sunrpc/auth_gss/auth_gss.c 		spin_lock_init(&ctx->gc_seq_lock);
ctx               181 net/sunrpc/auth_gss/auth_gss.c 		refcount_set(&ctx->count,1);
ctx               183 net/sunrpc/auth_gss/auth_gss.c 	return ctx;
ctx               188 net/sunrpc/auth_gss/auth_gss.c gss_fill_context(const void *p, const void *end, struct gss_cl_ctx *ctx, struct gss_api_mech *gm)
ctx               206 net/sunrpc/auth_gss/auth_gss.c 	ctx->gc_expiry = now + ((unsigned long)timeout * HZ);
ctx               213 net/sunrpc/auth_gss/auth_gss.c 	ctx->gc_win = window_size;
ctx               215 net/sunrpc/auth_gss/auth_gss.c 	if (ctx->gc_win == 0) {
ctx               227 net/sunrpc/auth_gss/auth_gss.c 	p = simple_get_netobj(p, end, &ctx->gc_wire_ctx);
ctx               239 net/sunrpc/auth_gss/auth_gss.c 	ret = gss_import_sec_context(p, seclen, gm, &ctx->gc_gss_ctx, NULL, GFP_NOFS);
ctx               253 net/sunrpc/auth_gss/auth_gss.c 	p = simple_get_netobj(q, end, &ctx->gc_acceptor);
ctx               257 net/sunrpc/auth_gss/auth_gss.c 	trace_rpcgss_context(ctx->gc_expiry, now, timeout,
ctx               258 net/sunrpc/auth_gss/auth_gss.c 			     ctx->gc_acceptor.len, ctx->gc_acceptor.data);
ctx               280 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx               317 net/sunrpc/auth_gss/auth_gss.c 	if (gss_msg->ctx != NULL)
ctx               318 net/sunrpc/auth_gss/auth_gss.c 		gss_put_ctx(gss_msg->ctx);
ctx               388 net/sunrpc/auth_gss/auth_gss.c 		if (gss_msg->ctx == NULL)
ctx               391 net/sunrpc/auth_gss/auth_gss.c 		gss_cred_set_ctx(&gss_cred->gc_base, gss_msg->ctx);
ctx               634 net/sunrpc/auth_gss/auth_gss.c 	else if (gss_msg->ctx == NULL && gss_msg->msg.errno >= 0) {
ctx               690 net/sunrpc/auth_gss/auth_gss.c 		if (gss_msg->ctx != NULL || gss_msg->msg.errno < 0) {
ctx               700 net/sunrpc/auth_gss/auth_gss.c 	if (gss_msg->ctx)
ctx               701 net/sunrpc/auth_gss/auth_gss.c 		gss_cred_set_ctx(cred, gss_msg->ctx);
ctx               723 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx               753 net/sunrpc/auth_gss/auth_gss.c 	ctx = gss_alloc_context();
ctx               754 net/sunrpc/auth_gss/auth_gss.c 	if (ctx == NULL)
ctx               768 net/sunrpc/auth_gss/auth_gss.c 	p = gss_fill_context(p, end, ctx, gss_msg->auth->mech);
ctx               790 net/sunrpc/auth_gss/auth_gss.c 	gss_msg->ctx = gss_get_ctx(ctx);
ctx               799 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
ctx              1252 net/sunrpc/auth_gss/auth_gss.c 		struct gss_cl_ctx *ctx =
ctx              1262 net/sunrpc/auth_gss/auth_gss.c 		rcu_assign_pointer(new->gc_ctx, ctx);
ctx              1263 net/sunrpc/auth_gss/auth_gss.c 		gss_get_ctx(ctx);
ctx              1279 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = rcu_dereference_protected(gss_cred->gc_ctx, 1);
ctx              1285 net/sunrpc/auth_gss/auth_gss.c 		ctx->gc_proc = RPC_GSS_PROC_DESTROY;
ctx              1300 net/sunrpc/auth_gss/auth_gss.c gss_do_free_ctx(struct gss_cl_ctx *ctx)
ctx              1302 net/sunrpc/auth_gss/auth_gss.c 	gss_delete_sec_context(&ctx->gc_gss_ctx);
ctx              1303 net/sunrpc/auth_gss/auth_gss.c 	kfree(ctx->gc_wire_ctx.data);
ctx              1304 net/sunrpc/auth_gss/auth_gss.c 	kfree(ctx->gc_acceptor.data);
ctx              1305 net/sunrpc/auth_gss/auth_gss.c 	kfree(ctx);
ctx              1311 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = container_of(head, struct gss_cl_ctx, gc_rcu);
ctx              1312 net/sunrpc/auth_gss/auth_gss.c 	gss_do_free_ctx(ctx);
ctx              1316 net/sunrpc/auth_gss/auth_gss.c gss_free_ctx(struct gss_cl_ctx *ctx)
ctx              1318 net/sunrpc/auth_gss/auth_gss.c 	call_rcu(&ctx->gc_rcu, gss_free_ctx_callback);
ctx              1339 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = rcu_dereference_protected(gss_cred->gc_ctx, 1);
ctx              1344 net/sunrpc/auth_gss/auth_gss.c 	if (ctx)
ctx              1345 net/sunrpc/auth_gss/auth_gss.c 		gss_put_ctx(ctx);
ctx              1416 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx              1421 net/sunrpc/auth_gss/auth_gss.c 	ctx = rcu_dereference(gss_cred->gc_ctx);
ctx              1422 net/sunrpc/auth_gss/auth_gss.c 	if (!ctx)
ctx              1425 net/sunrpc/auth_gss/auth_gss.c 	len = ctx->gc_acceptor.len;
ctx              1437 net/sunrpc/auth_gss/auth_gss.c 	ctx = rcu_dereference(gss_cred->gc_ctx);
ctx              1440 net/sunrpc/auth_gss/auth_gss.c 	if (!ctx || !ctx->gc_acceptor.len) {
ctx              1446 net/sunrpc/auth_gss/auth_gss.c 	acceptor = &ctx->gc_acceptor;
ctx              1474 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx              1479 net/sunrpc/auth_gss/auth_gss.c 	ctx = rcu_dereference(gss_cred->gc_ctx);
ctx              1480 net/sunrpc/auth_gss/auth_gss.c 	if (!ctx || time_after(timeout, ctx->gc_expiry))
ctx              1491 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx;
ctx              1498 net/sunrpc/auth_gss/auth_gss.c 	ctx = rcu_dereference(gss_cred->gc_ctx);
ctx              1499 net/sunrpc/auth_gss/auth_gss.c 	if (!ctx || time_after(jiffies, ctx->gc_expiry)) {
ctx              1532 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx	*ctx = gss_cred_get_ctx(cred);
ctx              1543 net/sunrpc/auth_gss/auth_gss.c 			      ctx->gc_wire_ctx.len);
ctx              1549 net/sunrpc/auth_gss/auth_gss.c 	spin_lock(&ctx->gc_seq_lock);
ctx              1550 net/sunrpc/auth_gss/auth_gss.c 	req->rq_seqno = (ctx->gc_seq < MAXSEQ) ? ctx->gc_seq++ : MAXSEQ;
ctx              1551 net/sunrpc/auth_gss/auth_gss.c 	spin_unlock(&ctx->gc_seq_lock);
ctx              1557 net/sunrpc/auth_gss/auth_gss.c 	*p++ = cpu_to_be32(ctx->gc_proc);
ctx              1560 net/sunrpc/auth_gss/auth_gss.c 	p = xdr_encode_netobj(p, &ctx->gc_wire_ctx);
ctx              1576 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_get_mic(ctx->gc_gss_ctx, &verf_buf, &mic);
ctx              1585 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
ctx              1675 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = gss_cred_get_ctx(cred);
ctx              1704 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_verify_mic(ctx->gc_gss_ctx, &verf_buf, &mic);
ctx              1715 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
ctx              1728 net/sunrpc/auth_gss/auth_gss.c static int gss_wrap_req_integ(struct rpc_cred *cred, struct gss_cl_ctx *ctx,
ctx              1756 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_get_mic(ctx->gc_gss_ctx, &integ_buf, &mic);
ctx              1820 net/sunrpc/auth_gss/auth_gss.c static int gss_wrap_req_priv(struct rpc_cred *cred, struct gss_cl_ctx *ctx,
ctx              1863 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_wrap(ctx->gc_gss_ctx, offset, snd_buf, inpages);
ctx              1899 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = gss_cred_get_ctx(cred);
ctx              1903 net/sunrpc/auth_gss/auth_gss.c 	if (ctx->gc_proc != RPC_GSS_PROC_DATA) {
ctx              1915 net/sunrpc/auth_gss/auth_gss.c 		status = gss_wrap_req_integ(cred, ctx, task, xdr);
ctx              1918 net/sunrpc/auth_gss/auth_gss.c 		status = gss_wrap_req_priv(cred, ctx, task, xdr);
ctx              1924 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
ctx              1953 net/sunrpc/auth_gss/auth_gss.c 		      struct gss_cl_ctx *ctx, struct rpc_rqst *rqstp,
ctx              2000 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_verify_mic(ctx->gc_gss_ctx, &gss_data, &mic);
ctx              2027 net/sunrpc/auth_gss/auth_gss.c 		     struct gss_cl_ctx *ctx, struct rpc_rqst *rqstp,
ctx              2044 net/sunrpc/auth_gss/auth_gss.c 	maj_stat = gss_unwrap(ctx->gc_gss_ctx, offset,
ctx              2059 net/sunrpc/auth_gss/auth_gss.c 	auth->au_rslack = auth->au_verfsize + 2 + ctx->gc_gss_ctx->slack;
ctx              2060 net/sunrpc/auth_gss/auth_gss.c 	auth->au_ralign = auth->au_verfsize + 2 + ctx->gc_gss_ctx->align;
ctx              2085 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = gss_cred_get_ctx(cred);
ctx              2089 net/sunrpc/auth_gss/auth_gss.c 	if (!ctx)
ctx              2092 net/sunrpc/auth_gss/auth_gss.c 	if (gss_seq_is_newer(req->rq_seqno, READ_ONCE(ctx->gc_seq)))
ctx              2095 net/sunrpc/auth_gss/auth_gss.c 	seq_xmit = READ_ONCE(ctx->gc_seq_xmit);
ctx              2099 net/sunrpc/auth_gss/auth_gss.c 		seq_xmit = cmpxchg(&ctx->gc_seq_xmit, tmp, req->rq_seqno);
ctx              2106 net/sunrpc/auth_gss/auth_gss.c 	win = ctx->gc_win;
ctx              2111 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
ctx              2124 net/sunrpc/auth_gss/auth_gss.c 	struct gss_cl_ctx *ctx = gss_cred_get_ctx(cred);
ctx              2127 net/sunrpc/auth_gss/auth_gss.c 	if (ctx->gc_proc != RPC_GSS_PROC_DATA)
ctx              2134 net/sunrpc/auth_gss/auth_gss.c 		status = gss_unwrap_resp_integ(task, cred, ctx, rqstp, xdr);
ctx              2137 net/sunrpc/auth_gss/auth_gss.c 		status = gss_unwrap_resp_priv(task, cred, ctx, rqstp, xdr);
ctx              2146 net/sunrpc/auth_gss/auth_gss.c 	gss_put_ctx(ctx);
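
The auth_gss.c entries above cluster around two patterns: an RCU-protected, refcounted gss_cl_ctx hung off each credential, and the transmit-window test in gss_xmit_need_reencode() built on serial-number arithmetic plus cmpxchg() over gc_seq_xmit. Below is a minimal userspace model of that window test; struct seq_state, seq_is_newer and need_reencode are illustrative names, not kernel APIs.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct seq_state {
	_Atomic uint32_t seq_xmit;	/* highest seqno already sent (gc_seq_xmit) */
	uint32_t win;			/* server's advertised window (gc_win) */
};

/* Serial-number comparison mod 2^32, as in the kernel's gss_seq_is_newer(). */
static bool seq_is_newer(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) > 0;
}

/* True when a request's seqno has fallen out of the window and the
 * request must be re-encoded before retransmission. */
static bool need_reencode(struct seq_state *s, uint32_t req_seqno)
{
	uint32_t seq_xmit = atomic_load(&s->seq_xmit);

	/* Newest request so far: advance the high-water mark, no re-encode. */
	while (seq_is_newer(req_seqno, seq_xmit)) {
		if (atomic_compare_exchange_weak(&s->seq_xmit, &seq_xmit,
						 req_seqno))
			return false;
	}
	/* Otherwise valid only while still inside the window. */
	return !seq_is_newer(req_seqno, seq_xmit - s->win);
}

int main(void)
{
	struct seq_state s = { .seq_xmit = 100, .win = 16 };

	printf("seq 99:  %s\n", need_reencode(&s, 99) ? "re-encode" : "ok");
	printf("seq 80:  %s\n", need_reencode(&s, 80) ? "re-encode" : "ok");
	printf("seq 101: %s\n", need_reencode(&s, 101) ? "re-encode" : "ok");
	return 0;
}
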
ctx               198 net/sunrpc/auth_gss/gss_krb5_mech.c 	struct krb5_ctx *ctx, struct crypto_sync_skcipher **res)
ctx               226 net/sunrpc/auth_gss/gss_krb5_mech.c 	*res = crypto_alloc_sync_skcipher(ctx->gk5e->encrypt_name, 0, 0);
ctx               229 net/sunrpc/auth_gss/gss_krb5_mech.c 			"crypto algorithm %s\n", ctx->gk5e->encrypt_name);
ctx               235 net/sunrpc/auth_gss/gss_krb5_mech.c 			"crypto algorithm %s\n", ctx->gk5e->encrypt_name);
ctx               252 net/sunrpc/auth_gss/gss_krb5_mech.c gss_import_v1_context(const void *p, const void *end, struct krb5_ctx *ctx)
ctx               257 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, &ctx->initiate, sizeof(ctx->initiate));
ctx               262 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->enctype = ENCTYPE_DES_CBC_RAW;
ctx               264 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->gk5e = get_gss_krb5_enctype(ctx->enctype);
ctx               265 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->gk5e == NULL) {
ctx               293 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, &ctx->endtime, sizeof(ctx->endtime));
ctx               299 net/sunrpc/auth_gss/gss_krb5_mech.c 	atomic_set(&ctx->seq_send, seq_send);
ctx               300 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_netobj(p, end, &ctx->mech_used);
ctx               303 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = get_key(p, end, ctx, &ctx->enc);
ctx               306 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = get_key(p, end, ctx, &ctx->seq);
ctx               317 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->seq);
ctx               319 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->enc);
ctx               321 net/sunrpc/auth_gss/gss_krb5_mech.c 	kfree(ctx->mech_used.data);
ctx               327 net/sunrpc/auth_gss/gss_krb5_mech.c context_v2_alloc_cipher(struct krb5_ctx *ctx, const char *cname, u8 *key)
ctx               337 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (crypto_sync_skcipher_setkey(cp, key, ctx->gk5e->keylength)) {
ctx               357 net/sunrpc/auth_gss/gss_krb5_mech.c context_derive_keys_des3(struct krb5_ctx *ctx, gfp_t gfp_mask)
ctx               366 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyin.data = ctx->Ksess;
ctx               367 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyin.len = ctx->gk5e->keylength;
ctx               368 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.len = ctx->gk5e->keylength;
ctx               371 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->seq = context_v2_alloc_cipher(ctx, ctx->gk5e->encrypt_name,
ctx               372 net/sunrpc/auth_gss/gss_krb5_mech.c 					   ctx->Ksess);
ctx               373 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->seq == NULL)
ctx               376 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->enc = context_v2_alloc_cipher(ctx, ctx->gk5e->encrypt_name,
ctx               377 net/sunrpc/auth_gss/gss_krb5_mech.c 					   ctx->Ksess);
ctx               378 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->enc == NULL)
ctx               383 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->cksum;
ctx               384 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               394 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->enc);
ctx               396 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->seq);
ctx               407 net/sunrpc/auth_gss/gss_krb5_mech.c context_derive_keys_rc4(struct krb5_ctx *ctx)
ctx               419 net/sunrpc/auth_gss/gss_krb5_mech.c 	hmac = crypto_alloc_shash(ctx->gk5e->cksum_name, 0, 0);
ctx               422 net/sunrpc/auth_gss/gss_krb5_mech.c 			__func__, PTR_ERR(hmac), ctx->gk5e->cksum_name);
ctx               427 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = crypto_shash_setkey(hmac, ctx->Ksess, ctx->gk5e->keylength);
ctx               435 net/sunrpc/auth_gss/gss_krb5_mech.c 			__func__, ctx->gk5e->cksum_name);
ctx               442 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = crypto_shash_digest(desc, sigkeyconstant, slen, ctx->cksum);
ctx               449 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->enc = crypto_alloc_sync_skcipher(ctx->gk5e->encrypt_name, 0, 0);
ctx               450 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (IS_ERR(ctx->enc)) {
ctx               451 net/sunrpc/auth_gss/gss_krb5_mech.c 		err = PTR_ERR(ctx->enc);
ctx               455 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->seq = crypto_alloc_sync_skcipher(ctx->gk5e->encrypt_name, 0, 0);
ctx               456 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (IS_ERR(ctx->seq)) {
ctx               457 net/sunrpc/auth_gss/gss_krb5_mech.c 		crypto_free_sync_skcipher(ctx->enc);
ctx               458 net/sunrpc/auth_gss/gss_krb5_mech.c 		err = PTR_ERR(ctx->seq);
ctx               474 net/sunrpc/auth_gss/gss_krb5_mech.c context_derive_keys_new(struct krb5_ctx *ctx, gfp_t gfp_mask)
ctx               483 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyin.data = ctx->Ksess;
ctx               484 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyin.len = ctx->gk5e->keylength;
ctx               485 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.len = ctx->gk5e->keylength;
ctx               489 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->initiator_seal;
ctx               490 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               496 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->initiator_enc = context_v2_alloc_cipher(ctx,
ctx               497 net/sunrpc/auth_gss/gss_krb5_mech.c 						     ctx->gk5e->encrypt_name,
ctx               498 net/sunrpc/auth_gss/gss_krb5_mech.c 						     ctx->initiator_seal);
ctx               499 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->initiator_enc == NULL)
ctx               504 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->acceptor_seal;
ctx               505 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               511 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->acceptor_enc = context_v2_alloc_cipher(ctx,
ctx               512 net/sunrpc/auth_gss/gss_krb5_mech.c 						    ctx->gk5e->encrypt_name,
ctx               513 net/sunrpc/auth_gss/gss_krb5_mech.c 						    ctx->acceptor_seal);
ctx               514 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->acceptor_enc == NULL)
ctx               519 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->initiator_sign;
ctx               520 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               529 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->acceptor_sign;
ctx               530 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               539 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->initiator_integ;
ctx               540 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               549 net/sunrpc/auth_gss/gss_krb5_mech.c 	keyout.data = ctx->acceptor_integ;
ctx               550 net/sunrpc/auth_gss/gss_krb5_mech.c 	err = krb5_derive_key(ctx->gk5e, &keyin, &keyout, &c, gfp_mask);
ctx               557 net/sunrpc/auth_gss/gss_krb5_mech.c 	switch (ctx->enctype) {
ctx               560 net/sunrpc/auth_gss/gss_krb5_mech.c 		ctx->initiator_enc_aux =
ctx               561 net/sunrpc/auth_gss/gss_krb5_mech.c 			context_v2_alloc_cipher(ctx, "cbc(aes)",
ctx               562 net/sunrpc/auth_gss/gss_krb5_mech.c 						ctx->initiator_seal);
ctx               563 net/sunrpc/auth_gss/gss_krb5_mech.c 		if (ctx->initiator_enc_aux == NULL)
ctx               565 net/sunrpc/auth_gss/gss_krb5_mech.c 		ctx->acceptor_enc_aux =
ctx               566 net/sunrpc/auth_gss/gss_krb5_mech.c 			context_v2_alloc_cipher(ctx, "cbc(aes)",
ctx               567 net/sunrpc/auth_gss/gss_krb5_mech.c 						ctx->acceptor_seal);
ctx               568 net/sunrpc/auth_gss/gss_krb5_mech.c 		if (ctx->acceptor_enc_aux == NULL) {
ctx               569 net/sunrpc/auth_gss/gss_krb5_mech.c 			crypto_free_sync_skcipher(ctx->initiator_enc_aux);
ctx               577 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->acceptor_enc);
ctx               579 net/sunrpc/auth_gss/gss_krb5_mech.c 	crypto_free_sync_skcipher(ctx->initiator_enc);
ctx               585 net/sunrpc/auth_gss/gss_krb5_mech.c gss_import_v2_context(const void *p, const void *end, struct krb5_ctx *ctx,
ctx               591 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, &ctx->flags, sizeof(ctx->flags));
ctx               594 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->initiate = ctx->flags & KRB5_CTX_FLAG_INITIATOR;
ctx               596 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, &ctx->endtime, sizeof(ctx->endtime));
ctx               602 net/sunrpc/auth_gss/gss_krb5_mech.c 	atomic64_set(&ctx->seq_send64, seq_send64);
ctx               604 net/sunrpc/auth_gss/gss_krb5_mech.c 	atomic_set(&ctx->seq_send, seq_send64);
ctx               605 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (seq_send64 != atomic_read(&ctx->seq_send)) {
ctx               607 net/sunrpc/auth_gss/gss_krb5_mech.c 			seq_send64, atomic_read(&ctx->seq_send));
ctx               611 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, &ctx->enctype, sizeof(ctx->enctype));
ctx               615 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->enctype == ENCTYPE_DES3_CBC_SHA1)
ctx               616 net/sunrpc/auth_gss/gss_krb5_mech.c 		ctx->enctype = ENCTYPE_DES3_CBC_RAW;
ctx               617 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->gk5e = get_gss_krb5_enctype(ctx->enctype);
ctx               618 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx->gk5e == NULL) {
ctx               620 net/sunrpc/auth_gss/gss_krb5_mech.c 			ctx->enctype);
ctx               624 net/sunrpc/auth_gss/gss_krb5_mech.c 	keylen = ctx->gk5e->keylength;
ctx               626 net/sunrpc/auth_gss/gss_krb5_mech.c 	p = simple_get_bytes(p, end, ctx->Ksess, keylen);
ctx               635 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->mech_used.data = kmemdup(gss_kerberos_mech.gm_oid.data,
ctx               637 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (unlikely(ctx->mech_used.data == NULL)) {
ctx               641 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx->mech_used.len = gss_kerberos_mech.gm_oid.len;
ctx               643 net/sunrpc/auth_gss/gss_krb5_mech.c 	switch (ctx->enctype) {
ctx               645 net/sunrpc/auth_gss/gss_krb5_mech.c 		return context_derive_keys_des3(ctx, gfp_mask);
ctx               647 net/sunrpc/auth_gss/gss_krb5_mech.c 		return context_derive_keys_rc4(ctx);
ctx               650 net/sunrpc/auth_gss/gss_krb5_mech.c 		return context_derive_keys_new(ctx, gfp_mask);
ctx               666 net/sunrpc/auth_gss/gss_krb5_mech.c 	struct  krb5_ctx *ctx;
ctx               669 net/sunrpc/auth_gss/gss_krb5_mech.c 	ctx = kzalloc(sizeof(*ctx), gfp_mask);
ctx               670 net/sunrpc/auth_gss/gss_krb5_mech.c 	if (ctx == NULL)
ctx               674 net/sunrpc/auth_gss/gss_krb5_mech.c 		ret = gss_import_v1_context(p, end, ctx);
ctx               676 net/sunrpc/auth_gss/gss_krb5_mech.c 		ret = gss_import_v2_context(p, end, ctx, gfp_mask);
ctx               679 net/sunrpc/auth_gss/gss_krb5_mech.c 		ctx_id->internal_ctx_id = ctx;
ctx               681 net/sunrpc/auth_gss/gss_krb5_mech.c 			*endtime = ctx->endtime;
ctx               683 net/sunrpc/auth_gss/gss_krb5_mech.c 		kfree(ctx);
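
gss_import_v1_context() and gss_import_v2_context() above consume a serialized context blob through helpers that take a cursor and an end pointer and fail closed on overrun. A self-contained sketch of that contract follows; this simple_get_bytes() is a local re-implementation that returns NULL on overrun, where the kernel helper returns an ERR_PTR.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Copy len bytes out of [p, end), returning the advanced cursor,
 * or NULL if the read would run past end. */
static const void *simple_get_bytes(const void *p, const void *end,
				    void *res, size_t len)
{
	const uint8_t *q = (const uint8_t *)p + len;

	if (q > (const uint8_t *)end || q < (const uint8_t *)p)
		return NULL;
	memcpy(res, p, len);
	return q;
}

int main(void)
{
	/* v2-import-style blob: 4-byte flags, then an 8-byte end time */
	uint8_t blob[12] = { 1, 0, 0, 0 };
	const void *p = blob, *end = blob + sizeof(blob);
	uint32_t flags;
	uint64_t endtime;

	if (!(p = simple_get_bytes(p, end, &flags, sizeof(flags))))
		return 1;
	if (!(p = simple_get_bytes(p, end, &endtime, sizeof(endtime))))
		return 1;
	printf("flags=%u endtime=%llu, %zu bytes left\n", flags,
	       (unsigned long long)endtime,
	       (size_t)((const uint8_t *)end - (const uint8_t *)p));
	return 0;
}
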
ctx                73 net/sunrpc/auth_gss/gss_krb5_seal.c setup_token(struct krb5_ctx *ctx, struct xdr_netobj *token)
ctx                77 net/sunrpc/auth_gss/gss_krb5_seal.c 	int body_size = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength;
ctx                79 net/sunrpc/auth_gss/gss_krb5_seal.c 	token->len = g_token_size(&ctx->mech_used, body_size);
ctx                82 net/sunrpc/auth_gss/gss_krb5_seal.c 	g_make_token_header(&ctx->mech_used, body_size, (unsigned char **)&ptr);
ctx                91 net/sunrpc/auth_gss/gss_krb5_seal.c 	*ptr++ = (__force u16)cpu_to_le16(ctx->gk5e->signalg);
ctx                99 net/sunrpc/auth_gss/gss_krb5_seal.c setup_token_v2(struct krb5_ctx *ctx, struct xdr_netobj *token)
ctx               105 net/sunrpc/auth_gss/gss_krb5_seal.c 	if ((ctx->flags & KRB5_CTX_FLAG_INITIATOR) == 0)
ctx               107 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (ctx->flags & KRB5_CTX_FLAG_ACCEPTOR_SUBKEY)
ctx               122 net/sunrpc/auth_gss/gss_krb5_seal.c 	token->len = GSS_KRB5_TOK_HDR_LEN + ctx->gk5e->cksumlength;
ctx               127 net/sunrpc/auth_gss/gss_krb5_seal.c gss_get_mic_v1(struct krb5_ctx *ctx, struct xdr_buf *text,
ctx               139 net/sunrpc/auth_gss/gss_krb5_seal.c 	BUG_ON(ctx == NULL);
ctx               143 net/sunrpc/auth_gss/gss_krb5_seal.c 	ptr = setup_token(ctx, token);
ctx               145 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (ctx->gk5e->keyed_cksum)
ctx               146 net/sunrpc/auth_gss/gss_krb5_seal.c 		cksumkey = ctx->cksum;
ctx               150 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (make_checksum(ctx, ptr, 8, text, 0, cksumkey,
ctx               156 net/sunrpc/auth_gss/gss_krb5_seal.c 	seq_send = atomic_fetch_inc(&ctx->seq_send);
ctx               158 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (krb5_make_seq_num(ctx, ctx->seq, ctx->initiate ? 0 : 0xff,
ctx               162 net/sunrpc/auth_gss/gss_krb5_seal.c 	return (ctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE;
ctx               166 net/sunrpc/auth_gss/gss_krb5_seal.c gss_get_mic_v2(struct krb5_ctx *ctx, struct xdr_buf *text,
ctx               180 net/sunrpc/auth_gss/gss_krb5_seal.c 	krb5_hdr = setup_token_v2(ctx, token);
ctx               184 net/sunrpc/auth_gss/gss_krb5_seal.c 	seq_send_be64 = cpu_to_be64(atomic64_fetch_inc(&ctx->seq_send64));
ctx               187 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (ctx->initiate) {
ctx               188 net/sunrpc/auth_gss/gss_krb5_seal.c 		cksumkey = ctx->initiator_sign;
ctx               191 net/sunrpc/auth_gss/gss_krb5_seal.c 		cksumkey = ctx->acceptor_sign;
ctx               195 net/sunrpc/auth_gss/gss_krb5_seal.c 	if (make_checksum_v2(ctx, krb5_hdr, GSS_KRB5_TOK_HDR_LEN,
ctx               203 net/sunrpc/auth_gss/gss_krb5_seal.c 	return (ctx->endtime < now) ? GSS_S_CONTEXT_EXPIRED : GSS_S_COMPLETE;
ctx               210 net/sunrpc/auth_gss/gss_krb5_seal.c 	struct krb5_ctx		*ctx = gss_ctx->internal_ctx_id;
ctx               212 net/sunrpc/auth_gss/gss_krb5_seal.c 	switch (ctx->enctype) {
ctx               218 net/sunrpc/auth_gss/gss_krb5_seal.c 		return gss_get_mic_v1(ctx, text, token);
ctx               221 net/sunrpc/auth_gss/gss_krb5_seal.c 		return gss_get_mic_v2(ctx, text, token);
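
setup_token_v2() above lays down the 16-byte RFC 4121 MIC token header (TOK_ID 0x04 0x04, one flags byte, 0xff filler, then the 64-bit send sequence number) before the checksum is appended. A standalone sketch of that header layout; the constant and helper names here are illustrative.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define KG2_TOK_MIC			0x0404	/* RFC 4121 sec 4.2.6.1 */
#define FLAG_SENDER_IS_ACCEPTOR		0x01
#define FLAG_ACCEPTOR_SUBKEY		0x04

static void put_be16(uint8_t *p, uint16_t v) { p[0] = v >> 8; p[1] = (uint8_t)v; }

static void put_be64(uint8_t *p, uint64_t v)
{
	for (int i = 0; i < 8; i++)
		p[i] = (uint8_t)(v >> (56 - 8 * i));
}

/* Fill the 16-byte MIC token header; the HMAC over header + message
 * is appended at hdr[16] by the caller. */
static void setup_mic_token_v2(uint8_t hdr[16], int initiator,
			       int acceptor_subkey, uint64_t seq_send)
{
	uint8_t flags = 0;

	if (!initiator)
		flags |= FLAG_SENDER_IS_ACCEPTOR;
	if (acceptor_subkey)
		flags |= FLAG_ACCEPTOR_SUBKEY;

	put_be16(hdr, KG2_TOK_MIC);
	hdr[2] = flags;
	memset(hdr + 3, 0xff, 5);	/* filler bytes */
	put_be64(hdr + 8, seq_send);	/* SND_SEQ, big-endian */
}

int main(void)
{
	uint8_t hdr[16];

	setup_mic_token_v2(hdr, 1, 0, 7);
	for (int i = 0; i < 16; i++)
		printf("%02x ", hdr[i]);
	printf("\n");
	return 0;
}
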
ctx                74 net/sunrpc/auth_gss/gss_krb5_unseal.c gss_verify_mic_v1(struct krb5_ctx *ctx,
ctx                91 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (g_verify_token_header(&ctx->mech_used, &bodysize, &ptr,
ctx               102 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (signalg != ctx->gk5e->signalg)
ctx               112 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (ctx->gk5e->keyed_cksum)
ctx               113 net/sunrpc/auth_gss/gss_krb5_unseal.c 		cksumkey = ctx->cksum;
ctx               117 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (make_checksum(ctx, ptr, 8, message_buffer, 0,
ctx               122 net/sunrpc/auth_gss/gss_krb5_unseal.c 					ctx->gk5e->cksumlength))
ctx               129 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (now > ctx->endtime)
ctx               134 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (krb5_get_seq_num(ctx, ptr + GSS_KRB5_TOK_HDR_LEN, ptr + 8,
ctx               138 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if ((ctx->initiate && direction != 0xff) ||
ctx               139 net/sunrpc/auth_gss/gss_krb5_unseal.c 	    (!ctx->initiate && direction != 0))
ctx               146 net/sunrpc/auth_gss/gss_krb5_unseal.c gss_verify_mic_v2(struct krb5_ctx *ctx,
ctx               167 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if ((!ctx->initiate && (flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR)) ||
ctx               168 net/sunrpc/auth_gss/gss_krb5_unseal.c 	    (ctx->initiate && !(flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR)))
ctx               180 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (ctx->initiate) {
ctx               181 net/sunrpc/auth_gss/gss_krb5_unseal.c 		cksumkey = ctx->acceptor_sign;
ctx               184 net/sunrpc/auth_gss/gss_krb5_unseal.c 		cksumkey = ctx->initiator_sign;
ctx               188 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (make_checksum_v2(ctx, ptr, GSS_KRB5_TOK_HDR_LEN, message_buffer, 0,
ctx               193 net/sunrpc/auth_gss/gss_krb5_unseal.c 				ctx->gk5e->cksumlength))
ctx               198 net/sunrpc/auth_gss/gss_krb5_unseal.c 	if (now > ctx->endtime)
ctx               214 net/sunrpc/auth_gss/gss_krb5_unseal.c 	struct krb5_ctx *ctx = gss_ctx->internal_ctx_id;
ctx               216 net/sunrpc/auth_gss/gss_krb5_unseal.c 	switch (ctx->enctype) {
ctx               222 net/sunrpc/auth_gss/gss_krb5_unseal.c 		return gss_verify_mic_v1(ctx, message_buffer, read_token);
ctx               225 net/sunrpc/auth_gss/gss_krb5_unseal.c 		return gss_verify_mic_v2(ctx, message_buffer, read_token);
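
The verify paths above accept a token only if its direction matches the context (an initiator may only accept acceptor-sent tokens and vice versa), the recomputed checksum matches, and the context has not expired. A compile-able sketch of the v2 direction and expiry checks; the checksum comparison is left out, and these helper names are illustrative.

#include <stdbool.h>
#include <stdint.h>
#include <time.h>

#define KG2_TOKEN_FLAG_SENTBYACCEPTOR	0x01

/* Mirrors the flag test in gss_verify_mic_v2() above: an initiator
 * accepts only tokens the acceptor sent, and vice versa. */
static bool direction_ok(bool we_initiated, uint8_t token_flags)
{
	bool from_acceptor = token_flags & KG2_TOKEN_FLAG_SENTBYACCEPTOR;

	return we_initiated == from_acceptor;
}

/* Context expiry test, mirroring the now > ctx->endtime check above. */
static bool context_live(time_t endtime)
{
	return time(NULL) <= endtime;
}

int main(void)
{
	bool ok = direction_ok(true, KG2_TOKEN_FLAG_SENTBYACCEPTOR) &&
		  context_live(time(NULL) + 3600);

	return ok ? 0 : 1;
}
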
ctx               339 net/sunrpc/auth_gss/gss_rpc_xdr.c 			     const struct gssx_call_ctx *ctx)
ctx               346 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_buffer(xdr, &ctx->locale);
ctx               351 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_buffer(xdr, &ctx->server_ctx);
ctx               565 net/sunrpc/auth_gss/gss_rpc_xdr.c 			struct gssx_ctx *ctx)
ctx               571 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_buffer(xdr, &ctx->exported_context_token);
ctx               576 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_buffer(xdr, &ctx->state);
ctx               581 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_bool(xdr, ctx->need_release);
ctx               586 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_buffer(xdr, &ctx->mech);
ctx               591 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_name(xdr, &ctx->src_name);
ctx               596 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_name(xdr, &ctx->targ_name);
ctx               604 net/sunrpc/auth_gss/gss_rpc_xdr.c 	p = xdr_encode_hyper(p, ctx->lifetime);
ctx               607 net/sunrpc/auth_gss/gss_rpc_xdr.c 	p = xdr_encode_hyper(p, ctx->ctx_flags);
ctx               610 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_bool(xdr, ctx->locally_initiated);
ctx               615 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_enc_bool(xdr, ctx->open);
ctx               622 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = dummy_enc_opt_array(xdr, &ctx->options);
ctx               628 net/sunrpc/auth_gss/gss_rpc_xdr.c 			struct gssx_ctx *ctx)
ctx               634 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_buffer(xdr, &ctx->exported_context_token);
ctx               639 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_buffer(xdr, &ctx->state);
ctx               644 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_bool(xdr, &ctx->need_release);
ctx               649 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_buffer(xdr, &ctx->mech);
ctx               654 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_name(xdr, &ctx->src_name);
ctx               659 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_name(xdr, &ctx->targ_name);
ctx               667 net/sunrpc/auth_gss/gss_rpc_xdr.c 	p = xdr_decode_hyper(p, &ctx->lifetime);
ctx               670 net/sunrpc/auth_gss/gss_rpc_xdr.c 	p = xdr_decode_hyper(p, &ctx->ctx_flags);
ctx               673 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_bool(xdr, &ctx->locally_initiated);
ctx               678 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = gssx_dec_bool(xdr, &ctx->open);
ctx               684 net/sunrpc/auth_gss/gss_rpc_xdr.c 	err = dummy_dec_opt_array(xdr, &ctx->options);
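
gssx_enc_ctx() and gssx_dec_ctx() above walk the same field list in both directions, checking err after every step; each buffer field is an XDR opaque: a 4-byte big-endian length, the bytes, then zero padding to a 4-byte boundary. A local sketch of that wire format (this xdr_enc_opaque is not a kernel function):

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* XDR opaque<>: 4-byte big-endian length, payload, zero-pad to 4. */
static size_t xdr_enc_opaque(uint8_t *out, const void *data, uint32_t len)
{
	size_t pad = (4 - (len & 3)) & 3;

	out[0] = (uint8_t)(len >> 24);
	out[1] = (uint8_t)(len >> 16);
	out[2] = (uint8_t)(len >> 8);
	out[3] = (uint8_t)len;
	memcpy(out + 4, data, len);
	memset(out + 4 + len, 0, pad);
	return 4 + len + pad;
}

int main(void)
{
	uint8_t buf[32];
	size_t n = xdr_enc_opaque(buf, "mech", 4);	/* no padding needed */

	n += xdr_enc_opaque(buf + n, "state", 5);	/* 3 pad bytes */
	printf("encoded %zu bytes\n", n);		/* 8 + 12 = 20 */
	return 0;
}
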
ctx               782 net/sunrpc/auth_gss/svcauth_gss.c find_gss_auth_domain(struct gss_ctx *ctx, u32 svc)
ctx               786 net/sunrpc/auth_gss/svcauth_gss.c 	name = gss_service_to_auth_domain_name(ctx->mech_type, svc);
ctx               856 net/sunrpc/auth_gss/svcauth_gss.c unwrap_integ_data(struct svc_rqst *rqstp, struct xdr_buf *buf, u32 seq, struct gss_ctx *ctx)
ctx               894 net/sunrpc/auth_gss/svcauth_gss.c 	maj_stat = gss_verify_mic(ctx, &integ_buf, &mic);
ctx               925 net/sunrpc/auth_gss/svcauth_gss.c unwrap_priv_data(struct svc_rqst *rqstp, struct xdr_buf *buf, u32 seq, struct gss_ctx *ctx)
ctx               948 net/sunrpc/auth_gss/svcauth_gss.c 	maj_stat = gss_unwrap(ctx, 0, priv_len, buf);
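
unwrap_integ_data() above implements the server half of RPCSEC_GSS integrity protection (RFC 2203): the received MIC must verify over the databody, which begins with the 32-bit sequence number, and that sequence number must equal the one carried in the credential. A shape-only sketch with a pluggable verifier; none of these names are kernel APIs.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

struct blob { const uint8_t *data; size_t len; };

/* The databody starts with the big-endian sequence number. */
static uint32_t get_be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8) | p[3];
}

static bool unwrap_integ(struct blob body, struct blob mic, uint32_t cred_seq,
			 bool (*verify_mic)(struct blob data, struct blob mic))
{
	if (body.len < 4)
		return false;
	if (get_be32(body.data) != cred_seq)	/* replay / splice check */
		return false;
	return verify_mic(body, mic);		/* MIC covers seq + args */
}

static bool always_ok(struct blob d, struct blob m)
{
	(void)d; (void)m;
	return true;	/* stand-in for gss_verify_mic() */
}

int main(void)
{
	uint8_t body[8] = { 0, 0, 0, 42 };	/* seq 42 + 4 arg bytes */
	struct blob b = { body, sizeof(body) }, m = { NULL, 0 };

	return unwrap_integ(b, m, 42, always_ok) ? 0 : 1;
}
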
ctx                53 net/tls/tls_device.c static void tls_device_free_ctx(struct tls_context *ctx)
ctx                55 net/tls/tls_device.c 	if (ctx->tx_conf == TLS_HW) {
ctx                56 net/tls/tls_device.c 		kfree(tls_offload_ctx_tx(ctx));
ctx                57 net/tls/tls_device.c 		kfree(ctx->tx.rec_seq);
ctx                58 net/tls/tls_device.c 		kfree(ctx->tx.iv);
ctx                61 net/tls/tls_device.c 	if (ctx->rx_conf == TLS_HW)
ctx                62 net/tls/tls_device.c 		kfree(tls_offload_ctx_rx(ctx));
ctx                64 net/tls/tls_device.c 	tls_ctx_free(NULL, ctx);
ctx                69 net/tls/tls_device.c 	struct tls_context *ctx, *tmp;
ctx                77 net/tls/tls_device.c 	list_for_each_entry_safe(ctx, tmp, &gc_list, list) {
ctx                78 net/tls/tls_device.c 		struct net_device *netdev = ctx->netdev;
ctx                80 net/tls/tls_device.c 		if (netdev && ctx->tx_conf == TLS_HW) {
ctx                81 net/tls/tls_device.c 			netdev->tlsdev_ops->tls_dev_del(netdev, ctx,
ctx                84 net/tls/tls_device.c 			ctx->netdev = NULL;
ctx                87 net/tls/tls_device.c 		list_del(&ctx->list);
ctx                88 net/tls/tls_device.c 		tls_device_free_ctx(ctx);
ctx                92 net/tls/tls_device.c static void tls_device_queue_ctx_destruction(struct tls_context *ctx)
ctx                97 net/tls/tls_device.c 	list_move_tail(&ctx->list, &tls_device_gc_list);
ctx               148 net/tls/tls_device.c 	struct tls_offload_context_tx *ctx;
ctx               155 net/tls/tls_device.c 	ctx = tls_offload_ctx_tx(tls_ctx);
ctx               157 net/tls/tls_device.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               158 net/tls/tls_device.c 	info = ctx->retransmit_hint;
ctx               160 net/tls/tls_device.c 		ctx->retransmit_hint = NULL;
ctx               162 net/tls/tls_device.c 	list_for_each_entry_safe(info, temp, &ctx->records_list, list) {
ctx               171 net/tls/tls_device.c 	ctx->unacked_record_sn += deleted_records;
ctx               172 net/tls/tls_device.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               182 net/tls/tls_device.c 	struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);
ctx               187 net/tls/tls_device.c 		if (ctx->open_record)
ctx               188 net/tls/tls_device.c 			destroy_record(ctx->open_record);
ctx               189 net/tls/tls_device.c 		delete_all_records(ctx);
ctx               190 net/tls/tls_device.c 		crypto_free_aead(ctx->aead_send);
ctx               256 net/tls/tls_device.c 			   struct tls_context *ctx,
ctx               261 net/tls/tls_device.c 	struct tls_prot_info *prot = &ctx->prot_info;
ctx               270 net/tls/tls_device.c 	if (test_bit(TLS_TX_SYNC_SCHED, &ctx->flags))
ctx               271 net/tls/tls_device.c 		tls_device_resync_tx(sk, ctx, tp->write_seq);
ctx               273 net/tls/tls_device.c 	tls_advance_record_sn(sk, prot, &ctx->tx);
ctx               286 net/tls/tls_device.c 	return tls_push_sg(sk, ctx, offload_ctx->sg_tx_data, 0, flags);
ctx               290 net/tls/tls_device.c 				   struct tls_context *ctx,
ctx               295 net/tls/tls_device.c 	struct tls_prot_info *prot = &ctx->prot_info;
ctx               315 net/tls/tls_device.c 	tls_fill_prepend(ctx, skb_frag_address(&record->frags[0]),
ctx               407 net/tls/tls_device.c 	struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);
ctx               409 net/tls/tls_device.c 	struct tls_record_info *record = ctx->open_record;
ctx               443 net/tls/tls_device.c 		rc = tls_do_allocation(sk, ctx, pfrag,
ctx               450 net/tls/tls_device.c 			record = ctx->open_record;
ctx               461 net/tls/tls_device.c 				ctx->open_record = NULL;
ctx               469 net/tls/tls_device.c 		record = ctx->open_record;
ctx               502 net/tls/tls_device.c 					ctx->open_record = NULL;
ctx               509 net/tls/tls_device.c 					     ctx,
ctx               648 net/tls/tls_device.c void tls_device_write_space(struct sock *sk, struct tls_context *ctx)
ctx               650 net/tls/tls_device.c 	if (tls_is_partially_sent_record(ctx)) {
ctx               656 net/tls/tls_device.c 		tls_push_partial_record(sk, ctx,
ctx               725 net/tls/tls_device.c 					   struct tls_offload_context_rx *ctx,
ctx               731 net/tls/tls_device.c 	if (ctx->resync_type != TLS_OFFLOAD_SYNC_TYPE_CORE_NEXT_HINT)
ctx               734 net/tls/tls_device.c 	if (ctx->resync_nh_do_now)
ctx               737 net/tls/tls_device.c 	if (ctx->resync_nh_reset) {
ctx               738 net/tls/tls_device.c 		ctx->resync_nh_reset = 0;
ctx               739 net/tls/tls_device.c 		ctx->resync_nh.decrypted_failed = 1;
ctx               740 net/tls/tls_device.c 		ctx->resync_nh.decrypted_tgt = TLS_DEVICE_RESYNC_NH_START_IVAL;
ctx               744 net/tls/tls_device.c 	if (++ctx->resync_nh.decrypted_failed <= ctx->resync_nh.decrypted_tgt)
ctx               748 net/tls/tls_device.c 	if (ctx->resync_nh.decrypted_tgt < TLS_DEVICE_RESYNC_NH_MAX_IVAL)
ctx               749 net/tls/tls_device.c 		ctx->resync_nh.decrypted_tgt *= 2;
ctx               751 net/tls/tls_device.c 		ctx->resync_nh.decrypted_tgt += TLS_DEVICE_RESYNC_NH_MAX_IVAL;
ctx               757 net/tls/tls_device.c 		ctx->resync_nh_do_now = 1;
ctx               858 net/tls/tls_device.c 	struct tls_offload_context_rx *ctx = tls_offload_ctx_rx(tls_ctx);
ctx               869 net/tls/tls_device.c 	ctx->sw.decrypted |= is_decrypted;
ctx               876 net/tls/tls_device.c 		ctx->resync_nh_reset = 1;
ctx               880 net/tls/tls_device.c 		tls_device_core_ctrl_rx_resync(tls_ctx, ctx, sk, skb);
ctx               884 net/tls/tls_device.c 	ctx->resync_nh_reset = 1;
ctx               888 net/tls/tls_device.c static void tls_device_attach(struct tls_context *ctx, struct sock *sk,
ctx               892 net/tls/tls_device.c 		refcount_set(&ctx->refcount, 1);
ctx               894 net/tls/tls_device.c 		ctx->netdev = netdev;
ctx               896 net/tls/tls_device.c 		list_add_tail(&ctx->list, &tls_device_list);
ctx               899 net/tls/tls_device.c 		ctx->sk_destruct = sk->sk_destruct;
ctx               904 net/tls/tls_device.c int tls_set_device_offload(struct sock *sk, struct tls_context *ctx)
ctx               918 net/tls/tls_device.c 	if (!ctx)
ctx               921 net/tls/tls_device.c 	if (ctx->priv_ctx_tx)
ctx               934 net/tls/tls_device.c 	crypto_info = &ctx->crypto_send.info;
ctx               967 net/tls/tls_device.c 	ctx->tx.iv = kmalloc(iv_size + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
ctx               969 net/tls/tls_device.c 	if (!ctx->tx.iv) {
ctx               974 net/tls/tls_device.c 	memcpy(ctx->tx.iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE, iv, iv_size);
ctx               977 net/tls/tls_device.c 	ctx->tx.rec_seq = kmemdup(rec_seq, rec_seq_size, GFP_KERNEL);
ctx               978 net/tls/tls_device.c 	if (!ctx->tx.rec_seq) {
ctx               988 net/tls/tls_device.c 	memcpy(&rcd_sn, ctx->tx.rec_seq, sizeof(rcd_sn));
ctx              1002 net/tls/tls_device.c 	ctx->push_pending_record = tls_device_push_pending_record;
ctx              1038 net/tls/tls_device.c 	ctx->priv_ctx_tx = offload_ctx;
ctx              1040 net/tls/tls_device.c 					     &ctx->crypto_send.info,
ctx              1045 net/tls/tls_device.c 	tls_device_attach(ctx, sk, netdev);
ctx              1065 net/tls/tls_device.c 	kfree(ctx->tx.rec_seq);
ctx              1067 net/tls/tls_device.c 	kfree(ctx->tx.iv);
ctx              1070 net/tls/tls_device.c 	ctx->priv_ctx_tx = NULL;
ctx              1076 net/tls/tls_device.c int tls_set_device_offload_rx(struct sock *sk, struct tls_context *ctx)
ctx              1082 net/tls/tls_device.c 	if (ctx->crypto_recv.info.version != TLS_1_2_VERSION)
ctx              1117 net/tls/tls_device.c 	ctx->priv_ctx_rx = context;
ctx              1118 net/tls/tls_device.c 	rc = tls_set_sw_offload(sk, ctx, 0);
ctx              1123 net/tls/tls_device.c 					     &ctx->crypto_recv.info,
ctx              1128 net/tls/tls_device.c 	tls_device_attach(ctx, sk, netdev);
ctx              1140 net/tls/tls_device.c 	ctx->priv_ctx_rx = NULL;
ctx              1172 net/tls/tls_device.c 	struct tls_context *ctx, *tmp;
ctx              1180 net/tls/tls_device.c 	list_for_each_entry_safe(ctx, tmp, &tls_device_list, list) {
ctx              1181 net/tls/tls_device.c 		if (ctx->netdev != netdev ||
ctx              1182 net/tls/tls_device.c 		    !refcount_inc_not_zero(&ctx->refcount))
ctx              1185 net/tls/tls_device.c 		list_move(&ctx->list, &list);
ctx              1189 net/tls/tls_device.c 	list_for_each_entry_safe(ctx, tmp, &list, list)	{
ctx              1190 net/tls/tls_device.c 		if (ctx->tx_conf == TLS_HW)
ctx              1191 net/tls/tls_device.c 			netdev->tlsdev_ops->tls_dev_del(netdev, ctx,
ctx              1193 net/tls/tls_device.c 		if (ctx->rx_conf == TLS_HW)
ctx              1194 net/tls/tls_device.c 			netdev->tlsdev_ops->tls_dev_del(netdev, ctx,
ctx              1196 net/tls/tls_device.c 		WRITE_ONCE(ctx->netdev, NULL);
ctx              1198 net/tls/tls_device.c 		while (test_bit(TLS_RX_SYNC_RUNNING, &ctx->flags))
ctx              1201 net/tls/tls_device.c 		list_del_init(&ctx->list);
ctx              1203 net/tls/tls_device.c 		if (refcount_dec_and_test(&ctx->refcount))
ctx              1204 net/tls/tls_device.c 			tls_device_free_ctx(ctx);
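
The tls_device.c entries above revolve around a refcounted tls_context kept on a global list: tls_device_down() only touches an entry after refcount_inc_not_zero() succeeds, and the last refcount_dec_and_test() frees it. A userspace model of that take-if-live / drop-and-maybe-free pair; the types and helper names are illustrative.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct ctx {
	_Atomic int refcount;
};

/* Take a reference only if the object is still live, as
 * refcount_inc_not_zero() gates the list walk above. */
static bool get_ctx(struct ctx *c)
{
	int v = atomic_load(&c->refcount);

	while (v != 0)
		if (atomic_compare_exchange_weak(&c->refcount, &v, v + 1))
			return true;
	return false;	/* teardown already in flight; skip this entry */
}

/* Drop a reference; the last one frees, as refcount_dec_and_test()
 * gates tls_device_free_ctx() above. */
static void put_ctx(struct ctx *c)
{
	if (atomic_fetch_sub(&c->refcount, 1) == 1)
		free(c);
}

int main(void)
{
	struct ctx *c = malloc(sizeof(*c));

	if (!c)
		return 1;
	atomic_init(&c->refcount, 1);
	if (get_ctx(c))
		printf("walker holds a reference\n");
	put_ctx(c);	/* walker's reference */
	put_ctx(c);	/* owner's reference frees the object */
	return 0;
}
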
ctx               230 net/tls/tls_device_fallback.c 		      struct tls_offload_context_tx *ctx,
ctx               243 net/tls/tls_device_fallback.c 	spin_lock_irqsave(&ctx->lock, flags);
ctx               244 net/tls/tls_device_fallback.c 	record = tls_get_record(ctx, tcp_seq, rcd_sn);
ctx               246 net/tls/tls_device_fallback.c 		spin_unlock_irqrestore(&ctx->lock, flags);
ctx               254 net/tls/tls_device_fallback.c 		spin_unlock_irqrestore(&ctx->lock, flags);
ctx               285 net/tls/tls_device_fallback.c 	spin_unlock_irqrestore(&ctx->lock, flags);
ctx               314 net/tls/tls_device_fallback.c 	struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);
ctx               321 net/tls/tls_device_fallback.c 	aead_req = tls_alloc_aead_request(ctx->aead_send, GFP_ATOMIC);
ctx               350 net/tls/tls_device_fallback.c 	if (tls_enc_records(aead_req, ctx->aead_send, sg_in, sg_out, aad, iv,
ctx               376 net/tls/tls_device_fallback.c 	struct tls_offload_context_tx *ctx = tls_offload_ctx_tx(tls_ctx);
ctx               401 net/tls/tls_device_fallback.c 	if (fill_sg_in(sg_in, skb, ctx, &rcd_sn, &sync_size, &resync_sgs)) {
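
The fallback path above re-encrypts a retransmitted segment in software; the first step, done under ctx->lock, is finding the offload record whose TCP sequence range covers the segment, which is what tls_get_record() does over the offload context's records_list. A standalone sketch of that lookup over a singly linked list; types and names are local.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct rec {
	uint32_t start_seq, end_seq;	/* [start, end) in TCP seq space */
	struct rec *next;
};

/* Mod-2^32 sequence comparison, in the spirit of TCP's before(). */
static int seq_before(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) < 0;
}

static struct rec *find_record(struct rec *head, uint32_t tcp_seq)
{
	for (struct rec *r = head; r; r = r->next)
		if (!seq_before(tcp_seq, r->start_seq) &&
		    seq_before(tcp_seq, r->end_seq))
			return r;
	return NULL;
}

int main(void)
{
	struct rec r2 = { 2000, 3000, NULL };
	struct rec r1 = { 1000, 2000, &r2 };

	printf("%s\n", find_record(&r1, 2500) == &r2 ? "hit" : "miss");
	return 0;
}
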
ctx                68 net/tls/tls_main.c static void update_sk_prot(struct sock *sk, struct tls_context *ctx)
ctx                72 net/tls/tls_main.c 	sk->sk_prot = &tls_prots[ip_ver][ctx->tx_conf][ctx->rx_conf];
ctx               100 net/tls/tls_main.c 		struct tls_context *ctx,
ctx               114 net/tls/tls_main.c 	ctx->in_tcp_sendpages = true;
ctx               133 net/tls/tls_main.c 			ctx->partially_sent_offset = offset;
ctx               134 net/tls/tls_main.c 			ctx->partially_sent_record = (void *)sg;
ctx               135 net/tls/tls_main.c 			ctx->in_tcp_sendpages = false;
ctx               149 net/tls/tls_main.c 	ctx->in_tcp_sendpages = false;
ctx               156 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               158 net/tls/tls_main.c 	if (tls_is_pending_open_record(ctx))
ctx               159 net/tls/tls_main.c 		return ctx->push_pending_record(sk, flags);
ctx               199 net/tls/tls_main.c int tls_push_partial_record(struct sock *sk, struct tls_context *ctx,
ctx               205 net/tls/tls_main.c 	sg = ctx->partially_sent_record;
ctx               206 net/tls/tls_main.c 	offset = ctx->partially_sent_offset;
ctx               208 net/tls/tls_main.c 	ctx->partially_sent_record = NULL;
ctx               209 net/tls/tls_main.c 	return tls_push_sg(sk, ctx, sg, offset, flags);
ctx               212 net/tls/tls_main.c void tls_free_partial_record(struct sock *sk, struct tls_context *ctx)
ctx               216 net/tls/tls_main.c 	for (sg = ctx->partially_sent_record; sg; sg = sg_next(sg)) {
ctx               220 net/tls/tls_main.c 	ctx->partially_sent_record = NULL;
ctx               225 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               231 net/tls/tls_main.c 	if (ctx->in_tcp_sendpages) {
ctx               232 net/tls/tls_main.c 		ctx->sk_write_space(sk);
ctx               237 net/tls/tls_main.c 	if (ctx->tx_conf == TLS_HW)
ctx               238 net/tls/tls_main.c 		tls_device_write_space(sk, ctx);
ctx               241 net/tls/tls_main.c 		tls_sw_write_space(sk, ctx);
ctx               243 net/tls/tls_main.c 	ctx->sk_write_space(sk);
ctx               254 net/tls/tls_main.c void tls_ctx_free(struct sock *sk, struct tls_context *ctx)
ctx               256 net/tls/tls_main.c 	if (!ctx)
ctx               259 net/tls/tls_main.c 	memzero_explicit(&ctx->crypto_send, sizeof(ctx->crypto_send));
ctx               260 net/tls/tls_main.c 	memzero_explicit(&ctx->crypto_recv, sizeof(ctx->crypto_recv));
ctx               261 net/tls/tls_main.c 	mutex_destroy(&ctx->tx_lock);
ctx               264 net/tls/tls_main.c 		kfree_rcu(ctx, rcu);
ctx               266 net/tls/tls_main.c 		kfree(ctx);
ctx               270 net/tls/tls_main.c 				 struct tls_context *ctx, long timeo)
ctx               277 net/tls/tls_main.c 	if (ctx->tx_conf == TLS_SW) {
ctx               278 net/tls/tls_main.c 		kfree(ctx->tx.rec_seq);
ctx               279 net/tls/tls_main.c 		kfree(ctx->tx.iv);
ctx               281 net/tls/tls_main.c 	} else if (ctx->tx_conf == TLS_HW) {
ctx               285 net/tls/tls_main.c 	if (ctx->rx_conf == TLS_SW)
ctx               287 net/tls/tls_main.c 	else if (ctx->rx_conf == TLS_HW)
ctx               294 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               298 net/tls/tls_main.c 	if (ctx->tx_conf == TLS_SW)
ctx               299 net/tls/tls_main.c 		tls_sw_cancel_work_tx(ctx);
ctx               302 net/tls/tls_main.c 	free_ctx = ctx->tx_conf != TLS_HW && ctx->rx_conf != TLS_HW;
ctx               304 net/tls/tls_main.c 	if (ctx->tx_conf != TLS_BASE || ctx->rx_conf != TLS_BASE)
ctx               305 net/tls/tls_main.c 		tls_sk_proto_cleanup(sk, ctx, timeo);
ctx               310 net/tls/tls_main.c 	sk->sk_prot = ctx->sk_proto;
ctx               312 net/tls/tls_main.c 		sk->sk_write_space = ctx->sk_write_space;
ctx               315 net/tls/tls_main.c 	if (ctx->tx_conf == TLS_SW)
ctx               316 net/tls/tls_main.c 		tls_sw_free_ctx_tx(ctx);
ctx               317 net/tls/tls_main.c 	if (ctx->rx_conf == TLS_SW || ctx->rx_conf == TLS_HW)
ctx               318 net/tls/tls_main.c 		tls_sw_strparser_done(ctx);
ctx               319 net/tls/tls_main.c 	if (ctx->rx_conf == TLS_SW)
ctx               320 net/tls/tls_main.c 		tls_sw_free_ctx_rx(ctx);
ctx               321 net/tls/tls_main.c 	ctx->sk_proto->close(sk, timeout);
ctx               324 net/tls/tls_main.c 		tls_ctx_free(sk, ctx);
ctx               331 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               343 net/tls/tls_main.c 	if (!ctx) {
ctx               349 net/tls/tls_main.c 	crypto_info = &ctx->crypto_send.info;
ctx               376 net/tls/tls_main.c 		       ctx->tx.iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
ctx               378 net/tls/tls_main.c 		memcpy(crypto_info_aes_gcm_128->rec_seq, ctx->tx.rec_seq,
ctx               400 net/tls/tls_main.c 		       ctx->tx.iv + TLS_CIPHER_AES_GCM_256_SALT_SIZE,
ctx               402 net/tls/tls_main.c 		memcpy(crypto_info_aes_gcm_256->rec_seq, ctx->tx.rec_seq,
ctx               438 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               441 net/tls/tls_main.c 		return ctx->sk_proto->getsockopt(sk, level,
ctx               452 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               463 net/tls/tls_main.c 		crypto_info = &ctx->crypto_send.info;
ctx               464 net/tls/tls_main.c 		alt_crypto_info = &ctx->crypto_recv.info;
ctx               466 net/tls/tls_main.c 		crypto_info = &ctx->crypto_recv.info;
ctx               467 net/tls/tls_main.c 		alt_crypto_info = &ctx->crypto_send.info;
ctx               527 net/tls/tls_main.c 		rc = tls_set_device_offload(sk, ctx);
ctx               530 net/tls/tls_main.c 			rc = tls_set_sw_offload(sk, ctx, 1);
ctx               536 net/tls/tls_main.c 		rc = tls_set_device_offload_rx(sk, ctx);
ctx               539 net/tls/tls_main.c 			rc = tls_set_sw_offload(sk, ctx, 0);
ctx               544 net/tls/tls_main.c 		tls_sw_strparser_arm(sk, ctx);
ctx               548 net/tls/tls_main.c 		ctx->tx_conf = conf;
ctx               550 net/tls/tls_main.c 		ctx->rx_conf = conf;
ctx               551 net/tls/tls_main.c 	update_sk_prot(sk, ctx);
ctx               553 net/tls/tls_main.c 		ctx->sk_write_space = sk->sk_write_space;
ctx               589 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               592 net/tls/tls_main.c 		return ctx->sk_proto->setsockopt(sk, level, optname, optval,
ctx               601 net/tls/tls_main.c 	struct tls_context *ctx;
ctx               603 net/tls/tls_main.c 	ctx = kzalloc(sizeof(*ctx), GFP_ATOMIC);
ctx               604 net/tls/tls_main.c 	if (!ctx)
ctx               607 net/tls/tls_main.c 	mutex_init(&ctx->tx_lock);
ctx               608 net/tls/tls_main.c 	rcu_assign_pointer(icsk->icsk_ulp_data, ctx);
ctx               609 net/tls/tls_main.c 	ctx->sk_proto = sk->sk_prot;
ctx               610 net/tls/tls_main.c 	return ctx;
ctx               641 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               644 net/tls/tls_main.c 	ctx->sk_destruct(sk);
ctx               647 net/tls/tls_main.c 	tls_ctx_free(sk, ctx);
ctx               652 net/tls/tls_main.c 	struct tls_context *ctx;
ctx               659 net/tls/tls_main.c 			ctx = create_ctx(sk);
ctx               660 net/tls/tls_main.c 			if (!ctx)
ctx               665 net/tls/tls_main.c 			ctx->sk_destruct = sk->sk_destruct;
ctx               667 net/tls/tls_main.c 			ctx->rx_conf = TLS_HW_RECORD;
ctx               668 net/tls/tls_main.c 			ctx->tx_conf = TLS_HW_RECORD;
ctx               669 net/tls/tls_main.c 			update_sk_prot(sk, ctx);
ctx               682 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               696 net/tls/tls_main.c 	ctx->sk_proto->unhash(sk);
ctx               701 net/tls/tls_main.c 	struct tls_context *ctx = tls_get_ctx(sk);
ctx               705 net/tls/tls_main.c 	err = ctx->sk_proto->hash(sk);
ctx               768 net/tls/tls_main.c 	struct tls_context *ctx;
ctx               787 net/tls/tls_main.c 	ctx = create_ctx(sk);
ctx               788 net/tls/tls_main.c 	if (!ctx) {
ctx               793 net/tls/tls_main.c 	ctx->tx_conf = TLS_BASE;
ctx               794 net/tls/tls_main.c 	ctx->rx_conf = TLS_BASE;
ctx               795 net/tls/tls_main.c 	update_sk_prot(sk, ctx);
ctx               804 net/tls/tls_main.c 	struct tls_context *ctx;
ctx               806 net/tls/tls_main.c 	ctx = tls_get_ctx(sk);
ctx               807 net/tls/tls_main.c 	if (likely(ctx)) {
ctx               808 net/tls/tls_main.c 		ctx->sk_write_space = write_space;
ctx               809 net/tls/tls_main.c 		ctx->sk_proto = p;
ctx               819 net/tls/tls_main.c 	struct tls_context *ctx;
ctx               828 net/tls/tls_main.c 	ctx = rcu_dereference(inet_csk(sk)->icsk_ulp_data);
ctx               829 net/tls/tls_main.c 	if (!ctx) {
ctx               833 net/tls/tls_main.c 	version = ctx->prot_info.version;
ctx               839 net/tls/tls_main.c 	cipher_type = ctx->prot_info.cipher_type;
ctx               845 net/tls/tls_main.c 	err = nla_put_u16(skb, TLS_INFO_TXCONF, tls_user_config(ctx, true));
ctx               849 net/tls/tls_main.c 	err = nla_put_u16(skb, TLS_INFO_RXCONF, tls_user_config(ctx, false));
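
update_sk_prot() above swaps the socket's proto ops from a table indexed by IP version and the negotiated TX/RX configurations, so each combination gets a dedicated handler set. A userspace model of that dispatch, dropping the IP-version dimension; the enum values and strings are illustrative.

#include <stdio.h>

enum conf { BASE, SW, HW, NUM_CONF };

struct proto_ops { const char *name; };

/* One ops vector per (tx_conf, rx_conf) pair; unset pairs stay zeroed. */
static const struct proto_ops tls_prots[NUM_CONF][NUM_CONF] = {
	[BASE][BASE] = { "plain TCP" },
	[SW][BASE]   = { "sw TX only" },
	[SW][SW]     = { "sw TX, sw RX" },
	[HW][SW]     = { "hw TX, sw RX" },
};

int main(void)
{
	enum conf tx = HW, rx = SW;

	printf("selected ops: %s\n", tls_prots[tx][rx].name);
	return 0;
}
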
ctx               121 net/tls/tls_sw.c static int padding_length(struct tls_sw_context_rx *ctx,
ctx               146 net/tls/tls_sw.c 		ctx->control = content_type;
ctx               156 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx;
ctx               166 net/tls/tls_sw.c 	ctx = tls_sw_ctx_rx(tls_ctx);
ctx               171 net/tls/tls_sw.c 		ctx->async_wait.err = err;
ctx               177 net/tls/tls_sw.c 		pad = padding_length(ctx, prot, skb);
ctx               179 net/tls/tls_sw.c 			ctx->async_wait.err = pad;
ctx               206 net/tls/tls_sw.c 	spin_lock_bh(&ctx->decrypt_compl_lock);
ctx               207 net/tls/tls_sw.c 	pending = atomic_dec_return(&ctx->decrypt_pending);
ctx               209 net/tls/tls_sw.c 	if (!pending && ctx->async_notify)
ctx               210 net/tls/tls_sw.c 		complete(&ctx->async_wait.completion);
ctx               211 net/tls/tls_sw.c 	spin_unlock_bh(&ctx->decrypt_compl_lock);
ctx               225 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx               228 net/tls/tls_sw.c 	aead_request_set_tfm(aead_req, ctx->aead_recv);
ctx               245 net/tls/tls_sw.c 		atomic_inc(&ctx->decrypt_pending);
ctx               249 net/tls/tls_sw.c 					  crypto_req_done, &ctx->async_wait);
ctx               257 net/tls/tls_sw.c 		ret = crypto_wait_req(ret, &ctx->async_wait);
ctx               261 net/tls/tls_sw.c 		atomic_dec(&ctx->decrypt_pending);
ctx               270 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               271 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               282 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               283 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               293 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               294 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               317 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               322 net/tls/tls_sw.c 	mem_size = sizeof(struct tls_rec) + crypto_aead_reqsize(ctx->aead_send);
ctx               355 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               356 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               360 net/tls/tls_sw.c 		ctx->open_rec = NULL;
ctx               367 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               373 net/tls/tls_sw.c 		rec = list_first_entry(&ctx->tx_list,
ctx               394 net/tls/tls_sw.c 	list_for_each_entry_safe(rec, tmp, &ctx->tx_list, list) {
ctx               429 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               449 net/tls/tls_sw.c 			ctx->async_wait.err = sk->sk_err;
ctx               451 net/tls/tls_sw.c 			ctx->async_wait.err = err;
ctx               463 net/tls/tls_sw.c 		first_rec = list_first_entry(&ctx->tx_list,
ctx               469 net/tls/tls_sw.c 	spin_lock_bh(&ctx->encrypt_compl_lock);
ctx               470 net/tls/tls_sw.c 	pending = atomic_dec_return(&ctx->encrypt_pending);
ctx               472 net/tls/tls_sw.c 	if (!pending && ctx->async_notify)
ctx               473 net/tls/tls_sw.c 		complete(&ctx->async_wait.completion);
ctx               474 net/tls/tls_sw.c 	spin_unlock_bh(&ctx->encrypt_compl_lock);
ctx               480 net/tls/tls_sw.c 	if (!test_and_set_bit(BIT_TX_SCHEDULED, &ctx->tx_bitmask))
ctx               481 net/tls/tls_sw.c 		schedule_delayed_work(&ctx->tx_work.work, 1);
ctx               486 net/tls/tls_sw.c 			     struct tls_sw_context_tx *ctx,
ctx               491 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               512 net/tls/tls_sw.c 	aead_request_set_tfm(aead_req, ctx->aead_send);
ctx               522 net/tls/tls_sw.c 	list_add_tail((struct list_head *)&rec->list, &ctx->tx_list);
ctx               523 net/tls/tls_sw.c 	atomic_inc(&ctx->encrypt_pending);
ctx               527 net/tls/tls_sw.c 		atomic_dec(&ctx->encrypt_pending);
ctx               540 net/tls/tls_sw.c 	ctx->open_rec = NULL;
ctx               668 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               669 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec, *tmp = NULL;
ctx               759 net/tls/tls_sw.c 	rc = tls_do_encryption(sk, tls_ctx, ctx, req,
ctx               769 net/tls/tls_sw.c 		ctx->async_capable = 1;
ctx               776 net/tls/tls_sw.c 		ctx->open_rec = tmp;
ctx               787 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               869 net/tls/tls_sw.c 		bool reset_eval = !ctx->open_rec;
ctx               871 net/tls/tls_sw.c 		rec = ctx->open_rec;
ctx               895 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               896 net/tls/tls_sw.c 	struct tls_rec *rec = ctx->open_rec;
ctx               917 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx               918 net/tls/tls_sw.c 	bool async_capable = ctx->async_capable;
ctx               957 net/tls/tls_sw.c 		if (ctx->open_rec)
ctx               958 net/tls/tls_sw.c 			rec = ctx->open_rec;
ctx               960 net/tls/tls_sw.c 			rec = ctx->open_rec = tls_get_rec(sk);
ctx              1018 net/tls/tls_sw.c 				else if (ctx->open_rec && ret == -ENOSPC)
ctx              1087 net/tls/tls_sw.c 			if (ctx->open_rec)
ctx              1092 net/tls/tls_sw.c 		if (ctx->open_rec && msg_en->sg.size < required_size)
ctx              1100 net/tls/tls_sw.c 		spin_lock_bh(&ctx->encrypt_compl_lock);
ctx              1101 net/tls/tls_sw.c 		ctx->async_notify = true;
ctx              1103 net/tls/tls_sw.c 		pending = atomic_read(&ctx->encrypt_pending);
ctx              1104 net/tls/tls_sw.c 		spin_unlock_bh(&ctx->encrypt_compl_lock);
ctx              1106 net/tls/tls_sw.c 			crypto_wait_req(-EINPROGRESS, &ctx->async_wait);
ctx              1108 net/tls/tls_sw.c 			reinit_completion(&ctx->async_wait.completion);
ctx              1113 net/tls/tls_sw.c 		WRITE_ONCE(ctx->async_notify, false);
ctx              1115 net/tls/tls_sw.c 		if (ctx->async_wait.err) {
ctx              1116 net/tls/tls_sw.c 			ret = ctx->async_wait.err;
ctx              1122 net/tls/tls_sw.c 	if (test_and_clear_bit(BIT_TX_SCHEDULED, &ctx->tx_bitmask)) {
ctx              1123 net/tls/tls_sw.c 		cancel_delayed_work(&ctx->tx_work.work);
ctx              1140 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx              1164 net/tls/tls_sw.c 		if (ctx->open_rec)
ctx              1165 net/tls/tls_sw.c 			rec = ctx->open_rec;
ctx              1167 net/tls/tls_sw.c 			rec = ctx->open_rec = tls_get_rec(sk);
ctx              1230 net/tls/tls_sw.c 			if (ctx->open_rec)
ctx              1235 net/tls/tls_sw.c 		if (ctx->open_rec)
ctx              1241 net/tls/tls_sw.c 		if (test_and_clear_bit(BIT_TX_SCHEDULED, &ctx->tx_bitmask)) {
ctx              1242 net/tls/tls_sw.c 			cancel_delayed_work(&ctx->tx_work.work);
ctx              1284 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1288 net/tls/tls_sw.c 	while (!(skb = ctx->recv_pkt) && sk_psock_queue_empty(psock)) {
ctx              1308 net/tls/tls_sw.c 			      ctx->recv_pkt != skb ||
ctx              1396 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1430 net/tls/tls_sw.c 	aead_size = sizeof(*aead_req) + crypto_aead_reqsize(ctx->aead_recv);
ctx              1433 net/tls/tls_sw.c 	mem_size = mem_size + crypto_aead_ivsize(ctx->aead_recv);
ctx              1466 net/tls/tls_sw.c 		       crypto_aead_ivsize(ctx->aead_recv));
ctx              1476 net/tls/tls_sw.c 		     ctx->control, prot->version);
ctx              1532 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1537 net/tls/tls_sw.c 	if (!ctx->decrypted) {
ctx              1545 net/tls/tls_sw.c 		if (!ctx->decrypted) {
ctx              1559 net/tls/tls_sw.c 		pad = padding_length(ctx, prot, skb);
ctx              1567 net/tls/tls_sw.c 		ctx->decrypted = true;
ctx              1568 net/tls/tls_sw.c 		ctx->saved_data_ready(sk);
ctx              1589 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1603 net/tls/tls_sw.c 	ctx->recv_pkt = NULL;
ctx              1604 net/tls/tls_sw.c 	__strp_unpause(&ctx->strp);
ctx              1614 net/tls/tls_sw.c static int process_rx_list(struct tls_sw_context_rx *ctx,
ctx              1623 net/tls/tls_sw.c 	struct sk_buff *skb = skb_peek(&ctx->rx_list);
ctx              1647 net/tls/tls_sw.c 		skb = skb_peek_next(skb, &ctx->rx_list);
ctx              1704 net/tls/tls_sw.c 		next_skb = skb_peek_next(skb, &ctx->rx_list);
ctx              1707 net/tls/tls_sw.c 			skb_unlink(skb, &ctx->rx_list);
ctx              1726 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1752 net/tls/tls_sw.c 	err = process_rx_list(ctx, msg, &control, &cmsg, 0, len, false,
ctx              1768 net/tls/tls_sw.c 	while (len && (decrypted + copied < target || ctx->recv_pkt)) {
ctx              1794 net/tls/tls_sw.c 				tlm->control = ctx->control;
ctx              1802 net/tls/tls_sw.c 		    ctx->control == TLS_RECORD_TYPE_DATA &&
ctx              1807 net/tls/tls_sw.c 		if (ctx->control == TLS_RECORD_TYPE_DATA)
ctx              1808 net/tls/tls_sw.c 			async_capable = ctx->async_capable;
ctx              1823 net/tls/tls_sw.c 			tlm->control = ctx->control;
ctx              1884 net/tls/tls_sw.c 			skb_queue_tail(&ctx->rx_list, skb);
ctx              1894 net/tls/tls_sw.c 			if (ctx->control != TLS_RECORD_TYPE_DATA)
ctx              1904 net/tls/tls_sw.c 		spin_lock_bh(&ctx->decrypt_compl_lock);
ctx              1905 net/tls/tls_sw.c 		ctx->async_notify = true;
ctx              1906 net/tls/tls_sw.c 		pending = atomic_read(&ctx->decrypt_pending);
ctx              1907 net/tls/tls_sw.c 		spin_unlock_bh(&ctx->decrypt_compl_lock);
ctx              1909 net/tls/tls_sw.c 			err = crypto_wait_req(-EINPROGRESS, &ctx->async_wait);
ctx              1918 net/tls/tls_sw.c 			reinit_completion(&ctx->async_wait.completion);
ctx              1924 net/tls/tls_sw.c 		WRITE_ONCE(ctx->async_notify, false);
ctx              1928 net/tls/tls_sw.c 			err = process_rx_list(ctx, msg, &control, &cmsg, copied,
ctx              1931 net/tls/tls_sw.c 			err = process_rx_list(ctx, msg, &control, &cmsg, 0,
ctx              1954 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              1972 net/tls/tls_sw.c 	if (!ctx->decrypted) {
ctx              1976 net/tls/tls_sw.c 		if (ctx->control != TLS_RECORD_TYPE_DATA) {
ctx              1985 net/tls/tls_sw.c 		ctx->decrypted = true;
ctx              2005 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2015 net/tls/tls_sw.c 	return !ingress_empty || ctx->recv_pkt ||
ctx              2016 net/tls/tls_sw.c 		!skb_queue_empty(&ctx->rx_list);
ctx              2022 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2046 net/tls/tls_sw.c 	ctx->control = header[0];
ctx              2084 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2086 net/tls/tls_sw.c 	ctx->decrypted = false;
ctx              2088 net/tls/tls_sw.c 	ctx->recv_pkt = skb;
ctx              2091 net/tls/tls_sw.c 	ctx->saved_data_ready(strp->sk);
ctx              2097 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2100 net/tls/tls_sw.c 	strp_data_ready(&ctx->strp);
ctx              2105 net/tls/tls_sw.c 			ctx->saved_data_ready(sk);
ctx              2112 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx              2114 net/tls/tls_sw.c 	set_bit(BIT_TX_CLOSING, &ctx->tx_bitmask);
ctx              2115 net/tls/tls_sw.c 	set_bit(BIT_TX_SCHEDULED, &ctx->tx_bitmask);
ctx              2116 net/tls/tls_sw.c 	cancel_delayed_work_sync(&ctx->tx_work.work);
ctx              2122 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx              2126 net/tls/tls_sw.c 	smp_store_mb(ctx->async_notify, true);
ctx              2127 net/tls/tls_sw.c 	if (atomic_read(&ctx->encrypt_pending))
ctx              2128 net/tls/tls_sw.c 		crypto_wait_req(-EINPROGRESS, &ctx->async_wait);
ctx              2137 net/tls/tls_sw.c 		rec = list_first_entry(&ctx->tx_list,
ctx              2144 net/tls/tls_sw.c 	list_for_each_entry_safe(rec, tmp, &ctx->tx_list, list) {
ctx              2151 net/tls/tls_sw.c 	crypto_free_aead(ctx->aead_send);
ctx              2157 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx = tls_sw_ctx_tx(tls_ctx);
ctx              2159 net/tls/tls_sw.c 	kfree(ctx);
ctx              2165 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2170 net/tls/tls_sw.c 	if (ctx->aead_recv) {
ctx              2171 net/tls/tls_sw.c 		kfree_skb(ctx->recv_pkt);
ctx              2172 net/tls/tls_sw.c 		ctx->recv_pkt = NULL;
ctx              2173 net/tls/tls_sw.c 		skb_queue_purge(&ctx->rx_list);
ctx              2174 net/tls/tls_sw.c 		crypto_free_aead(ctx->aead_recv);
ctx              2175 net/tls/tls_sw.c 		strp_stop(&ctx->strp);
ctx              2180 net/tls/tls_sw.c 		if (ctx->saved_data_ready) {
ctx              2182 net/tls/tls_sw.c 			sk->sk_data_ready = ctx->saved_data_ready;
ctx              2190 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2192 net/tls/tls_sw.c 	strp_done(&ctx->strp);
ctx              2197 net/tls/tls_sw.c 	struct tls_sw_context_rx *ctx = tls_sw_ctx_rx(tls_ctx);
ctx              2199 net/tls/tls_sw.c 	kfree(ctx);
ctx              2218 net/tls/tls_sw.c 	struct tls_sw_context_tx *ctx;
ctx              2223 net/tls/tls_sw.c 	ctx = tls_sw_ctx_tx(tls_ctx);
ctx              2224 net/tls/tls_sw.c 	if (test_bit(BIT_TX_CLOSING, &ctx->tx_bitmask))
ctx              2227 net/tls/tls_sw.c 	if (!test_and_clear_bit(BIT_TX_SCHEDULED, &ctx->tx_bitmask))
ctx              2236 net/tls/tls_sw.c void tls_sw_write_space(struct sock *sk, struct tls_context *ctx)
ctx              2238 net/tls/tls_sw.c 	struct tls_sw_context_tx *tx_ctx = tls_sw_ctx_tx(ctx);
ctx              2258 net/tls/tls_sw.c int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
ctx              2277 net/tls/tls_sw.c 	if (!ctx) {
ctx              2283 net/tls/tls_sw.c 		if (!ctx->priv_ctx_tx) {
ctx              2289 net/tls/tls_sw.c 			ctx->priv_ctx_tx = sw_ctx_tx;
ctx              2292 net/tls/tls_sw.c 				(struct tls_sw_context_tx *)ctx->priv_ctx_tx;
ctx              2295 net/tls/tls_sw.c 		if (!ctx->priv_ctx_rx) {
ctx              2301 net/tls/tls_sw.c 			ctx->priv_ctx_rx = sw_ctx_rx;
ctx              2304 net/tls/tls_sw.c 				(struct tls_sw_context_rx *)ctx->priv_ctx_rx;
ctx              2311 net/tls/tls_sw.c 		crypto_info = &ctx->crypto_send.info;
ctx              2312 net/tls/tls_sw.c 		cctx = &ctx->tx;
ctx              2320 net/tls/tls_sw.c 		crypto_info = &ctx->crypto_recv.info;
ctx              2321 net/tls/tls_sw.c 		cctx = &ctx->rx;
ctx              2431 net/tls/tls_sw.c 	ctx->push_pending_record = tls_sw_push_pending_record;
ctx              2472 net/tls/tls_sw.c 		kfree(ctx->priv_ctx_tx);
ctx              2473 net/tls/tls_sw.c 		ctx->priv_ctx_tx = NULL;
ctx              2475 net/tls/tls_sw.c 		kfree(ctx->priv_ctx_rx);
ctx              2476 net/tls/tls_sw.c 		ctx->priv_ctx_rx = NULL;
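
The tls_sw.c hits above repeatedly recover typed TX/RX state from a generic tls_context through tls_sw_ctx_tx()/tls_sw_ctx_rx(), which cast the opaque priv_ctx_tx/priv_ctx_rx pointers back to their concrete types. A minimal userspace sketch of that opaque-pointer-plus-typed-accessor idiom, with invented stand-in names rather than the kernel's definitions:

#include <stdio.h>
#include <stdlib.h>

/* Generic container: protocol-specific state hides behind void pointers. */
struct generic_ctx {
	void *priv_ctx_tx;
	void *priv_ctx_rx;
};

struct sw_ctx_tx { int pending; };
struct sw_ctx_rx { int decrypted; };

/* Typed accessors: the only places that know the concrete types. */
static struct sw_ctx_tx *sw_ctx_tx(struct generic_ctx *ctx)
{
	return ctx->priv_ctx_tx;
}

static struct sw_ctx_rx *sw_ctx_rx(struct generic_ctx *ctx)
{
	return ctx->priv_ctx_rx;
}

int main(void)
{
	struct generic_ctx g = {
		.priv_ctx_tx = calloc(1, sizeof(struct sw_ctx_tx)),
		.priv_ctx_rx = calloc(1, sizeof(struct sw_ctx_rx)),
	};

	if (!g.priv_ctx_tx || !g.priv_ctx_rx)
		return 1;
	sw_ctx_rx(&g)->decrypted = 1;
	printf("rx decrypted=%d\n", sw_ctx_rx(&g)->decrypted);
	free(g.priv_ctx_tx);
	free(g.priv_ctx_rx);
	return 0;
}
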
ctx               234 net/vmw_vsock/hyperv_transport.c static void hvs_channel_cb(void *ctx)
ctx               236 net/vmw_vsock/hyperv_transport.c 	struct sock *sk = (struct sock *)ctx;
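
hvs_channel_cb() shows the classic opaque callback context: the channel layer stores a void *ctx at registration time, hands it back verbatim on every callback, and the callback casts it to the struct sock it registered. A compile-and-run sketch with made-up names:

#include <stdio.h>

struct sock { int id; };

static void channel_cb(void *ctx)
{
	struct sock *sk = (struct sock *)ctx;	/* recover the real type */

	printf("callback for sock %d\n", sk->id);
}

/* The framework only ever sees void *; it never inspects the payload. */
static void fire(void (*cb)(void *), void *ctx)
{
	cb(ctx);
}

int main(void)
{
	struct sock sk = { .id = 42 };

	fire(channel_cb, &sk);
	return 0;
}
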
ctx              1613 net/xfrm/xfrm_policy.c 			struct xfrm_sec_ctx *ctx)
ctx              1625 net/xfrm/xfrm_policy.c 		    xfrm_sec_ctx_match(ctx, pol->security))
ctx              1635 net/xfrm/xfrm_policy.c 					  struct xfrm_sec_ctx *ctx, int delete,
ctx              1669 net/xfrm/xfrm_policy.c 						      sel, ctx);
ctx              1678 net/xfrm/xfrm_policy.c 					      sel, ctx);
ctx              4165 net/xfrm/xfrm_policy.c 	struct xfrm_sec_ctx *ctx = xp->security;
ctx              4168 net/xfrm/xfrm_policy.c 	if (ctx)
ctx              4170 net/xfrm/xfrm_policy.c 				 ctx->ctx_alg, ctx->ctx_doi, ctx->ctx_str);
ctx              2590 net/xfrm/xfrm_state.c 	struct xfrm_sec_ctx *ctx = x->security;
ctx              2593 net/xfrm/xfrm_state.c 	if (ctx)
ctx              2595 net/xfrm/xfrm_state.c 				 ctx->ctx_alg, ctx->ctx_doi, ctx->ctx_str);
ctx              1887 net/xfrm/xfrm_user.c 		struct xfrm_sec_ctx *ctx;
ctx              1893 net/xfrm/xfrm_user.c 		ctx = NULL;
ctx              1897 net/xfrm/xfrm_user.c 			err = security_xfrm_policy_alloc(&ctx, uctx, GFP_KERNEL);
ctx              1902 net/xfrm/xfrm_user.c 					   ctx, delete, &err);
ctx              1903 net/xfrm/xfrm_user.c 		security_xfrm_policy_free(ctx);
ctx              2187 net/xfrm/xfrm_user.c 		struct xfrm_sec_ctx *ctx;
ctx              2193 net/xfrm/xfrm_user.c 		ctx = NULL;
ctx              2197 net/xfrm/xfrm_user.c 			err = security_xfrm_policy_alloc(&ctx, uctx, GFP_KERNEL);
ctx              2202 net/xfrm/xfrm_user.c 					   &p->sel, ctx, 0, &err);
ctx              2203 net/xfrm/xfrm_user.c 		security_xfrm_policy_free(ctx);
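
Both xfrm_user.c call sites follow the same lifecycle: ctx starts NULL, security_xfrm_policy_alloc() fills it in only when the caller supplied a security context, and security_xfrm_policy_free() runs unconditionally afterwards. A userspace sketch of that alloc-if-needed/always-free shape, assuming a free routine that tolerates NULL (all names are stand-ins):

#include <stdlib.h>
#include <string.h>

struct sec_ctx { char *str; };

static int sec_ctx_alloc(struct sec_ctx **ctxp, const char *ustr)
{
	struct sec_ctx *c = calloc(1, sizeof(*c));

	if (!c)
		return -1;
	c->str = strdup(ustr);
	if (!c->str) {
		free(c);
		return -1;
	}
	*ctxp = c;
	return 0;
}

static void sec_ctx_free(struct sec_ctx *c)
{
	if (!c)
		return;			/* assumed NULL-tolerant, like kfree() */
	free(c->str);
	free(c);
}

static int lookup_policy(const char *ustr)	/* ustr may be NULL */
{
	struct sec_ctx *ctx = NULL;
	int err = 0;

	if (ustr)
		err = sec_ctx_alloc(&ctx, ustr);
	if (!err) {
		/* ... the lookup would use ctx (possibly NULL) here ... */
	}
	sec_ctx_free(ctx);		/* unconditional, as in xfrm_user.c */
	return err;
}

int main(void)
{
	return lookup_policy("system_u:object_r:tmp_t:s0");
}
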
ctx               104 samples/bpf/cpustat_kern.c int bpf_prog1(struct cpu_args *ctx)
ctx               110 samples/bpf/cpustat_kern.c 	if (ctx->cpu_id > MAX_CPU)
ctx               113 samples/bpf/cpustat_kern.c 	cpu = ctx->cpu_id;
ctx               136 samples/bpf/cpustat_kern.c 	*cstate = ctx->state;
ctx               163 samples/bpf/cpustat_kern.c 	if (ctx->state != (u32)-1) {
ctx               212 samples/bpf/cpustat_kern.c int bpf_prog2(struct cpu_args *ctx)
ctx               218 samples/bpf/cpustat_kern.c 	cpu = ctx->cpu_id;
ctx               236 samples/bpf/cpustat_kern.c 	*pstate = ctx->state;
ctx                78 samples/bpf/ibumad_kern.c int on_ib_umad_read_recv(struct ib_umad_rw_args *ctx)
ctx                81 samples/bpf/ibumad_kern.c 	u8 class = ctx->mgmt_class;
ctx                98 samples/bpf/ibumad_kern.c int on_ib_umad_read_send(struct ib_umad_rw_args *ctx)
ctx               101 samples/bpf/ibumad_kern.c 	u8 class = ctx->mgmt_class;
ctx               118 samples/bpf/ibumad_kern.c int on_ib_umad_write(struct ib_umad_rw_args *ctx)
ctx               121 samples/bpf/ibumad_kern.c 	u8 class = ctx->mgmt_class;
ctx                29 samples/bpf/lathist_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                72 samples/bpf/lathist_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                99 samples/bpf/map_perf_test_kern.c int stress_hmap(struct pt_regs *ctx)
ctx               114 samples/bpf/map_perf_test_kern.c int stress_percpu_hmap(struct pt_regs *ctx)
ctx               128 samples/bpf/map_perf_test_kern.c int stress_hmap_alloc(struct pt_regs *ctx)
ctx               142 samples/bpf/map_perf_test_kern.c int stress_percpu_hmap_alloc(struct pt_regs *ctx)
ctx               156 samples/bpf/map_perf_test_kern.c int stress_lru_hmap_alloc(struct pt_regs *ctx)
ctx               176 samples/bpf/map_perf_test_kern.c 	in6 = (struct sockaddr_in6 *)PT_REGS_PARM2(ctx);
ctx               177 samples/bpf/map_perf_test_kern.c 	addrlen = (int)PT_REGS_PARM3(ctx);
ctx               235 samples/bpf/map_perf_test_kern.c int stress_lpm_trie_map_alloc(struct pt_regs *ctx)
ctx               257 samples/bpf/map_perf_test_kern.c int stress_hash_map_lookup(struct pt_regs *ctx)
ctx               270 samples/bpf/map_perf_test_kern.c int stress_array_map_lookup(struct pt_regs *ctx)
ctx                61 samples/bpf/offwaketime_kern.c int waker(struct pt_regs *ctx)
ctx                63 samples/bpf/offwaketime_kern.c 	struct task_struct *p = (void *) PT_REGS_PARM1(ctx);
ctx                70 samples/bpf/offwaketime_kern.c 	woke.ret = bpf_get_stackid(ctx, &stackmap, STACKID_FLAGS);
ctx                76 samples/bpf/offwaketime_kern.c static inline int update_counts(void *ctx, u32 pid, u64 delta)
ctx                84 samples/bpf/offwaketime_kern.c 	key.tret = bpf_get_stackid(ctx, &stackmap, STACKID_FLAGS);
ctx               118 samples/bpf/offwaketime_kern.c int oncpu(struct sched_switch_args *ctx)
ctx               121 samples/bpf/offwaketime_kern.c 	u32 pid = ctx->prev_pid;
ctx               124 samples/bpf/offwaketime_kern.c int oncpu(struct pt_regs *ctx)
ctx               126 samples/bpf/offwaketime_kern.c 	struct task_struct *p = (void *) PT_REGS_PARM1(ctx);
ctx               148 samples/bpf/offwaketime_kern.c 	return update_counts(ctx, pid, delta);
ctx                20 samples/bpf/parse_ldabs.c static inline int ip_is_fragment(struct __sk_buff *ctx, __u64 nhoff)
ctx                22 samples/bpf/parse_ldabs.c 	return load_half(ctx, nhoff + offsetof(struct iphdr, frag_off))
ctx                23 samples/bpf/sampleip_kern.c int do_sample(struct bpf_perf_event_data *ctx)
ctx                28 samples/bpf/sampleip_kern.c 	ip = PT_REGS_IP(&ctx->regs);
ctx                45 samples/bpf/sockex2_kern.c static inline int ip_is_fragment(struct __sk_buff *ctx, __u64 nhoff)
ctx                47 samples/bpf/sockex2_kern.c 	return load_half(ctx, nhoff + offsetof(struct iphdr, frag_off))
ctx                51 samples/bpf/sockex2_kern.c static inline __u32 ipv6_addr_hash(struct __sk_buff *ctx, __u64 off)
ctx                53 samples/bpf/sockex2_kern.c 	__u64 w0 = load_word(ctx, off);
ctx                54 samples/bpf/sockex2_kern.c 	__u64 w1 = load_word(ctx, off + 4);
ctx                55 samples/bpf/sockex2_kern.c 	__u64 w2 = load_word(ctx, off + 8);
ctx                56 samples/bpf/sockex2_kern.c 	__u64 w3 = load_word(ctx, off + 12);
ctx                74 samples/bpf/sockex3_kern.c static inline int ip_is_fragment(struct __sk_buff *ctx, __u64 nhoff)
ctx                76 samples/bpf/sockex3_kern.c 	return load_half(ctx, nhoff + offsetof(struct iphdr, frag_off))
ctx                80 samples/bpf/sockex3_kern.c static inline __u32 ipv6_addr_hash(struct __sk_buff *ctx, __u64 off)
ctx                82 samples/bpf/sockex3_kern.c 	__u64 w0 = load_word(ctx, off);
ctx                83 samples/bpf/sockex3_kern.c 	__u64 w1 = load_word(ctx, off + 4);
ctx                84 samples/bpf/sockex3_kern.c 	__u64 w2 = load_word(ctx, off + 8);
ctx                85 samples/bpf/sockex3_kern.c 	__u64 w3 = load_word(ctx, off + 12);
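
ipv6_addr_hash() in sockex2_kern.c and sockex3_kern.c loads the four 32-bit words of the IPv6 address with load_word() and folds them into a single hash; in the samples the fold is a plain XOR of the four words. A plain-C equivalent of that fold, reproduced outside BPF against a byte array:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static uint32_t ipv6_addr_hash(const uint8_t addr[16])
{
	uint32_t w[4];

	/* load_word() in the BPF sample reads network byte order; byte
	 * order only changes the value here, not the XOR-fold idea. */
	memcpy(w, addr, sizeof(w));
	return w[0] ^ w[1] ^ w[2] ^ w[3];	/* cheap, order-insensitive fold */
}

int main(void)
{
	uint8_t addr[16] = { [15] = 1 };	/* ::1 */

	printf("hash=%u\n", ipv6_addr_hash(addr));
	return 0;
}
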
ctx                35 samples/bpf/spintest_kern.c int foo(struct pt_regs *ctx) \
ctx                37 samples/bpf/spintest_kern.c 	long v = PT_REGS_IP(ctx), *val; \
ctx                43 samples/bpf/spintest_kern.c 	bpf_get_stackid(ctx, &stackmap, BPF_F_REUSE_STACKID); \
ctx                48 samples/bpf/syscall_tp_kern.c int trace_enter_open(struct syscalls_enter_open_args *ctx)
ctx                55 samples/bpf/syscall_tp_kern.c int trace_enter_open_at(struct syscalls_enter_open_args *ctx)
ctx                62 samples/bpf/syscall_tp_kern.c int trace_enter_exit(struct syscalls_exit_open_args *ctx)
ctx                69 samples/bpf/syscall_tp_kern.c int trace_enter_exit_at(struct syscalls_exit_open_args *ctx)
ctx                 8 samples/bpf/task_fd_query_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                14 samples/bpf/task_fd_query_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                26 samples/bpf/tcp_dumpstats_kern.c int _sockops(struct bpf_sock_ops *ctx)
ctx                33 samples/bpf/tcp_dumpstats_kern.c 	switch (ctx->op) {
ctx                35 samples/bpf/tcp_dumpstats_kern.c 		bpf_sock_ops_cb_flags_set(ctx, BPF_SOCK_OPS_RTT_CB_FLAG);
ctx                43 samples/bpf/tcp_dumpstats_kern.c 	sk = ctx->sk;
ctx                30 samples/bpf/test_current_task_under_cgroup_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx               104 samples/bpf/test_map_in_map_kern.c int trace_sys_connect(struct pt_regs *ctx)
ctx               113 samples/bpf/test_map_in_map_kern.c 	in6 = (struct sockaddr_in6 *)PT_REGS_PARM2(ctx);
ctx               114 samples/bpf/test_map_in_map_kern.c 	addrlen = (int)PT_REGS_PARM3(ctx);
ctx                15 samples/bpf/test_overhead_kprobe_kern.c int prog(struct pt_regs *ctx)
ctx                24 samples/bpf/test_overhead_kprobe_kern.c 	tsk = (void *)PT_REGS_PARM1(ctx);
ctx                28 samples/bpf/test_overhead_kprobe_kern.c 	bpf_probe_read(newcomm, sizeof(newcomm), (void *)PT_REGS_PARM2(ctx));
ctx                35 samples/bpf/test_overhead_kprobe_kern.c int prog2(struct pt_regs *ctx)
ctx                 7 samples/bpf/test_overhead_raw_tp_kern.c int prog(struct bpf_raw_tracepoint_args *ctx)
ctx                13 samples/bpf/test_overhead_raw_tp_kern.c int prog2(struct bpf_raw_tracepoint_args *ctx)
ctx                19 samples/bpf/test_overhead_tp_kern.c int prog(struct task_rename *ctx)
ctx                32 samples/bpf/test_overhead_tp_kern.c int prog2(struct urandom_read *ctx)
ctx                29 samples/bpf/test_probe_write_user_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                33 samples/bpf/test_probe_write_user_kern.c 	void *sockaddr_arg = (void *)PT_REGS_PARM2(ctx);
ctx                34 samples/bpf/test_probe_write_user_kern.c 	int sockaddr_len = (int)PT_REGS_PARM3(ctx);
ctx                38 samples/bpf/trace_event_kern.c int bpf_prog1(struct bpf_perf_event_data *ctx)
ctx                50 samples/bpf/trace_event_kern.c 	if (ctx->sample_period < 10000)
ctx                54 samples/bpf/trace_event_kern.c 	key.kernstack = bpf_get_stackid(ctx, &stackmap, KERN_STACKID_FLAGS);
ctx                55 samples/bpf/trace_event_kern.c 	key.userstack = bpf_get_stackid(ctx, &stackmap, USER_STACKID_FLAGS);
ctx                57 samples/bpf/trace_event_kern.c 		bpf_trace_printk(fmt, sizeof(fmt), cpu, ctx->sample_period,
ctx                58 samples/bpf/trace_event_kern.c 				 PT_REGS_IP(&ctx->regs));
ctx                62 samples/bpf/trace_event_kern.c 	ret = bpf_perf_prog_read_value(ctx, (void *)&value_buf, sizeof(struct bpf_perf_event_value));
ctx                68 samples/bpf/trace_event_kern.c 	if (ctx->addr != 0)
ctx                69 samples/bpf/trace_event_kern.c 	  bpf_trace_printk(addr_fmt, sizeof(addr_fmt), ctx->addr);
ctx                14 samples/bpf/trace_output_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                24 samples/bpf/trace_output_kern.c 	bpf_perf_event_output(ctx, &my_map, 0, &data, sizeof(data));
ctx                35 samples/bpf/trace_output_user.c static void print_bpf_output(void *ctx, int cpu, void *data, __u32 size)
ctx                21 samples/bpf/tracex1_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                32 samples/bpf/tracex1_kern.c 	skb = (struct sk_buff *) PT_REGS_PARM1(ctx);
ctx                24 samples/bpf/tracex2_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                33 samples/bpf/tracex2_kern.c 	BPF_KPROBE_READ_RET_IP(loc, ctx);
ctx                80 samples/bpf/tracex2_kern.c int bpf_prog3(struct pt_regs *ctx)
ctx                82 samples/bpf/tracex2_kern.c 	long write_size = PT_REGS_PARM3(ctx);
ctx                24 samples/bpf/tracex3_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                26 samples/bpf/tracex3_kern.c 	long rq = PT_REGS_PARM1(ctx);
ctx                52 samples/bpf/tracex3_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                54 samples/bpf/tracex3_kern.c 	long rq = PT_REGS_PARM1(ctx);
ctx                28 samples/bpf/tracex4_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                30 samples/bpf/tracex4_kern.c 	long ptr = PT_REGS_PARM2(ctx);
ctx                37 samples/bpf/tracex4_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                39 samples/bpf/tracex4_kern.c 	long ptr = PT_REGS_RC(ctx);
ctx                43 samples/bpf/tracex4_kern.c 	BPF_KRETPROBE_READ_RET_IP(ip, ctx);
ctx                29 samples/bpf/tracex5_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                31 samples/bpf/tracex5_kern.c 	int sc_nr = (int)PT_REGS_PARM1(ctx);
ctx                34 samples/bpf/tracex5_kern.c 	bpf_tail_call(ctx, &progs, sc_nr);
ctx                45 samples/bpf/tracex5_kern.c PROG(SYS__NR_write)(struct pt_regs *ctx)
ctx                49 samples/bpf/tracex5_kern.c 	bpf_probe_read(&sd, sizeof(sd), (void *)PT_REGS_PARM2(ctx));
ctx                58 samples/bpf/tracex5_kern.c PROG(SYS__NR_read)(struct pt_regs *ctx)
ctx                62 samples/bpf/tracex5_kern.c 	bpf_probe_read(&sd, sizeof(sd), (void *)PT_REGS_PARM2(ctx));
ctx                72 samples/bpf/tracex5_kern.c PROG(SYS__NR_mmap2)(struct pt_regs *ctx)
ctx                82 samples/bpf/tracex5_kern.c PROG(SYS__NR_mmap)(struct pt_regs *ctx)
ctx                26 samples/bpf/tracex6_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                47 samples/bpf/tracex6_kern.c int bpf_prog2(struct pt_regs *ctx)
ctx                 7 samples/bpf/tracex7_kern.c int bpf_prog1(struct pt_regs *ctx)
ctx                11 samples/bpf/tracex7_kern.c 	bpf_override_return(ctx, rc);
ctx                43 samples/bpf/xdp1_kern.c int xdp_prog1(struct xdp_md *ctx)
ctx                45 samples/bpf/xdp1_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                46 samples/bpf/xdp1_kern.c 	void *data = (void *)(long)ctx->data;
ctx                59 samples/bpf/xdp2_kern.c int xdp_prog1(struct xdp_md *ctx)
ctx                61 samples/bpf/xdp2_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                62 samples/bpf/xdp2_kern.c 	void *data = (void *)(long)ctx->data;
ctx                32 samples/bpf/xdp2skb_meta_kern.c int _xdp_mark(struct xdp_md *ctx)
ctx                41 samples/bpf/xdp2skb_meta_kern.c 	ret = bpf_xdp_adjust_meta(ctx, -(int)sizeof(*meta));
ctx                50 samples/bpf/xdp2skb_meta_kern.c 	data = (void *)(unsigned long)ctx->data;
ctx                53 samples/bpf/xdp2skb_meta_kern.c 	meta = (void *)(unsigned long)ctx->data_meta;
ctx                63 samples/bpf/xdp2skb_meta_kern.c int _tc_mark(struct __sk_buff *ctx)
ctx                65 samples/bpf/xdp2skb_meta_kern.c 	void *data      = (void *)(unsigned long)ctx->data;
ctx                66 samples/bpf/xdp2skb_meta_kern.c 	void *data_end  = (void *)(unsigned long)ctx->data_end;
ctx                67 samples/bpf/xdp2skb_meta_kern.c 	void *data_meta = (void *)(unsigned long)ctx->data_meta;
ctx                72 samples/bpf/xdp2skb_meta_kern.c 		ctx->mark = 41;
ctx                78 samples/bpf/xdp2skb_meta_kern.c 	ctx->mark = meta->mark; /* Transfer XDP-mark to SKB-mark */
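
The xdp2skb_meta lines demonstrate the data_meta channel between XDP and TC: the XDP program grows the metadata area in front of the frame with bpf_xdp_adjust_meta(), stores a small struct there, and the TC classifier reads it back through ctx->data_meta and copies the value into skb->mark. A condensed BPF C sketch of both halves; the section names and meta_info layout are illustrative, not the sample's exact ones:

#include <linux/bpf.h>
#include <linux/pkt_cls.h>
#include <bpf/bpf_helpers.h>

struct meta_info {
	__u32 mark;
};

SEC("xdp")
int xdp_store_mark(struct xdp_md *ctx)
{
	struct meta_info *meta;
	void *data;

	/* Grow the metadata area by sizeof(*meta) bytes (negative delta). */
	if (bpf_xdp_adjust_meta(ctx, -(int)sizeof(*meta)))
		return XDP_ABORTED;

	data = (void *)(unsigned long)ctx->data;
	meta = (void *)(unsigned long)ctx->data_meta;
	if ((void *)(meta + 1) > data)		/* verifier-mandated bound check */
		return XDP_ABORTED;

	meta->mark = 42;
	return XDP_PASS;
}

SEC("tc")
int tc_read_mark(struct __sk_buff *ctx)
{
	void *data      = (void *)(unsigned long)ctx->data;
	void *data_meta = (void *)(unsigned long)ctx->data_meta;
	struct meta_info *meta = data_meta;

	if ((void *)(meta + 1) > data)
		return TC_ACT_SHOT;

	ctx->mark = meta->mark;			/* transfer XDP mark to SKB mark */
	return TC_ACT_OK;
}

char _license[] SEC("license") = "GPL";
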
ctx                43 samples/bpf/xdp_fwd_kern.c static __always_inline int xdp_fwd_flags(struct xdp_md *ctx, u32 flags)
ctx                45 samples/bpf/xdp_fwd_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                46 samples/bpf/xdp_fwd_kern.c 	void *data = (void *)(long)ctx->data;
ctx               102 samples/bpf/xdp_fwd_kern.c 	fib_params.ifindex = ctx->ingress_ifindex;
ctx               104 samples/bpf/xdp_fwd_kern.c 	rc = bpf_fib_lookup(ctx, &fib_params, sizeof(fib_params), flags);
ctx               147 samples/bpf/xdp_fwd_kern.c int xdp_fwd_prog(struct xdp_md *ctx)
ctx               149 samples/bpf/xdp_fwd_kern.c 	return xdp_fwd_flags(ctx, 0);
ctx               153 samples/bpf/xdp_fwd_kern.c int xdp_fwd_direct_prog(struct xdp_md *ctx)
ctx               155 samples/bpf/xdp_fwd_kern.c 	return xdp_fwd_flags(ctx, BPF_FIB_LOOKUP_DIRECT);
ctx                45 samples/bpf/xdp_monitor_kern.c int xdp_redirect_collect_stat(struct xdp_redirect_ctx *ctx)
ctx                48 samples/bpf/xdp_monitor_kern.c 	int err = ctx->err;
ctx                69 samples/bpf/xdp_monitor_kern.c int trace_xdp_redirect_err(struct xdp_redirect_ctx *ctx)
ctx                71 samples/bpf/xdp_monitor_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx                76 samples/bpf/xdp_monitor_kern.c int trace_xdp_redirect_map_err(struct xdp_redirect_ctx *ctx)
ctx                78 samples/bpf/xdp_monitor_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx                83 samples/bpf/xdp_monitor_kern.c int trace_xdp_redirect(struct xdp_redirect_ctx *ctx)
ctx                85 samples/bpf/xdp_monitor_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx                90 samples/bpf/xdp_monitor_kern.c int trace_xdp_redirect_map(struct xdp_redirect_ctx *ctx)
ctx                92 samples/bpf/xdp_monitor_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx               106 samples/bpf/xdp_monitor_kern.c int trace_xdp_exception(struct xdp_exception_ctx *ctx)
ctx               111 samples/bpf/xdp_monitor_kern.c 	key = ctx->act;
ctx               160 samples/bpf/xdp_monitor_kern.c int trace_xdp_cpumap_enqueue(struct cpumap_enqueue_ctx *ctx)
ctx               162 samples/bpf/xdp_monitor_kern.c 	u32 to_cpu = ctx->to_cpu;
ctx               171 samples/bpf/xdp_monitor_kern.c 	rec->processed += ctx->processed;
ctx               172 samples/bpf/xdp_monitor_kern.c 	rec->dropped   += ctx->drops;
ctx               175 samples/bpf/xdp_monitor_kern.c 	if (ctx->processed > 0)
ctx               195 samples/bpf/xdp_monitor_kern.c int trace_xdp_cpumap_kthread(struct cpumap_kthread_ctx *ctx)
ctx               203 samples/bpf/xdp_monitor_kern.c 	rec->processed += ctx->processed;
ctx               204 samples/bpf/xdp_monitor_kern.c 	rec->dropped   += ctx->drops;
ctx               207 samples/bpf/xdp_monitor_kern.c 	if (ctx->sched)
ctx               236 samples/bpf/xdp_monitor_kern.c int trace_xdp_devmap_xmit(struct devmap_xmit_ctx *ctx)
ctx               244 samples/bpf/xdp_monitor_kern.c 	rec->processed += ctx->sent;
ctx               245 samples/bpf/xdp_monitor_kern.c 	rec->dropped   += ctx->drops;
ctx               251 samples/bpf/xdp_monitor_kern.c 	if (ctx->err)
ctx               255 samples/bpf/xdp_monitor_kern.c 	if (ctx->drops < 0)
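
Every xdp_monitor tracepoint program has the same shape: pull a few fields out of the tracepoint ctx, look up a per-CPU record, and bump counters without atomics, since each CPU writes only its own map slot. A BPF sketch under that assumption; the enqueue_ctx layout below only loosely mirrors the real tracepoint format and should be treated as illustrative:

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct datarec {
	__u64 processed;
	__u64 dropped;
};

struct {
	__uint(type, BPF_MAP_TYPE_PERCPU_ARRAY);
	__uint(max_entries, 1);
	__type(key, __u32);
	__type(value, struct datarec);
} stats SEC(".maps");

/* Illustrative field layout; the real program matches the offsets
 * published in the tracepoint's format file. */
struct enqueue_ctx {
	__u64 pad;		/* first 8 bytes are not accessible to BPF */
	int map_id;
	unsigned int processed;
	unsigned int drops;
	int to_cpu;
};

SEC("tracepoint/xdp/xdp_cpumap_enqueue")
int trace_enqueue(struct enqueue_ctx *ctx)
{
	__u32 key = 0;
	struct datarec *rec = bpf_map_lookup_elem(&stats, &key);

	if (!rec)
		return 0;
	rec->processed += ctx->processed;	/* per-CPU slot: no atomics */
	rec->dropped   += ctx->drops;
	return 0;
}

char _license[] SEC("license") = "GPL";
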
ctx               155 samples/bpf/xdp_redirect_cpu_kern.c u16 get_dest_port_ipv4_udp(struct xdp_md *ctx, u64 nh_off)
ctx               157 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               158 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               177 samples/bpf/xdp_redirect_cpu_kern.c int get_proto_ipv4(struct xdp_md *ctx, u64 nh_off)
ctx               179 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               180 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               189 samples/bpf/xdp_redirect_cpu_kern.c int get_proto_ipv6(struct xdp_md *ctx, u64 nh_off)
ctx               191 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               192 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               201 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum0_no_touch(struct xdp_md *ctx)
ctx               203 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               204 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               231 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum1_touch_data(struct xdp_md *ctx)
ctx               233 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               234 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               274 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum2_round_robin(struct xdp_md *ctx)
ctx               276 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               277 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               322 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum3_proto_separate(struct xdp_md *ctx)
ctx               324 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               325 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               348 samples/bpf/xdp_redirect_cpu_kern.c 		ip_proto = get_proto_ipv4(ctx, l3_offset);
ctx               351 samples/bpf/xdp_redirect_cpu_kern.c 		ip_proto = get_proto_ipv6(ctx, l3_offset);
ctx               390 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum4_ddos_filter_pktgen(struct xdp_md *ctx)
ctx               392 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               393 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               417 samples/bpf/xdp_redirect_cpu_kern.c 		ip_proto = get_proto_ipv4(ctx, l3_offset);
ctx               420 samples/bpf/xdp_redirect_cpu_kern.c 		ip_proto = get_proto_ipv6(ctx, l3_offset);
ctx               441 samples/bpf/xdp_redirect_cpu_kern.c 		dest_port = get_dest_port_ipv4_udp(ctx, l3_offset);
ctx               469 samples/bpf/xdp_redirect_cpu_kern.c u32 get_ipv4_hash_ip_pair(struct xdp_md *ctx, u64 nh_off)
ctx               471 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               472 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               486 samples/bpf/xdp_redirect_cpu_kern.c u32 get_ipv6_hash_ip_pair(struct xdp_md *ctx, u64 nh_off)
ctx               488 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               489 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               510 samples/bpf/xdp_redirect_cpu_kern.c int  xdp_prognum5_lb_hash_ip_pairs(struct xdp_md *ctx)
ctx               512 samples/bpf/xdp_redirect_cpu_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               513 samples/bpf/xdp_redirect_cpu_kern.c 	void *data     = (void *)(long)ctx->data;
ctx               542 samples/bpf/xdp_redirect_cpu_kern.c 		cpu_hash = get_ipv4_hash_ip_pair(ctx, l3_offset);
ctx               545 samples/bpf/xdp_redirect_cpu_kern.c 		cpu_hash = get_ipv6_hash_ip_pair(ctx, l3_offset);
ctx               592 samples/bpf/xdp_redirect_cpu_kern.c int xdp_redirect_collect_stat(struct xdp_redirect_ctx *ctx)
ctx               596 samples/bpf/xdp_redirect_cpu_kern.c 	int err = ctx->err;
ctx               616 samples/bpf/xdp_redirect_cpu_kern.c int trace_xdp_redirect_err(struct xdp_redirect_ctx *ctx)
ctx               618 samples/bpf/xdp_redirect_cpu_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx               622 samples/bpf/xdp_redirect_cpu_kern.c int trace_xdp_redirect_map_err(struct xdp_redirect_ctx *ctx)
ctx               624 samples/bpf/xdp_redirect_cpu_kern.c 	return xdp_redirect_collect_stat(ctx);
ctx               638 samples/bpf/xdp_redirect_cpu_kern.c int trace_xdp_exception(struct xdp_exception_ctx *ctx)
ctx               665 samples/bpf/xdp_redirect_cpu_kern.c int trace_xdp_cpumap_enqueue(struct cpumap_enqueue_ctx *ctx)
ctx               667 samples/bpf/xdp_redirect_cpu_kern.c 	u32 to_cpu = ctx->to_cpu;
ctx               676 samples/bpf/xdp_redirect_cpu_kern.c 	rec->processed += ctx->processed;
ctx               677 samples/bpf/xdp_redirect_cpu_kern.c 	rec->dropped   += ctx->drops;
ctx               680 samples/bpf/xdp_redirect_cpu_kern.c 	if (ctx->processed > 0)
ctx               705 samples/bpf/xdp_redirect_cpu_kern.c int trace_xdp_cpumap_kthread(struct cpumap_kthread_ctx *ctx)
ctx               713 samples/bpf/xdp_redirect_cpu_kern.c 	rec->processed += ctx->processed;
ctx               714 samples/bpf/xdp_redirect_cpu_kern.c 	rec->dropped   += ctx->drops;
ctx               717 samples/bpf/xdp_redirect_cpu_kern.c 	if (ctx->sched)
ctx                56 samples/bpf/xdp_redirect_kern.c int xdp_redirect_prog(struct xdp_md *ctx)
ctx                58 samples/bpf/xdp_redirect_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                59 samples/bpf/xdp_redirect_kern.c 	void *data = (void *)(long)ctx->data;
ctx                85 samples/bpf/xdp_redirect_kern.c int xdp_redirect_dummy_prog(struct xdp_md *ctx)
ctx                56 samples/bpf/xdp_redirect_map_kern.c int xdp_redirect_map_prog(struct xdp_md *ctx)
ctx                58 samples/bpf/xdp_redirect_map_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                59 samples/bpf/xdp_redirect_map_kern.c 	void *data = (void *)(long)ctx->data;
ctx                87 samples/bpf/xdp_redirect_map_kern.c int xdp_redirect_dummy_prog(struct xdp_md *ctx)
ctx               109 samples/bpf/xdp_router_ipv4_kern.c int xdp_router_ipv4_prog(struct xdp_md *ctx)
ctx               111 samples/bpf/xdp_router_ipv4_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               113 samples/bpf/xdp_router_ipv4_kern.c 	void *data = (void *)(long)ctx->data;
ctx                75 samples/bpf/xdp_rxq_info_kern.c int  xdp_prognum0(struct xdp_md *ctx)
ctx                77 samples/bpf/xdp_rxq_info_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                78 samples/bpf/xdp_rxq_info_kern.c 	void *data     = (void *)(long)ctx->data;
ctx                93 samples/bpf/xdp_rxq_info_kern.c 	ingress_ifindex = ctx->ingress_ifindex;
ctx               109 samples/bpf/xdp_rxq_info_kern.c 	key = ctx->rx_queue_index;
ctx                18 samples/bpf/xdp_sample_pkts_kern.c int xdp_sample_prog(struct xdp_md *ctx)
ctx                20 samples/bpf/xdp_sample_pkts_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                21 samples/bpf/xdp_sample_pkts_kern.c 	void *data = (void *)(long)ctx->data;
ctx                49 samples/bpf/xdp_sample_pkts_kern.c 		ret = bpf_perf_event_output(ctx, &my_map, flags,
ctx                75 samples/bpf/xdp_sample_pkts_user.c static void print_bpf_output(void *ctx, int cpu, void *data, __u32 size)
ctx                64 samples/seccomp/bpf-direct.c 	ucontext_t *ctx = (ucontext_t *)(void_context);
ctx                71 samples/seccomp/bpf-direct.c 	if (!ctx)
ctx                73 samples/seccomp/bpf-direct.c 	syscall = ctx->uc_mcontext.gregs[REG_SYSCALL];
ctx                74 samples/seccomp/bpf-direct.c 	buf = (char *) ctx->uc_mcontext.gregs[REG_ARG1];
ctx                75 samples/seccomp/bpf-direct.c 	len = (size_t) ctx->uc_mcontext.gregs[REG_ARG2];
ctx                79 samples/seccomp/bpf-direct.c 	if (ctx->uc_mcontext.gregs[REG_ARG0] != STDERR_FILENO)
ctx                82 samples/seccomp/bpf-direct.c 	ctx->uc_mcontext.gregs[REG_RESULT] = -1;
ctx                85 samples/seccomp/bpf-direct.c 		ctx->uc_mcontext.gregs[REG_RESULT] = bytes;
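
The bpf-direct.c fragments emulate a trapped write() from a SIGSYS handler: the syscall number and arguments are fished out of the saved register file in the ucontext, and the emulated result is written back into the return-value register. A handler-only sketch for x86-64 (installing it with sigaction(SA_SIGINFO) plus the seccomp filter itself is omitted; the REG_* mapping mirrors the sample's x86-64 #defines):

#define _GNU_SOURCE
#include <signal.h>
#include <sys/syscall.h>
#include <ucontext.h>
#include <unistd.h>

/* x86-64 register mapping, as in the sample. */
#define REG_RESULT	REG_RAX
#define REG_SYSCALL	REG_RAX
#define REG_ARG0	REG_RDI
#define REG_ARG1	REG_RSI
#define REG_ARG2	REG_RDX

static void emulator(int nr, siginfo_t *info, void *void_context)
{
	ucontext_t *ctx = (ucontext_t *)void_context;

	(void)nr;
	(void)info;	/* the sample additionally checks info->si_code */
	if (!ctx)
		return;
	if (ctx->uc_mcontext.gregs[REG_SYSCALL] != __NR_write)
		return;
	if (ctx->uc_mcontext.gregs[REG_ARG0] != STDERR_FILENO) {
		/* Deny anything that is not stderr: fake a failure. */
		ctx->uc_mcontext.gregs[REG_RESULT] = -1;
		return;
	}
	/* Perform the write ourselves and report the result to the caller. */
	ctx->uc_mcontext.gregs[REG_RESULT] =
		write(STDERR_FILENO,
		      (const void *)ctx->uc_mcontext.gregs[REG_ARG1],
		      (size_t)ctx->uc_mcontext.gregs[REG_ARG2]);
}
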
ctx               154 scripts/mod/sumversion.c static inline void md4_transform_helper(struct md4_ctx *ctx)
ctx               156 scripts/mod/sumversion.c 	le32_to_cpu_array(ctx->block, sizeof(ctx->block) / sizeof(uint32_t));
ctx               157 scripts/mod/sumversion.c 	md4_transform(ctx->hash, ctx->block);
ctx               877 security/apparmor/domain.c 	struct aa_task_ctx *ctx;
ctx               892 security/apparmor/domain.c 	ctx = task_ctx(current);
ctx               894 security/apparmor/domain.c 	AA_BUG(!ctx);
ctx               906 security/apparmor/domain.c 	    !ctx->nnp)
ctx               907 security/apparmor/domain.c 		ctx->nnp = aa_get_label(label);
ctx               912 security/apparmor/domain.c 	if (ctx->onexec)
ctx               913 security/apparmor/domain.c 		new = handle_onexec(label, ctx->onexec, ctx->token,
ctx               938 security/apparmor/domain.c 	    !unconfined(label) && !aa_label_is_subset(new, ctx->nnp)) {
ctx              1169 security/apparmor/domain.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx              1179 security/apparmor/domain.c 	previous = aa_get_newest_label(ctx->previous);
ctx              1188 security/apparmor/domain.c 	if (task_no_new_privs(current) && !unconfined(label) && !ctx->nnp)
ctx              1189 security/apparmor/domain.c 		ctx->nnp = aa_get_label(label);
ctx              1216 security/apparmor/domain.c 		    !aa_label_is_subset(new, ctx->nnp)) {
ctx              1237 security/apparmor/domain.c 		    !aa_label_is_subset(previous, ctx->nnp)) {
ctx              1319 security/apparmor/domain.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx              1333 security/apparmor/domain.c 	if (task_no_new_privs(current) && !unconfined(label) && !ctx->nnp)
ctx              1334 security/apparmor/domain.c 		ctx->nnp = aa_get_label(label);
ctx              1432 security/apparmor/domain.c 		    !aa_label_is_subset(new, ctx->nnp)) {
ctx                57 security/apparmor/include/file.h 	struct aa_file_ctx *ctx;
ctx                59 security/apparmor/include/file.h 	ctx = kzalloc(sizeof(struct aa_file_ctx), gfp);
ctx                60 security/apparmor/include/file.h 	if (ctx) {
ctx                61 security/apparmor/include/file.h 		spin_lock_init(&ctx->lock);
ctx                62 security/apparmor/include/file.h 		rcu_assign_pointer(ctx->label, aa_get_label(label));
ctx                64 security/apparmor/include/file.h 	return ctx;
ctx                71 security/apparmor/include/file.h static inline void aa_free_file_ctx(struct aa_file_ctx *ctx)
ctx                73 security/apparmor/include/file.h 	if (ctx) {
ctx                74 security/apparmor/include/file.h 		aa_put_label(rcu_access_pointer(ctx->label));
ctx                75 security/apparmor/include/file.h 		kzfree(ctx);
ctx                79 security/apparmor/include/file.h static inline struct aa_label *aa_get_file_label(struct aa_file_ctx *ctx)
ctx                81 security/apparmor/include/file.h 	return aa_get_label_rcu(&ctx->label);
ctx                42 security/apparmor/include/task.h static inline void aa_free_task_ctx(struct aa_task_ctx *ctx)
ctx                44 security/apparmor/include/task.h 	if (ctx) {
ctx                45 security/apparmor/include/task.h 		aa_put_label(ctx->nnp);
ctx                46 security/apparmor/include/task.h 		aa_put_label(ctx->previous);
ctx                47 security/apparmor/include/task.h 		aa_put_label(ctx->onexec);
ctx                69 security/apparmor/include/task.h static inline void aa_clear_task_ctx_trans(struct aa_task_ctx *ctx)
ctx                71 security/apparmor/include/task.h 	AA_BUG(!ctx);
ctx                73 security/apparmor/include/task.h 	aa_put_label(ctx->previous);
ctx                74 security/apparmor/include/task.h 	aa_put_label(ctx->onexec);
ctx                75 security/apparmor/include/task.h 	ctx->previous = NULL;
ctx                76 security/apparmor/include/task.h 	ctx->onexec = NULL;
ctx                77 security/apparmor/include/task.h 	ctx->token = 0;
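
aa_free_task_ctx() and aa_clear_task_ctx_trans() illustrate the refcounted-swap discipline: put (release) the old label before dropping the pointer, then NULL the fields so a second clear is harmless. A toy userspace version with a manual refcount; every name here is invented:

#include <stdlib.h>

struct label { int refs; };

static struct label *label_get(struct label *l)
{
	if (l)
		l->refs++;
	return l;
}

static void label_put(struct label *l)
{
	if (l && --l->refs == 0)
		free(l);
}

struct task_ctx {
	struct label *previous;
	struct label *onexec;
	unsigned long long token;
};

/* Safe to call twice: the second call only puts NULL pointers. */
static void clear_task_ctx_trans(struct task_ctx *ctx)
{
	label_put(ctx->previous);
	label_put(ctx->onexec);
	ctx->previous = NULL;
	ctx->onexec = NULL;
	ctx->token = 0;
}

int main(void)
{
	struct task_ctx ctx = { 0 };
	struct label *l = calloc(1, sizeof(*l));

	if (!l)
		return 1;
	ctx.onexec = label_get(l);	/* refs: 1 */
	clear_task_ctx_trans(&ctx);	/* refs: 0, label freed */
	return 0;
}
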
ctx               428 security/apparmor/lsm.c 	struct aa_file_ctx *ctx = file_ctx(file);
ctx               431 security/apparmor/lsm.c 	spin_lock_init(&ctx->lock);
ctx               432 security/apparmor/lsm.c 	rcu_assign_pointer(ctx->label, aa_get_label(label));
ctx               439 security/apparmor/lsm.c 	struct aa_file_ctx *ctx = file_ctx(file);
ctx               441 security/apparmor/lsm.c 	if (ctx)
ctx               442 security/apparmor/lsm.c 		aa_put_label(rcu_access_pointer(ctx->label));
ctx               581 security/apparmor/lsm.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx               586 security/apparmor/lsm.c 	else if (strcmp(name, "prev") == 0  && ctx->previous)
ctx               587 security/apparmor/lsm.c 		label = aa_get_newest_label(ctx->previous);
ctx               588 security/apparmor/lsm.c 	else if (strcmp(name, "exec") == 0 && ctx->onexec)
ctx               589 security/apparmor/lsm.c 		label = aa_get_newest_label(ctx->onexec);
ctx               761 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx;
ctx               763 security/apparmor/lsm.c 	ctx = kzalloc(sizeof(*ctx), flags);
ctx               764 security/apparmor/lsm.c 	if (!ctx)
ctx               767 security/apparmor/lsm.c 	SK_CTX(sk) = ctx;
ctx               777 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx               780 security/apparmor/lsm.c 	aa_put_label(ctx->label);
ctx               781 security/apparmor/lsm.c 	aa_put_label(ctx->peer);
ctx               782 security/apparmor/lsm.c 	kfree(ctx);
ctx               791 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx               794 security/apparmor/lsm.c 	new->label = aa_get_label(ctx->label);
ctx               795 security/apparmor/lsm.c 	new->peer = aa_get_label(ctx->peer);
ctx               843 security/apparmor/lsm.c 		struct aa_sk_ctx *ctx = SK_CTX(sock->sk);
ctx               845 security/apparmor/lsm.c 		aa_put_label(ctx->label);
ctx               846 security/apparmor/lsm.c 		ctx->label = aa_get_label(label);
ctx              1028 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx              1033 security/apparmor/lsm.c 	return apparmor_secmark_check(ctx->label, OP_RECVMSG, AA_MAY_RECEIVE,
ctx              1041 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx              1043 security/apparmor/lsm.c 	if (ctx->peer)
ctx              1044 security/apparmor/lsm.c 		return ctx->peer;
ctx              1125 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx              1127 security/apparmor/lsm.c 	if (!ctx->label)
ctx              1128 security/apparmor/lsm.c 		ctx->label = aa_get_current_label();
ctx              1135 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx = SK_CTX(sk);
ctx              1140 security/apparmor/lsm.c 	return apparmor_secmark_check(ctx->label, OP_CONNECT, AA_MAY_CONNECT,
ctx              1617 security/apparmor/lsm.c 	struct aa_sk_ctx *ctx;
ctx              1627 security/apparmor/lsm.c 	ctx = SK_CTX(sk);
ctx              1628 security/apparmor/lsm.c 	if (!apparmor_secmark_check(ctx->label, OP_SENDMSG, AA_MAY_SEND,
ctx                44 security/apparmor/task.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx                59 security/apparmor/task.c 	if (ctx->nnp && label_is_stale(ctx->nnp)) {
ctx                60 security/apparmor/task.c 		struct aa_label *tmp = ctx->nnp;
ctx                62 security/apparmor/task.c 		ctx->nnp = aa_get_newest_label(tmp);
ctx                95 security/apparmor/task.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx                98 security/apparmor/task.c 	aa_put_label(ctx->onexec);
ctx                99 security/apparmor/task.c 	ctx->onexec = label;
ctx               100 security/apparmor/task.c 	ctx->token = stack;
ctx               117 security/apparmor/task.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx               125 security/apparmor/task.c 	if (!ctx->previous) {
ctx               127 security/apparmor/task.c 		ctx->previous = cred_label(new);
ctx               128 security/apparmor/task.c 		ctx->token = token;
ctx               129 security/apparmor/task.c 	} else if (ctx->token == token) {
ctx               139 security/apparmor/task.c 	aa_put_label(ctx->onexec);
ctx               140 security/apparmor/task.c 	ctx->onexec = NULL;
ctx               157 security/apparmor/task.c 	struct aa_task_ctx *ctx = task_ctx(current);
ctx               160 security/apparmor/task.c 	if (ctx->token != token)
ctx               163 security/apparmor/task.c 	if (!ctx->previous)
ctx               171 security/apparmor/task.c 	set_cred_label(new, aa_get_newest_label(ctx->previous));
ctx               174 security/apparmor/task.c 	aa_clear_task_ctx_trans(ctx);
ctx               236 security/integrity/integrity.h integrity_audit_log_start(struct audit_context *ctx, gfp_t gfp_mask, int type)
ctx               238 security/integrity/integrity.h 	return audit_log_start(ctx, gfp_mask, type);
ctx               250 security/integrity/integrity.h integrity_audit_log_start(struct audit_context *ctx, gfp_t gfp_mask, int type)
ctx                84 security/keys/dh.c 	char ctx[];
ctx               142 security/keys/internal.h 				    struct keyring_search_context *ctx);
ctx               144 security/keys/internal.h extern key_ref_t search_cred_keyrings_rcu(struct keyring_search_context *ctx);
ctx               145 security/keys/internal.h extern key_ref_t search_process_keyrings_rcu(struct keyring_search_context *ctx);
ctx               460 security/keys/keyring.c 	struct keyring_read_iterator_context *ctx = data;
ctx               464 security/keys/keyring.c 	       key->type->name, key->serial, ctx->count, ctx->buflen);
ctx               466 security/keys/keyring.c 	if (ctx->count >= ctx->buflen)
ctx               469 security/keys/keyring.c 	*ctx->buffer++ = key->serial;
ctx               470 security/keys/keyring.c 	ctx->count += sizeof(key->serial);
ctx               484 security/keys/keyring.c 	struct keyring_read_iterator_context ctx;
ctx               494 security/keys/keyring.c 		ctx.buffer = (key_serial_t __user *)buffer;
ctx               495 security/keys/keyring.c 		ctx.buflen = buflen;
ctx               496 security/keys/keyring.c 		ctx.count = 0;
ctx               498 security/keys/keyring.c 					  keyring_read_iterator, &ctx);
ctx               576 security/keys/keyring.c 	struct keyring_search_context *ctx = iterator_data;
ctx               584 security/keys/keyring.c 	if (key->type != ctx->index_key.type) {
ctx               590 security/keys/keyring.c 	if (ctx->flags & KEYRING_SEARCH_DO_STATE_CHECK) {
ctx               595 security/keys/keyring.c 			ctx->result = ERR_PTR(-EKEYREVOKED);
ctx               596 security/keys/keyring.c 			kleave(" = %d [invrev]", ctx->skipped_ret);
ctx               600 security/keys/keyring.c 		if (expiry && ctx->now >= expiry) {
ctx               601 security/keys/keyring.c 			if (!(ctx->flags & KEYRING_SEARCH_SKIP_EXPIRED))
ctx               602 security/keys/keyring.c 				ctx->result = ERR_PTR(-EKEYEXPIRED);
ctx               603 security/keys/keyring.c 			kleave(" = %d [expire]", ctx->skipped_ret);
ctx               609 security/keys/keyring.c 	if (!ctx->match_data.cmp(key, &ctx->match_data)) {
ctx               615 security/keys/keyring.c 	if (!(ctx->flags & KEYRING_SEARCH_NO_CHECK_PERM) &&
ctx               616 security/keys/keyring.c 	    key_task_permission(make_key_ref(key, ctx->possessed),
ctx               617 security/keys/keyring.c 				ctx->cred, KEY_NEED_SEARCH) < 0) {
ctx               618 security/keys/keyring.c 		ctx->result = ERR_PTR(-EACCES);
ctx               619 security/keys/keyring.c 		kleave(" = %d [!perm]", ctx->skipped_ret);
ctx               623 security/keys/keyring.c 	if (ctx->flags & KEYRING_SEARCH_DO_STATE_CHECK) {
ctx               626 security/keys/keyring.c 			ctx->result = ERR_PTR(state);
ctx               627 security/keys/keyring.c 			kleave(" = %d [neg]", ctx->skipped_ret);
ctx               633 security/keys/keyring.c 	ctx->result = make_key_ref(key, ctx->possessed);
ctx               638 security/keys/keyring.c 	return ctx->skipped_ret;
ctx               646 security/keys/keyring.c static int search_keyring(struct key *keyring, struct keyring_search_context *ctx)
ctx               648 security/keys/keyring.c 	if (ctx->match_data.lookup_type == KEYRING_SEARCH_LOOKUP_DIRECT) {
ctx               653 security/keys/keyring.c 					  &ctx->index_key);
ctx               654 security/keys/keyring.c 		return object ? ctx->iterator(object, ctx) : 0;
ctx               656 security/keys/keyring.c 	return assoc_array_iterate(&keyring->keys, ctx->iterator, ctx);
ctx               664 security/keys/keyring.c 				   struct keyring_search_context *ctx)
ctx               680 security/keys/keyring.c 	       ctx->index_key.type->name,
ctx               681 security/keys/keyring.c 	       ctx->index_key.description);
ctx               684 security/keys/keyring.c 	BUG_ON((ctx->flags & STATE_CHECKS) == 0 ||
ctx               685 security/keys/keyring.c 	       (ctx->flags & STATE_CHECKS) == STATE_CHECKS);
ctx               687 security/keys/keyring.c 	if (ctx->index_key.description)
ctx               688 security/keys/keyring.c 		key_set_index_key(&ctx->index_key);
ctx               693 security/keys/keyring.c 	if (ctx->match_data.lookup_type == KEYRING_SEARCH_LOOKUP_ITERATE ||
ctx               694 security/keys/keyring.c 	    keyring_compare_object(keyring, &ctx->index_key)) {
ctx               695 security/keys/keyring.c 		ctx->skipped_ret = 2;
ctx               696 security/keys/keyring.c 		switch (ctx->iterator(keyring_key_to_ptr(keyring), ctx)) {
ctx               706 security/keys/keyring.c 	ctx->skipped_ret = 0;
ctx               718 security/keys/keyring.c 	if (search_keyring(keyring, ctx))
ctx               729 security/keys/keyring.c 	if (!(ctx->flags & KEYRING_SEARCH_RECURSE))
ctx               784 security/keys/keyring.c 			if (ctx->flags & KEYRING_SEARCH_DETECT_TOO_DEEP) {
ctx               785 security/keys/keyring.c 				ctx->result = ERR_PTR(-ELOOP);
ctx               792 security/keys/keyring.c 		if (!(ctx->flags & KEYRING_SEARCH_NO_CHECK_PERM) &&
ctx               793 security/keys/keyring.c 		    key_task_permission(make_key_ref(key, ctx->possessed),
ctx               794 security/keys/keyring.c 					ctx->cred, KEY_NEED_SEARCH) < 0)
ctx               853 security/keys/keyring.c 	key = key_ref_to_ptr(ctx->result);
ctx               855 security/keys/keyring.c 	if (!(ctx->flags & KEYRING_SEARCH_NO_UPDATE_TIME)) {
ctx               856 security/keys/keyring.c 		key->last_used_at = ctx->now;
ctx               857 security/keys/keyring.c 		keyring->last_used_at = ctx->now;
ctx               859 security/keys/keyring.c 			stack[--sp].keyring->last_used_at = ctx->now;
ctx               900 security/keys/keyring.c 			     struct keyring_search_context *ctx)
ctx               905 security/keys/keyring.c 	ctx->iterator = keyring_search_iterator;
ctx               906 security/keys/keyring.c 	ctx->possessed = is_key_possessed(keyring_ref);
ctx               907 security/keys/keyring.c 	ctx->result = ERR_PTR(-EAGAIN);
ctx               915 security/keys/keyring.c 	if (!(ctx->flags & KEYRING_SEARCH_NO_CHECK_PERM)) {
ctx               916 security/keys/keyring.c 		err = key_task_permission(keyring_ref, ctx->cred, KEY_NEED_SEARCH);
ctx               921 security/keys/keyring.c 	ctx->now = ktime_get_real_seconds();
ctx               922 security/keys/keyring.c 	if (search_nested_keyrings(keyring, ctx))
ctx               923 security/keys/keyring.c 		__key_get(key_ref_to_ptr(ctx->result));
ctx               924 security/keys/keyring.c 	return ctx->result;
ctx               942 security/keys/keyring.c 	struct keyring_search_context ctx = {
ctx               956 security/keys/keyring.c 		ctx.flags |= KEYRING_SEARCH_RECURSE;
ctx               958 security/keys/keyring.c 		ret = type->match_preparse(&ctx.match_data);
ctx               964 security/keys/keyring.c 	key = keyring_search_rcu(keyring, &ctx);
ctx               968 security/keys/keyring.c 		type->match_free(&ctx.match_data);
ctx              1190 security/keys/keyring.c 	struct keyring_search_context *ctx = iterator_data;
ctx              1197 security/keys/keyring.c 	if (key != ctx->match_data.raw_data)
ctx              1200 security/keys/keyring.c 	ctx->result = ERR_PTR(-EDEADLK);
ctx              1213 security/keys/keyring.c 	struct keyring_search_context ctx = {
ctx              1226 security/keys/keyring.c 	search_nested_keyrings(B, &ctx);
ctx              1228 security/keys/keyring.c 	return PTR_ERR(ctx.result) == -EAGAIN ? 0 : PTR_ERR(ctx.result);
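
keyring.c threads a keyring_search_context through assoc_array_iterate() as opaque iterator data; the callback casts it back, stashes the match in ctx->result, and returns nonzero to stop the walk (keyring_detect_cycle_iterator above is the minimal case). A generic sketch of that iterate-with-context pattern over a plain array standing in for the kernel's assoc_array:

#include <stdio.h>

struct search_ctx {
	int wanted;
	const int *result;
};

static int search_iterator(const void *object, void *data)
{
	struct search_ctx *ctx = data;	/* recover the search state */
	const int *key = object;

	if (*key != ctx->wanted)
		return 0;		/* keep iterating */
	ctx->result = key;
	return 1;			/* stop: found */
}

static int iterate(const int *objs, int n,
		   int (*it)(const void *, void *), void *data)
{
	for (int i = 0; i < n; i++)
		if (it(&objs[i], data))
			return 1;
	return 0;
}

int main(void)
{
	int keys[] = { 3, 7, 42 };
	struct search_ctx ctx = { .wanted = 7 };

	if (iterate(keys, 3, search_iterator, &ctx))
		printf("found %d\n", *ctx.result);
	return 0;
}
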
ctx               165 security/keys/proc.c 	struct keyring_search_context ctx = {
ctx               182 security/keys/proc.c 		skey_ref = search_cred_keyrings_rcu(&ctx);
ctx               191 security/keys/proc.c 	rc = key_task_permission(key_ref, ctx.cred, KEY_NEED_VIEW);
ctx               191 security/keys/process_keys.c 	struct keyring_search_context ctx = {
ctx               204 security/keys/process_keys.c 	ctx.index_key.desc_len = snprintf(buf, sizeof(buf), "_uid_ses.%u",
ctx               209 security/keys/process_keys.c 					       &ctx);
ctx               422 security/keys/process_keys.c key_ref_t search_cred_keyrings_rcu(struct keyring_search_context *ctx)
ctx               426 security/keys/process_keys.c 	const struct cred *cred = ctx->cred;
ctx               442 security/keys/process_keys.c 			make_key_ref(cred->thread_keyring, 1), ctx);
ctx               460 security/keys/process_keys.c 			make_key_ref(cred->process_keyring, 1), ctx);
ctx               481 security/keys/process_keys.c 			make_key_ref(cred->session_keyring, 1), ctx);
ctx               502 security/keys/process_keys.c 					     ctx);
ctx               539 security/keys/process_keys.c key_ref_t search_process_keyrings_rcu(struct keyring_search_context *ctx)
ctx               544 security/keys/process_keys.c 	key_ref = search_cred_keyrings_rcu(ctx);
ctx               553 security/keys/process_keys.c 	if (ctx->cred->request_key_auth &&
ctx               554 security/keys/process_keys.c 	    ctx->cred == current_cred() &&
ctx               555 security/keys/process_keys.c 	    ctx->index_key.type != &key_type_request_key_auth
ctx               557 security/keys/process_keys.c 		const struct cred *cred = ctx->cred;
ctx               560 security/keys/process_keys.c 			rka = ctx->cred->request_key_auth->payload.data[0];
ctx               563 security/keys/process_keys.c 			ctx->cred = rka->cred;
ctx               564 security/keys/process_keys.c 			key_ref = search_cred_keyrings_rcu(ctx);
ctx               565 security/keys/process_keys.c 			ctx->cred = cred;
ctx               614 security/keys/process_keys.c 	struct keyring_search_context ctx = {
ctx               626 security/keys/process_keys.c 	ctx.cred = get_current_cred();
ctx               631 security/keys/process_keys.c 		if (!ctx.cred->thread_keyring) {
ctx               643 security/keys/process_keys.c 		key = ctx.cred->thread_keyring;
ctx               649 security/keys/process_keys.c 		if (!ctx.cred->process_keyring) {
ctx               661 security/keys/process_keys.c 		key = ctx.cred->process_keyring;
ctx               667 security/keys/process_keys.c 		if (!ctx.cred->session_keyring) {
ctx               683 security/keys/process_keys.c 				    &ctx.cred->session_keyring->flags) &&
ctx               691 security/keys/process_keys.c 		key = ctx.cred->session_keyring;
ctx               716 security/keys/process_keys.c 		key = ctx.cred->request_key_auth;
ctx               725 security/keys/process_keys.c 		if (!ctx.cred->request_key_auth)
ctx               728 security/keys/process_keys.c 		down_read(&ctx.cred->request_key_auth->sem);
ctx               730 security/keys/process_keys.c 			     &ctx.cred->request_key_auth->flags)) {
ctx               734 security/keys/process_keys.c 			rka = ctx.cred->request_key_auth->payload.data[0];
ctx               738 security/keys/process_keys.c 		up_read(&ctx.cred->request_key_auth->sem);
ctx               758 security/keys/process_keys.c 		ctx.index_key			= key->index_key;
ctx               759 security/keys/process_keys.c 		ctx.match_data.raw_data		= key;
ctx               762 security/keys/process_keys.c 		skey_ref = search_process_keyrings_rcu(&ctx);
ctx               804 security/keys/process_keys.c 	ret = key_task_permission(key_ref, ctx.cred, perm);
ctx               811 security/keys/process_keys.c 	put_cred(ctx.cred);
ctx               822 security/keys/process_keys.c 	put_cred(ctx.cred);
ctx                22 security/keys/request_key.c static struct key *check_cached_key(struct keyring_search_context *ctx)
ctx                28 security/keys/request_key.c 	    ctx->match_data.cmp(key, &ctx->match_data) &&
ctx               366 security/keys/request_key.c static int construct_alloc_key(struct keyring_search_context *ctx,
ctx               379 security/keys/request_key.c 	       ctx->index_key.type->name, ctx->index_key.description);
ctx               386 security/keys/request_key.c 	if (ctx->index_key.type->read)
ctx               388 security/keys/request_key.c 	if (ctx->index_key.type == &key_type_keyring ||
ctx               389 security/keys/request_key.c 	    ctx->index_key.type->update)
ctx               392 security/keys/request_key.c 	key = key_alloc(ctx->index_key.type, ctx->index_key.description,
ctx               393 security/keys/request_key.c 			ctx->cred->fsuid, ctx->cred->fsgid, ctx->cred,
ctx               401 security/keys/request_key.c 		ret = __key_link_lock(dest_keyring, &ctx->index_key);
ctx               404 security/keys/request_key.c 		ret = __key_link_begin(dest_keyring, &ctx->index_key, &edit);
ctx               415 security/keys/request_key.c 	key_ref = search_process_keyrings_rcu(ctx);
ctx               425 security/keys/request_key.c 		__key_link_end(dest_keyring, &ctx->index_key, edit);
ctx               441 security/keys/request_key.c 		__key_link_end(dest_keyring, &ctx->index_key, edit);
ctx               457 security/keys/request_key.c 	__key_link_end(dest_keyring, &ctx->index_key, edit);
ctx               473 security/keys/request_key.c static struct key *construct_key_and_link(struct keyring_search_context *ctx,
ctx               486 security/keys/request_key.c 	if (ctx->index_key.type == &key_type_keyring)
ctx               499 security/keys/request_key.c 	ret = construct_alloc_key(ctx, dest_keyring, flags, user, &key);
ctx               567 security/keys/request_key.c 	struct keyring_search_context ctx = {
ctx               585 security/keys/request_key.c 	       ctx.index_key.type->name, ctx.index_key.description,
ctx               589 security/keys/request_key.c 		ret = type->match_preparse(&ctx.match_data);
ctx               596 security/keys/request_key.c 	key = check_cached_key(&ctx);
ctx               602 security/keys/request_key.c 	key_ref = search_process_keyrings_rcu(&ctx);
ctx               637 security/keys/request_key.c 		key = construct_key_and_link(&ctx, callout_info, callout_len,
ctx               643 security/keys/request_key.c 		type->match_free(&ctx.match_data);
ctx               770 security/keys/request_key.c 	struct keyring_search_context ctx = {
ctx               787 security/keys/request_key.c 	key = check_cached_key(&ctx);
ctx               792 security/keys/request_key.c 	key_ref = search_process_keyrings_rcu(&ctx);
ctx               249 security/keys/request_key_auth.c 	struct keyring_search_context ctx = {
ctx               262 security/keys/request_key_auth.c 	ctx.index_key.desc_len = sprintf(description, "%x", target_id);
ctx               265 security/keys/request_key_auth.c 	authkey_ref = search_process_keyrings_rcu(&ctx);
ctx                39 security/keys/trusted.c 	char ctx[];
ctx               952 security/security.c 					const struct qstr *name, void **ctx,
ctx               956 security/security.c 				name, ctx, ctxlen);
ctx              1941 security/security.c int security_inode_notifysecctx(struct inode *inode, void *ctx, u32 ctxlen)
ctx              1943 security/security.c 	return call_int_hook(inode_notifysecctx, 0, inode, ctx, ctxlen);
ctx              1947 security/security.c int security_inode_setsecctx(struct dentry *dentry, void *ctx, u32 ctxlen)
ctx              1949 security/security.c 	return call_int_hook(inode_setsecctx, 0, dentry, ctx, ctxlen);
ctx              1953 security/security.c int security_inode_getsecctx(struct inode *inode, void **ctx, u32 *ctxlen)
ctx              1955 security/security.c 	return call_int_hook(inode_getsecctx, -EOPNOTSUPP, inode, ctx, ctxlen);
ctx              2241 security/security.c void security_xfrm_policy_free(struct xfrm_sec_ctx *ctx)
ctx              2243 security/security.c 	call_void_hook(xfrm_policy_free_security, ctx);
ctx              2247 security/security.c int security_xfrm_policy_delete(struct xfrm_sec_ctx *ctx)
ctx              2249 security/security.c 	return call_int_hook(xfrm_policy_delete_security, 0, ctx);
ctx              2276 security/security.c int security_xfrm_policy_lookup(struct xfrm_sec_ctx *ctx, u32 fl_secid, u8 dir)
ctx              2278 security/security.c 	return call_int_hook(xfrm_policy_lookup, 0, ctx, fl_secid, dir);
ctx              2868 security/selinux/hooks.c 					const struct qstr *name, void **ctx,
ctx              2881 security/selinux/hooks.c 	return security_sid_to_context(&selinux_state, newsid, (char **)ctx,
ctx              6521 security/selinux/hooks.c static int selinux_inode_notifysecctx(struct inode *inode, void *ctx, u32 ctxlen)
ctx              6524 security/selinux/hooks.c 					   ctx, ctxlen, 0);
ctx              6532 security/selinux/hooks.c static int selinux_inode_setsecctx(struct dentry *dentry, void *ctx, u32 ctxlen)
ctx              6534 security/selinux/hooks.c 	return __vfs_setxattr_noperm(dentry, XATTR_NAME_SELINUX, ctx, ctxlen, 0);
ctx              6537 security/selinux/hooks.c static int selinux_inode_getsecctx(struct inode *inode, void **ctx, u32 *ctxlen)
ctx              6541 security/selinux/hooks.c 						ctx, true);
ctx                18 security/selinux/include/xfrm.h void selinux_xfrm_policy_free(struct xfrm_sec_ctx *ctx);
ctx                19 security/selinux/include/xfrm.h int selinux_xfrm_policy_delete(struct xfrm_sec_ctx *ctx);
ctx                26 security/selinux/include/xfrm.h int selinux_xfrm_policy_lookup(struct xfrm_sec_ctx *ctx, u32 fl_secid, u8 dir);
ctx              1377 security/selinux/ss/services.c 				    struct context *ctx,
ctx              1386 security/selinux/ss/services.c 	context_init(ctx);
ctx              1407 security/selinux/ss/services.c 	ctx->user = usrdatum->value;
ctx              1422 security/selinux/ss/services.c 	ctx->role = role->value;
ctx              1435 security/selinux/ss/services.c 	ctx->type = typdatum->value;
ctx              1437 security/selinux/ss/services.c 	rc = mls_context_to_sid(pol, oldc, p, ctx, sidtabp, def_sid);
ctx              1443 security/selinux/ss/services.c 	if (!policydb_context_isvalid(pol, ctx))
ctx              1448 security/selinux/ss/services.c 		context_destroy(ctx);
ctx              3589 security/selinux/ss/services.c 	struct context *ctx;
ctx              3605 security/selinux/ss/services.c 		ctx = sidtab_search(sidtab, SECINITSID_NETMSG);
ctx              3606 security/selinux/ss/services.c 		if (ctx == NULL)
ctx              3610 security/selinux/ss/services.c 		ctx_new.user = ctx->user;
ctx              3611 security/selinux/ss/services.c 		ctx_new.role = ctx->role;
ctx              3612 security/selinux/ss/services.c 		ctx_new.type = ctx->type;
ctx              3657 security/selinux/ss/services.c 	struct context *ctx;
ctx              3665 security/selinux/ss/services.c 	ctx = sidtab_search(state->ss->sidtab, sid);
ctx              3666 security/selinux/ss/services.c 	if (ctx == NULL)
ctx              3670 security/selinux/ss/services.c 	secattr->domain = kstrdup(sym_name(policydb, SYM_TYPES, ctx->type - 1),
ctx              3677 security/selinux/ss/services.c 	mls_export_netlbl_lvl(policydb, ctx, secattr);
ctx              3678 security/selinux/ss/services.c 	rc = mls_export_netlbl_cat(policydb, ctx, secattr);
ctx                55 security/selinux/xfrm.c static inline int selinux_authorizable_ctx(struct xfrm_sec_ctx *ctx)
ctx                57 security/selinux/xfrm.c 	return (ctx &&
ctx                58 security/selinux/xfrm.c 		(ctx->ctx_doi == XFRM_SC_DOI_LSM) &&
ctx                59 security/selinux/xfrm.c 		(ctx->ctx_alg == XFRM_SC_ALG_SELINUX));
ctx                80 security/selinux/xfrm.c 	struct xfrm_sec_ctx *ctx = NULL;
ctx                92 security/selinux/xfrm.c 	ctx = kmalloc(sizeof(*ctx) + str_len + 1, gfp);
ctx                93 security/selinux/xfrm.c 	if (!ctx)
ctx                96 security/selinux/xfrm.c 	ctx->ctx_doi = XFRM_SC_DOI_LSM;
ctx                97 security/selinux/xfrm.c 	ctx->ctx_alg = XFRM_SC_ALG_SELINUX;
ctx                98 security/selinux/xfrm.c 	ctx->ctx_len = str_len;
ctx                99 security/selinux/xfrm.c 	memcpy(ctx->ctx_str, &uctx[1], str_len);
ctx               100 security/selinux/xfrm.c 	ctx->ctx_str[str_len] = '\0';
ctx               101 security/selinux/xfrm.c 	rc = security_context_to_sid(&selinux_state, ctx->ctx_str, str_len,
ctx               102 security/selinux/xfrm.c 				     &ctx->ctx_sid, gfp);
ctx               107 security/selinux/xfrm.c 			  tsec->sid, ctx->ctx_sid,
ctx               112 security/selinux/xfrm.c 	*ctxp = ctx;
ctx               117 security/selinux/xfrm.c 	kfree(ctx);
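
The allocation path above is the classic flexible-array idiom: xfrm_sec_ctx ends in ctx_str[], so a single kmalloc(sizeof(*ctx) + str_len + 1) covers both header and string, the length is recorded in ctx_len, and the copied string is explicitly NUL-terminated. A standalone sketch of the same idiom with malloc follows; the struct layout is abbreviated and is not the real xfrm_sec_ctx.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct sec_ctx {
        unsigned char doi, alg;   /* abbreviated header fields */
        unsigned short len;
        char str[];               /* flexible array member */
};

static struct sec_ctx *sec_ctx_alloc(const char *label)
{
        size_t str_len = strlen(label);
        /* one allocation covers the header plus string plus NUL */
        struct sec_ctx *ctx = malloc(sizeof(*ctx) + str_len + 1);

        if (!ctx)
                return NULL;
        ctx->doi = 1;
        ctx->alg = 1;
        ctx->len = (unsigned short)str_len;
        memcpy(ctx->str, label, str_len);
        ctx->str[str_len] = '\0';  /* mirrors ctx->ctx_str[str_len] = '\0' */
        return ctx;
}

int main(void)
{
        struct sec_ctx *ctx = sec_ctx_alloc("system_u:object_r:ipsec_spd_t:s0");
        if (!ctx)
                return 1;
        printf("%u bytes: %s\n", ctx->len, ctx->str);
        free(ctx);
        return 0;
}

One allocation also means one kfree() on every exit path, which is exactly what the error label at line 117 relies on.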
ctx               124 security/selinux/xfrm.c static void selinux_xfrm_free(struct xfrm_sec_ctx *ctx)
ctx               126 security/selinux/xfrm.c 	if (!ctx)
ctx               130 security/selinux/xfrm.c 	kfree(ctx);
ctx               136 security/selinux/xfrm.c static int selinux_xfrm_delete(struct xfrm_sec_ctx *ctx)
ctx               140 security/selinux/xfrm.c 	if (!ctx)
ctx               144 security/selinux/xfrm.c 			    tsec->sid, ctx->ctx_sid,
ctx               153 security/selinux/xfrm.c int selinux_xfrm_policy_lookup(struct xfrm_sec_ctx *ctx, u32 fl_secid, u8 dir)
ctx               159 security/selinux/xfrm.c 	if (!ctx)
ctx               163 security/selinux/xfrm.c 	if (!selinux_authorizable_ctx(ctx))
ctx               167 security/selinux/xfrm.c 			  fl_secid, ctx->ctx_sid,
ctx               238 security/selinux/xfrm.c 				struct xfrm_sec_ctx *ctx = x->security;
ctx               241 security/selinux/xfrm.c 					sid_session = ctx->ctx_sid;
ctx               244 security/selinux/xfrm.c 				} else if (sid_session != ctx->ctx_sid) {
ctx               316 security/selinux/xfrm.c void selinux_xfrm_policy_free(struct xfrm_sec_ctx *ctx)
ctx               318 security/selinux/xfrm.c 	selinux_xfrm_free(ctx);
ctx               324 security/selinux/xfrm.c int selinux_xfrm_policy_delete(struct xfrm_sec_ctx *ctx)
ctx               326 security/selinux/xfrm.c 	return selinux_xfrm_delete(ctx);
ctx               347 security/selinux/xfrm.c 	struct xfrm_sec_ctx *ctx;
ctx               362 security/selinux/xfrm.c 	ctx = kmalloc(sizeof(*ctx) + str_len, GFP_ATOMIC);
ctx               363 security/selinux/xfrm.c 	if (!ctx) {
ctx               368 security/selinux/xfrm.c 	ctx->ctx_doi = XFRM_SC_DOI_LSM;
ctx               369 security/selinux/xfrm.c 	ctx->ctx_alg = XFRM_SC_ALG_SELINUX;
ctx               370 security/selinux/xfrm.c 	ctx->ctx_sid = secid;
ctx               371 security/selinux/xfrm.c 	ctx->ctx_len = str_len;
ctx               372 security/selinux/xfrm.c 	memcpy(ctx->ctx_str, ctx_str, str_len);
ctx               374 security/selinux/xfrm.c 	x->security = ctx;
ctx               416 security/selinux/xfrm.c 				struct xfrm_sec_ctx *ctx = x->security;
ctx               417 security/selinux/xfrm.c 				peer_sid = ctx->ctx_sid;
ctx              4480 security/smack/smack_lsm.c static int smack_inode_notifysecctx(struct inode *inode, void *ctx, u32 ctxlen)
ctx              4482 security/smack/smack_lsm.c 	return smack_inode_setsecurity(inode, XATTR_SMACK_SUFFIX, ctx, ctxlen, 0);
ctx              4485 security/smack/smack_lsm.c static int smack_inode_setsecctx(struct dentry *dentry, void *ctx, u32 ctxlen)
ctx              4487 security/smack/smack_lsm.c 	return __vfs_setxattr_noperm(dentry, XATTR_NAME_SMACK, ctx, ctxlen, 0);
ctx              4490 security/smack/smack_lsm.c static int smack_inode_getsecctx(struct inode *inode, void **ctx, u32 *ctxlen)
ctx              4494 security/smack/smack_lsm.c 	*ctx = skp->smk_known;
ctx               118 sound/core/seq/oss/seq_oss_readq.c 	struct readq_sysex_ctx *ctx = ptr;
ctx               120 sound/core/seq/oss/seq_oss_readq.c 	return snd_seq_oss_readq_puts(ctx->readq, ctx->dev, buf, count);
ctx               126 sound/core/seq/oss/seq_oss_readq.c 	struct readq_sysex_ctx ctx = {
ctx               133 sound/core/seq/oss/seq_oss_readq.c 	return snd_seq_dump_var_event(ev, readq_dump_sysex, &ctx);
ctx               368 sound/pci/hda/hda_controller.c 		struct system_counterval_t *system, void *ctx)
ctx               370 sound/pci/hda/hda_controller.c 	struct snd_pcm_substream *substream = ctx;
ctx               470 sound/pci/hda/hda_controller.c 		struct system_counterval_t *system, void *ctx)
ctx                72 sound/soc/au1x/ac97c.c static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg)
ctx                74 sound/soc/au1x/ac97c.c 	return __raw_readl(ctx->mmio + reg);
ctx                77 sound/soc/au1x/ac97c.c static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v)
ctx                79 sound/soc/au1x/ac97c.c 	__raw_writel(v, ctx->mmio + reg);
ctx                86 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97);
ctx                93 sound/soc/au1x/ac97c.c 		mutex_lock(&ctx->lock);
ctx                96 sound/soc/au1x/ac97c.c 		while ((RD(ctx, AC97_STATUS) & STAT_CP) && --tmo)
ctx               103 sound/soc/au1x/ac97c.c 		WR(ctx, AC97_CMDRESP, CMD_IDX(r) | CMD_READ);
ctx               109 sound/soc/au1x/ac97c.c 		while ((RD(ctx, AC97_STATUS) & STAT_CP) && --tmo)
ctx               111 sound/soc/au1x/ac97c.c 		data = RD(ctx, AC97_CMDRESP);
ctx               117 sound/soc/au1x/ac97c.c 		mutex_unlock(&ctx->lock);
ctx               128 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97);
ctx               133 sound/soc/au1x/ac97c.c 		mutex_lock(&ctx->lock);
ctx               135 sound/soc/au1x/ac97c.c 		for (tmo = 5; (RD(ctx, AC97_STATUS) & STAT_CP) && tmo; tmo--)
ctx               142 sound/soc/au1x/ac97c.c 		WR(ctx, AC97_CMDRESP, CMD_WRITE | CMD_IDX(r) | CMD_SET_DATA(v));
ctx               144 sound/soc/au1x/ac97c.c 		for (tmo = 10; (RD(ctx, AC97_STATUS) & STAT_CP) && tmo; tmo--)
ctx               149 sound/soc/au1x/ac97c.c 		mutex_unlock(&ctx->lock);
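
The ac97c read and write paths above poll the STAT_CP ("command pending") status bit with a bounded countdown both before and after issuing a command, so a wedged codec can never hang the caller indefinitely. A tiny userspace model of that bounded-poll idiom is sketched below; the fake register stands in for RD(ctx, AC97_STATUS) and the bit value is invented.

#include <stdio.h>

#define STAT_CP 0x01            /* hypothetical "command pending" bit */

static unsigned fake_status = STAT_CP;

static unsigned read_status(void)
{
        /* pretend the controller clears CP after a few polls */
        static int polls;
        if (++polls >= 3)
                fake_status &= ~STAT_CP;
        return fake_status;
}

/* returns 0 once CP clears, -1 if the countdown expires first */
static int wait_cmd_done(int tmo)
{
        while ((read_status() & STAT_CP) && --tmo)
                ;               /* the real driver delays here */
        return tmo ? 0 : -1;
}

int main(void)
{
        printf("wait: %s\n", wait_cmd_done(6) ? "timed out" : "ok");
        return 0;
}

The `--tmo` in the loop condition is what bounds the wait: when the countdown reaches zero the loop exits regardless of the status bit, and the caller can distinguish timeout from success.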
ctx               157 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97);
ctx               159 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg | CFG_SG | CFG_SN);
ctx               161 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg | CFG_SG);
ctx               162 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg);
ctx               167 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = ac97_to_ctx(ac97);
ctx               170 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg | CFG_RS);
ctx               172 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg);
ctx               176 sound/soc/au1x/ac97c.c 	while (((RD(ctx, AC97_STATUS) & STAT_RD) == 0) && --i)
ctx               193 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               194 sound/soc/au1x/ac97c.c 	snd_soc_dai_set_dma_data(dai, substream, &ctx->dmaids[0]);
ctx               234 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx;
ctx               236 sound/soc/au1x/ac97c.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               237 sound/soc/au1x/ac97c.c 	if (!ctx)
ctx               240 sound/soc/au1x/ac97c.c 	mutex_init(&ctx->lock);
ctx               251 sound/soc/au1x/ac97c.c 	ctx->mmio = devm_ioremap_nocache(&pdev->dev, iores->start,
ctx               253 sound/soc/au1x/ac97c.c 	if (!ctx->mmio)
ctx               259 sound/soc/au1x/ac97c.c 	ctx->dmaids[SNDRV_PCM_STREAM_PLAYBACK] = dmares->start;
ctx               264 sound/soc/au1x/ac97c.c 	ctx->dmaids[SNDRV_PCM_STREAM_CAPTURE] = dmares->start;
ctx               267 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_D | EN_CE);
ctx               268 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_CE);
ctx               270 sound/soc/au1x/ac97c.c 	ctx->cfg = CFG_RC(3) | CFG_XS(3);
ctx               271 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg);
ctx               273 sound/soc/au1x/ac97c.c 	platform_set_drvdata(pdev, ctx);
ctx               284 sound/soc/au1x/ac97c.c 	ac97c_workdata = ctx;
ctx               290 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = platform_get_drvdata(pdev);
ctx               294 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_D);	/* clock off, disable */
ctx               304 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = dev_get_drvdata(dev);
ctx               306 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_D);	/* clock off, disable */
ctx               313 sound/soc/au1x/ac97c.c 	struct au1xpsc_audio_data *ctx = dev_get_drvdata(dev);
ctx               315 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_D | EN_CE);
ctx               316 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_ENABLE, EN_CE);
ctx               317 sound/soc/au1x/ac97c.c 	WR(ctx, AC97_CONFIG, ctx->cfg);
ctx               186 sound/soc/au1x/dma.c 	struct alchemy_pcm_ctx *ctx = ss_to_ctx(ss);
ctx               187 sound/soc/au1x/dma.c 	return &(ctx->stream[ss->stream]);
ctx               192 sound/soc/au1x/dma.c 	struct alchemy_pcm_ctx *ctx = ss_to_ctx(substream);
ctx               203 sound/soc/au1x/dma.c 	ctx->stream[s].dma = request_au1000_dma(dmaids[s], name,
ctx               205 sound/soc/au1x/dma.c 					&ctx->stream[s]);
ctx               206 sound/soc/au1x/dma.c 	set_dma_mode(ctx->stream[s].dma,
ctx               207 sound/soc/au1x/dma.c 		     get_dma_mode(ctx->stream[s].dma) & ~DMA_NC);
ctx               209 sound/soc/au1x/dma.c 	ctx->stream[s].substream = substream;
ctx               210 sound/soc/au1x/dma.c 	ctx->stream[s].buffer = NULL;
ctx               218 sound/soc/au1x/dma.c 	struct alchemy_pcm_ctx *ctx = ss_to_ctx(substream);
ctx               221 sound/soc/au1x/dma.c 	ctx->stream[stype].substream = NULL;
ctx               222 sound/soc/au1x/dma.c 	free_au1000_dma(ctx->stream[stype].dma);
ctx               312 sound/soc/au1x/dma.c 	struct alchemy_pcm_ctx *ctx;
ctx               314 sound/soc/au1x/dma.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               315 sound/soc/au1x/dma.c 	if (!ctx)
ctx               318 sound/soc/au1x/dma.c 	platform_set_drvdata(pdev, ctx);
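
Both au1x drivers above allocate their per-device ctx with devm_kzalloc() and stash it with platform_set_drvdata(), so the remove and PM callbacks can recover it via platform_get_drvdata()/dev_get_drvdata() and no explicit kfree() is ever needed. A userspace model of that lifecycle is sketched below; struct device and the accessors are simplified stand-ins, and the freeing that devm handles automatically is done by hand here.

#include <stdio.h>
#include <stdlib.h>

/* userspace stand-ins for struct device and the drvdata accessors */
struct device { void *drvdata; };
static void dev_set_drvdata(struct device *d, void *p) { d->drvdata = p; }
static void *dev_get_drvdata(struct device *d)         { return d->drvdata; }

struct audio_ctx { unsigned long cfg; int dmaids[2]; };

static int probe(struct device *dev)
{
        /* devm_kzalloc() analogue: zeroed, owned by the device */
        struct audio_ctx *ctx = calloc(1, sizeof(*ctx));
        if (!ctx)
                return -1;
        ctx->cfg = 0x33;
        dev_set_drvdata(dev, ctx);
        return 0;
}

static void suspend(struct device *dev)
{
        struct audio_ctx *ctx = dev_get_drvdata(dev);
        printf("suspending, cfg=%#lx\n", ctx->cfg);
}

int main(void)
{
        struct device dev = { 0 };
        if (probe(&dev))
                return 1;
        suspend(&dev);
        free(dev.drvdata);      /* devm would do this automatically */
        return 0;
}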
ctx                70 sound/soc/au1x/i2sc.c static inline unsigned long RD(struct au1xpsc_audio_data *ctx, int reg)
ctx                72 sound/soc/au1x/i2sc.c 	return __raw_readl(ctx->mmio + reg);
ctx                75 sound/soc/au1x/i2sc.c static inline void WR(struct au1xpsc_audio_data *ctx, int reg, unsigned long v)
ctx                77 sound/soc/au1x/i2sc.c 	__raw_writel(v, ctx->mmio + reg);
ctx                83 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(cpu_dai);
ctx                88 sound/soc/au1x/i2sc.c 	c = ctx->cfg;
ctx               131 sound/soc/au1x/i2sc.c 	ctx->cfg = c;
ctx               139 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               146 sound/soc/au1x/i2sc.c 		WR(ctx, I2S_ENABLE, EN_D | EN_CE);
ctx               147 sound/soc/au1x/i2sc.c 		WR(ctx, I2S_ENABLE, EN_CE);
ctx               148 sound/soc/au1x/i2sc.c 		ctx->cfg |= (stype == PCM_TX) ? CFG_TN : CFG_RN;
ctx               149 sound/soc/au1x/i2sc.c 		WR(ctx, I2S_CFG, ctx->cfg);
ctx               153 sound/soc/au1x/i2sc.c 		ctx->cfg &= ~((stype == PCM_TX) ? CFG_TN : CFG_RN);
ctx               154 sound/soc/au1x/i2sc.c 		WR(ctx, I2S_CFG, ctx->cfg);
ctx               155 sound/soc/au1x/i2sc.c 		WR(ctx, I2S_ENABLE, EN_D);		/* power off */
ctx               185 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               192 sound/soc/au1x/i2sc.c 	ctx->cfg &= ~CFG_SZ_MASK;
ctx               193 sound/soc/au1x/i2sc.c 	ctx->cfg |= v;
ctx               200 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               201 sound/soc/au1x/i2sc.c 	snd_soc_dai_set_dma_data(dai, substream, &ctx->dmaids[0]);
ctx               236 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx;
ctx               238 sound/soc/au1x/i2sc.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               239 sound/soc/au1x/i2sc.c 	if (!ctx)
ctx               251 sound/soc/au1x/i2sc.c 	ctx->mmio = devm_ioremap_nocache(&pdev->dev, iores->start,
ctx               253 sound/soc/au1x/i2sc.c 	if (!ctx->mmio)
ctx               259 sound/soc/au1x/i2sc.c 	ctx->dmaids[SNDRV_PCM_STREAM_PLAYBACK] = dmares->start;
ctx               264 sound/soc/au1x/i2sc.c 	ctx->dmaids[SNDRV_PCM_STREAM_CAPTURE] = dmares->start;
ctx               266 sound/soc/au1x/i2sc.c 	platform_set_drvdata(pdev, ctx);
ctx               274 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = platform_get_drvdata(pdev);
ctx               278 sound/soc/au1x/i2sc.c 	WR(ctx, I2S_ENABLE, EN_D);	/* clock off, disable */
ctx               286 sound/soc/au1x/i2sc.c 	struct au1xpsc_audio_data *ctx = dev_get_drvdata(dev);
ctx               288 sound/soc/au1x/i2sc.c 	WR(ctx, I2S_ENABLE, EN_D);	/* clock off, disable */
ctx               776 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               778 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.nb_slots = slots;
ctx               779 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.active_tx_slot_map = tx_mask;
ctx               780 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.active_rx_slot_map = rx_mask;
ctx               781 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.nb_bits_per_slots = slot_width;
ctx               832 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               838 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.ssp_protocol = SSP_MODE_PCM;
ctx               839 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.mode = sst_get_ssp_mode(dai, fmt) | (SSP_PCM_MODE_NETWORK << 1);
ctx               840 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.start_delay = 0;
ctx               841 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.data_polarity = 1;
ctx               842 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.frame_sync_width = 1;
ctx               846 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.ssp_protocol = SSP_MODE_PCM;
ctx               847 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.mode = sst_get_ssp_mode(dai, fmt) | (SSP_PCM_MODE_NETWORK << 1);
ctx               848 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.start_delay = 1;
ctx               849 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.data_polarity = 1;
ctx               850 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.frame_sync_width = 1;
ctx               854 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.ssp_protocol = SSP_MODE_I2S;
ctx               855 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.mode = sst_get_ssp_mode(dai, fmt) | (SSP_PCM_MODE_NORMAL << 1);
ctx               856 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.start_delay = 1;
ctx               857 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.data_polarity = 0;
ctx               858 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.frame_sync_width = ctx->ssp_cmd.nb_bits_per_slots;
ctx               862 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.ssp_protocol = SSP_MODE_I2S;
ctx               863 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.mode = sst_get_ssp_mode(dai, fmt) | (SSP_PCM_MODE_NORMAL << 1);
ctx               864 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.start_delay = 0;
ctx               865 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.data_polarity = 0;
ctx               866 sound/soc/intel/atom/sst-atom-controls.c 		ctx->ssp_cmd.frame_sync_width = ctx->ssp_cmd.nb_bits_per_slots;
ctx               877 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.frame_sync_polarity = fs_polarity;
ctx               905 sound/soc/intel/atom/sst-atom-controls.c 	struct sst_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               909 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.selection = config->ssp_id;
ctx               910 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.nb_bits_per_slots = config->bits_per_slot;
ctx               911 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.nb_slots = config->slots;
ctx               912 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.mode = config->ssp_mode | (config->pcm_mode << 1);
ctx               913 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.duplex = config->duplex;
ctx               914 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.active_tx_slot_map = config->active_slot_map;
ctx               915 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.active_rx_slot_map = config->active_slot_map;
ctx               916 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.frame_sync_frequency = config->fs_frequency;
ctx               917 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.frame_sync_polarity = config->frame_sync_polarity;
ctx               918 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.data_polarity = config->data_polarity;
ctx               919 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.frame_sync_width = config->fs_width;
ctx               920 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.ssp_protocol = config->ssp_protocol;
ctx               921 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.start_delay = config->start_delay;
ctx               922 sound/soc/intel/atom/sst-atom-controls.c 	ctx->ssp_cmd.reserved1 = ctx->ssp_cmd.reserved2 = 0xFF;
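
The DAI-format handler above is a pure table-style mapping: every case fills the same set of ssp_cmd fields (protocol, mode, start_delay, data_polarity, frame_sync_width) with different constants, with the I2S-family formats sizing the frame sync to a whole slot. A compact standalone model of that mapping is sketched below; the enum values, field names, and constants are illustrative, not the SST IPC layout.

#include <stdio.h>

enum dai_fmt { FMT_DSP_A, FMT_DSP_B, FMT_I2S, FMT_LEFT_J };

struct ssp_cmd {
        int protocol, start_delay, data_polarity;
        int frame_sync_width, bits_per_slot;
};

static int fill_ssp_cmd(struct ssp_cmd *cmd, enum dai_fmt fmt)
{
        switch (fmt) {
        case FMT_DSP_A:             /* PCM-style, one-bit frame sync */
                cmd->protocol = 0;
                cmd->start_delay = 0;
                cmd->data_polarity = 1;
                cmd->frame_sync_width = 1;
                break;
        case FMT_DSP_B:
                cmd->protocol = 0;
                cmd->start_delay = 1;
                cmd->data_polarity = 1;
                cmd->frame_sync_width = 1;
                break;
        case FMT_I2S:
        case FMT_LEFT_J:            /* frame sync spans a whole slot */
                cmd->protocol = 1;
                cmd->start_delay = (fmt == FMT_I2S);
                cmd->data_polarity = 0;
                cmd->frame_sync_width = cmd->bits_per_slot;
                break;
        default:
                return -1;
        }
        return 0;
}

int main(void)
{
        struct ssp_cmd cmd = { .bits_per_slot = 24 };
        fill_ssp_cmd(&cmd, FMT_I2S);
        printf("proto=%d fsw=%d\n", cmd.protocol, cmd.frame_sync_width);
        return 0;
}

Note the ordering dependency the original also has: nb_bits_per_slots must be set (by set_tdm_slot) before the I2S cases can derive frame_sync_width from it.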
ctx               103 sound/soc/intel/atom/sst-mfld-platform-compress.c 	struct sst_data *ctx = snd_soc_component_get_drvdata(component);
ctx               110 sound/soc/intel/atom/sst-mfld-platform-compress.c 	retval = sst_fill_stream_params(cstream, ctx, &str_params, true);
ctx               174 sound/soc/intel/atom/sst-mfld-platform-pcm.c 	const struct sst_data *ctx, struct snd_sst_params *str_params, bool is_compress)
ctx               182 sound/soc/intel/atom/sst-mfld-platform-pcm.c 	map = ctx->pdata->pdev_strm_map;
ctx               183 sound/soc/intel/atom/sst-mfld-platform-pcm.c 	map_size = ctx->pdata->strm_map_size;
ctx               231 sound/soc/intel/atom/sst-mfld-platform-pcm.c 	struct sst_data *ctx = snd_soc_dai_get_drvdata(dai);
ctx               242 sound/soc/intel/atom/sst-mfld-platform-pcm.c 	ret_val = sst_fill_stream_params(substream, ctx, &str_params, false);
ctx               158 sound/soc/intel/atom/sst-mfld-platform.h int sst_fill_stream_params(void *substream, const struct sst_data *ctx,
ctx               194 sound/soc/intel/atom/sst/sst.c 	struct intel_sst_drv *ctx = container_of(work,
ctx               197 sound/soc/intel/atom/sst/sst.c 	ctx->ops->post_message(ctx, NULL, false);
ctx               200 sound/soc/intel/atom/sst/sst.c static int sst_workqueue_init(struct intel_sst_drv *ctx)
ctx               202 sound/soc/intel/atom/sst/sst.c 	INIT_LIST_HEAD(&ctx->memcpy_list);
ctx               203 sound/soc/intel/atom/sst/sst.c 	INIT_LIST_HEAD(&ctx->rx_list);
ctx               204 sound/soc/intel/atom/sst/sst.c 	INIT_LIST_HEAD(&ctx->ipc_dispatch_list);
ctx               205 sound/soc/intel/atom/sst/sst.c 	INIT_LIST_HEAD(&ctx->block_list);
ctx               206 sound/soc/intel/atom/sst/sst.c 	INIT_WORK(&ctx->ipc_post_msg_wq, sst_process_pending_msg);
ctx               207 sound/soc/intel/atom/sst/sst.c 	init_waitqueue_head(&ctx->wait_queue);
ctx               209 sound/soc/intel/atom/sst/sst.c 	ctx->post_msg_wq =
ctx               211 sound/soc/intel/atom/sst/sst.c 	if (!ctx->post_msg_wq)
ctx               216 sound/soc/intel/atom/sst/sst.c static void sst_init_locks(struct intel_sst_drv *ctx)
ctx               218 sound/soc/intel/atom/sst/sst.c 	mutex_init(&ctx->sst_lock);
ctx               219 sound/soc/intel/atom/sst/sst.c 	spin_lock_init(&ctx->rx_msg_lock);
ctx               220 sound/soc/intel/atom/sst/sst.c 	spin_lock_init(&ctx->ipc_spin_lock);
ctx               221 sound/soc/intel/atom/sst/sst.c 	spin_lock_init(&ctx->block_lock);
ctx               224 sound/soc/intel/atom/sst/sst.c int sst_alloc_drv_context(struct intel_sst_drv **ctx,
ctx               227 sound/soc/intel/atom/sst/sst.c 	*ctx = devm_kzalloc(dev, sizeof(struct intel_sst_drv), GFP_KERNEL);
ctx               228 sound/soc/intel/atom/sst/sst.c 	if (!(*ctx))
ctx               231 sound/soc/intel/atom/sst/sst.c 	(*ctx)->dev = dev;
ctx               232 sound/soc/intel/atom/sst/sst.c 	(*ctx)->dev_id = dev_id;
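
sst_alloc_drv_context() above returns its result through a struct intel_sst_drv ** out-parameter so the int return value stays free for an error code, and the common fields (dev, dev_id) are filled before anyone else can see the context. A minimal sketch of that out-parameter allocator convention follows, with hypothetical names.

#include <stdio.h>
#include <stdlib.h>

struct drv_ctx { int dev_id; };

/* allocate and init through a double pointer; return 0 or an errno-style code */
static int alloc_drv_context(struct drv_ctx **ctx, int dev_id)
{
        *ctx = calloc(1, sizeof(**ctx));
        if (!*ctx)
                return -12;     /* -ENOMEM */
        (*ctx)->dev_id = dev_id;
        return 0;
}

int main(void)
{
        struct drv_ctx *ctx;

        if (alloc_drv_context(&ctx, 0x119a))
                return 1;
        printf("dev_id=%#x\n", ctx->dev_id);
        free(ctx);
        return 0;
}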
ctx               241 sound/soc/intel/atom/sst/sst.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               243 sound/soc/intel/atom/sst/sst.c 	if (ctx->fw_version.type == 0 && ctx->fw_version.major == 0 &&
ctx               244 sound/soc/intel/atom/sst/sst.c 	    ctx->fw_version.minor == 0 && ctx->fw_version.build == 0)
ctx               248 sound/soc/intel/atom/sst/sst.c 			       ctx->fw_version.type, ctx->fw_version.major,
ctx               249 sound/soc/intel/atom/sst/sst.c 			       ctx->fw_version.minor, ctx->fw_version.build);
ctx               264 sound/soc/intel/atom/sst/sst.c int sst_context_init(struct intel_sst_drv *ctx)
ctx               268 sound/soc/intel/atom/sst/sst.c 	if (!ctx->pdata)
ctx               271 sound/soc/intel/atom/sst/sst.c 	if (!ctx->pdata->probe_data)
ctx               274 sound/soc/intel/atom/sst/sst.c 	memcpy(&ctx->info, ctx->pdata->probe_data, sizeof(ctx->info));
ctx               276 sound/soc/intel/atom/sst/sst.c 	ret = sst_driver_ops(ctx);
ctx               280 sound/soc/intel/atom/sst/sst.c 	sst_init_locks(ctx);
ctx               281 sound/soc/intel/atom/sst/sst.c 	sst_set_fw_state_locked(ctx, SST_RESET);
ctx               284 sound/soc/intel/atom/sst/sst.c 	ctx->pvt_id = 1;
ctx               285 sound/soc/intel/atom/sst/sst.c 	ctx->stream_cnt = 0;
ctx               286 sound/soc/intel/atom/sst/sst.c 	ctx->fw_in_mem = NULL;
ctx               288 sound/soc/intel/atom/sst/sst.c 	ctx->use_dma = 0;
ctx               289 sound/soc/intel/atom/sst/sst.c 	ctx->use_lli = 0;
ctx               291 sound/soc/intel/atom/sst/sst.c 	if (sst_workqueue_init(ctx))
ctx               294 sound/soc/intel/atom/sst/sst.c 	ctx->mailbox_recv_offset = ctx->pdata->ipc_info->mbox_recv_off;
ctx               295 sound/soc/intel/atom/sst/sst.c 	ctx->ipc_reg.ipcx = SST_IPCX + ctx->pdata->ipc_info->ipc_offset;
ctx               296 sound/soc/intel/atom/sst/sst.c 	ctx->ipc_reg.ipcd = SST_IPCD + ctx->pdata->ipc_info->ipc_offset;
ctx               298 sound/soc/intel/atom/sst/sst.c 	dev_info(ctx->dev, "Got drv data max stream %d\n",
ctx               299 sound/soc/intel/atom/sst/sst.c 				ctx->info.max_streams);
ctx               301 sound/soc/intel/atom/sst/sst.c 	for (i = 1; i <= ctx->info.max_streams; i++) {
ctx               302 sound/soc/intel/atom/sst/sst.c 		struct stream_info *stream = &ctx->streams[i];
ctx               310 sound/soc/intel/atom/sst/sst.c 	ret = devm_request_threaded_irq(ctx->dev, ctx->irq_num, ctx->ops->interrupt,
ctx               311 sound/soc/intel/atom/sst/sst.c 					ctx->ops->irq_thread, 0, SST_DRV_NAME,
ctx               312 sound/soc/intel/atom/sst/sst.c 					ctx);
ctx               316 sound/soc/intel/atom/sst/sst.c 	dev_dbg(ctx->dev, "Registered IRQ %#x\n", ctx->irq_num);
ctx               319 sound/soc/intel/atom/sst/sst.c 	sst_shim_write64(ctx->shim, SST_IMRX, 0xFFFF0038);
ctx               321 sound/soc/intel/atom/sst/sst.c 	ctx->qos = devm_kzalloc(ctx->dev,
ctx               323 sound/soc/intel/atom/sst/sst.c 	if (!ctx->qos) {
ctx               327 sound/soc/intel/atom/sst/sst.c 	pm_qos_add_request(ctx->qos, PM_QOS_CPU_DMA_LATENCY,
ctx               330 sound/soc/intel/atom/sst/sst.c 	dev_dbg(ctx->dev, "Requesting FW %s now...\n", ctx->firmware_name);
ctx               331 sound/soc/intel/atom/sst/sst.c 	ret = request_firmware_nowait(THIS_MODULE, true, ctx->firmware_name,
ctx               332 sound/soc/intel/atom/sst/sst.c 				      ctx->dev, GFP_KERNEL, ctx, sst_firmware_load_cb);
ctx               334 sound/soc/intel/atom/sst/sst.c 		dev_err(ctx->dev, "Firmware download failed:%d\n", ret);
ctx               338 sound/soc/intel/atom/sst/sst.c 	ret = sysfs_create_group(&ctx->dev->kobj,
ctx               341 sound/soc/intel/atom/sst/sst.c 		dev_err(ctx->dev,
ctx               346 sound/soc/intel/atom/sst/sst.c 	sst_register(ctx->dev);
ctx               349 sound/soc/intel/atom/sst/sst.c 	sysfs_remove_group(&ctx->dev->kobj, &sst_fw_version_attr_group);
ctx               352 sound/soc/intel/atom/sst/sst.c 	destroy_workqueue(ctx->post_msg_wq);
ctx               357 sound/soc/intel/atom/sst/sst.c void sst_context_cleanup(struct intel_sst_drv *ctx)
ctx               359 sound/soc/intel/atom/sst/sst.c 	pm_runtime_get_noresume(ctx->dev);
ctx               360 sound/soc/intel/atom/sst/sst.c 	pm_runtime_disable(ctx->dev);
ctx               361 sound/soc/intel/atom/sst/sst.c 	sst_unregister(ctx->dev);
ctx               362 sound/soc/intel/atom/sst/sst.c 	sst_set_fw_state_locked(ctx, SST_SHUTDOWN);
ctx               363 sound/soc/intel/atom/sst/sst.c 	sysfs_remove_group(&ctx->dev->kobj, &sst_fw_version_attr_group);
ctx               365 sound/soc/intel/atom/sst/sst.c 	destroy_workqueue(ctx->post_msg_wq);
ctx               366 sound/soc/intel/atom/sst/sst.c 	pm_qos_remove_request(ctx->qos);
ctx               367 sound/soc/intel/atom/sst/sst.c 	kfree(ctx->fw_sg_list.src);
ctx               368 sound/soc/intel/atom/sst/sst.c 	kfree(ctx->fw_sg_list.dst);
ctx               369 sound/soc/intel/atom/sst/sst.c 	ctx->fw_sg_list.list_len = 0;
ctx               370 sound/soc/intel/atom/sst/sst.c 	kfree(ctx->fw_in_mem);
ctx               371 sound/soc/intel/atom/sst/sst.c 	ctx->fw_in_mem = NULL;
ctx               372 sound/soc/intel/atom/sst/sst.c 	sst_memcpy_free_resources(ctx);
ctx               373 sound/soc/intel/atom/sst/sst.c 	ctx = NULL;
ctx               377 sound/soc/intel/atom/sst/sst.c void sst_configure_runtime_pm(struct intel_sst_drv *ctx)
ctx               379 sound/soc/intel/atom/sst/sst.c 	pm_runtime_set_autosuspend_delay(ctx->dev, SST_SUSPEND_DELAY);
ctx               380 sound/soc/intel/atom/sst/sst.c 	pm_runtime_use_autosuspend(ctx->dev);
ctx               388 sound/soc/intel/atom/sst/sst.c 		pm_runtime_set_active(ctx->dev);
ctx               390 sound/soc/intel/atom/sst/sst.c 	pm_runtime_enable(ctx->dev);
ctx               393 sound/soc/intel/atom/sst/sst.c 		pm_runtime_set_active(ctx->dev);
ctx               395 sound/soc/intel/atom/sst/sst.c 		pm_runtime_put_noidle(ctx->dev);
ctx               402 sound/soc/intel/atom/sst/sst.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               404 sound/soc/intel/atom/sst/sst.c 	if (ctx->sst_state == SST_RESET) {
ctx               409 sound/soc/intel/atom/sst/sst.c 	if (ctx->ops->save_dsp_context(ctx))
ctx               413 sound/soc/intel/atom/sst/sst.c 	sst_set_fw_state_locked(ctx, SST_RESET);
ctx               415 sound/soc/intel/atom/sst/sst.c 	synchronize_irq(ctx->irq_num);
ctx               416 sound/soc/intel/atom/sst/sst.c 	flush_workqueue(ctx->post_msg_wq);
ctx               418 sound/soc/intel/atom/sst/sst.c 	ctx->ops->reset(ctx);
ctx               425 sound/soc/intel/atom/sst/sst.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               430 sound/soc/intel/atom/sst/sst.c 	if (ctx->sst_state == SST_RESET)
ctx               437 sound/soc/intel/atom/sst/sst.c 	for (i = 1; i <= ctx->info.max_streams; i++) {
ctx               438 sound/soc/intel/atom/sst/sst.c 		struct stream_info *stream = &ctx->streams[i];
ctx               445 sound/soc/intel/atom/sst/sst.c 		if (ctx->pdata->streams_lost_on_suspend) {
ctx               449 sound/soc/intel/atom/sst/sst.c 				sst_free_stream(ctx, i);
ctx               452 sound/soc/intel/atom/sst/sst.c 	synchronize_irq(ctx->irq_num);
ctx               453 sound/soc/intel/atom/sst/sst.c 	flush_workqueue(ctx->post_msg_wq);
ctx               456 sound/soc/intel/atom/sst/sst.c 	sst_set_fw_state_locked(ctx, SST_RESET);
ctx               459 sound/soc/intel/atom/sst/sst.c 	if (ctx->ops->save_dsp_context(ctx))
ctx               466 sound/soc/intel/atom/sst/sst.c 	fw_save->iram = kvzalloc(ctx->iram_end - ctx->iram_base, GFP_KERNEL);
ctx               471 sound/soc/intel/atom/sst/sst.c 	fw_save->dram = kvzalloc(ctx->dram_end - ctx->dram_base, GFP_KERNEL);
ctx               482 sound/soc/intel/atom/sst/sst.c 	fw_save->ddr = kvzalloc(ctx->ddr_end - ctx->ddr_base, GFP_KERNEL);
ctx               488 sound/soc/intel/atom/sst/sst.c 	memcpy32_fromio(fw_save->iram, ctx->iram, ctx->iram_end - ctx->iram_base);
ctx               489 sound/soc/intel/atom/sst/sst.c 	memcpy32_fromio(fw_save->dram, ctx->dram, ctx->dram_end - ctx->dram_base);
ctx               490 sound/soc/intel/atom/sst/sst.c 	memcpy32_fromio(fw_save->sram, ctx->mailbox, SST_MAILBOX_SIZE);
ctx               491 sound/soc/intel/atom/sst/sst.c 	memcpy32_fromio(fw_save->ddr, ctx->ddr, ctx->ddr_end - ctx->ddr_base);
ctx               493 sound/soc/intel/atom/sst/sst.c 	ctx->fw_save = fw_save;
ctx               494 sound/soc/intel/atom/sst/sst.c 	ctx->ops->reset(ctx);
ctx               509 sound/soc/intel/atom/sst/sst.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               510 sound/soc/intel/atom/sst/sst.c 	struct sst_fw_save *fw_save = ctx->fw_save;
ctx               517 sound/soc/intel/atom/sst/sst.c 	sst_set_fw_state_locked(ctx, SST_FW_LOADING);
ctx               520 sound/soc/intel/atom/sst/sst.c 	ctx->ops->reset(ctx);
ctx               522 sound/soc/intel/atom/sst/sst.c 	ctx->fw_save = NULL;
ctx               524 sound/soc/intel/atom/sst/sst.c 	memcpy32_toio(ctx->iram, fw_save->iram, ctx->iram_end - ctx->iram_base);
ctx               525 sound/soc/intel/atom/sst/sst.c 	memcpy32_toio(ctx->dram, fw_save->dram, ctx->dram_end - ctx->dram_base);
ctx               526 sound/soc/intel/atom/sst/sst.c 	memcpy32_toio(ctx->mailbox, fw_save->sram, SST_MAILBOX_SIZE);
ctx               527 sound/soc/intel/atom/sst/sst.c 	memcpy32_toio(ctx->ddr, fw_save->ddr, ctx->ddr_end - ctx->ddr_base);
ctx               535 sound/soc/intel/atom/sst/sst.c 	block = sst_create_block(ctx, 0, FW_DWNL_ID);
ctx               541 sound/soc/intel/atom/sst/sst.c 	ctx->ops->start(ctx);
ctx               542 sound/soc/intel/atom/sst/sst.c 	ret = sst_wait_timeout(ctx, block);
ctx               544 sound/soc/intel/atom/sst/sst.c 		dev_err(ctx->dev, "fw download failed %d\n", ret);
ctx               549 sound/soc/intel/atom/sst/sst.c 		sst_set_fw_state_locked(ctx, SST_FW_RUNNING);
ctx               552 sound/soc/intel/atom/sst/sst.c 	if (ctx->pdata->streams_lost_on_suspend) {
ctx               553 sound/soc/intel/atom/sst/sst.c 		for (i = 1; i <= ctx->info.max_streams; i++) {
ctx               554 sound/soc/intel/atom/sst/sst.c 			struct stream_info *stream = &ctx->streams[i];
ctx               557 sound/soc/intel/atom/sst/sst.c 				dev_dbg(ctx->dev, "Re-allocing stream %d status %d prev %d\n",
ctx               560 sound/soc/intel/atom/sst/sst.c 				sst_realloc_stream(ctx, i);
ctx               567 sound/soc/intel/atom/sst/sst.c 	sst_free_block(ctx, block);
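
The suspend path above snapshots IRAM, DRAM, the mailbox SRAM, and DDR into kvzalloc'd buffers with memcpy32_fromio(), and resume plays them back with memcpy32_toio() before restarting the DSP and waiting on a download block. A standalone model of that snapshot/restore pairing over plain memory is sketched below; the regions here are ordinary buffers rather than iomem, and only one region is kept for brevity.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define REGION_SZ 64

static unsigned char device_ram[REGION_SZ];   /* stands in for ctx->iram */

struct fw_save { unsigned char *iram; };

static struct fw_save *suspend_save(void)
{
        struct fw_save *save = calloc(1, sizeof(*save));
        if (!save)
                return NULL;
        save->iram = calloc(1, REGION_SZ);    /* kvzalloc() analogue */
        if (!save->iram) {
                free(save);
                return NULL;
        }
        memcpy(save->iram, device_ram, REGION_SZ);  /* memcpy32_fromio */
        return save;
}

static void resume_restore(struct fw_save *save)
{
        memcpy(device_ram, save->iram, REGION_SZ);  /* memcpy32_toio */
        free(save->iram);
        free(save);
}

int main(void)
{
        device_ram[0] = 0x5a;
        struct fw_save *save = suspend_save();
        if (!save)
                return 1;
        memset(device_ram, 0, REGION_SZ);     /* power loss wipes the DSP */
        resume_restore(save);
        printf("restored byte: %#x\n", device_ram[0]);
        return 0;
}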
ctx               416 sound/soc/intel/atom/sst/sst.h 	void (*clear_interrupt)(struct intel_sst_drv *ctx);
ctx               417 sound/soc/intel/atom/sst/sst.h 	int (*start)(struct intel_sst_drv *ctx);
ctx               418 sound/soc/intel/atom/sst/sst.h 	int (*reset)(struct intel_sst_drv *ctx);
ctx               419 sound/soc/intel/atom/sst/sst.h 	void (*process_reply)(struct intel_sst_drv *ctx, struct ipc_post *msg);
ctx               420 sound/soc/intel/atom/sst/sst.h 	int (*post_message)(struct intel_sst_drv *ctx,
ctx               426 sound/soc/intel/atom/sst/sst.h 	int (*alloc_stream)(struct intel_sst_drv *ctx, void *params);
ctx               436 sound/soc/intel/atom/sst/sst.h int sst_send_byte_stream_mrfld(struct intel_sst_drv *ctx,
ctx               442 sound/soc/intel/atom/sst/sst.h int sst_get_stream_allocated(struct intel_sst_drv *ctx,
ctx               447 sound/soc/intel/atom/sst/sst.h int sst_post_message_mrfld(struct intel_sst_drv *ctx,
ctx               449 sound/soc/intel/atom/sst/sst.h void sst_process_reply_mrfld(struct intel_sst_drv *ctx, struct ipc_post *msg);
ctx               450 sound/soc/intel/atom/sst/sst.h int sst_start_mrfld(struct intel_sst_drv *ctx);
ctx               451 sound/soc/intel/atom/sst/sst.h int intel_sst_reset_dsp_mrfld(struct intel_sst_drv *ctx);
ctx               452 sound/soc/intel/atom/sst/sst.h void intel_sst_clear_intr_mrfld(struct intel_sst_drv *ctx);
ctx               454 sound/soc/intel/atom/sst/sst.h int sst_load_fw(struct intel_sst_drv *ctx);
ctx               456 sound/soc/intel/atom/sst/sst.h void sst_post_download_mrfld(struct intel_sst_drv *ctx);
ctx               458 sound/soc/intel/atom/sst/sst.h void sst_memcpy_free_resources(struct intel_sst_drv *ctx);
ctx               465 sound/soc/intel/atom/sst/sst.h int free_stream_context(struct intel_sst_drv *ctx, unsigned int str_id);
ctx               469 sound/soc/intel/atom/sst/sst.h void sst_cdev_fragment_elapsed(struct intel_sst_drv *ctx, int str_id);
ctx               475 sound/soc/intel/atom/sst/sst.h struct sst_block *sst_create_block(struct intel_sst_drv *ctx,
ctx               480 sound/soc/intel/atom/sst/sst.h int sst_free_block(struct intel_sst_drv *ctx, struct sst_block *freed);
ctx               481 sound/soc/intel/atom/sst/sst.h int sst_wake_up_block(struct intel_sst_drv *ctx, int result,
ctx               483 sound/soc/intel/atom/sst/sst.h int sst_request_firmware_async(struct intel_sst_drv *ctx);
ctx               518 sound/soc/intel/atom/sst/sst.h int sst_alloc_drv_context(struct intel_sst_drv **ctx,
ctx               520 sound/soc/intel/atom/sst/sst.h int sst_context_init(struct intel_sst_drv *ctx);
ctx               521 sound/soc/intel/atom/sst/sst.h void sst_context_cleanup(struct intel_sst_drv *ctx);
ctx               522 sound/soc/intel/atom/sst/sst.h void sst_configure_runtime_pm(struct intel_sst_drv *ctx);
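
The sst.h entries above declare the driver's operations as function pointers (clear_interrupt, start, reset, process_reply, post_message, alloc_stream), letting platform-specific backends such as the *_mrfld functions be bound once and invoked as ctx->ops->start(ctx). A small standalone model of that ops-table binding follows; the two-op table and its names are hypothetical.

#include <stdio.h>

struct drv;

struct drv_ops {
        int (*start)(struct drv *d);
        int (*reset)(struct drv *d);
};

struct drv { const struct drv_ops *ops; const char *name; };

static int mrfld_start(struct drv *d) { printf("%s: start\n", d->name); return 0; }
static int mrfld_reset(struct drv *d) { printf("%s: reset\n", d->name); return 0; }

static const struct drv_ops mrfld_ops = {
        .start = mrfld_start,
        .reset = mrfld_reset,
};

/* like sst_driver_ops(): pick the backend table for this device */
static int bind_ops(struct drv *d)
{
        d->ops = &mrfld_ops;
        return 0;
}

int main(void)
{
        struct drv d = { .name = "sst" };

        bind_ops(&d);
        d.ops->reset(&d);       /* dispatch via the bound table */
        d.ops->start(&d);
        return 0;
}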
ctx               149 sound/soc/intel/atom/sst/sst_acpi.c static int sst_platform_get_resources(struct intel_sst_drv *ctx)
ctx               152 sound/soc/intel/atom/sst/sst_acpi.c 	struct platform_device *pdev = to_platform_device(ctx->dev);
ctx               157 sound/soc/intel/atom/sst/sst_acpi.c 					ctx->pdata->res_info->acpi_lpe_res_index);
ctx               159 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "Invalid SHIM base from IFWI\n");
ctx               162 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "LPE base: %#x size:%#x", (unsigned int) rsrc->start,
ctx               165 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->iram_base = rsrc->start + ctx->pdata->res_info->iram_offset;
ctx               166 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->iram_end =  ctx->iram_base + ctx->pdata->res_info->iram_size - 1;
ctx               167 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "IRAM base: %#x", ctx->iram_base);
ctx               168 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->iram = devm_ioremap_nocache(ctx->dev, ctx->iram_base,
ctx               169 sound/soc/intel/atom/sst/sst_acpi.c 					 ctx->pdata->res_info->iram_size);
ctx               170 sound/soc/intel/atom/sst/sst_acpi.c 	if (!ctx->iram) {
ctx               171 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "unable to map IRAM\n");
ctx               175 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->dram_base = rsrc->start + ctx->pdata->res_info->dram_offset;
ctx               176 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->dram_end = ctx->dram_base + ctx->pdata->res_info->dram_size - 1;
ctx               177 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "DRAM base: %#x", ctx->dram_base);
ctx               178 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->dram = devm_ioremap_nocache(ctx->dev, ctx->dram_base,
ctx               179 sound/soc/intel/atom/sst/sst_acpi.c 					 ctx->pdata->res_info->dram_size);
ctx               180 sound/soc/intel/atom/sst/sst_acpi.c 	if (!ctx->dram) {
ctx               181 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "unable to map DRAM\n");
ctx               185 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->shim_phy_add = rsrc->start + ctx->pdata->res_info->shim_offset;
ctx               186 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "SHIM base: %#x", ctx->shim_phy_add);
ctx               187 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->shim = devm_ioremap_nocache(ctx->dev, ctx->shim_phy_add,
ctx               188 sound/soc/intel/atom/sst/sst_acpi.c 					ctx->pdata->res_info->shim_size);
ctx               189 sound/soc/intel/atom/sst/sst_acpi.c 	if (!ctx->shim) {
ctx               190 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "unable to map SHIM\n");
ctx               195 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->shim_phy_add = ctx->pdata->res_info->shim_phy_addr;
ctx               198 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->mailbox_add = rsrc->start + ctx->pdata->res_info->mbox_offset;
ctx               199 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "Mailbox base: %#x", ctx->mailbox_add);
ctx               200 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->mailbox = devm_ioremap_nocache(ctx->dev, ctx->mailbox_add,
ctx               201 sound/soc/intel/atom/sst/sst_acpi.c 					    ctx->pdata->res_info->mbox_size);
ctx               202 sound/soc/intel/atom/sst/sst_acpi.c 	if (!ctx->mailbox) {
ctx               203 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "unable to map mailbox\n");
ctx               208 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->mailbox_add = ctx->info.mailbox_start;
ctx               211 sound/soc/intel/atom/sst/sst_acpi.c 					ctx->pdata->res_info->acpi_ddr_index);
ctx               213 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "Invalid DDR base from IFWI\n");
ctx               216 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->ddr_base = rsrc->start;
ctx               217 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->ddr_end = rsrc->end;
ctx               218 sound/soc/intel/atom/sst/sst_acpi.c 	dev_info(ctx->dev, "DDR base: %#x", ctx->ddr_base);
ctx               219 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->ddr = devm_ioremap_nocache(ctx->dev, ctx->ddr_base,
ctx               221 sound/soc/intel/atom/sst/sst_acpi.c 	if (!ctx->ddr) {
ctx               222 sound/soc/intel/atom/sst/sst_acpi.c 		dev_err(ctx->dev, "unable to map DDR\n");
ctx               227 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->irq_num = platform_get_irq(pdev,
ctx               228 sound/soc/intel/atom/sst/sst_acpi.c 				ctx->pdata->res_info->acpi_ipc_irq_index);
ctx               229 sound/soc/intel/atom/sst/sst_acpi.c 	if (ctx->irq_num <= 0)
ctx               230 sound/soc/intel/atom/sst/sst_acpi.c 		return ctx->irq_num < 0 ? ctx->irq_num : -EIO;
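
sst_platform_get_resources() above maps each region (IRAM, DRAM, SHIM, mailbox, DDR) by adding a platform-data offset to the firmware-provided base and bails out with a per-region error message the moment any devm_ioremap fails; the IRQ lookup at the end similarly converts a non-positive return into an error. A compact userspace model of that "offset table plus fail-fast mapping" shape is sketched below; the offsets and sizes are invented, and malloc stands in for devm_ioremap_nocache().

#include <stdio.h>
#include <stdlib.h>

struct region { const char *name; unsigned long offset, size; void *va; };

/* stand-in for devm_ioremap_nocache(): may fail */
static void *map_region(unsigned long base, struct region *r)
{
        (void)base;
        return malloc(r->size);
}

static int get_resources(unsigned long lpe_base, struct region *tbl, int n)
{
        for (int i = 0; i < n; i++) {
                tbl[i].va = map_region(lpe_base + tbl[i].offset, &tbl[i]);
                if (!tbl[i].va) {
                        fprintf(stderr, "unable to map %s\n", tbl[i].name);
                        return -1;      /* fail fast, like -EIO */
                }
        }
        return 0;
}

int main(void)
{
        struct region tbl[] = {
                { "IRAM", 0x0c0000, 0x14000, NULL },
                { "DRAM", 0x100000, 0x28000, NULL },
                { "SHIM", 0x140000, 0x00100, NULL },
        };
        int n = sizeof(tbl) / sizeof(tbl[0]);

        if (get_resources(0xff000000UL, tbl, n))
                return 1;
        printf("mapped %d regions\n", n);
        for (int i = 0; i < n; i++)
                free(tbl[i].va);
        return 0;
}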
ctx               239 sound/soc/intel/atom/sst/sst_acpi.c 	struct intel_sst_drv *ctx;
ctx               273 sound/soc/intel/atom/sst/sst_acpi.c 	ret = sst_alloc_drv_context(&ctx, dev, dev_id);
ctx               307 sound/soc/intel/atom/sst/sst_acpi.c 	ctx->pdata = pdata;
ctx               308 sound/soc/intel/atom/sst/sst_acpi.c 	strcpy(ctx->firmware_name, mach->fw_filename);
ctx               310 sound/soc/intel/atom/sst/sst_acpi.c 	ret = sst_platform_get_resources(ctx);
ctx               314 sound/soc/intel/atom/sst/sst_acpi.c 	ret = sst_context_init(ctx);
ctx               318 sound/soc/intel/atom/sst/sst_acpi.c 	sst_configure_runtime_pm(ctx);
ctx               319 sound/soc/intel/atom/sst/sst_acpi.c 	platform_set_drvdata(pdev, ctx);
ctx               333 sound/soc/intel/atom/sst/sst_acpi.c 	struct intel_sst_drv *ctx;
ctx               335 sound/soc/intel/atom/sst/sst_acpi.c 	ctx = platform_get_drvdata(pdev);
ctx               336 sound/soc/intel/atom/sst/sst_acpi.c 	sst_context_cleanup(ctx);
ctx                43 sound/soc/intel/atom/sst/sst_drv_interface.c int free_stream_context(struct intel_sst_drv *ctx, unsigned int str_id)
ctx                48 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx                51 sound/soc/intel/atom/sst/sst_drv_interface.c 		ret = sst_free_stream(ctx, str_id);
ctx                53 sound/soc/intel/atom/sst/sst_drv_interface.c 			sst_clean_stream(&ctx->streams[str_id]);
ctx                56 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_err(ctx->dev, "we tried to free stream context %d which was freed!!!\n", str_id);
ctx                61 sound/soc/intel/atom/sst/sst_drv_interface.c int sst_get_stream_allocated(struct intel_sst_drv *ctx,
ctx                67 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = ctx->ops->alloc_stream(ctx, str_param);
ctx                69 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_dbg(ctx->dev, "Stream allocated %d\n", retval);
ctx               117 sound/soc/intel/atom/sst/sst_drv_interface.c int sst_get_stream(struct intel_sst_drv *ctx,
ctx               124 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = ctx->ops->alloc_stream(ctx, str_param);
ctx               129 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = &ctx->streams[retval];
ctx               137 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               144 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_dbg(ctx->dev, "Enable: pm usage count: %d\n", usage_count);
ctx               147 sound/soc/intel/atom/sst/sst_drv_interface.c 			dev_err(ctx->dev, "Runtime get failed with err: %d\n", ret);
ctx               150 sound/soc/intel/atom/sst/sst_drv_interface.c 		if ((ctx->sst_state == SST_RESET) && (usage_count == 1)) {
ctx               151 sound/soc/intel/atom/sst/sst_drv_interface.c 			ret = sst_load_fw(ctx);
ctx               154 sound/soc/intel/atom/sst/sst_drv_interface.c 				sst_set_fw_state_locked(ctx, SST_RESET);
ctx               155 sound/soc/intel/atom/sst/sst_drv_interface.c 				ret = sst_pm_runtime_put(ctx);
ctx               160 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_dbg(ctx->dev, "Disable: pm usage count: %d\n", usage_count);
ctx               161 sound/soc/intel/atom/sst/sst_drv_interface.c 		return sst_pm_runtime_put(ctx);
ctx               178 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               183 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = sst_get_stream(ctx, str_param);
ctx               185 sound/soc/intel/atom/sst/sst_drv_interface.c 		ctx->stream_cnt++;
ctx               187 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_err(ctx->dev, "sst_get_stream returned err %d\n", retval);
ctx               197 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               199 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = pm_runtime_get_sync(ctx->dev);
ctx               201 sound/soc/intel/atom/sst/sst_drv_interface.c 		pm_runtime_put_sync(ctx->dev);
ctx               205 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_id = sst_get_stream(ctx, str_params);
ctx               208 sound/soc/intel/atom/sst/sst_drv_interface.c 		stream = &ctx->streams[str_id];
ctx               216 sound/soc/intel/atom/sst/sst_drv_interface.c 		sst_pm_runtime_put(ctx);
ctx               225 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               227 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               233 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = sst_free_stream(ctx, str_id);
ctx               251 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               253 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               261 sound/soc/intel/atom/sst/sst_drv_interface.c 	addr =  ((void __iomem *)(ctx->mailbox + ctx->tstamp)) +
ctx               280 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               284 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               289 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = sst_prepare_and_post_msg(ctx, str_info->task_id, IPC_CMD,
ctx               299 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               301 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_pause_stream(ctx, str_id);
ctx               307 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               309 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_resume_stream(ctx, str_id);
ctx               315 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               317 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               322 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_start_stream(ctx, str_id);
ctx               327 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               329 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_drop_stream(ctx, str_id);
ctx               334 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               336 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_drain_stream(ctx, str_id, false);
ctx               342 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               344 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_drain_stream(ctx, str_id, true);
ctx               352 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               355 sound/soc/intel/atom/sst/sst_drv_interface.c 	addr = (void __iomem *)(ctx->mailbox + ctx->tstamp) +
ctx               360 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               439 sound/soc/intel/atom/sst/sst_drv_interface.c void sst_cdev_fragment_elapsed(struct intel_sst_drv *ctx, int str_id)
ctx               443 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "fragment elapsed from firmware for str_id %d\n",
ctx               445 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = &ctx->streams[str_id];
ctx               462 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               464 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               466 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_err(ctx->dev, "stream info is NULL for str %d!!!\n", str_id);
ctx               470 sound/soc/intel/atom/sst/sst_drv_interface.c 	retval = free_stream_context(ctx, str_id);
ctx               474 sound/soc/intel/atom/sst/sst_drv_interface.c 	ctx->stream_cnt--;
ctx               477 sound/soc/intel/atom/sst/sst_drv_interface.c 		dev_err(ctx->dev, "free stream returned err %d\n", retval);
ctx               479 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "Exit\n");
ctx               483 sound/soc/intel/atom/sst/sst_drv_interface.c static inline int sst_calc_tstamp(struct intel_sst_drv *ctx,
ctx               492 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "mrfld ring_buffer_counter %llu in bytes\n",
ctx               494 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "mrfld hardware_counter %llu in bytes\n",
ctx               507 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "pcm delay %zu in bytes\n", delay_bytes);
ctx               512 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "buffer ptr %llu pcm_delay rep: %llu\n",
ctx               523 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               527 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               535 sound/soc/intel/atom/sst/sst_drv_interface.c 	addr = (void __iomem *)(ctx->mailbox + ctx->tstamp) +
ctx               540 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_calc_tstamp(ctx, info, substream, &fw_tstamp);
ctx               546 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               548 sound/soc/intel/atom/sst/sst_drv_interface.c 	if (ctx->sst_state != SST_FW_RUNNING)
ctx               550 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               555 sound/soc/intel/atom/sst/sst_drv_interface.c 	sst_start_stream(ctx, str_id);
ctx               563 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               565 sound/soc/intel/atom/sst/sst_drv_interface.c 	if (ctx->sst_state != SST_FW_RUNNING)
ctx               568 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               573 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_drop_stream(ctx, str_id);
ctx               579 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               581 sound/soc/intel/atom/sst/sst_drv_interface.c 	if (ctx->sst_state != SST_FW_RUNNING)
ctx               584 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               588 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_pause_stream(ctx, str_id);
ctx               594 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               596 sound/soc/intel/atom/sst/sst_drv_interface.c 	if (ctx->sst_state != SST_FW_RUNNING)
ctx               599 sound/soc/intel/atom/sst/sst_drv_interface.c 	str_info = get_stream_info(ctx, str_id);
ctx               602 sound/soc/intel/atom/sst/sst_drv_interface.c 	return sst_resume_stream(ctx, str_id);
ctx               609 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               613 sound/soc/intel/atom/sst/sst_drv_interface.c 	if (ctx->sst_state != SST_FW_RUNNING)
ctx               616 sound/soc/intel/atom/sst/sst_drv_interface.c 	stream = get_stream_info(ctx, str_id);
ctx               620 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev, "setting the period ptrs\n");
ctx               626 sound/soc/intel/atom/sst/sst_drv_interface.c 	dev_dbg(ctx->dev,
ctx               647 sound/soc/intel/atom/sst/sst_drv_interface.c 	struct intel_sst_drv *ctx = dev_get_drvdata(dev);
ctx               651 sound/soc/intel/atom/sst/sst_drv_interface.c 	ret_val = pm_runtime_get_sync(ctx->dev);
ctx               653 sound/soc/intel/atom/sst/sst_drv_interface.c 		pm_runtime_put_sync(ctx->dev);
ctx               657 sound/soc/intel/atom/sst/sst_drv_interface.c 	ret_val = sst_send_byte_stream_mrfld(ctx, bytes);
ctx               658 sound/soc/intel/atom/sst/sst_drv_interface.c 	sst_pm_runtime_put(ctx);
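
Nearly every sst_drv_interface.c entry point above follows the same guard sequence: fetch ctx via dev_get_drvdata(), refuse unless sst_state is SST_FW_RUNNING, validate the stream with get_stream_info(), then delegate to the core helper (sst_pause_stream, sst_drop_stream, and so on). A skeletal model of that guard-then-delegate shape follows; the states, bounds, and error codes are illustrative only.

#include <stdio.h>

enum fw_state { SST_RESET, SST_FW_RUNNING };

struct drv_ctx { enum fw_state state; int streams[4]; };

static int stream_valid(struct drv_ctx *ctx, int id)
{
        return id >= 0 && id < 4 && ctx->streams[id];
}

static int core_pause(struct drv_ctx *ctx, int id)
{
        (void)ctx;
        printf("pausing stream %d\n", id);
        return 0;
}

/* the recurring entry-point shape: state guard, stream guard, delegate */
static int ops_pause(struct drv_ctx *ctx, int str_id)
{
        if (ctx->state != SST_FW_RUNNING)
                return -16;             /* -EBUSY-style refusal */
        if (!stream_valid(ctx, str_id))
                return -22;             /* -EINVAL */
        return core_pause(ctx, str_id);
}

int main(void)
{
        struct drv_ctx ctx = { .state = SST_FW_RUNNING, .streams = {0, 1} };

        printf("rc=%d\n", ops_pause(&ctx, 1));   /* delegates */
        printf("rc=%d\n", ops_pause(&ctx, 3));   /* rejected: invalid */
        return 0;
}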
ctx                29 sound/soc/intel/atom/sst/sst_ipc.c struct sst_block *sst_create_block(struct intel_sst_drv *ctx,
ctx                34 sound/soc/intel/atom/sst/sst_ipc.c 	dev_dbg(ctx->dev, "Enter\n");
ctx                42 sound/soc/intel/atom/sst/sst_ipc.c 	spin_lock_bh(&ctx->block_lock);
ctx                43 sound/soc/intel/atom/sst/sst_ipc.c 	list_add_tail(&msg->node, &ctx->block_list);
ctx                44 sound/soc/intel/atom/sst/sst_ipc.c 	spin_unlock_bh(&ctx->block_lock);
ctx                64 sound/soc/intel/atom/sst/sst_ipc.c int sst_wake_up_block(struct intel_sst_drv *ctx, int result,
ctx                69 sound/soc/intel/atom/sst/sst_ipc.c 	dev_dbg(ctx->dev, "Enter\n");
ctx                71 sound/soc/intel/atom/sst/sst_ipc.c 	spin_lock_bh(&ctx->block_lock);
ctx                72 sound/soc/intel/atom/sst/sst_ipc.c 	list_for_each_entry(block, &ctx->block_list, node) {
ctx                73 sound/soc/intel/atom/sst/sst_ipc.c 		dev_dbg(ctx->dev, "Block ipc %d, drv_id %d\n", block->msg_id,
ctx                76 sound/soc/intel/atom/sst/sst_ipc.c 			dev_dbg(ctx->dev, "free up the block\n");
ctx                81 sound/soc/intel/atom/sst/sst_ipc.c 			spin_unlock_bh(&ctx->block_lock);
ctx                82 sound/soc/intel/atom/sst/sst_ipc.c 			wake_up(&ctx->wait_queue);
ctx                86 sound/soc/intel/atom/sst/sst_ipc.c 	spin_unlock_bh(&ctx->block_lock);
ctx                87 sound/soc/intel/atom/sst/sst_ipc.c 	dev_dbg(ctx->dev,
ctx                93 sound/soc/intel/atom/sst/sst_ipc.c int sst_free_block(struct intel_sst_drv *ctx, struct sst_block *freed)
ctx                97 sound/soc/intel/atom/sst/sst_ipc.c 	dev_dbg(ctx->dev, "Enter\n");
ctx                98 sound/soc/intel/atom/sst/sst_ipc.c 	spin_lock_bh(&ctx->block_lock);
ctx                99 sound/soc/intel/atom/sst/sst_ipc.c 	list_for_each_entry_safe(block, __block, &ctx->block_list, node) {
ctx               104 sound/soc/intel/atom/sst/sst_ipc.c 			spin_unlock_bh(&ctx->block_lock);
ctx               111 sound/soc/intel/atom/sst/sst_ipc.c 	spin_unlock_bh(&ctx->block_lock);
ctx               112 sound/soc/intel/atom/sst/sst_ipc.c 	dev_err(ctx->dev, "block is already freed\n");
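
sst_ipc.c matches IPC requests to replies with a spinlock-protected block list and one shared wait queue: the sender creates a block, the interrupt path marks the matching block complete and wakes the queue, and a wake-up for a block that was already freed is logged as an error. A hedged sketch of the wake-up half, with hypothetical struct names mirroring sst_block and intel_sst_drv:

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>
#include <linux/wait.h>

struct ipc_block {			/* hypothetical mirror of sst_block */
	struct list_head node;
	u32 msg_id;
	bool condition;			/* set true when the reply lands */
	int ret_code;
};

struct ipc_ctx {			/* hypothetical mirror of intel_sst_drv */
	spinlock_t block_lock;
	struct list_head block_list;
	wait_queue_head_t wait_queue;
};

/* Responder side: mark the matching block done and wake all waiters. */
static int ipc_wake_up_block(struct ipc_ctx *ctx, u32 msg_id, int result)
{
	struct ipc_block *block;
	int ret = -EINVAL;

	spin_lock_bh(&ctx->block_lock);
	list_for_each_entry(block, &ctx->block_list, node) {
		if (block->msg_id == msg_id) {
			block->ret_code = result;
			block->condition = true;
			ret = 0;
			break;
		}
	}
	spin_unlock_bh(&ctx->block_lock);
	wake_up(&ctx->wait_queue);	/* waiters re-check block->condition */
	return ret;
}
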
ctx               107 sound/soc/intel/atom/sst/sst_loader.c static int sst_validate_fw_image(struct intel_sst_drv *ctx, unsigned long size,
ctx               111 sound/soc/intel/atom/sst/sst_loader.c 	const void *sst_fw_in_mem = ctx->fw_in_mem;
ctx               113 sound/soc/intel/atom/sst/sst_loader.c 	dev_dbg(ctx->dev, "Enter\n");
ctx               117 sound/soc/intel/atom/sst/sst_loader.c 	dev_dbg(ctx->dev,
ctx               126 sound/soc/intel/atom/sst/sst_loader.c 		dev_err(ctx->dev, "Invalid FW sign/filesize mismatch\n");
ctx               231 sound/soc/intel/atom/sst/sst_loader.c static int sst_parse_fw_memcpy(struct intel_sst_drv *ctx, unsigned long size,
ctx               238 sound/soc/intel/atom/sst/sst_loader.c 	ret_val = sst_validate_fw_image(ctx, size, &module, &num_modules);
ctx               243 sound/soc/intel/atom/sst/sst_loader.c 		ret_val = sst_parse_module_memcpy(ctx, module, fw_list);
ctx               314 sound/soc/intel/atom/sst/sst_loader.c 	struct intel_sst_drv *ctx = context;
ctx               316 sound/soc/intel/atom/sst/sst_loader.c 	dev_dbg(ctx->dev, "Enter\n");
ctx               319 sound/soc/intel/atom/sst/sst_loader.c 		dev_err(ctx->dev, "request fw failed\n");
ctx               323 sound/soc/intel/atom/sst/sst_loader.c 	mutex_lock(&ctx->sst_lock);
ctx               325 sound/soc/intel/atom/sst/sst_loader.c 	if (ctx->sst_state != SST_RESET ||
ctx               326 sound/soc/intel/atom/sst/sst_loader.c 			ctx->fw_in_mem != NULL) {
ctx               328 sound/soc/intel/atom/sst/sst_loader.c 		mutex_unlock(&ctx->sst_lock);
ctx               332 sound/soc/intel/atom/sst/sst_loader.c 	dev_dbg(ctx->dev, "Request Fw completed\n");
ctx               333 sound/soc/intel/atom/sst/sst_loader.c 	sst_cache_and_parse_fw(ctx, fw);
ctx               334 sound/soc/intel/atom/sst/sst_loader.c 	mutex_unlock(&ctx->sst_lock);
ctx               382 sound/soc/intel/atom/sst/sst_loader.c void sst_post_download_mrfld(struct intel_sst_drv *ctx)
ctx               384 sound/soc/intel/atom/sst/sst_loader.c 	sst_dccm_config_write(ctx->dram, ctx->ddr_base);
ctx               385 sound/soc/intel/atom/sst/sst_loader.c 	dev_dbg(ctx->dev, "config written to DCCM\n");
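
The sst_loader.c callback above runs from request_firmware_nowait() context: it takes sst_lock, rejects the image unless the DSP is still in SST_RESET with nothing cached, then caches and parses the firmware. A minimal sketch of that asynchronous-load shape, assuming a FW_ACTION_HOTPLUG-era kernel; struct fw_ctx is hypothetical:

#include <linux/device.h>
#include <linux/firmware.h>
#include <linux/module.h>
#include <linux/mutex.h>

struct fw_ctx {				/* hypothetical driver context */
	struct device *dev;
	struct mutex lock;
	const struct firmware *fw;
};

static void fw_ready_cb(const struct firmware *fw, void *context)
{
	struct fw_ctx *ctx = context;

	if (!fw) {
		dev_err(ctx->dev, "request fw failed\n");
		return;
	}
	mutex_lock(&ctx->lock);
	ctx->fw = fw;	/* cache; released later via release_firmware() */
	mutex_unlock(&ctx->lock);
}

static int fw_load_async(struct fw_ctx *ctx, const char *name)
{
	/* returns immediately; fw_ready_cb() fires when the image arrives */
	return request_firmware_nowait(THIS_MODULE, FW_ACTION_HOTPLUG,
				       name, ctx->dev, GFP_KERNEL,
				       ctx, fw_ready_cb);
}
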
ctx                25 sound/soc/intel/atom/sst/sst_pci.c static int sst_platform_get_resources(struct intel_sst_drv *ctx)
ctx                28 sound/soc/intel/atom/sst/sst_pci.c 	struct pci_dev *pci = ctx->pci;
ctx                36 sound/soc/intel/atom/sst/sst_pci.c 	if (ctx->dev_id == SST_MRFLD_PCI_ID) {
ctx                37 sound/soc/intel/atom/sst/sst_pci.c 		ctx->ddr_base = pci_resource_start(pci, 0);
ctx                39 sound/soc/intel/atom/sst/sst_pci.c 		ddr_base = relocate_imr_addr_mrfld(ctx->ddr_base);
ctx                40 sound/soc/intel/atom/sst/sst_pci.c 		if (!ctx->pdata->lib_info) {
ctx                41 sound/soc/intel/atom/sst/sst_pci.c 			dev_err(ctx->dev, "lib_info pointer NULL\n");
ctx                45 sound/soc/intel/atom/sst/sst_pci.c 		if (ddr_base != ctx->pdata->lib_info->mod_base) {
ctx                46 sound/soc/intel/atom/sst/sst_pci.c 			dev_err(ctx->dev,
ctx                51 sound/soc/intel/atom/sst/sst_pci.c 		ctx->ddr_end = pci_resource_end(pci, 0);
ctx                53 sound/soc/intel/atom/sst/sst_pci.c 		ctx->ddr = pcim_iomap(pci, 0,
ctx                55 sound/soc/intel/atom/sst/sst_pci.c 		if (!ctx->ddr) {
ctx                59 sound/soc/intel/atom/sst/sst_pci.c 		dev_dbg(ctx->dev, "sst: DDR Ptr %p\n", ctx->ddr);
ctx                61 sound/soc/intel/atom/sst/sst_pci.c 		ctx->ddr = NULL;
ctx                64 sound/soc/intel/atom/sst/sst_pci.c 	ctx->shim_phy_add = pci_resource_start(pci, 1);
ctx                65 sound/soc/intel/atom/sst/sst_pci.c 	ctx->shim = pcim_iomap(pci, 1, pci_resource_len(pci, 1));
ctx                66 sound/soc/intel/atom/sst/sst_pci.c 	if (!ctx->shim) {
ctx                70 sound/soc/intel/atom/sst/sst_pci.c 	dev_dbg(ctx->dev, "SST Shim Ptr %p\n", ctx->shim);
ctx                73 sound/soc/intel/atom/sst/sst_pci.c 	ctx->mailbox_add = pci_resource_start(pci, 2);
ctx                74 sound/soc/intel/atom/sst/sst_pci.c 	ctx->mailbox = pcim_iomap(pci, 2, pci_resource_len(pci, 2));
ctx                75 sound/soc/intel/atom/sst/sst_pci.c 	if (!ctx->mailbox) {
ctx                79 sound/soc/intel/atom/sst/sst_pci.c 	dev_dbg(ctx->dev, "SRAM Ptr %p\n", ctx->mailbox);
ctx                82 sound/soc/intel/atom/sst/sst_pci.c 	ctx->iram_end = pci_resource_end(pci, 3);
ctx                83 sound/soc/intel/atom/sst/sst_pci.c 	ctx->iram_base = pci_resource_start(pci, 3);
ctx                84 sound/soc/intel/atom/sst/sst_pci.c 	ctx->iram = pcim_iomap(pci, 3, pci_resource_len(pci, 3));
ctx                85 sound/soc/intel/atom/sst/sst_pci.c 	if (!ctx->iram) {
ctx                89 sound/soc/intel/atom/sst/sst_pci.c 	dev_dbg(ctx->dev, "IRAM Ptr %p\n", ctx->iram);
ctx                92 sound/soc/intel/atom/sst/sst_pci.c 	ctx->dram_end = pci_resource_end(pci, 4);
ctx                93 sound/soc/intel/atom/sst/sst_pci.c 	ctx->dram_base = pci_resource_start(pci, 4);
ctx                94 sound/soc/intel/atom/sst/sst_pci.c 	ctx->dram = pcim_iomap(pci, 4, pci_resource_len(pci, 4));
ctx                95 sound/soc/intel/atom/sst/sst_pci.c 	if (!ctx->dram) {
ctx                99 sound/soc/intel/atom/sst/sst_pci.c 	dev_dbg(ctx->dev, "DRAM Ptr %p\n", ctx->dram);
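
sst_pci.c maps every BAR with the managed pcim_iomap(), which is why none of the error paths need an explicit unmap. A short sketch of the per-BAR idiom, using BAR 1 (the shim) as the example:

#include <linux/pci.h>

static void __iomem *map_shim(struct pci_dev *pci)
{
	/* pcim_iomap() is device-managed: unmapped on driver detach */
	void __iomem *shim = pcim_iomap(pci, 1, pci_resource_len(pci, 1));

	if (!shim)
		dev_err(&pci->dev, "failed to map SST shim (BAR 1)\n");
	return shim;
}
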
ctx               226 sound/soc/intel/boards/bxt_da7219_max98357a.c 	struct bxt_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               237 sound/soc/intel/boards/bxt_da7219_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               605 sound/soc/intel/boards/bxt_da7219_max98357a.c 	struct bxt_card_private *ctx = snd_soc_card_get_drvdata(card);
ctx               618 sound/soc/intel/boards/bxt_da7219_max98357a.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               661 sound/soc/intel/boards/bxt_da7219_max98357a.c 	struct bxt_card_private *ctx;
ctx               666 sound/soc/intel/boards/bxt_da7219_max98357a.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               667 sound/soc/intel/boards/bxt_da7219_max98357a.c 	if (!ctx)
ctx               670 sound/soc/intel/boards/bxt_da7219_max98357a.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               673 sound/soc/intel/boards/bxt_da7219_max98357a.c 	snd_soc_card_set_drvdata(&broxton_audio_card, ctx);
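
Every machine-driver probe in the entries below follows the same shape as this bxt_da7219_max98357a.c one: devm-allocate the private context, initialise its HDMI PCM list, and attach it to the card. A sketch of that idiom, with a hypothetical card and private struct:

#include <linux/list.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include <sound/soc.h>

struct card_private {			/* hypothetical *_private mirror */
	struct list_head hdmi_pcm_list;
};

static struct snd_soc_card example_card = {
	.name = "example-card",		/* illustrative only */
	.owner = THIS_MODULE,
};

static int board_probe(struct platform_device *pdev)
{
	struct card_private *ctx;

	/* devm_ allocation: freed automatically when the driver detaches */
	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;

	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
	example_card.dev = &pdev->dev;
	snd_soc_card_set_drvdata(&example_card, ctx);

	return devm_snd_soc_register_card(&pdev->dev, &example_card);
}
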
ctx               186 sound/soc/intel/boards/bxt_rt298.c 	struct bxt_rt286_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               197 sound/soc/intel/boards/bxt_rt298.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               524 sound/soc/intel/boards/bxt_rt298.c 	struct bxt_rt286_private *ctx = snd_soc_card_get_drvdata(card);
ctx               530 sound/soc/intel/boards/bxt_rt298.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               588 sound/soc/intel/boards/bxt_rt298.c 	struct bxt_rt286_private *ctx;
ctx               611 sound/soc/intel/boards/bxt_rt298.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               612 sound/soc/intel/boards/bxt_rt298.c 	if (!ctx)
ctx               615 sound/soc/intel/boards/bxt_rt298.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               618 sound/soc/intel/boards/bxt_rt298.c 	snd_soc_card_set_drvdata(card, ctx);
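
The hdmi_pcm_list those probes initialise is filled from each HDMI DAI-link init and walked again in the card's late probe to create the HDMI jacks. A hedged sketch of the producer half; struct hdmi_pcm is a hypothetical mirror of the per-board *_hdmi_pcm types:

#include <linux/device.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <sound/soc.h>

struct hdmi_pcm {			/* hypothetical *_hdmi_pcm mirror */
	struct list_head head;
	struct snd_soc_dai *codec_dai;
	int device;
};

static int hdmi_link_init(struct device *dev, struct list_head *pcm_list,
			  struct snd_soc_dai *dai, int device)
{
	struct hdmi_pcm *pcm;

	pcm = devm_kzalloc(dev, sizeof(*pcm), GFP_KERNEL);
	if (!pcm)
		return -ENOMEM;

	pcm->device = device;
	pcm->codec_dai = dai;
	list_add_tail(&pcm->head, pcm_list);	/* consumed in late probe */
	return 0;
}
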
ctx                48 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx                52 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	if (ctx->quirks & QUIRK_PMC_PLT_CLK_0)
ctx                62 sound/soc/intel/boards/cht_bsw_max98090_ti.c 		ret = clk_prepare_enable(ctx->mclk);
ctx                69 sound/soc/intel/boards/cht_bsw_max98090_ti.c 		clk_disable_unprepare(ctx->mclk);
ctx               190 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(runtime->card);
ctx               191 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct snd_soc_jack *jack = &ctx->jack;
ctx               193 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	if (ctx->ts3a227e_present) {
ctx               225 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	if (ctx->quirks & QUIRK_PMC_PLT_CLK_0)
ctx               238 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	ret = clk_prepare_enable(ctx->mclk);
ctx               240 sound/soc/intel/boards/cht_bsw_max98090_ti.c 		clk_disable_unprepare(ctx->mclk);
ctx               242 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	ret = clk_set_rate(ctx->mclk, CHT_PLAT_CLK_3_HZ);
ctx               293 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx               294 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct snd_soc_jack *jack = &ctx->jack;
ctx               608 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx               610 sound/soc/intel/boards/cht_bsw_max98090_ti.c 	if (ctx->quirks & QUIRK_PMC_PLT_CLK_0)
ctx               611 sound/soc/intel/boards/cht_bsw_max98090_ti.c 		clk_disable_unprepare(ctx->mclk);
ctx                97 sound/soc/intel/boards/cht_bsw_nau8824.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(runtime->card);
ctx                98 sound/soc/intel/boards/cht_bsw_nau8824.c 	struct snd_soc_jack *jack = &ctx->jack;
ctx                73 sound/soc/intel/boards/cht_bsw_rt5645.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx                86 sound/soc/intel/boards/cht_bsw_rt5645.c 		ret = clk_prepare_enable(ctx->mclk);
ctx               105 sound/soc/intel/boards/cht_bsw_rt5645.c 		clk_disable_unprepare(ctx->mclk);
ctx               254 sound/soc/intel/boards/cht_bsw_rt5645.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(runtime->card);
ctx               298 sound/soc/intel/boards/cht_bsw_rt5645.c 	if (ctx->acpi_card->codec_type == CODEC_TYPE_RT5650)
ctx               306 sound/soc/intel/boards/cht_bsw_rt5645.c 				    jack_type, &ctx->jack,
ctx               313 sound/soc/intel/boards/cht_bsw_rt5645.c 	rt5645_set_jack_detect(component, &ctx->jack, &ctx->jack, &ctx->jack);
ctx               326 sound/soc/intel/boards/cht_bsw_rt5645.c 	ret = clk_prepare_enable(ctx->mclk);
ctx               328 sound/soc/intel/boards/cht_bsw_rt5645.c 		clk_disable_unprepare(ctx->mclk);
ctx               330 sound/soc/intel/boards/cht_bsw_rt5645.c 	ret = clk_set_rate(ctx->mclk, CHT_PLAT_CLK_3_HZ);
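
The cht_bsw_* codec inits all drive the PMC platform clock the same way: prepare and enable first, then set the 19.2 MHz rate, unwinding the enable if anything fails. A minimal sketch of that sequence, with 19200000 standing in for CHT_PLAT_CLK_3_HZ:

#include <linux/clk.h>
#include <linux/device.h>

#define PLAT_CLK_HZ	19200000	/* cf. CHT_PLAT_CLK_3_HZ */

static int enable_mclk(struct device *dev, struct clk *mclk)
{
	int ret;

	ret = clk_prepare_enable(mclk);
	if (ret < 0) {
		dev_err(dev, "could not enable mclk: %d\n", ret);
		return ret;
	}

	ret = clk_set_rate(mclk, PLAT_CLK_HZ);
	if (ret) {
		clk_disable_unprepare(mclk);	/* unwind the enable */
		dev_err(dev, "could not set mclk rate: %d\n", ret);
	}
	return ret;
}
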
ctx                54 sound/soc/intel/boards/cht_bsw_rt5672.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx                64 sound/soc/intel/boards/cht_bsw_rt5672.c 		if (ctx->mclk) {
ctx                65 sound/soc/intel/boards/cht_bsw_rt5672.c 			ret = clk_prepare_enable(ctx->mclk);
ctx                97 sound/soc/intel/boards/cht_bsw_rt5672.c 		if (ctx->mclk)
ctx                98 sound/soc/intel/boards/cht_bsw_rt5672.c 			clk_disable_unprepare(ctx->mclk);
ctx               181 sound/soc/intel/boards/cht_bsw_rt5672.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(runtime->card);
ctx               203 sound/soc/intel/boards/cht_bsw_rt5672.c 				    &ctx->headset,
ctx               209 sound/soc/intel/boards/cht_bsw_rt5672.c 	snd_jack_set_key(ctx->headset.jack, SND_JACK_BTN_0, KEY_PLAYPAUSE);
ctx               210 sound/soc/intel/boards/cht_bsw_rt5672.c 	snd_jack_set_key(ctx->headset.jack, SND_JACK_BTN_1, KEY_VOLUMEUP);
ctx               211 sound/soc/intel/boards/cht_bsw_rt5672.c 	snd_jack_set_key(ctx->headset.jack, SND_JACK_BTN_2, KEY_VOLUMEDOWN);
ctx               213 sound/soc/intel/boards/cht_bsw_rt5672.c 	rt5670_set_jack_detect(component, &ctx->headset);
ctx               214 sound/soc/intel/boards/cht_bsw_rt5672.c 	if (ctx->mclk) {
ctx               225 sound/soc/intel/boards/cht_bsw_rt5672.c 		ret = clk_prepare_enable(ctx->mclk);
ctx               227 sound/soc/intel/boards/cht_bsw_rt5672.c 			clk_disable_unprepare(ctx->mclk);
ctx               229 sound/soc/intel/boards/cht_bsw_rt5672.c 		ret = clk_set_rate(ctx->mclk, CHT_PLAT_CLK_3_HZ);
ctx               350 sound/soc/intel/boards/cht_bsw_rt5672.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx               354 sound/soc/intel/boards/cht_bsw_rt5672.c 			     ctx->codec_name, sizeof(ctx->codec_name))) {
ctx               367 sound/soc/intel/boards/cht_bsw_rt5672.c 	struct cht_mc_private *ctx = snd_soc_card_get_drvdata(card);
ctx               371 sound/soc/intel/boards/cht_bsw_rt5672.c 			     ctx->codec_name, sizeof(ctx->codec_name))) {
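
After creating the headset jack, cht_bsw_rt5672.c maps the three jack buttons to input keycodes before handing the jack to the codec driver. The same three-line mapping, extracted as a sketch:

#include <linux/input.h>
#include <sound/jack.h>
#include <sound/soc.h>

static void map_headset_keys(struct snd_soc_jack *headset)
{
	/* button events surface as regular input keycodes */
	snd_jack_set_key(headset->jack, SND_JACK_BTN_0, KEY_PLAYPAUSE);
	snd_jack_set_key(headset->jack, SND_JACK_BTN_1, KEY_VOLUMEUP);
	snd_jack_set_key(headset->jack, SND_JACK_BTN_2, KEY_VOLUMEDOWN);
}
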
ctx               136 sound/soc/intel/boards/glk_rt5682_max98357a.c 	struct glk_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               162 sound/soc/intel/boards/glk_rt5682_max98357a.c 			&ctx->geminilake_headset, NULL, 0);
ctx               168 sound/soc/intel/boards/glk_rt5682_max98357a.c 	jack = &ctx->geminilake_headset;
ctx               208 sound/soc/intel/boards/glk_rt5682_max98357a.c 	struct glk_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               219 sound/soc/intel/boards/glk_rt5682_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               541 sound/soc/intel/boards/glk_rt5682_max98357a.c 	struct glk_card_private *ctx = snd_soc_card_get_drvdata(card);
ctx               548 sound/soc/intel/boards/glk_rt5682_max98357a.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               591 sound/soc/intel/boards/glk_rt5682_max98357a.c 	struct glk_card_private *ctx;
ctx               597 sound/soc/intel/boards/glk_rt5682_max98357a.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               598 sound/soc/intel/boards/glk_rt5682_max98357a.c 	if (!ctx)
ctx               601 sound/soc/intel/boards/glk_rt5682_max98357a.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               605 sound/soc/intel/boards/glk_rt5682_max98357a.c 	snd_soc_card_set_drvdata(card, ctx);
ctx               161 sound/soc/intel/boards/kbl_da7219_max98357a.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               182 sound/soc/intel/boards/kbl_da7219_max98357a.c 			&ctx->kabylake_headset, NULL, 0);
ctx               188 sound/soc/intel/boards/kbl_da7219_max98357a.c 	jack = &ctx->kabylake_headset;
ctx               194 sound/soc/intel/boards/kbl_da7219_max98357a.c 	da7219_aad_jack_det(component, &ctx->kabylake_headset);
ctx               205 sound/soc/intel/boards/kbl_da7219_max98357a.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               216 sound/soc/intel/boards/kbl_da7219_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               542 sound/soc/intel/boards/kbl_da7219_max98357a.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(card);
ctx               548 sound/soc/intel/boards/kbl_da7219_max98357a.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               592 sound/soc/intel/boards/kbl_da7219_max98357a.c 	struct kbl_codec_private *ctx;
ctx               594 sound/soc/intel/boards/kbl_da7219_max98357a.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               595 sound/soc/intel/boards/kbl_da7219_max98357a.c 	if (!ctx)
ctx               598 sound/soc/intel/boards/kbl_da7219_max98357a.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               604 sound/soc/intel/boards/kbl_da7219_max98357a.c 	snd_soc_card_set_drvdata(kabylake_audio_card, ctx);
ctx               333 sound/soc/intel/boards/kbl_da7219_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               351 sound/soc/intel/boards/kbl_da7219_max98927.c 			&ctx->kabylake_headset, NULL, 0);
ctx               357 sound/soc/intel/boards/kbl_da7219_max98927.c 	jack = &ctx->kabylake_headset;
ctx               363 sound/soc/intel/boards/kbl_da7219_max98927.c 	da7219_aad_jack_det(component, &ctx->kabylake_headset);
ctx               380 sound/soc/intel/boards/kbl_da7219_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               391 sound/soc/intel/boards/kbl_da7219_max98927.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               970 sound/soc/intel/boards/kbl_da7219_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(card);
ctx               977 sound/soc/intel/boards/kbl_da7219_max98927.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx              1092 sound/soc/intel/boards/kbl_da7219_max98927.c 	struct kbl_codec_private *ctx;
ctx              1097 sound/soc/intel/boards/kbl_da7219_max98927.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx              1098 sound/soc/intel/boards/kbl_da7219_max98927.c 	if (!ctx)
ctx              1101 sound/soc/intel/boards/kbl_da7219_max98927.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx              1123 sound/soc/intel/boards/kbl_da7219_max98927.c 	snd_soc_card_set_drvdata(kabylake_audio_card, ctx);
ctx               159 sound/soc/intel/boards/kbl_rt5660.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               168 sound/soc/intel/boards/kbl_rt5660.c 	ctx->gpio_lo_mute = devm_gpiod_get(component->dev, "lineout-mute",
ctx               170 sound/soc/intel/boards/kbl_rt5660.c 	if (IS_ERR(ctx->gpio_lo_mute)) {
ctx               172 sound/soc/intel/boards/kbl_rt5660.c 		return PTR_ERR(ctx->gpio_lo_mute);
ctx               212 sound/soc/intel/boards/kbl_rt5660.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               223 sound/soc/intel/boards/kbl_rt5660.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               464 sound/soc/intel/boards/kbl_rt5660.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(card);
ctx               470 sound/soc/intel/boards/kbl_rt5660.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               514 sound/soc/intel/boards/kbl_rt5660.c 	struct kbl_codec_private *ctx;
ctx               516 sound/soc/intel/boards/kbl_rt5660.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               517 sound/soc/intel/boards/kbl_rt5660.c 	if (!ctx)
ctx               520 sound/soc/intel/boards/kbl_rt5660.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               526 sound/soc/intel/boards/kbl_rt5660.c 	snd_soc_card_set_drvdata(kabylake_audio_card, ctx);
ctx               260 sound/soc/intel/boards/kbl_rt5663_max98927.c 	struct kbl_rt5663_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               270 sound/soc/intel/boards/kbl_rt5663_max98927.c 			SND_JACK_BTN_2 | SND_JACK_BTN_3, &ctx->kabylake_headset,
ctx               277 sound/soc/intel/boards/kbl_rt5663_max98927.c 	jack = &ctx->kabylake_headset;
ctx               283 sound/soc/intel/boards/kbl_rt5663_max98927.c 	snd_soc_component_set_jack(component, &ctx->kabylake_headset, NULL);
ctx               307 sound/soc/intel/boards/kbl_rt5663_max98927.c 	struct kbl_rt5663_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               318 sound/soc/intel/boards/kbl_rt5663_max98927.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               882 sound/soc/intel/boards/kbl_rt5663_max98927.c 	struct kbl_rt5663_private *ctx = snd_soc_card_get_drvdata(card);
ctx               888 sound/soc/intel/boards/kbl_rt5663_max98927.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               949 sound/soc/intel/boards/kbl_rt5663_max98927.c 	struct kbl_rt5663_private *ctx;
ctx               953 sound/soc/intel/boards/kbl_rt5663_max98927.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               954 sound/soc/intel/boards/kbl_rt5663_max98927.c 	if (!ctx)
ctx               957 sound/soc/intel/boards/kbl_rt5663_max98927.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               963 sound/soc/intel/boards/kbl_rt5663_max98927.c 	snd_soc_card_set_drvdata(kabylake_audio_card, ctx);
ctx               970 sound/soc/intel/boards/kbl_rt5663_max98927.c 	ctx->mclk = devm_clk_get(&pdev->dev, "ssp1_mclk");
ctx               971 sound/soc/intel/boards/kbl_rt5663_max98927.c 	if (IS_ERR(ctx->mclk)) {
ctx               972 sound/soc/intel/boards/kbl_rt5663_max98927.c 		ret = PTR_ERR(ctx->mclk);
ctx               984 sound/soc/intel/boards/kbl_rt5663_max98927.c 	ctx->sclk = devm_clk_get(&pdev->dev, "ssp1_sclk");
ctx               985 sound/soc/intel/boards/kbl_rt5663_max98927.c 	if (IS_ERR(ctx->sclk)) {
ctx               986 sound/soc/intel/boards/kbl_rt5663_max98927.c 		ret = PTR_ERR(ctx->sclk);
ctx               158 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               168 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 			SND_JACK_BTN_2 | SND_JACK_BTN_3, &ctx->kabylake_headset,
ctx               175 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	jack = &ctx->kabylake_headset;
ctx               181 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	snd_soc_component_set_jack(component, &ctx->kabylake_headset, NULL);
ctx               192 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               203 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               596 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	struct kbl_codec_private *ctx = snd_soc_card_get_drvdata(card);
ctx               602 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               607 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 				SND_JACK_AVOUT, &ctx->kabylake_hdmi[i],
ctx               613 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 						&ctx->kabylake_hdmi[i]);
ctx               647 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	struct kbl_codec_private *ctx;
ctx               650 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               651 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	if (!ctx)
ctx               654 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               657 sound/soc/intel/boards/kbl_rt5663_rt5514_max98927.c 	snd_soc_card_set_drvdata(&kabylake_audio_card, ctx);
ctx                21 sound/soc/intel/boards/skl_hda_dsp_common.c 	struct skl_hda_private *ctx = snd_soc_card_get_drvdata(card);
ctx                30 sound/soc/intel/boards/skl_hda_dsp_common.c 		 ctx->dai_index);
ctx                36 sound/soc/intel/boards/skl_hda_dsp_common.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               136 sound/soc/intel/boards/skl_hda_dsp_common.c 	struct skl_hda_private *ctx = snd_soc_card_get_drvdata(card);
ctx               142 sound/soc/intel/boards/skl_hda_dsp_common.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx                72 sound/soc/intel/boards/skl_hda_dsp_generic.c 	struct skl_hda_private *ctx = snd_soc_card_get_drvdata(card);
ctx                76 sound/soc/intel/boards/skl_hda_dsp_generic.c 	link->platforms->name = ctx->platform_name;
ctx                80 sound/soc/intel/boards/skl_hda_dsp_generic.c 		ret = skl_hda_hdmi_add_pcm(card, ctx->pcm_count);
ctx                85 sound/soc/intel/boards/skl_hda_dsp_generic.c 		ctx->dai_index++;
ctx                88 sound/soc/intel/boards/skl_hda_dsp_generic.c 	ctx->pcm_count++;
ctx               157 sound/soc/intel/boards/skl_hda_dsp_generic.c 	struct skl_hda_private *ctx;
ctx               162 sound/soc/intel/boards/skl_hda_dsp_generic.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               163 sound/soc/intel/boards/skl_hda_dsp_generic.c 	if (!ctx)
ctx               166 sound/soc/intel/boards/skl_hda_dsp_generic.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               178 sound/soc/intel/boards/skl_hda_dsp_generic.c 	ctx->pcm_count = hda_soc_card.num_links;
ctx               179 sound/soc/intel/boards/skl_hda_dsp_generic.c 	ctx->dai_index = 1; /* hdmi codec dai name starts from index 1 */
ctx               180 sound/soc/intel/boards/skl_hda_dsp_generic.c 	ctx->platform_name = mach->mach_params.platform;
ctx               183 sound/soc/intel/boards/skl_hda_dsp_generic.c 	snd_soc_card_set_drvdata(&hda_soc_card, ctx);
ctx               184 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	struct skl_nau8825_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               195 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               202 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	struct skl_nau8825_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               213 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               220 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	struct skl_nau8825_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               231 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               601 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	struct skl_nau8825_private *ctx = snd_soc_card_get_drvdata(card);
ctx               607 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               651 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	struct skl_nau8825_private *ctx;
ctx               654 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               655 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	if (!ctx)
ctx               658 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               661 sound/soc/intel/boards/skl_nau88l25_max98357a.c 	snd_soc_card_set_drvdata(&skylake_audio_card, ctx);
ctx               203 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	struct skl_nau88125_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               214 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               221 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	struct skl_nau88125_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               232 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               240 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	struct skl_nau88125_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               251 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               642 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	struct skl_nau88125_private *ctx = snd_soc_card_get_drvdata(card);
ctx               648 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               694 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	struct skl_nau88125_private *ctx;
ctx               697 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               698 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	if (!ctx)
ctx               701 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               704 sound/soc/intel/boards/skl_nau88l25_ssm4567.c 	snd_soc_card_set_drvdata(&skylake_audio_card, ctx);
ctx               145 sound/soc/intel/boards/skl_rt286.c 	struct skl_rt286_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               156 sound/soc/intel/boards/skl_rt286.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               483 sound/soc/intel/boards/skl_rt286.c 	struct skl_rt286_private *ctx = snd_soc_card_get_drvdata(card);
ctx               489 sound/soc/intel/boards/skl_rt286.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               532 sound/soc/intel/boards/skl_rt286.c 	struct skl_rt286_private *ctx;
ctx               534 sound/soc/intel/boards/skl_rt286.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               535 sound/soc/intel/boards/skl_rt286.c 	if (!ctx)
ctx               538 sound/soc/intel/boards/skl_rt286.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               541 sound/soc/intel/boards/skl_rt286.c 	snd_soc_card_set_drvdata(&skylake_rt286, ctx);
ctx               120 sound/soc/intel/boards/sof_rt5682.c 	struct sof_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               132 sound/soc/intel/boards/sof_rt5682.c 	list_add_tail(&pcm->head, &ctx->hdmi_pcm_list);
ctx               139 sound/soc/intel/boards/sof_rt5682.c 	struct sof_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               163 sound/soc/intel/boards/sof_rt5682.c 		ret = clk_prepare_enable(ctx->mclk);
ctx               165 sound/soc/intel/boards/sof_rt5682.c 			clk_disable_unprepare(ctx->mclk);
ctx               167 sound/soc/intel/boards/sof_rt5682.c 		ret = clk_set_rate(ctx->mclk, 19200000);
ctx               181 sound/soc/intel/boards/sof_rt5682.c 				    &ctx->sof_headset, NULL, 0);
ctx               187 sound/soc/intel/boards/sof_rt5682.c 	jack = &ctx->sof_headset;
ctx               207 sound/soc/intel/boards/sof_rt5682.c 	struct sof_card_private *ctx = snd_soc_card_get_drvdata(rtd->card);
ctx               213 sound/soc/intel/boards/sof_rt5682.c 			ret = clk_prepare_enable(ctx->mclk);
ctx               270 sound/soc/intel/boards/sof_rt5682.c 	struct sof_card_private *ctx = snd_soc_card_get_drvdata(card);
ctx               281 sound/soc/intel/boards/sof_rt5682.c 	list_for_each_entry(pcm, &ctx->hdmi_pcm_list, head) {
ctx               584 sound/soc/intel/boards/sof_rt5682.c 	struct sof_card_private *ctx;
ctx               588 sound/soc/intel/boards/sof_rt5682.c 	ctx = devm_kzalloc(&pdev->dev, sizeof(*ctx), GFP_KERNEL);
ctx               589 sound/soc/intel/boards/sof_rt5682.c 	if (!ctx)
ctx               624 sound/soc/intel/boards/sof_rt5682.c 		ctx->mclk = devm_clk_get(&pdev->dev, "pmc_plt_clk_3");
ctx               625 sound/soc/intel/boards/sof_rt5682.c 		if (IS_ERR(ctx->mclk)) {
ctx               626 sound/soc/intel/boards/sof_rt5682.c 			ret = PTR_ERR(ctx->mclk);
ctx               634 sound/soc/intel/boards/sof_rt5682.c 		ret = clk_prepare_enable(ctx->mclk);
ctx               662 sound/soc/intel/boards/sof_rt5682.c 	INIT_LIST_HEAD(&ctx->hdmi_pcm_list);
ctx               672 sound/soc/intel/boards/sof_rt5682.c 	snd_soc_card_set_drvdata(&sof_audio_card_rt5682, ctx);
ctx               245 sound/soc/intel/common/sst-dsp.c int sst_dsp_register_poll(struct sst_dsp *ctx, u32 offset, u32 mask,
ctx               263 sound/soc/intel/common/sst-dsp.c 	while ((((reg = sst_dsp_shim_read_unlocked(ctx, offset)) & mask) != target)
ctx               273 sound/soc/intel/common/sst-dsp.c 		dev_dbg(ctx->dev, "FW Poll Status: reg=%#x %s successful\n",
ctx               279 sound/soc/intel/common/sst-dsp.c 	dev_dbg(ctx->dev, "FW Poll Status: reg=%#x %s timedout\n",
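
sst_dsp_register_poll() spins on a shim register until (reg & mask) == target or a millisecond deadline passes, logging success or timeout either way. A hedged reimplementation of the loop shape, assuming process context so it may sleep between reads:

#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/jiffies.h>
#include <linux/types.h>

static int register_poll(void __iomem *base, u32 offset, u32 mask,
			 u32 target, u32 time_ms)
{
	unsigned long deadline = jiffies + msecs_to_jiffies(time_ms);
	u32 reg;

	do {
		reg = readl(base + offset);
		if ((reg & mask) == target)
			return 0;		/* condition met */
		usleep_range(500, 1000);	/* sleep, don't hammer MMIO */
	} while (time_before(jiffies, deadline));

	return -ETIMEDOUT;
}
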
ctx                43 sound/soc/intel/skylake/bxt-sst.c static unsigned int bxt_get_errorcode(struct sst_dsp *ctx)
ctx                45 sound/soc/intel/skylake/bxt-sst.c 	return sst_dsp_shim_read(ctx, BXT_ADSP_ERROR_CODE);
ctx                49 sound/soc/intel/skylake/bxt-sst.c bxt_load_library(struct sst_dsp *ctx, struct skl_lib_info *linfo, int lib_count)
ctx                52 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx                63 sound/soc/intel/skylake/bxt-sst.c 		stream_tag = ctx->dsp_ops.prepare(ctx->dev, 0x40,
ctx                66 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "Lib prepare DMA err: %x\n",
ctx                75 sound/soc/intel/skylake/bxt-sst.c 		ctx->dsp_ops.trigger(ctx->dev, true, stream_tag);
ctx                78 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "IPC Load Lib for %s fail: %d\n",
ctx                81 sound/soc/intel/skylake/bxt-sst.c 		ctx->dsp_ops.trigger(ctx->dev, false, stream_tag);
ctx                82 sound/soc/intel/skylake/bxt-sst.c 		ctx->dsp_ops.cleanup(ctx->dev, &dmab, stream_tag);
ctx                97 sound/soc/intel/skylake/bxt-sst.c static int sst_bxt_prepare_fw(struct sst_dsp *ctx,
ctx               102 sound/soc/intel/skylake/bxt-sst.c 	stream_tag = ctx->dsp_ops.prepare(ctx->dev, 0x40, fwsize, &ctx->dmab);
ctx               104 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Failed to prepare DMA FW loading err: %x\n",
ctx               109 sound/soc/intel/skylake/bxt-sst.c 	ctx->dsp_ops.stream_tag = stream_tag;
ctx               110 sound/soc/intel/skylake/bxt-sst.c 	memcpy(ctx->dmab.area, fwdata, fwsize);
ctx               113 sound/soc/intel/skylake/bxt-sst.c 	ret = skl_dsp_core_power_up(ctx, SKL_DSP_CORE0_MASK |
ctx               116 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "dsp core0/1 power up failed\n");
ctx               121 sound/soc/intel/skylake/bxt-sst.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_HIPCI, SKL_ADSP_REG_HIPCI_BUSY |
ctx               125 sound/soc/intel/skylake/bxt-sst.c 	ret = skl_dsp_start_core(ctx, SKL_DSP_CORE0_MASK);
ctx               127 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Start dsp core failed ret: %d\n", ret);
ctx               133 sound/soc/intel/skylake/bxt-sst.c 	ret = sst_dsp_register_poll(ctx, SKL_ADSP_REG_HIPCIE,
ctx               138 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Timeout for Purge Request, ret: %d\n", ret);
ctx               143 sound/soc/intel/skylake/bxt-sst.c 	ret = skl_dsp_core_power_down(ctx, SKL_DSP_CORE_MASK(1));
ctx               145 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "dsp core1 power down failed\n");
ctx               150 sound/soc/intel/skylake/bxt-sst.c 	skl_ipc_int_enable(ctx);
ctx               151 sound/soc/intel/skylake/bxt-sst.c 	skl_ipc_op_int_enable(ctx);
ctx               154 sound/soc/intel/skylake/bxt-sst.c 	ret = sst_dsp_register_poll(ctx, BXT_ADSP_FW_STATUS, SKL_FW_STS_MASK,
ctx               157 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Timeout for ROM init, ret:%d\n", ret);
ctx               164 sound/soc/intel/skylake/bxt-sst.c 	ctx->dsp_ops.cleanup(ctx->dev, &ctx->dmab, stream_tag);
ctx               165 sound/soc/intel/skylake/bxt-sst.c 	skl_dsp_core_power_down(ctx, SKL_DSP_CORE_MASK(1));
ctx               166 sound/soc/intel/skylake/bxt-sst.c 	skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               170 sound/soc/intel/skylake/bxt-sst.c static int sst_transfer_fw_host_dma(struct sst_dsp *ctx)
ctx               174 sound/soc/intel/skylake/bxt-sst.c 	ctx->dsp_ops.trigger(ctx->dev, true, ctx->dsp_ops.stream_tag);
ctx               175 sound/soc/intel/skylake/bxt-sst.c 	ret = sst_dsp_register_poll(ctx, BXT_ADSP_FW_STATUS, SKL_FW_STS_MASK,
ctx               178 sound/soc/intel/skylake/bxt-sst.c 	ctx->dsp_ops.trigger(ctx->dev, false, ctx->dsp_ops.stream_tag);
ctx               179 sound/soc/intel/skylake/bxt-sst.c 	ctx->dsp_ops.cleanup(ctx->dev, &ctx->dmab, ctx->dsp_ops.stream_tag);
ctx               184 sound/soc/intel/skylake/bxt-sst.c static int bxt_load_base_firmware(struct sst_dsp *ctx)
ctx               187 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               190 sound/soc/intel/skylake/bxt-sst.c 	if (ctx->fw == NULL) {
ctx               191 sound/soc/intel/skylake/bxt-sst.c 		ret = request_firmware(&ctx->fw, ctx->fw_name, ctx->dev);
ctx               193 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "Request firmware failed %d\n", ret);
ctx               200 sound/soc/intel/skylake/bxt-sst.c 		ret = snd_skl_parse_uuids(ctx, ctx->fw, BXT_ADSP_FW_BIN_HDR_OFFSET, 0);
ctx               205 sound/soc/intel/skylake/bxt-sst.c 	stripped_fw.data = ctx->fw->data;
ctx               206 sound/soc/intel/skylake/bxt-sst.c 	stripped_fw.size = ctx->fw->size;
ctx               211 sound/soc/intel/skylake/bxt-sst.c 		ret = sst_bxt_prepare_fw(ctx, stripped_fw.data, stripped_fw.size);
ctx               217 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Error code=0x%x: FW status=0x%x\n",
ctx               218 sound/soc/intel/skylake/bxt-sst.c 			sst_dsp_shim_read(ctx, BXT_ADSP_ERROR_CODE),
ctx               219 sound/soc/intel/skylake/bxt-sst.c 			sst_dsp_shim_read(ctx, BXT_ADSP_FW_STATUS));
ctx               221 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Core En/ROM load fail:%d\n", ret);
ctx               225 sound/soc/intel/skylake/bxt-sst.c 	ret = sst_transfer_fw_host_dma(ctx);
ctx               227 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Transfer firmware failed %d\n", ret);
ctx               228 sound/soc/intel/skylake/bxt-sst.c 		dev_info(ctx->dev, "Error code=0x%x: FW status=0x%x\n",
ctx               229 sound/soc/intel/skylake/bxt-sst.c 			sst_dsp_shim_read(ctx, BXT_ADSP_ERROR_CODE),
ctx               230 sound/soc/intel/skylake/bxt-sst.c 			sst_dsp_shim_read(ctx, BXT_ADSP_FW_STATUS));
ctx               232 sound/soc/intel/skylake/bxt-sst.c 		skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               234 sound/soc/intel/skylake/bxt-sst.c 		dev_dbg(ctx->dev, "Firmware download successful\n");
ctx               238 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "DSP boot fail, FW Ready timeout\n");
ctx               239 sound/soc/intel/skylake/bxt-sst.c 			skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               250 sound/soc/intel/skylake/bxt-sst.c 	release_firmware(ctx->fw);
ctx               251 sound/soc/intel/skylake/bxt-sst.c 	ctx->fw = NULL;
ctx               269 sound/soc/intel/skylake/bxt-sst.c static int bxt_d0i3_target_state(struct sst_dsp *ctx)
ctx               271 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               293 sound/soc/intel/skylake/bxt-sst.c 	struct sst_dsp *ctx = skl->dsp;
ctx               297 sound/soc/intel/skylake/bxt-sst.c 	dev_dbg(ctx->dev, "In %s:\n", __func__);
ctx               300 sound/soc/intel/skylake/bxt-sst.c 	if (skl_dsp_get_enabled_cores(ctx) != SKL_DSP_CORE0_MASK) {
ctx               301 sound/soc/intel/skylake/bxt-sst.c 		dev_warn(ctx->dev,
ctx               306 sound/soc/intel/skylake/bxt-sst.c 	target_state = bxt_d0i3_target_state(ctx);
ctx               320 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Failed to set DSP to D0i3 state\n");
ctx               332 sound/soc/intel/skylake/bxt-sst.c static int bxt_schedule_dsp_D0i3(struct sst_dsp *ctx)
ctx               334 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               338 sound/soc/intel/skylake/bxt-sst.c 	if (bxt_d0i3_target_state(ctx) != SKL_DSP_D0I3_NONE) {
ctx               340 sound/soc/intel/skylake/bxt-sst.c 		dev_dbg(ctx->dev, "%s: Schedule D0i3\n", __func__);
ctx               349 sound/soc/intel/skylake/bxt-sst.c static int bxt_set_dsp_D0i0(struct sst_dsp *ctx)
ctx               353 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               355 sound/soc/intel/skylake/bxt-sst.c 	dev_dbg(ctx->dev, "In %s:\n", __func__);
ctx               364 sound/soc/intel/skylake/bxt-sst.c 	dev_dbg(ctx->dev, "Set DSP to D0i0\n");
ctx               380 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Failed to set DSP to D0i0\n");
ctx               390 sound/soc/intel/skylake/bxt-sst.c static int bxt_set_dsp_D0(struct sst_dsp *ctx, unsigned int core_id)
ctx               392 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               399 sound/soc/intel/skylake/bxt-sst.c 		ret = bxt_load_base_firmware(ctx);
ctx               401 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "reload fw failed: %d\n", ret);
ctx               406 sound/soc/intel/skylake/bxt-sst.c 			ret = bxt_load_library(ctx, skl->lib_info,
ctx               409 sound/soc/intel/skylake/bxt-sst.c 				dev_err(ctx->dev, "reload libs failed: %d\n", ret);
ctx               419 sound/soc/intel/skylake/bxt-sst.c 		ret = skl_dsp_core_power_up(ctx, core_mask |
ctx               422 sound/soc/intel/skylake/bxt-sst.c 		ret = skl_dsp_core_power_up(ctx, core_mask);
ctx               433 sound/soc/intel/skylake/bxt-sst.c 		skl_ipc_int_enable(ctx);
ctx               434 sound/soc/intel/skylake/bxt-sst.c 		skl_ipc_op_int_enable(ctx);
ctx               438 sound/soc/intel/skylake/bxt-sst.c 	ret = skl_dsp_start_core(ctx, core_mask);
ctx               448 sound/soc/intel/skylake/bxt-sst.c 		skl_dsp_core_power_down(ctx, SKL_DSP_CORE_MASK(1));
ctx               450 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "%s: DSP boot timeout\n", __func__);
ctx               451 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "Error code=0x%x: FW status=0x%x\n",
ctx               452 sound/soc/intel/skylake/bxt-sst.c 				sst_dsp_shim_read(ctx, BXT_ADSP_ERROR_CODE),
ctx               453 sound/soc/intel/skylake/bxt-sst.c 				sst_dsp_shim_read(ctx, BXT_ADSP_FW_STATUS));
ctx               454 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "Failed to set core0 to D0 state\n");
ctx               469 sound/soc/intel/skylake/bxt-sst.c 			dev_err(ctx->dev, "IPC set_dx for core %d fail: %d\n",
ctx               480 sound/soc/intel/skylake/bxt-sst.c 	skl_dsp_disable_core(ctx, core_mask);
ctx               485 sound/soc/intel/skylake/bxt-sst.c static int bxt_set_dsp_D3(struct sst_dsp *ctx, unsigned int core_id)
ctx               489 sound/soc/intel/skylake/bxt-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               495 sound/soc/intel/skylake/bxt-sst.c 	dev_dbg(ctx->dev, "core mask=%x dx_mask=%x\n",
ctx               501 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev,
ctx               513 sound/soc/intel/skylake/bxt-sst.c 		skl_ipc_op_int_disable(ctx);
ctx               514 sound/soc/intel/skylake/bxt-sst.c 		skl_ipc_int_disable(ctx);
ctx               516 sound/soc/intel/skylake/bxt-sst.c 	ret = skl_dsp_disable_core(ctx, core_mask);
ctx               518 sound/soc/intel/skylake/bxt-sst.c 		dev_err(ctx->dev, "Failed to disable core %d\n", ret);
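
bxt-sst.c splits the boot into a prepare step (copy the image into a DMA buffer, power up the cores, issue the purge request, poll for ROM init) and a transfer step (trigger the host DMA, poll the FW status register), disabling core 0 whenever a step fails. A condensed sketch of that control flow; the ops table is a hypothetical stand-in for dsp_ops:

#include <linux/firmware.h>
#include <linux/kernel.h>

struct dsp_boot_ops {			/* hypothetical dsp_ops stand-in */
	int (*prepare)(void *dsp, const void *data, size_t size);
	int (*transfer)(void *dsp);
	void (*disable_core)(void *dsp);
};

static int load_base_firmware(void *dsp, const struct firmware *fw,
			      const struct dsp_boot_ops *ops)
{
	int ret;

	ret = ops->prepare(dsp, fw->data, fw->size);	/* DMA buf + ROM init */
	if (ret < 0)
		return ret;

	ret = ops->transfer(dsp);			/* host DMA + FW poll */
	if (ret < 0) {
		ops->disable_core(dsp);			/* leave the core off */
		return ret;
	}
	return 0;	/* caller still waits for the FW_READY notification */
}
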
ctx                27 sound/soc/intel/skylake/cnl-sst-dsp.c cnl_dsp_core_set_reset_state(struct sst_dsp *ctx, unsigned int core_mask)
ctx                30 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx,
ctx                35 sound/soc/intel/skylake/cnl-sst-dsp.c 	return sst_dsp_register_poll(ctx,
ctx                44 sound/soc/intel/skylake/cnl-sst-dsp.c cnl_dsp_core_unset_reset_state(struct sst_dsp *ctx, unsigned int core_mask)
ctx                47 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPCS,
ctx                51 sound/soc/intel/skylake/cnl-sst-dsp.c 	return sst_dsp_register_poll(ctx,
ctx                59 sound/soc/intel/skylake/cnl-sst-dsp.c static bool is_cnl_dsp_core_enable(struct sst_dsp *ctx, unsigned int core_mask)
ctx                64 sound/soc/intel/skylake/cnl-sst-dsp.c 	val = sst_dsp_shim_read_unlocked(ctx, CNL_ADSP_REG_ADSPCS);
ctx                71 sound/soc/intel/skylake/cnl-sst-dsp.c 	dev_dbg(ctx->dev, "DSP core(s) enabled? %d: core_mask %#x\n",
ctx                77 sound/soc/intel/skylake/cnl-sst-dsp.c static int cnl_dsp_reset_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx                80 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPCS,
ctx                85 sound/soc/intel/skylake/cnl-sst-dsp.c 	return cnl_dsp_core_set_reset_state(ctx, core_mask);
ctx                88 sound/soc/intel/skylake/cnl-sst-dsp.c static int cnl_dsp_start_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx                93 sound/soc/intel/skylake/cnl-sst-dsp.c 	ret = cnl_dsp_core_unset_reset_state(ctx, core_mask);
ctx                98 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPCS,
ctx               101 sound/soc/intel/skylake/cnl-sst-dsp.c 	if (!is_cnl_dsp_core_enable(ctx, core_mask)) {
ctx               102 sound/soc/intel/skylake/cnl-sst-dsp.c 		cnl_dsp_reset_core(ctx, core_mask);
ctx               103 sound/soc/intel/skylake/cnl-sst-dsp.c 		dev_err(ctx->dev, "DSP core mask %#x enable failed\n",
ctx               111 sound/soc/intel/skylake/cnl-sst-dsp.c static int cnl_dsp_core_power_up(struct sst_dsp *ctx, unsigned int core_mask)
ctx               114 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPCS,
ctx               119 sound/soc/intel/skylake/cnl-sst-dsp.c 	return sst_dsp_register_poll(ctx, CNL_ADSP_REG_ADSPCS,
ctx               126 sound/soc/intel/skylake/cnl-sst-dsp.c static int cnl_dsp_core_power_down(struct sst_dsp *ctx, unsigned int core_mask)
ctx               129 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPCS,
ctx               133 sound/soc/intel/skylake/cnl-sst-dsp.c 	return sst_dsp_register_poll(ctx,
ctx               141 sound/soc/intel/skylake/cnl-sst-dsp.c int cnl_dsp_enable_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               146 sound/soc/intel/skylake/cnl-sst-dsp.c 	ret = cnl_dsp_core_power_up(ctx, core_mask);
ctx               148 sound/soc/intel/skylake/cnl-sst-dsp.c 		dev_dbg(ctx->dev, "DSP core mask %#x power up failed",
ctx               153 sound/soc/intel/skylake/cnl-sst-dsp.c 	return cnl_dsp_start_core(ctx, core_mask);
ctx               156 sound/soc/intel/skylake/cnl-sst-dsp.c int cnl_dsp_disable_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               160 sound/soc/intel/skylake/cnl-sst-dsp.c 	ret = cnl_dsp_reset_core(ctx, core_mask);
ctx               162 sound/soc/intel/skylake/cnl-sst-dsp.c 		dev_err(ctx->dev, "DSP core mask %#x reset failed\n",
ctx               168 sound/soc/intel/skylake/cnl-sst-dsp.c 	ret = cnl_dsp_core_power_down(ctx, core_mask);
ctx               170 sound/soc/intel/skylake/cnl-sst-dsp.c 		dev_err(ctx->dev, "DSP core mask %#x power down failed\n",
ctx               175 sound/soc/intel/skylake/cnl-sst-dsp.c 	if (is_cnl_dsp_core_enable(ctx, core_mask)) {
ctx               176 sound/soc/intel/skylake/cnl-sst-dsp.c 		dev_err(ctx->dev, "DSP core mask %#x disable failed\n",
ctx               186 sound/soc/intel/skylake/cnl-sst-dsp.c 	struct sst_dsp *ctx = dev_id;
ctx               190 sound/soc/intel/skylake/cnl-sst-dsp.c 	spin_lock(&ctx->spinlock);
ctx               192 sound/soc/intel/skylake/cnl-sst-dsp.c 	val = sst_dsp_shim_read_unlocked(ctx, CNL_ADSP_REG_ADSPIS);
ctx               193 sound/soc/intel/skylake/cnl-sst-dsp.c 	ctx->intr_status = val;
ctx               196 sound/soc/intel/skylake/cnl-sst-dsp.c 		spin_unlock(&ctx->spinlock);
ctx               201 sound/soc/intel/skylake/cnl-sst-dsp.c 		cnl_ipc_int_disable(ctx);
ctx               205 sound/soc/intel/skylake/cnl-sst-dsp.c 	spin_unlock(&ctx->spinlock);
ctx               220 sound/soc/intel/skylake/cnl-sst-dsp.c void cnl_ipc_int_enable(struct sst_dsp *ctx)
ctx               222 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits(ctx, CNL_ADSP_REG_ADSPIC,
ctx               226 sound/soc/intel/skylake/cnl-sst-dsp.c void cnl_ipc_int_disable(struct sst_dsp *ctx)
ctx               228 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, CNL_ADSP_REG_ADSPIC,
ctx               232 sound/soc/intel/skylake/cnl-sst-dsp.c void cnl_ipc_op_int_enable(struct sst_dsp *ctx)
ctx               235 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits(ctx, CNL_ADSP_REG_HIPCCTL,
ctx               240 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits(ctx, CNL_ADSP_REG_HIPCCTL,
ctx               245 sound/soc/intel/skylake/cnl-sst-dsp.c void cnl_ipc_op_int_disable(struct sst_dsp *ctx)
ctx               248 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits(ctx, CNL_ADSP_REG_HIPCCTL,
ctx               252 sound/soc/intel/skylake/cnl-sst-dsp.c 	sst_dsp_shim_update_bits(ctx, CNL_ADSP_REG_HIPCCTL,
ctx               256 sound/soc/intel/skylake/cnl-sst-dsp.c bool cnl_ipc_int_status(struct sst_dsp *ctx)
ctx               258 sound/soc/intel/skylake/cnl-sst-dsp.c 	return sst_dsp_shim_read_unlocked(ctx, CNL_ADSP_REG_ADSPIS) &
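
All the ADSPCS manipulation in cnl-sst-dsp.c reduces to read-modify-write updates of shim registers (the sst_dsp_shim_update_bits* helpers), each followed by a poll for the hardware to acknowledge. A sketch of the underlying update-bits idiom; the locking is left to the caller, and no real CNL register layout is implied:

#include <linux/io.h>
#include <linux/types.h>

static void shim_update_bits(void __iomem *shim, u32 offset,
			     u32 mask, u32 value)
{
	u32 reg = readl(shim + offset);

	/* clear the masked field, then set the requested bits within it */
	reg = (reg & ~mask) | (value & mask);
	writel(reg, shim + offset);
}
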
ctx                85 sound/soc/intel/skylake/cnl-sst-dsp.h int cnl_dsp_enable_core(struct sst_dsp *ctx, unsigned int core);
ctx                86 sound/soc/intel/skylake/cnl-sst-dsp.h int cnl_dsp_disable_core(struct sst_dsp *ctx, unsigned int core);
ctx                90 sound/soc/intel/skylake/cnl-sst-dsp.h void cnl_ipc_int_enable(struct sst_dsp *ctx);
ctx                91 sound/soc/intel/skylake/cnl-sst-dsp.h void cnl_ipc_int_disable(struct sst_dsp *ctx);
ctx                92 sound/soc/intel/skylake/cnl-sst-dsp.h void cnl_ipc_op_int_enable(struct sst_dsp *ctx);
ctx                93 sound/soc/intel/skylake/cnl-sst-dsp.h void cnl_ipc_op_int_disable(struct sst_dsp *ctx);
ctx                94 sound/soc/intel/skylake/cnl-sst-dsp.h bool cnl_ipc_int_status(struct sst_dsp *ctx);
ctx                46 sound/soc/intel/skylake/cnl-sst.c static int cnl_prepare_fw(struct sst_dsp *ctx, const void *fwdata, u32 fwsize)
ctx                51 sound/soc/intel/skylake/cnl-sst.c 	stream_tag = ctx->dsp_ops.prepare(ctx->dev, 0x40, fwsize, &ctx->dmab);
ctx                53 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "dma prepare failed: %#x\n", stream_tag);
ctx                57 sound/soc/intel/skylake/cnl-sst.c 	ctx->dsp_ops.stream_tag = stream_tag;
ctx                58 sound/soc/intel/skylake/cnl-sst.c 	memcpy(ctx->dmab.area, fwdata, fwsize);
ctx                61 sound/soc/intel/skylake/cnl-sst.c 	sst_dsp_shim_write(ctx, CNL_ADSP_REG_HIPCIDR,
ctx                65 sound/soc/intel/skylake/cnl-sst.c 	ret = cnl_dsp_enable_core(ctx, SKL_DSP_CORE0_MASK);
ctx                67 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "dsp core boot failed, ret: %d\n", ret);
ctx                73 sound/soc/intel/skylake/cnl-sst.c 	cnl_ipc_int_enable(ctx);
ctx                74 sound/soc/intel/skylake/cnl-sst.c 	cnl_ipc_op_int_enable(ctx);
ctx                76 sound/soc/intel/skylake/cnl-sst.c 	ret = sst_dsp_register_poll(ctx, CNL_ADSP_FW_STATUS, CNL_FW_STS_MASK,
ctx                80 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "rom init timeout, ret: %d\n", ret);
ctx                87 sound/soc/intel/skylake/cnl-sst.c 	ctx->dsp_ops.cleanup(ctx->dev, &ctx->dmab, stream_tag);
ctx                88 sound/soc/intel/skylake/cnl-sst.c 	cnl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx                93 sound/soc/intel/skylake/cnl-sst.c static int sst_transfer_fw_host_dma(struct sst_dsp *ctx)
ctx                97 sound/soc/intel/skylake/cnl-sst.c 	ctx->dsp_ops.trigger(ctx->dev, true, ctx->dsp_ops.stream_tag);
ctx                98 sound/soc/intel/skylake/cnl-sst.c 	ret = sst_dsp_register_poll(ctx, CNL_ADSP_FW_STATUS, CNL_FW_STS_MASK,
ctx               102 sound/soc/intel/skylake/cnl-sst.c 	ctx->dsp_ops.trigger(ctx->dev, false, ctx->dsp_ops.stream_tag);
ctx               103 sound/soc/intel/skylake/cnl-sst.c 	ctx->dsp_ops.cleanup(ctx->dev, &ctx->dmab, ctx->dsp_ops.stream_tag);
ctx               108 sound/soc/intel/skylake/cnl-sst.c static int cnl_load_base_firmware(struct sst_dsp *ctx)
ctx               111 sound/soc/intel/skylake/cnl-sst.c 	struct skl_dev *cnl = ctx->thread_context;
ctx               114 sound/soc/intel/skylake/cnl-sst.c 	if (!ctx->fw) {
ctx               115 sound/soc/intel/skylake/cnl-sst.c 		ret = request_firmware(&ctx->fw, ctx->fw_name, ctx->dev);
ctx               117 sound/soc/intel/skylake/cnl-sst.c 			dev_err(ctx->dev, "request firmware failed: %d\n", ret);
ctx               124 sound/soc/intel/skylake/cnl-sst.c 		ret = snd_skl_parse_uuids(ctx, ctx->fw,
ctx               130 sound/soc/intel/skylake/cnl-sst.c 	stripped_fw.data = ctx->fw->data;
ctx               131 sound/soc/intel/skylake/cnl-sst.c 	stripped_fw.size = ctx->fw->size;
ctx               134 sound/soc/intel/skylake/cnl-sst.c 	ret = cnl_prepare_fw(ctx, stripped_fw.data, stripped_fw.size);
ctx               136 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "prepare firmware failed: %d\n", ret);
ctx               140 sound/soc/intel/skylake/cnl-sst.c 	ret = sst_transfer_fw_host_dma(ctx);
ctx               142 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "transfer firmware failed: %d\n", ret);
ctx               143 sound/soc/intel/skylake/cnl-sst.c 		cnl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               150 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "FW ready timed out\n");
ctx               151 sound/soc/intel/skylake/cnl-sst.c 		cnl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               161 sound/soc/intel/skylake/cnl-sst.c 	release_firmware(ctx->fw);
ctx               162 sound/soc/intel/skylake/cnl-sst.c 	ctx->fw = NULL;
ctx               167 sound/soc/intel/skylake/cnl-sst.c static int cnl_set_dsp_D0(struct sst_dsp *ctx, unsigned int core_id)
ctx               169 sound/soc/intel/skylake/cnl-sst.c 	struct skl_dev *cnl = ctx->thread_context;
ctx               176 sound/soc/intel/skylake/cnl-sst.c 		ret = cnl_load_base_firmware(ctx);
ctx               178 sound/soc/intel/skylake/cnl-sst.c 			dev_err(ctx->dev, "fw reload failed: %d\n", ret);
ctx               186 sound/soc/intel/skylake/cnl-sst.c 	ret = cnl_dsp_enable_core(ctx, core_mask);
ctx               188 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "enable dsp core %d failed: %d\n",
ctx               195 sound/soc/intel/skylake/cnl-sst.c 		cnl_ipc_int_enable(ctx);
ctx               196 sound/soc/intel/skylake/cnl-sst.c 		cnl_ipc_op_int_enable(ctx);
ctx               202 sound/soc/intel/skylake/cnl-sst.c 			dev_err(ctx->dev,
ctx               204 sound/soc/intel/skylake/cnl-sst.c 				sst_dsp_shim_read(ctx, CNL_ADSP_FW_STATUS),
ctx               205 sound/soc/intel/skylake/cnl-sst.c 				sst_dsp_shim_read(ctx, CNL_ADSP_ERROR_CODE));
ctx               215 sound/soc/intel/skylake/cnl-sst.c 			dev_err(ctx->dev, "set_dx failed, core: %d ret: %d\n",
ctx               224 sound/soc/intel/skylake/cnl-sst.c 	cnl_dsp_disable_core(ctx, core_mask);
ctx               229 sound/soc/intel/skylake/cnl-sst.c static int cnl_set_dsp_D3(struct sst_dsp *ctx, unsigned int core_id)
ctx               231 sound/soc/intel/skylake/cnl-sst.c 	struct skl_dev *cnl = ctx->thread_context;
ctx               242 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev,
ctx               250 sound/soc/intel/skylake/cnl-sst.c 		skl_ipc_op_int_disable(ctx);
ctx               251 sound/soc/intel/skylake/cnl-sst.c 		skl_ipc_int_disable(ctx);
ctx               254 sound/soc/intel/skylake/cnl-sst.c 	ret = cnl_dsp_disable_core(ctx, core_mask);
ctx               256 sound/soc/intel/skylake/cnl-sst.c 		dev_err(ctx->dev, "disable dsp core %d failed: %d\n",
ctx               266 sound/soc/intel/skylake/cnl-sst.c static unsigned int cnl_get_errno(struct sst_dsp *ctx)
ctx               268 sound/soc/intel/skylake/cnl-sst.c 	return sst_dsp_shim_read(ctx, CNL_ADSP_ERROR_CODE);
ctx                16 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_int_enable(struct sst_dsp *ctx)
ctx                18 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,
ctx                22 sound/soc/intel/skylake/skl-sst-cldma.c void skl_cldma_int_disable(struct sst_dsp *ctx)
ctx                24 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits_unlocked(ctx,
ctx                28 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_stream_run(struct sst_dsp *ctx, bool enable)
ctx                33 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits_unlocked(ctx,
ctx                41 sound/soc/intel/skylake/skl-sst-cldma.c 		val = sst_dsp_shim_read(ctx, SKL_ADSP_REG_CL_SD_CTL) &
ctx                51 sound/soc/intel/skylake/skl-sst-cldma.c 		dev_err(ctx->dev, "Failed to set Run bit=%d enable=%d\n", val, enable);
ctx                54 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_stream_clear(struct sst_dsp *ctx)
ctx                57 sound/soc/intel/skylake/skl-sst-cldma.c 	skl_cldma_stream_run(ctx, 0);
ctx                59 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx                61 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx                63 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx                65 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx                68 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL, CL_SD_BDLPLBA(0));
ctx                69 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU, 0);
ctx                71 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, 0);
ctx                72 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, 0);
ctx                76 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_setup_bdle(struct sst_dsp *ctx,
ctx                82 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.frags = 0;
ctx                85 sound/soc/intel/skylake/skl-sst-cldma.c 				(ctx->cl_dev.frags * ctx->cl_dev.bufsize));
ctx                90 sound/soc/intel/skylake/skl-sst-cldma.c 		bdl[2] = cpu_to_le32(ctx->cl_dev.bufsize);
ctx                92 sound/soc/intel/skylake/skl-sst-cldma.c 		size -= ctx->cl_dev.bufsize;
ctx                96 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.frags++;
ctx               106 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_setup_controller(struct sst_dsp *ctx,
ctx               110 sound/soc/intel/skylake/skl-sst-cldma.c 	skl_cldma_stream_clear(ctx);
ctx               111 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPL,
ctx               113 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_BDLPU,
ctx               116 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_CBL, max_size);
ctx               117 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write(ctx, SKL_ADSP_REG_CL_SD_LVI, count - 1);
ctx               118 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx               120 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx               122 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx               124 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_CL_SD_CTL,
ctx               128 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_setup_spb(struct sst_dsp *ctx,
ctx               132 sound/soc/intel/skylake/skl-sst-cldma.c 		sst_dsp_shim_update_bits_unlocked(ctx,
ctx               137 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, size);
ctx               140 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_cleanup_spb(struct sst_dsp *ctx)
ctx               142 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_update_bits_unlocked(ctx,
ctx               147 sound/soc/intel/skylake/skl-sst-cldma.c 	sst_dsp_shim_write_unlocked(ctx, SKL_ADSP_REG_CL_SPBFIFO_SPIB, 0);
ctx               150 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_cleanup(struct sst_dsp *ctx)
ctx               152 sound/soc/intel/skylake/skl-sst-cldma.c 	skl_cldma_cleanup_spb(ctx);
ctx               153 sound/soc/intel/skylake/skl-sst-cldma.c 	skl_cldma_stream_clear(ctx);
ctx               155 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_data);
ctx               156 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_bdl);
ctx               159 sound/soc/intel/skylake/skl-sst-cldma.c int skl_cldma_wait_interruptible(struct sst_dsp *ctx)
ctx               163 sound/soc/intel/skylake/skl-sst-cldma.c 	if (!wait_event_timeout(ctx->cl_dev.wait_queue,
ctx               164 sound/soc/intel/skylake/skl-sst-cldma.c 				ctx->cl_dev.wait_condition,
ctx               166 sound/soc/intel/skylake/skl-sst-cldma.c 		dev_err(ctx->dev, "%s: Wait timeout\n", __func__);
ctx               171 sound/soc/intel/skylake/skl-sst-cldma.c 	dev_dbg(ctx->dev, "%s: Event wake\n", __func__);
ctx               172 sound/soc/intel/skylake/skl-sst-cldma.c 	if (ctx->cl_dev.wake_status != SKL_CL_DMA_BUF_COMPLETE) {
ctx               173 sound/soc/intel/skylake/skl-sst-cldma.c 		dev_err(ctx->dev, "%s: DMA Error\n", __func__);
ctx               178 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.wake_status = SKL_CL_DMA_STATUS_NONE;
ctx               182 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_stop(struct sst_dsp *ctx)
ctx               184 sound/soc/intel/skylake/skl-sst-cldma.c 	skl_cldma_stream_run(ctx, false);
ctx               187 sound/soc/intel/skylake/skl-sst-cldma.c static void skl_cldma_fill_buffer(struct sst_dsp *ctx, unsigned int size,
ctx               190 sound/soc/intel/skylake/skl-sst-cldma.c 	dev_dbg(ctx->dev, "Size: %x, intr_enable: %d\n", size, intr_enable);
ctx               191 sound/soc/intel/skylake/skl-sst-cldma.c 	dev_dbg(ctx->dev, "buf_pos_index:%d, trigger:%d\n",
ctx               192 sound/soc/intel/skylake/skl-sst-cldma.c 			ctx->cl_dev.dma_buffer_offset, trigger);
ctx               193 sound/soc/intel/skylake/skl-sst-cldma.c 	dev_dbg(ctx->dev, "spib position: %d\n", ctx->cl_dev.curr_spib_pos);
ctx               200 sound/soc/intel/skylake/skl-sst-cldma.c 	if (ctx->cl_dev.dma_buffer_offset + size > ctx->cl_dev.bufsize) {
ctx               201 sound/soc/intel/skylake/skl-sst-cldma.c 		unsigned int size_b = ctx->cl_dev.bufsize -
ctx               202 sound/soc/intel/skylake/skl-sst-cldma.c 					ctx->cl_dev.dma_buffer_offset;
ctx               203 sound/soc/intel/skylake/skl-sst-cldma.c 		memcpy(ctx->cl_dev.dmab_data.area + ctx->cl_dev.dma_buffer_offset,
ctx               207 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.dma_buffer_offset = 0;
ctx               210 sound/soc/intel/skylake/skl-sst-cldma.c 	memcpy(ctx->cl_dev.dmab_data.area + ctx->cl_dev.dma_buffer_offset,
ctx               213 sound/soc/intel/skylake/skl-sst-cldma.c 	if (ctx->cl_dev.curr_spib_pos == ctx->cl_dev.bufsize)
ctx               214 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.dma_buffer_offset = 0;
ctx               216 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.dma_buffer_offset = ctx->cl_dev.curr_spib_pos;
ctx               218 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.wait_condition = false;
ctx               221 sound/soc/intel/skylake/skl-sst-cldma.c 		skl_cldma_int_enable(ctx);
ctx               223 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_spb(ctx, ctx->cl_dev.curr_spib_pos, trigger);
ctx               225 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.ops.cl_trigger(ctx, true);
ctx               244 sound/soc/intel/skylake/skl-sst-cldma.c skl_cldma_copy_to_buf(struct sst_dsp *ctx, const void *bin,
ctx               257 sound/soc/intel/skylake/skl-sst-cldma.c 	dev_dbg(ctx->dev, "%s: Total binary size: %u\n", __func__, bytes_left);
ctx               260 sound/soc/intel/skylake/skl-sst-cldma.c 		if (bytes_left > ctx->cl_dev.bufsize) {
ctx               266 sound/soc/intel/skylake/skl-sst-cldma.c 			if (ctx->cl_dev.curr_spib_pos == 0)
ctx               267 sound/soc/intel/skylake/skl-sst-cldma.c 				ctx->cl_dev.curr_spib_pos = ctx->cl_dev.bufsize;
ctx               269 sound/soc/intel/skylake/skl-sst-cldma.c 			size = ctx->cl_dev.bufsize;
ctx               270 sound/soc/intel/skylake/skl-sst-cldma.c 			skl_cldma_fill_buffer(ctx, size, curr_pos, true, start);
ctx               274 sound/soc/intel/skylake/skl-sst-cldma.c 				ret = skl_cldma_wait_interruptible(ctx);
ctx               276 sound/soc/intel/skylake/skl-sst-cldma.c 					skl_cldma_stop(ctx);
ctx               281 sound/soc/intel/skylake/skl-sst-cldma.c 			skl_cldma_int_disable(ctx);
ctx               283 sound/soc/intel/skylake/skl-sst-cldma.c 			if ((ctx->cl_dev.curr_spib_pos + bytes_left)
ctx               284 sound/soc/intel/skylake/skl-sst-cldma.c 							<= ctx->cl_dev.bufsize) {
ctx               285 sound/soc/intel/skylake/skl-sst-cldma.c 				ctx->cl_dev.curr_spib_pos += bytes_left;
ctx               288 sound/soc/intel/skylake/skl-sst-cldma.c 					(ctx->cl_dev.bufsize -
ctx               289 sound/soc/intel/skylake/skl-sst-cldma.c 					ctx->cl_dev.curr_spib_pos);
ctx               290 sound/soc/intel/skylake/skl-sst-cldma.c 				ctx->cl_dev.curr_spib_pos = excess_bytes;
ctx               294 sound/soc/intel/skylake/skl-sst-cldma.c 			skl_cldma_fill_buffer(ctx, size,
ctx               306 sound/soc/intel/skylake/skl-sst-cldma.c void skl_cldma_process_intr(struct sst_dsp *ctx)
ctx               311 sound/soc/intel/skylake/skl-sst-cldma.c 		sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_CL_SD_STS);
ctx               314 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.wake_status = SKL_CL_DMA_ERR;
ctx               316 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->cl_dev.wake_status = SKL_CL_DMA_BUF_COMPLETE;
ctx               318 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.wait_condition = true;
ctx               319 sound/soc/intel/skylake/skl-sst-cldma.c 	wake_up(&ctx->cl_dev.wait_queue);
ctx               322 sound/soc/intel/skylake/skl-sst-cldma.c int skl_cldma_prepare(struct sst_dsp *ctx)
ctx               327 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.bufsize = SKL_MAX_BUFFER_SIZE;
ctx               330 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_bdle = skl_cldma_setup_bdle;
ctx               331 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_controller = skl_cldma_setup_controller;
ctx               332 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_spb = skl_cldma_setup_spb;
ctx               333 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_cleanup_spb = skl_cldma_cleanup_spb;
ctx               334 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_trigger = skl_cldma_stream_run;
ctx               335 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_cleanup_controller = skl_cldma_cleanup;
ctx               336 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_copy_to_dmabuf = skl_cldma_copy_to_buf;
ctx               337 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_stop_dma = skl_cldma_stop;
ctx               340 sound/soc/intel/skylake/skl-sst-cldma.c 	ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
ctx               341 sound/soc/intel/skylake/skl-sst-cldma.c 			&ctx->cl_dev.dmab_data, ctx->cl_dev.bufsize);
ctx               343 sound/soc/intel/skylake/skl-sst-cldma.c 		dev_err(ctx->dev, "Alloc buffer for base fw failed: %x\n", ret);
ctx               347 sound/soc/intel/skylake/skl-sst-cldma.c 	ret = ctx->dsp_ops.alloc_dma_buf(ctx->dev,
ctx               348 sound/soc/intel/skylake/skl-sst-cldma.c 			&ctx->cl_dev.dmab_bdl, PAGE_SIZE);
ctx               350 sound/soc/intel/skylake/skl-sst-cldma.c 		dev_err(ctx->dev, "Alloc buffer for bdle failed: %x\n", ret);
ctx               351 sound/soc/intel/skylake/skl-sst-cldma.c 		ctx->dsp_ops.free_dma_buf(ctx->dev, &ctx->cl_dev.dmab_data);
ctx               354 sound/soc/intel/skylake/skl-sst-cldma.c 	bdl = (__le32 *)ctx->cl_dev.dmab_bdl.area;
ctx               357 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_bdle(ctx, &ctx->cl_dev.dmab_data,
ctx               358 sound/soc/intel/skylake/skl-sst-cldma.c 			&bdl, ctx->cl_dev.bufsize, 1);
ctx               359 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.ops.cl_setup_controller(ctx, &ctx->cl_dev.dmab_bdl,
ctx               360 sound/soc/intel/skylake/skl-sst-cldma.c 			ctx->cl_dev.bufsize, ctx->cl_dev.frags);
ctx               362 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.curr_spib_pos = 0;
ctx               363 sound/soc/intel/skylake/skl-sst-cldma.c 	ctx->cl_dev.dma_buffer_offset = 0;
ctx               364 sound/soc/intel/skylake/skl-sst-cldma.c 	init_waitqueue_head(&ctx->cl_dev.wait_queue);
ctx               196 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_setup_bdle)(struct sst_dsp *ctx,
ctx               199 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_setup_controller)(struct sst_dsp *ctx,
ctx               202 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_setup_spb)(struct sst_dsp *ctx,
ctx               204 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_cleanup_spb)(struct sst_dsp *ctx);
ctx               205 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_trigger)(struct sst_dsp *ctx, bool enable);
ctx               206 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_cleanup_controller)(struct sst_dsp *ctx);
ctx               207 sound/soc/intel/skylake/skl-sst-cldma.h 	int (*cl_copy_to_dmabuf)(struct sst_dsp *ctx,
ctx               209 sound/soc/intel/skylake/skl-sst-cldma.h 	void (*cl_stop_dma)(struct sst_dsp *ctx);
ctx                22 sound/soc/intel/skylake/skl-sst-dsp.c void skl_dsp_set_state_locked(struct sst_dsp *ctx, int state)
ctx                24 sound/soc/intel/skylake/skl-sst-dsp.c 	mutex_lock(&ctx->mutex);
ctx                25 sound/soc/intel/skylake/skl-sst-dsp.c 	ctx->sst_state = state;
ctx                26 sound/soc/intel/skylake/skl-sst-dsp.c 	mutex_unlock(&ctx->mutex);
ctx                34 sound/soc/intel/skylake/skl-sst-dsp.c void skl_dsp_init_core_state(struct sst_dsp *ctx)
ctx                36 sound/soc/intel/skylake/skl-sst-dsp.c 	struct skl_dev *skl = ctx->thread_context;
ctx                49 sound/soc/intel/skylake/skl-sst-dsp.c unsigned int skl_dsp_get_enabled_cores(struct sst_dsp *ctx)
ctx                51 sound/soc/intel/skylake/skl-sst-dsp.c 	struct skl_dev *skl = ctx->thread_context;
ctx                57 sound/soc/intel/skylake/skl-sst-dsp.c 	val = sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS);
ctx                72 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "DSP enabled cores mask = %x\n", en_cores_mask);
ctx                78 sound/soc/intel/skylake/skl-sst-dsp.c skl_dsp_core_set_reset_state(struct sst_dsp *ctx, unsigned int core_mask)
ctx                83 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx,
ctx                88 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = sst_dsp_register_poll(ctx,
ctx                94 sound/soc/intel/skylake/skl-sst-dsp.c 	if ((sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS) &
ctx                97 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "Set reset state failed: core_mask %x\n",
ctx               106 sound/soc/intel/skylake/skl-sst-dsp.c 		struct sst_dsp *ctx, unsigned int core_mask)
ctx               110 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "In %s\n", __func__);
ctx               113 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPCS,
ctx               117 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = sst_dsp_register_poll(ctx,
ctx               124 sound/soc/intel/skylake/skl-sst-dsp.c 	if ((sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS) &
ctx               126 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "Unset reset state failed: core_mask %x\n",
ctx               135 sound/soc/intel/skylake/skl-sst-dsp.c is_skl_dsp_core_enable(struct sst_dsp *ctx, unsigned int core_mask)
ctx               140 sound/soc/intel/skylake/skl-sst-dsp.c 	val = sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS);
ctx               147 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "DSP core(s) enabled? %d : core_mask %x\n",
ctx               153 sound/soc/intel/skylake/skl-sst-dsp.c static int skl_dsp_reset_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               156 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPCS,
ctx               161 sound/soc/intel/skylake/skl-sst-dsp.c 	return skl_dsp_core_set_reset_state(ctx, core_mask);
ctx               164 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_start_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               169 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = skl_dsp_core_unset_reset_state(ctx, core_mask);
ctx               174 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "unstall/run core: core_mask = %x\n", core_mask);
ctx               175 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPCS,
ctx               178 sound/soc/intel/skylake/skl-sst-dsp.c 	if (!is_skl_dsp_core_enable(ctx, core_mask)) {
ctx               179 sound/soc/intel/skylake/skl-sst-dsp.c 		skl_dsp_reset_core(ctx, core_mask);
ctx               180 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "DSP start core failed: core_mask %x\n",
ctx               188 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_core_power_up(struct sst_dsp *ctx, unsigned int core_mask)
ctx               193 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPCS,
ctx               198 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = sst_dsp_register_poll(ctx,
ctx               205 sound/soc/intel/skylake/skl-sst-dsp.c 	if ((sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPCS) &
ctx               208 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "DSP core power up failed: core_mask %x\n",
ctx               216 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_core_power_down(struct sst_dsp *ctx, unsigned int core_mask)
ctx               219 sound/soc/intel/skylake/skl-sst-dsp.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPCS,
ctx               223 sound/soc/intel/skylake/skl-sst-dsp.c 	return sst_dsp_register_poll(ctx,
ctx               231 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_enable_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               236 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = skl_dsp_core_power_up(ctx, core_mask);
ctx               238 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "dsp core power up failed: core_mask %x\n",
ctx               243 sound/soc/intel/skylake/skl-sst-dsp.c 	return skl_dsp_start_core(ctx, core_mask);
ctx               246 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_disable_core(struct sst_dsp *ctx, unsigned int core_mask)
ctx               250 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = skl_dsp_reset_core(ctx, core_mask);
ctx               252 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "dsp core reset failed: core_mask %x\n",
ctx               258 sound/soc/intel/skylake/skl-sst-dsp.c 	ret = skl_dsp_core_power_down(ctx, core_mask);
ctx               260 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "dsp core power down fail mask %x: %d\n",
ctx               265 sound/soc/intel/skylake/skl-sst-dsp.c 	if (is_skl_dsp_core_enable(ctx, core_mask)) {
ctx               266 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "dsp core disable fail mask %x: %d\n",
ctx               274 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_boot(struct sst_dsp *ctx)
ctx               278 sound/soc/intel/skylake/skl-sst-dsp.c 	if (is_skl_dsp_core_enable(ctx, SKL_DSP_CORE0_MASK)) {
ctx               279 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = skl_dsp_reset_core(ctx, SKL_DSP_CORE0_MASK);
ctx               281 sound/soc/intel/skylake/skl-sst-dsp.c 			dev_err(ctx->dev, "dsp core0 reset fail: %d\n", ret);
ctx               285 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = skl_dsp_start_core(ctx, SKL_DSP_CORE0_MASK);
ctx               287 sound/soc/intel/skylake/skl-sst-dsp.c 			dev_err(ctx->dev, "dsp core0 start fail: %d\n", ret);
ctx               291 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               293 sound/soc/intel/skylake/skl-sst-dsp.c 			dev_err(ctx->dev, "dsp core0 disable fail: %d\n", ret);
ctx               296 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = skl_dsp_enable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               304 sound/soc/intel/skylake/skl-sst-dsp.c 	struct sst_dsp *ctx = dev_id;
ctx               308 sound/soc/intel/skylake/skl-sst-dsp.c 	spin_lock(&ctx->spinlock);
ctx               310 sound/soc/intel/skylake/skl-sst-dsp.c 	val = sst_dsp_shim_read_unlocked(ctx, SKL_ADSP_REG_ADSPIS);
ctx               311 sound/soc/intel/skylake/skl-sst-dsp.c 	ctx->intr_status = val;
ctx               314 sound/soc/intel/skylake/skl-sst-dsp.c 		spin_unlock(&ctx->spinlock);
ctx               319 sound/soc/intel/skylake/skl-sst-dsp.c 		skl_ipc_int_disable(ctx);
ctx               324 sound/soc/intel/skylake/skl-sst-dsp.c 		skl_cldma_int_disable(ctx);
ctx               328 sound/soc/intel/skylake/skl-sst-dsp.c 	spin_unlock(&ctx->spinlock);
ctx               336 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_get_core(struct sst_dsp *ctx, unsigned int core_id)
ctx               338 sound/soc/intel/skylake/skl-sst-dsp.c 	struct skl_dev *skl = ctx->thread_context;
ctx               342 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "invalid core id: %d\n", core_id);
ctx               349 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = ctx->fw_ops.set_state_D0(ctx, core_id);
ctx               351 sound/soc/intel/skylake/skl-sst-dsp.c 			dev_err(ctx->dev, "unable to get core%d\n", core_id);
ctx               357 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "core id %d state %d usage_count %d\n",
ctx               365 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_put_core(struct sst_dsp *ctx, unsigned int core_id)
ctx               367 sound/soc/intel/skylake/skl-sst-dsp.c 	struct skl_dev *skl = ctx->thread_context;
ctx               371 sound/soc/intel/skylake/skl-sst-dsp.c 		dev_err(ctx->dev, "invalid core id: %d\n", core_id);
ctx               377 sound/soc/intel/skylake/skl-sst-dsp.c 		ret = ctx->fw_ops.set_state_D3(ctx, core_id);
ctx               379 sound/soc/intel/skylake/skl-sst-dsp.c 			dev_err(ctx->dev, "unable to put core %d: %d\n",
ctx               385 sound/soc/intel/skylake/skl-sst-dsp.c 	dev_dbg(ctx->dev, "core id %d state %d usage_count %d\n",
ctx               393 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_wake(struct sst_dsp *ctx)
ctx               395 sound/soc/intel/skylake/skl-sst-dsp.c 	return skl_dsp_get_core(ctx, SKL_DSP_CORE0_ID);
ctx               399 sound/soc/intel/skylake/skl-sst-dsp.c int skl_dsp_sleep(struct sst_dsp *ctx)
ctx               401 sound/soc/intel/skylake/skl-sst-dsp.c 	return skl_dsp_put_core(ctx, SKL_DSP_CORE0_ID);
ctx               458 sound/soc/intel/skylake/skl-sst-dsp.c bool is_skl_dsp_running(struct sst_dsp *ctx)
ctx               460 sound/soc/intel/skylake/skl-sst-dsp.c 	return (ctx->sst_state == SKL_DSP_RUNNING);
ctx               138 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*load_fw)(struct sst_dsp *ctx);
ctx               140 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*load_library)(struct sst_dsp *ctx,
ctx               142 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*parse_fw)(struct sst_dsp *ctx);
ctx               143 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*set_state_D0)(struct sst_dsp *ctx, unsigned int core_id);
ctx               144 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*set_state_D3)(struct sst_dsp *ctx, unsigned int core_id);
ctx               145 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*set_state_D0i3)(struct sst_dsp *ctx);
ctx               146 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*set_state_D0i0)(struct sst_dsp *ctx);
ctx               147 sound/soc/intel/skylake/skl-sst-dsp.h 	unsigned int (*get_fw_errcode)(struct sst_dsp *ctx);
ctx               148 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*load_mod)(struct sst_dsp *ctx, u16 mod_id, u8 *mod_name);
ctx               149 sound/soc/intel/skylake/skl-sst-dsp.h 	int (*unload_mod)(struct sst_dsp *ctx, u16 mod_id);
ctx               193 sound/soc/intel/skylake/skl-sst-dsp.h void skl_cldma_process_intr(struct sst_dsp *ctx);
ctx               194 sound/soc/intel/skylake/skl-sst-dsp.h void skl_cldma_int_disable(struct sst_dsp *ctx);
ctx               195 sound/soc/intel/skylake/skl-sst-dsp.h int skl_cldma_prepare(struct sst_dsp *ctx);
ctx               196 sound/soc/intel/skylake/skl-sst-dsp.h int skl_cldma_wait_interruptible(struct sst_dsp *ctx);
ctx               198 sound/soc/intel/skylake/skl-sst-dsp.h void skl_dsp_set_state_locked(struct sst_dsp *ctx, int state);
ctx               202 sound/soc/intel/skylake/skl-sst-dsp.h bool is_skl_dsp_running(struct sst_dsp *ctx);
ctx               204 sound/soc/intel/skylake/skl-sst-dsp.h unsigned int skl_dsp_get_enabled_cores(struct sst_dsp *ctx);
ctx               205 sound/soc/intel/skylake/skl-sst-dsp.h void skl_dsp_init_core_state(struct sst_dsp *ctx);
ctx               206 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_enable_core(struct sst_dsp *ctx, unsigned int core_mask);
ctx               207 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_disable_core(struct sst_dsp *ctx, unsigned int core_mask);
ctx               208 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_core_power_up(struct sst_dsp *ctx, unsigned int core_mask);
ctx               209 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_core_power_down(struct sst_dsp *ctx, unsigned int core_mask);
ctx               210 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_core_unset_reset_state(struct sst_dsp *ctx,
ctx               212 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_start_core(struct sst_dsp *ctx, unsigned int core_mask);
ctx               215 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_wake(struct sst_dsp *ctx);
ctx               216 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_sleep(struct sst_dsp *ctx);
ctx               219 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_get_core(struct sst_dsp *ctx, unsigned int core_id);
ctx               220 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_put_core(struct sst_dsp *ctx, unsigned int core_id);
ctx               222 sound/soc/intel/skylake/skl-sst-dsp.h int skl_dsp_boot(struct sst_dsp *ctx);
ctx               234 sound/soc/intel/skylake/skl-sst-dsp.h int snd_skl_parse_uuids(struct sst_dsp *ctx, const struct firmware *fw,
ctx               558 sound/soc/intel/skylake/skl-sst-ipc.c void skl_ipc_int_enable(struct sst_dsp *ctx)
ctx               560 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_ADSPIC,
ctx               564 sound/soc/intel/skylake/skl-sst-ipc.c void skl_ipc_int_disable(struct sst_dsp *ctx)
ctx               566 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_ADSPIC,
ctx               570 sound/soc/intel/skylake/skl-sst-ipc.c void skl_ipc_op_int_enable(struct sst_dsp *ctx)
ctx               573 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_HIPCCTL,
ctx               577 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits(ctx, SKL_ADSP_REG_HIPCCTL,
ctx               581 sound/soc/intel/skylake/skl-sst-ipc.c void skl_ipc_op_int_disable(struct sst_dsp *ctx)
ctx               584 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_HIPCCTL,
ctx               588 sound/soc/intel/skylake/skl-sst-ipc.c 	sst_dsp_shim_update_bits_unlocked(ctx, SKL_ADSP_REG_HIPCCTL,
ctx               593 sound/soc/intel/skylake/skl-sst-ipc.c bool skl_ipc_int_status(struct sst_dsp *ctx)
ctx               595 sound/soc/intel/skylake/skl-sst-ipc.c 	return sst_dsp_shim_read_unlocked(ctx,
ctx               154 sound/soc/intel/skylake/skl-sst-ipc.h void skl_ipc_op_int_enable(struct sst_dsp *ctx);
ctx               155 sound/soc/intel/skylake/skl-sst-ipc.h void skl_ipc_op_int_disable(struct sst_dsp *ctx);
ctx               161 sound/soc/intel/skylake/skl-sst-ipc.h void skl_clear_module_cnt(struct sst_dsp *ctx);
ctx               229 sound/soc/intel/skylake/skl-sst-utils.c int snd_skl_parse_uuids(struct sst_dsp *ctx, const struct firmware *fw,
ctx               236 sound/soc/intel/skylake/skl-sst-utils.c 	struct skl_dev *skl = ctx->thread_context;
ctx               253 sound/soc/intel/skylake/skl-sst-utils.c 		dev_err(ctx->dev, "Small fw file size, no space for hdr\n");
ctx               262 sound/soc/intel/skylake/skl-sst-utils.c 		dev_err(ctx->dev, "Small fw file size, no module entry\n");
ctx               273 sound/soc/intel/skylake/skl-sst-utils.c 		dev_err(ctx->dev, "Small fw file size, no modules\n");
ctx               299 sound/soc/intel/skylake/skl-sst-utils.c 		module->instance_id = devm_kzalloc(ctx->dev, size, GFP_KERNEL);
ctx               307 sound/soc/intel/skylake/skl-sst-utils.c 		dev_dbg(ctx->dev,
ctx                33 sound/soc/intel/skylake/skl-sst.c static bool skl_check_fw_status(struct sst_dsp *ctx, u32 status)
ctx                37 sound/soc/intel/skylake/skl-sst.c 	cur_sts = sst_dsp_shim_read(ctx, SKL_ADSP_FW_STATUS) & SKL_FW_STS_MASK;
ctx                42 sound/soc/intel/skylake/skl-sst.c static int skl_transfer_firmware(struct sst_dsp *ctx,
ctx                47 sound/soc/intel/skylake/skl-sst.c 	ret = ctx->cl_dev.ops.cl_copy_to_dmabuf(ctx, basefw, base_fw_size,
ctx                52 sound/soc/intel/skylake/skl-sst.c 	ret = sst_dsp_register_poll(ctx,
ctx                59 sound/soc/intel/skylake/skl-sst.c 	ctx->cl_dev.ops.cl_stop_dma(ctx);
ctx                66 sound/soc/intel/skylake/skl-sst.c static int skl_load_base_firmware(struct sst_dsp *ctx)
ctx                69 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx                76 sound/soc/intel/skylake/skl-sst.c 	if (ctx->fw == NULL) {
ctx                77 sound/soc/intel/skylake/skl-sst.c 		ret = request_firmware(&ctx->fw, ctx->fw_name, ctx->dev);
ctx                79 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "Request firmware failed %d\n", ret);
ctx                86 sound/soc/intel/skylake/skl-sst.c 		ret = snd_skl_parse_uuids(ctx, ctx->fw, SKL_ADSP_FW_BIN_HDR_OFFSET, 0);
ctx                88 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "UUID parsing err: %d\n", ret);
ctx                89 sound/soc/intel/skylake/skl-sst.c 			release_firmware(ctx->fw);
ctx                90 sound/soc/intel/skylake/skl-sst.c 			skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx                96 sound/soc/intel/skylake/skl-sst.c 	stripped_fw.data = ctx->fw->data;
ctx                97 sound/soc/intel/skylake/skl-sst.c 	stripped_fw.size = ctx->fw->size;
ctx               101 sound/soc/intel/skylake/skl-sst.c 	ret = skl_dsp_boot(ctx);
ctx               103 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Boot dsp core failed ret: %d\n", ret);
ctx               107 sound/soc/intel/skylake/skl-sst.c 	ret = skl_cldma_prepare(ctx);
ctx               109 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "CL dma prepare failed: %d\n", ret);
ctx               114 sound/soc/intel/skylake/skl-sst.c 	skl_ipc_int_enable(ctx);
ctx               115 sound/soc/intel/skylake/skl-sst.c 	skl_ipc_op_int_enable(ctx);
ctx               119 sound/soc/intel/skylake/skl-sst.c 		if (skl_check_fw_status(ctx, SKL_FW_INIT)) {
ctx               120 sound/soc/intel/skylake/skl-sst.c 			dev_dbg(ctx->dev,
ctx               127 sound/soc/intel/skylake/skl-sst.c 		reg = sst_dsp_shim_read(ctx, SKL_ADSP_FW_STATUS);
ctx               128 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev,
ctx               134 sound/soc/intel/skylake/skl-sst.c 	ret = skl_transfer_firmware(ctx, stripped_fw.data, stripped_fw.size);
ctx               136 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Transfer firmware failed: %d\n", ret);
ctx               142 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "DSP boot failed, FW Ready timed-out\n");
ctx               147 sound/soc/intel/skylake/skl-sst.c 		dev_dbg(ctx->dev, "Download firmware successful: %d\n", ret);
ctx               152 sound/soc/intel/skylake/skl-sst.c 	ctx->cl_dev.ops.cl_cleanup_controller(ctx);
ctx               154 sound/soc/intel/skylake/skl-sst.c 	skl_dsp_disable_core(ctx, SKL_DSP_CORE0_MASK);
ctx               155 sound/soc/intel/skylake/skl-sst.c 	release_firmware(ctx->fw);
ctx               156 sound/soc/intel/skylake/skl-sst.c 	ctx->fw = NULL;
ctx               160 sound/soc/intel/skylake/skl-sst.c static int skl_set_dsp_D0(struct sst_dsp *ctx, unsigned int core_id)
ctx               164 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               169 sound/soc/intel/skylake/skl-sst.c 		ret = skl_load_base_firmware(ctx);
ctx               171 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "unable to load firmware\n");
ctx               177 sound/soc/intel/skylake/skl-sst.c 			ret = ctx->fw_ops.load_library(ctx, skl->lib_info,
ctx               180 sound/soc/intel/skylake/skl-sst.c 				dev_err(ctx->dev, "reload libs failed: %d\n",
ctx               193 sound/soc/intel/skylake/skl-sst.c 		ret = skl_dsp_enable_core(ctx, core_mask);
ctx               203 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "Failed to set dsp to D0:core id= %d\n",
ctx               205 sound/soc/intel/skylake/skl-sst.c 			skl_dsp_disable_core(ctx, core_mask);
ctx               214 sound/soc/intel/skylake/skl-sst.c static int skl_set_dsp_D3(struct sst_dsp *ctx, unsigned int core_id)
ctx               218 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               226 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "set Dx core %d fail: %d\n", core_id, ret);
ctx               230 sound/soc/intel/skylake/skl-sst.c 		ctx->cl_dev.ops.cl_cleanup_controller(ctx);
ctx               231 sound/soc/intel/skylake/skl-sst.c 		skl_cldma_int_disable(ctx);
ctx               232 sound/soc/intel/skylake/skl-sst.c 		skl_ipc_op_int_disable(ctx);
ctx               233 sound/soc/intel/skylake/skl-sst.c 		skl_ipc_int_disable(ctx);
ctx               236 sound/soc/intel/skylake/skl-sst.c 	ret = skl_dsp_disable_core(ctx, core_mask);
ctx               244 sound/soc/intel/skylake/skl-sst.c static unsigned int skl_get_errorcode(struct sst_dsp *ctx)
ctx               246 sound/soc/intel/skylake/skl-sst.c 	return sst_dsp_shim_read(ctx, SKL_ADSP_ERROR_CODE);
ctx               253 sound/soc/intel/skylake/skl-sst.c static int skl_get_module(struct sst_dsp *ctx, u16 mod_id)
ctx               257 sound/soc/intel/skylake/skl-sst.c 	list_for_each_entry(module, &ctx->module_list, list) {
ctx               265 sound/soc/intel/skylake/skl-sst.c static int skl_put_module(struct sst_dsp *ctx, u16 mod_id)
ctx               269 sound/soc/intel/skylake/skl-sst.c 	list_for_each_entry(module, &ctx->module_list, list) {
ctx               277 sound/soc/intel/skylake/skl-sst.c static struct skl_module_table *skl_fill_module_table(struct sst_dsp *ctx,
ctx               285 sound/soc/intel/skylake/skl-sst.c 	ret = request_firmware(&fw, mod_name, ctx->dev);
ctx               287 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Request Module %s failed: %d\n",
ctx               292 sound/soc/intel/skylake/skl-sst.c 	skl_module = devm_kzalloc(ctx->dev, sizeof(*skl_module), GFP_KERNEL);
ctx               299 sound/soc/intel/skylake/skl-sst.c 	skl_module->mod_info = devm_kzalloc(ctx->dev, size, GFP_KERNEL);
ctx               307 sound/soc/intel/skylake/skl-sst.c 	list_add(&skl_module->list, &ctx->module_list);
ctx               314 sound/soc/intel/skylake/skl-sst.c 			struct sst_dsp *ctx, u16 mod_id)
ctx               318 sound/soc/intel/skylake/skl-sst.c 	if (list_empty(&ctx->module_list)) {
ctx               319 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Module list is empty\n");
ctx               323 sound/soc/intel/skylake/skl-sst.c 	list_for_each_entry(module, &ctx->module_list, list) {
ctx               331 sound/soc/intel/skylake/skl-sst.c static int skl_transfer_module(struct sst_dsp *ctx, const void *data,
ctx               335 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               338 sound/soc/intel/skylake/skl-sst.c 	bytes_left = ctx->cl_dev.ops.cl_copy_to_dmabuf(ctx, data, size, false);
ctx               349 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Failed to Load %s with err %d\n",
ctx               362 sound/soc/intel/skylake/skl-sst.c 		ret = skl_cldma_wait_interruptible(ctx);
ctx               366 sound/soc/intel/skylake/skl-sst.c 		bytes_left = ctx->cl_dev.ops.cl_copy_to_dmabuf(ctx,
ctx               374 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Module Load failed\n");
ctx               379 sound/soc/intel/skylake/skl-sst.c 	ctx->cl_dev.ops.cl_stop_dma(ctx);
ctx               385 sound/soc/intel/skylake/skl-sst.c skl_load_library(struct sst_dsp *ctx, struct skl_lib_info *linfo, int lib_count)
ctx               387 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               397 sound/soc/intel/skylake/skl-sst.c 		ret = skl_transfer_module(ctx, stripped_fw.data,
ctx               410 sound/soc/intel/skylake/skl-sst.c static int skl_load_module(struct sst_dsp *ctx, u16 mod_id, u8 *guid)
ctx               418 sound/soc/intel/skylake/skl-sst.c 	module_entry = skl_module_get_from_id(ctx, mod_id);
ctx               420 sound/soc/intel/skylake/skl-sst.c 		module_entry = skl_fill_module_table(ctx, mod_name, mod_id);
ctx               422 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "Failed to Load module\n");
ctx               428 sound/soc/intel/skylake/skl-sst.c 		ret = skl_transfer_module(ctx, module_entry->mod_info->fw->data,
ctx               432 sound/soc/intel/skylake/skl-sst.c 			dev_err(ctx->dev, "Failed to Load module\n");
ctx               437 sound/soc/intel/skylake/skl-sst.c 	ret = skl_get_module(ctx, mod_id);
ctx               442 sound/soc/intel/skylake/skl-sst.c static int skl_unload_module(struct sst_dsp *ctx, u16 mod_id)
ctx               445 sound/soc/intel/skylake/skl-sst.c 	struct skl_dev *skl = ctx->thread_context;
ctx               448 sound/soc/intel/skylake/skl-sst.c 	usage_cnt = skl_put_module(ctx, mod_id);
ctx               450 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Module bad usage cnt!:%d\n", usage_cnt);
ctx               461 sound/soc/intel/skylake/skl-sst.c 		dev_err(ctx->dev, "Failed to unload module\n");
ctx               462 sound/soc/intel/skylake/skl-sst.c 		skl_get_module(ctx, mod_id);
ctx               469 sound/soc/intel/skylake/skl-sst.c void skl_clear_module_cnt(struct sst_dsp *ctx)
ctx               473 sound/soc/intel/skylake/skl-sst.c 	if (list_empty(&ctx->module_list))
ctx               476 sound/soc/intel/skylake/skl-sst.c 	list_for_each_entry(module, &ctx->module_list, list) {
ctx               482 sound/soc/intel/skylake/skl-sst.c static void skl_clear_module_table(struct sst_dsp *ctx)
ctx               486 sound/soc/intel/skylake/skl-sst.c 	if (list_empty(&ctx->module_list))
ctx               489 sound/soc/intel/skylake/skl-sst.c 	list_for_each_entry_safe(module, tmp, &ctx->module_list, list) {
ctx                77 sound/soc/soc-acpi.c 				struct snd_soc_acpi_package_context *ctx)
ctx                81 sound/soc/soc-acpi.c 	status = acpi_get_devices(hid, snd_soc_acpi_find_package, ctx, NULL);
ctx                83 sound/soc/soc-acpi.c 	if (ACPI_FAILURE(status) || !ctx->data_valid)
ctx                58 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = arg;
ctx                60 sound/soc/xilinx/xlnx_spdif.c 	val = readl(ctx->base + XSPDIF_IRQ_STS_REG);
ctx                63 sound/soc/xilinx/xlnx_spdif.c 		       ctx->base + XSPDIF_IRQ_STS_REG);
ctx                64 sound/soc/xilinx/xlnx_spdif.c 		val = readl(ctx->base +
ctx                67 sound/soc/xilinx/xlnx_spdif.c 		       ctx->base + XSPDIF_IRQ_ENABLE_REG);
ctx                69 sound/soc/xilinx/xlnx_spdif.c 		ctx->rx_chsts_updated = true;
ctx                70 sound/soc/xilinx/xlnx_spdif.c 		wake_up_interruptible(&ctx->chsts_q);
ctx                81 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(dai->dev);
ctx                83 sound/soc/xilinx/xlnx_spdif.c 	val = readl(ctx->base + XSPDIF_CONTROL_REG);
ctx                85 sound/soc/xilinx/xlnx_spdif.c 	writel(val, ctx->base + XSPDIF_CONTROL_REG);
ctx                89 sound/soc/xilinx/xlnx_spdif.c 		       ctx->base + XSPDIF_IRQ_ENABLE_REG);
ctx                91 sound/soc/xilinx/xlnx_spdif.c 		       ctx->base + XSPDIF_GLOBAL_IRQ_ENABLE_REG);
ctx               100 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(dai->dev);
ctx               102 sound/soc/xilinx/xlnx_spdif.c 	writel(XSPDIF_SOFT_RESET_VALUE, ctx->base + XSPDIF_SOFT_RESET_REG);
ctx               110 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(dai->dev);
ctx               112 sound/soc/xilinx/xlnx_spdif.c 	clk_div = DIV_ROUND_CLOSEST(ctx->aclk, MAX_CHANNELS * AES_SAMPLE_WIDTH *
ctx               141 sound/soc/xilinx/xlnx_spdif.c 	val = readl(ctx->base + XSPDIF_CONTROL_REG);
ctx               144 sound/soc/xilinx/xlnx_spdif.c 	writel(val, ctx->base + XSPDIF_CONTROL_REG);
ctx               152 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(dai->dev);
ctx               156 sound/soc/xilinx/xlnx_spdif.c 	err = wait_event_interruptible_timeout(ctx->chsts_q,
ctx               157 sound/soc/xilinx/xlnx_spdif.c 					       ctx->rx_chsts_updated,
ctx               163 sound/soc/xilinx/xlnx_spdif.c 	ctx->rx_chsts_updated = false;
ctx               173 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(dai->dev);
ctx               175 sound/soc/xilinx/xlnx_spdif.c 	val = readl(ctx->base + XSPDIF_CONTROL_REG);
ctx               181 sound/soc/xilinx/xlnx_spdif.c 		writel(val, ctx->base + XSPDIF_CONTROL_REG);
ctx               189 sound/soc/xilinx/xlnx_spdif.c 		writel(val, ctx->base + XSPDIF_CONTROL_REG);
ctx               242 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx;
ctx               247 sound/soc/xilinx/xlnx_spdif.c 	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
ctx               248 sound/soc/xilinx/xlnx_spdif.c 	if (!ctx)
ctx               251 sound/soc/xilinx/xlnx_spdif.c 	ctx->axi_clk = devm_clk_get(dev, "s_axi_aclk");
ctx               252 sound/soc/xilinx/xlnx_spdif.c 	if (IS_ERR(ctx->axi_clk)) {
ctx               253 sound/soc/xilinx/xlnx_spdif.c 		ret = PTR_ERR(ctx->axi_clk);
ctx               257 sound/soc/xilinx/xlnx_spdif.c 	ret = clk_prepare_enable(ctx->axi_clk);
ctx               263 sound/soc/xilinx/xlnx_spdif.c 	ctx->base = devm_platform_ioremap_resource(pdev, 0);
ctx               264 sound/soc/xilinx/xlnx_spdif.c 	if (IS_ERR(ctx->base)) {
ctx               265 sound/soc/xilinx/xlnx_spdif.c 		ret = PTR_ERR(ctx->base);
ctx               268 sound/soc/xilinx/xlnx_spdif.c 	ret = of_property_read_u32(node, "xlnx,spdif-mode", &ctx->mode);
ctx               273 sound/soc/xilinx/xlnx_spdif.c 	if (ctx->mode) {
ctx               284 sound/soc/xilinx/xlnx_spdif.c 				       0, "XLNX_SPDIF_RX", ctx);
ctx               291 sound/soc/xilinx/xlnx_spdif.c 		init_waitqueue_head(&ctx->chsts_q);
ctx               295 sound/soc/xilinx/xlnx_spdif.c 	ret = of_property_read_u32(node, "xlnx,aud_clk_i", &ctx->aclk);
ctx               301 sound/soc/xilinx/xlnx_spdif.c 	dev_set_drvdata(dev, ctx);
ctx               310 sound/soc/xilinx/xlnx_spdif.c 	writel(XSPDIF_SOFT_RESET_VALUE, ctx->base + XSPDIF_SOFT_RESET_REG);
ctx               314 sound/soc/xilinx/xlnx_spdif.c 	clk_disable_unprepare(ctx->axi_clk);
ctx               320 sound/soc/xilinx/xlnx_spdif.c 	struct spdif_dev_data *ctx = dev_get_drvdata(&pdev->dev);
ctx               322 sound/soc/xilinx/xlnx_spdif.c 	clk_disable_unprepare(ctx->axi_clk);
ctx               174 sound/usb/endpoint.c 			       struct snd_urb_ctx *ctx)
ctx               176 sound/usb/endpoint.c 	struct urb *urb = ctx->urb;
ctx               186 sound/usb/endpoint.c 	for (i = 0; i < ctx->packets; ++i) {
ctx               191 sound/usb/endpoint.c 		if (ctx->packet_size[i])
ctx               192 sound/usb/endpoint.c 			counts = ctx->packet_size[i];
ctx               210 sound/usb/endpoint.c 	urb->number_of_packets = ctx->packets;
ctx               211 sound/usb/endpoint.c 	urb->transfer_buffer_length = offs * ep->stride + ctx->packets * extra;
ctx               218 sound/usb/endpoint.c 				 struct snd_urb_ctx *ctx)
ctx               220 sound/usb/endpoint.c 	struct urb *urb = ctx->urb;
ctx               231 sound/usb/endpoint.c 			prepare_silent_urb(ep, ctx);
ctx               314 sound/usb/endpoint.c 		struct snd_urb_ctx *ctx = NULL;
ctx               325 sound/usb/endpoint.c 				ctx = list_first_entry(&ep->ready_playback_urbs,
ctx               330 sound/usb/endpoint.c 		if (ctx == NULL)
ctx               333 sound/usb/endpoint.c 		list_del_init(&ctx->ready_list);
ctx               337 sound/usb/endpoint.c 			ctx->packet_size[i] = packet->packet_size[i];
ctx               340 sound/usb/endpoint.c 		prepare_outbound_urb(ep, ctx);
ctx               342 sound/usb/endpoint.c 		err = usb_submit_urb(ctx->urb, GFP_ATOMIC);
ctx               346 sound/usb/endpoint.c 				ctx->index, err, ctx->urb);
ctx               348 sound/usb/endpoint.c 			set_bit(ctx->index, &ep->active_mask);
ctx               357 sound/usb/endpoint.c 	struct snd_urb_ctx *ctx = urb->context;
ctx               358 sound/usb/endpoint.c 	struct snd_usb_endpoint *ep = ctx->ep;
ctx               376 sound/usb/endpoint.c 		retire_outbound_urb(ep, ctx);
ctx               383 sound/usb/endpoint.c 			list_add_tail(&ctx->ready_list, &ep->ready_playback_urbs);
ctx               390 sound/usb/endpoint.c 		prepare_outbound_urb(ep, ctx);
ctx               395 sound/usb/endpoint.c 		retire_inbound_urb(ep, ctx);
ctx               400 sound/usb/endpoint.c 		prepare_inbound_urb(ep, ctx);
ctx               414 sound/usb/endpoint.c 	clear_bit(ctx->index, &ep->active_mask);
ctx               953 sound/usb/endpoint.c 			struct snd_urb_ctx *ctx = ep->urb + i;
ctx               954 sound/usb/endpoint.c 			list_add_tail(&ctx->ready_list, &ep->ready_playback_urbs);
ctx              1564 sound/usb/pcm.c 	struct snd_urb_ctx *ctx = urb->context;
ctx              1575 sound/usb/pcm.c 	for (i = 0; i < ctx->packets; i++) {
ctx              1576 sound/usb/pcm.c 		if (ctx->packet_size[i])
ctx              1577 sound/usb/pcm.c 			counts = ctx->packet_size[i];
ctx              1602 sound/usb/pcm.c 				if (i < ctx->packets) {
ctx               218 sound/x86/intel_hdmi_audio.c static void had_read_register(struct snd_intelhad *ctx, u32 reg, u32 *val)
ctx               220 sound/x86/intel_hdmi_audio.c 	if (!ctx->connected)
ctx               223 sound/x86/intel_hdmi_audio.c 		*val = had_read_register_raw(ctx->card_ctx, ctx->pipe, reg);
ctx               226 sound/x86/intel_hdmi_audio.c static void had_write_register(struct snd_intelhad *ctx, u32 reg, u32 val)
ctx               228 sound/x86/intel_hdmi_audio.c 	if (ctx->connected)
ctx               229 sound/x86/intel_hdmi_audio.c 		had_write_register_raw(ctx->card_ctx, ctx->pipe, reg, val);
ctx               257 sound/x86/intel_hdmi_audio.c static void had_ack_irqs(struct snd_intelhad *ctx)
ctx               261 sound/x86/intel_hdmi_audio.c 	if (!ctx->connected)
ctx               263 sound/x86/intel_hdmi_audio.c 	had_read_register(ctx, AUD_HDMI_STATUS, &status_reg);
ctx               265 sound/x86/intel_hdmi_audio.c 	had_write_register(ctx, AUD_HDMI_STATUS, status_reg);
ctx               266 sound/x86/intel_hdmi_audio.c 	had_read_register(ctx, AUD_HDMI_STATUS, &status_reg);
ctx              1532 sound/x86/intel_hdmi_audio.c 		struct snd_intelhad *ctx = &card_ctx->pcm_ctx[port];
ctx              1533 sound/x86/intel_hdmi_audio.c 		int pipe = ctx->pipe;
ctx              1539 sound/x86/intel_hdmi_audio.c 			had_process_buffer_done(ctx);
ctx              1541 sound/x86/intel_hdmi_audio.c 			had_process_buffer_underrun(ctx);
ctx              1553 sound/x86/intel_hdmi_audio.c 	struct snd_intelhad *ctx;
ctx              1555 sound/x86/intel_hdmi_audio.c 	ctx = &card_ctx->pcm_ctx[single_port ? 0 : port];
ctx              1557 sound/x86/intel_hdmi_audio.c 		ctx->port = port;
ctx              1559 sound/x86/intel_hdmi_audio.c 	schedule_work(&ctx->hdmi_audio_wq);
ctx              1565 sound/x86/intel_hdmi_audio.c 	struct snd_intelhad *ctx =
ctx              1567 sound/x86/intel_hdmi_audio.c 	struct intel_hdmi_lpe_audio_pdata *pdata = ctx->dev->platform_data;
ctx              1568 sound/x86/intel_hdmi_audio.c 	struct intel_hdmi_lpe_audio_port_pdata *ppdata = &pdata->port[ctx->port];
ctx              1570 sound/x86/intel_hdmi_audio.c 	pm_runtime_get_sync(ctx->dev);
ctx              1571 sound/x86/intel_hdmi_audio.c 	mutex_lock(&ctx->mutex);
ctx              1573 sound/x86/intel_hdmi_audio.c 		dev_dbg(ctx->dev, "%s: Event: HAD_NOTIFY_HOT_UNPLUG : port = %d\n",
ctx              1574 sound/x86/intel_hdmi_audio.c 			__func__, ctx->port);
ctx              1576 sound/x86/intel_hdmi_audio.c 		memset(ctx->eld, 0, sizeof(ctx->eld)); /* clear the old ELD */
ctx              1578 sound/x86/intel_hdmi_audio.c 		ctx->dp_output = false;
ctx              1579 sound/x86/intel_hdmi_audio.c 		ctx->tmds_clock_speed = 0;
ctx              1580 sound/x86/intel_hdmi_audio.c 		ctx->link_rate = 0;
ctx              1583 sound/x86/intel_hdmi_audio.c 		had_process_hot_unplug(ctx);
ctx              1585 sound/x86/intel_hdmi_audio.c 		ctx->pipe = -1;
ctx              1587 sound/x86/intel_hdmi_audio.c 		dev_dbg(ctx->dev, "%s: HAD_NOTIFY_ELD : port = %d, tmds = %d\n",
ctx              1588 sound/x86/intel_hdmi_audio.c 			__func__, ctx->port, ppdata->ls_clock);
ctx              1590 sound/x86/intel_hdmi_audio.c 		memcpy(ctx->eld, ppdata->eld, sizeof(ctx->eld));
ctx              1592 sound/x86/intel_hdmi_audio.c 		ctx->dp_output = ppdata->dp_output;
ctx              1593 sound/x86/intel_hdmi_audio.c 		if (ctx->dp_output) {
ctx              1594 sound/x86/intel_hdmi_audio.c 			ctx->tmds_clock_speed = 0;
ctx              1595 sound/x86/intel_hdmi_audio.c 			ctx->link_rate = ppdata->ls_clock;
ctx              1597 sound/x86/intel_hdmi_audio.c 			ctx->tmds_clock_speed = ppdata->ls_clock;
ctx              1598 sound/x86/intel_hdmi_audio.c 			ctx->link_rate = 0;
ctx              1605 sound/x86/intel_hdmi_audio.c 		had_process_hot_plug(ctx);
ctx              1607 sound/x86/intel_hdmi_audio.c 		ctx->pipe = ppdata->pipe;
ctx              1610 sound/x86/intel_hdmi_audio.c 		had_process_mode_change(ctx);
ctx              1613 sound/x86/intel_hdmi_audio.c 	mutex_unlock(&ctx->mutex);
ctx              1614 sound/x86/intel_hdmi_audio.c 	pm_runtime_mark_last_busy(ctx->dev);
ctx              1615 sound/x86/intel_hdmi_audio.c 	pm_runtime_put_autosuspend(ctx->dev);
ctx              1621 sound/x86/intel_hdmi_audio.c static int had_create_jack(struct snd_intelhad *ctx,
ctx              1630 sound/x86/intel_hdmi_audio.c 	err = snd_jack_new(ctx->card_ctx->card, hdmi_str,
ctx              1631 sound/x86/intel_hdmi_audio.c 			   SND_JACK_AVOUT, &ctx->jack,
ctx              1635 sound/x86/intel_hdmi_audio.c 	ctx->jack->private_data = ctx;
ctx              1675 sound/x86/intel_hdmi_audio.c 		struct snd_intelhad *ctx = &card_ctx->pcm_ctx[port];
ctx              1677 sound/x86/intel_hdmi_audio.c 		cancel_work_sync(&ctx->hdmi_audio_wq);
ctx              1696 sound/x86/intel_hdmi_audio.c 	struct snd_intelhad *ctx;
ctx              1745 sound/x86/intel_hdmi_audio.c 		ctx = &card_ctx->pcm_ctx[port];
ctx              1746 sound/x86/intel_hdmi_audio.c 		ctx->card_ctx = card_ctx;
ctx              1747 sound/x86/intel_hdmi_audio.c 		ctx->dev = card_ctx->dev;
ctx              1748 sound/x86/intel_hdmi_audio.c 		ctx->port = single_port ? -1 : port;
ctx              1749 sound/x86/intel_hdmi_audio.c 		ctx->pipe = -1;
ctx              1751 sound/x86/intel_hdmi_audio.c 		spin_lock_init(&ctx->had_spinlock);
ctx              1752 sound/x86/intel_hdmi_audio.c 		mutex_init(&ctx->mutex);
ctx              1753 sound/x86/intel_hdmi_audio.c 		INIT_WORK(&ctx->hdmi_audio_wq, had_audio_wq);
ctx              1790 sound/x86/intel_hdmi_audio.c 		ctx = &card_ctx->pcm_ctx[port];
ctx              1797 sound/x86/intel_hdmi_audio.c 		pcm->private_data = ctx;
ctx              1815 sound/x86/intel_hdmi_audio.c 			kctl = snd_ctl_new1(&had_controls[i], ctx);
ctx              1829 sound/x86/intel_hdmi_audio.c 		ret = had_register_chmap_ctls(ctx, pcm);
ctx              1833 sound/x86/intel_hdmi_audio.c 		ret = had_create_jack(ctx, pcm);
ctx              1851 sound/x86/intel_hdmi_audio.c 		struct snd_intelhad *ctx = &card_ctx->pcm_ctx[port];
ctx              1853 sound/x86/intel_hdmi_audio.c 		schedule_work(&ctx->hdmi_audio_wq);
ctx               354 tools/bpf/bpftool/btf.c static void __printf(2, 0) btf_dump_printf(void *ctx,
ctx                74 tools/bpf/bpftool/map_perf_ring.c 	struct event_pipe_ctx *ctx = private_data;
ctx                75 tools/bpf/bpftool/map_perf_ring.c 	int idx = ctx->all_cpus ? cpu : ctx->idx;
ctx               129 tools/bpf/bpftool/map_perf_ring.c 	struct event_pipe_ctx ctx = {
ctx               158 tools/bpf/bpftool/map_perf_ring.c 			ctx.cpu = strtoul(*argv, &endptr, 0);
ctx               169 tools/bpf/bpftool/map_perf_ring.c 			ctx.idx = strtoul(*argv, &endptr, 0);
ctx               181 tools/bpf/bpftool/map_perf_ring.c 		ctx.all_cpus = false;
ctx               184 tools/bpf/bpftool/map_perf_ring.c 	if (!ctx.all_cpus) {
ctx               185 tools/bpf/bpftool/map_perf_ring.c 		if (ctx.idx == -1 || ctx.cpu == -1) {
ctx               190 tools/bpf/bpftool/map_perf_ring.c 		ctx.cpu = 0;
ctx               191 tools/bpf/bpftool/map_perf_ring.c 		ctx.idx = 0;
ctx               196 tools/bpf/bpftool/map_perf_ring.c 	opts.ctx = &ctx;
ctx               197 tools/bpf/bpftool/map_perf_ring.c 	opts.cpu_cnt = ctx.all_cpus ? 0 : 1;
ctx               198 tools/bpf/bpftool/map_perf_ring.c 	opts.cpus = &ctx.cpu;
ctx               199 tools/bpf/bpftool/map_perf_ring.c 	opts.map_keys = &ctx.idx;
ctx              1370 tools/lib/bpf/btf.c static size_t btf_dedup_identity_hash_fn(const void *key, void *ctx)
ctx              1375 tools/lib/bpf/btf.c static size_t btf_dedup_collision_hash_fn(const void *key, void *ctx)
ctx              1380 tools/lib/bpf/btf.c static bool btf_dedup_equal_fn(const void *k1, const void *k2, void *ctx)
ctx              1444 tools/lib/bpf/btf.c typedef int (*str_off_fn_t)(__u32 *str_off_ptr, void *ctx);
ctx              1450 tools/lib/bpf/btf.c static int btf_for_each_str_off(struct btf_dedup *d, str_off_fn_t fn, void *ctx)
ctx              1458 tools/lib/bpf/btf.c 		r = fn(&t->name_off, ctx);
ctx              1469 tools/lib/bpf/btf.c 				r = fn(&m->name_off, ctx);
ctx              1481 tools/lib/bpf/btf.c 				r = fn(&m->name_off, ctx);
ctx              1493 tools/lib/bpf/btf.c 				r = fn(&m->name_off, ctx);
ctx              1517 tools/lib/bpf/btf.c 		r = fn(&sec->sec_name_off, ctx);
ctx              1524 tools/lib/bpf/btf.c 			r = fn(&line_info->file_name_off, ctx);
ctx              1527 tools/lib/bpf/btf.c 			r = fn(&line_info->line_off, ctx);
ctx              1564 tools/lib/bpf/btf.c static int btf_str_mark_as_used(__u32 *str_off_ptr, void *ctx)
ctx              1572 tools/lib/bpf/btf.c 	strs = ctx;
ctx              1581 tools/lib/bpf/btf.c static int btf_str_remap_offset(__u32 *str_off_ptr, void *ctx)
ctx              1589 tools/lib/bpf/btf.c 	strs = ctx;
ctx               115 tools/lib/bpf/btf.h 	void *ctx;
ctx               118 tools/lib/bpf/btf.h typedef void (*btf_dump_printf_fn_t)(void *ctx, const char *fmt, va_list args);
ctx                88 tools/lib/bpf/btf_dump.c static size_t str_hash_fn(const void *key, void *ctx)
ctx               100 tools/lib/bpf/btf_dump.c static bool str_equal_fn(const void *a, const void *b, void *ctx)
ctx               115 tools/lib/bpf/btf_dump.c 	d->printf_fn(d->opts.ctx, fmt, args);
ctx               134 tools/lib/bpf/btf_dump.c 	d->opts.ctx = opts ? opts->ctx : NULL;
ctx                33 tools/lib/bpf/hashmap.c 		   hashmap_equal_fn equal_fn, void *ctx)
ctx                37 tools/lib/bpf/hashmap.c 	map->ctx = ctx;
ctx                47 tools/lib/bpf/hashmap.c 			     void *ctx)
ctx                53 tools/lib/bpf/hashmap.c 	hashmap__init(map, hash_fn, equal_fn, ctx);
ctx               106 tools/lib/bpf/hashmap.c 		h = hash_bits(map->hash_fn(cur->key, map->ctx), new_cap_bits);
ctx               131 tools/lib/bpf/hashmap.c 		if (map->equal_fn(cur->key, key, map->ctx)) {
ctx               155 tools/lib/bpf/hashmap.c 	h = hash_bits(map->hash_fn(key, map->ctx), map->cap_bits);
ctx               179 tools/lib/bpf/hashmap.c 		h = hash_bits(map->hash_fn(key, map->ctx), map->cap_bits);
ctx               199 tools/lib/bpf/hashmap.c 	h = hash_bits(map->hash_fn(key, map->ctx), map->cap_bits);
ctx               214 tools/lib/bpf/hashmap.c 	h = hash_bits(map->hash_fn(key, map->ctx), map->cap_bits);
ctx                26 tools/lib/bpf/hashmap.h typedef size_t (*hashmap_hash_fn)(const void *key, void *ctx);
ctx                27 tools/lib/bpf/hashmap.h typedef bool (*hashmap_equal_fn)(const void *key1, const void *key2, void *ctx);
ctx                38 tools/lib/bpf/hashmap.h 	void *ctx;
ctx                46 tools/lib/bpf/hashmap.h #define HASHMAP_INIT(hash_fn, equal_fn, ctx) {	\
ctx                49 tools/lib/bpf/hashmap.h 	.ctx = (ctx),				\
ctx                57 tools/lib/bpf/hashmap.h 		   hashmap_equal_fn equal_fn, void *ctx);
ctx                60 tools/lib/bpf/hashmap.h 			     void *ctx);
ctx               163 tools/lib/bpf/hashmap.h 	for (cur = ({ size_t bkt = hash_bits(map->hash_fn((_key), map->ctx),\
ctx               168 tools/lib/bpf/hashmap.h 		if (map->equal_fn(cur->key, (_key), map->ctx))
ctx               171 tools/lib/bpf/hashmap.h 	for (cur = ({ size_t bkt = hash_bits(map->hash_fn((_key), map->ctx),\
ctx               176 tools/lib/bpf/hashmap.h 		if (map->equal_fn(cur->key, (_key), map->ctx))
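
The hashmap.h entries spell out the whole contract: the hash and equality callbacks both receive the map's ctx pointer, so per-map configuration needs no globals. A minimal sketch using the HASHMAP_INIT macro from the header above; struct lookup_cfg and the case-folding behaviour are invented, and teardown is elided:

    #include <ctype.h>
    #include <stdbool.h>
    #include <stdio.h>
    #include <string.h>
    #include <strings.h>
    #include "hashmap.h"

    struct lookup_cfg { bool case_insensitive; };  /* invented ctx payload */

    static size_t str_hash(const void *key, void *ctx)
    {
            const struct lookup_cfg *cfg = ctx;
            const unsigned char *s = key;
            size_t h = 5381;

            for (; *s; s++)
                    h = h * 33 + (cfg->case_insensitive ? tolower(*s) : *s);
            return h;
    }

    static bool str_equal(const void *k1, const void *k2, void *ctx)
    {
            const struct lookup_cfg *cfg = ctx;

            return cfg->case_insensitive ? !strcasecmp(k1, k2) : !strcmp(k1, k2);
    }

    int main(void)
    {
            struct lookup_cfg cfg = { .case_insensitive = true };
            struct hashmap map = HASHMAP_INIT(str_hash, str_equal, &cfg);
            void *val;

            hashmap__add(&map, "Foo", (void *)42L);
            if (hashmap__find(&map, "foo", &val))   /* matches via ctx's flag */
                    printf("found: %ld\n", (long)val);
            return 0;       /* teardown elided for brevity */
    }
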
ctx              2941 tools/lib/bpf/libbpf.c static size_t bpf_core_hash_fn(const void *key, void *ctx)
ctx              2946 tools/lib/bpf/libbpf.c static bool bpf_core_equal_fn(const void *k1, const void *k2, void *ctx)
ctx              5307 tools/lib/bpf/libbpf.c 	void *ctx;
ctx              5327 tools/lib/bpf/libbpf.c 	void *ctx; /* passed into callbacks */
ctx              5443 tools/lib/bpf/libbpf.c 	p.ctx = opts ? opts->ctx : NULL;
ctx              5456 tools/lib/bpf/libbpf.c 	p.ctx = opts->ctx;
ctx              5501 tools/lib/bpf/libbpf.c 	pb->ctx = p->ctx;
ctx              5599 tools/lib/bpf/libbpf.c perf_buffer__process_record(struct perf_event_header *e, void *ctx)
ctx              5601 tools/lib/bpf/libbpf.c 	struct perf_cpu_buf *cpu_buf = ctx;
ctx              5607 tools/lib/bpf/libbpf.c 		return pb->event_cb(pb->ctx, cpu_buf->cpu, e);
ctx              5614 tools/lib/bpf/libbpf.c 			pb->sample_cb(pb->ctx, cpu_buf->cpu, s->data, s->size);
ctx              5621 tools/lib/bpf/libbpf.c 			pb->lost_cb(pb->ctx, cpu_buf->cpu, s->lost);
ctx               364 tools/lib/bpf/libbpf.h typedef void (*perf_buffer_sample_fn)(void *ctx, int cpu,
ctx               366 tools/lib/bpf/libbpf.h typedef void (*perf_buffer_lost_fn)(void *ctx, int cpu, __u64 cnt);
ctx               375 tools/lib/bpf/libbpf.h 	void *ctx;
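
libbpf.h declares the sample/lost callback types, and the libbpf.c lines show pb->ctx being forwarded into each of them. A sketch of the consumer side under the same v5.4-era API; struct my_state, the page count, and the stop condition are invented:

    #include <linux/types.h>
    #include <bpf/libbpf.h>
    #include <stdio.h>

    struct my_state { long nr_samples; };   /* invented ctx payload */

    static void on_sample(void *ctx, int cpu, void *data, __u32 size)
    {
            struct my_state *st = ctx;      /* opts.ctx comes back here */

            st->nr_samples++;
            printf("cpu %d: %u bytes\n", cpu, size);
    }

    static void on_lost(void *ctx, int cpu, __u64 cnt)
    {
            fprintf(stderr, "cpu %d: lost %llu samples\n", cpu,
                    (unsigned long long)cnt);
    }

    static int consume(int map_fd)
    {
            struct my_state st = {};
            struct perf_buffer_opts opts = {
                    .sample_cb = on_sample,
                    .lost_cb   = on_lost,
                    .ctx       = &st,       /* handed to both callbacks */
            };
            struct perf_buffer *pb = perf_buffer__new(map_fd, 8, &opts);

            if (libbpf_get_error(pb))
                    return -1;
            while (st.nr_samples < 10 && perf_buffer__poll(pb, 100 /* ms */) >= 0)
                    ;
            perf_buffer__free(pb);
            return 0;
    }
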
ctx               493 tools/lib/subcmd/parse-options.c static void parse_options_start(struct parse_opt_ctx_t *ctx,
ctx               496 tools/lib/subcmd/parse-options.c 	memset(ctx, 0, sizeof(*ctx));
ctx               497 tools/lib/subcmd/parse-options.c 	ctx->argc = argc - 1;
ctx               498 tools/lib/subcmd/parse-options.c 	ctx->argv = argv + 1;
ctx               499 tools/lib/subcmd/parse-options.c 	ctx->out  = argv;
ctx               500 tools/lib/subcmd/parse-options.c 	ctx->cpidx = ((flags & PARSE_OPT_KEEP_ARGV0) != 0);
ctx               501 tools/lib/subcmd/parse-options.c 	ctx->flags = flags;
ctx               511 tools/lib/subcmd/parse-options.c static int parse_options_step(struct parse_opt_ctx_t *ctx,
ctx               515 tools/lib/subcmd/parse-options.c 	int internal_help = !(ctx->flags & PARSE_OPT_NO_INTERNAL_HELP);
ctx               520 tools/lib/subcmd/parse-options.c 	ctx->opt = NULL;
ctx               522 tools/lib/subcmd/parse-options.c 	for (; ctx->argc; ctx->argc--, ctx->argv++) {
ctx               523 tools/lib/subcmd/parse-options.c 		arg = ctx->argv[0];
ctx               525 tools/lib/subcmd/parse-options.c 			if (ctx->flags & PARSE_OPT_STOP_AT_NON_OPTION)
ctx               527 tools/lib/subcmd/parse-options.c 			ctx->out[ctx->cpidx++] = ctx->argv[0];
ctx               532 tools/lib/subcmd/parse-options.c 			ctx->opt = ++arg;
ctx               533 tools/lib/subcmd/parse-options.c 			if (internal_help && *ctx->opt == 'h') {
ctx               534 tools/lib/subcmd/parse-options.c 				return usage_with_options_internal(usagestr, options, 0, ctx);
ctx               536 tools/lib/subcmd/parse-options.c 			switch (parse_short_opt(ctx, options)) {
ctx               546 tools/lib/subcmd/parse-options.c 			if (ctx->opt)
ctx               548 tools/lib/subcmd/parse-options.c 			while (ctx->opt) {
ctx               549 tools/lib/subcmd/parse-options.c 				if (internal_help && *ctx->opt == 'h')
ctx               550 tools/lib/subcmd/parse-options.c 					return usage_with_options_internal(usagestr, options, 0, ctx);
ctx               551 tools/lib/subcmd/parse-options.c 				arg = ctx->opt;
ctx               552 tools/lib/subcmd/parse-options.c 				switch (parse_short_opt(ctx, options)) {
ctx               561 tools/lib/subcmd/parse-options.c 					ctx->argv[0] = strdup(ctx->opt - 1);
ctx               562 tools/lib/subcmd/parse-options.c 					*(char *)ctx->argv[0] = '-';
ctx               574 tools/lib/subcmd/parse-options.c 			if (!(ctx->flags & PARSE_OPT_KEEP_DASHDASH)) {
ctx               575 tools/lib/subcmd/parse-options.c 				ctx->argc--;
ctx               576 tools/lib/subcmd/parse-options.c 				ctx->argv++;
ctx               583 tools/lib/subcmd/parse-options.c 			return usage_with_options_internal(usagestr, options, 1, ctx);
ctx               585 tools/lib/subcmd/parse-options.c 			return usage_with_options_internal(usagestr, options, 0, ctx);
ctx               590 tools/lib/subcmd/parse-options.c 		switch (parse_long_opt(ctx, arg, options)) {
ctx               603 tools/lib/subcmd/parse-options.c 		if (!(ctx->flags & PARSE_OPT_KEEP_UNKNOWN))
ctx               605 tools/lib/subcmd/parse-options.c 		ctx->out[ctx->cpidx++] = ctx->argv[0];
ctx               606 tools/lib/subcmd/parse-options.c 		ctx->opt = NULL;
ctx               612 tools/lib/subcmd/parse-options.c 	if ((excl_short_opt && ctx->excl_opt->short_name) ||
ctx               613 tools/lib/subcmd/parse-options.c 	    ctx->excl_opt->long_name == NULL) {
ctx               614 tools/lib/subcmd/parse-options.c 		char opt = ctx->excl_opt->short_name;
ctx               617 tools/lib/subcmd/parse-options.c 		parse_options_usage(NULL, options, ctx->excl_opt->long_name, 0);
ctx               622 tools/lib/subcmd/parse-options.c static int parse_options_end(struct parse_opt_ctx_t *ctx)
ctx               624 tools/lib/subcmd/parse-options.c 	memmove(ctx->out + ctx->cpidx, ctx->argv, ctx->argc * sizeof(*ctx->out));
ctx               625 tools/lib/subcmd/parse-options.c 	ctx->out[ctx->cpidx + ctx->argc] = NULL;
ctx               626 tools/lib/subcmd/parse-options.c 	return ctx->cpidx + ctx->argc;
ctx               632 tools/lib/subcmd/parse-options.c 	struct parse_opt_ctx_t ctx;
ctx               650 tools/lib/subcmd/parse-options.c 	parse_options_start(&ctx, argc, argv, flags);
ctx               651 tools/lib/subcmd/parse-options.c 	switch (parse_options_step(&ctx, options, usagestr)) {
ctx               672 tools/lib/subcmd/parse-options.c 		if (ctx.argv[0][1] == '-')
ctx               674 tools/lib/subcmd/parse-options.c 				 ctx.argv[0] + 2);
ctx               676 tools/lib/subcmd/parse-options.c 			astrcatf(&error_buf, "unknown switch `%c'", *ctx.opt);
ctx               680 tools/lib/subcmd/parse-options.c 	return parse_options_end(&ctx);
ctx               824 tools/lib/subcmd/parse-options.c static bool option__in_argv(const struct option *opt, const struct parse_opt_ctx_t *ctx)
ctx               828 tools/lib/subcmd/parse-options.c 	for (i = 1; i < ctx->argc; ++i) {
ctx               829 tools/lib/subcmd/parse-options.c 		const char *arg = ctx->argv[i];
ctx               857 tools/lib/subcmd/parse-options.c 				       struct parse_opt_ctx_t *ctx)
ctx               889 tools/lib/subcmd/parse-options.c 		if (ctx && ctx->argc > 1 && !option__in_argv(opts, ctx))
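
The parse-options.c entries trace the full lifecycle: parse_options_start() shifts argv past argv[0], parse_options_step() consumes options, and parse_options_end() compacts the leftover arguments into the front of argv and returns their count. A minimal caller sketch; the demo program and its option names are invented:

    #include <subcmd/parse-options.h>
    #include <stdbool.h>
    #include <stdio.h>

    static bool verbose;
    static const char *output;

    static const char * const demo_usage[] = {
            "demo [<options>] <file>...",
            NULL
    };

    static const struct option demo_options[] = {
            OPT_BOOLEAN('v', "verbose", &verbose, "be more verbose"),
            OPT_STRING('o', "output", &output, "file", "write result to <file>"),
            OPT_END()
    };

    int main(int argc, const char **argv)
    {
            int i;

            /* returns the number of non-option arguments left in argv */
            argc = parse_options(argc, argv, demo_options, demo_usage, 0);

            for (i = 0; i < argc; i++)
                    printf("arg: %s%s\n", argv[i], verbose ? " (verbose)" : "");
            return 0;
    }
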
ctx                23 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	enum intel_pt_pkt_ctx ctx;
ctx               230 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 			      enum intel_pt_pkt_ctx ctx)
ctx               232 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	enum intel_pt_pkt_ctx old_ctx = ctx;
ctx               234 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	intel_pt_upd_pkt_ctx(packet, &ctx);
ctx               236 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	if (ctx != old_ctx) {
ctx               248 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	enum intel_pt_pkt_ctx ctx = d->ctx;
ctx               254 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	ret = intel_pt_get_packet(d->bytes, d->len, &packet, &ctx);
ctx               279 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 	    packet.payload != d->packet.payload || ctx != d->new_ctx)
ctx               280 tools/perf/arch/x86/tests/intel-pt-pkt-decoder-test.c 		return fail(d, &packet, ret, ctx);
ctx                82 tools/perf/bench/sched-messaging.c static void *sender(struct sender_context *ctx)
ctx                87 tools/perf/bench/sched-messaging.c 	ready(ctx->ready_out, ctx->wakefd);
ctx                91 tools/perf/bench/sched-messaging.c 		for (j = 0; j < ctx->num_fds; j++) {
ctx                95 tools/perf/bench/sched-messaging.c 			ret = write(ctx->out_fds[j], data + done,
ctx               110 tools/perf/bench/sched-messaging.c static void *receiver(struct receiver_context* ctx)
ctx               115 tools/perf/bench/sched-messaging.c 		close(ctx->in_fds[1]);
ctx               118 tools/perf/bench/sched-messaging.c 	ready(ctx->ready_out, ctx->wakefd);
ctx               121 tools/perf/bench/sched-messaging.c 	for (i = 0; i < ctx->num_packets; i++) {
ctx               126 tools/perf/bench/sched-messaging.c 		ret = read(ctx->in_fds[0], data + done, DATASIZE - done);
ctx               137 tools/perf/bench/sched-messaging.c static pthread_t create_worker(void *ctx, void *(*func)(void *))
ctx               151 tools/perf/bench/sched-messaging.c 			(*func) (ctx);
ctx               169 tools/perf/bench/sched-messaging.c 	ret = pthread_create(&childid, &attr, func, ctx);
ctx               206 tools/perf/bench/sched-messaging.c 		struct receiver_context *ctx = malloc(sizeof(*ctx));
ctx               208 tools/perf/bench/sched-messaging.c 		if (!ctx)
ctx               215 tools/perf/bench/sched-messaging.c 		ctx->num_packets = num_fds * nr_loops;
ctx               216 tools/perf/bench/sched-messaging.c 		ctx->in_fds[0] = fds[0];
ctx               217 tools/perf/bench/sched-messaging.c 		ctx->in_fds[1] = fds[1];
ctx               218 tools/perf/bench/sched-messaging.c 		ctx->ready_out = ready_out;
ctx               219 tools/perf/bench/sched-messaging.c 		ctx->wakefd = wakefd;
ctx               221 tools/perf/bench/sched-messaging.c 		pth[i] = create_worker(ctx, (void *)receiver);
ctx               615 tools/perf/builtin-sched.c static void *thread_func(void *ctx)
ctx               617 tools/perf/builtin-sched.c 	struct sched_thread_parms *parms = ctx;
ctx              1693 tools/perf/builtin-script.c 				void *ctx, const char *color,
ctx              1697 tools/perf/builtin-script.c 	struct metric_ctx *mctx = ctx;
ctx              1712 tools/perf/builtin-script.c 			    void *ctx)
ctx              1714 tools/perf/builtin-script.c 	struct metric_ctx *mctx = ctx;
ctx              1727 tools/perf/builtin-script.c 	struct perf_stat_output_ctx ctx = {
ctx              1730 tools/perf/builtin-script.c 		.ctx = &(struct metric_ctx) {
ctx              1756 tools/perf/builtin-script.c 						      &ctx,
ctx                44 tools/perf/examples/bpf/5sec.c int probe(hrtimer_nanosleep, rqtp->tv_sec)(void *ctx, int err, long sec)
ctx                90 tools/perf/examples/bpf/augmented_raw_syscalls.c static inline int augmented__output(void *ctx, struct augmented_args_payload *args, int len)
ctx                93 tools/perf/examples/bpf/augmented_raw_syscalls.c 	return perf_event_output(ctx, &__augmented_syscalls__, BPF_F_CURRENT_CPU, args, len);
ctx                48 tools/perf/include/bpf/bpf.h static void (*bpf_tail_call)(void *ctx, void *map, int index) = (void *)BPF_FUNC_tail_call;
ctx                36 tools/perf/tests/bpf-script-example.c int bpf_func__SyS_epoll_pwait(void *ctx)
ctx                15 tools/perf/tests/bpf-script-test-kbuild.c int bpf_func__vfs_llseek(void *ctx)
ctx                30 tools/perf/tests/bpf-script-test-prologue.c int bpf_func__null_lseek(void *ctx, int err, unsigned long _f_mode,
ctx                38 tools/perf/tests/bpf-script-test-relocation.c int bpf_func__sys_write(void *ctx)
ctx                 9 tools/perf/tests/expr.c static int test(struct parse_ctx *ctx, const char *e, double val2)
ctx                13 tools/perf/tests/expr.c 	if (expr__parse(&val, ctx, &e))
ctx                25 tools/perf/tests/expr.c 	struct parse_ctx ctx;
ctx                28 tools/perf/tests/expr.c 	expr__ctx_init(&ctx);
ctx                29 tools/perf/tests/expr.c 	expr__add_id(&ctx, "FOO", 1);
ctx                30 tools/perf/tests/expr.c 	expr__add_id(&ctx, "BAR", 2);
ctx                32 tools/perf/tests/expr.c 	ret = test(&ctx, "1+1", 2);
ctx                33 tools/perf/tests/expr.c 	ret |= test(&ctx, "FOO+BAR", 3);
ctx                34 tools/perf/tests/expr.c 	ret |= test(&ctx, "(BAR/2)%2", 1);
ctx                35 tools/perf/tests/expr.c 	ret |= test(&ctx, "1 - -4",  5);
ctx                36 tools/perf/tests/expr.c 	ret |= test(&ctx, "(FOO-1)*2 + (BAR/2)%2 - -4",  5);
ctx                37 tools/perf/tests/expr.c 	ret |= test(&ctx, "1-1 | 1", 1);
ctx                38 tools/perf/tests/expr.c 	ret |= test(&ctx, "1-1 & 1", 0);
ctx                39 tools/perf/tests/expr.c 	ret |= test(&ctx, "min(1,2) + 1", 2);
ctx                40 tools/perf/tests/expr.c 	ret |= test(&ctx, "max(1,2) + 1", 3);
ctx                41 tools/perf/tests/expr.c 	ret |= test(&ctx, "1+1 if 3*4 else 0", 2);
ctx                47 tools/perf/tests/expr.c 	ret = expr__parse(&val, &ctx, &p);
ctx                51 tools/perf/tests/expr.c 	ret = expr__parse(&val, &ctx, &p);
ctx                29 tools/perf/ui/gtk/gtk.h static inline bool perf_gtk__is_active_context(struct perf_gtk_context *ctx)
ctx                31 tools/perf/ui/gtk/gtk.h 	return ctx && ctx->main_window;
ctx                35 tools/perf/ui/gtk/gtk.h int perf_gtk__deactivate_context(struct perf_gtk_context **ctx);
ctx                13 tools/perf/ui/gtk/util.c 	struct perf_gtk_context *ctx;
ctx                15 tools/perf/ui/gtk/util.c 	ctx = malloc(sizeof(*pgctx));
ctx                16 tools/perf/ui/gtk/util.c 	if (ctx)
ctx                17 tools/perf/ui/gtk/util.c 		ctx->main_window = window;
ctx                19 tools/perf/ui/gtk/util.c 	return ctx;
ctx                22 tools/perf/ui/gtk/util.c int perf_gtk__deactivate_context(struct perf_gtk_context **ctx)
ctx                24 tools/perf/ui/gtk/util.c 	if (!perf_gtk__is_active_context(*ctx))
ctx                27 tools/perf/ui/gtk/util.c 	zfree(ctx);
ctx                18 tools/perf/util/expr.h void expr__ctx_init(struct parse_ctx *ctx);
ctx                19 tools/perf/util/expr.h void expr__add_id(struct parse_ctx *ctx, const char *id, double val);
ctx                21 tools/perf/util/expr.h int expr__parse(double *final_val, struct parse_ctx *ctx, const char **pp);
ctx                17 tools/perf/util/expr.y %parse-param { struct parse_ctx *ctx }
ctx                42 tools/perf/util/expr.y 		       struct parse_ctx *ctx __maybe_unused,
ctx                49 tools/perf/util/expr.y static int lookup_id(struct parse_ctx *ctx, char *id, double *val)
ctx                53 tools/perf/util/expr.y 	for (i = 0; i < ctx->num_ids; i++) {
ctx                54 tools/perf/util/expr.y 		if (!strcasecmp(ctx->ids[i].name, id)) {
ctx                55 tools/perf/util/expr.y 			*val = ctx->ids[i].val;
ctx                74 tools/perf/util/expr.y 	| ID			{ if (lookup_id(ctx, $1, &$$) < 0) {
ctx               172 tools/perf/util/expr.y void expr__add_id(struct parse_ctx *ctx, const char *name, double val)
ctx               175 tools/perf/util/expr.y 	assert(ctx->num_ids < MAX_PARSE_ID);
ctx               176 tools/perf/util/expr.y 	idx = ctx->num_ids++;
ctx               177 tools/perf/util/expr.y 	ctx->ids[idx].name = name;
ctx               178 tools/perf/util/expr.y 	ctx->ids[idx].val = val;
ctx               181 tools/perf/util/expr.y void expr__ctx_init(struct parse_ctx *ctx)
ctx               183 tools/perf/util/expr.y 	ctx->num_ids = 0;
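
Between expr.h and expr.y the whole parser API is visible: expr__ctx_init() zeroes the id table, expr__add_id() appends name/value pairs (pointers are stored, not copied), and expr__parse() evaluates an expression string against them, returning non-zero on error. A short sketch following those prototypes; the identifier values are invented and the include path is assumed:

    #include <stdio.h>
    #include "util/expr.h"

    int main(void)
    {
            struct parse_ctx ctx;
            const char *e = "FOO + BAR * 2";
            double val;

            expr__ctx_init(&ctx);
            expr__add_id(&ctx, "FOO", 1.0); /* bind identifiers before parsing */
            expr__add_id(&ctx, "BAR", 2.0);

            if (expr__parse(&val, &ctx, &e))        /* non-zero means error */
                    return 1;
            printf("result: %f\n", val);            /* 5.000000 */
            return 0;
    }
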
ctx              2753 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 	enum intel_pt_pkt_ctx ctx = INTEL_PT_NO_CTX;
ctx              2758 tools/perf/util/intel-pt-decoder/intel-pt-decoder.c 		ret = intel_pt_get_packet(buf, len, &packet, &ctx);
ctx               531 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 				  enum intel_pt_pkt_ctx ctx)
ctx               542 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 	switch (ctx) {
ctx               596 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 			  enum intel_pt_pkt_ctx *ctx)
ctx               631 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 		*ctx = INTEL_PT_NO_CTX;
ctx               635 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 			*ctx = INTEL_PT_BLK_4_CTX;
ctx               637 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 			*ctx = INTEL_PT_BLK_8_CTX;
ctx               645 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 			struct intel_pt_pkt *packet, enum intel_pt_pkt_ctx *ctx)
ctx               649 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 	ret = intel_pt_do_get_packet(buf, len, packet, *ctx);
ctx               653 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.c 		intel_pt_upd_pkt_ctx(packet, ctx);
ctx                80 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.h 			enum intel_pt_pkt_ctx *ctx);
ctx                83 tools/perf/util/intel-pt-decoder/intel-pt-pkt-decoder.h 			  enum intel_pt_pkt_ctx *ctx);
ctx               187 tools/perf/util/intel-pt.c 	enum intel_pt_pkt_ctx ctx = INTEL_PT_NO_CTX;
ctx               194 tools/perf/util/intel-pt.c 		ret = intel_pt_get_packet(buf, len, &packet, &ctx);
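
Both intel-pt.c and intel-pt-decoder.c above use the same idiom: seed ctx with INTEL_PT_NO_CTX, then let intel_pt_get_packet() decode a packet and update ctx via intel_pt_upd_pkt_ctx() so the next packet (e.g. inside a BLK 4/8 block) is parsed in the right mode. A condensed sketch of that loop, assuming the in-tree header; a positive return value is the number of bytes the packet consumed:

    #include <stddef.h>
    #include "intel-pt-pkt-decoder.h"

    static void walk_packets(const unsigned char *buf, size_t len)
    {
            enum intel_pt_pkt_ctx ctx = INTEL_PT_NO_CTX;    /* decoder state */
            struct intel_pt_pkt packet;
            int ret;

            while (len) {
                    ret = intel_pt_get_packet(buf, len, &packet, &ctx);
                    if (ret <= 0)   /* error, or more data needed */
                            break;
                    buf += ret;     /* advance by the packet's size */
                    len -= ret;
            }
    }
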
ctx                 9 tools/perf/util/perf-hooks.h typedef void (*perf_hook_func_t)(void *ctx);
ctx               144 tools/perf/util/stat-display.c 			 void *ctx)
ctx               146 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               163 tools/perf/util/stat-display.c 			     void *ctx, const char *color, const char *fmt,
ctx               166 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               189 tools/perf/util/stat-display.c static void new_line_csv(struct perf_stat_config *config, void *ctx)
ctx               191 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               203 tools/perf/util/stat-display.c 			     void *ctx,
ctx               207 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               249 tools/perf/util/stat-display.c 			      void *ctx, const char *color, const char *fmt,
ctx               252 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               271 tools/perf/util/stat-display.c 				  void *ctx, const char *color __maybe_unused,
ctx               275 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               292 tools/perf/util/stat-display.c 			    void *ctx __maybe_unused)
ctx               297 tools/perf/util/stat-display.c 				void *ctx, const char *color __maybe_unused,
ctx               301 tools/perf/util/stat-display.c 	struct outstate *os = ctx;
ctx               475 tools/perf/util/stat-display.c 	out.ctx = &os;
ctx               938 tools/perf/util/stat-display.c 		out.ctx = &os;
ctx                29 tools/perf/util/stat-shadow.c 	int ctx;
ctx                57 tools/perf/util/stat-shadow.c 	if (a->ctx != b->ctx)
ctx                58 tools/perf/util/stat-shadow.c 		return a->ctx - b->ctx;
ctx               102 tools/perf/util/stat-shadow.c 					      int ctx,
ctx               111 tools/perf/util/stat-shadow.c 		.ctx = ctx,
ctx               151 tools/perf/util/stat-shadow.c 	int ctx = 0;
ctx               154 tools/perf/util/stat-shadow.c 		ctx |= CTX_BIT_KERNEL;
ctx               156 tools/perf/util/stat-shadow.c 		ctx |= CTX_BIT_USER;
ctx               158 tools/perf/util/stat-shadow.c 		ctx |= CTX_BIT_HV;
ctx               160 tools/perf/util/stat-shadow.c 		ctx |= CTX_BIT_HOST;
ctx               162 tools/perf/util/stat-shadow.c 		ctx |= CTX_BIT_IDLE;
ctx               164 tools/perf/util/stat-shadow.c 	return ctx;
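
evsel_context() above folds the counter's exclusion attributes into a small bitmask, which stat-shadow then uses as a lookup key so that, say, user-only and kernel-only cycle counts are never mixed. A toy reconstruction; the flag values and struct toy_attr are illustrative:

    #include <stdio.h>

    #define CTX_BIT_KERNEL  (1 << 0)
    #define CTX_BIT_USER    (1 << 1)
    #define CTX_BIT_HV      (1 << 2)
    #define CTX_BIT_HOST    (1 << 3)
    #define CTX_BIT_IDLE    (1 << 4)

    struct toy_attr {
            unsigned int exclude_kernel : 1;
            unsigned int exclude_user : 1;
            unsigned int exclude_hv : 1;
            unsigned int exclude_host : 1;
            unsigned int exclude_idle : 1;
    };

    static int toy_evsel_context(const struct toy_attr *attr)
    {
            int ctx = 0;

            if (attr->exclude_kernel)
                    ctx |= CTX_BIT_KERNEL;
            if (attr->exclude_user)
                    ctx |= CTX_BIT_USER;
            if (attr->exclude_hv)
                    ctx |= CTX_BIT_HV;
            if (attr->exclude_host)
                    ctx |= CTX_BIT_HOST;
            if (attr->exclude_idle)
                    ctx |= CTX_BIT_IDLE;
            return ctx;
    }

    int main(void)
    {
            struct toy_attr user_only = { .exclude_kernel = 1, .exclude_hv = 1 };

            printf("ctx = %#x\n", toy_evsel_context(&user_only));  /* 0x5 */
            return 0;
    }
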
ctx               196 tools/perf/util/stat-shadow.c 				int ctx, int cpu, u64 count)
ctx               199 tools/perf/util/stat-shadow.c 						   type, ctx, st);
ctx               213 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(counter);
ctx               222 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_CYCLES, ctx, cpu, count);
ctx               224 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_CYCLES_IN_TX, ctx, cpu, count);
ctx               226 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_TRANSACTION, ctx, cpu, count);
ctx               228 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_ELISION, ctx, cpu, count);
ctx               231 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               234 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               237 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               240 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               243 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               246 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               249 tools/perf/util/stat-shadow.c 				    ctx, cpu, count);
ctx               251 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_BRANCHES, ctx, cpu, count);
ctx               253 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_CACHEREFS, ctx, cpu, count);
ctx               255 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_L1_DCACHE, ctx, cpu, count);
ctx               257 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_L1_ICACHE, ctx, cpu, count);
ctx               259 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_LL_CACHE, ctx, cpu, count);
ctx               261 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_DTLB_CACHE, ctx, cpu, count);
ctx               263 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_ITLB_CACHE, ctx, cpu, count);
ctx               265 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_SMI_NUM, ctx, cpu, count);
ctx               267 tools/perf/util/stat-shadow.c 		update_runtime_stat(st, STAT_APERF, ctx, cpu, count);
ctx               400 tools/perf/util/stat-shadow.c 			       enum stat_type type, int ctx, int cpu)
ctx               404 tools/perf/util/stat-shadow.c 	v = saved_value_lookup(NULL, cpu, false, type, ctx, st);
ctx               412 tools/perf/util/stat-shadow.c 			     enum stat_type type, int ctx, int cpu)
ctx               416 tools/perf/util/stat-shadow.c 	v = saved_value_lookup(NULL, cpu, false, type, ctx, st);
ctx               431 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               433 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               441 tools/perf/util/stat-shadow.c 		out->print_metric(config, out->ctx, color, "%7.2f%%", "frontend cycles idle",
ctx               444 tools/perf/util/stat-shadow.c 		out->print_metric(config, out->ctx, NULL, NULL, "frontend cycles idle", 0);
ctx               455 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               457 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               464 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "backend cycles idle", ratio);
ctx               476 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               478 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_BRANCHES, ctx, cpu);
ctx               485 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all branches", ratio);
ctx               498 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               500 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_L1_DCACHE, ctx, cpu);
ctx               507 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all L1-dcache hits", ratio);
ctx               520 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               522 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_L1_ICACHE, ctx, cpu);
ctx               528 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all L1-icache hits", ratio);
ctx               540 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               542 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_DTLB_CACHE, ctx, cpu);
ctx               548 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all dTLB cache hits", ratio);
ctx               560 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               562 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_ITLB_CACHE, ctx, cpu);
ctx               568 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all iTLB cache hits", ratio);
ctx               580 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               582 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_LL_CACHE, ctx, cpu);
ctx               588 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%7.2f%%", "of all LL-cache hits", ratio);
ctx               640 tools/perf/util/stat-shadow.c static double td_total_slots(int ctx, int cpu, struct runtime_stat *st)
ctx               642 tools/perf/util/stat-shadow.c 	return runtime_stat_avg(st, STAT_TOPDOWN_TOTAL_SLOTS, ctx, cpu);
ctx               645 tools/perf/util/stat-shadow.c static double td_bad_spec(int ctx, int cpu, struct runtime_stat *st)
ctx               651 tools/perf/util/stat-shadow.c 	total = runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_ISSUED, ctx, cpu) -
ctx               652 tools/perf/util/stat-shadow.c 		runtime_stat_avg(st, STAT_TOPDOWN_SLOTS_RETIRED, ctx, cpu) +
ctx               653 tools/perf/util/stat-shadow.c 		runtime_stat_avg(st, STAT_TOPDOWN_RECOVERY_BUBBLES, ctx, cpu);
ctx               655 tools/perf/util/stat-shadow.c 	total_slots = td_total_slots(ctx, cpu, st);
ctx               661 tools/perf/util/stat-shadow.c static double td_retiring(int ctx, int cpu, struct runtime_stat *st)
ctx               664 tools/perf/util/stat-shadow.c 	double total_slots = td_total_slots(ctx, cpu, st);
ctx               666 tools/perf/util/stat-shadow.c 					    ctx, cpu);
ctx               673 tools/perf/util/stat-shadow.c static double td_fe_bound(int ctx, int cpu, struct runtime_stat *st)
ctx               676 tools/perf/util/stat-shadow.c 	double total_slots = td_total_slots(ctx, cpu, st);
ctx               678 tools/perf/util/stat-shadow.c 					    ctx, cpu);
ctx               685 tools/perf/util/stat-shadow.c static double td_be_bound(int ctx, int cpu, struct runtime_stat *st)
ctx               687 tools/perf/util/stat-shadow.c 	double sum = (td_fe_bound(ctx, cpu, st) +
ctx               688 tools/perf/util/stat-shadow.c 		      td_bad_spec(ctx, cpu, st) +
ctx               689 tools/perf/util/stat-shadow.c 		      td_retiring(ctx, cpu, st));
ctx               701 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               704 tools/perf/util/stat-shadow.c 	smi_num = runtime_stat_avg(st, STAT_SMI_NUM, ctx, cpu);
ctx               705 tools/perf/util/stat-shadow.c 	aperf = runtime_stat_avg(st, STAT_APERF, ctx, cpu);
ctx               706 tools/perf/util/stat-shadow.c 	cycles = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               716 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, color, "%8.1f%%", "SMI cycles%", cost);
ctx               717 tools/perf/util/stat-shadow.c 	out->print_metric(config, out->ctx, NULL, "%4.0f", "SMI#", smi_num);
ctx               735 tools/perf/util/stat-shadow.c 	void *ctxp = out->ctx;
ctx               822 tools/perf/util/stat-shadow.c 	void *ctxp = out->ctx;
ctx               826 tools/perf/util/stat-shadow.c 	int ctx = evsel_context(evsel);
ctx               831 tools/perf/util/stat-shadow.c 		total = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               842 tools/perf/util/stat-shadow.c 					 ctx, cpu);
ctx               846 tools/perf/util/stat-shadow.c 						    ctx, cpu));
ctx               856 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_BRANCHES, ctx, cpu) != 0)
ctx               866 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_L1_DCACHE, ctx, cpu) != 0)
ctx               876 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_L1_ICACHE, ctx, cpu) != 0)
ctx               886 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_DTLB_CACHE, ctx, cpu) != 0)
ctx               896 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_ITLB_CACHE, ctx, cpu) != 0)
ctx               906 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_LL_CACHE, ctx, cpu) != 0)
ctx               911 tools/perf/util/stat-shadow.c 		total = runtime_stat_avg(st, STAT_CACHEREFS, ctx, cpu);
ctx               916 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_CACHEREFS, ctx, cpu) != 0)
ctx               935 tools/perf/util/stat-shadow.c 		total = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               945 tools/perf/util/stat-shadow.c 		total = runtime_stat_avg(st, STAT_CYCLES, ctx, cpu);
ctx               946 tools/perf/util/stat-shadow.c 		total2 = runtime_stat_avg(st, STAT_CYCLES_IN_TX, ctx, cpu);
ctx               957 tools/perf/util/stat-shadow.c 					 ctx, cpu);
ctx               962 tools/perf/util/stat-shadow.c 		if (runtime_stat_n(st, STAT_CYCLES_IN_TX, ctx, cpu) != 0)
ctx               970 tools/perf/util/stat-shadow.c 					 ctx, cpu);
ctx               983 tools/perf/util/stat-shadow.c 		double fe_bound = td_fe_bound(ctx, cpu, st);
ctx               990 tools/perf/util/stat-shadow.c 		double retiring = td_retiring(ctx, cpu, st);
ctx               997 tools/perf/util/stat-shadow.c 		double bad_spec = td_bad_spec(ctx, cpu, st);
ctx              1004 tools/perf/util/stat-shadow.c 		double be_bound = td_be_bound(ctx, cpu, st);
ctx              1017 tools/perf/util/stat-shadow.c 		if (td_total_slots(ctx, cpu, st) > 0)
ctx               167 tools/perf/util/stat.h 			       void *ctx, const char *color, const char *unit,
ctx               169 tools/perf/util/stat.h typedef void (*new_line_t)(struct perf_stat_config *config, void *ctx);
ctx               179 tools/perf/util/stat.h 	void *ctx;
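
stat.h shows the output contract used throughout stat-display.c and stat-shadow.c above: print_metric and new_line are function pointers carried in struct perf_stat_output_ctx, and the opaque ctx (an outstate, a metric_ctx, ...) rides along with them, so CSV, metric-only, and plain printers share one shadow-metric engine. A heavily condensed toy of that vtable-plus-ctx shape; all names here are invented:

    #include <stdio.h>

    struct toy_out {
            void (*print_metric)(void *ctx, const char *unit, double val);
            void *ctx;
    };

    static void print_plain(void *ctx, const char *unit, double val)
    {
            FILE *fp = ctx;

            fprintf(fp, "%8.2f %s\n", val, unit);
    }

    static void emit_ipc(struct toy_out *out, double insns, double cycles)
    {
            if (cycles > 0)
                    out->print_metric(out->ctx, "insn per cycle", insns / cycles);
    }

    int main(void)
    {
            struct toy_out out = { .print_metric = print_plain, .ctx = stdout };

            emit_ipc(&out, 1200.0, 1000.0); /* "    1.20 insn per cycle" */
            return 0;
    }
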
ctx               642 tools/testing/nvdimm/test/nfit.c 	struct region_search_spa *ctx = data;
ctx               652 tools/testing/nvdimm/test/nfit.c 	if (ctx->addr >= nd_region->ndr_start && ctx->addr < ndr_end) {
ctx               653 tools/testing/nvdimm/test/nfit.c 		ctx->region = nd_region;
ctx               667 tools/testing/nvdimm/test/nfit.c 	struct region_search_spa ctx = {
ctx               673 tools/testing/nvdimm/test/nfit.c 	ret = device_for_each_child(&bus->dev, &ctx,
ctx               679 tools/testing/nvdimm/test/nfit.c 	nd_region = ctx.region;
ctx               681 tools/testing/nvdimm/test/nfit.c 	dpa = ctx.addr - nd_region->ndr_start;
ctx                45 tools/testing/selftests/bpf/bpf_helpers.h static void (*bpf_tail_call)(void *ctx, void *map, int index) =
ctx                58 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_clone_redirect)(void *ctx, int ifindex, int flags) =
ctx                64 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_perf_event_output)(void *ctx, void *map,
ctx                68 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_get_stackid)(void *ctx, void *map, int flags) =
ctx                74 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_get_tunnel_key)(void *ctx, void *key, int size, int flags) =
ctx                76 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_set_tunnel_key)(void *ctx, void *key, int size, int flags) =
ctx                78 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_get_tunnel_opt)(void *ctx, void *md, int size) =
ctx                80 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_set_tunnel_opt)(void *ctx, void *md, int size) =
ctx                84 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_xdp_adjust_head)(void *ctx, int offset) =
ctx                86 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_xdp_adjust_meta)(void *ctx, int offset) =
ctx                88 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_get_socket_cookie)(void *ctx) =
ctx                90 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_setsockopt)(void *ctx, int level, int optname, void *optval,
ctx                93 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_getsockopt)(void *ctx, int level, int optname, void *optval,
ctx                96 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sock_ops_cb_flags_set)(void *ctx, int flags) =
ctx                98 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sk_redirect_map)(void *ctx, void *map, int key, int flags) =
ctx               100 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sk_redirect_hash)(void *ctx, void *map, void *key, int flags) =
ctx               111 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_perf_prog_read_value)(void *ctx, void *buf,
ctx               114 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_override_return)(void *ctx, unsigned long rc) =
ctx               116 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_redirect_map)(void *ctx, void *map, int key, int flags) =
ctx               118 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_redirect_hash)(void *ctx,
ctx               121 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_apply_bytes)(void *ctx, int len) =
ctx               123 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_cork_bytes)(void *ctx, int len) =
ctx               125 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_pull_data)(void *ctx, int start, int end, int flags) =
ctx               127 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_push_data)(void *ctx, int start, int end, int flags) =
ctx               129 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_msg_pop_data)(void *ctx, int start, int cut, int flags) =
ctx               131 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_bind)(void *ctx, void *addr, int addr_len) =
ctx               133 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_xdp_adjust_tail)(void *ctx, int offset) =
ctx               135 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_get_xfrm_state)(void *ctx, int index, void *state,
ctx               138 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sk_select_reuseport)(void *ctx, void *map, void *key, __u32 flags) =
ctx               140 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_get_stack)(void *ctx, void *buf, int size, int flags) =
ctx               142 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_fib_lookup)(void *ctx, struct bpf_fib_lookup *params,
ctx               145 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_lwt_push_encap)(void *ctx, unsigned int type, void *hdr,
ctx               148 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_lwt_seg6_store_bytes)(void *ctx, unsigned int offset,
ctx               151 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_lwt_seg6_action)(void *ctx, unsigned int action, void *param,
ctx               154 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_lwt_seg6_adjust_srh)(void *ctx, unsigned int offset,
ctx               157 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_rc_repeat)(void *ctx) =
ctx               159 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_rc_keydown)(void *ctx, unsigned int protocol,
ctx               166 tools/testing/selftests/bpf/bpf_helpers.h static unsigned long long (*bpf_skb_cgroup_id)(void *ctx) =
ctx               168 tools/testing/selftests/bpf/bpf_helpers.h static unsigned long long (*bpf_skb_ancestor_cgroup_id)(void *ctx, int level) =
ctx               170 tools/testing/selftests/bpf/bpf_helpers.h static struct bpf_sock *(*bpf_sk_lookup_tcp)(void *ctx,
ctx               175 tools/testing/selftests/bpf/bpf_helpers.h static struct bpf_sock *(*bpf_skc_lookup_tcp)(void *ctx,
ctx               180 tools/testing/selftests/bpf/bpf_helpers.h static struct bpf_sock *(*bpf_sk_lookup_udp)(void *ctx,
ctx               187 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_vlan_push)(void *ctx, __be16 vlan_proto, __u16 vlan_tci) =
ctx               189 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_vlan_pop)(void *ctx) =
ctx               191 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_rc_pointer_rel)(void *ctx, int rel_x, int rel_y) =
ctx               203 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_ecn_set_ce)(void *ctx) =
ctx               208 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sysctl_get_name)(void *ctx, char *buf,
ctx               212 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sysctl_get_current_value)(void *ctx, char *buf,
ctx               215 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sysctl_get_new_value)(void *ctx, char *buf,
ctx               218 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_sysctl_set_new_value)(void *ctx, const char *buf,
ctx               276 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_load_bytes)(void *ctx, int off, void *to, int len) =
ctx               278 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_load_bytes_relative)(void *ctx, int off, void *to, int len, __u32 start_header) =
ctx               280 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_store_bytes)(void *ctx, int off, void *from, int len, int flags) =
ctx               282 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_l3_csum_replace)(void *ctx, int off, int from, int to, int flags) =
ctx               284 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_l4_csum_replace)(void *ctx, int off, int from, int to, int flags) =
ctx               288 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_under_cgroup)(void *ctx, void *map, int index) =
ctx               294 tools/testing/selftests/bpf/bpf_helpers.h static unsigned int (*bpf_get_cgroup_classid)(void *ctx) =
ctx               296 tools/testing/selftests/bpf/bpf_helpers.h static unsigned int (*bpf_get_route_realm)(void *ctx) =
ctx               298 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_change_proto)(void *ctx, __be16 proto, __u64 flags) =
ctx               300 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_change_type)(void *ctx, __u32 type) =
ctx               302 tools/testing/selftests/bpf/bpf_helpers.h static unsigned int (*bpf_get_hash_recalc)(void *ctx) =
ctx               306 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_change_tail)(void *ctx, __u32 len, __u64 flags) =
ctx               308 tools/testing/selftests/bpf/bpf_helpers.h static long long (*bpf_csum_update)(void *ctx, __u32 csum) =
ctx               310 tools/testing/selftests/bpf/bpf_helpers.h static void (*bpf_set_hash_invalid)(void *ctx) =
ctx               314 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_probe_read_str)(void *ctx, __u32 size,
ctx               317 tools/testing/selftests/bpf/bpf_helpers.h static unsigned int (*bpf_get_socket_uid)(void *ctx) =
ctx               319 tools/testing/selftests/bpf/bpf_helpers.h static unsigned int (*bpf_set_hash)(void *ctx, __u32 hash) =
ctx               321 tools/testing/selftests/bpf/bpf_helpers.h static int (*bpf_skb_adjust_room)(void *ctx, __s32 len_diff, __u32 mode,
ctx               502 tools/testing/selftests/bpf/bpf_helpers.h #define BPF_KPROBE_READ_RET_IP(ip, ctx)		({ (ip) = (ctx)->link; })
ctx               505 tools/testing/selftests/bpf/bpf_helpers.h #define BPF_KPROBE_READ_RET_IP(ip, ctx)		({ (ip) = PT_REGS_RET(ctx); })
ctx               508 tools/testing/selftests/bpf/bpf_helpers.h #define BPF_KPROBE_READ_RET_IP(ip, ctx)		({				\
ctx               509 tools/testing/selftests/bpf/bpf_helpers.h 		bpf_probe_read(&(ip), sizeof(ip), (void *)PT_REGS_RET(ctx)); })
ctx               510 tools/testing/selftests/bpf/bpf_helpers.h #define BPF_KRETPROBE_READ_RET_IP(ip, ctx)	({				\
ctx               512 tools/testing/selftests/bpf/bpf_helpers.h 				(void *)(PT_REGS_FP(ctx) + sizeof(ip))); })
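
All the bpf_helpers.h entries above follow one declaration idiom: each helper is a function pointer initialised to the helper's numeric ID, which LLVM's BPF backend lowers into a CALL instruction carrying that ID as the immediate. The pattern in isolation, copied in shape from the header:

    #include <linux/bpf.h>

    /* A helper "declaration": the pointer's value is BPF_FUNC_trace_printk
     * (an enum constant from the UAPI header), not a real address; calls
     * through it become BPF CALL instructions under clang -target bpf. */
    static int (*bpf_trace_printk)(const char *fmt, int fmt_size, ...) =
            (void *) BPF_FUNC_trace_printk;
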
ctx               465 tools/testing/selftests/bpf/prog_tests/flow_dissector.c 		static struct bpf_flow_keys ctx = {};
ctx               468 tools/testing/selftests/bpf/prog_tests/flow_dissector.c 			tattr.ctx_in = &ctx;
ctx               469 tools/testing/selftests/bpf/prog_tests/flow_dissector.c 			tattr.ctx_size_in = sizeof(ctx);
ctx               470 tools/testing/selftests/bpf/prog_tests/flow_dissector.c 			ctx.flags = tests[i].flags;
ctx                23 tools/testing/selftests/bpf/prog_tests/get_stack_raw_tp.c static void get_stack_print_output(void *ctx, int cpu, void *data, __u32 size)
ctx                 9 tools/testing/selftests/bpf/prog_tests/perf_buffer.c static void on_sample(void *ctx, int cpu, void *data, __u32 size)
ctx                12 tools/testing/selftests/bpf/prog_tests/perf_buffer.c 	cpu_set_t *cpu_seen = ctx;
ctx                72 tools/testing/selftests/bpf/prog_tests/perf_buffer.c 	pb_opts.ctx = &cpu_seen;
ctx                22 tools/testing/selftests/bpf/progs/connect4_prog.c int connect_v4_prog(struct bpf_sock_addr *ctx)
ctx                35 tools/testing/selftests/bpf/progs/connect4_prog.c 	if (ctx->type != SOCK_STREAM && ctx->type != SOCK_DGRAM)
ctx                37 tools/testing/selftests/bpf/progs/connect4_prog.c 	else if (ctx->type == SOCK_STREAM)
ctx                38 tools/testing/selftests/bpf/progs/connect4_prog.c 		sk = bpf_sk_lookup_tcp(ctx, &tuple, sizeof(tuple.ipv4),
ctx                41 tools/testing/selftests/bpf/progs/connect4_prog.c 		sk = bpf_sk_lookup_udp(ctx, &tuple, sizeof(tuple.ipv4),
ctx                56 tools/testing/selftests/bpf/progs/connect4_prog.c 	ctx->user_ip4 = bpf_htonl(DST_REWRITE_IP4);
ctx                57 tools/testing/selftests/bpf/progs/connect4_prog.c 	ctx->user_port = bpf_htons(DST_REWRITE_PORT4);
ctx                66 tools/testing/selftests/bpf/progs/connect4_prog.c 	if (bpf_bind(ctx, (struct sockaddr *)&sa, sizeof(sa)) != 0)
ctx                30 tools/testing/selftests/bpf/progs/connect6_prog.c int connect_v6_prog(struct bpf_sock_addr *ctx)
ctx                47 tools/testing/selftests/bpf/progs/connect6_prog.c 	if (ctx->type != SOCK_STREAM && ctx->type != SOCK_DGRAM)
ctx                49 tools/testing/selftests/bpf/progs/connect6_prog.c 	else if (ctx->type == SOCK_STREAM)
ctx                50 tools/testing/selftests/bpf/progs/connect6_prog.c 		sk = bpf_sk_lookup_tcp(ctx, &tuple, sizeof(tuple.ipv6),
ctx                53 tools/testing/selftests/bpf/progs/connect6_prog.c 		sk = bpf_sk_lookup_udp(ctx, &tuple, sizeof(tuple.ipv6),
ctx                71 tools/testing/selftests/bpf/progs/connect6_prog.c 	ctx->user_ip6[0] = bpf_htonl(DST_REWRITE_IP6_0);
ctx                72 tools/testing/selftests/bpf/progs/connect6_prog.c 	ctx->user_ip6[1] = bpf_htonl(DST_REWRITE_IP6_1);
ctx                73 tools/testing/selftests/bpf/progs/connect6_prog.c 	ctx->user_ip6[2] = bpf_htonl(DST_REWRITE_IP6_2);
ctx                74 tools/testing/selftests/bpf/progs/connect6_prog.c 	ctx->user_ip6[3] = bpf_htonl(DST_REWRITE_IP6_3);
ctx                76 tools/testing/selftests/bpf/progs/connect6_prog.c 	ctx->user_port = bpf_htons(DST_REWRITE_PORT6);
ctx                89 tools/testing/selftests/bpf/progs/connect6_prog.c 	if (bpf_bind(ctx, (struct sockaddr *)&sa, sizeof(sa)) != 0)
ctx                13 tools/testing/selftests/bpf/progs/dev_cgroup.c int bpf_prog1(struct bpf_cgroup_dev_ctx *ctx)
ctx                15 tools/testing/selftests/bpf/progs/dev_cgroup.c 	short type = ctx->access_type & 0xFFFF;
ctx                17 tools/testing/selftests/bpf/progs/dev_cgroup.c 	short access = ctx->access_type >> 16;
ctx                41 tools/testing/selftests/bpf/progs/dev_cgroup.c 	bpf_trace_printk(fmt, sizeof(fmt), ctx->major, ctx->minor);
ctx                47 tools/testing/selftests/bpf/progs/dev_cgroup.c 	if (ctx->major != 1 || type != BPF_DEVCG_DEV_CHAR)
ctx                50 tools/testing/selftests/bpf/progs/dev_cgroup.c 	switch (ctx->minor) {
ctx                22 tools/testing/selftests/bpf/progs/get_cgroup_id_kern.c int trace(void *ctx)
ctx                14 tools/testing/selftests/bpf/progs/loop1.c int nested_loops(volatile struct pt_regs* ctx)
ctx                21 tools/testing/selftests/bpf/progs/loop1.c 				m = PT_REGS_RC(ctx);
ctx                14 tools/testing/selftests/bpf/progs/loop2.c int while_true(volatile struct pt_regs* ctx)
ctx                19 tools/testing/selftests/bpf/progs/loop2.c 		if (PT_REGS_RC(ctx) & 1)
ctx                14 tools/testing/selftests/bpf/progs/loop3.c int while_true(volatile struct pt_regs* ctx)
ctx                19 tools/testing/selftests/bpf/progs/loop3.c 		sum += PT_REGS_RC(ctx);
ctx               155 tools/testing/selftests/bpf/progs/pyperf.h static __always_inline int __on_event(struct pt_regs *ctx)
ctx               173 tools/testing/selftests/bpf/progs/pyperf.h 	event->user_stack_id = bpf_get_stackid(ctx, &stackmap, BPF_F_USER_STACK);
ctx               174 tools/testing/selftests/bpf/progs/pyperf.h 	event->kernel_stack_id = bpf_get_stackid(ctx, &stackmap, 0);
ctx               247 tools/testing/selftests/bpf/progs/pyperf.h 	bpf_perf_event_output(ctx, &perfmap, 0, event, offsetof(Event, metadata));
ctx               252 tools/testing/selftests/bpf/progs/pyperf.h int on_event(struct pt_regs* ctx)
ctx               255 tools/testing/selftests/bpf/progs/pyperf.h 	ret |= __on_event(ctx);
ctx               256 tools/testing/selftests/bpf/progs/pyperf.h 	ret |= __on_event(ctx);
ctx               257 tools/testing/selftests/bpf/progs/pyperf.h 	ret |= __on_event(ctx);
ctx               258 tools/testing/selftests/bpf/progs/pyperf.h 	ret |= __on_event(ctx);
ctx               259 tools/testing/selftests/bpf/progs/pyperf.h 	ret |= __on_event(ctx);
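
pyperf.h (and strobemeta.h below) pair bpf_get_stackid() with bpf_perf_event_output(), both taking the program's ctx as their first argument. A minimal standalone sketch of that pairing in the era's legacy map-definition style; the attach point, map sizes, and struct event are invented:

    #include <linux/bpf.h>
    #include <linux/ptrace.h>
    #include "bpf_helpers.h"

    struct bpf_map_def SEC("maps") stackmap = {
            .type = BPF_MAP_TYPE_STACK_TRACE,
            .key_size = sizeof(__u32),
            .value_size = 127 * sizeof(__u64),      /* one stack trace */
            .max_entries = 1024,
    };

    struct bpf_map_def SEC("maps") perfmap = {
            .type = BPF_MAP_TYPE_PERF_EVENT_ARRAY,
            .key_size = sizeof(int),
            .value_size = sizeof(__u32),
            .max_entries = 64,      /* >= number of CPUs that may emit */
    };

    struct event {
            int kernel_stack_id;
            int user_stack_id;
    };

    SEC("kprobe/do_sys_open")       /* attach point is illustrative */
    int on_event(struct pt_regs *ctx)
    {
            struct event e = {
                    .kernel_stack_id = bpf_get_stackid(ctx, &stackmap, 0),
                    .user_stack_id = bpf_get_stackid(ctx, &stackmap,
                                                     BPF_F_USER_STACK),
            };

            bpf_perf_event_output(ctx, &perfmap, BPF_F_CURRENT_CPU,
                                  &e, sizeof(e));
            return 0;
    }

    char _license[] SEC("license") = "GPL";
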
ctx                22 tools/testing/selftests/bpf/progs/sendmsg4_prog.c int sendmsg_v4_prog(struct bpf_sock_addr *ctx)
ctx                24 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 	if (ctx->type != SOCK_DGRAM)
ctx                28 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 	if (ctx->msg_src_ip4 == bpf_htonl(SRC1_IP4) ||
ctx                29 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 	    ctx->msg_src_ip4 == bpf_htonl(SRC2_IP4)) {
ctx                30 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 		ctx->msg_src_ip4 = bpf_htonl(SRC_REWRITE_IP4);
ctx                37 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 	if ((ctx->user_ip4 >> 24) == (bpf_htonl(DST_IP4) >> 24) &&
ctx                38 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 	     ctx->user_port == bpf_htons(DST_PORT)) {
ctx                39 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 		ctx->user_ip4 = bpf_htonl(DST_REWRITE_IP4);
ctx                40 tools/testing/selftests/bpf/progs/sendmsg4_prog.c 		ctx->user_port = bpf_htons(DST_REWRITE_PORT4);
ctx                26 tools/testing/selftests/bpf/progs/sendmsg6_prog.c int sendmsg_v6_prog(struct bpf_sock_addr *ctx)
ctx                28 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 	if (ctx->type != SOCK_DGRAM)
ctx                32 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 	if (ctx->msg_src_ip6[3] == bpf_htonl(1) ||
ctx                33 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 	    ctx->msg_src_ip6[3] == bpf_htonl(0)) {
ctx                34 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->msg_src_ip6[0] = bpf_htonl(SRC_REWRITE_IP6_0);
ctx                35 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->msg_src_ip6[1] = bpf_htonl(SRC_REWRITE_IP6_1);
ctx                36 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->msg_src_ip6[2] = bpf_htonl(SRC_REWRITE_IP6_2);
ctx                37 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->msg_src_ip6[3] = bpf_htonl(SRC_REWRITE_IP6_3);
ctx                44 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 	if (ctx->user_ip6[0] == bpf_htonl(0xFACEB00C)) {
ctx                45 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->user_ip6[0] = bpf_htonl(DST_REWRITE_IP6_0);
ctx                46 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->user_ip6[1] = bpf_htonl(DST_REWRITE_IP6_1);
ctx                47 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->user_ip6[2] = bpf_htonl(DST_REWRITE_IP6_2);
ctx                48 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->user_ip6[3] = bpf_htonl(DST_REWRITE_IP6_3);
ctx                50 tools/testing/selftests/bpf/progs/sendmsg6_prog.c 		ctx->user_port = bpf_htons(DST_REWRITE_PORT6);
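
connect4/6 and sendmsg4/6 above are all instances of one sock_addr pattern: inspect ctx fields, rewrite user_ip*/user_port in network byte order, and return 1 so the (now rewritten) syscall proceeds. The pattern reduced to its core; the destination address and port are invented:

    #include <linux/bpf.h>
    #include <sys/socket.h>
    #include "bpf_helpers.h"
    #include "bpf_endian.h"

    SEC("cgroup/connect4")
    int redirect_v4(struct bpf_sock_addr *ctx)
    {
            if (ctx->type != SOCK_STREAM)
                    return 1;       /* permit, unmodified */

            ctx->user_ip4 = bpf_htonl(0x7f000001);  /* 127.0.0.1 */
            ctx->user_port = bpf_htons(4040);
            return 1;               /* 1 = allow the rewritten connect */
    }

    char _license[] SEC("license") = "GPL";
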
ctx                23 tools/testing/selftests/bpf/progs/socket_cookie_prog.c int set_cookie(struct bpf_sock_addr *ctx)
ctx                27 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	if (ctx->family != AF_INET6 || ctx->user_family != AF_INET6)
ctx                30 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	p = bpf_sk_storage_get(&socket_cookies, ctx->sk, 0,
ctx                36 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	p->cookie_key = bpf_get_socket_cookie(ctx);
ctx                42 tools/testing/selftests/bpf/progs/socket_cookie_prog.c int update_cookie(struct bpf_sock_ops *ctx)
ctx                47 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	if (ctx->family != AF_INET6)
ctx                50 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	if (ctx->op != BPF_SOCK_OPS_TCP_CONNECT_CB)
ctx                53 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	if (!ctx->sk)
ctx                56 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	p = bpf_sk_storage_get(&socket_cookies, ctx->sk, 0, 0);
ctx                60 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	if (p->cookie_key != bpf_get_socket_cookie(ctx))
ctx                63 tools/testing/selftests/bpf/progs/socket_cookie_prog.c 	p->cookie_value = (ctx->local_port << 8) | p->cookie_value;
ctx                38 tools/testing/selftests/bpf/progs/sockopt_inherit.c static __inline struct sockopt_inherit *get_storage(struct bpf_sockopt *ctx)
ctx                40 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	if (ctx->optname == CUSTOM_INHERIT1)
ctx                41 tools/testing/selftests/bpf/progs/sockopt_inherit.c 		return bpf_sk_storage_get(&cloned1_map, ctx->sk, 0,
ctx                43 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	else if (ctx->optname == CUSTOM_INHERIT2)
ctx                44 tools/testing/selftests/bpf/progs/sockopt_inherit.c 		return bpf_sk_storage_get(&cloned2_map, ctx->sk, 0,
ctx                47 tools/testing/selftests/bpf/progs/sockopt_inherit.c 		return bpf_sk_storage_get(&listener_only_map, ctx->sk, 0,
ctx                52 tools/testing/selftests/bpf/progs/sockopt_inherit.c int _getsockopt(struct bpf_sockopt *ctx)
ctx                54 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	__u8 *optval_end = ctx->optval_end;
ctx                56 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	__u8 *optval = ctx->optval;
ctx                58 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	if (ctx->level != SOL_CUSTOM)
ctx                64 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	storage = get_storage(ctx);
ctx                68 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	ctx->retval = 0; /* Reset system call return value to zero */
ctx                71 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	ctx->optlen = 1;
ctx                77 tools/testing/selftests/bpf/progs/sockopt_inherit.c int _setsockopt(struct bpf_sockopt *ctx)
ctx                79 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	__u8 *optval_end = ctx->optval_end;
ctx                81 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	__u8 *optval = ctx->optval;
ctx                83 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	if (ctx->level != SOL_CUSTOM)
ctx                89 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	storage = get_storage(ctx);
ctx                94 tools/testing/selftests/bpf/progs/sockopt_inherit.c 	ctx->optlen = -1;
ctx                10 tools/testing/selftests/bpf/progs/sockopt_multi.c int _getsockopt_child(struct bpf_sockopt *ctx)
ctx                12 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval_end = ctx->optval_end;
ctx                13 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval = ctx->optval;
ctx                15 tools/testing/selftests/bpf/progs/sockopt_multi.c 	if (ctx->level != SOL_IP || ctx->optname != IP_TOS)
ctx                24 tools/testing/selftests/bpf/progs/sockopt_multi.c 	ctx->retval = 0; /* Reset system call return value to zero */
ctx                27 tools/testing/selftests/bpf/progs/sockopt_multi.c 	ctx->optlen = 1;
ctx                33 tools/testing/selftests/bpf/progs/sockopt_multi.c int _getsockopt_parent(struct bpf_sockopt *ctx)
ctx                35 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval_end = ctx->optval_end;
ctx                36 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval = ctx->optval;
ctx                38 tools/testing/selftests/bpf/progs/sockopt_multi.c 	if (ctx->level != SOL_IP || ctx->optname != IP_TOS)
ctx                47 tools/testing/selftests/bpf/progs/sockopt_multi.c 	ctx->retval = 0; /* Reset system call return value to zero */
ctx                50 tools/testing/selftests/bpf/progs/sockopt_multi.c 	ctx->optlen = 1;
ctx                56 tools/testing/selftests/bpf/progs/sockopt_multi.c int _setsockopt(struct bpf_sockopt *ctx)
ctx                58 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval_end = ctx->optval_end;
ctx                59 tools/testing/selftests/bpf/progs/sockopt_multi.c 	__u8 *optval = ctx->optval;
ctx                61 tools/testing/selftests/bpf/progs/sockopt_multi.c 	if (ctx->level != SOL_IP || ctx->optname != IP_TOS)
ctx                68 tools/testing/selftests/bpf/progs/sockopt_multi.c 	ctx->optlen = 1;
ctx                26 tools/testing/selftests/bpf/progs/sockopt_sk.c int _getsockopt(struct bpf_sockopt *ctx)
ctx                28 tools/testing/selftests/bpf/progs/sockopt_sk.c 	__u8 *optval_end = ctx->optval_end;
ctx                29 tools/testing/selftests/bpf/progs/sockopt_sk.c 	__u8 *optval = ctx->optval;
ctx                32 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_IP && ctx->optname == IP_TOS)
ctx                39 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_SOCKET && ctx->optname == SO_SNDBUF) {
ctx                47 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_TCP && ctx->optname == TCP_CONGESTION) {
ctx                55 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level != SOL_CUSTOM)
ctx                61 tools/testing/selftests/bpf/progs/sockopt_sk.c 	storage = bpf_sk_storage_get(&socket_storage_map, ctx->sk, 0,
ctx                66 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (!ctx->retval)
ctx                70 tools/testing/selftests/bpf/progs/sockopt_sk.c 	ctx->retval = 0; /* Reset system call return value to zero */
ctx                73 tools/testing/selftests/bpf/progs/sockopt_sk.c 	ctx->optlen = 1;
ctx                79 tools/testing/selftests/bpf/progs/sockopt_sk.c int _setsockopt(struct bpf_sockopt *ctx)
ctx                81 tools/testing/selftests/bpf/progs/sockopt_sk.c 	__u8 *optval_end = ctx->optval_end;
ctx                82 tools/testing/selftests/bpf/progs/sockopt_sk.c 	__u8 *optval = ctx->optval;
ctx                85 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_IP && ctx->optname == IP_TOS)
ctx                92 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_SOCKET && ctx->optname == SO_SNDBUF) {
ctx                99 tools/testing/selftests/bpf/progs/sockopt_sk.c 		ctx->optlen = 4;
ctx               104 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level == SOL_TCP && ctx->optname == TCP_CONGESTION) {
ctx               111 tools/testing/selftests/bpf/progs/sockopt_sk.c 		ctx->optlen = 5;
ctx               116 tools/testing/selftests/bpf/progs/sockopt_sk.c 	if (ctx->level != SOL_CUSTOM)
ctx               122 tools/testing/selftests/bpf/progs/sockopt_sk.c 	storage = bpf_sk_storage_get(&socket_storage_map, ctx->sk, 0,
ctx               128 tools/testing/selftests/bpf/progs/sockopt_sk.c 	ctx->optlen = -1; /* BPF has consumed this option, don't call kernel
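
A note on the sockopt_multi.c and sockopt_sk.c entries above: every BPF_PROG_TYPE_CGROUP_SOCKOPT program follows the same verifier-mandated pattern visible in the excerpts — snapshot ctx->optval and ctx->optval_end into locals, bounds-check before touching the buffer, set ctx->retval, and shrink ctx->optlen to the bytes actually produced. A minimal sketch of that pattern, assuming the selftests' "bpf_helpers.h" include layout and clang -target bpf (getsockopt_tos and the 0x80 mutation are illustrative, not from the selftests):

#include <netinet/in.h>
#include <linux/bpf.h>
#include "bpf_helpers.h"

SEC("cgroup/getsockopt")
int getsockopt_tos(struct bpf_sockopt *ctx)
{
	__u8 *optval_end = ctx->optval_end;
	__u8 *optval = ctx->optval;

	if (ctx->level != SOL_IP || ctx->optname != IP_TOS)
		return 1;			/* not ours: pass the kernel's answer through */

	if (optval + 1 > optval_end)		/* bounds check the verifier insists on */
		return 0;			/* reject with EPERM */

	optval[0] |= 0x80;			/* illustrative mutation of the returned byte */
	ctx->retval = 0;			/* reset system call return value to zero */
	ctx->optlen = 1;			/* report one byte of option data */
	return 1;
}

char _license[] SEC("license") = "GPL";
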
ctx               496 tools/testing/selftests/bpf/progs/strobemeta.h int on_event(struct pt_regs *ctx) {
ctx               519 tools/testing/selftests/bpf/progs/strobemeta.h 		sample->kernel_stack_id = bpf_get_stackid(ctx, &stacks_1, 0);
ctx               520 tools/testing/selftests/bpf/progs/strobemeta.h 		sample->user_stack_id = bpf_get_stackid(ctx, &stacks_1, BPF_F_USER_STACK);
ctx               522 tools/testing/selftests/bpf/progs/strobemeta.h 		sample->kernel_stack_id = bpf_get_stackid(ctx, &stacks_0, 0);
ctx               523 tools/testing/selftests/bpf/progs/strobemeta.h 		sample->user_stack_id = bpf_get_stackid(ctx, &stacks_0, BPF_F_USER_STACK);
ctx               529 tools/testing/selftests/bpf/progs/strobemeta.h 		bpf_perf_event_output(ctx, &samples, 0, sample, 1 + sample_size);
ctx                25 tools/testing/selftests/bpf/progs/tcp_rtt.c int _sockops(struct bpf_sock_ops *ctx)
ctx                29 tools/testing/selftests/bpf/progs/tcp_rtt.c 	int op = (int) ctx->op;
ctx                32 tools/testing/selftests/bpf/progs/tcp_rtt.c 	sk = ctx->sk;
ctx                42 tools/testing/selftests/bpf/progs/tcp_rtt.c 		bpf_sock_ops_cb_flags_set(ctx, BPF_SOCK_OPS_RTT_CB_FLAG);
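
The tcp_rtt.c lines show how a sockops program opts into RTT callbacks: switch on ctx->op and call bpf_sock_ops_cb_flags_set() once the connection is established. A trimmed sketch under the same include assumptions (rtt_sockops is an illustrative name; the per-socket bpf_sk_storage_get() bookkeeping from the selftest is omitted):

#include <linux/bpf.h>
#include "bpf_helpers.h"

SEC("sockops")
int rtt_sockops(struct bpf_sock_ops *ctx)
{
	int op = (int)ctx->op;

	/* Ask the stack for BPF_SOCK_OPS_RTT_CB invocations on this socket. */
	if (op == BPF_SOCK_OPS_ACTIVE_ESTABLISHED_CB ||
	    op == BPF_SOCK_OPS_PASSIVE_ESTABLISHED_CB)
		bpf_sock_ops_cb_flags_set(ctx, BPF_SOCK_OPS_RTT_CB_FLAG);

	return 1;
}

char _license[] SEC("license") = "GPL";
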
ctx                16 tools/testing/selftests/bpf/progs/test_attach_probe.c int handle_sys_nanosleep_entry(struct pt_regs *ctx)
ctx                25 tools/testing/selftests/bpf/progs/test_attach_probe.c int handle_sys_getpid_return(struct pt_regs *ctx)
ctx                34 tools/testing/selftests/bpf/progs/test_attach_probe.c int handle_uprobe_entry(struct pt_regs *ctx)
ctx                43 tools/testing/selftests/bpf/progs/test_attach_probe.c int handle_uprobe_return(struct pt_regs *ctx)
ctx                35 tools/testing/selftests/bpf/progs/test_core_reloc_arrays.c int test_core_arrays(void *ctx)
ctx                43 tools/testing/selftests/bpf/progs/test_core_reloc_flavors.c int test_core_flavors(void *ctx)
ctx                27 tools/testing/selftests/bpf/progs/test_core_reloc_ints.c int test_core_ints(void *ctx)
ctx                21 tools/testing/selftests/bpf/progs/test_core_reloc_kernel.c int test_core_kernel(void *ctx)
ctx                36 tools/testing/selftests/bpf/progs/test_core_reloc_misc.c int test_core_misc(void *ctx)
ctx                45 tools/testing/selftests/bpf/progs/test_core_reloc_mods.c int test_core_mods(void *ctx)
ctx                34 tools/testing/selftests/bpf/progs/test_core_reloc_nesting.c int test_core_nesting(void *ctx)
ctx                29 tools/testing/selftests/bpf/progs/test_core_reloc_primitives.c int test_core_primitives(void *ctx)
ctx                20 tools/testing/selftests/bpf/progs/test_core_reloc_ptr_as_arr.c int test_core_ptr_as_arr(void *ctx)
ctx                58 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c int bpf_prog1(void *ctx)
ctx                72 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 	data->kern_stack_size = bpf_get_stack(ctx, data->kern_stack,
ctx                74 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 	data->user_stack_size = bpf_get_stack(ctx, data->user_stack, max_len,
ctx                77 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 		ctx, data->user_stack_buildid, max_buildid_len,
ctx                79 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 	bpf_perf_event_output(ctx, &perfmap, 0, data, sizeof(*data));
ctx                86 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 	usize = bpf_get_stack(ctx, raw_data, max_len, BPF_F_USER_STACK);
ctx                90 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 	ksize = bpf_get_stack(ctx, raw_data + usize, max_len - usize, 0);
ctx                96 tools/testing/selftests/bpf/progs/test_get_stack_rawtp.c 		bpf_perf_event_output(ctx, &perfmap, 0, raw_data, total_size);
ctx                 9 tools/testing/selftests/bpf/progs/test_get_stack_rawtp_err.c int bpf_prog2(void *ctx)
ctx                15 tools/testing/selftests/bpf/progs/test_get_stack_rawtp_err.c 	error = bpf_get_stack(ctx, stack, 0, -1);
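
test_get_stack_rawtp.c demonstrates the capture-and-ship flow the excerpts hint at: bpf_get_stack() returns the number of bytes copied (or a negative error, as the _err variant deliberately provokes with size 0 and flags -1), and the result goes to userspace via bpf_perf_event_output(). A compact sketch, assuming the era's struct bpf_map_def map style (perfmap and get_kern_stack are illustrative names; the buffer is kept at 32 slots to stay under the 512-byte BPF stack limit):

#include <linux/bpf.h>
#include "bpf_helpers.h"

struct bpf_map_def SEC("maps") perfmap = {
	.type = BPF_MAP_TYPE_PERF_EVENT_ARRAY,
	.key_size = sizeof(int),
	.value_size = sizeof(__u32),
	.max_entries = 2,
};

struct stack_event {
	int kern_size;
	__u64 kern_stack[32];
};

SEC("raw_tracepoint/sys_enter")
int get_kern_stack(void *ctx)
{
	struct stack_event ev = {};

	/* Bytes copied on success, negative error otherwise. */
	ev.kern_size = bpf_get_stack(ctx, ev.kern_stack, sizeof(ev.kern_stack), 0);
	if (ev.kern_size < 0)
		return 0;

	bpf_perf_event_output(ctx, &perfmap, BPF_F_CURRENT_CPU, &ev, sizeof(ev));
	return 0;
}

char _license[] SEC("license") = "GPL";
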
ctx               454 tools/testing/selftests/bpf/progs/test_l4lb.c int balancer_ingress(struct __sk_buff *ctx)
ctx               456 tools/testing/selftests/bpf/progs/test_l4lb.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               457 tools/testing/selftests/bpf/progs/test_l4lb.c 	void *data = (void *)(long)ctx->data;
ctx               467 tools/testing/selftests/bpf/progs/test_l4lb.c 		return process_packet(data, nh_off, data_end, false, ctx);
ctx               469 tools/testing/selftests/bpf/progs/test_l4lb.c 		return process_packet(data, nh_off, data_end, true, ctx);
ctx               454 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c int balancer_ingress(struct __sk_buff *ctx)
ctx               456 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               457 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c 	void *data = (void *)(long)ctx->data;
ctx               467 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c 		return process_packet(data, nh_off, data_end, false, ctx);
ctx               469 tools/testing/selftests/bpf/progs/test_l4lb_noinline.c 		return process_packet(data, nh_off, data_end, true, ctx);
ctx                27 tools/testing/selftests/bpf/progs/test_map_in_map.c int xdp_mimtest0(struct xdp_md *ctx)
ctx                15 tools/testing/selftests/bpf/progs/test_perf_buffer.c int handle_sys_nanosleep_entry(struct pt_regs *ctx)
ctx                19 tools/testing/selftests/bpf/progs/test_perf_buffer.c 	bpf_perf_event_output(ctx, &perf_buf_map, BPF_F_CURRENT_CPU,
ctx                22 tools/testing/selftests/bpf/progs/test_send_signal_kern.c int bpf_send_signal_test(void *ctx)
ctx                54 tools/testing/selftests/bpf/progs/test_stacktrace_map.c int oncpu(struct sched_switch_args *ctx)
ctx                65 tools/testing/selftests/bpf/progs/test_stacktrace_map.c 	key = bpf_get_stackid(ctx, &stackmap, 0);
ctx                70 tools/testing/selftests/bpf/progs/test_stacktrace_map.c 			bpf_get_stack(ctx, stack_p, max_len, 0);
ctx                21 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
ctx                29 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c 	ret = bpf_sysctl_get_name(ctx, name, sizeof(name), 0);
ctx                42 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c int sysctl_tcp_mem(struct bpf_sysctl *ctx)
ctx                52 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c 	if (ctx->write)
ctx                55 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c 	if (!is_tcp_mem(ctx))
ctx                58 tools/testing/selftests/bpf/progs/test_sysctl_loop1.c 	ret = bpf_sysctl_get_current_value(ctx, value, MAX_VALUE_STR_LEN);
ctx                21 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c static __attribute__((noinline)) int is_tcp_mem(struct bpf_sysctl *ctx)
ctx                29 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c 	ret = bpf_sysctl_get_name(ctx, name, sizeof(name), 0);
ctx                43 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c int sysctl_tcp_mem(struct bpf_sysctl *ctx)
ctx                50 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c 	if (ctx->write)
ctx                53 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c 	if (!is_tcp_mem(ctx))
ctx                56 tools/testing/selftests/bpf/progs/test_sysctl_loop2.c 	ret = bpf_sysctl_get_current_value(ctx, value, MAX_VALUE_STR_LEN);
ctx                22 tools/testing/selftests/bpf/progs/test_sysctl_prog.c static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
ctx                30 tools/testing/selftests/bpf/progs/test_sysctl_prog.c 	ret = bpf_sysctl_get_name(ctx, name, sizeof(name), 0);
ctx                43 tools/testing/selftests/bpf/progs/test_sysctl_prog.c int sysctl_tcp_mem(struct bpf_sysctl *ctx)
ctx                50 tools/testing/selftests/bpf/progs/test_sysctl_prog.c 	if (ctx->write)
ctx                53 tools/testing/selftests/bpf/progs/test_sysctl_prog.c 	if (!is_tcp_mem(ctx))
ctx                56 tools/testing/selftests/bpf/progs/test_sysctl_prog.c 	ret = bpf_sysctl_get_current_value(ctx, value, MAX_VALUE_STR_LEN);
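
All three sysctl programs above share one shape: refuse writes via ctx->write, match the sysctl by name with bpf_sysctl_get_name(), then fetch the value with bpf_sysctl_get_current_value(). A sketch of that shape (sysctl_guard is an illustrative name; returning 0 from a cgroup/sysctl program rejects the access, 1 allows it):

#include <linux/bpf.h>
#include "bpf_helpers.h"

const char tcp_mem_name[] = "net/ipv4/tcp_mem";

SEC("cgroup/sysctl")
int sysctl_guard(struct bpf_sysctl *ctx)
{
	char name[32];
	char value[64];
	int ret, i;

	if (ctx->write)
		return 0;	/* deny writes outright */

	/* flags == 0 copies the full "net/ipv4/..." path into name. */
	ret = bpf_sysctl_get_name(ctx, name, sizeof(name), 0);
	if (ret != sizeof(tcp_mem_name) - 1)
		return 1;	/* different sysctl: allow */

#pragma unroll
	for (i = 0; i < sizeof(tcp_mem_name) - 1; i++)
		if (name[i] != tcp_mem_name[i])
			return 1;

	/* value now holds the current setting as a string, e.g. for range checks. */
	if (bpf_sysctl_get_current_value(ctx, value, sizeof(value)) < 0)
		return 0;

	return 1;
}

char _license[] SEC("license") = "GPL";
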
ctx                45 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c static __always_inline void check_syncookie(void *ctx, void *data,
ctx                82 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c 		sk = bpf_skc_lookup_tcp(ctx, &tup, sizeof(tup.ipv4),
ctx               114 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c 		sk = bpf_skc_lookup_tcp(ctx, &tup, sizeof(tup.ipv6),
ctx               160 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c int check_syncookie_xdp(struct xdp_md *ctx)
ctx               162 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c 	check_syncookie(ctx, (void *)(long)ctx->data,
ctx               163 tools/testing/selftests/bpf/progs/test_tcp_check_syncookie_kern.c 			(void *)(long)ctx->data_end);
ctx                20 tools/testing/selftests/bpf/progs/test_tracepoint.c int oncpu(struct sched_switch_args *ctx)
ctx                 9 tools/testing/selftests/bpf/progs/test_verif_scale1.c int balancer_ingress(struct __sk_buff *ctx)
ctx                11 tools/testing/selftests/bpf/progs/test_verif_scale1.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                12 tools/testing/selftests/bpf/progs/test_verif_scale1.c 	void *data = (void *)(long)ctx->data;
ctx                24 tools/testing/selftests/bpf/progs/test_verif_scale1.c 	ctx->tc_index = jhash(ptr, nh_off, ctx->cb[0] + i++); \
ctx                 9 tools/testing/selftests/bpf/progs/test_verif_scale2.c int balancer_ingress(struct __sk_buff *ctx)
ctx                11 tools/testing/selftests/bpf/progs/test_verif_scale2.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                12 tools/testing/selftests/bpf/progs/test_verif_scale2.c 	void *data = (void *)(long)ctx->data;
ctx                24 tools/testing/selftests/bpf/progs/test_verif_scale2.c 	ctx->tc_index = jhash(ptr, nh_off, ctx->cb[0] + i++); \
ctx                 9 tools/testing/selftests/bpf/progs/test_verif_scale3.c int balancer_ingress(struct __sk_buff *ctx)
ctx                11 tools/testing/selftests/bpf/progs/test_verif_scale3.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                12 tools/testing/selftests/bpf/progs/test_verif_scale3.c 	void *data = (void *)(long)ctx->data;
ctx                24 tools/testing/selftests/bpf/progs/test_verif_scale3.c 	ctx->tc_index = jhash(ptr, nh_off, ctx->cb[0] + i++); \
ctx                 9 tools/testing/selftests/bpf/progs/test_xdp_meta.c #define ctx_ptr(ctx, mem) (void *)(unsigned long)ctx->mem
ctx                12 tools/testing/selftests/bpf/progs/test_xdp_meta.c int ing_cls(struct __sk_buff *ctx)
ctx                17 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data_meta = ctx_ptr(ctx, data_meta);
ctx                18 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data_end  = ctx_ptr(ctx, data_end);
ctx                19 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data      = ctx_ptr(ctx, data);
ctx                32 tools/testing/selftests/bpf/progs/test_xdp_meta.c int ing_xdp(struct xdp_md *ctx)
ctx                37 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	ret = bpf_xdp_adjust_meta(ctx, -round_up(ETH_ALEN, 4));
ctx                41 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data_meta = ctx_ptr(ctx, data_meta);
ctx                42 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data_end  = ctx_ptr(ctx, data_end);
ctx                43 tools/testing/selftests/bpf/progs/test_xdp_meta.c 	data      = ctx_ptr(ctx, data);
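
The test_xdp_meta.c excerpts show the XDP metadata handshake: bpf_xdp_adjust_meta() with a negative delta grows the area between data_meta and data, the pointers are re-read from ctx afterwards, and every access is bounds-checked. A sketch of the XDP half (xdp_store_meta and the 0xcafef00d value are illustrative; a tc classifier on the same device would read the word back through __sk_buff->data_meta):

#include <linux/bpf.h>
#include "bpf_helpers.h"

#define ctx_ptr(ctx, mem) (void *)(unsigned long)ctx->mem

SEC("xdp")
int xdp_store_meta(struct xdp_md *ctx)
{
	void *data, *data_meta;
	__u32 *meta;

	/* Negative delta grows the metadata area by four bytes. */
	if (bpf_xdp_adjust_meta(ctx, -(int)sizeof(__u32)))
		return XDP_PASS;

	/* Re-derive the pointers: the helper invalidates the old ones. */
	data_meta = ctx_ptr(ctx, data_meta);
	data      = ctx_ptr(ctx, data);

	meta = data_meta;
	if ((void *)(meta + 1) > data)	/* verifier bounds check */
		return XDP_PASS;

	*meta = 0xcafef00d;
	return XDP_PASS;
}

char _license[] SEC("license") = "GPL";
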
ctx               801 tools/testing/selftests/bpf/progs/test_xdp_noinline.c int balancer_ingress(struct xdp_md *ctx)
ctx               803 tools/testing/selftests/bpf/progs/test_xdp_noinline.c 	void *data = (void *)(long)ctx->data;
ctx               804 tools/testing/selftests/bpf/progs/test_xdp_noinline.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               814 tools/testing/selftests/bpf/progs/test_xdp_noinline.c 		return process_packet(data, nh_off, data_end, 0, ctx);
ctx               816 tools/testing/selftests/bpf/progs/test_xdp_noinline.c 		return process_packet(data, nh_off, data_end, 1, ctx);
ctx               106 tools/testing/selftests/bpf/progs/test_xdp_vlan.c int  xdp_prognum0(struct xdp_md *ctx)
ctx               108 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               109 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data     = (void *)(long)ctx->data;
ctx               148 tools/testing/selftests/bpf/progs/test_xdp_vlan.c int  xdp_prognum1(struct xdp_md *ctx)
ctx               150 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               151 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data     = (void *)(long)ctx->data;
ctx               182 tools/testing/selftests/bpf/progs/test_xdp_vlan.c int  xdp_prognum2(struct xdp_md *ctx)
ctx               184 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               185 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data     = (void *)(long)ctx->data;
ctx               207 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	bpf_xdp_adjust_head(ctx, VLAN_HDR_SZ);
ctx               241 tools/testing/selftests/bpf/progs/test_xdp_vlan.c int  xdp_prognum3(struct xdp_md *ctx)
ctx               243 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               244 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	void *data     = (void *)(long)ctx->data;
ctx               259 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	bpf_xdp_adjust_head(ctx, VLAN_HDR_SZ);
ctx               271 tools/testing/selftests/bpf/progs/test_xdp_vlan.c int _tc_progA(struct __sk_buff *ctx)
ctx               273 tools/testing/selftests/bpf/progs/test_xdp_vlan.c 	bpf_skb_vlan_push(ctx, bpf_htons(ETH_P_8021Q), TESTVLAN);
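
test_xdp_vlan.c pairs XDP programs that pop the 802.1Q header (bpf_xdp_adjust_head() by VLAN_HDR_SZ) with a tc program that pushes it back, which is what the _tc_progA line shows. A sketch of the tc side, assuming the selftests' "bpf_endian.h" helper and an iproute2 attach along the lines of `tc filter add dev <dev> egress bpf da obj prog.o sec tc_vlan_push` (device and section names are placeholders):

#include <linux/bpf.h>
#include <linux/if_ether.h>
#include <linux/pkt_cls.h>
#include "bpf_helpers.h"
#include "bpf_endian.h"

#define TESTVLAN 4011	/* illustrative VLAN id */

SEC("tc_vlan_push")
int tc_vlan_push(struct __sk_buff *ctx)
{
	/* Re-add the 802.1Q tag that the XDP program stripped. */
	bpf_skb_vlan_push(ctx, bpf_htons(ETH_P_8021Q), TESTVLAN);
	return TC_ACT_OK;
}

char _license[] SEC("license") = "GPL";
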
ctx                 8 tools/testing/selftests/bpf/progs/xdp_dummy.c int xdp_dummy_prog(struct xdp_md *ctx)
ctx                59 tools/testing/selftests/bpf/progs/xdping_kern.c static __always_inline int icmp_check(struct xdp_md *ctx, int type)
ctx                61 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                62 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data = (void *)(long)ctx->data;
ctx                90 tools/testing/selftests/bpf/progs/xdping_kern.c int xdping_client(struct xdp_md *ctx)
ctx                92 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx                93 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data = (void *)(long)ctx->data;
ctx               104 tools/testing/selftests/bpf/progs/xdping_kern.c 	ret = icmp_check(ctx, ICMP_ECHOREPLY);
ctx               154 tools/testing/selftests/bpf/progs/xdping_kern.c int xdping_server(struct xdp_md *ctx)
ctx               156 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data_end = (void *)(long)ctx->data_end;
ctx               157 tools/testing/selftests/bpf/progs/xdping_kern.c 	void *data = (void *)(long)ctx->data;
ctx               164 tools/testing/selftests/bpf/progs/xdping_kern.c 	ret = icmp_check(ctx, ICMP_ECHO);
ctx                18 tools/testing/selftests/bpf/test_btf_dump.c void btf_dump_printf(void *ctx, const char *fmt, va_list args)
ctx                20 tools/testing/selftests/bpf/test_btf_dump.c 	vfprintf(ctx, fmt, args);
ctx                91 tools/testing/selftests/bpf/test_btf_dump.c 	test_case->opts.ctx = f;
ctx                22 tools/testing/selftests/bpf/test_hashmap.c size_t hash_fn(const void *k, void *ctx)
ctx                27 tools/testing/selftests/bpf/test_hashmap.c bool equal_fn(const void *a, const void *b, void *ctx)
ctx               252 tools/testing/selftests/bpf/test_hashmap.c size_t collision_hash_fn(const void *k, void *ctx)
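
test_btf_dump.c and test_hashmap.c illustrate the classic C callback convention behind most of these ctx parameters: the library stores one opaque void *ctx and hands it back to every user callback, so callbacks need no globals (btf_dump smuggles a FILE * through it; the hashmap threads it to hash_fn/equal_fn). A self-contained sketch of the idiom — a toy API, not the libbpf hashmap itself:

#include <stdio.h>

/* A toy iterator that forwards the caller's opaque ctx to each callback. */
typedef void (*visit_fn)(int value, void *ctx);

static void for_each(const int *vals, int n, visit_fn fn, void *ctx)
{
	for (int i = 0; i < n; i++)
		fn(vals[i], ctx);
}

static void sum_visitor(int value, void *ctx)
{
	*(long *)ctx += value;	/* ctx recovers the caller's accumulator */
}

int main(void)
{
	int vals[] = { 1, 2, 3, 4 };
	long sum = 0;

	for_each(vals, 4, sum_visitor, &sum);
	printf("sum = %ld\n", sum);	/* prints: sum = 10 */
	return 0;
}
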
ctx                34 tools/testing/selftests/bpf/test_tcpnotify_user.c static void dummyfn(void *ctx, int cpu, void *data, __u32 size)
ctx                57 tools/testing/selftests/powerpc/alignment/alignment_handler.c void sighandler(int sig, siginfo_t *info, void *ctx)
ctx                59 tools/testing/selftests/powerpc/alignment/alignment_handler.c 	ucontext_t *ucp = ctx;
ctx                20 tools/testing/selftests/powerpc/alignment/copy_first_unaligned.c 	ucontext_t *ctx = ptr;
ctx                22 tools/testing/selftests/powerpc/alignment/copy_first_unaligned.c 	unsigned int *pc = (unsigned int *)ctx->uc_mcontext.gp_regs[PT_NIP];
ctx                24 tools/testing/selftests/powerpc/alignment/copy_first_unaligned.c 	unsigned int *pc = (unsigned int *)ctx->uc_mcontext.uc_regs->gregs[PT_NIP];
ctx               246 tools/testing/selftests/powerpc/utils.c 	ucontext_t *ctx = (ucontext_t *)unused;
ctx               247 tools/testing/selftests/powerpc/utils.c 	unsigned long *pc = &UCONTEXT_NIA(ctx);
ctx                56 tools/testing/selftests/x86/entry_from_vm86.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                58 tools/testing/selftests/x86/entry_from_vm86.c 	if (ctx->uc_mcontext.gregs[REG_EFL] & X86_EFLAGS_VM ||
ctx                59 tools/testing/selftests/x86/entry_from_vm86.c 	    (ctx->uc_mcontext.gregs[REG_CS] & 3) != 3) {
ctx                73 tools/testing/selftests/x86/entry_from_vm86.c 	       (unsigned long)ctx->uc_mcontext.gregs[REG_EFL],
ctx                74 tools/testing/selftests/x86/entry_from_vm86.c 	       (unsigned short)ctx->uc_mcontext.gregs[REG_CS]);
ctx                66 tools/testing/selftests/x86/fsgsbase.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                76 tools/testing/selftests/x86/fsgsbase.c 	ctx->uc_mcontext.gregs[REG_RIP] += 4;	/* Skip the faulting mov */
ctx               334 tools/testing/selftests/x86/fsgsbase.c static void *threadproc(void *ctx)
ctx               435 tools/testing/selftests/x86/ldt_gdt.c static void *threadproc(void *ctx)
ctx               119 tools/testing/selftests/x86/mov_ss_trap.c 	ucontext_t *ctx = ctx_void;
ctx               122 tools/testing/selftests/x86/mov_ss_trap.c 	       (unsigned long)ctx->uc_mcontext.gregs[REG_IP],
ctx               123 tools/testing/selftests/x86/mov_ss_trap.c 	       !!(ctx->uc_mcontext.gregs[REG_EFL] & X86_EFLAGS_RF));
ctx               128 tools/testing/selftests/x86/mov_ss_trap.c 	ucontext_t *ctx = ctx_void;
ctx               131 tools/testing/selftests/x86/mov_ss_trap.c 	       (unsigned long)ctx->uc_mcontext.gregs[REG_IP]);
ctx               136 tools/testing/selftests/x86/mov_ss_trap.c 	ucontext_t *ctx = ctx_void;
ctx               139 tools/testing/selftests/x86/mov_ss_trap.c 	       (unsigned long)ctx->uc_mcontext.gregs[REG_IP]);
ctx               307 tools/testing/selftests/x86/sigreturn.c static unsigned short *ssptr(ucontext_t *ctx)
ctx               309 tools/testing/selftests/x86/sigreturn.c 	struct selectors *sels = (void *)&ctx->uc_mcontext.gregs[REG_CSGSFS];
ctx               313 tools/testing/selftests/x86/sigreturn.c static unsigned short *csptr(ucontext_t *ctx)
ctx               315 tools/testing/selftests/x86/sigreturn.c 	struct selectors *sels = (void *)&ctx->uc_mcontext.gregs[REG_CSGSFS];
ctx               323 tools/testing/selftests/x86/sigreturn.c static greg_t *ssptr(ucontext_t *ctx)
ctx               325 tools/testing/selftests/x86/sigreturn.c 	return &ctx->uc_mcontext.gregs[REG_SS];
ctx               328 tools/testing/selftests/x86/sigreturn.c static greg_t *csptr(ucontext_t *ctx)
ctx               330 tools/testing/selftests/x86/sigreturn.c 	return &ctx->uc_mcontext.gregs[REG_CS];
ctx               394 tools/testing/selftests/x86/sigreturn.c static void validate_signal_ss(int sig, ucontext_t *ctx)
ctx               397 tools/testing/selftests/x86/sigreturn.c 	bool was_64bit = (cs_bitness(*csptr(ctx)) == 64);
ctx               399 tools/testing/selftests/x86/sigreturn.c 	if (!(ctx->uc_flags & UC_SIGCONTEXT_SS)) {
ctx               411 tools/testing/selftests/x86/sigreturn.c 	if (!!(ctx->uc_flags & UC_STRICT_RESTORE_SS) != was_64bit) {
ctx               417 tools/testing/selftests/x86/sigreturn.c 	if (is_valid_ss(*ssptr(ctx))) {
ctx               425 tools/testing/selftests/x86/sigreturn.c 		if (hw_ss != *ssptr(ctx)) {
ctx               440 tools/testing/selftests/x86/sigreturn.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx               442 tools/testing/selftests/x86/sigreturn.c 	validate_signal_ss(sig, ctx);
ctx               444 tools/testing/selftests/x86/sigreturn.c 	memcpy(&initial_regs, &ctx->uc_mcontext.gregs, sizeof(gregset_t));
ctx               446 tools/testing/selftests/x86/sigreturn.c 	*csptr(ctx) = sig_cs;
ctx               447 tools/testing/selftests/x86/sigreturn.c 	*ssptr(ctx) = sig_ss;
ctx               449 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_mcontext.gregs[REG_IP] =
ctx               451 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_mcontext.gregs[REG_SP] = (unsigned long)0x8badf00d5aadc0deULL;
ctx               452 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_mcontext.gregs[REG_CX] = 0;
ctx               463 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_mcontext.gregs[REG_DS] = 0;
ctx               464 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_mcontext.gregs[REG_ES] = 0;
ctx               467 tools/testing/selftests/x86/sigreturn.c 	memcpy(&requested_regs, &ctx->uc_mcontext.gregs, sizeof(gregset_t));
ctx               468 tools/testing/selftests/x86/sigreturn.c 	requested_regs[REG_CX] = *ssptr(ctx);	/* The asm code does this. */
ctx               480 tools/testing/selftests/x86/sigreturn.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx               482 tools/testing/selftests/x86/sigreturn.c 	validate_signal_ss(sig, ctx);
ctx               484 tools/testing/selftests/x86/sigreturn.c 	sig_err = ctx->uc_mcontext.gregs[REG_ERR];
ctx               485 tools/testing/selftests/x86/sigreturn.c 	sig_trapno = ctx->uc_mcontext.gregs[REG_TRAPNO];
ctx               490 tools/testing/selftests/x86/sigreturn.c 	greg_t asm_ss = ctx->uc_mcontext.gregs[REG_CX];
ctx               494 tools/testing/selftests/x86/sigreturn.c 		       ss, *ssptr(ctx), (unsigned long long)asm_ss);
ctx               498 tools/testing/selftests/x86/sigreturn.c 	memcpy(&resulting_regs, &ctx->uc_mcontext.gregs, sizeof(gregset_t));
ctx               499 tools/testing/selftests/x86/sigreturn.c 	memcpy(&ctx->uc_mcontext.gregs, &initial_regs, sizeof(gregset_t));
ctx               503 tools/testing/selftests/x86/sigreturn.c 		if (ctx->uc_flags & UC_STRICT_RESTORE_SS) {
ctx               513 tools/testing/selftests/x86/sigreturn.c 			*ssptr(ctx) = 0;
ctx               525 tools/testing/selftests/x86/sigreturn.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx               527 tools/testing/selftests/x86/sigreturn.c 	if (!(ctx->uc_flags & UC_STRICT_RESTORE_SS)) {
ctx               533 tools/testing/selftests/x86/sigreturn.c 	ctx->uc_flags &= ~UC_STRICT_RESTORE_SS;
ctx               534 tools/testing/selftests/x86/sigreturn.c 	*ssptr(ctx) = 0;
ctx                75 tools/testing/selftests/x86/single_step_syscall.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                89 tools/testing/selftests/x86/single_step_syscall.c 		       (unsigned long)ctx->uc_mcontext.gregs[REG_IP]);
ctx                69 tools/testing/selftests/x86/syscall_arg_fault.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                70 tools/testing/selftests/x86/syscall_arg_fault.c 	long ax = (long)ctx->uc_mcontext.gregs[REG_AX];
ctx                93 tools/testing/selftests/x86/syscall_arg_fault.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                94 tools/testing/selftests/x86/syscall_arg_fault.c 	unsigned short *ip = (unsigned short *)ctx->uc_mcontext.gregs[REG_IP];
ctx               110 tools/testing/selftests/x86/syscall_arg_fault.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx               111 tools/testing/selftests/x86/syscall_arg_fault.c 	unsigned short *ip = (unsigned short *)ctx->uc_mcontext.gregs[REG_IP];
ctx                71 tools/testing/selftests/x86/sysret_rip.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                73 tools/testing/selftests/x86/sysret_rip.c 	if (rip != ctx->uc_mcontext.gregs[REG_RIP]) {
ctx                75 tools/testing/selftests/x86/sysret_rip.c 		       rip, (unsigned long)ctx->uc_mcontext.gregs[REG_RIP]);
ctx                80 tools/testing/selftests/x86/sysret_rip.c 	memcpy(&ctx->uc_mcontext.gregs, &initial_regs, sizeof(gregset_t));
ctx                87 tools/testing/selftests/x86/sysret_rip.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx                89 tools/testing/selftests/x86/sysret_rip.c 	memcpy(&initial_regs, &ctx->uc_mcontext.gregs, sizeof(gregset_t));
ctx                92 tools/testing/selftests/x86/sysret_rip.c 	ctx->uc_mcontext.gregs[REG_RIP] = rip;
ctx                93 tools/testing/selftests/x86/sysret_rip.c 	ctx->uc_mcontext.gregs[REG_RCX] = rip;
ctx                96 tools/testing/selftests/x86/sysret_rip.c 	assert(ctx->uc_mcontext.gregs[REG_EFL] ==
ctx                97 tools/testing/selftests/x86/sysret_rip.c 	       ctx->uc_mcontext.gregs[REG_R11]);
ctx               115 tools/testing/selftests/x86/sysret_rip.c 	ucontext_t *ctx = (ucontext_t*)ctx_void;
ctx               117 tools/testing/selftests/x86/sysret_rip.c 	if (rip != ctx->uc_mcontext.gregs[REG_RIP]) {
ctx               119 tools/testing/selftests/x86/sysret_rip.c 		       rip, (unsigned long)ctx->uc_mcontext.gregs[REG_RIP]);
ctx                23 tools/testing/selftests/x86/sysret_ss_attrs.c static void *threadproc(void *ctx)
ctx               185 tools/testing/selftests/x86/test_vsyscall.c 	ucontext_t *ctx = (ucontext_t *)ctx_void;
ctx               187 tools/testing/selftests/x86/test_vsyscall.c 	segv_err =  ctx->uc_mcontext.gregs[REG_ERR];
ctx               513 tools/testing/selftests/x86/test_vsyscall.c 	ucontext_t *ctx = (ucontext_t *)ctx_void;
ctx               514 tools/testing/selftests/x86/test_vsyscall.c 	unsigned long ip = ctx->uc_mcontext.gregs[REG_RIP];
ctx                87 tools/testing/selftests/x86/unwind_vdso.c _Unwind_Reason_Code trace_fn(struct _Unwind_Context * ctx, void *opaque)
ctx                90 tools/testing/selftests/x86/unwind_vdso.c 	unsigned long ip = _Unwind_GetIP(ctx);
ctx               102 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long eax = _Unwind_GetGR(ctx, 0);
ctx               103 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long ecx = _Unwind_GetGR(ctx, 1);
ctx               104 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long edx = _Unwind_GetGR(ctx, 2);
ctx               105 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long ebx = _Unwind_GetGR(ctx, 3);
ctx               106 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long ebp = _Unwind_GetGR(ctx, 5);
ctx               107 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long esi = _Unwind_GetGR(ctx, 6);
ctx               108 tools/testing/selftests/x86/unwind_vdso.c 		unsigned long edi = _Unwind_GetGR(ctx, 7);
ctx               128 tools/testing/selftests/x86/unwind_vdso.c 	ucontext_t *ctx = (ucontext_t *)ctx_void;
ctx               130 tools/testing/selftests/x86/unwind_vdso.c 	unsigned long ip = ctx->uc_mcontext.gregs[REG_EIP];
ctx               136 tools/testing/selftests/x86/unwind_vdso.c 		return_address = *(unsigned long *)(unsigned long)ctx->uc_mcontext.gregs[REG_ESP];
ctx               146 tools/testing/selftests/x86/unwind_vdso.c 		ctx->uc_mcontext.gregs[REG_EFL] &= ~X86_EFLAGS_TF;
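
Everything from entry_from_vm86.c down to unwind_vdso.c relies on one technique: install a handler with SA_SIGINFO, cast the third argument to ucontext_t *, and inspect or rewrite the saved registers via uc_mcontext.gregs[] (sigreturn.c goes as far as replacing CS/SS and the IP/SP slots before returning). A self-contained x86-64 sketch; REG_RIP needs _GNU_SOURCE, and the printf/exit in the handler mirror the selftests rather than strict async-signal-safety:

#define _GNU_SOURCE
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ucontext.h>

static void segv_handler(int sig, siginfo_t *info, void *ctx_void)
{
	ucontext_t *ctx = (ucontext_t *)ctx_void;

	printf("SIGSEGV at rip=0x%llx, fault addr=%p\n",
	       (unsigned long long)ctx->uc_mcontext.gregs[REG_RIP],
	       info->si_addr);

	/* The selftests recover by rewriting REG_RIP/REG_EFL here;
	 * this sketch simply exits instead of skipping the faulting insn. */
	exit(0);
}

int main(void)
{
	struct sigaction sa;

	memset(&sa, 0, sizeof(sa));
	sa.sa_sigaction = segv_handler;
	sa.sa_flags = SA_SIGINFO;
	sigemptyset(&sa.sa_mask);
	sigaction(SIGSEGV, &sa, NULL);

	*(volatile int *)0 = 1;		/* deliberate fault */
	return 1;
}
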
ctx               242 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 	io_context_t ctx;
ctx               286 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 	memset(&ctx, 0, sizeof(ctx));
ctx               288 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 	if (io_setup(AIO_MAX, &ctx) < 0) {
ctx               340 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 			ret = io_submit(ctx, iobuf[i].cnt, iobuf[i].iocb);
ctx               361 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 		ret = io_getevents(ctx, 1, BUFS_MAX, e, NULL);
ctx               374 tools/usb/ffs-aio-example/multibuff/device_app/aio_multibuff.c 	io_destroy(ctx);
ctx                50 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	libusb_context *ctx;
ctx                66 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	state->ctx = NULL;
ctx                70 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	ret = libusb_init(&state->ctx);
ctx                76 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	cnt = libusb_get_device_list(state->ctx, &list);
ctx               137 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	libusb_exit(state->ctx);
ctx               151 tools/usb/ffs-aio-example/multibuff/host_app/test.c 	libusb_exit(state->ctx);
ctx               212 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 	io_context_t ctx;
ctx               260 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 	memset(&ctx, 0, sizeof(ctx));
ctx               262 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 	if (io_setup(2, &ctx) < 0) {
ctx               311 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 			ret = io_getevents(ctx, 1, 2, e, NULL);
ctx               331 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 			ret = io_submit(ctx, 1, &iocb_in);
ctx               345 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 			ret = io_submit(ctx, 1, &iocb_out);
ctx               356 tools/usb/ffs-aio-example/simple/device_app/aio_simple.c 	io_destroy(ctx);
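
The aio_simple and aio_multibuff device apps drive FunctionFS endpoint files with Linux-native AIO, and the excerpts trace the whole lifecycle: io_setup() on a zeroed io_context_t, io_submit() to queue iocbs, io_getevents() to reap completions, io_destroy() to tear down. A standalone sketch of the same calls against an ordinary file (link with -laio; /etc/hostname is just a stand-in for the ep1/ep2 descriptors the examples use):

#include <fcntl.h>
#include <libaio.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

int main(void)
{
	io_context_t ctx;
	struct iocb iocb, *iocbs[1] = { &iocb };
	struct io_event ev;
	char buf[64];
	int fd, ret;

	memset(&ctx, 0, sizeof(ctx));	/* io_setup() requires a zeroed context */
	if (io_setup(8, &ctx) < 0) {
		fprintf(stderr, "unable to setup aio\n");
		return 1;
	}

	fd = open("/etc/hostname", O_RDONLY);
	if (fd < 0) {
		perror("open");
		return 1;
	}

	io_prep_pread(&iocb, fd, buf, sizeof(buf), 0);
	ret = io_submit(ctx, 1, iocbs);
	if (ret != 1) {
		fprintf(stderr, "io_submit: %d\n", ret);
		return 1;
	}

	/* Block until the single outstanding request completes. */
	ret = io_getevents(ctx, 1, 1, &ev, NULL);
	if (ret == 1)
		printf("read %ld bytes\n", (long)ev.res);

	close(fd);
	io_destroy(ctx);
	return 0;
}
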
ctx                50 tools/usb/ffs-aio-example/simple/host_app/test.c 	libusb_context *ctx;
ctx                66 tools/usb/ffs-aio-example/simple/host_app/test.c 	state->ctx = NULL;
ctx                70 tools/usb/ffs-aio-example/simple/host_app/test.c 	ret = libusb_init(&state->ctx);
ctx                76 tools/usb/ffs-aio-example/simple/host_app/test.c 	cnt = libusb_get_device_list(state->ctx, &list);
ctx               137 tools/usb/ffs-aio-example/simple/host_app/test.c 	libusb_exit(state->ctx);
ctx               151 tools/usb/ffs-aio-example/simple/host_app/test.c 	libusb_exit(state->ctx);
ctx                61 tools/virtio/linux/virtio.h 				      bool ctx,
ctx                94 virt/kvm/arm/arch_timer.c 	struct arch_timer_context *ctx;
ctx               109 virt/kvm/arm/arch_timer.c 		ctx = map.direct_vtimer;
ctx               111 virt/kvm/arm/arch_timer.c 		ctx = map.direct_ptimer;
ctx               113 virt/kvm/arm/arch_timer.c 	if (kvm_timer_should_fire(ctx))
ctx               114 virt/kvm/arm/arch_timer.c 		kvm_timer_update_irq(vcpu, true, ctx);
ctx               161 virt/kvm/arm/arch_timer.c 		struct arch_timer_context *ctx = &vcpu->arch.timer_cpu.timers[i];
ctx               163 virt/kvm/arm/arch_timer.c 		WARN(ctx->loaded, "timer %d loaded\n", i);
ctx               164 virt/kvm/arm/arch_timer.c 		if (kvm_timer_irq_can_fire(ctx))
ctx               165 virt/kvm/arm/arch_timer.c 			min_delta = min(min_delta, kvm_timer_compute_delta(ctx));
ctx               201 virt/kvm/arm/arch_timer.c 	struct arch_timer_context *ctx;
ctx               205 virt/kvm/arm/arch_timer.c 	ctx = container_of(hrt, struct arch_timer_context, hrtimer);
ctx               206 virt/kvm/arm/arch_timer.c 	vcpu = ctx->vcpu;
ctx               208 virt/kvm/arm/arch_timer.c 	trace_kvm_timer_hrtimer_expire(ctx);
ctx               215 virt/kvm/arm/arch_timer.c 	ns = kvm_timer_compute_delta(ctx);
ctx               221 virt/kvm/arm/arch_timer.c 	kvm_timer_update_irq(vcpu, true, ctx);
ctx               313 virt/kvm/arm/arch_timer.c static void timer_emulate(struct arch_timer_context *ctx)
ctx               315 virt/kvm/arm/arch_timer.c 	bool should_fire = kvm_timer_should_fire(ctx);
ctx               317 virt/kvm/arm/arch_timer.c 	trace_kvm_timer_emulate(ctx, should_fire);
ctx               319 virt/kvm/arm/arch_timer.c 	if (should_fire != ctx->irq.level) {
ctx               320 virt/kvm/arm/arch_timer.c 		kvm_timer_update_irq(ctx->vcpu, should_fire, ctx);
ctx               329 virt/kvm/arm/arch_timer.c 	if (!kvm_timer_irq_can_fire(ctx)) {
ctx               330 virt/kvm/arm/arch_timer.c 		soft_timer_cancel(&ctx->hrtimer);
ctx               334 virt/kvm/arm/arch_timer.c 	soft_timer_start(&ctx->hrtimer, kvm_timer_compute_delta(ctx));
ctx               337 virt/kvm/arm/arch_timer.c static void timer_save_state(struct arch_timer_context *ctx)
ctx               339 virt/kvm/arm/arch_timer.c 	struct arch_timer_cpu *timer = vcpu_timer(ctx->vcpu);
ctx               340 virt/kvm/arm/arch_timer.c 	enum kvm_arch_timers index = arch_timer_ctx_index(ctx);
ctx               348 virt/kvm/arm/arch_timer.c 	if (!ctx->loaded)
ctx               353 virt/kvm/arm/arch_timer.c 		ctx->cnt_ctl = read_sysreg_el0(SYS_CNTV_CTL);
ctx               354 virt/kvm/arm/arch_timer.c 		ctx->cnt_cval = read_sysreg_el0(SYS_CNTV_CVAL);
ctx               362 virt/kvm/arm/arch_timer.c 		ctx->cnt_ctl = read_sysreg_el0(SYS_CNTP_CTL);
ctx               363 virt/kvm/arm/arch_timer.c 		ctx->cnt_cval = read_sysreg_el0(SYS_CNTP_CVAL);
ctx               374 virt/kvm/arm/arch_timer.c 	trace_kvm_timer_save_state(ctx);
ctx               376 virt/kvm/arm/arch_timer.c 	ctx->loaded = false;
ctx               416 virt/kvm/arm/arch_timer.c static void timer_restore_state(struct arch_timer_context *ctx)
ctx               418 virt/kvm/arm/arch_timer.c 	struct arch_timer_cpu *timer = vcpu_timer(ctx->vcpu);
ctx               419 virt/kvm/arm/arch_timer.c 	enum kvm_arch_timers index = arch_timer_ctx_index(ctx);
ctx               427 virt/kvm/arm/arch_timer.c 	if (ctx->loaded)
ctx               432 virt/kvm/arm/arch_timer.c 		write_sysreg_el0(ctx->cnt_cval, SYS_CNTV_CVAL);
ctx               434 virt/kvm/arm/arch_timer.c 		write_sysreg_el0(ctx->cnt_ctl, SYS_CNTV_CTL);
ctx               437 virt/kvm/arm/arch_timer.c 		write_sysreg_el0(ctx->cnt_cval, SYS_CNTP_CVAL);
ctx               439 virt/kvm/arm/arch_timer.c 		write_sysreg_el0(ctx->cnt_ctl, SYS_CNTP_CTL);
ctx               445 virt/kvm/arm/arch_timer.c 	trace_kvm_timer_restore_state(ctx);
ctx               447 virt/kvm/arm/arch_timer.c 	ctx->loaded = true;
ctx               467 virt/kvm/arm/arch_timer.c static inline void set_timer_irq_phys_active(struct arch_timer_context *ctx, bool active)
ctx               470 virt/kvm/arm/arch_timer.c 	r = irq_set_irqchip_state(ctx->host_timer_irq, IRQCHIP_STATE_ACTIVE, active);
ctx               474 virt/kvm/arm/arch_timer.c static void kvm_timer_vcpu_load_gic(struct arch_timer_context *ctx)
ctx               476 virt/kvm/arm/arch_timer.c 	struct kvm_vcpu *vcpu = ctx->vcpu;
ctx               485 virt/kvm/arm/arch_timer.c 	kvm_timer_update_irq(ctx->vcpu, kvm_timer_should_fire(ctx), ctx);
ctx               488 virt/kvm/arm/arch_timer.c 		phys_active = kvm_vgic_map_is_active(vcpu, ctx->irq.irq);
ctx               490 virt/kvm/arm/arch_timer.c 	phys_active |= ctx->irq.level;
ctx               492 virt/kvm/arm/arch_timer.c 	set_timer_irq_phys_active(ctx, phys_active);
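
A pattern worth calling out in the arch_timer.c lines: the hrtimer callback receives only the embedded struct hrtimer, and container_of(hrt, struct arch_timer_context, hrtimer) recovers the enclosing context. A plain-C sketch of how that macro works (struct names here are stand-ins, not the kernel types):

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct timer {				/* stand-in for struct hrtimer */
	int armed;
};

struct timer_context {			/* stand-in for struct arch_timer_context */
	unsigned long long cval;
	struct timer hrtimer;		/* embedded member, as in the kernel */
};

/* The callback sees only the embedded member, like an hrtimer handler. */
static void expire(struct timer *t)
{
	struct timer_context *ctx = container_of(t, struct timer_context, hrtimer);

	printf("cval = %llu\n", ctx->cval);	/* full context recovered */
}

int main(void)
{
	struct timer_context c = { .cval = 42 };

	expire(&c.hrtimer);
	return 0;
}
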
ctx               294 virt/kvm/arm/trace.h 	TP_PROTO(struct arch_timer_context *ctx),
ctx               295 virt/kvm/arm/trace.h 	TP_ARGS(ctx),
ctx               304 virt/kvm/arm/trace.h 		__entry->ctl			= ctx->cnt_ctl;
ctx               305 virt/kvm/arm/trace.h 		__entry->cval			= ctx->cnt_cval;
ctx               306 virt/kvm/arm/trace.h 		__entry->timer_idx		= arch_timer_ctx_index(ctx);
ctx               316 virt/kvm/arm/trace.h 	TP_PROTO(struct arch_timer_context *ctx),
ctx               317 virt/kvm/arm/trace.h 	TP_ARGS(ctx),
ctx               326 virt/kvm/arm/trace.h 		__entry->ctl			= ctx->cnt_ctl;
ctx               327 virt/kvm/arm/trace.h 		__entry->cval			= ctx->cnt_cval;
ctx               328 virt/kvm/arm/trace.h 		__entry->timer_idx		= arch_timer_ctx_index(ctx);
ctx               338 virt/kvm/arm/trace.h 	TP_PROTO(struct arch_timer_context *ctx),
ctx               339 virt/kvm/arm/trace.h 	TP_ARGS(ctx),
ctx               346 virt/kvm/arm/trace.h 		__entry->timer_idx		= arch_timer_ctx_index(ctx);
ctx               353 virt/kvm/arm/trace.h 	TP_PROTO(struct arch_timer_context *ctx, bool should_fire),
ctx               354 virt/kvm/arm/trace.h 	TP_ARGS(ctx, should_fire),
ctx               362 virt/kvm/arm/trace.h 		__entry->timer_idx		= arch_timer_ctx_index(ctx);