context 59 arch/alpha/include/asm/cacheflush.h mm->context[smp_processor_id()] = 0; context 149 arch/alpha/include/asm/mmu_context.h mmc = next_mm->context[cpu]; context 152 arch/alpha/include/asm/mmu_context.h next_mm->context[cpu] = mmc; context 196 arch/alpha/include/asm/mmu_context.h if (!mm->context[cpu]) \ context 238 arch/alpha/include/asm/mmu_context.h mm->context[i] = 0; context 92 arch/alpha/include/asm/tlbflush.h unsigned long *mmc = &mm->context[smp_processor_id()]; context 659 arch/alpha/kernel/smp.c if (mm->context[cpu]) context 660 arch/alpha/kernel/smp.c mm->context[cpu] = 0; context 706 arch/alpha/kernel/smp.c if (mm->context[cpu]) context 707 arch/alpha/kernel/smp.c mm->context[cpu] = 0; context 760 arch/alpha/kernel/smp.c if (mm->context[cpu]) context 761 arch/alpha/kernel/smp.c mm->context[cpu] = 0; context 47 arch/alpha/mm/fault.c next_mm->context[smp_processor_id()] = mmc; context 221 arch/arc/include/asm/entry-arcv2.h ; INPUT: r0 has STAT32 of calling context context 240 arch/arc/include/asm/entry-arcv2.h ; INPUT: r0 has STAT32 of calling context context 48 arch/arc/include/asm/mmu_context.h #define asid_mm(mm, cpu) mm->context.asid[cpu] context 40 arch/arc/kernel/asm-offsets.c DEFINE(MM_CTXT, offsetof(struct mm_struct, context)); context 144 arch/arm/include/asm/elf.h (elf_addr_t)current->mm->context.vdso); \ context 27 arch/arm/include/asm/mmu.h #define ASID(mm) ((unsigned int)((mm)->context.id.counter & ~ASID_MASK)) context 32 arch/arm/include/asm/mmu_context.h atomic64_set(&mm->context.id, 0); context 53 arch/arm/include/asm/mmu_context.h if (unlikely(mm->context.vmalloc_seq != init_mm.context.vmalloc_seq)) context 64 arch/arm/include/asm/mmu_context.h mm->context.switch_pending = 1; context 76 arch/arm/include/asm/mmu_context.h if (mm && mm->context.switch_pending) { context 84 arch/arm/include/asm/mmu_context.h if (mm->context.switch_pending) { context 85 arch/arm/include/asm/mmu_context.h mm->context.switch_pending = 0; context 122 arch/arm/kernel/asm-offsets.c DEFINE(MM_CONTEXT_ID, offsetof(struct mm_struct, context.id.counter)); context 405 arch/arm/kernel/process.c current->mm->context.sigpage = new_vma->vm_start; context 450 arch/arm/kernel/process.c mm->context.sigpage = addr; context 175 arch/arm/kernel/signal.c struct sigcontext context; context 184 arch/arm/kernel/signal.c err |= __copy_from_user(&context, &sf->uc.uc_mcontext, sizeof(context)); context 186 arch/arm/kernel/signal.c regs->ARM_r0 = context.arm_r0; context 187 arch/arm/kernel/signal.c regs->ARM_r1 = context.arm_r1; context 188 arch/arm/kernel/signal.c regs->ARM_r2 = context.arm_r2; context 189 arch/arm/kernel/signal.c regs->ARM_r3 = context.arm_r3; context 190 arch/arm/kernel/signal.c regs->ARM_r4 = context.arm_r4; context 191 arch/arm/kernel/signal.c regs->ARM_r5 = context.arm_r5; context 192 arch/arm/kernel/signal.c regs->ARM_r6 = context.arm_r6; context 193 arch/arm/kernel/signal.c regs->ARM_r7 = context.arm_r7; context 194 arch/arm/kernel/signal.c regs->ARM_r8 = context.arm_r8; context 195 arch/arm/kernel/signal.c regs->ARM_r9 = context.arm_r9; context 196 arch/arm/kernel/signal.c regs->ARM_r10 = context.arm_r10; context 197 arch/arm/kernel/signal.c regs->ARM_fp = context.arm_fp; context 198 arch/arm/kernel/signal.c regs->ARM_ip = context.arm_ip; context 199 arch/arm/kernel/signal.c regs->ARM_sp = context.arm_sp; context 200 arch/arm/kernel/signal.c regs->ARM_lr = context.arm_lr; context 201 arch/arm/kernel/signal.c regs->ARM_pc = context.arm_pc; context 202 arch/arm/kernel/signal.c regs->ARM_cpsr = context.arm_cpsr; context 291 arch/arm/kernel/signal.c struct sigcontext context; context 294 arch/arm/kernel/signal.c context = (struct sigcontext) { context 319 arch/arm/kernel/signal.c err |= __copy_to_user(&sf->uc.uc_mcontext, &context, sizeof(context)); context 460 arch/arm/kernel/signal.c retcode = mm->context.sigpage + signal_return_offset + context 60 arch/arm/kernel/vdso.c current->mm->context.vdso = new_vma->vm_start; context 247 arch/arm/kernel/vdso.c mm->context.vdso = 0; context 264 arch/arm/kernel/vdso.c mm->context.vdso = addr; context 458 arch/arm/mach-davinci/board-da830-evm.c int gpio, unsigned ngpio, void *context) context 473 arch/arm/mach-davinci/board-da830-evm.c unsigned ngpio, void *context) context 315 arch/arm/mach-omap1/board-h2.c static int tps_setup(struct i2c_client *client, void *context) context 192 arch/arm/mach-omap1/board-osk.c static int osk_tps_setup(struct i2c_client *client, void *context) context 141 arch/arm/mach-omap2/clockdomain.h u32 context; context 360 arch/arm/mach-omap2/cm33xx.c clkdm->context = am33xx_cm_read_reg_bits(clkdm->cm_inst, context 375 arch/arm/mach-omap2/cm33xx.c switch (clkdm->context) { context 489 arch/arm/mach-omap2/cminst44xx.c clkdm->context = omap4_cminst_read_inst_reg(clkdm->prcm_partition, context 493 arch/arm/mach-omap2/cminst44xx.c clkdm->context &= OMAP4430_MODULEMODE_MASK; context 505 arch/arm/mach-omap2/cminst44xx.c switch (clkdm->context) { context 144 arch/arm/mach-omap2/powerdomain.h u32 context; context 347 arch/arm/mach-omap2/prm33xx.c pwrdm->context = am33xx_prm_read_reg(pwrdm->prcm_offs, context 353 arch/arm/mach-omap2/prm33xx.c pwrdm->context &= ~AM33XX_LOWPOWERSTATECHANGE_MASK; context 363 arch/arm/mach-omap2/prm33xx.c am33xx_prm_write_reg(pwrdm->context, pwrdm->prcm_offs, context 368 arch/arm/mach-omap2/prm33xx.c ctrl = OMAP_POWERSTATEST_MASK & pwrdm->context; context 685 arch/arm/mach-omap2/prm44xx.c pwrdm->context = omap4_prminst_read_inst_reg(pwrdm->prcm_partition, context 693 arch/arm/mach-omap2/prm44xx.c pwrdm->context &= ~OMAP4430_LOWPOWERSTATECHANGE_MASK; context 711 arch/arm/mach-omap2/prm44xx.c omap4_prminst_write_inst_reg(pwrdm->context, context 718 arch/arm/mach-omap2/prm44xx.c ctrl = OMAP_POWERSTATEST_MASK & pwrdm->context; context 108 arch/arm/mach-prima2/rtciobrg.c static int regmap_iobg_regwrite(void *context, unsigned int reg, context 115 arch/arm/mach-prima2/rtciobrg.c static int regmap_iobg_regread(void *context, unsigned int reg, context 544 arch/arm/mach-pxa/balloon3.c .context = NULL, context 789 arch/arm/mach-pxa/stargate2.c .context = NULL, context 741 arch/arm/mach-pxa/zeus.c unsigned ngpio, void *context) context 312 arch/arm/mach-s3c24xx/mach-osiris.c static int osiris_tps_setup(struct i2c_client *client, void *context) context 318 arch/arm/mach-s3c24xx/mach-osiris.c static int osiris_tps_remove(struct i2c_client *client, void *context) context 59 arch/arm/mm/context.c context_id = mm->context.id.counter; context 192 arch/arm/mm/context.c u64 asid = atomic64_read(&mm->context.id); context 243 arch/arm/mm/context.c if (unlikely(mm->context.vmalloc_seq != init_mm.context.vmalloc_seq)) context 253 arch/arm/mm/context.c asid = atomic64_read(&mm->context.id); context 260 arch/arm/mm/context.c asid = atomic64_read(&mm->context.id); context 263 arch/arm/mm/context.c atomic64_set(&mm->context.id, asid); context 121 arch/arm/mm/ioremap.c seq = init_mm.context.vmalloc_seq; context 126 arch/arm/mm/ioremap.c mm->context.vmalloc_seq = seq; context 127 arch/arm/mm/ioremap.c } while (seq != init_mm.context.vmalloc_seq); context 164 arch/arm/mm/ioremap.c init_mm.context.vmalloc_seq++; context 181 arch/arm/mm/ioremap.c if (current->active_mm->context.vmalloc_seq != init_mm.context.vmalloc_seq) context 146 arch/arm64/include/asm/elf.h (elf_addr_t)current->mm->context.vdso); \ context 215 arch/arm64/include/asm/elf.h (Elf64_Off)current->mm->context.vdso); \ context 30 arch/arm64/include/asm/mmu.h #define ASID(mm) ((mm)->context.id.counter & 0xffff) context 178 arch/arm64/include/asm/mmu_context.h #define init_new_context(tsk,mm) ({ atomic64_set(&(mm)->context.id, 0); 0; }) context 74 arch/arm64/kernel/asm-offsets.c DEFINE(MM_CONTEXT_ID, offsetof(struct mm_struct, context.id.counter)); context 40 arch/arm64/kernel/probes/uprobes.c if (mm->context.flags & MMCF_AARCH32) context 572 arch/arm64/kernel/process.c current->mm->context.flags = is_compat_task() ? MMCF_AARCH32 : 0; context 736 arch/arm64/kernel/signal.c sigtramp = VDSO_SYMBOL(current->mm->context.vdso, sigtramp); context 346 arch/arm64/kernel/signal32.c void *vdso_base = current->mm->context.vdso; context 374 arch/arm64/kernel/signal32.c retcode = (unsigned long)current->mm->context.vdso + context 95 arch/arm64/kernel/vdso.c current->mm->context.vdso = (void *)new_vma->vm_start; context 164 arch/arm64/kernel/vdso.c mm->context.vdso = (void *)vdso_base; context 175 arch/arm64/kernel/vdso.c mm->context.vdso = NULL; context 337 arch/arm64/kernel/vdso.c mm->context.vdso = (void *)addr; context 137 arch/arm64/mm/context.c u64 asid = atomic64_read(&mm->context.id); context 191 arch/arm64/mm/context.c asid = atomic64_read(&mm->context.id); context 216 arch/arm64/mm/context.c asid = atomic64_read(&mm->context.id); context 219 arch/arm64/mm/context.c atomic64_set(&mm->context.id, asid); context 24 arch/csky/include/asm/mmu_context.h #define cpu_asid(mm) (atomic64_read(&mm->context.asid) & ASID_MASK) context 26 arch/csky/include/asm/mmu_context.h #define init_new_context(tsk,mm) ({ atomic64_set(&(mm)->context.asid, 0); 0; }) context 45 arch/csky/include/asm/mmu_context.h write_mmu_entryhi(next->context.asid.counter); context 137 arch/csky/kernel/signal.c struct csky_vdso *vdso = current->mm->context.vdso; context 70 arch/csky/kernel/vdso.c mm->context.vdso = (void *)addr; context 82 arch/csky/kernel/vdso.c if (vma->vm_start == (long)vma->vm_mm->context.vdso) context 21 arch/csky/mm/context.c asid_check_context(&asid_info, &mm->context.asid, cpu, mm); context 64 arch/hexagon/include/asm/mmu_context.h if (next->context.generation < prev->context.generation) { context 68 arch/hexagon/include/asm/mmu_context.h next->context.generation = prev->context.generation; context 71 arch/hexagon/include/asm/mmu_context.h __vmnewmap((void *)next->context.ptbase); context 36 arch/hexagon/include/asm/pgalloc.h mm->context.generation = kmap_generation; context 39 arch/hexagon/include/asm/pgalloc.h mm->context.ptbase = __pa(pgd); context 78 arch/hexagon/include/asm/pgalloc.h mm->context.generation = kmap_generation; context 79 arch/hexagon/include/asm/pgalloc.h current->active_mm->context.generation = kmap_generation; context 101 arch/hexagon/kernel/signal.c struct hexagon_vdso *vdso = current->mm->context.vdso; context 76 arch/hexagon/kernel/vdso.c mm->context.vdso = (void *)vdso_base; context 85 arch/hexagon/kernel/vdso.c if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso) context 71 arch/hexagon/mm/init.c init_mm.context.ptbase = __pa(init_mm.pgd); context 30 arch/hexagon/mm/vm_tlb.c if (mm->context.ptbase == current->active_mm->context.ptbase) context 59 arch/hexagon/mm/vm_tlb.c if (current->active_mm->context.ptbase == mm->context.ptbase) context 70 arch/hexagon/mm/vm_tlb.c if (mm->context.ptbase == current->active_mm->context.ptbase) context 116 arch/ia64/hp/common/aml_nfw.c struct ia64_nfw_context *context = handler_context; context 117 arch/ia64/hp/common/aml_nfw.c u8 *offset = (u8 *) context + address; context 129 arch/ia64/hp/common/aml_nfw.c aml_nfw_execute(context); context 108 arch/ia64/include/asm/mca.h unsigned long context; /* 0 if return to same context context 83 arch/ia64/include/asm/mmu_context.h nv_mm_context_t context = mm->context; context 85 arch/ia64/include/asm/mmu_context.h if (likely(context)) context 90 arch/ia64/include/asm/mmu_context.h context = mm->context; context 91 arch/ia64/include/asm/mmu_context.h if (context == 0) { context 101 arch/ia64/include/asm/mmu_context.h mm->context = context = ia64_ctx.next++; context 102 arch/ia64/include/asm/mmu_context.h __set_bit(context, ia64_ctx.bitmap); context 112 arch/ia64/include/asm/mmu_context.h return context; context 122 arch/ia64/include/asm/mmu_context.h mm->context = 0; context 133 arch/ia64/include/asm/mmu_context.h reload_context (nv_mm_context_t context) context 140 arch/ia64/include/asm/mmu_context.h rid = context << 3; /* make space for encoding the region number */ context 167 arch/ia64/include/asm/mmu_context.h nv_mm_context_t context; context 170 arch/ia64/include/asm/mmu_context.h context = get_mmu_context(mm); context 173 arch/ia64/include/asm/mmu_context.h reload_context(context); context 178 arch/ia64/include/asm/mmu_context.h } while (unlikely(context != mm->context)); context 82 arch/ia64/include/asm/tlbflush.h set_bit(mm->context, ia64_ctx.flushmap); context 83 arch/ia64/include/asm/tlbflush.h mm->context = 0; context 109 arch/ia64/include/asm/tlbflush.h vma->vm_mm->context = 0; context 828 arch/ia64/kernel/acpi.c void *context, void **ret) context 242 arch/ia64/kernel/asm-offsets.c offsetof (struct ia64_sal_os_state, context)); context 184 arch/ia64/kernel/salinfo.c salinfo_platform_oemdata_cpu(void *context) context 186 arch/ia64/kernel/salinfo.c struct salinfo_platform_oemdata_parms *parms = context; context 383 arch/ia64/kernel/salinfo.c salinfo_log_read_cpu(void *context) context 385 arch/ia64/kernel/salinfo.c struct salinfo_data *data = context; context 454 arch/ia64/kernel/salinfo.c salinfo_log_clear_cpu(void *context) context 456 arch/ia64/kernel/salinfo.c struct salinfo_data *data = context; context 854 arch/ia64/kernel/unwind.c desc_abi (unsigned char abi, unsigned char context, struct unw_state_record *sr) context 856 arch/ia64/kernel/unwind.c if (abi == 3 && context == 'i') { context 862 arch/ia64/kernel/unwind.c __func__, abi, context); context 321 arch/ia64/mm/tlb.c mm->context = 0; context 36 arch/m68k/include/asm/mmu_context.h if (mm->context != NO_CONTEXT) context 49 arch/m68k/include/asm/mmu_context.h mm->context = ctx; context 56 arch/m68k/include/asm/mmu_context.h #define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0) context 63 arch/m68k/include/asm/mmu_context.h if (mm->context != NO_CONTEXT) { context 64 arch/m68k/include/asm/mmu_context.h clear_bit(mm->context, context_map); context 65 arch/m68k/include/asm/mmu_context.h mm->context = NO_CONTEXT; context 70 arch/m68k/include/asm/mmu_context.h static inline void set_context(mm_context_t context, pgd_t *pgd) context 72 arch/m68k/include/asm/mmu_context.h __asm__ __volatile__ ("movec %0,%%asid" : : "d" (context)); context 79 arch/m68k/include/asm/mmu_context.h set_context(tsk->mm->context, next->pgd); context 90 arch/m68k/include/asm/mmu_context.h set_context(mm->context, mm->pgd); context 140 arch/m68k/include/asm/mmu_context.h asid = mm->context & 0xff; context 166 arch/m68k/include/asm/mmu_context.h extern void clear_context(unsigned long context); context 172 arch/m68k/include/asm/mmu_context.h mm->context = SUN3_INVALID_CONTEXT; context 180 arch/m68k/include/asm/mmu_context.h if (mm->context == SUN3_INVALID_CONTEXT) context 181 arch/m68k/include/asm/mmu_context.h mm->context = get_free_context(mm); context 187 arch/m68k/include/asm/mmu_context.h if (mm->context != SUN3_INVALID_CONTEXT) context 188 arch/m68k/include/asm/mmu_context.h clear_context(mm->context); context 194 arch/m68k/include/asm/mmu_context.h sun3_put_context(mm->context); context 220 arch/m68k/include/asm/mmu_context.h mm->context = virt_to_phys(mm->pgd); context 229 arch/m68k/include/asm/mmu_context.h 0x80000000 | _PAGE_TABLE, mm->context context 267 arch/m68k/include/asm/mmu_context.h asm volatile ("movec %0,%%urp" : : "r" (mm->context)); context 298 arch/m68k/include/asm/mmu_context.h next_mm->context = virt_to_phys(next_mm->pgd); context 182 arch/m68k/include/asm/oplib.h int context, char *program_counter); context 208 arch/m68k/include/asm/oplib.h extern void prom_putsegment(int context, unsigned long virt_addr, context 152 arch/m68k/include/asm/tlbflush.h sun3_put_context(mm->context); context 178 arch/m68k/include/asm/tlbflush.h sun3_put_context(vma->vm_mm->context); context 201 arch/m68k/include/asm/tlbflush.h sun3_put_context(mm->context); context 207 arch/m68k/include/asm/tlbflush.h if(pmeg_ctx[seg] == mm->context) { context 697 arch/m68k/kernel/signal.c struct sigcontext context; context 706 arch/m68k/kernel/signal.c if (copy_from_user(&context, usc, sizeof(context))) context 710 arch/m68k/kernel/signal.c regs->d0 = context.sc_d0; context 711 arch/m68k/kernel/signal.c regs->d1 = context.sc_d1; context 712 arch/m68k/kernel/signal.c regs->a0 = context.sc_a0; context 713 arch/m68k/kernel/signal.c regs->a1 = context.sc_a1; context 714 arch/m68k/kernel/signal.c regs->sr = (regs->sr & 0xff00) | (context.sc_sr & 0xff); context 715 arch/m68k/kernel/signal.c regs->pc = context.sc_pc; context 717 arch/m68k/kernel/signal.c wrusp(context.sc_usp); context 718 arch/m68k/kernel/signal.c formatvec = context.sc_formatvec; context 720 arch/m68k/kernel/signal.c err = restore_fpu_state(&context); context 891 arch/m68k/kernel/signal.c struct sigcontext context; context 914 arch/m68k/kernel/signal.c setup_sigcontext(&context, regs, set->sig[0]); context 915 arch/m68k/kernel/signal.c err |= copy_to_user (&frame->sc, &context, sizeof(context)); context 138 arch/m68k/mm/mcfmmu.c asid = mm->context & 0xff; context 28 arch/m68k/mm/sun3kmap.c extern void mmu_emu_map_pmeg (int context, int vaddr); context 210 arch/m68k/sun3/mmu_emu.c void clear_context(unsigned long context) context 215 arch/m68k/sun3/mmu_emu.c if(context) { context 216 arch/m68k/sun3/mmu_emu.c if(!ctx_alloc[context]) context 219 arch/m68k/sun3/mmu_emu.c ctx_alloc[context]->context = SUN3_INVALID_CONTEXT; context 220 arch/m68k/sun3/mmu_emu.c ctx_alloc[context] = (struct mm_struct *)0; context 226 arch/m68k/sun3/mmu_emu.c sun3_put_context(context); context 229 arch/m68k/sun3/mmu_emu.c if((pmeg_ctx[i] == context) && (pmeg_alloc[i] == 1)) { context 284 arch/m68k/sun3/mmu_emu.c inline void mmu_emu_map_pmeg (int context, int vaddr) context 299 arch/m68k/sun3/mmu_emu.c curr_pmeg, context, vaddr); context 306 arch/m68k/sun3/mmu_emu.c sun3_put_context(context); context 319 arch/m68k/sun3/mmu_emu.c sun3_put_context(context); context 326 arch/m68k/sun3/mmu_emu.c pmeg_ctx[curr_pmeg] = context; context 359 arch/m68k/sun3/mmu_emu.c unsigned char context; context 365 arch/m68k/sun3/mmu_emu.c context = 0; context 367 arch/m68k/sun3/mmu_emu.c context = current->mm->context; context 403 arch/m68k/sun3/mmu_emu.c mmu_emu_map_pmeg (context, vaddr); context 57 arch/microblaze/include/asm/mmu_context_mm.h extern void set_context(mm_context_t context, pgd_t *pgd); context 88 arch/microblaze/include/asm/mmu_context_mm.h if (mm->context != NO_CONTEXT) context 99 arch/microblaze/include/asm/mmu_context_mm.h mm->context = ctx; context 106 arch/microblaze/include/asm/mmu_context_mm.h # define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0) context 113 arch/microblaze/include/asm/mmu_context_mm.h if (mm->context != NO_CONTEXT) { context 114 arch/microblaze/include/asm/mmu_context_mm.h clear_bit(mm->context, context_map); context 115 arch/microblaze/include/asm/mmu_context_mm.h mm->context = NO_CONTEXT; context 125 arch/microblaze/include/asm/mmu_context_mm.h set_context(next->context, next->pgd); context 137 arch/microblaze/include/asm/mmu_context_mm.h set_context(mm->context, mm->pgd); context 476 arch/mips/include/asm/elf.h (unsigned long)current->mm->context.vdso); \ context 686 arch/mips/include/asm/kvm_host.h __BUILD_KVM_RW_HW(context, l, MIPS_CP0_TLB_CONTEXT, 0) context 109 arch/mips/include/asm/mmu_context.h return atomic64_read(&mm->context.mmid); context 111 arch/mips/include/asm/mmu_context.h return mm->context.asid[cpu]; context 118 arch/mips/include/asm/mmu_context.h atomic64_set(&mm->context.mmid, ctx); context 120 arch/mips/include/asm/mmu_context.h mm->context.asid[cpu] = ctx; context 151 arch/mips/include/asm/mmu_context.h mm->context.bd_emupage_allocmap = NULL; context 152 arch/mips/include/asm/mmu_context.h spin_lock_init(&mm->context.bd_emupage_lock); context 153 arch/mips/include/asm/mmu_context.h init_waitqueue_head(&mm->context.bd_emupage_queue); context 715 arch/mips/include/asm/sn/sn0/hubio.h context: 15, /* Bit vector: context 727 arch/mips/include/asm/sn/sn0/hubio.h #define icrbd_context icrbd_field_s.context context 189 arch/mips/kernel/asm-offsets.c OFFSET(MM_CONTEXT, mm_struct, context); context 806 arch/mips/kernel/signal.c void *vdso = current->mm->context.vdso; context 182 arch/mips/kernel/vdso.c mm->context.vdso = (void *)vdso_addr; context 366 arch/mips/kvm/entry.c offsetof(struct kvm, arch.gpa_mm.context.asid)); context 376 arch/mips/kvm/entry.c guest_kernel_mm.context.asid)); context 379 arch/mips/kvm/entry.c guest_user_mm.context.asid)); context 414 arch/mips/kvm/entry.c (int)offsetof(struct mm_struct, context.asid), T1); context 77 arch/mips/math-emu/dsemul.c mm_context_t *mm_ctx = ¤t->mm->context; context 127 arch/mips/math-emu/dsemul.c mm_context_t *mm_ctx = &mm->context; context 207 arch/mips/math-emu/dsemul.c mm_context_t *mm_ctx = &mm->context; context 175 arch/nds32/include/asm/elf.h (elf_addr_t)current->mm->context.vdso); \ context 15 arch/nds32/include/asm/mmu_context.h mm->context.id = 0; context 40 arch/nds32/include/asm/mmu_context.h mm->context.id = cid; context 46 arch/nds32/include/asm/mmu_context.h ((mm->context.id ^ cpu_last_cid) >> TLB_MISC_offCID >> CID_BITS)) context 262 arch/nds32/kernel/signal.c retcode = VDSO_SYMBOL(current->mm->context.vdso, rt_sigtramp); context 178 arch/nds32/kernel/vdso.c mm->context.vdso = (void *)vdso_base; context 192 arch/nds32/kernel/vdso.c mm->context.vdso = NULL; context 530 arch/nds32/mm/proc.c cid = (cid & ~TLB_MISC_mskCID) | mm->context.id; context 27 arch/nds32/mm/tlb.c ncid = (ocid & ~TLB_MISC_mskCID) | vma->vm_mm->context.id; context 44 arch/nds32/mm/tlb.c ncid = (ocid & ~TLB_MISC_mskCID) | vma->vm_mm->context.id; context 42 arch/nios2/include/asm/mmu_context.h mm->context = 0; context 53 arch/nios2/mm/mmu_context.c static void set_context(mm_context_t context) context 55 arch/nios2/mm/mmu_context.c set_mmu_pid(CTX_PID(context)); context 89 arch/nios2/mm/mmu_context.c if (unlikely(CTX_VERSION(next->context) != context 91 arch/nios2/mm/mmu_context.c next->context = get_new_context(); context 97 arch/nios2/mm/mmu_context.c set_context(next->context); context 108 arch/nios2/mm/mmu_context.c next->context = get_new_context(); context 109 arch/nios2/mm/mmu_context.c set_context(next->context); context 113 arch/nios2/mm/mmu_context.c unsigned long get_pid_from_context(mm_context_t *context) context 115 arch/nios2/mm/mmu_context.c return CTX_PID((*context)); context 106 arch/nios2/mm/tlb.c unsigned long mmu_pid = get_pid_from_context(&vma->vm_mm->context); context 116 arch/nios2/mm/tlb.c unsigned long mmu_pid = get_pid_from_context(&vma->vm_mm->context); context 259 arch/nios2/mm/tlb.c unsigned long mmu_pid = get_pid_from_context(&mm->context); context 262 arch/nios2/mm/tlb.c memset(&mm->context, 0, sizeof(mm_context_t)); context 165 arch/openrisc/mm/tlb.c mm->context = NO_CONTEXT; context 27 arch/parisc/include/asm/mmu_context.h mm->context = alloc_sid(); context 34 arch/parisc/include/asm/mmu_context.h free_sid(mm->context); context 35 arch/parisc/include/asm/mmu_context.h mm->context = 0; context 38 arch/parisc/include/asm/mmu_context.h static inline unsigned long __space_to_prot(mm_context_t context) context 41 arch/parisc/include/asm/mmu_context.h return context << 1; context 43 arch/parisc/include/asm/mmu_context.h return context >> (SPACEID_SHIFT - 1); context 47 arch/parisc/include/asm/mmu_context.h static inline void load_context(mm_context_t context) context 49 arch/parisc/include/asm/mmu_context.h mtsp(context, 3); context 50 arch/parisc/include/asm/mmu_context.h mtctl(__space_to_prot(context), 8); context 58 arch/parisc/include/asm/mmu_context.h load_context(next->context); context 90 arch/parisc/include/asm/mmu_context.h if (next->context == 0) context 91 arch/parisc/include/asm/mmu_context.h next->context = alloc_sid(); context 74 arch/parisc/include/asm/pgtable.h mtsp(mm->context, 1); context 252 arch/parisc/include/asm/processor.h __u32 spaceid = (__u32)current->mm->context; \ context 20 arch/parisc/include/asm/tlbflush.h __flush_tlb_range((vma)->vm_mm->context, start, end) context 55 arch/parisc/include/asm/tlbflush.h if (mm->context != 0) context 56 arch/parisc/include/asm/tlbflush.h free_sid(mm->context); context 57 arch/parisc/include/asm/tlbflush.h mm->context = alloc_sid(); context 59 arch/parisc/include/asm/tlbflush.h load_context(mm->context); context 556 arch/parisc/kernel/cache.c if (mm->context) context 562 arch/parisc/kernel/cache.c if (mm->context == mfsp(3)) { context 585 arch/parisc/kernel/cache.c if (unlikely(mm->context)) { context 603 arch/parisc/kernel/cache.c if (vma->vm_mm->context) context 609 arch/parisc/kernel/cache.c if (vma->vm_mm->context == mfsp(3)) { context 625 arch/parisc/kernel/cache.c if (unlikely(vma->vm_mm->context)) { context 639 arch/parisc/kernel/cache.c if (likely(vma->vm_mm->context)) { context 231 arch/powerpc/include/asm/book3s/32/pgtable.h extern int flush_hash_pages(unsigned context, unsigned long va, context 235 arch/powerpc/include/asm/book3s/32/pgtable.h extern void add_hash_page(unsigned context, unsigned long va, context 304 arch/powerpc/include/asm/book3s/32/pgtable.h static inline int __ptep_test_and_clear_young(unsigned int context, unsigned long addr, pte_t *ptep) context 310 arch/powerpc/include/asm/book3s/32/pgtable.h flush_hash_pages(context, addr, ptephys, 1); context 315 arch/powerpc/include/asm/book3s/32/pgtable.h __ptep_test_and_clear_young((__vma)->vm_mm->context.id, __addr, __ptep) context 766 arch/powerpc/include/asm/book3s/64/mmu-hash.h static inline unsigned long get_vsid(unsigned long context, unsigned long ea, context 784 arch/powerpc/include/asm/book3s/64/mmu-hash.h protovsid = (context << ESID_BITS) | context 790 arch/powerpc/include/asm/book3s/64/mmu-hash.h protovsid = (context << ESID_BITS_1T) | context 834 arch/powerpc/include/asm/book3s/64/mmu-hash.h unsigned long context; context 839 arch/powerpc/include/asm/book3s/64/mmu-hash.h context = get_kernel_context(ea); context 840 arch/powerpc/include/asm/book3s/64/mmu-hash.h return get_vsid(context, ea, ssize); context 261 arch/powerpc/include/asm/book3s/64/mmu.h unsigned long context = get_user_context(ctx, ea); context 263 arch/powerpc/include/asm/book3s/64/mmu.h return get_vsid(context, ea, ssize); context 146 arch/powerpc/include/asm/book3s/64/tlbflush.h if (atomic_read(&vma->vm_mm->context.copros) > 0) context 174 arch/powerpc/include/asm/elf.h VDSO_AUX_ENT(AT_SYSINFO_EHDR, current->mm->context.vdso_base); \ context 20 arch/powerpc/include/asm/mm-arch-hooks.h if (old_start == mm->context.vdso_base) context 21 arch/powerpc/include/asm/mm-arch-hooks.h mm->context.vdso_base = new_start; context 88 arch/powerpc/include/asm/mmu_context.h VM_WARN_ON(mm->context.extended_id[index]); context 89 arch/powerpc/include/asm/mmu_context.h mm->context.extended_id[index] = context_id; context 97 arch/powerpc/include/asm/mmu_context.h context_id = get_user_context(&mm->context, ea); context 136 arch/powerpc/include/asm/mmu_context.h atomic_inc(&mm->context.active_cpus); context 141 arch/powerpc/include/asm/mmu_context.h atomic_dec(&mm->context.active_cpus); context 151 arch/powerpc/include/asm/mmu_context.h if (atomic_inc_return(&mm->context.copros) == 1) context 180 arch/powerpc/include/asm/mmu_context.h c = atomic_dec_if_positive(&mm->context.copros); context 237 arch/powerpc/include/asm/mmu_context.h if (start <= mm->context.vdso_base && mm->context.vdso_base < end) context 238 arch/powerpc/include/asm/mmu_context.h mm->context.vdso_base = 0; context 289 arch/powerpc/include/asm/nohash/32/pgtable.h static inline int __ptep_test_and_clear_young(unsigned int context, unsigned long addr, pte_t *ptep) context 296 arch/powerpc/include/asm/nohash/32/pgtable.h __ptep_test_and_clear_young((__vma)->vm_mm->context.id, __addr, __ptep) context 74 arch/powerpc/include/asm/pkeys.h #define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map) context 66 arch/powerpc/include/asm/tlb.h if (atomic_read(&mm->context.active_cpus) > 1) context 72 arch/powerpc/include/asm/tlb.h WARN_ON(atomic_read(&mm->context.copros) > 0); context 79 arch/powerpc/include/asm/tlb.h atomic_set(&mm->context.active_cpus, 1); context 86 arch/powerpc/kernel/asm-offsets.c OFFSET(MMCONTEXTID, mm_struct, context.id); context 308 arch/powerpc/kernel/paca.c mm_context_t *context = &mm->context; context 310 arch/powerpc/kernel/paca.c get_paca()->mm_ctx_id = context->id; context 312 arch/powerpc/kernel/paca.c VM_BUG_ON(!mm_ctx_slb_addr_limit(context)); context 313 arch/powerpc/kernel/paca.c get_paca()->mm_ctx_slb_addr_limit = mm_ctx_slb_addr_limit(context); context 314 arch/powerpc/kernel/paca.c memcpy(&get_paca()->mm_ctx_low_slices_psize, mm_ctx_low_slices(context), context 316 arch/powerpc/kernel/paca.c memcpy(&get_paca()->mm_ctx_high_slices_psize, mm_ctx_high_slices(context), context 317 arch/powerpc/kernel/paca.c TASK_SLICE_ARRAY_SZ(context)); context 319 arch/powerpc/kernel/paca.c get_paca()->mm_ctx_user_psize = context->user_psize; context 320 arch/powerpc/kernel/paca.c get_paca()->mm_ctx_sllp = context->sllp; context 933 arch/powerpc/kernel/signal_32.c if (vdso32_rt_sigtramp && tsk->mm->context.vdso_base) { context 935 arch/powerpc/kernel/signal_32.c tramp = tsk->mm->context.vdso_base + vdso32_rt_sigtramp; context 1395 arch/powerpc/kernel/signal_32.c if (vdso32_sigtramp && tsk->mm->context.vdso_base) { context 1397 arch/powerpc/kernel/signal_32.c tramp = tsk->mm->context.vdso_base + vdso32_sigtramp; context 869 arch/powerpc/kernel/signal_64.c if (vdso64_rt_sigtramp && tsk->mm->context.vdso_base) { context 870 arch/powerpc/kernel/signal_64.c regs->link = tsk->mm->context.vdso_base + vdso64_rt_sigtramp; context 158 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = 0; context 193 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = vdso_base; context 210 arch/powerpc/kernel/vdso.c current->mm->context.vdso_base = 0; context 224 arch/powerpc/kernel/vdso.c if (vma->vm_mm && vma->vm_start == vma->vm_mm->context.vdso_base) context 308 arch/powerpc/mm/book3s32/mmu.c add_hash_page(mm->context.id, ea, pmd_val(*pmd)); context 78 arch/powerpc/mm/book3s32/mmu_context.c mm->context.id = __init_new_context(); context 98 arch/powerpc/mm/book3s32/mmu_context.c if (mm->context.id != NO_CONTEXT) { context 99 arch/powerpc/mm/book3s32/mmu_context.c __destroy_context(mm->context.id); context 100 arch/powerpc/mm/book3s32/mmu_context.c mm->context.id = NO_CONTEXT; context 41 arch/powerpc/mm/book3s32/tlb.c flush_hash_pages(mm->context.id, addr, ptephys, 1); context 80 arch/powerpc/mm/book3s32/tlb.c unsigned int ctx = mm->context.id; context 150 arch/powerpc/mm/book3s32/tlb.c flush_hash_pages(mm->context.id, vmaddr, pmd_val(*pmd), 1); context 331 arch/powerpc/mm/book3s64/hash_pgtable.c vsid = get_user_vsid(&mm->context, addr, ssize); context 91 arch/powerpc/mm/book3s64/hash_tlb.c vsid = get_user_vsid(&mm->context, addr, ssize); context 1057 arch/powerpc/mm/book3s64/hash_utils.c init_mm.context.hash_context = &init_hash_mm_context; context 1058 arch/powerpc/mm/book3s64/hash_utils.c mm_ctx_set_slb_addr_limit(&init_mm.context, SLB_ADDR_LIMIT_DEFAULT); context 1166 arch/powerpc/mm/book3s64/hash_utils.c struct subpage_prot_table *spt = mm_ctx_subpage_prot(&mm->context); context 1271 arch/powerpc/mm/book3s64/hash_utils.c vsid = get_user_vsid(&mm->context, ea, ssize); context 1503 arch/powerpc/mm/book3s64/hash_utils.c if (unlikely(psize != mm_ctx_user_psize(&mm->context))) context 1547 arch/powerpc/mm/book3s64/hash_utils.c vsid = get_user_vsid(&mm->context, ea, ssize); context 1582 arch/powerpc/mm/book3s64/hash_utils.c if (mm_ctx_user_psize(&mm->context) == MMU_PAGE_64K) context 1595 arch/powerpc/mm/book3s64/hash_utils.c mm_ctx_user_psize(&mm->context), context 1596 arch/powerpc/mm/book3s64/hash_utils.c mm_ctx_user_psize(&mm->context), context 52 arch/powerpc/mm/book3s64/iommu_api.c return !list_empty(&mm->context.iommu_group_mem_list); context 150 arch/powerpc/mm/book3s64/iommu_api.c list_for_each_entry_rcu(mem2, &mm->context.iommu_group_mem_list, next) { context 161 arch/powerpc/mm/book3s64/iommu_api.c list_add_rcu(&mem->next, &mm->context.iommu_group_mem_list); context 289 arch/powerpc/mm/book3s64/iommu_api.c list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) { context 307 arch/powerpc/mm/book3s64/iommu_api.c list_for_each_entry_lockless(mem, &mm->context.iommu_group_mem_list, context 327 arch/powerpc/mm/book3s64/iommu_api.c list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) { context 421 arch/powerpc/mm/book3s64/iommu_api.c list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) { context 460 arch/powerpc/mm/book3s64/iommu_api.c INIT_LIST_HEAD_RCU(&mm->context.iommu_group_mem_list); context 95 arch/powerpc/mm/book3s64/mmu_context.c mm->context.hash_context = kmalloc(sizeof(struct hash_mm_context), context 97 arch/powerpc/mm/book3s64/mmu_context.c if (!mm->context.hash_context) context 114 arch/powerpc/mm/book3s64/mmu_context.c if (mm->context.id == 0) { context 115 arch/powerpc/mm/book3s64/mmu_context.c memset(mm->context.hash_context, 0, sizeof(struct hash_mm_context)); context 119 arch/powerpc/mm/book3s64/mmu_context.c memcpy(mm->context.hash_context, current->mm->context.hash_context, sizeof(struct hash_mm_context)); context 122 arch/powerpc/mm/book3s64/mmu_context.c if (current->mm->context.hash_context->spt) { context 123 arch/powerpc/mm/book3s64/mmu_context.c mm->context.hash_context->spt = kmalloc(sizeof(struct subpage_prot_table), context 125 arch/powerpc/mm/book3s64/mmu_context.c if (!mm->context.hash_context->spt) { context 126 arch/powerpc/mm/book3s64/mmu_context.c kfree(mm->context.hash_context); context 133 arch/powerpc/mm/book3s64/mmu_context.c index = realloc_context_ids(&mm->context); context 136 arch/powerpc/mm/book3s64/mmu_context.c kfree(mm->context.hash_context->spt); context 138 arch/powerpc/mm/book3s64/mmu_context.c kfree(mm->context.hash_context); context 177 arch/powerpc/mm/book3s64/mmu_context.c mm->context.hash_context = NULL; context 194 arch/powerpc/mm/book3s64/mmu_context.c mm->context.id = index; context 196 arch/powerpc/mm/book3s64/mmu_context.c mm->context.pte_frag = NULL; context 197 arch/powerpc/mm/book3s64/mmu_context.c mm->context.pmd_frag = NULL; context 201 arch/powerpc/mm/book3s64/mmu_context.c atomic_set(&mm->context.active_cpus, 0); context 202 arch/powerpc/mm/book3s64/mmu_context.c atomic_set(&mm->context.copros, 0); context 244 arch/powerpc/mm/book3s64/mmu_context.c frag = mm->context.pte_frag; context 248 arch/powerpc/mm/book3s64/mmu_context.c frag = mm->context.pmd_frag; context 257 arch/powerpc/mm/book3s64/mmu_context.c WARN_ON_ONCE(!list_empty(&mm->context.iommu_group_mem_list)); context 273 arch/powerpc/mm/book3s64/mmu_context.c process_tb[mm->context.id].prtb0 = 0; context 276 arch/powerpc/mm/book3s64/mmu_context.c destroy_contexts(&mm->context); context 277 arch/powerpc/mm/book3s64/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 299 arch/powerpc/mm/book3s64/mmu_context.c process_tb[mm->context.id].prtb0 = 0; context 306 arch/powerpc/mm/book3s64/mmu_context.c mtspr(SPRN_PID, next->context.id); context 274 arch/powerpc/mm/book3s64/pgtable.c ret = mm->context.pmd_frag; context 282 arch/powerpc/mm/book3s64/pgtable.c mm->context.pmd_frag = pmd_frag; context 320 arch/powerpc/mm/book3s64/pgtable.c if (likely(!mm->context.pmd_frag)) { context 322 arch/powerpc/mm/book3s64/pgtable.c mm->context.pmd_frag = ret + PMD_FRAG_SIZE; context 170 arch/powerpc/mm/book3s64/pkeys.c mm->context.execute_only_pkey = execute_only_key; context 322 arch/powerpc/mm/book3s64/pkeys.c return mm->context.execute_only_pkey; context 331 arch/powerpc/mm/book3s64/pkeys.c return (vma_pkey(vma) == vma->vm_mm->context.execute_only_pkey); context 427 arch/powerpc/mm/book3s64/pkeys.c mm->context.execute_only_pkey = oldmm->context.execute_only_pkey; context 106 arch/powerpc/mm/book3s64/radix_hugetlbpage.c (atomic_read(&mm->context.copros) > 0)) context 385 arch/powerpc/mm/book3s64/radix_pgtable.c init_mm.context.id = mmu_base_pid; context 1044 arch/powerpc/mm/book3s64/radix_pgtable.c if ((change & _PAGE_RW) && atomic_read(&mm->context.copros) > 0) { context 1078 arch/powerpc/mm/book3s64/radix_pgtable.c (atomic_read(&mm->context.copros) > 0)) context 364 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->context.copros) > 0) context 497 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->context.copros) > 0) context 551 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->context.copros) > 0) context 571 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 584 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 598 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 617 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->context.copros) > 0) context 631 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->context.copros) > 0) context 640 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long pid = mm->context.id; context 676 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 712 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 752 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 845 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 1083 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 1151 arch/powerpc/mm/book3s64/radix_tlb.c pid = mm->context.id; context 1219 arch/powerpc/mm/book3s64/radix_tlb.c unsigned long pid = mm->context.id; context 640 arch/powerpc/mm/book3s64/slb.c static long slb_insert_entry(unsigned long ea, unsigned long context, context 647 arch/powerpc/mm/book3s64/slb.c vsid = get_vsid(context, ea, ssize); context 686 arch/powerpc/mm/book3s64/slb.c unsigned long context; context 728 arch/powerpc/mm/book3s64/slb.c context = get_kernel_context(ea); context 730 arch/powerpc/mm/book3s64/slb.c return slb_insert_entry(ea, context, flags, ssize, true); context 735 arch/powerpc/mm/book3s64/slb.c unsigned long context; context 744 arch/powerpc/mm/book3s64/slb.c if (ea >= mm_ctx_slb_addr_limit(&mm->context)) context 747 arch/powerpc/mm/book3s64/slb.c context = get_user_context(&mm->context, ea); context 748 arch/powerpc/mm/book3s64/slb.c if (!context) context 761 arch/powerpc/mm/book3s64/slb.c return slb_insert_entry(ea, context, flags, ssize, false); context 24 arch/powerpc/mm/book3s64/subpage_prot.c struct subpage_prot_table *spt = mm_ctx_subpage_prot(&mm->context); context 97 arch/powerpc/mm/book3s64/subpage_prot.c spt = mm_ctx_subpage_prot(&mm->context); context 222 arch/powerpc/mm/book3s64/subpage_prot.c spt = mm_ctx_subpage_prot(&mm->context); context 233 arch/powerpc/mm/book3s64/subpage_prot.c mm->context.hash_context->spt = spt; context 102 arch/powerpc/mm/copro_fault.c vsid = get_user_vsid(&mm->context, ea, ssize); context 99 arch/powerpc/mm/mmu_context.c void *frag = pte_frag_get(&mm->context); context 151 arch/powerpc/mm/nohash/book3e_hugetlbpage.c if (unlikely(book3e_tlb_exists(ea, mm->context.id))) { context 161 arch/powerpc/mm/nohash/book3e_hugetlbpage.c mas1 = MAS1_VALID | MAS1_TID(mm->context.id) | MAS1_TSIZE(tsize); context 124 arch/powerpc/mm/nohash/mmu_context.c if (mm->context.active) { context 133 arch/powerpc/mm/nohash/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 178 arch/powerpc/mm/nohash/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 183 arch/powerpc/mm/nohash/mmu_context.c mm->context.active = 0; context 220 arch/powerpc/mm/nohash/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 244 arch/powerpc/mm/nohash/mmu_context.c nact += context_mm[id]->context.active; context 274 arch/powerpc/mm/nohash/mmu_context.c cpu, next, next->context.active, next->context.id); context 278 arch/powerpc/mm/nohash/mmu_context.c next->context.active++; context 280 arch/powerpc/mm/nohash/mmu_context.c pr_hardcont(" (old=0x%p a=%d)", prev, prev->context.active); context 281 arch/powerpc/mm/nohash/mmu_context.c WARN_ON(prev->context.active < 1); context 282 arch/powerpc/mm/nohash/mmu_context.c prev->context.active--; context 289 arch/powerpc/mm/nohash/mmu_context.c id = next->context.id; context 332 arch/powerpc/mm/nohash/mmu_context.c next->context.id = id; context 378 arch/powerpc/mm/nohash/mmu_context.c if (mm->context.id == 0) context 380 arch/powerpc/mm/nohash/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 381 arch/powerpc/mm/nohash/mmu_context.c mm->context.active = 0; context 382 arch/powerpc/mm/nohash/mmu_context.c pte_frag_set(&mm->context, NULL); context 394 arch/powerpc/mm/nohash/mmu_context.c if (mm->context.id == MMU_NO_CONTEXT) context 397 arch/powerpc/mm/nohash/mmu_context.c WARN_ON(mm->context.active != 0); context 400 arch/powerpc/mm/nohash/mmu_context.c id = mm->context.id; context 403 arch/powerpc/mm/nohash/mmu_context.c mm->context.id = MMU_NO_CONTEXT; context 405 arch/powerpc/mm/nohash/mmu_context.c mm->context.active = 0; context 454 arch/powerpc/mm/nohash/mmu_context.c init_mm.context.active = NR_CPUS; context 199 arch/powerpc/mm/nohash/tlb.c pid = mm->context.id; context 212 arch/powerpc/mm/nohash/tlb.c pid = mm ? mm->context.id : 0; context 275 arch/powerpc/mm/nohash/tlb.c pid = mm->context.id; context 304 arch/powerpc/mm/nohash/tlb.c pid = mm->context.id; context 788 arch/powerpc/mm/nohash/tlb.c mm_ctx_set_slb_addr_limit(&init_mm.context, SLB_ADDR_LIMIT_DEFAULT); context 41 arch/powerpc/mm/pgtable-frag.c ret = pte_frag_get(&mm->context); context 49 arch/powerpc/mm/pgtable-frag.c pte_frag_set(&mm->context, pte_frag); context 89 arch/powerpc/mm/pgtable-frag.c if (likely(!pte_frag_get(&mm->context))) { context 91 arch/powerpc/mm/pgtable-frag.c pte_frag_set(&mm->context, ret + PTE_FRAG_SIZE); context 91 arch/powerpc/mm/slice.c if ((mm_ctx_slb_addr_limit(&mm->context) - len) < addr) context 200 arch/powerpc/mm/slice.c psize_mask = slice_mask_for_size(&mm->context, psize); context 207 arch/powerpc/mm/slice.c lpsizes = mm_ctx_low_slices(&mm->context); context 217 arch/powerpc/mm/slice.c old_mask = slice_mask_for_size(&mm->context, old_psize); context 226 arch/powerpc/mm/slice.c hpsizes = mm_ctx_high_slices(&mm->context); context 227 arch/powerpc/mm/slice.c for (i = 0; i < GET_HIGH_SLICE_INDEX(mm_ctx_slb_addr_limit(&mm->context)); i++) { context 236 arch/powerpc/mm/slice.c old_mask = slice_mask_for_size(&mm->context, old_psize); context 246 arch/powerpc/mm/slice.c (unsigned long)mm_ctx_low_slices(&mm->context), context 247 arch/powerpc/mm/slice.c (unsigned long)mm_ctx_high_slices(&mm->context)); context 347 arch/powerpc/mm/slice.c addr += mm_ctx_slb_addr_limit(&mm->context) - DEFAULT_MAP_WINDOW; context 459 arch/powerpc/mm/slice.c if (high_limit > mm_ctx_slb_addr_limit(&mm->context)) { context 465 arch/powerpc/mm/slice.c mm_ctx_set_slb_addr_limit(&mm->context, high_limit); context 472 arch/powerpc/mm/slice.c BUG_ON(mm_ctx_slb_addr_limit(&mm->context) == 0); context 492 arch/powerpc/mm/slice.c maskp = slice_mask_for_size(&mm->context, psize); context 519 arch/powerpc/mm/slice.c compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); context 649 arch/powerpc/mm/slice.c mm_ctx_user_psize(¤t->mm->context), 0); context 659 arch/powerpc/mm/slice.c mm_ctx_user_psize(¤t->mm->context), 1); context 670 arch/powerpc/mm/slice.c psizes = mm_ctx_low_slices(&mm->context); context 673 arch/powerpc/mm/slice.c psizes = mm_ctx_high_slices(&mm->context); context 694 arch/powerpc/mm/slice.c mm_ctx_set_slb_addr_limit(&mm->context, SLB_ADDR_LIMIT_DEFAULT); context 695 arch/powerpc/mm/slice.c mm_ctx_set_user_psize(&mm->context, psize); context 700 arch/powerpc/mm/slice.c lpsizes = mm_ctx_low_slices(&mm->context); context 703 arch/powerpc/mm/slice.c hpsizes = mm_ctx_high_slices(&mm->context); context 709 arch/powerpc/mm/slice.c mask = slice_mask_for_size(&mm->context, psize); context 725 arch/powerpc/mm/slice.c mm_ctx_set_slb_addr_limit(&mm->context, DEFAULT_MAP_WINDOW); context 764 arch/powerpc/mm/slice.c unsigned int psize = mm_ctx_user_psize(&mm->context); context 768 arch/powerpc/mm/slice.c maskp = slice_mask_for_size(&mm->context, psize); context 775 arch/powerpc/mm/slice.c compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K); context 209 arch/powerpc/perf/callchain.c if (vdso64_rt_sigtramp && current->mm->context.vdso_base && context 210 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso64_rt_sigtramp) context 368 arch/powerpc/perf/callchain.c if (vdso32_sigtramp && current->mm->context.vdso_base && context 369 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso32_sigtramp) context 379 arch/powerpc/perf/callchain.c if (vdso32_rt_sigtramp && current->mm->context.vdso_base && context 380 arch/powerpc/perf/callchain.c nip == current->mm->context.vdso_base + vdso32_rt_sigtramp) context 137 arch/powerpc/platforms/44x/warp.c static irqreturn_t temp_isr(int irq, void *context) context 62 arch/riscv/include/asm/elf.h (elf_addr_t)current->mm->context.vdso); \ context 193 arch/riscv/kernel/signal.c current->mm->context.vdso, rt_sigreturn); context 76 arch/riscv/kernel/vdso.c mm->context.vdso = (void *)vdso_base; context 83 arch/riscv/kernel/vdso.c mm->context.vdso = NULL; context 92 arch/riscv/kernel/vdso.c if (vma->vm_mm && (vma->vm_start == (long)vma->vm_mm->context.vdso)) context 37 arch/riscv/mm/cacheflush.c mask = &mm->context.icache_stale_mask; context 28 arch/riscv/mm/context.c cpumask_t *mask = &mm->context.icache_stale_mask; context 176 arch/s390/include/asm/elf.h !current->mm->context.alloc_pgste) { \ context 276 arch/s390/include/asm/elf.h (unsigned long)current->mm->context.vdso_base); \ context 40 arch/s390/include/asm/mmu.h .context.lock = __SPIN_LOCK_UNLOCKED(name.context.lock), \ context 41 arch/s390/include/asm/mmu.h .context.pgtable_list = LIST_HEAD_INIT(name.context.pgtable_list), \ context 42 arch/s390/include/asm/mmu.h .context.gmap_list = LIST_HEAD_INIT(name.context.gmap_list), context 21 arch/s390/include/asm/mmu_context.h spin_lock_init(&mm->context.lock); context 22 arch/s390/include/asm/mmu_context.h INIT_LIST_HEAD(&mm->context.pgtable_list); context 23 arch/s390/include/asm/mmu_context.h INIT_LIST_HEAD(&mm->context.gmap_list); context 24 arch/s390/include/asm/mmu_context.h cpumask_clear(&mm->context.cpu_attach_mask); context 25 arch/s390/include/asm/mmu_context.h atomic_set(&mm->context.flush_count, 0); context 26 arch/s390/include/asm/mmu_context.h mm->context.gmap_asce = 0; context 27 arch/s390/include/asm/mmu_context.h mm->context.flush_mm = 0; context 28 arch/s390/include/asm/mmu_context.h mm->context.compat_mm = test_thread_flag(TIF_31BIT); context 30 arch/s390/include/asm/mmu_context.h mm->context.alloc_pgste = page_table_allocate_pgste || context 32 arch/s390/include/asm/mmu_context.h (current->mm && current->mm->context.alloc_pgste); context 33 arch/s390/include/asm/mmu_context.h mm->context.has_pgste = 0; context 34 arch/s390/include/asm/mmu_context.h mm->context.uses_skeys = 0; context 35 arch/s390/include/asm/mmu_context.h mm->context.uses_cmm = 0; context 36 arch/s390/include/asm/mmu_context.h mm->context.allow_gmap_hpage_1m = 0; context 38 arch/s390/include/asm/mmu_context.h switch (mm->context.asce_limit) { context 46 arch/s390/include/asm/mmu_context.h mm->context.asce_limit = STACK_TOP_MAX; context 47 arch/s390/include/asm/mmu_context.h mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 52 arch/s390/include/asm/mmu_context.h mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 57 arch/s390/include/asm/mmu_context.h mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 62 arch/s390/include/asm/mmu_context.h mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 73 arch/s390/include/asm/mmu_context.h S390_lowcore.user_asce = mm->context.asce; context 93 arch/s390/include/asm/mmu_context.h S390_lowcore.user_asce = next->context.asce; context 94 arch/s390/include/asm/mmu_context.h cpumask_set_cpu(cpu, &next->context.cpu_attach_mask); context 105 arch/s390/include/asm/mmu_context.h cpumask_clear_cpu(cpu, &prev->context.cpu_attach_mask); context 116 arch/s390/include/asm/mmu_context.h while (atomic_read(&mm->context.flush_count)) context 123 arch/s390/include/asm/pgalloc.h if (mm->context.asce_limit == _REGION3_SIZE) { context 135 arch/s390/include/asm/pgalloc.h if (mm->context.asce_limit == _REGION3_SIZE) context 500 arch/s390/include/asm/pgtable.h return mm->context.asce_limit <= _REGION1_SIZE; context 506 arch/s390/include/asm/pgtable.h return mm->context.asce_limit <= _REGION2_SIZE; context 512 arch/s390/include/asm/pgtable.h return mm->context.asce_limit <= _REGION3_SIZE; context 519 arch/s390/include/asm/pgtable.h if (unlikely(mm->context.has_pgste)) context 528 arch/s390/include/asm/pgtable.h if (unlikely(mm->context.alloc_pgste)) context 542 arch/s390/include/asm/pgtable.h if (mm->context.uses_skeys) context 1283 arch/s390/include/asm/pgtable.h return end <= current->mm->context.asce_limit; context 68 arch/s390/include/asm/tlb.h tlb->mm->context.flush_mm = 1; context 93 arch/s390/include/asm/tlb.h tlb->mm->context.flush_mm = 1; context 112 arch/s390/include/asm/tlb.h tlb->mm->context.flush_mm = 1; context 130 arch/s390/include/asm/tlb.h tlb->mm->context.flush_mm = 1; context 61 arch/s390/include/asm/tlbflush.h atomic_inc(&mm->context.flush_count); context 63 arch/s390/include/asm/tlbflush.h cpumask_copy(mm_cpumask(mm), &mm->context.cpu_attach_mask); context 65 arch/s390/include/asm/tlbflush.h gmap_asce = READ_ONCE(mm->context.gmap_asce); context 69 arch/s390/include/asm/tlbflush.h __tlb_flush_idte(mm->context.asce); context 74 arch/s390/include/asm/tlbflush.h atomic_dec(&mm->context.flush_count); context 81 arch/s390/include/asm/tlbflush.h __tlb_flush_idte(init_mm.context.asce); context 88 arch/s390/include/asm/tlbflush.h spin_lock(&mm->context.lock); context 89 arch/s390/include/asm/tlbflush.h if (mm->context.flush_mm) { context 90 arch/s390/include/asm/tlbflush.h mm->context.flush_mm = 0; context 93 arch/s390/include/asm/tlbflush.h spin_unlock(&mm->context.lock); context 260 arch/s390/kernel/smp.c cpumask_set_cpu(cpu, &init_mm.context.cpu_attach_mask); context 958 arch/s390/kernel/smp.c cpumask_clear_cpu(cpu, &init_mm.context.cpu_attach_mask); context 59 arch/s390/kernel/vdso.c if (vma->vm_mm->context.compat_mm) { context 80 arch/s390/kernel/vdso.c if (vma->vm_mm->context.compat_mm) context 90 arch/s390/kernel/vdso.c current->mm->context.vdso_base = vma->vm_start; context 214 arch/s390/kernel/vdso.c mm->context.compat_mm = is_compat_task(); context 215 arch/s390/kernel/vdso.c if (mm->context.compat_mm) context 257 arch/s390/kernel/vdso.c current->mm->context.vdso_base = vdso_base; context 760 arch/s390/kvm/kvm-s390.c kvm->mm->context.allow_gmap_hpage_1m = 1; context 825 arch/s390/kvm/kvm-s390.c else if (kvm->mm->context.allow_gmap_hpage_1m) context 2068 arch/s390/kvm/kvm-s390.c if (!bufsize || !kvm->mm->context.uses_cmm) { context 2156 arch/s390/kvm/kvm-s390.c if (!kvm->mm->context.uses_cmm) { context 2158 arch/s390/kvm/kvm-s390.c kvm->mm->context.uses_cmm = 1; context 3559 arch/s390/kvm/kvm-s390.c (vcpu->kvm->mm->context.uses_cmm)) context 1217 arch/s390/kvm/priv.c if (vcpu->kvm->mm->context.uses_cmm == 0) { context 1219 arch/s390/kvm/priv.c vcpu->kvm->mm->context.uses_cmm = 1; context 106 arch/s390/mm/gmap.c spin_lock(&mm->context.lock); context 107 arch/s390/mm/gmap.c list_add_rcu(&gmap->list, &mm->context.gmap_list); context 108 arch/s390/mm/gmap.c if (list_is_singular(&mm->context.gmap_list)) context 112 arch/s390/mm/gmap.c WRITE_ONCE(mm->context.gmap_asce, gmap_asce); context 113 arch/s390/mm/gmap.c spin_unlock(&mm->context.lock); context 254 arch/s390/mm/gmap.c spin_lock(&gmap->mm->context.lock); context 256 arch/s390/mm/gmap.c if (list_empty(&gmap->mm->context.gmap_list)) context 258 arch/s390/mm/gmap.c else if (list_is_singular(&gmap->mm->context.gmap_list)) context 259 arch/s390/mm/gmap.c gmap_asce = list_first_entry(&gmap->mm->context.gmap_list, context 263 arch/s390/mm/gmap.c WRITE_ONCE(gmap->mm->context.gmap_asce, gmap_asce); context 264 arch/s390/mm/gmap.c spin_unlock(&gmap->mm->context.lock); context 518 arch/s390/mm/gmap.c list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { context 594 arch/s390/mm/gmap.c if (pmd_large(*pmd) && !gmap->mm->context.allow_gmap_hpage_1m) context 919 arch/s390/mm/gmap.c if (!gmap->mm->context.allow_gmap_hpage_1m) context 1649 arch/s390/mm/gmap.c BUG_ON(parent->mm->context.allow_gmap_hpage_1m); context 2245 arch/s390/mm/gmap.c list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { context 2310 arch/s390/mm/gmap.c list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { context 2363 arch/s390/mm/gmap.c list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { context 2398 arch/s390/mm/gmap.c list_for_each_entry_rcu(gmap, &mm->context.gmap_list, list) { context 2547 arch/s390/mm/gmap.c mm->context.has_pgste = 1; context 2608 arch/s390/mm/gmap.c mm->context.uses_skeys = 1; context 2612 arch/s390/mm/gmap.c mm->context.uses_skeys = 0; context 363 arch/s390/mm/hugetlbpage.c if (addr + len > current->mm->context.asce_limit && context 102 arch/s390/mm/init.c init_mm.context.asce = (__pa(init_mm.pgd) & PAGE_MASK) | asce_bits; context 103 arch/s390/mm/init.c S390_lowcore.kernel_asce = init_mm.context.asce; context 178 arch/s390/mm/init.c cpumask_set_cpu(0, &init_mm.context.cpu_attach_mask); context 112 arch/s390/mm/mmap.c if (addr + len > current->mm->context.asce_limit && context 177 arch/s390/mm/mmap.c if (addr + len > current->mm->context.asce_limit && context 75 arch/s390/mm/pgalloc.c S390_lowcore.user_asce = mm->context.asce; context 96 arch/s390/mm/pgalloc.c VM_BUG_ON(mm->context.asce_limit < _REGION2_SIZE); context 99 arch/s390/mm/pgalloc.c while (mm->context.asce_limit < end) { context 107 arch/s390/mm/pgalloc.c if (mm->context.asce_limit == _REGION2_SIZE) { context 111 arch/s390/mm/pgalloc.c mm->context.asce_limit = _REGION1_SIZE; context 112 arch/s390/mm/pgalloc.c mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 119 arch/s390/mm/pgalloc.c mm->context.asce_limit = -PAGE_SIZE; context 120 arch/s390/mm/pgalloc.c mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 136 arch/s390/mm/pgalloc.c VM_BUG_ON(mm->context.asce_limit != _REGION2_SIZE); context 146 arch/s390/mm/pgalloc.c mm->context.asce_limit = _REGION3_SIZE; context 147 arch/s390/mm/pgalloc.c mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH | context 201 arch/s390/mm/pgalloc.c spin_lock_bh(&mm->context.lock); context 202 arch/s390/mm/pgalloc.c if (!list_empty(&mm->context.pgtable_list)) { context 203 arch/s390/mm/pgalloc.c page = list_first_entry(&mm->context.pgtable_list, context 217 arch/s390/mm/pgalloc.c spin_unlock_bh(&mm->context.lock); context 241 arch/s390/mm/pgalloc.c spin_lock_bh(&mm->context.lock); context 242 arch/s390/mm/pgalloc.c list_add(&page->lru, &mm->context.pgtable_list); context 243 arch/s390/mm/pgalloc.c spin_unlock_bh(&mm->context.lock); context 257 arch/s390/mm/pgalloc.c spin_lock_bh(&mm->context.lock); context 261 arch/s390/mm/pgalloc.c list_add(&page->lru, &mm->context.pgtable_list); context 264 arch/s390/mm/pgalloc.c spin_unlock_bh(&mm->context.lock); context 291 arch/s390/mm/pgalloc.c spin_lock_bh(&mm->context.lock); context 295 arch/s390/mm/pgalloc.c list_add_tail(&page->lru, &mm->context.pgtable_list); context 298 arch/s390/mm/pgalloc.c spin_unlock_bh(&mm->context.lock); context 36 arch/s390/mm/pgtable.c asce = READ_ONCE(mm->context.gmap_asce); context 40 arch/s390/mm/pgtable.c asce = asce ? : mm->context.asce; context 56 arch/s390/mm/pgtable.c asce = READ_ONCE(mm->context.gmap_asce); context 60 arch/s390/mm/pgtable.c asce = asce ? : mm->context.asce; context 78 arch/s390/mm/pgtable.c atomic_inc(&mm->context.flush_count); context 84 arch/s390/mm/pgtable.c atomic_dec(&mm->context.flush_count); context 97 arch/s390/mm/pgtable.c atomic_inc(&mm->context.flush_count); context 98 arch/s390/mm/pgtable.c if (cpumask_equal(&mm->context.cpu_attach_mask, context 101 arch/s390/mm/pgtable.c mm->context.flush_mm = 1; context 104 arch/s390/mm/pgtable.c atomic_dec(&mm->context.flush_count); context 347 arch/s390/mm/pgtable.c mm->context.asce, IDTE_LOCAL); context 350 arch/s390/mm/pgtable.c if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) context 359 arch/s390/mm/pgtable.c mm->context.asce, IDTE_GLOBAL); context 360 arch/s390/mm/pgtable.c if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) context 364 arch/s390/mm/pgtable.c if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) context 368 arch/s390/mm/pgtable.c if (mm_has_pgste(mm) && mm->context.allow_gmap_hpage_1m) context 381 arch/s390/mm/pgtable.c atomic_inc(&mm->context.flush_count); context 387 arch/s390/mm/pgtable.c atomic_dec(&mm->context.flush_count); context 399 arch/s390/mm/pgtable.c atomic_inc(&mm->context.flush_count); context 400 arch/s390/mm/pgtable.c if (cpumask_equal(&mm->context.cpu_attach_mask, context 403 arch/s390/mm/pgtable.c mm->context.flush_mm = 1; context 409 arch/s390/mm/pgtable.c atomic_dec(&mm->context.flush_count); context 464 arch/s390/mm/pgtable.c mm->context.asce, IDTE_LOCAL); context 474 arch/s390/mm/pgtable.c mm->context.asce, IDTE_GLOBAL); context 493 arch/s390/mm/pgtable.c atomic_inc(&mm->context.flush_count); context 499 arch/s390/mm/pgtable.c atomic_dec(&mm->context.flush_count); context 257 arch/s390/numa/toptree.c struct toptree *toptree_first(struct toptree *context, int level) context 261 arch/s390/numa/toptree.c if (context->level == level) context 262 arch/s390/numa/toptree.c return context; context 264 arch/s390/numa/toptree.c if (!list_empty(&context->children)) { context 265 arch/s390/numa/toptree.c list_for_each_entry(child, &context->children, sibling) { context 304 arch/s390/numa/toptree.c struct toptree *toptree_next(struct toptree *cur, struct toptree *context, context 312 arch/s390/numa/toptree.c if (context->level == level) context 320 arch/s390/numa/toptree.c while (cur_context->level < context->level - 1) { context 343 arch/s390/numa/toptree.c int toptree_count(struct toptree *context, int level) context 348 arch/s390/numa/toptree.c toptree_for_each(cur, context, level) context 30 arch/s390/numa/toptree.h int toptree_count(struct toptree *context, int level); context 32 arch/s390/numa/toptree.h struct toptree *toptree_first(struct toptree *context, int level); context 33 arch/s390/numa/toptree.h struct toptree *toptree_next(struct toptree *cur, struct toptree *context, context 200 arch/sh/include/asm/elf.h #define VDSO_BASE ((unsigned long)current->mm->context.vdso) context 41 arch/sh/include/asm/mmu_context.h #define cpu_context(cpu, mm) ((mm)->context.id[cpu]) context 91 arch/sh/kernel/dumpstack.c struct thread_info *context; context 94 arch/sh/kernel/dumpstack.c context = (struct thread_info *) context 104 arch/sh/kernel/dumpstack.c context, &graph); context 289 arch/sh/kernel/signal_32.c } else if (likely(current->mm->context.vdso)) { context 359 arch/sh/kernel/signal_32.c } else if (likely(current->mm->context.vdso)) { context 80 arch/sh/kernel/vsyscall/vsyscall.c current->mm->context.vdso = (void *)addr; context 89 arch/sh/kernel/vsyscall/vsyscall.c if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso) context 222 arch/sparc/include/asm/elf_64.h (unsigned long)current->mm->context.vdso); \ context 36 arch/sparc/include/asm/mman.h if (!current->mm->context.adi) { context 39 arch/sparc/include/asm/mman.h current->mm->context.adi = true; context 42 arch/sparc/include/asm/mmu_context_64.h &mm->context.tsb_block[MM_TSB_BASE], context 44 arch/sparc/include/asm/mmu_context_64.h (mm->context.tsb_block[MM_TSB_HUGE].tsb ? context 45 arch/sparc/include/asm/mmu_context_64.h &mm->context.tsb_block[MM_TSB_HUGE] : context 50 arch/sparc/include/asm/mmu_context_64.h , __pa(&mm->context.tsb_descr[MM_TSB_BASE]), context 75 arch/sparc/include/asm/mmu_context_64.h : "r" (CTX_HWBITS((__mm)->context)), \ context 90 arch/sparc/include/asm/mmu_context_64.h spin_lock_irqsave(&mm->context.lock, flags); context 91 arch/sparc/include/asm/mmu_context_64.h ctx_valid = CTX_VALID(mm->context); context 125 arch/sparc/include/asm/mmu_context_64.h tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context)); context 133 arch/sparc/include/asm/mmu_context_64.h __flush_tlb_mm(CTX_HWBITS(mm->context), context 136 arch/sparc/include/asm/mmu_context_64.h spin_unlock_irqrestore(&mm->context.lock, flags); context 181 arch/sparc/include/asm/mmu_context_64.h if (current && current->mm && current->mm->context.adi) { context 107 arch/sparc/include/asm/oplib_32.h int context, char *program_counter); context 152 arch/sparc/include/asm/pgtsrmmu.h void srmmu_set_context(int context); context 20 arch/sparc/include/asm/tlb_64.h #define do_flush_tlb_mm(mm) __flush_tlb_mm(CTX_HWBITS(mm->context), SECONDARY_CONTEXT) context 51 arch/sparc/include/asm/tlbflush_64.h void __flush_tlb_page(unsigned long context, unsigned long vaddr); context 58 arch/sparc/include/asm/tlbflush_64.h __flush_tlb_page(CTX_HWBITS(mm->context), vaddr); context 135 arch/sparc/kernel/adi_64.c if (mm->context.tag_store) { context 136 arch/sparc/kernel/adi_64.c tag_desc = mm->context.tag_store; context 137 arch/sparc/kernel/adi_64.c spin_lock_irqsave(&mm->context.tag_lock, flags); context 144 arch/sparc/kernel/adi_64.c spin_unlock_irqrestore(&mm->context.tag_lock, flags); context 174 arch/sparc/kernel/adi_64.c spin_lock_irqsave(&mm->context.tag_lock, flags); context 175 arch/sparc/kernel/adi_64.c if (mm->context.tag_store) { context 176 arch/sparc/kernel/adi_64.c tag_desc = mm->context.tag_store; context 205 arch/sparc/kernel/adi_64.c mm->context.tag_store = kzalloc(size, GFP_NOWAIT|__GFP_NOWARN); context 206 arch/sparc/kernel/adi_64.c if (mm->context.tag_store == NULL) { context 210 arch/sparc/kernel/adi_64.c tag_desc = mm->context.tag_store; context 213 arch/sparc/kernel/adi_64.c open_desc = mm->context.tag_store; context 295 arch/sparc/kernel/adi_64.c spin_unlock_irqrestore(&mm->context.tag_lock, flags); context 304 arch/sparc/kernel/adi_64.c spin_lock_irqsave(&mm->context.tag_lock, flags); context 312 arch/sparc/kernel/adi_64.c if (tag_desc != mm->context.tag_store) { context 317 arch/sparc/kernel/adi_64.c spin_unlock_irqrestore(&mm->context.tag_lock, flags); context 53 arch/sparc/kernel/asm-offsets.c DEFINE(AOFF_mm_context, offsetof(struct mm_struct, context)); context 165 arch/sparc/kernel/entry.h unsigned long context); context 1082 arch/sparc/kernel/smp_64.c u32 ctx = CTX_HWBITS(mm->context); context 1115 arch/sparc/kernel/smp_64.c u32 ctx = CTX_HWBITS(mm->context); context 1136 arch/sparc/kernel/smp_64.c unsigned long context = CTX_HWBITS(mm->context); context 1143 arch/sparc/kernel/smp_64.c context, vaddr, 0, context 1145 arch/sparc/kernel/smp_64.c __flush_tlb_page(context, vaddr); context 2665 arch/sparc/kernel/traps_64.c unsigned long context) context 2693 arch/sparc/kernel/traps_64.c __func__, addr, context); context 227 arch/sparc/kernel/unaligned_32.c (current->mm ? current->mm->context : context 228 arch/sparc/kernel/unaligned_32.c current->active_mm->context)); context 279 arch/sparc/kernel/unaligned_64.c (current->mm ? CTX_HWBITS(current->mm->context) : context 280 arch/sparc/kernel/unaligned_64.c CTX_HWBITS(current->active_mm->context))); context 51 arch/sparc/mm/fault_32.c (tsk->mm ? tsk->mm->context : tsk->active_mm->context)); context 54 arch/sparc/mm/fault_64.c CTX_HWBITS(tsk->mm->context) : context 55 arch/sparc/mm/fault_64.c CTX_HWBITS(tsk->active_mm->context))); context 463 arch/sparc/mm/fault_64.c mm_rss -= (mm->context.thp_pte_count * (HPAGE_SIZE / PAGE_SIZE)); context 466 arch/sparc/mm/fault_64.c mm->context.tsb_block[MM_TSB_BASE].tsb_rss_limit)) context 469 arch/sparc/mm/fault_64.c mm_rss = mm->context.hugetlb_pte_count + mm->context.thp_pte_count; context 472 arch/sparc/mm/fault_64.c mm->context.tsb_block[MM_TSB_HUGE].tsb_rss_limit)) { context 473 arch/sparc/mm/fault_64.c if (mm->context.tsb_block[MM_TSB_HUGE].tsb) context 340 arch/sparc/mm/hugetlbpage.c mm->context.hugetlb_pte_count += nptes; context 378 arch/sparc/mm/hugetlbpage.c mm->context.hugetlb_pte_count -= nptes; context 315 arch/sparc/mm/init_64.c struct tsb *tsb = mm->context.tsb_block[tsb_index].tsb; context 322 arch/sparc/mm/init_64.c (mm->context.tsb_block[tsb_index].tsb_nentries - 1UL)); context 436 arch/sparc/mm/init_64.c spin_lock_irqsave(&mm->context.lock, flags); context 440 arch/sparc/mm/init_64.c if (mm->context.hugetlb_pte_count || mm->context.thp_pte_count) { context 472 arch/sparc/mm/init_64.c spin_unlock_irqrestore(&mm->context.lock, flags); context 817 arch/sparc/mm/init_64.c old_ctx = mm->context.sparc64_ctx_val; context 821 arch/sparc/mm/init_64.c mm->context.sparc64_ctx_val = new_ctx; context 844 arch/sparc/mm/init_64.c if (unlikely(CTX_VALID(mm->context))) context 846 arch/sparc/mm/init_64.c orig_pgsz_bits = (mm->context.sparc64_ctx_val & CTX_PGSZ_MASK); context 856 arch/sparc/mm/init_64.c if (mm->context.sparc64_ctx_val) context 861 arch/sparc/mm/init_64.c mm->context.sparc64_ctx_val = new_ctx | orig_pgsz_bits; context 2961 arch/sparc/mm/init_64.c spin_lock_irqsave(&mm->context.lock, flags); context 2963 arch/sparc/mm/init_64.c if (mm->context.tsb_block[MM_TSB_HUGE].tsb != NULL) context 2967 arch/sparc/mm/init_64.c spin_unlock_irqrestore(&mm->context.lock, flags); context 2998 arch/sparc/mm/init_64.c tp = &mm->context.tsb_block[MM_TSB_HUGE]; context 3013 arch/sparc/mm/init_64.c ctx = mm->context.sparc64_ctx_val; context 3018 arch/sparc/mm/init_64.c if (ctx != mm->context.sparc64_ctx_val) { context 3030 arch/sparc/mm/init_64.c mm->context.sparc64_ctx_val = ctx; context 75 arch/sparc/mm/srmmu.c #define FLUSH_BEGIN(mm) if ((mm)->context != NO_CONTEXT) { context 443 arch/sparc/mm/srmmu.c mm->context = ctxp->ctx_number; context 456 arch/sparc/mm/srmmu.c ctxp->ctx_mm->context = NO_CONTEXT; context 458 arch/sparc/mm/srmmu.c mm->context = ctxp->ctx_number; context 461 arch/sparc/mm/srmmu.c static inline void free_context(int context) context 465 arch/sparc/mm/srmmu.c ctx_old = ctx_list_pool + context; context 498 arch/sparc/mm/srmmu.c if (mm->context == NO_CONTEXT) { context 502 arch/sparc/mm/srmmu.c srmmu_ctxd_set(&srmmu_context_table[mm->context], mm->pgd); context 511 arch/sparc/mm/srmmu.c srmmu_set_context(mm->context); context 610 arch/sparc/mm/srmmu.c if ((ctx1 = vma->vm_mm->context) != -1) { context 913 arch/sparc/mm/srmmu.c init_mm.context = (unsigned long) NO_CONTEXT; context 1018 arch/sparc/mm/srmmu.c mm->context = NO_CONTEXT; context 1026 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1028 arch/sparc/mm/srmmu.c srmmu_ctxd_set(&srmmu_context_table[mm->context], srmmu_swapper_pg_dir); context 1031 arch/sparc/mm/srmmu.c free_context(mm->context); context 1033 arch/sparc/mm/srmmu.c mm->context = NO_CONTEXT; context 1675 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1687 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1707 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1724 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1739 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 1754 arch/sparc/mm/srmmu.c if (mm->context != NO_CONTEXT) { context 34 arch/sparc/mm/tlb.c if (CTX_VALID(mm->context)) { context 42 arch/sparc/mm/tlb.c __flush_tlb_pending(CTX_HWBITS(tb->mm->context), context 184 arch/sparc/mm/tlb.c mm->context.hugetlb_pte_count++; context 186 arch/sparc/mm/tlb.c mm->context.thp_pte_count++; context 189 arch/sparc/mm/tlb.c mm->context.hugetlb_pte_count--; context 191 arch/sparc/mm/tlb.c mm->context.thp_pte_count--; context 260 arch/sparc/mm/tlb.c (vma->vm_mm)->context.thp_pte_count--; context 124 arch/sparc/mm/tsb.c spin_lock_irqsave(&mm->context.lock, flags); context 127 arch/sparc/mm/tsb.c base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb; context 128 arch/sparc/mm/tsb.c nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries; context 140 arch/sparc/mm/tsb.c else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) { context 141 arch/sparc/mm/tsb.c base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb; context 142 arch/sparc/mm/tsb.c nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries; context 149 arch/sparc/mm/tsb.c spin_unlock_irqrestore(&mm->context.lock, flags); context 157 arch/sparc/mm/tsb.c spin_lock_irqsave(&mm->context.lock, flags); context 160 arch/sparc/mm/tsb.c base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb; context 161 arch/sparc/mm/tsb.c nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries; context 174 arch/sparc/mm/tsb.c else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) { context 175 arch/sparc/mm/tsb.c base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb; context 176 arch/sparc/mm/tsb.c nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries; context 183 arch/sparc/mm/tsb.c spin_unlock_irqrestore(&mm->context.lock, flags); context 199 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_nentries = context 216 arch/sparc/mm/tsb.c tsb_paddr = __pa(mm->context.tsb_block[tsb_idx].tsb); context 277 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg; context 278 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_map_vaddr = 0; context 279 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_map_pte = 0; context 285 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg; context 286 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_map_vaddr = base; context 287 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_idx].tsb_map_pte = tte; context 292 arch/sparc/mm/tsb.c struct hv_tsb_descr *hp = &mm->context.tsb_descr[tsb_idx]; context 432 arch/sparc/mm/tsb.c if (mm->context.tsb_block[tsb_index].tsb == NULL && context 443 arch/sparc/mm/tsb.c if (mm->context.tsb_block[tsb_index].tsb != NULL) context 444 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_index].tsb_rss_limit = ~0UL; context 473 arch/sparc/mm/tsb.c spin_lock_irqsave(&mm->context.lock, flags); context 475 arch/sparc/mm/tsb.c old_tsb = mm->context.tsb_block[tsb_index].tsb; context 477 arch/sparc/mm/tsb.c (mm->context.tsb_block[tsb_index].tsb_reg_val & 0x7UL); context 478 arch/sparc/mm/tsb.c old_size = (mm->context.tsb_block[tsb_index].tsb_nentries * context 487 arch/sparc/mm/tsb.c (rss < mm->context.tsb_block[tsb_index].tsb_rss_limit))) { context 488 arch/sparc/mm/tsb.c spin_unlock_irqrestore(&mm->context.lock, flags); context 494 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_index].tsb_rss_limit = new_rss_limit; context 514 arch/sparc/mm/tsb.c mm->context.tsb_block[tsb_index].tsb = new_tsb; context 517 arch/sparc/mm/tsb.c spin_unlock_irqrestore(&mm->context.lock, flags); context 545 arch/sparc/mm/tsb.c spin_lock_init(&mm->context.lock); context 547 arch/sparc/mm/tsb.c mm->context.sparc64_ctx_val = 0UL; context 549 arch/sparc/mm/tsb.c mm->context.tag_store = NULL; context 550 arch/sparc/mm/tsb.c spin_lock_init(&mm->context.tag_lock); context 557 arch/sparc/mm/tsb.c saved_hugetlb_pte_count = mm->context.hugetlb_pte_count; context 558 arch/sparc/mm/tsb.c saved_thp_pte_count = mm->context.thp_pte_count; context 559 arch/sparc/mm/tsb.c mm->context.hugetlb_pte_count = 0; context 560 arch/sparc/mm/tsb.c mm->context.thp_pte_count = 0; context 570 arch/sparc/mm/tsb.c mm->context.tsb_block[i].tsb = NULL; context 584 arch/sparc/mm/tsb.c if (unlikely(!mm->context.tsb_block[MM_TSB_BASE].tsb)) context 607 arch/sparc/mm/tsb.c tsb_destroy_one(&mm->context.tsb_block[i]); context 611 arch/sparc/mm/tsb.c if (CTX_VALID(mm->context)) { context 612 arch/sparc/mm/tsb.c unsigned long nr = CTX_NRBITS(mm->context); context 619 arch/sparc/mm/tsb.c if (mm->context.tag_store) { context 624 arch/sparc/mm/tsb.c tag_desc = mm->context.tag_store; context 632 arch/sparc/mm/tsb.c kfree(mm->context.tag_store); context 633 arch/sparc/mm/tsb.c mm->context.tag_store = NULL; context 39 arch/sparc/power/hibernate.c tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context)); context 393 arch/sparc/vdso/vma.c current->mm->context.vdso = (void __user *)text_start; context 423 arch/sparc/vdso/vma.c current->mm->context.vdso = NULL; context 52 arch/um/drivers/mconsole.h enum mc_context context; context 84 arch/um/drivers/mconsole_kern.c if (req.cmd->context == MCONSOLE_INTR) context 54 arch/um/include/asm/mmu_context.h __switch_mm(&new->context.id); context 69 arch/um/include/asm/mmu_context.h __switch_mm(&next->context.id); context 29 arch/um/kernel/exec.c ret = unmap(¤t->mm->context.id, 0, STUB_START, 0, &data); context 30 arch/um/kernel/exec.c ret = ret || unmap(¤t->mm->context.id, STUB_END, context 40 arch/um/kernel/exec.c __switch_mm(¤t->mm->context.id); context 31 arch/um/kernel/reboot.c pid = t->mm->context.id.u.pid; context 54 arch/um/kernel/skas/mmu.c struct mm_context *to_mm = &mm->context; context 64 arch/um/kernel/skas/mmu.c from_mm = ¤t->mm->context; context 103 arch/um/kernel/skas/mmu.c ret = init_stub_pte(mm, STUB_DATA, mm->context.id.stack); context 107 arch/um/kernel/skas/mmu.c mm->context.stub_pages[0] = virt_to_page(__syscall_stub_start); context 108 arch/um/kernel/skas/mmu.c mm->context.stub_pages[1] = virt_to_page(mm->context.id.stack); context 114 arch/um/kernel/skas/mmu.c mm->context.stub_pages); context 142 arch/um/kernel/skas/mmu.c struct mm_context *mmu = &mm->context; context 54 arch/um/kernel/skas/process.c return current->mm->context.id.stack; context 130 arch/um/kernel/time.c os_alarm_process(get_current()->mm->context.id.u.pid); context 74 arch/um/kernel/tlb.c ret = map(&hvc->mm->context.id, op->u.mmap.addr, context 85 arch/um/kernel/tlb.c ret = unmap(&hvc->mm->context.id, context 97 arch/um/kernel/tlb.c ret = protect(&hvc->mm->context.id, context 460 arch/um/kernel/tlb.c mm_id = &mm->context.id; context 44 arch/x86/boot/compressed/kaslr_64.c static void *alloc_pgt_page(void *context) context 46 arch/x86/boot/compressed/kaslr_64.c struct alloc_pgt_data *pages = (struct alloc_pgt_data *)context; context 88 arch/x86/boot/compressed/kaslr_64.c mapping_info.context = &pgt_data; context 367 arch/x86/entry/common.c unsigned long landing_pad = (unsigned long)current->mm->context.vdso + context 45 arch/x86/entry/vdso/vma.c const struct vdso_image *image = vma->vm_mm->context.vdso_image; context 63 arch/x86/entry/vdso/vma.c (unsigned long)current->mm->context.vdso; context 76 arch/x86/entry/vdso/vma.c const struct vdso_image *image = current->mm->context.vdso_image; context 82 arch/x86/entry/vdso/vma.c current->mm->context.vdso = (void __user *)new_vma->vm_start; context 90 arch/x86/entry/vdso/vma.c const struct vdso_image *image = vma->vm_mm->context.vdso_image; context 191 arch/x86/entry/vdso/vma.c current->mm->context.vdso = (void __user *)text_start; context 192 arch/x86/entry/vdso/vma.c current->mm->context.vdso_image = image; context 319 arch/x86/entry/vsyscall/vsyscall_64.c if (!mm || mm->context.ia32_compat) context 2142 arch/x86/events/core.c if (atomic_inc_return(&mm->context.perf_rdpmc_allowed) == 1) context 2152 arch/x86/events/core.c if (atomic_dec_and_test(&mm->context.perf_rdpmc_allowed)) context 2407 arch/x86/events/core.c ldt = READ_ONCE(current->active_mm->context.ldt); context 292 arch/x86/ia32/ia32_signal.c if (current->mm->context.vdso) context 293 arch/x86/ia32/ia32_signal.c restorer = current->mm->context.vdso + context 372 arch/x86/ia32/ia32_signal.c restorer = current->mm->context.vdso + context 332 arch/x86/include/asm/elf.h (unsigned long __force)current->mm->context.vdso); \ context 340 arch/x86/include/asm/elf.h (unsigned long __force)current->mm->context.vdso); \ context 355 arch/x86/include/asm/elf.h #define VDSO_CURRENT_BASE ((unsigned long)current->mm->context.vdso) context 358 arch/x86/include/asm/elf.h ((unsigned long)current->mm->context.vdso + \ context 7 arch/x86/include/asm/init.h void *context; /* context for alloc_pgt_page */ context 60 arch/x86/include/asm/mmu.h .context = { \ context 62 arch/x86/include/asm/mmu.h .lock = __MUTEX_INITIALIZER(mm.context.lock), \ context 34 arch/x86/include/asm/mmu_context.h atomic_read(&mm->context.perf_rdpmc_allowed)) context 83 arch/x86/include/asm/mmu_context.h mm->context.ldt = NULL; context 84 arch/x86/include/asm/mmu_context.h init_rwsem(&mm->context.ldt_usr_sem); context 106 arch/x86/include/asm/mmu_context.h ldt = READ_ONCE(mm->context.ldt); context 172 arch/x86/include/asm/mmu_context.h if (unlikely((unsigned long)prev->context.ldt | context 173 arch/x86/include/asm/mmu_context.h (unsigned long)next->context.ldt)) context 189 arch/x86/include/asm/mmu_context.h mutex_init(&mm->context.lock); context 191 arch/x86/include/asm/mmu_context.h mm->context.ctx_id = atomic64_inc_return(&last_mm_ctx_id); context 192 arch/x86/include/asm/mmu_context.h atomic64_set(&mm->context.tlb_gen, 0); context 197 arch/x86/include/asm/mmu_context.h mm->context.pkey_allocation_map = 0x1; context 199 arch/x86/include/asm/mmu_context.h mm->context.execute_only_pkey = -1; context 244 arch/x86/include/asm/mmu_context.h mm->context.pkey_allocation_map = oldmm->context.pkey_allocation_map; context 245 arch/x86/include/asm/mmu_context.h mm->context.execute_only_pkey = oldmm->context.execute_only_pkey; context 266 arch/x86/include/asm/mmu_context.h !(mm->context.ia32_compat == TIF_IA32); context 73 arch/x86/include/asm/mpx.h return (mm->context.bd_addr != MPX_INVALID_BOUNDS_DIR); context 82 arch/x86/include/asm/mpx.h mm->context.bd_addr = MPX_INVALID_BOUNDS_DIR; context 46 arch/x86/include/asm/pkeys.h #define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map) context 71 arch/x86/include/asm/pkeys.h if (pkey == mm->context.execute_only_pkey) context 599 arch/x86/include/asm/tlbflush.h return atomic64_inc_return(&mm->context.tlb_gen); context 189 arch/x86/kernel/cpu/mce/inject.c int context = MCJ_CTX(m->inject_flags); context 195 arch/x86/kernel/cpu/mce/inject.c switch (context) { context 224 arch/x86/kernel/cpu/mce/inject.c int context = MCJ_CTX(m->inject_flags); context 228 arch/x86/kernel/cpu/mce/inject.c if (context == MCJ_CTX_RANDOM) context 40 arch/x86/kernel/cpu/mce/severity.c unsigned char context; context 46 arch/x86/kernel/cpu/mce/severity.c #define KERNEL .context = IN_KERNEL context 47 arch/x86/kernel/cpu/mce/severity.c #define USER .context = IN_USER context 48 arch/x86/kernel/cpu/mce/severity.c #define KERNEL_RECOV .context = IN_KERNEL_RECOV context 221 arch/x86/kernel/cpu/mce/severity.c static int mce_severity_amd_smca(struct mce *m, enum context err_ctx) context 254 arch/x86/kernel/cpu/mce/severity.c enum context ctx = error_context(m); context 307 arch/x86/kernel/cpu/mce/severity.c enum context ctx = error_context(m); context 319 arch/x86/kernel/cpu/mce/severity.c if (s->context && ctx != s->context) context 109 arch/x86/kernel/ldt.c if (mm->context.ldt) { context 159 arch/x86/kernel/ldt.c if (boot_cpu_has(X86_FEATURE_PTI) && !mm->context.ldt) context 184 arch/x86/kernel/ldt.c if (boot_cpu_has(X86_FEATURE_PTI) && !mm->context.ldt) context 331 arch/x86/kernel/ldt.c mutex_lock(&mm->context.lock); context 334 arch/x86/kernel/ldt.c smp_store_release(&mm->context.ldt, ldt); context 339 arch/x86/kernel/ldt.c mutex_unlock(&mm->context.lock); context 367 arch/x86/kernel/ldt.c mutex_lock(&old_mm->context.lock); context 368 arch/x86/kernel/ldt.c if (!old_mm->context.ldt) context 371 arch/x86/kernel/ldt.c new_ldt = alloc_ldt_struct(old_mm->context.ldt->nr_entries); context 377 arch/x86/kernel/ldt.c memcpy(new_ldt->entries, old_mm->context.ldt->entries, context 387 arch/x86/kernel/ldt.c mm->context.ldt = new_ldt; context 390 arch/x86/kernel/ldt.c mutex_unlock(&old_mm->context.lock); context 401 arch/x86/kernel/ldt.c free_ldt_struct(mm->context.ldt); context 402 arch/x86/kernel/ldt.c mm->context.ldt = NULL; context 416 arch/x86/kernel/ldt.c down_read(&mm->context.ldt_usr_sem); context 418 arch/x86/kernel/ldt.c if (!mm->context.ldt) { context 426 arch/x86/kernel/ldt.c entries_size = mm->context.ldt->nr_entries * LDT_ENTRY_SIZE; context 430 arch/x86/kernel/ldt.c if (copy_to_user(ptr, mm->context.ldt->entries, entries_size)) { context 445 arch/x86/kernel/ldt.c up_read(&mm->context.ldt_usr_sem); context 505 arch/x86/kernel/ldt.c if (down_write_killable(&mm->context.ldt_usr_sem)) context 508 arch/x86/kernel/ldt.c old_ldt = mm->context.ldt; context 548 arch/x86/kernel/ldt.c up_write(&mm->context.ldt_usr_sem); context 198 arch/x86/kernel/machine_kexec_64.c .context = image, context 317 arch/x86/kernel/process_64.c mutex_lock(&task->mm->context.lock); context 318 arch/x86/kernel/process_64.c ldt = task->mm->context.ldt; context 323 arch/x86/kernel/process_64.c mutex_unlock(&task->mm->context.lock); context 634 arch/x86/kernel/process_64.c current->mm->context.ia32_compat = 0; context 649 arch/x86/kernel/process_64.c current->mm->context.ia32_compat = TIF_X32; context 670 arch/x86/kernel/process_64.c current->mm->context.ia32_compat = TIF_IA32; context 337 arch/x86/kernel/signal.c if (current->mm->context.vdso) context 338 arch/x86/kernel/signal.c restorer = current->mm->context.vdso + context 402 arch/x86/kernel/signal.c restorer = current->mm->context.vdso + context 36 arch/x86/kernel/step.c mutex_lock(&child->mm->context.lock); context 37 arch/x86/kernel/step.c if (unlikely(!child->mm->context.ldt || context 38 arch/x86/kernel/step.c seg >= child->mm->context.ldt->nr_entries)) context 41 arch/x86/kernel/step.c desc = &child->mm->context.ldt->entries[seg]; context 49 arch/x86/kernel/step.c mutex_unlock(&child->mm->context.lock); context 4341 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4343 arch/x86/kvm/mmu.c context->page_fault = nonpaging_page_fault; context 4344 arch/x86/kvm/mmu.c context->gva_to_gpa = nonpaging_gva_to_gpa; context 4345 arch/x86/kvm/mmu.c context->sync_page = nonpaging_sync_page; context 4346 arch/x86/kvm/mmu.c context->invlpg = nonpaging_invlpg; context 4347 arch/x86/kvm/mmu.c context->update_pte = nonpaging_update_pte; context 4348 arch/x86/kvm/mmu.c context->root_level = 0; context 4349 arch/x86/kvm/mmu.c context->shadow_root_level = PT32E_ROOT_LEVEL; context 4350 arch/x86/kvm/mmu.c context->direct_map = true; context 4351 arch/x86/kvm/mmu.c context->nx = false; context 4604 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4606 arch/x86/kvm/mmu.c __reset_rsvds_bits_mask(vcpu, &context->guest_rsvd_check, context 4607 arch/x86/kvm/mmu.c cpuid_maxphyaddr(vcpu), context->root_level, context 4608 arch/x86/kvm/mmu.c context->nx, context 4651 arch/x86/kvm/mmu.c struct kvm_mmu *context, bool execonly) context 4653 arch/x86/kvm/mmu.c __reset_rsvds_bits_mask_ept(&context->guest_rsvd_check, context 4663 arch/x86/kvm/mmu.c reset_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, struct kvm_mmu *context) context 4665 arch/x86/kvm/mmu.c bool uses_nx = context->nx || context 4666 arch/x86/kvm/mmu.c context->mmu_role.base.smep_andnot_wp; context 4674 arch/x86/kvm/mmu.c shadow_zero_check = &context->shadow_zero_check; context 4677 arch/x86/kvm/mmu.c context->shadow_root_level, uses_nx, context 4684 arch/x86/kvm/mmu.c for (i = context->shadow_root_level; --i >= 0;) { context 4704 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4709 arch/x86/kvm/mmu.c shadow_zero_check = &context->shadow_zero_check; context 4714 arch/x86/kvm/mmu.c context->shadow_root_level, false, context 4725 arch/x86/kvm/mmu.c for (i = context->shadow_root_level; --i >= 0;) { context 4737 arch/x86/kvm/mmu.c struct kvm_mmu *context, bool execonly) context 4739 arch/x86/kvm/mmu.c __reset_rsvds_bits_mask_ept(&context->shadow_zero_check, context 4910 arch/x86/kvm/mmu.c struct kvm_mmu *context, context 4913 arch/x86/kvm/mmu.c context->nx = is_nx(vcpu); context 4914 arch/x86/kvm/mmu.c context->root_level = level; context 4916 arch/x86/kvm/mmu.c reset_rsvds_bits_mask(vcpu, context); context 4917 arch/x86/kvm/mmu.c update_permission_bitmask(vcpu, context, false); context 4918 arch/x86/kvm/mmu.c update_pkru_bitmask(vcpu, context, false); context 4919 arch/x86/kvm/mmu.c update_last_nonleaf_level(vcpu, context); context 4922 arch/x86/kvm/mmu.c context->page_fault = paging64_page_fault; context 4923 arch/x86/kvm/mmu.c context->gva_to_gpa = paging64_gva_to_gpa; context 4924 arch/x86/kvm/mmu.c context->sync_page = paging64_sync_page; context 4925 arch/x86/kvm/mmu.c context->invlpg = paging64_invlpg; context 4926 arch/x86/kvm/mmu.c context->update_pte = paging64_update_pte; context 4927 arch/x86/kvm/mmu.c context->shadow_root_level = level; context 4928 arch/x86/kvm/mmu.c context->direct_map = false; context 4932 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4937 arch/x86/kvm/mmu.c paging64_init_context_common(vcpu, context, root_level); context 4941 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4943 arch/x86/kvm/mmu.c context->nx = false; context 4944 arch/x86/kvm/mmu.c context->root_level = PT32_ROOT_LEVEL; context 4946 arch/x86/kvm/mmu.c reset_rsvds_bits_mask(vcpu, context); context 4947 arch/x86/kvm/mmu.c update_permission_bitmask(vcpu, context, false); context 4948 arch/x86/kvm/mmu.c update_pkru_bitmask(vcpu, context, false); context 4949 arch/x86/kvm/mmu.c update_last_nonleaf_level(vcpu, context); context 4951 arch/x86/kvm/mmu.c context->page_fault = paging32_page_fault; context 4952 arch/x86/kvm/mmu.c context->gva_to_gpa = paging32_gva_to_gpa; context 4953 arch/x86/kvm/mmu.c context->sync_page = paging32_sync_page; context 4954 arch/x86/kvm/mmu.c context->invlpg = paging32_invlpg; context 4955 arch/x86/kvm/mmu.c context->update_pte = paging32_update_pte; context 4956 arch/x86/kvm/mmu.c context->shadow_root_level = PT32E_ROOT_LEVEL; context 4957 arch/x86/kvm/mmu.c context->direct_map = false; context 4961 arch/x86/kvm/mmu.c struct kvm_mmu *context) context 4963 arch/x86/kvm/mmu.c paging64_init_context_common(vcpu, context, PT32E_ROOT_LEVEL); context 5018 arch/x86/kvm/mmu.c struct kvm_mmu *context = vcpu->arch.mmu; context 5023 arch/x86/kvm/mmu.c if (new_role.as_u64 == context->mmu_role.as_u64) context 5026 arch/x86/kvm/mmu.c context->mmu_role.as_u64 = new_role.as_u64; context 5027 arch/x86/kvm/mmu.c context->page_fault = tdp_page_fault; context 5028 arch/x86/kvm/mmu.c context->sync_page = nonpaging_sync_page; context 5029 arch/x86/kvm/mmu.c context->invlpg = nonpaging_invlpg; context 5030 arch/x86/kvm/mmu.c context->update_pte = nonpaging_update_pte; context 5031 arch/x86/kvm/mmu.c context->shadow_root_level = kvm_x86_ops->get_tdp_level(vcpu); context 5032 arch/x86/kvm/mmu.c context->direct_map = true; context 5033 arch/x86/kvm/mmu.c context->set_cr3 = kvm_x86_ops->set_tdp_cr3; context 5034 arch/x86/kvm/mmu.c context->get_cr3 = get_cr3; context 5035 arch/x86/kvm/mmu.c context->get_pdptr = kvm_pdptr_read; context 5036 arch/x86/kvm/mmu.c context->inject_page_fault = kvm_inject_page_fault; context 5039 arch/x86/kvm/mmu.c context->nx = false; context 5040 arch/x86/kvm/mmu.c context->gva_to_gpa = nonpaging_gva_to_gpa; context 5041 arch/x86/kvm/mmu.c context->root_level = 0; context 5043 arch/x86/kvm/mmu.c context->nx = is_nx(vcpu); context 5044 arch/x86/kvm/mmu.c context->root_level = is_la57_mode(vcpu) ? context 5046 arch/x86/kvm/mmu.c reset_rsvds_bits_mask(vcpu, context); context 5047 arch/x86/kvm/mmu.c context->gva_to_gpa = paging64_gva_to_gpa; context 5049 arch/x86/kvm/mmu.c context->nx = is_nx(vcpu); context 5050 arch/x86/kvm/mmu.c context->root_level = PT32E_ROOT_LEVEL; context 5051 arch/x86/kvm/mmu.c reset_rsvds_bits_mask(vcpu, context); context 5052 arch/x86/kvm/mmu.c context->gva_to_gpa = paging64_gva_to_gpa; context 5054 arch/x86/kvm/mmu.c context->nx = false; context 5055 arch/x86/kvm/mmu.c context->root_level = PT32_ROOT_LEVEL; context 5056 arch/x86/kvm/mmu.c reset_rsvds_bits_mask(vcpu, context); context 5057 arch/x86/kvm/mmu.c context->gva_to_gpa = paging32_gva_to_gpa; context 5060 arch/x86/kvm/mmu.c update_permission_bitmask(vcpu, context, false); context 5061 arch/x86/kvm/mmu.c update_pkru_bitmask(vcpu, context, false); context 5062 arch/x86/kvm/mmu.c update_last_nonleaf_level(vcpu, context); context 5063 arch/x86/kvm/mmu.c reset_tdp_shadow_zero_bits_mask(vcpu, context); context 5090 arch/x86/kvm/mmu.c struct kvm_mmu *context = vcpu->arch.mmu; context 5095 arch/x86/kvm/mmu.c if (new_role.as_u64 == context->mmu_role.as_u64) context 5099 arch/x86/kvm/mmu.c nonpaging_init_context(vcpu, context); context 5101 arch/x86/kvm/mmu.c paging64_init_context(vcpu, context); context 5103 arch/x86/kvm/mmu.c paging32E_init_context(vcpu, context); context 5105 arch/x86/kvm/mmu.c paging32_init_context(vcpu, context); context 5107 arch/x86/kvm/mmu.c context->mmu_role.as_u64 = new_role.as_u64; context 5108 arch/x86/kvm/mmu.c reset_shadow_zero_bits_mask(vcpu, context); context 5144 arch/x86/kvm/mmu.c struct kvm_mmu *context = vcpu->arch.mmu; context 5152 arch/x86/kvm/mmu.c if (new_role.as_u64 == context->mmu_role.as_u64) context 5155 arch/x86/kvm/mmu.c context->shadow_root_level = PT64_ROOT_4LEVEL; context 5157 arch/x86/kvm/mmu.c context->nx = true; context 5158 arch/x86/kvm/mmu.c context->ept_ad = accessed_dirty; context 5159 arch/x86/kvm/mmu.c context->page_fault = ept_page_fault; context 5160 arch/x86/kvm/mmu.c context->gva_to_gpa = ept_gva_to_gpa; context 5161 arch/x86/kvm/mmu.c context->sync_page = ept_sync_page; context 5162 arch/x86/kvm/mmu.c context->invlpg = ept_invlpg; context 5163 arch/x86/kvm/mmu.c context->update_pte = ept_update_pte; context 5164 arch/x86/kvm/mmu.c context->root_level = PT64_ROOT_4LEVEL; context 5165 arch/x86/kvm/mmu.c context->direct_map = false; context 5166 arch/x86/kvm/mmu.c context->mmu_role.as_u64 = new_role.as_u64; context 5168 arch/x86/kvm/mmu.c update_permission_bitmask(vcpu, context, true); context 5169 arch/x86/kvm/mmu.c update_pkru_bitmask(vcpu, context, true); context 5170 arch/x86/kvm/mmu.c update_last_nonleaf_level(vcpu, context); context 5171 arch/x86/kvm/mmu.c reset_rsvds_bits_mask_ept(vcpu, context, execonly); context 5172 arch/x86/kvm/mmu.c reset_ept_shadow_zero_bits_mask(vcpu, context, execonly); context 5178 arch/x86/kvm/mmu.c struct kvm_mmu *context = vcpu->arch.mmu; context 5181 arch/x86/kvm/mmu.c context->set_cr3 = kvm_x86_ops->set_cr3; context 5182 arch/x86/kvm/mmu.c context->get_cr3 = get_cr3; context 5183 arch/x86/kvm/mmu.c context->get_pdptr = kvm_pdptr_read; context 5184 arch/x86/kvm/mmu.c context->inject_page_fault = kvm_inject_page_fault; context 57 arch/x86/kvm/mmu.h reset_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, struct kvm_mmu *context); context 586 arch/x86/lib/insn-eval.c mutex_lock(¤t->active_mm->context.lock); context 587 arch/x86/lib/insn-eval.c ldt = current->active_mm->context.ldt; context 593 arch/x86/lib/insn-eval.c mutex_unlock(¤t->active_mm->context.lock); context 30 arch/x86/math-emu/fpu_system.h mutex_lock(¤t->mm->context.lock); context 31 arch/x86/math-emu/fpu_system.h if (current->mm->context.ldt && seg < current->mm->context.ldt->nr_entries) context 32 arch/x86/math-emu/fpu_system.h ret = current->mm->context.ldt->entries[seg]; context 33 arch/x86/math-emu/fpu_system.h mutex_unlock(¤t->mm->context.lock); context 51 arch/x86/mm/ident_map.c pmd = (pmd_t *)info->alloc_pgt_page(info->context); context 79 arch/x86/mm/ident_map.c pud = (pud_t *)info->alloc_pgt_page(info->context); context 120 arch/x86/mm/ident_map.c p4d = (p4d_t *)info->alloc_pgt_page(info->context); context 240 arch/x86/mm/mpx.c mm->context.bd_addr = bd_base; context 241 arch/x86/mm/mpx.c if (mm->context.bd_addr == MPX_INVALID_BOUNDS_DIR) context 256 arch/x86/mm/mpx.c mm->context.bd_addr = MPX_INVALID_BOUNDS_DIR; context 825 arch/x86/mm/mpx.c bde_vaddr = mm->context.bd_addr + mpx_get_bd_entry_offset(mm, start); context 18 arch/x86/mm/pkeys.c int execute_only_pkey = mm->context.execute_only_pkey; context 59 arch/x86/mm/pkeys.c mm->context.execute_only_pkey = execute_only_pkey; context 68 arch/x86/mm/pkeys.c if (vma_pkey(vma) != vma->vm_mm->context.execute_only_pkey) context 92 arch/x86/mm/tlb.c next->context.ctx_id) context 335 arch/x86/mm/tlb.c next->context.ctx_id); context 361 arch/x86/mm/tlb.c next_tlb_gen = atomic64_read(&next->context.tlb_gen); context 405 arch/x86/mm/tlb.c next_tlb_gen = atomic64_read(&next->context.tlb_gen); context 415 arch/x86/mm/tlb.c this_cpu_write(cpu_tlbstate.ctxs[new_asid].ctx_id, next->context.ctx_id); context 486 arch/x86/mm/tlb.c u64 tlb_gen = atomic64_read(&init_mm.context.tlb_gen); context 507 arch/x86/mm/tlb.c this_cpu_write(cpu_tlbstate.ctxs[0].ctx_id, mm->context.ctx_id); context 535 arch/x86/mm/tlb.c u64 mm_tlb_gen = atomic64_read(&loaded_mm->context.tlb_gen); context 545 arch/x86/mm/tlb.c loaded_mm->context.ctx_id); context 415 arch/x86/pci/mmconfig-shared.c void *context, void **rv) context 417 arch/x86/pci/mmconfig-shared.c struct resource *mcfg_res = context; context 420 arch/x86/pci/mmconfig-shared.c check_mcfg_resource, context); context 132 arch/x86/platform/olpc/olpc-xo15-sci.c static u32 xo15_sci_gpe_handler(acpi_handle gpe_device, u32 gpe, void *context) context 87 arch/x86/power/hibernate_64.c static void *alloc_pgt_page(void *context) context 58 arch/x86/um/ldt.c uml_ldt_t *ldt = ¤t->mm->context.arch.ldt; context 123 arch/x86/um/ldt.c uml_ldt_t *ldt = ¤t->mm->context.arch.ldt; context 124 arch/x86/um/ldt.c struct mm_id * mm_idp = ¤t->mm->context.id; context 20 arch/x86/um/syscalls_64.c int pid = task->mm->context.id.u.pid; context 82 arch/xtensa/include/asm/mmu_context.h mm->context.asid[cpu] = asid; context 83 arch/xtensa/include/asm/mmu_context.h mm->context.cpu = cpu; context 93 arch/xtensa/include/asm/mmu_context.h unsigned long asid = mm->context.asid[cpu]; context 104 arch/xtensa/include/asm/mmu_context.h set_rasid_register(ASID_INSERT(mm->context.asid[cpu])); context 119 arch/xtensa/include/asm/mmu_context.h mm->context.asid[cpu] = NO_CONTEXT; context 121 arch/xtensa/include/asm/mmu_context.h mm->context.cpu = -1; context 129 arch/xtensa/include/asm/mmu_context.h int migrated = next->context.cpu != cpu; context 133 arch/xtensa/include/asm/mmu_context.h next->context.cpu = cpu; context 114 arch/xtensa/kernel/asm-offsets.c DEFINE(MM_CONTEXT, offsetof (struct mm_struct, context)); context 70 arch/xtensa/mm/tlb.c mm->context.asid[cpu] = NO_CONTEXT; context 74 arch/xtensa/mm/tlb.c mm->context.asid[cpu] = NO_CONTEXT; context 75 arch/xtensa/mm/tlb.c mm->context.cpu = -1; context 95 arch/xtensa/mm/tlb.c if (mm->context.asid[cpu] == NO_CONTEXT) context 99 arch/xtensa/mm/tlb.c (unsigned long)mm->context.asid[cpu], start, end); context 105 arch/xtensa/mm/tlb.c set_rasid_register(ASID_INSERT(mm->context.asid[cpu])); context 133 arch/xtensa/mm/tlb.c if (mm->context.asid[cpu] == NO_CONTEXT) context 139 arch/xtensa/mm/tlb.c set_rasid_register(ASID_INSERT(mm->context.asid[cpu])); context 36 crypto/asymmetric_keys/mscode_parser.c int mscode_note_content_type(void *context, size_t hdrlen, context 68 crypto/asymmetric_keys/mscode_parser.c int mscode_note_digest_algo(void *context, size_t hdrlen, context 72 crypto/asymmetric_keys/mscode_parser.c struct pefile_context *ctx = context; context 116 crypto/asymmetric_keys/mscode_parser.c int mscode_note_digest(void *context, size_t hdrlen, context 120 crypto/asymmetric_keys/mscode_parser.c struct pefile_context *ctx = context; context 204 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_OID(void *context, size_t hdrlen, context 208 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 223 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_digest_algo(void *context, size_t hdrlen, context 227 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 261 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_pkey_algo(void *context, size_t hdrlen, context 265 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 282 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_check_content_type(void *context, size_t hdrlen, context 286 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 299 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_signeddata_version(void *context, size_t hdrlen, context 303 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 333 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_signerinfo_version(void *context, size_t hdrlen, context 337 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 376 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_extract_cert(void *context, size_t hdrlen, context 380 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 416 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_certificate_list(void *context, size_t hdrlen, context 420 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 434 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_content(void *context, size_t hdrlen, context 438 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 454 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_data(void *context, size_t hdrlen, context 458 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 471 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_authenticated_attr(void *context, size_t hdrlen, context 475 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 554 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_set_of_authattrs(void *context, size_t hdrlen, context 558 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 582 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_serial(void *context, size_t hdrlen, context 586 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 595 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_issuer(void *context, size_t hdrlen, context 599 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 608 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_skid(void *context, size_t hdrlen, context 612 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 624 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_sig_note_signature(void *context, size_t hdrlen, context 628 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 641 crypto/asymmetric_keys/pkcs7_parser.c int pkcs7_note_signed_info(void *context, size_t hdrlen, context 645 crypto/asymmetric_keys/pkcs7_parser.c struct pkcs7_parse_context *ctx = context; context 33 crypto/asymmetric_keys/pkcs8_parser.c int pkcs8_note_OID(void *context, size_t hdrlen, context 37 crypto/asymmetric_keys/pkcs8_parser.c struct pkcs8_parse_context *ctx = context; context 53 crypto/asymmetric_keys/pkcs8_parser.c int pkcs8_note_version(void *context, size_t hdrlen, context 67 crypto/asymmetric_keys/pkcs8_parser.c int pkcs8_note_algo(void *context, size_t hdrlen, context 71 crypto/asymmetric_keys/pkcs8_parser.c struct pkcs8_parse_context *ctx = context; context 83 crypto/asymmetric_keys/pkcs8_parser.c int pkcs8_note_key(void *context, size_t hdrlen, context 87 crypto/asymmetric_keys/pkcs8_parser.c struct pkcs8_parse_context *ctx = context; context 21 crypto/asymmetric_keys/tpm_parser.c int tpm_note_key(void *context, size_t hdrlen, context 25 crypto/asymmetric_keys/tpm_parser.c struct tpm_parse_context *ctx = context; context 155 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_OID(void *context, size_t hdrlen, context 159 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 175 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_tbs_certificate(void *context, size_t hdrlen, context 179 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 192 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_pkey_algo(void *context, size_t hdrlen, context 196 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 254 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_signature(void *context, size_t hdrlen, context 258 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 286 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_serial(void *context, size_t hdrlen, context 290 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 299 crypto/asymmetric_keys/x509_cert_parser.c int x509_extract_name_segment(void *context, size_t hdrlen, context 303 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 403 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_issuer(void *context, size_t hdrlen, context 407 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 413 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_subject(void *context, size_t hdrlen, context 417 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 426 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_params(void *context, size_t hdrlen, context 430 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 447 crypto/asymmetric_keys/x509_cert_parser.c int x509_extract_key_data(void *context, size_t hdrlen, context 451 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 476 crypto/asymmetric_keys/x509_cert_parser.c int x509_process_extension(void *context, size_t hdrlen, context 480 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 610 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_not_before(void *context, size_t hdrlen, context 614 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 618 crypto/asymmetric_keys/x509_cert_parser.c int x509_note_not_after(void *context, size_t hdrlen, context 622 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 629 crypto/asymmetric_keys/x509_cert_parser.c int x509_akid_note_kid(void *context, size_t hdrlen, context 633 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 652 crypto/asymmetric_keys/x509_cert_parser.c int x509_akid_note_name(void *context, size_t hdrlen, context 656 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 668 crypto/asymmetric_keys/x509_cert_parser.c int x509_akid_note_serial(void *context, size_t hdrlen, context 672 crypto/asymmetric_keys/x509_cert_parser.c struct x509_parse_context *ctx = context; context 149 crypto/ecrdsa.c int ecrdsa_param_curve(void *context, size_t hdrlen, unsigned char tag, context 152 crypto/ecrdsa.c struct ecrdsa_ctx *ctx = context; context 162 crypto/ecrdsa.c int ecrdsa_param_digest(void *context, size_t hdrlen, unsigned char tag, context 165 crypto/ecrdsa.c struct ecrdsa_ctx *ctx = context; context 173 crypto/ecrdsa.c int ecrdsa_parse_pub_key(void *context, size_t hdrlen, unsigned char tag, context 176 crypto/ecrdsa.c struct ecrdsa_ctx *ctx = context; context 16 crypto/rsa_helper.c int rsa_get_n(void *context, size_t hdrlen, unsigned char tag, context 19 crypto/rsa_helper.c struct rsa_key *key = context; context 46 crypto/rsa_helper.c int rsa_get_e(void *context, size_t hdrlen, unsigned char tag, context 49 crypto/rsa_helper.c struct rsa_key *key = context; context 61 crypto/rsa_helper.c int rsa_get_d(void *context, size_t hdrlen, unsigned char tag, context 64 crypto/rsa_helper.c struct rsa_key *key = context; context 76 crypto/rsa_helper.c int rsa_get_p(void *context, size_t hdrlen, unsigned char tag, context 79 crypto/rsa_helper.c struct rsa_key *key = context; context 91 crypto/rsa_helper.c int rsa_get_q(void *context, size_t hdrlen, unsigned char tag, context 94 crypto/rsa_helper.c struct rsa_key *key = context; context 106 crypto/rsa_helper.c int rsa_get_dp(void *context, size_t hdrlen, unsigned char tag, context 109 crypto/rsa_helper.c struct rsa_key *key = context; context 121 crypto/rsa_helper.c int rsa_get_dq(void *context, size_t hdrlen, unsigned char tag, context 124 crypto/rsa_helper.c struct rsa_key *key = context; context 136 crypto/rsa_helper.c int rsa_get_qinv(void *context, size_t hdrlen, unsigned char tag, context 139 crypto/rsa_helper.c struct rsa_key *key = context; context 58 drivers/acpi/acpi_dbg.c void *context; context 393 drivers/acpi/acpi_dbg.c void *context; context 399 drivers/acpi/acpi_dbg.c context = acpi_aml_io.context; context 404 drivers/acpi/acpi_dbg.c function(context); context 425 drivers/acpi/acpi_dbg.c static int acpi_aml_create_thread(acpi_osd_exec_callback function, void *context) context 431 drivers/acpi/acpi_dbg.c acpi_aml_io.context = context; context 73 drivers/acpi/acpi_memhotplug.c acpi_memory_get_resource(struct acpi_resource *resource, void *context) context 75 drivers/acpi/acpi_memhotplug.c struct acpi_memory_device *mem_device = context; context 496 drivers/acpi/acpi_processor.c void *context, context 636 drivers/acpi/acpi_processor.c void *context, context 1720 drivers/acpi/acpi_video.c acpi_video_bus_match(acpi_handle handle, u32 level, void *context, context 1723 drivers/acpi/acpi_video.c struct acpi_device *device = context; context 240 drivers/acpi/acpica/acdebug.h void ACPI_SYSTEM_XFACE acpi_db_execute_thread(void *context); context 238 drivers/acpi/acpica/acdispat.h union acpi_generic_state *state, void *context); context 106 drivers/acpi/acpica/acevents.h void *context); context 127 drivers/acpi/acpica/acevents.h u32 level, void *context, void **return_value); context 133 drivers/acpi/acpica/acevents.h acpi_ev_walk_gpe_list(acpi_gpe_callback gpe_walk_callback, void *context); context 137 drivers/acpi/acpica/acevents.h struct acpi_gpe_block_info *gpe_block, void *context); context 148 drivers/acpi/acpica/acevents.h void *context); context 168 drivers/acpi/acpica/acevents.h acpi_adr_space_setup setup, void *context); context 238 drivers/acpi/acpica/acevents.h u32 ACPI_SYSTEM_XFACE acpi_ev_gpe_xrupt_handler(void *context); context 86 drivers/acpi/acpica/achware.h struct acpi_gpe_block_info *gpe_block, void *context); context 92 drivers/acpi/acpica/achware.h struct acpi_gpe_block_info *gpe_block, void *context); context 109 drivers/acpi/acpica/achware.h void *context); context 414 drivers/acpi/acpica/aclocal.h void *context; /* Context to be passed to handler */ context 421 drivers/acpi/acpica/aclocal.h void *context; /* Context to be passed to handler */ context 515 drivers/acpi/acpica/aclocal.h gpe_block, void *context); context 521 drivers/acpi/acpica/aclocal.h void *context; /* Context to be passed to handler */ context 640 drivers/acpi/acpica/aclocal.h void *context; context 63 drivers/acpi/acpica/acnamesp.h u32 level, void *context, void **return_value); context 83 drivers/acpi/acpica/acnamesp.h void *context, void **return_value); context 191 drivers/acpi/acpica/acnamesp.h u32 level, void *context, void **return_value); context 276 drivers/acpi/acpica/acobject.h void *context; context 285 drivers/acpi/acpica/acobject.h void *context; context 209 drivers/acpi/acpica/acresrc.h u32 offset, u8 resource_index, void **context); context 118 drivers/acpi/acpica/acstruct.h void *context; context 123 drivers/acpi/acpica/acutils.h u8 resource_index, void **context); context 129 drivers/acpi/acpica/acutils.h void *context); context 549 drivers/acpi/acpica/acutils.h acpi_pkg_callback walk_callback, void *context); context 581 drivers/acpi/acpica/acutils.h void **context); context 30 drivers/acpi/acpica/dbcmds.c acpi_db_resource_callback(struct acpi_resource *resource, void *context); context 34 drivers/acpi/acpica/dbcmds.c u32 nesting_level, void *context, void **return_value); context 733 drivers/acpi/acpica/dbcmds.c acpi_db_resource_callback(struct acpi_resource *resource, void *context) context 753 drivers/acpi/acpica/dbcmds.c u32 nesting_level, void *context, void **return_value) context 29 drivers/acpi/acpica/dbdisply.c void *context, void **return_value); context 1057 drivers/acpi/acpica/dbdisply.c void *context, void **return_value) context 28 drivers/acpi/acpica/dbexec.c static void ACPI_SYSTEM_XFACE acpi_db_method_thread(void *context); context 32 drivers/acpi/acpica/dbexec.c u32 nesting_level, void *context, void **return_value); context 34 drivers/acpi/acpica/dbexec.c static void ACPI_SYSTEM_XFACE acpi_db_single_execution_thread(void *context); context 292 drivers/acpi/acpica/dbexec.c u32 nesting_level, void *context, void **return_value) context 486 drivers/acpi/acpica/dbexec.c static void ACPI_SYSTEM_XFACE acpi_db_method_thread(void *context) context 489 drivers/acpi/acpica/dbexec.c struct acpi_db_method_info *info = context; context 590 drivers/acpi/acpica/dbexec.c static void ACPI_SYSTEM_XFACE acpi_db_single_execution_thread(void *context) context 592 drivers/acpi/acpica/dbexec.c struct acpi_db_method_info *info = context; context 1158 drivers/acpi/acpica/dbinput.c void ACPI_SYSTEM_XFACE acpi_db_execute_thread(void *context) context 22 drivers/acpi/acpica/dbmethod.c u32 nesting_level, void *context, void **return_value); context 358 drivers/acpi/acpica/dbmethod.c u32 nesting_level, void *context, void **return_value) context 363 drivers/acpi/acpica/dbmethod.c (struct acpi_db_execute_walk *)context; context 21 drivers/acpi/acpica/dbnames.c void *context, void **return_value); context 26 drivers/acpi/acpica/dbnames.c void *context, void **return_value); context 31 drivers/acpi/acpica/dbnames.c void *context, void **return_value); context 36 drivers/acpi/acpica/dbnames.c void *context, void **return_value); context 40 drivers/acpi/acpica/dbnames.c u32 nesting_level, void *context, void **return_value); context 45 drivers/acpi/acpica/dbnames.c void *context, void **return_value); context 49 drivers/acpi/acpica/dbnames.c u32 nesting_level, void *context, void **return_value); context 293 drivers/acpi/acpica/dbnames.c void *context, void **return_value) context 296 drivers/acpi/acpica/dbnames.c char *requested_name = (char *)context; context 397 drivers/acpi/acpica/dbnames.c void *context, void **return_value) context 401 drivers/acpi/acpica/dbnames.c u32 *count = (u32 *)context; context 489 drivers/acpi/acpica/dbnames.c void *context, void **return_value) context 491 drivers/acpi/acpica/dbnames.c struct acpi_object_info *info = (struct acpi_object_info *)context; context 520 drivers/acpi/acpica/dbnames.c void *context, void **return_value) context 522 drivers/acpi/acpica/dbnames.c struct acpi_walk_info *info = (struct acpi_walk_info *)context; context 645 drivers/acpi/acpica/dbnames.c u32 nesting_level, void *context, void **return_value) context 648 drivers/acpi/acpica/dbnames.c (struct acpi_integrity_info *)context; context 745 drivers/acpi/acpica/dbnames.c void *context, void **return_value) context 748 drivers/acpi/acpica/dbnames.c (union acpi_operand_object *)context; context 813 drivers/acpi/acpica/dbnames.c u32 nesting_level, void *context, void **return_value) context 24 drivers/acpi/acpica/dbstats.c void *context, void **return_value); context 216 drivers/acpi/acpica/dbstats.c void *context, void **return_value) context 23 drivers/acpi/acpica/dbtest.c u32 nesting_level, void *context, void **return_value); context 53 drivers/acpi/acpica/dbtest.c void *context, void **return_value); context 241 drivers/acpi/acpica/dbtest.c u32 nesting_level, void *context, void **return_value) context 973 drivers/acpi/acpica/dbtest.c void *context, void **return_value) context 978 drivers/acpi/acpica/dbtest.c (struct acpi_db_execute_walk *)context; context 23 drivers/acpi/acpica/dsinit.c u32 level, void *context, void **return_value); context 47 drivers/acpi/acpica/dsinit.c u32 level, void *context, void **return_value) context 50 drivers/acpi/acpica/dsinit.c (struct acpi_init_walk_info *)context; context 326 drivers/acpi/acpica/dspkginit.c union acpi_generic_state *state, void *context) context 342 drivers/acpi/acpica/dspkginit.c if (context) { context 346 drivers/acpi/acpica/dspkginit.c element_ptr = (union acpi_operand_object **)context; context 137 drivers/acpi/acpica/evevent.c acpi_gbl_fixed_event_handlers[i].context = NULL; context 265 drivers/acpi/acpica/evevent.c handler) (acpi_gbl_fixed_event_handlers[event].context)); context 19 drivers/acpi/acpica/evglock.c static u32 acpi_ev_global_lock_handler(void *context); context 115 drivers/acpi/acpica/evglock.c static u32 acpi_ev_global_lock_handler(void *context) context 19 drivers/acpi/acpica/evgpe.c static void ACPI_SYSTEM_XFACE acpi_ev_asynch_execute_gpe_method(void *context); context 21 drivers/acpi/acpica/evgpe.c static void ACPI_SYSTEM_XFACE acpi_ev_asynch_enable_gpe(void *context); context 455 drivers/acpi/acpica/evgpe.c static void ACPI_SYSTEM_XFACE acpi_ev_asynch_execute_gpe_method(void *context) context 457 drivers/acpi/acpica/evgpe.c struct acpi_gpe_event_info *gpe_event_info = context; context 552 drivers/acpi/acpica/evgpe.c static void ACPI_SYSTEM_XFACE acpi_ev_asynch_enable_gpe(void *context) context 554 drivers/acpi/acpica/evgpe.c struct acpi_gpe_event_info *gpe_event_info = context; context 718 drivers/acpi/acpica/evgpe.c gpe_handler_info->context); context 808 drivers/acpi/acpica/evgpe.c context); context 406 drivers/acpi/acpica/evgpeblk.c void *context) context 414 drivers/acpi/acpica/evgpeblk.c u8 *is_polling_needed = context; context 285 drivers/acpi/acpica/evgpeinit.c u32 level, void *context, void **return_value) context 290 drivers/acpi/acpica/evgpeinit.c ACPI_CAST_PTR(struct acpi_gpe_walk_info, context); context 31 drivers/acpi/acpica/evgpeutil.c acpi_ev_walk_gpe_list(acpi_gpe_callback gpe_walk_callback, void *context) context 56 drivers/acpi/acpica/evgpeutil.c context); context 90 drivers/acpi/acpica/evgpeutil.c struct acpi_gpe_block_info *gpe_block, void *context) context 92 drivers/acpi/acpica/evgpeutil.c struct acpi_gpe_device_info *info = context; context 272 drivers/acpi/acpica/evgpeutil.c void *context) context 22 drivers/acpi/acpica/evhandler.c u32 level, void *context, void **return_value); context 166 drivers/acpi/acpica/evhandler.c u32 level, void *context, void **return_value) context 176 drivers/acpi/acpica/evhandler.c handler_obj = (union acpi_operand_object *)context; context 331 drivers/acpi/acpica/evhandler.c acpi_adr_space_setup setup, void *context) context 497 drivers/acpi/acpica/evhandler.c handler_obj->address_space.context = context; context 19 drivers/acpi/acpica/evmisc.c static void ACPI_SYSTEM_XFACE acpi_ev_notify_dispatch(void *context); context 161 drivers/acpi/acpica/evmisc.c static void ACPI_SYSTEM_XFACE acpi_ev_notify_dispatch(void *context) context 163 drivers/acpi/acpica/evmisc.c union acpi_generic_state *info = (union acpi_generic_state *)context; context 173 drivers/acpi/acpica/evmisc.c info->notify.global->context); context 182 drivers/acpi/acpica/evmisc.c handler_obj->notify.context); context 28 drivers/acpi/acpica/evregion.c u32 level, void *context, void **return_value); context 113 drivers/acpi/acpica/evregion.c struct acpi_connection_info *context; context 137 drivers/acpi/acpica/evregion.c context = handler_desc->address_space.context; context 168 drivers/acpi/acpica/evregion.c context, ®ion_context); context 221 drivers/acpi/acpica/evregion.c context && field_obj) { context 225 drivers/acpi/acpica/evregion.c context->connection = field_obj->field.resource_buffer; context 226 drivers/acpi/acpica/evregion.c context->length = field_obj->field.resource_length; context 227 drivers/acpi/acpica/evregion.c context->access_length = field_obj->field.access_length; context 230 drivers/acpi/acpica/evregion.c context && field_obj) { context 234 drivers/acpi/acpica/evregion.c context->connection = field_obj->field.resource_buffer; context 235 drivers/acpi/acpica/evregion.c context->length = field_obj->field.resource_length; context 236 drivers/acpi/acpica/evregion.c context->access_length = field_obj->field.access_length; context 260 drivers/acpi/acpica/evregion.c status = handler(function, address, bit_width, value, context, context 394 drivers/acpi/acpica/evregion.c context, region_context); context 713 drivers/acpi/acpica/evregion.c u32 level, void *context, void **return_value) context 720 drivers/acpi/acpica/evregion.c info = ACPI_CAST_PTR(struct acpi_reg_walk_info, context); context 17 drivers/acpi/acpica/evsci.c static u32 ACPI_SYSTEM_XFACE acpi_ev_sci_xrupt_handler(void *context); context 54 drivers/acpi/acpica/evsci.c int_status |= sci_handler->address(sci_handler->context); context 76 drivers/acpi/acpica/evsci.c static u32 ACPI_SYSTEM_XFACE acpi_ev_sci_xrupt_handler(void *context) context 78 drivers/acpi/acpica/evsci.c struct acpi_gpe_xrupt_info *gpe_xrupt_list = context; context 120 drivers/acpi/acpica/evsci.c u32 ACPI_SYSTEM_XFACE acpi_ev_gpe_xrupt_handler(void *context) context 122 drivers/acpi/acpica/evsci.c struct acpi_gpe_xrupt_info *gpe_xrupt_list = context; context 27 drivers/acpi/acpica/evxface.c acpi_gpe_handler address, void *context); context 59 drivers/acpi/acpica/evxface.c acpi_notify_handler handler, void *context) context 98 drivers/acpi/acpica/evxface.c acpi_gbl_global_notify[i].context = context; context 168 drivers/acpi/acpica/evxface.c handler_obj->notify.context = context; context 253 drivers/acpi/acpica/evxface.c acpi_gbl_global_notify[i].context = NULL; context 389 drivers/acpi/acpica/evxface.c acpi_status acpi_install_sci_handler(acpi_sci_handler address, void *context) context 410 drivers/acpi/acpica/evxface.c new_sci_handler->context = context; context 534 drivers/acpi/acpica/evxface.c acpi_install_global_event_handler(acpi_gbl_event_handler handler, void *context) context 559 drivers/acpi/acpica/evxface.c acpi_gbl_global_event_handler_context = context; context 585 drivers/acpi/acpica/evxface.c acpi_event_handler handler, void *context) context 612 drivers/acpi/acpica/evxface.c acpi_gbl_fixed_event_handlers[event].context = context; context 625 drivers/acpi/acpica/evxface.c acpi_gbl_fixed_event_handlers[event].context = NULL; context 677 drivers/acpi/acpica/evxface.c acpi_gbl_fixed_event_handlers[event].context = NULL; context 720 drivers/acpi/acpica/evxface.c acpi_gpe_handler address, void *context) context 769 drivers/acpi/acpica/evxface.c handler->context = context; context 842 drivers/acpi/acpica/evxface.c u32 type, acpi_gpe_handler address, void *context) context 849 drivers/acpi/acpica/evxface.c FALSE, address, context); context 876 drivers/acpi/acpica/evxface.c u32 type, acpi_gpe_handler address, void *context) context 883 drivers/acpi/acpica/evxface.c TRUE, address, context); context 46 drivers/acpi/acpica/evxfregn.c acpi_adr_space_setup setup, void *context) context 76 drivers/acpi/acpica/evxfregn.c context); context 214 drivers/acpi/acpica/exdump.c {ACPI_EXD_POINTER, ACPI_EXD_OFFSET(address_space.context), "Context"} context 222 drivers/acpi/acpica/exdump.c {ACPI_EXD_POINTER, ACPI_EXD_OFFSET(notify.context), "Context"}, context 21 drivers/acpi/acpica/hwgpe.c void *context); context 296 drivers/acpi/acpica/hwgpe.c struct acpi_gpe_block_info *gpe_block, void *context) context 333 drivers/acpi/acpica/hwgpe.c struct acpi_gpe_block_info *gpe_block, void *context) context 372 drivers/acpi/acpica/hwgpe.c void *context) context 420 drivers/acpi/acpica/hwgpe.c void *context) context 470 drivers/acpi/acpica/hwgpe.c void *context) context 472 drivers/acpi/acpica/hwgpe.c struct acpi_gpe_block_status_context *c = context; context 589 drivers/acpi/acpica/hwgpe.c struct acpi_gpe_block_status_context context = { context 603 drivers/acpi/acpica/hwgpe.c context.gpe_skip_register_info = gpe_event_info->register_info; context 604 drivers/acpi/acpica/hwgpe.c context.gpe_skip_mask = acpi_hw_get_gpe_register_bit(gpe_event_info); context 609 drivers/acpi/acpica/hwgpe.c (void)acpi_ev_walk_gpe_list(acpi_hw_get_gpe_block_status, &context); context 610 drivers/acpi/acpica/hwgpe.c return (context.retval != 0); context 24 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value); context 31 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value); context 35 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value); context 141 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value) context 143 drivers/acpi/acpica/nsdump.c struct acpi_walk_info *info = (struct acpi_walk_info *)context; context 678 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value) context 680 drivers/acpi/acpica/nsdump.c u32 max_level = *((u32 *)context); context 715 drivers/acpi/acpica/nsdump.c u32 level, void *context, void **return_value) context 717 drivers/acpi/acpica/nsdump.c u32 *max_level = (u32 *)context; context 36 drivers/acpi/acpica/nsdumpdv.c u32 level, void *context, void **return_value) context 46 drivers/acpi/acpica/nsdumpdv.c acpi_ns_dump_one_object(obj_handle, level, context, return_value); context 23 drivers/acpi/acpica/nsinit.c u32 level, void *context, void **return_value); context 27 drivers/acpi/acpica/nsinit.c u32 nesting_level, void *context, void **return_value); context 31 drivers/acpi/acpica/nsinit.c u32 nesting_level, void *context, void **return_value); context 261 drivers/acpi/acpica/nsinit.c u32 level, void *context, void **return_value) context 317 drivers/acpi/acpica/nsinit.c u32 level, void *context, void **return_value) context 322 drivers/acpi/acpica/nsinit.c (struct acpi_init_walk_info *)context; context 451 drivers/acpi/acpica/nsinit.c u32 nesting_level, void *context, void **return_value) context 454 drivers/acpi/acpica/nsinit.c ACPI_CAST_PTR(struct acpi_device_walk_info, context); context 516 drivers/acpi/acpica/nsinit.c u32 nesting_level, void *context, void **return_value) context 519 drivers/acpi/acpica/nsinit.c ACPI_CAST_PTR(struct acpi_device_walk_info, context); context 156 drivers/acpi/acpica/nswalk.c void *context, void **return_value) context 231 drivers/acpi/acpica/nswalk.c level, context, context 238 drivers/acpi/acpica/nswalk.c level, context, context 559 drivers/acpi/acpica/nsxfeval.c void *context, void **return_value) context 609 drivers/acpi/acpica/nsxfeval.c context, return_value); context 637 drivers/acpi/acpica/nsxfeval.c void *context, void **return_value) context 639 drivers/acpi/acpica/nsxfeval.c struct acpi_get_devices_info *info = context; context 741 drivers/acpi/acpica/nsxfeval.c info->context, return_value); context 773 drivers/acpi/acpica/nsxfeval.c void *context, void **return_value) context 791 drivers/acpi/acpica/nsxfeval.c info.context = context; context 32 drivers/acpi/acpica/rslist.c u32 offset, u8 resource_index, void **context) context 35 drivers/acpi/acpica/rslist.c ACPI_CAST_INDIRECT_PTR(struct acpi_resource, context); context 35 drivers/acpi/acpica/rsxface.c acpi_rs_match_vendor_resource(struct acpi_resource *resource, void *context); context 442 drivers/acpi/acpica/rsxface.c acpi_rs_match_vendor_resource(struct acpi_resource *resource, void *context) context 444 drivers/acpi/acpica/rsxface.c struct acpi_vendor_walk_info *info = context; context 508 drivers/acpi/acpica/rsxface.c void *context) context 547 drivers/acpi/acpica/rsxface.c status = user_function(resource, context); context 596 drivers/acpi/acpica/rsxface.c acpi_walk_resource_callback user_function, void *context) context 623 drivers/acpi/acpica/rsxface.c status = acpi_walk_resource_buffer(&buffer, user_function, context); context 457 drivers/acpi/acpica/tbxface.c acpi_install_table_handler(acpi_table_handler handler, void *context) context 482 drivers/acpi/acpica/tbxface.c acpi_gbl_table_handler_context = context; context 28 drivers/acpi/acpica/utcopy.c void *context); context 50 drivers/acpi/acpica/utcopy.c void *context); context 215 drivers/acpi/acpica/utcopy.c void *context) context 218 drivers/acpi/acpica/utcopy.c struct acpi_pkg_info *info = (struct acpi_pkg_info *)context; context 794 drivers/acpi/acpica/utcopy.c void *context) context 246 drivers/acpi/acpica/utdelete.c context, context 199 drivers/acpi/acpica/utmisc.c acpi_pkg_callback walk_callback, void *context) context 238 drivers/acpi/acpica/utmisc.c this_source_obj, state, context); context 278 drivers/acpi/acpica/utmisc.c this_source_obj, state, context); context 30 drivers/acpi/acpica/utobject.c union acpi_generic_state *state, void *context); context 557 drivers/acpi/acpica/utobject.c union acpi_generic_state *state, void *context) context 560 drivers/acpi/acpica/utobject.c struct acpi_pkg_info *info = (struct acpi_pkg_info *)context; context 140 drivers/acpi/acpica/utresrc.c acpi_walk_aml_callback user_function, void **context) context 186 drivers/acpi/acpica/utresrc.c context); context 214 drivers/acpi/acpica/utresrc.c *context = aml; context 235 drivers/acpi/acpica/utresrc.c user_function(end_tag, 2, offset, resource_index, context); context 776 drivers/acpi/apei/apei-base.c struct acpi_osc_context context = { context 788 drivers/acpi/apei/apei-base.c || ACPI_FAILURE(acpi_run_osc(handle, &context))) context 791 drivers/acpi/apei/apei-base.c kfree(context.ret.pointer); context 143 drivers/acpi/arm64/iort.c (struct acpi_iort_node *node, void *context); context 226 drivers/acpi/arm64/iort.c void *context) context 248 drivers/acpi/arm64/iort.c ACPI_SUCCESS(callback(iort_node, context))) context 259 drivers/acpi/arm64/iort.c void *context) context 261 drivers/acpi/arm64/iort.c struct device *dev = context; context 131 drivers/acpi/bus.c void *context) context 176 drivers/acpi/bus.c struct acpi_osc_context *context, char *error) context 180 drivers/acpi/bus.c acpi_handle_debug(handle, "(%s): %s\n", context->uuid_str, error); context 183 drivers/acpi/bus.c for (i = 0; i < context->cap.length; i += sizeof(u32)) context 184 drivers/acpi/bus.c pr_debug(" %x", *((u32 *)(context->cap.pointer + i))); context 189 drivers/acpi/bus.c acpi_status acpi_run_osc(acpi_handle handle, struct acpi_osc_context *context) context 199 drivers/acpi/bus.c if (!context) context 201 drivers/acpi/bus.c if (guid_parse(context->uuid_str, &guid)) context 203 drivers/acpi/bus.c context->ret.length = ACPI_ALLOCATE_BUFFER; context 204 drivers/acpi/bus.c context->ret.pointer = NULL; context 213 drivers/acpi/bus.c in_params[1].integer.value = context->rev; context 215 drivers/acpi/bus.c in_params[2].integer.value = context->cap.length/sizeof(u32); context 217 drivers/acpi/bus.c in_params[3].buffer.length = context->cap.length; context 218 drivers/acpi/bus.c in_params[3].buffer.pointer = context->cap.pointer; context 229 drivers/acpi/bus.c || out_obj->buffer.length != context->cap.length) { context 230 drivers/acpi/bus.c acpi_print_osc_error(handle, context, context 239 drivers/acpi/bus.c acpi_print_osc_error(handle, context, context 242 drivers/acpi/bus.c acpi_print_osc_error(handle, context, context 245 drivers/acpi/bus.c acpi_print_osc_error(handle, context, context 248 drivers/acpi/bus.c if (((u32 *)context->cap.pointer)[OSC_QUERY_DWORD] context 258 drivers/acpi/bus.c context->ret.length = out_obj->buffer.length; context 259 drivers/acpi/bus.c context->ret.pointer = kmemdup(out_obj->buffer.pointer, context 260 drivers/acpi/bus.c context->ret.length, GFP_KERNEL); context 261 drivers/acpi/bus.c if (!context->ret.pointer) { context 270 drivers/acpi/bus.c context->ret.pointer = NULL; context 288 drivers/acpi/bus.c struct acpi_osc_context context = { context 320 drivers/acpi/bus.c if (ACPI_SUCCESS(acpi_run_osc(handle, &context))) { context 321 drivers/acpi/bus.c u32 *capbuf_ret = context.ret.pointer; context 322 drivers/acpi/bus.c if (context.ret.length > OSC_SUPPORT_DWORD) { context 328 drivers/acpi/bus.c kfree(context.ret.pointer); context 1113 drivers/acpi/bus.c static acpi_status acpi_bus_table_handler(u32 event, void *table, void *context) context 1115 drivers/acpi/bus.c acpi_scan_table_handler(event, table, context); context 1117 drivers/acpi/bus.c return acpi_sysfs_table_handler(event, table, context); context 461 drivers/acpi/device_pm.c if (adev->wakeup.context.func) { context 463 drivers/acpi/device_pm.c adev->wakeup.context.func, context 464 drivers/acpi/device_pm.c dev_name(adev->wakeup.context.dev)); context 465 drivers/acpi/device_pm.c adev->wakeup.context.func(&adev->wakeup.context); context 486 drivers/acpi/device_pm.c void (*func)(struct acpi_device_wakeup_context *context)) context 506 drivers/acpi/device_pm.c adev->wakeup.context.dev = dev; context 507 drivers/acpi/device_pm.c adev->wakeup.context.func = func; context 536 drivers/acpi/device_pm.c adev->wakeup.context.func = NULL; context 537 drivers/acpi/device_pm.c adev->wakeup.context.dev = NULL; context 739 drivers/acpi/device_pm.c static void acpi_pm_notify_work_func(struct acpi_device_wakeup_context *context) context 741 drivers/acpi/device_pm.c struct device *dev = context->dev; context 1328 drivers/acpi/ec.c ec_parse_io_ports(struct acpi_resource *resource, void *context); context 1358 drivers/acpi/ec.c void *context, void **return_value) context 1362 drivers/acpi/ec.c struct acpi_ec *ec = context; context 1374 drivers/acpi/ec.c ec_parse_device(acpi_handle handle, u32 Level, void *context, void **retval) context 1378 drivers/acpi/ec.c struct acpi_ec *ec = context; context 1641 drivers/acpi/ec.c ec_parse_io_ports(struct acpi_resource *resource, void *context) context 1643 drivers/acpi/ec.c struct acpi_ec *ec = context; context 69 drivers/acpi/evged.c void *context) context 75 drivers/acpi/evged.c struct acpi_ged_device *geddev = context; context 88 drivers/acpi/internal.h acpi_status acpi_sysfs_table_handler(u32 event, void *table, void *context); context 89 drivers/acpi/internal.h void acpi_scan_table_handler(u32 event, void *table, void *context); context 97 drivers/acpi/ioapic.c void *context, void **rv) context 127 drivers/acpi/ioapic.c ioapic->root_handle = (acpi_handle)context; context 179 drivers/acpi/irq.c void *context) context 181 drivers/acpi/irq.c struct acpi_irq_parse_one_ctx *ctx = context; context 45 drivers/acpi/osl.c void *context; context 557 drivers/acpi/osl.c void *context) context 580 drivers/acpi/osl.c acpi_irq_context = context; context 843 drivers/acpi/osl.c dpc->function(dpc->context); context 882 drivers/acpi/osl.c int acpi_debugger_create_thread(acpi_osd_exec_callback function, void *context) context 903 drivers/acpi/osl.c ret = func(function, context); context 1057 drivers/acpi/osl.c acpi_osd_exec_callback function, void *context) context 1065 drivers/acpi/osl.c function, context)); context 1068 drivers/acpi/osl.c ret = acpi_debugger_create_thread(function, context); context 1090 drivers/acpi/osl.c dpc->context = context; context 87 drivers/acpi/pci_link.c void *context) context 89 drivers/acpi/pci_link.c struct acpi_pci_link *link = context; context 175 drivers/acpi/pci_link.c void *context) context 177 drivers/acpi/pci_link.c int *irq = context; context 179 drivers/acpi/pci_root.c struct acpi_osc_context context = { context 187 drivers/acpi/pci_root.c status = acpi_run_osc(handle, &context); context 189 drivers/acpi/pci_root.c *retval = *((u32 *)(context.ret.pointer + 8)); context 190 drivers/acpi/pci_root.c kfree(context.ret.pointer); context 87 drivers/acpi/pci_slot.c register_slot(acpi_handle handle, u32 lvl, void *context, void **rv) context 94 drivers/acpi/pci_slot.c struct pci_bus *pci_bus = context; context 160 drivers/acpi/processor_pdc.c early_init_pdc(acpi_handle handle, u32 lvl, void *context, void **rv) context 531 drivers/acpi/resource.c void *context) context 533 drivers/acpi/resource.c struct res_proc_context *c = context; context 745 drivers/acpi/resource.c void *context, void **ret) context 747 drivers/acpi/resource.c struct resource *res = context; context 596 drivers/acpi/sbs.c static void acpi_sbs_callback(void *context) context 599 drivers/acpi/sbs.c struct acpi_sbs *sbs = context; context 28 drivers/acpi/sbshc.c void *context; context 179 drivers/acpi/sbshc.c smbus_alarm_callback callback, void *context) context 183 drivers/acpi/sbshc.c hc->context = context; context 194 drivers/acpi/sbshc.c hc->context = NULL; context 202 drivers/acpi/sbshc.c static inline void acpi_smbus_callback(void *context) context 204 drivers/acpi/sbshc.c struct acpi_smb_hc *hc = context; context 206 drivers/acpi/sbshc.c hc->callback(hc->context); context 209 drivers/acpi/sbshc.c static int smbus_alarm(void *context) context 211 drivers/acpi/sbshc.c struct acpi_smb_hc *hc = context; context 24 drivers/acpi/sbshc.h typedef void (*smbus_alarm_callback)(void *context); context 31 drivers/acpi/sbshc.h smbus_alarm_callback callback, void *context); context 554 drivers/acpi/scan.c static void acpi_scan_drop_device(acpi_handle handle, void *context) context 557 drivers/acpi/scan.c struct acpi_device *adev = context; context 1117 drivers/acpi/scan.c acpi_backlight_cap_match(acpi_handle handle, u32 level, void *context, context 1120 drivers/acpi/scan.c long *cap = context; context 1661 drivers/acpi/scan.c void *context) context 1663 drivers/acpi/scan.c struct resource *res = context; context 2315 drivers/acpi/scan.c void acpi_scan_table_handler(u32 event, void *table, void *context) context 403 drivers/acpi/sysfs.c acpi_status acpi_sysfs_table_handler(u32 event, void *table, void *context) context 638 drivers/acpi/sysfs.c u32 event_number, void *context) context 63 drivers/acpi/video_detect.c find_video(acpi_handle handle, u32 lvl, void *context, void **rv) context 65 drivers/acpi/video_detect.c long *cap = context; context 17 drivers/acpi/wakeup.c bool (*wakeup)(void *context); context 18 drivers/acpi/wakeup.c void *context; context 119 drivers/acpi/wakeup.c int acpi_register_wakeup_handler(int wake_irq, bool (*wakeup)(void *context), context 120 drivers/acpi/wakeup.c void *context) context 136 drivers/acpi/wakeup.c handler->context = context; context 151 drivers/acpi/wakeup.c void acpi_unregister_wakeup_handler(bool (*wakeup)(void *context), context 152 drivers/acpi/wakeup.c void *context) context 158 drivers/acpi/wakeup.c if (handler->wakeup == wakeup && handler->context == context) { context 174 drivers/acpi/wakeup.c if (handler->wakeup(handler->context)) context 486 drivers/android/binder.c struct binder_context *context; context 1200 drivers/android/binder.c node == node->proc->context->binder_context_mgr_node && context 1440 drivers/android/binder.c struct binder_context *context = proc->context; context 1467 drivers/android/binder.c new_ref->data.desc = (node == context->binder_context_mgr_node) ? 0 : 1; context 2864 drivers/android/binder.c struct binder_context *context = proc->context; context 2877 drivers/android/binder.c strscpy(e->context_name, proc->context->name, BINDERFS_MAX_NAME); context 2960 drivers/android/binder.c mutex_lock(&context->context_mgr_node_lock); context 2961 drivers/android/binder.c target_node = context->binder_context_mgr_node; context 2968 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 3603 drivers/android/binder.c struct binder_context *context = proc->context; context 3638 drivers/android/binder.c mutex_lock(&context->context_mgr_node_lock); context 3639 drivers/android/binder.c ctx_mgr_node = context->binder_context_mgr_node; context 3644 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 4895 drivers/android/binder.c struct binder_context *context = proc->context; context 4899 drivers/android/binder.c mutex_lock(&context->context_mgr_node_lock); context 4900 drivers/android/binder.c if (context->binder_context_mgr_node) { context 4908 drivers/android/binder.c if (uid_valid(context->binder_context_mgr_uid)) { context 4909 drivers/android/binder.c if (!uid_eq(context->binder_context_mgr_uid, curr_euid)) { context 4913 drivers/android/binder.c context->binder_context_mgr_uid)); context 4918 drivers/android/binder.c context->binder_context_mgr_uid = curr_euid; context 4930 drivers/android/binder.c context->binder_context_mgr_node = new_node; context 4934 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 4942 drivers/android/binder.c struct binder_context *context = proc->context; context 4953 drivers/android/binder.c mutex_lock(&context->context_mgr_node_lock); context 4954 drivers/android/binder.c if (!context->binder_context_mgr_node || context 4955 drivers/android/binder.c context->binder_context_mgr_node->proc != proc) { context 4956 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 4959 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 5234 drivers/android/binder.c proc->context = &binder_dev->context; context 5410 drivers/android/binder.c struct binder_context *context = proc->context; context 5419 drivers/android/binder.c mutex_lock(&context->context_mgr_node_lock); context 5420 drivers/android/binder.c if (context->binder_context_mgr_node && context 5421 drivers/android/binder.c context->binder_context_mgr_node->proc == proc) { context 5425 drivers/android/binder.c context->binder_context_mgr_node = NULL; context 5427 drivers/android/binder.c mutex_unlock(&context->context_mgr_node_lock); context 5428 drivers/android/binder.c device = container_of(proc->context, struct binder_device, context); context 5430 drivers/android/binder.c kfree(context->name); context 5433 drivers/android/binder.c proc->context = NULL; context 5716 drivers/android/binder.c seq_printf(m, "context %s\n", proc->context->name); context 5880 drivers/android/binder.c seq_printf(m, "context %s\n", proc->context->name); context 6091 drivers/android/binder.c binder_device->context.binder_context_mgr_uid = INVALID_UID; context 6092 drivers/android/binder.c binder_device->context.name = name; context 6093 drivers/android/binder.c mutex_init(&binder_device->context.context_mgr_node_lock); context 35 drivers/android/binder_internal.h struct binder_context context; context 159 drivers/android/binderfs.c device->context.binder_context_mgr_uid = INVALID_UID; context 160 drivers/android/binderfs.c device->context.name = name; context 163 drivers/android/binderfs.c mutex_init(&device->context.context_mgr_node_lock); context 262 drivers/android/binderfs.c kfree(device->context.name); context 50 drivers/ata/libata-acpi.c #define ata_hotplug_data(context) (container_of((context), struct ata_acpi_hotplug_context, hp)->data) context 183 drivers/ata/libata-acpi.c struct ata_acpi_hotplug_context *context; context 197 drivers/ata/libata-acpi.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 198 drivers/ata/libata-acpi.c if (!context) context 201 drivers/ata/libata-acpi.c context->data.ap = ap; context 202 drivers/ata/libata-acpi.c acpi_initialize_hp_context(adev, &context->hp, ata_acpi_ap_notify_dock, context 212 drivers/ata/libata-acpi.c struct ata_acpi_hotplug_context *context; context 239 drivers/ata/libata-acpi.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 240 drivers/ata/libata-acpi.c if (!context) context 243 drivers/ata/libata-acpi.c context->data.dev = dev; context 244 drivers/ata/libata-acpi.c acpi_initialize_hp_context(adev, &context->hp, ata_acpi_dev_notify_dock, context 240 drivers/ata/libata-zpodd.c static void zpodd_wake_dev(acpi_handle handle, u32 event, void *context) context 242 drivers/ata/libata-zpodd.c struct ata_device *ata_dev = context; context 1013 drivers/ata/sata_sil24.c u32 context, cerr; context 1033 drivers/ata/sata_sil24.c context = readl(port + PORT_CONTEXT); context 1034 drivers/ata/sata_sil24.c pmp = (context >> 5) & 0xf; context 961 drivers/base/firmware_loader/main.c void *context; context 962 drivers/base/firmware_loader/main.c void (*cont)(const struct firmware *fw, void *context); context 975 drivers/base/firmware_loader/main.c fw_work->cont(fw, fw_work->context); context 1009 drivers/base/firmware_loader/main.c const char *name, struct device *device, gfp_t gfp, void *context, context 1010 drivers/base/firmware_loader/main.c void (*cont)(const struct firmware *fw, void *context)) context 1025 drivers/base/firmware_loader/main.c fw_work->context = context; context 103 drivers/base/regmap/internal.h int (*reg_read)(void *context, unsigned int reg, unsigned int *val); context 104 drivers/base/regmap/internal.h int (*reg_write)(void *context, unsigned int reg, unsigned int val); context 105 drivers/base/regmap/internal.h int (*reg_update_bits)(void *context, unsigned int reg, context 44 drivers/base/regmap/regmap-ac97.c static int regmap_ac97_reg_read(void *context, unsigned int reg, context 47 drivers/base/regmap/regmap-ac97.c struct snd_ac97 *ac97 = context; context 54 drivers/base/regmap/regmap-ac97.c static int regmap_ac97_reg_write(void *context, unsigned int reg, context 57 drivers/base/regmap/regmap-ac97.c struct snd_ac97 *ac97 = context; context 15 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_byte_reg_read(void *context, unsigned int reg, context 18 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 34 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_byte_reg_write(void *context, unsigned int reg, context 37 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 51 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_word_reg_read(void *context, unsigned int reg, context 54 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 70 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_word_reg_write(void *context, unsigned int reg, context 73 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 87 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_word_read_swapped(void *context, unsigned int reg, context 90 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 106 drivers/base/regmap/regmap-i2c.c static int regmap_smbus_word_write_swapped(void *context, unsigned int reg, context 109 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 123 drivers/base/regmap/regmap-i2c.c static int regmap_i2c_write(void *context, const void *data, size_t count) context 125 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 138 drivers/base/regmap/regmap-i2c.c static int regmap_i2c_gather_write(void *context, context 142 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 172 drivers/base/regmap/regmap-i2c.c static int regmap_i2c_read(void *context, context 176 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 208 drivers/base/regmap/regmap-i2c.c static int regmap_i2c_smbus_i2c_write(void *context, const void *data, context 211 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 222 drivers/base/regmap/regmap-i2c.c static int regmap_i2c_smbus_i2c_read(void *context, const void *reg, context 226 drivers/base/regmap/regmap-i2c.c struct device *dev = context; context 9 drivers/base/regmap/regmap-i3c.c static int regmap_i3c_write(void *context, const void *data, size_t count) context 11 drivers/base/regmap/regmap-i3c.c struct device *dev = context; context 24 drivers/base/regmap/regmap-i3c.c static int regmap_i3c_read(void *context, context 28 drivers/base/regmap/regmap-i3c.c struct device *dev = context; context 115 drivers/base/regmap/regmap-mmio.c static int regmap_mmio_write(void *context, unsigned int reg, unsigned int val) context 117 drivers/base/regmap/regmap-mmio.c struct regmap_mmio_context *ctx = context; context 172 drivers/base/regmap/regmap-mmio.c static int regmap_mmio_read(void *context, unsigned int reg, unsigned int *val) context 174 drivers/base/regmap/regmap-mmio.c struct regmap_mmio_context *ctx = context; context 191 drivers/base/regmap/regmap-mmio.c static void regmap_mmio_free_context(void *context) context 193 drivers/base/regmap/regmap-mmio.c struct regmap_mmio_context *ctx = context; context 200 drivers/base/regmap/regmap-mmio.c kfree(context); context 40 drivers/base/regmap/regmap-sccb.c static int regmap_sccb_read(void *context, unsigned int reg, unsigned int *val) context 42 drivers/base/regmap/regmap-sccb.c struct device *dev = context; context 75 drivers/base/regmap/regmap-sccb.c static int regmap_sccb_write(void *context, unsigned int reg, unsigned int val) context 77 drivers/base/regmap/regmap-sccb.c struct device *dev = context; context 10 drivers/base/regmap/regmap-sdw.c static int regmap_sdw_write(void *context, unsigned int reg, unsigned int val) context 12 drivers/base/regmap/regmap-sdw.c struct device *dev = context; context 18 drivers/base/regmap/regmap-sdw.c static int regmap_sdw_read(void *context, unsigned int reg, unsigned int *val) context 20 drivers/base/regmap/regmap-sdw.c struct device *dev = context; context 10 drivers/base/regmap/regmap-slimbus.c static int regmap_slimbus_write(void *context, const void *data, size_t count) context 12 drivers/base/regmap/regmap-slimbus.c struct slim_device *sdev = context; context 17 drivers/base/regmap/regmap-slimbus.c static int regmap_slimbus_read(void *context, const void *reg, size_t reg_size, context 20 drivers/base/regmap/regmap-slimbus.c struct slim_device *sdev = context; context 28 drivers/base/regmap/regmap-spi.c static int regmap_spi_write(void *context, const void *data, size_t count) context 30 drivers/base/regmap/regmap-spi.c struct device *dev = context; context 36 drivers/base/regmap/regmap-spi.c static int regmap_spi_gather_write(void *context, context 40 drivers/base/regmap/regmap-spi.c struct device *dev = context; context 53 drivers/base/regmap/regmap-spi.c static int regmap_spi_async_write(void *context, context 61 drivers/base/regmap/regmap-spi.c struct device *dev = context; context 75 drivers/base/regmap/regmap-spi.c async->m.context = async; context 91 drivers/base/regmap/regmap-spi.c static int regmap_spi_read(void *context, context 95 drivers/base/regmap/regmap-spi.c struct device *dev = context; context 16 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_base_read(void *context, context 26 drivers/base/regmap/regmap-spmi.c err = spmi_register_read(context, addr++, val++); context 31 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_base_gather_write(void *context, context 46 drivers/base/regmap/regmap-spmi.c err = spmi_register_zero_write(context, *data); context 56 drivers/base/regmap/regmap-spmi.c err = spmi_register_write(context, addr, *data); context 69 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_base_write(void *context, const void *data, context 73 drivers/base/regmap/regmap-spmi.c return regmap_spmi_base_gather_write(context, data, 1, data + 1, context 105 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_ext_read(void *context, context 124 drivers/base/regmap/regmap-spmi.c err = spmi_ext_register_read(context, addr, val, len); context 136 drivers/base/regmap/regmap-spmi.c err = spmi_ext_register_readl(context, addr, val, len); context 149 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_ext_gather_write(void *context, context 164 drivers/base/regmap/regmap-spmi.c err = spmi_ext_register_write(context, addr, val, len); context 176 drivers/base/regmap/regmap-spmi.c err = spmi_ext_register_writel(context, addr, val, len); context 189 drivers/base/regmap/regmap-spmi.c static int regmap_spmi_ext_write(void *context, const void *data, context 193 drivers/base/regmap/regmap-spmi.c return regmap_spmi_ext_gather_write(context, data, 2, data + 2, context 21 drivers/base/regmap/regmap-w1.c static int w1_reg_a8_v8_read(void *context, unsigned int reg, unsigned int *val) context 23 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 43 drivers/base/regmap/regmap-w1.c static int w1_reg_a8_v8_write(void *context, unsigned int reg, unsigned int val) context 45 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 69 drivers/base/regmap/regmap-w1.c static int w1_reg_a8_v16_read(void *context, unsigned int reg, context 72 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 93 drivers/base/regmap/regmap-w1.c static int w1_reg_a8_v16_write(void *context, unsigned int reg, context 96 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 121 drivers/base/regmap/regmap-w1.c static int w1_reg_a16_v16_read(void *context, unsigned int reg, context 124 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 146 drivers/base/regmap/regmap-w1.c static int w1_reg_a16_v16_write(void *context, unsigned int reg, context 149 drivers/base/regmap/regmap-w1.c struct device *dev = context; context 48 drivers/base/regmap/regmap.c static int _regmap_bus_reg_read(void *context, unsigned int reg, context 50 drivers/base/regmap/regmap.c static int _regmap_bus_read(void *context, unsigned int reg, context 52 drivers/base/regmap/regmap.c static int _regmap_bus_formatted_write(void *context, unsigned int reg, context 54 drivers/base/regmap/regmap.c static int _regmap_bus_reg_write(void *context, unsigned int reg, context 56 drivers/base/regmap/regmap.c static int _regmap_bus_raw_write(void *context, unsigned int reg, context 1710 drivers/base/regmap/regmap.c static int _regmap_bus_formatted_write(void *context, unsigned int reg, context 1715 drivers/base/regmap/regmap.c struct regmap *map = context; context 1738 drivers/base/regmap/regmap.c static int _regmap_bus_reg_write(void *context, unsigned int reg, context 1741 drivers/base/regmap/regmap.c struct regmap *map = context; context 1746 drivers/base/regmap/regmap.c static int _regmap_bus_raw_write(void *context, unsigned int reg, context 1749 drivers/base/regmap/regmap.c struct regmap *map = context; context 1771 drivers/base/regmap/regmap.c void *context = _regmap_map_get_context(map); context 1791 drivers/base/regmap/regmap.c return map->reg_write(context, reg, val); context 2496 drivers/base/regmap/regmap.c static int _regmap_bus_reg_read(void *context, unsigned int reg, context 2499 drivers/base/regmap/regmap.c struct regmap *map = context; context 2504 drivers/base/regmap/regmap.c static int _regmap_bus_read(void *context, unsigned int reg, context 2508 drivers/base/regmap/regmap.c struct regmap *map = context; context 2526 drivers/base/regmap/regmap.c void *context = _regmap_map_get_context(map); context 2540 drivers/base/regmap/regmap.c ret = map->reg_read(context, reg, val); context 79 drivers/block/drbd/drbd_actlog.c __be32 context[AL_CONTEXT_PER_TRANSACTION]; context 367 drivers/block/drbd/drbd_actlog.c buffer->context[i] = cpu_to_be32(extent_nr); context 370 drivers/block/drbd/drbd_actlog.c buffer->context[i] = cpu_to_be32(LC_FREE); context 66 drivers/bluetooth/bcm203x.c struct bcm203x_data *data = urb->context; context 175 drivers/bluetooth/bfusb.c struct sk_buff *skb = (struct sk_buff *) urb->context; context 335 drivers/bluetooth/bfusb.c struct sk_buff *skb = (struct sk_buff *) urb->context; context 48 drivers/bluetooth/bpa10x.c struct sk_buff *skb = urb->context; context 86 drivers/bluetooth/bpa10x.c struct hci_dev *hdev = urb->context; context 388 drivers/bluetooth/btintel.c static int regmap_ibt_read(void *context, const void *addr, size_t reg_size, context 391 drivers/bluetooth/btintel.c struct regmap_ibt_context *ctx = context; context 452 drivers/bluetooth/btintel.c static int regmap_ibt_gather_write(void *context, context 456 drivers/bluetooth/btintel.c struct regmap_ibt_context *ctx = context; context 506 drivers/bluetooth/btintel.c static int regmap_ibt_write(void *context, const void *data, size_t count) context 514 drivers/bluetooth/btintel.c return regmap_ibt_gather_write(context, data, 4, data + 4, count - 4); context 517 drivers/bluetooth/btintel.c static void regmap_ibt_free_context(void *context) context 519 drivers/bluetooth/btintel.c kfree(context); context 750 drivers/bluetooth/btusb.c struct hci_dev *hdev = urb->context; context 840 drivers/bluetooth/btusb.c struct hci_dev *hdev = urb->context; context 929 drivers/bluetooth/btusb.c struct hci_dev *hdev = urb->context; context 1051 drivers/bluetooth/btusb.c struct hci_dev *hdev = urb->context; context 1138 drivers/bluetooth/btusb.c struct sk_buff *skb = urb->context; context 1166 drivers/bluetooth/btusb.c struct sk_buff *skb = urb->context; context 2556 drivers/bluetooth/btusb.c struct hci_dev *hdev = urb->context; context 399 drivers/bus/sunxi-rsb.c static int regmap_sunxi_rsb_reg_read(void *context, unsigned int reg, context 402 drivers/bus/sunxi-rsb.c struct sunxi_rsb_ctx *ctx = context; context 411 drivers/bus/sunxi-rsb.c static int regmap_sunxi_rsb_reg_write(void *context, unsigned int reg, context 414 drivers/bus/sunxi-rsb.c struct sunxi_rsb_ctx *ctx = context; context 420 drivers/bus/sunxi-rsb.c static void regmap_sunxi_rsb_free_ctx(void *context) context 422 drivers/bus/sunxi-rsb.c struct sunxi_rsb_ctx *ctx = context; context 16 drivers/bus/vexpress-config.c void *context; context 82 drivers/bus/vexpress-config.c bridge->ops->regmap_exit(regmap, bridge->context); context 103 drivers/bus/vexpress-config.c regmap = (bridge->ops->regmap_init)(dev, bridge->context); context 117 drivers/bus/vexpress-config.c struct vexpress_config_bridge_ops *ops, void *context) context 142 drivers/bus/vexpress-config.c bridge->context = context; context 475 drivers/char/agp/hp-agp.c zx1_gart_probe (acpi_handle obj, u32 depth, void *context, void **ret) context 518 drivers/char/agp/hp-agp.c "(ioc=%llx, lba=%llx)\n", (char *)context, context 59 drivers/char/ipmi/ipmi_si_platform.c u32 gpe_number, void *context) context 61 drivers/char/ipmi/ipmi_si_platform.c struct si_sm_io *io = context; context 524 drivers/clk/clk-cdce925.c void *context, const void *data, size_t count) context 526 drivers/clk/clk-cdce925.c struct device *dev = context; context 550 drivers/clk/clk-cdce925.c static int cdce925_regmap_i2c_read(void *context, context 553 drivers/clk/clk-cdce925.c struct device *dev = context; context 27 drivers/clk/ti/clock.h u32 context; context 283 drivers/clk/ti/divider.c divider->context = val & div_mask(divider); context 301 drivers/clk/ti/divider.c val |= divider->context << divider->shift; context 798 drivers/clk/ti/dpll3xxx.c clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask); context 800 drivers/clk/ti/dpll3xxx.c if (clk->context == DPLL_LOCKED) { context 826 drivers/clk/ti/dpll3xxx.c if (clk->context == DPLL_LOCKED) { context 839 drivers/clk/ti/dpll3xxx.c _omap3_dpll_write_clken(clk, clk->context); context 859 drivers/clk/ti/dpll3xxx.c clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask); context 861 drivers/clk/ti/dpll3xxx.c if (clk->context == DPLL_LOCKED) { context 890 drivers/clk/ti/dpll3xxx.c if (clk->context == ((ctrl & dd->enable_mask) >> context 900 drivers/clk/ti/dpll3xxx.c if (clk->context == DPLL_LOCKED) context 903 drivers/clk/ti/dpll3xxx.c _omap3_dpll_write_clken(clk, clk->context); context 98 drivers/clocksource/timer-ti-dm.c timer->context.twer); context 100 drivers/clocksource/timer-ti-dm.c timer->context.tcrr); context 102 drivers/clocksource/timer-ti-dm.c timer->context.tldr); context 104 drivers/clocksource/timer-ti-dm.c timer->context.tmar); context 106 drivers/clocksource/timer-ti-dm.c timer->context.tsicr); context 107 drivers/clocksource/timer-ti-dm.c writel_relaxed(timer->context.tier, timer->irq_ena); context 109 drivers/clocksource/timer-ti-dm.c timer->context.tclr); context 536 drivers/clocksource/timer-ti-dm.c timer->context.tclr = l; context 557 drivers/clocksource/timer-ti-dm.c timer->context.tclr = context 582 drivers/clocksource/timer-ti-dm.c timer->context.tclr = l; context 583 drivers/clocksource/timer-ti-dm.c timer->context.tldr = load; context 606 drivers/clocksource/timer-ti-dm.c timer->context.tclr = l; context 607 drivers/clocksource/timer-ti-dm.c timer->context.tmar = match; context 632 drivers/clocksource/timer-ti-dm.c timer->context.tclr = l; context 655 drivers/clocksource/timer-ti-dm.c timer->context.tclr = l; context 670 drivers/clocksource/timer-ti-dm.c timer->context.tier = value; context 671 drivers/clocksource/timer-ti-dm.c timer->context.twer = value; context 700 drivers/clocksource/timer-ti-dm.c timer->context.tier &= ~mask; context 701 drivers/clocksource/timer-ti-dm.c timer->context.twer &= ~mask; context 750 drivers/clocksource/timer-ti-dm.c timer->context.tcrr = value; context 670 drivers/cpufreq/longhaul.c void *context, void **return_value) context 964 drivers/crypto/caam/caamalg.c void *context) context 966 drivers/crypto/caam/caamalg.c struct aead_request *req = context; context 985 drivers/crypto/caam/caamalg.c void *context) context 987 drivers/crypto/caam/caamalg.c struct aead_request *req = context; context 1006 drivers/crypto/caam/caamalg.c void *context) context 1008 drivers/crypto/caam/caamalg.c struct skcipher_request *req = context; context 1046 drivers/crypto/caam/caamalg.c void *context) context 1048 drivers/crypto/caam/caamalg.c struct skcipher_request *req = context; context 601 drivers/crypto/caam/caamhash.c void *context) context 603 drivers/crypto/caam/caamhash.c struct ahash_request *req = context; context 629 drivers/crypto/caam/caamhash.c void *context) context 631 drivers/crypto/caam/caamhash.c struct ahash_request *req = context; context 661 drivers/crypto/caam/caamhash.c void *context) context 663 drivers/crypto/caam/caamhash.c struct ahash_request *req = context; context 689 drivers/crypto/caam/caamhash.c void *context) context 691 drivers/crypto/caam/caamhash.c struct ahash_request *req = context; context 117 drivers/crypto/caam/caampkc.c static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context) context 119 drivers/crypto/caam/caampkc.c struct akcipher_request *req = context; context 136 drivers/crypto/caam/caampkc.c void *context) context 138 drivers/crypto/caam/caampkc.c struct akcipher_request *req = context; context 155 drivers/crypto/caam/caampkc.c void *context) context 157 drivers/crypto/caam/caampkc.c struct akcipher_request *req = context; context 174 drivers/crypto/caam/caampkc.c void *context) context 176 drivers/crypto/caam/caampkc.c struct akcipher_request *req = context; context 107 drivers/crypto/caam/caamrng.c static void rng_done(struct device *jrdev, u32 *desc, u32 err, void *context) context 15 drivers/crypto/caam/key_gen.c void *context) context 17 drivers/crypto/caam/key_gen.c struct split_key_result *res = context; context 44 drivers/crypto/caam/key_gen.h void split_key_done(struct device *dev, u32 *desc, u32 err, void *context); context 800 drivers/crypto/inside-secure/safexcel.h dma_addr_t context); context 120 drivers/crypto/inside-secure/safexcel_ring.c dma_addr_t context) { context 136 drivers/crypto/inside-secure/safexcel_ring.c if (first && context) { context 151 drivers/crypto/inside-secure/safexcel_ring.c (lower_32_bits(context) & GENMASK(31, 2)) >> 2; context 152 drivers/crypto/inside-secure/safexcel_ring.c cdesc->control_data.context_hi = upper_32_bits(context); context 960 drivers/crypto/mxs-dcp.c static irqreturn_t mxs_dcp_irq(int irq, void *context) context 962 drivers/crypto/mxs-dcp.c struct dcp *sdcp = context; context 177 drivers/crypto/sahara.c u8 context[SHA256_DIGEST_SIZE + 4]; context 1014 drivers/crypto/sahara.c memcpy(dev->context_base, rctx->context, rctx->context_size); context 1040 drivers/crypto/sahara.c memcpy(rctx->context, dev->context_base, rctx->context_size); context 1043 drivers/crypto/sahara.c memcpy(req->result, rctx->context, rctx->digest_size); context 271 drivers/crypto/talitos.c void *context, int error), context 272 drivers/crypto/talitos.c void *context) context 303 drivers/crypto/talitos.c request->context = context; context 376 drivers/crypto/talitos.c saved_req.context = request->context; context 389 drivers/crypto/talitos.c saved_req.callback(dev, saved_req.desc, saved_req.context, context 1027 drivers/crypto/talitos.c struct talitos_desc *desc, void *context, context 1030 drivers/crypto/talitos.c struct aead_request *areq = context; context 1048 drivers/crypto/talitos.c void *context, int err) context 1050 drivers/crypto/talitos.c struct aead_request *req = context; context 1075 drivers/crypto/talitos.c void *context, int err) context 1077 drivers/crypto/talitos.c struct aead_request *req = context; context 1199 drivers/crypto/talitos.c void *context, int error)) context 1551 drivers/crypto/talitos.c struct talitos_desc *desc, void *context, context 1554 drivers/crypto/talitos.c struct ablkcipher_request *areq = context; context 1574 drivers/crypto/talitos.c void *context, int error)) context 1743 drivers/crypto/talitos.c struct talitos_desc *desc, void *context, context 1746 drivers/crypto/talitos.c struct ahash_request *areq = context; context 1788 drivers/crypto/talitos.c void *context, int error)) context 84 drivers/crypto/talitos.h void *context, int error); context 85 drivers/crypto/talitos.h void *context; context 152 drivers/dma-buf/dma-fence-array.c u64 context, unsigned seqno, context 166 drivers/dma-buf/dma-fence-array.c context, seqno); context 188 drivers/dma-buf/dma-fence-array.c bool dma_fence_match_context(struct dma_fence *fence, u64 context) context 194 drivers/dma-buf/dma-fence-array.c return fence->context == context; context 197 drivers/dma-buf/dma-fence-array.c if (array->fences[i]->context != context) context 101 drivers/dma-buf/dma-fence-chain.c if ((*pfence)->context != chain->base.context || context 234 drivers/dma-buf/dma-fence-chain.c uint64_t context; context 244 drivers/dma-buf/dma-fence-chain.c context = prev->context; context 247 drivers/dma-buf/dma-fence-chain.c context = dma_fence_context_alloc(1); context 254 drivers/dma-buf/dma-fence-chain.c &chain->lock, context, seqno); context 240 drivers/dma-buf/dma-fence.c fence->context, fence->seqno)) { context 647 drivers/dma-buf/dma-fence.c spinlock_t *lock, u64 context, u64 seqno) context 656 drivers/dma-buf/dma-fence.c fence->context = context; context 244 drivers/dma-buf/dma-resv.c if (old->context == fence->context || context 87 drivers/dma-buf/sw_sync.c obj->context = dma_fence_context_alloc(1); context 244 drivers/dma-buf/sw_sync.c obj->context, value); context 38 drivers/dma-buf/sync_debug.h u64 context; context 141 drivers/dma-buf/sync_file.c fence->context, context 243 drivers/dma-buf/sync_file.c if (pt_a->context < pt_b->context) { context 247 drivers/dma-buf/sync_file.c } else if (pt_a->context > pt_b->context) { context 387 drivers/dma/altera-msgdma.c unsigned long flags, void *context) context 2052 drivers/dma/amba-pl08x.c unsigned long flags, void *context) context 1066 drivers/dma/at_hdmac.c unsigned long flags, void *context) context 635 drivers/dma/at_xdmac.c unsigned long flags, void *context) context 646 drivers/dma/bcm2835-dma.c unsigned long flags, void *context) context 319 drivers/dma/bestcomm/bestcomm.c bcom_eng->tdt[task].context = ctx_pa; context 2270 drivers/dma/coh901318.c unsigned long flags, void *context) context 529 drivers/dma/dma-axi-dmac.c unsigned long flags, void *context) context 356 drivers/dma/dma-jz4780.c void *context) context 438 drivers/dma/dw-edma/dw-edma-core.c unsigned long flags, void *context) context 624 drivers/dma/dw/core.c unsigned long flags, void *context) context 1035 drivers/dma/ep93xx_dma.c unsigned long flags, void *context) context 514 drivers/dma/fsl-edma-common.c unsigned long flags, void *context) context 241 drivers/dma/fsl-edma-common.h unsigned long flags, void *context); context 261 drivers/dma/hsu/hsu.c unsigned long flags, void *context) context 295 drivers/dma/idma64.c unsigned long flags, void *context) context 450 drivers/dma/img-mdc-dma.c unsigned long flags, void *context) context 818 drivers/dma/imx-dma.c unsigned long flags, void *context) context 431 drivers/dma/imx-sdma.c struct sdma_context_data *context; context 986 drivers/dma/imx-sdma.c struct sdma_context_data *context = sdma->context; context 1015 drivers/dma/imx-sdma.c memset(context, 0, sizeof(*context)); context 1016 drivers/dma/imx-sdma.c context->channel_state.pc = load_address; context 1021 drivers/dma/imx-sdma.c context->gReg[0] = sdmac->event_mask[1]; context 1022 drivers/dma/imx-sdma.c context->gReg[1] = sdmac->event_mask[0]; context 1023 drivers/dma/imx-sdma.c context->gReg[2] = sdmac->per_addr; context 1024 drivers/dma/imx-sdma.c context->gReg[6] = sdmac->shp_addr; context 1025 drivers/dma/imx-sdma.c context->gReg[7] = sdmac->watermark_level; context 1029 drivers/dma/imx-sdma.c bd0->mode.count = sizeof(*context) / 4; context 1031 drivers/dma/imx-sdma.c bd0->ext_buffer_addr = 2048 + (sizeof(*context) / 4) * channel; context 1443 drivers/dma/imx-sdma.c unsigned long flags, void *context) context 1724 drivers/dma/imx-sdma.c static void sdma_load_firmware(const struct firmware *fw, void *context) context 1726 drivers/dma/imx-sdma.c struct sdma_engine *sdma = context; context 1893 drivers/dma/imx-sdma.c sdma->context = (void *)sdma->channel_control + context 1336 drivers/dma/ipu/ipu_idmac.c void *context) context 534 drivers/dma/k3dma.c enum dma_transfer_direction dir, unsigned long flags, void *context) context 342 drivers/dma/mediatek/mtk-uart-apdma.c unsigned long tx_flags, void *context) context 528 drivers/dma/mmp_pdma.c unsigned long flags, void *context) context 272 drivers/dma/moxart-dma.c unsigned long tx_flags, void *context) context 686 drivers/dma/mpc512x_dma.c unsigned long flags, void *context) context 497 drivers/dma/mxs-dma.c unsigned long flags, void *context) context 1008 drivers/dma/nbpfaxi.c enum dma_transfer_direction direction, unsigned long flags, void *context) context 879 drivers/dma/owl-dma.c unsigned long flags, void *context) context 569 drivers/dma/pch_dma.c void *context) context 2831 drivers/dma/pl330.c unsigned long flg, void *context) context 973 drivers/dma/pxa_dma.c unsigned long flags, void *context) context 609 drivers/dma/qcom/bam_dma.c void *context) context 977 drivers/dma/s3c24xx-dma.c unsigned long flags, void *context) context 524 drivers/dma/sa11x0-dma.c enum dma_transfer_direction dir, unsigned long flags, void *context) context 1168 drivers/dma/sh/rcar-dmac.c unsigned long flags, void *context) context 663 drivers/dma/sh/shdma-base.c enum dma_transfer_direction direction, unsigned long flags, void *context) context 418 drivers/dma/sh/usb-dmac.c unsigned long dma_flags, void *context) context 922 drivers/dma/sprd-dma.c unsigned long flags, void *context) context 936 drivers/dma/sprd-dma.c if (context) { context 938 drivers/dma/sprd-dma.c (struct sprd_dma_linklist *)context; context 508 drivers/dma/st_fdma.c unsigned long flags, void *context) context 2500 drivers/dma/ste_dma40.c unsigned long dma_flags, void *context) context 831 drivers/dma/stm32-dma.c unsigned long flags, void *context) context 783 drivers/dma/stm32-mdma.c unsigned long flags, void *context) context 769 drivers/dma/sun4i-dma.c unsigned long flags, void *context) context 685 drivers/dma/sun6i-dma.c unsigned long flags, void *context) context 1003 drivers/dma/tegra20-apb-dma.c void *context) context 586 drivers/dma/ti/cppi41.c enum dma_transfer_direction dir, unsigned long tx_flags, void *context) context 1056 drivers/dma/ti/edma.c unsigned long tx_flags, void *context) context 887 drivers/dma/ti/omap-dma.c enum dma_transfer_direction dir, unsigned long tx_flags, void *context) context 492 drivers/dma/timb_dma.c void *context) context 805 drivers/dma/txx9dmac.c unsigned long flags, void *context) context 233 drivers/dma/uniphier-mdmac.c unsigned long flags, void *context) context 1778 drivers/dma/xilinx/xilinx_dma.c void *context) context 1783 drivers/dma/xilinx/xilinx_dma.c u32 *app_w = (u32 *)context; context 547 drivers/dma/zx_dma.c enum dma_transfer_direction dir, unsigned long flags, void *context) context 517 drivers/edac/altera_edac.c static int s10_protected_reg_write(void *context, unsigned int reg, context 521 drivers/edac/altera_edac.c unsigned long offset = (unsigned long)context; context 539 drivers/edac/altera_edac.c static int s10_protected_reg_read(void *context, unsigned int reg, context 543 drivers/edac/altera_edac.c unsigned long offset = (unsigned long)context; context 41 drivers/edac/aspeed_edac.c static int regmap_reg_write(void *context, unsigned int reg, unsigned int val) context 43 drivers/edac/aspeed_edac.c void __iomem *regs = (void __iomem *)context; context 57 drivers/edac/aspeed_edac.c static int regmap_reg_read(void *context, unsigned int reg, unsigned int *val) context 59 drivers/edac/aspeed_edac.c void __iomem *regs = (void __iomem *)context; context 911 drivers/firewire/core-cdev.c static void iso_callback(struct fw_iso_context *context, u32 cycle, context 930 drivers/firewire/core-cdev.c static void iso_mc_callback(struct fw_iso_context *context, context 948 drivers/firewire/core-cdev.c static enum dma_data_direction iso_dma_direction(struct fw_iso_context *context) context 950 drivers/firewire/core-cdev.c if (context->type == FW_ISO_CONTEXT_TRANSMIT) context 959 drivers/firewire/core-cdev.c struct fw_iso_context *context; context 992 drivers/firewire/core-cdev.c context = fw_iso_context_create(client->device->card, a->type, context 994 drivers/firewire/core-cdev.c if (IS_ERR(context)) context 995 drivers/firewire/core-cdev.c return PTR_ERR(context); context 997 drivers/firewire/core-cdev.c context->drop_overflow_headers = true; context 1003 drivers/firewire/core-cdev.c fw_iso_context_destroy(context); context 1010 drivers/firewire/core-cdev.c iso_dma_direction(context)); context 1013 drivers/firewire/core-cdev.c fw_iso_context_destroy(context); context 1020 drivers/firewire/core-cdev.c client->iso_context = context; context 757 drivers/firewire/net.c static void fwnet_receive_broadcast(struct fw_iso_context *context, context 792 drivers/firewire/net.c context->card->generation, true); context 1114 drivers/firewire/net.c struct fw_iso_context *context; context 1136 drivers/firewire/net.c context = fw_iso_context_create(dev->card, FW_ISO_CONTEXT_RECEIVE, context 1140 drivers/firewire/net.c if (IS_ERR(context)) { context 1141 drivers/firewire/net.c retval = PTR_ERR(context); context 1160 drivers/firewire/net.c dev->broadcast_rcv_context = context; context 1171 drivers/firewire/net.c retval = fw_iso_context_queue(context, &packet, context 1181 drivers/firewire/net.c retval = fw_iso_context_start(context, -1, 0, context 99 drivers/firewire/ohci.c struct context; context 101 drivers/firewire/ohci.c typedef int (*descriptor_callback_t)(struct context *ctx, context 165 drivers/firewire/ohci.c struct context context; context 206 drivers/firewire/ohci.c struct context at_request_ctx; context 207 drivers/firewire/ohci.c struct context at_response_ctx; context 1054 drivers/firewire/ohci.c struct context *ctx = (struct context *) data; context 1099 drivers/firewire/ohci.c static int context_add_buffer(struct context *ctx) context 1134 drivers/firewire/ohci.c static int context_init(struct context *ctx, struct fw_ohci *ohci, context 1167 drivers/firewire/ohci.c static void context_release(struct context *ctx) context 1179 drivers/firewire/ohci.c static struct descriptor *context_get_descriptors(struct context *ctx, context 1210 drivers/firewire/ohci.c static void context_run(struct context *ctx, u32 extra) context 1222 drivers/firewire/ohci.c static void context_append(struct context *ctx, context 1258 drivers/firewire/ohci.c static void context_stop(struct context *ctx) context 1288 drivers/firewire/ohci.c static int at_context_queue_packet(struct context *ctx, context 1418 drivers/firewire/ohci.c static void at_context_flush(struct context *ctx) context 1429 drivers/firewire/ohci.c static int handle_at_packet(struct context *context, context 1435 drivers/firewire/ohci.c struct fw_ohci *ohci = context->ohci; context 1438 drivers/firewire/ohci.c if (last->transfer_status == 0 && !context->flushing) context 1472 drivers/firewire/ohci.c if (context->flushing) context 1494 drivers/firewire/ohci.c if (context->flushing) { context 1591 drivers/firewire/ohci.c static void handle_local_request(struct context *ctx, struct fw_packet *packet) context 1630 drivers/firewire/ohci.c static void at_context_transmit(struct context *ctx, struct fw_packet *packet) context 2103 drivers/firewire/ohci.c &ohci->ir_context_list[i].context.tasklet); context 2115 drivers/firewire/ohci.c &ohci->it_context_list[i].context.tasklet); context 2548 drivers/firewire/ohci.c struct context *ctx = &ohci->at_request_ctx; context 2761 drivers/firewire/ohci.c static int handle_ir_packet_per_buffer(struct context *context, context 2766 drivers/firewire/ohci.c container_of(context, struct iso_context, context); context 2780 drivers/firewire/ohci.c dma_sync_single_range_for_cpu(context->ohci->card.device, context 2796 drivers/firewire/ohci.c static int handle_ir_buffer_fill(struct context *context, context 2801 drivers/firewire/ohci.c container_of(context, struct iso_context, context); context 2819 drivers/firewire/ohci.c dma_sync_single_range_for_cpu(context->ohci->card.device, context 2836 drivers/firewire/ohci.c dma_sync_single_range_for_cpu(ctx->context.ohci->card.device, context 2847 drivers/firewire/ohci.c static inline void sync_it_packet_for_cpu(struct context *context, context 2866 drivers/firewire/ohci.c (context->current_bus & PAGE_MASK)) { context 2874 drivers/firewire/ohci.c dma_sync_single_range_for_cpu(context->ohci->card.device, context 2884 drivers/firewire/ohci.c static int handle_it_packet(struct context *context, context 2889 drivers/firewire/ohci.c container_of(context, struct iso_context, context); context 2900 drivers/firewire/ohci.c sync_it_packet_for_cpu(context, d); context 2998 drivers/firewire/ohci.c ret = context_init(&ctx->context, ohci, regs, callback); context 3034 drivers/firewire/ohci.c struct fw_ohci *ohci = ctx->context.ohci; context 3039 drivers/firewire/ohci.c if (ctx->context.last->branch_address == 0) context 3052 drivers/firewire/ohci.c context_run(&ctx->context, match); context 3068 drivers/firewire/ohci.c reg_write(ohci, CONTEXT_MATCH(ctx->context.regs), match); context 3069 drivers/firewire/ohci.c context_run(&ctx->context, control); context 3099 drivers/firewire/ohci.c context_stop(&ctx->context); context 3100 drivers/firewire/ohci.c tasklet_kill(&ctx->context.tasklet); context 3113 drivers/firewire/ohci.c context_release(&ctx->context); context 3180 drivers/firewire/ohci.c if (ctx->context.running) context 3186 drivers/firewire/ohci.c if (ctx->context.running) context 3227 drivers/firewire/ohci.c d = context_get_descriptors(&ctx->context, z + header_z, &d_bus); context 3273 drivers/firewire/ohci.c dma_sync_single_range_for_device(ctx->context.ohci->card.device, context 3291 drivers/firewire/ohci.c context_append(&ctx->context, d, z, header_z); context 3301 drivers/firewire/ohci.c struct device *device = ctx->context.ohci->card.device; context 3324 drivers/firewire/ohci.c d = context_get_descriptors(&ctx->context, context 3371 drivers/firewire/ohci.c context_append(&ctx->context, d, z, header_z); context 3397 drivers/firewire/ohci.c d = context_get_descriptors(&ctx->context, 1, &d_bus); context 3419 drivers/firewire/ohci.c dma_sync_single_range_for_device(ctx->context.ohci->card.device, context 3427 drivers/firewire/ohci.c context_append(&ctx->context, d, 1, 0); context 3442 drivers/firewire/ohci.c spin_lock_irqsave(&ctx->context.ohci->lock, flags); context 3454 drivers/firewire/ohci.c spin_unlock_irqrestore(&ctx->context.ohci->lock, flags); context 3461 drivers/firewire/ohci.c struct context *ctx = context 3462 drivers/firewire/ohci.c &container_of(base, struct iso_context, base)->context; context 3472 drivers/firewire/ohci.c tasklet_disable(&ctx->context.tasklet); context 3475 drivers/firewire/ohci.c context_tasklet((unsigned long)&ctx->context); context 3495 drivers/firewire/ohci.c tasklet_enable(&ctx->context.tasklet); context 401 drivers/gpio/gpio-adp5588.c ret = pdata->setup(client, gc->base, gc->ngpio, pdata->context); context 421 drivers/gpio/gpio-adp5588.c pdata->context); context 708 drivers/gpio/gpio-max732x.c chip->gpio_chip.ngpio, pdata->context); context 726 drivers/gpio/gpio-max732x.c chip->gpio_chip.ngpio, pdata->context); context 53 drivers/gpio/gpio-omap.c struct gpio_regs context; context 109 drivers/gpio/gpio-omap.c bank->context.oe = omap_gpio_rmw(bank->base + bank->regs->direction, context 123 drivers/gpio/gpio-omap.c bank->context.dataout |= l; context 126 drivers/gpio/gpio-omap.c bank->context.dataout &= ~l; context 136 drivers/gpio/gpio-omap.c bank->context.dataout = omap_gpio_rmw(bank->base + bank->regs->dataout, context 213 drivers/gpio/gpio-omap.c bank->context.debounce = debounce; context 214 drivers/gpio/gpio-omap.c bank->context.debounce_en = val; context 241 drivers/gpio/gpio-omap.c bank->context.debounce_en &= ~gpio_bit; context 242 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.debounce_en, context 246 drivers/gpio/gpio-omap.c bank->context.debounce = 0; context 247 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.debounce, bank->base + context 291 drivers/gpio/gpio-omap.c bank->context.leveldetect0 = context 293 drivers/gpio/gpio-omap.c bank->context.leveldetect1 = context 295 drivers/gpio/gpio-omap.c bank->context.risingdetect = context 297 drivers/gpio/gpio-omap.c bank->context.fallingdetect = context 300 drivers/gpio/gpio-omap.c bank->level_mask = bank->context.leveldetect0 | context 301 drivers/gpio/gpio-omap.c bank->context.leveldetect1; context 388 drivers/gpio/gpio-omap.c bank->context.ctrl = ctrl; context 402 drivers/gpio/gpio-omap.c bank->context.ctrl = ctrl; context 513 drivers/gpio/gpio-omap.c bank->context.irqenable1 |= gpio_mask; context 516 drivers/gpio/gpio-omap.c bank->context.irqenable1 &= ~gpio_mask; context 520 drivers/gpio/gpio-omap.c bank->context.irqenable1 = context 533 drivers/gpio/gpio-omap.c bank->context.wake_en = context 720 drivers/gpio/gpio-omap.c writel_relaxed(0xffff & ~bank->context.wake_en, mask_reg); context 734 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.wake_en, mask_reg); context 927 drivers/gpio/gpio-omap.c bank->context.dataout = l; context 969 drivers/gpio/gpio-omap.c bank->context.oe = readl_relaxed(bank->base + bank->regs->direction); context 1061 drivers/gpio/gpio-omap.c p->context.ctrl = readl_relaxed(base + regs->ctrl); context 1062 drivers/gpio/gpio-omap.c p->context.oe = readl_relaxed(base + regs->direction); context 1063 drivers/gpio/gpio-omap.c p->context.wake_en = readl_relaxed(base + regs->wkup_en); context 1064 drivers/gpio/gpio-omap.c p->context.leveldetect0 = readl_relaxed(base + regs->leveldetect0); context 1065 drivers/gpio/gpio-omap.c p->context.leveldetect1 = readl_relaxed(base + regs->leveldetect1); context 1066 drivers/gpio/gpio-omap.c p->context.risingdetect = readl_relaxed(base + regs->risingdetect); context 1067 drivers/gpio/gpio-omap.c p->context.fallingdetect = readl_relaxed(base + regs->fallingdetect); context 1068 drivers/gpio/gpio-omap.c p->context.irqenable1 = readl_relaxed(base + regs->irqenable); context 1069 drivers/gpio/gpio-omap.c p->context.irqenable2 = readl_relaxed(base + regs->irqenable2); context 1070 drivers/gpio/gpio-omap.c p->context.dataout = readl_relaxed(base + regs->dataout); context 1080 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.wake_en, base + regs->wkup_en); context 1081 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.ctrl, base + regs->ctrl); context 1082 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.leveldetect0, base + regs->leveldetect0); context 1083 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.leveldetect1, base + regs->leveldetect1); context 1084 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.risingdetect, base + regs->risingdetect); context 1085 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.fallingdetect, base + regs->fallingdetect); context 1086 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.dataout, base + regs->dataout); context 1087 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.oe, base + regs->direction); context 1090 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.debounce, base + regs->debounce); context 1091 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.debounce_en, context 1095 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.irqenable1, base + regs->irqenable); context 1096 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.irqenable2, base + regs->irqenable2); context 1111 drivers/gpio/gpio-omap.c mask = bank->enabled_non_wakeup_gpios & bank->context.fallingdetect; context 1112 drivers/gpio/gpio-omap.c mask &= ~bank->context.risingdetect; context 1116 drivers/gpio/gpio-omap.c mask = bank->enabled_non_wakeup_gpios & bank->context.risingdetect; context 1117 drivers/gpio/gpio-omap.c mask &= ~bank->context.fallingdetect; context 1176 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.fallingdetect, context 1178 drivers/gpio/gpio-omap.c writel_relaxed(bank->context.risingdetect, context 1197 drivers/gpio/gpio-omap.c gen0 = l & bank->context.fallingdetect; context 1200 drivers/gpio/gpio-omap.c gen1 = l & bank->context.risingdetect; context 1204 drivers/gpio/gpio-omap.c gen = l & (~(bank->context.fallingdetect) & context 1205 drivers/gpio/gpio-omap.c ~(bank->context.risingdetect)); context 1016 drivers/gpio/gpio-pca953x.c chip->gpio_chip.ngpio, pdata->context); context 1036 drivers/gpio/gpio-pca953x.c chip->gpio_chip.ngpio, pdata->context); context 378 drivers/gpio/gpio-pcf857x.c pdata->context); context 403 drivers/gpio/gpio-pcf857x.c pdata->context); context 126 drivers/gpio/gpio-zynq.c struct gpio_regs context; context 657 drivers/gpio/gpio-zynq.c gpio->context.datalsw[bank_num] = context 660 drivers/gpio/gpio-zynq.c gpio->context.datamsw[bank_num] = context 663 drivers/gpio/gpio-zynq.c gpio->context.dirm[bank_num] = readl_relaxed(gpio->base_addr + context 665 drivers/gpio/gpio-zynq.c gpio->context.int_en[bank_num] = readl_relaxed(gpio->base_addr + context 667 drivers/gpio/gpio-zynq.c gpio->context.int_type[bank_num] = context 670 drivers/gpio/gpio-zynq.c gpio->context.int_polarity[bank_num] = context 673 drivers/gpio/gpio-zynq.c gpio->context.int_any[bank_num] = context 686 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.datalsw[bank_num], context 689 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.datamsw[bank_num], context 692 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.dirm[bank_num], context 695 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.int_type[bank_num], context 698 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.int_polarity[bank_num], context 701 drivers/gpio/gpio-zynq.c writel_relaxed(gpio->context.int_any[bank_num], context 704 drivers/gpio/gpio-zynq.c writel_relaxed(~(gpio->context.int_en[bank_num]), context 260 drivers/gpio/gpiolib-acpi.c void *context) context 262 drivers/gpio/gpiolib-acpi.c struct acpi_gpio_chip *acpi_gpio = context; context 95 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h struct amdgpu_amdkfd_fence *amdgpu_amdkfd_fence_create(u64 context, context 62 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_fence.c struct amdgpu_amdkfd_fence *amdgpu_amdkfd_fence_create(u64 context, context 78 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_fence.c context, atomic_inc_return(&fence_seq)); context 246 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c if (f->context == ef->base.context) context 2065 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c process_info->eviction_fence->base.context, context 147 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c job->base.s_fence->scheduled.context : 0; context 128 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c uint64_t context = dma_fence_context_alloc(1); context 131 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c array = dma_fence_array_create(count, fences, context, context 281 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c updates->context == (*id)->flushed_updates->context && context 288 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c ((*id)->last_flush->context != fence_context && context 359 drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c ((*id)->last_flush->context != fence_context && context 261 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c uint32_t idx = best_bo->fence->context; context 361 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c idx = fence->context % AMDGPU_SA_NUM_FENCE_LISTS; context 392 drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c i->fence->seqno, i->fence->context); context 136 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c hash_for_each_possible(sync->fences, e, node, f->context) { context 137 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c if (unlikely(e->fence->context != f->context)) context 177 drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c hash_add(sync->fences, &e->node, f->context); context 170 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __field(unsigned int, context) context 180 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->context = job->base.s_fence->finished.context; context 186 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->sched_job_id, __get_str(timeline), __entry->context, context 196 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __field(unsigned int, context) context 205 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->context = job->base.s_fence->finished.context; context 211 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->sched_job_id, __get_str(timeline), __entry->context, context 482 drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h __entry->ctx = fence->context; context 1182 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_release_state(dm_state->context); context 1183 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_state->context = dc_create_state(dm->dc); context 1185 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_resource_state_construct(dm->dc, dm_state->context); context 1978 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (old_state && old_state->context) context 1979 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c new_state->context = dc_copy_state(old_state->context); context 1981 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (!new_state->context) { context 1994 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (dm_state && dm_state->context) context 1995 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_release_state(dm_state->context); context 2029 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c state->context = dc_create_state(adev->dm.dc); context 2030 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (!state->context) { context 2035 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_resource_state_copy_construct_current(adev->dm.dc, state->context); context 3525 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c static void dm_enable_per_frame_crtc_master_sync(struct dc_state *context) context 3529 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (context->stream_count < 2) context 3531 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c for (i = 0; i < context->stream_count ; i++) { context 3532 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (!context->streams[i]) context 3539 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c set_multisync_trigger_params(context->streams[i]); context 3541 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c set_master_stream(context->streams, context->stream_count); context 6174 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (dm_state && dm_state->context) { context 6175 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_state = dm_state->context; context 6742 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_state->context, context 6785 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_state->context, context 6986 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_state->context)) { context 7052 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dm_state->context)) { context 7205 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c status = dc_stream_get_status_from_state(old_dm_state->context, context 7453 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (dc_validate_global_state(dc, dm_state->context, false) != DC_OK) { context 7472 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (new_dm_state->context) context 7473 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_release_state(new_dm_state->context); context 7475 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c new_dm_state->context = old_dm_state->context; context 7477 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c if (old_dm_state->context) context 7478 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c dc_retain_state(old_dm_state->context); context 336 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.h struct dc_state *context; context 543 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct dc_state *context, context 553 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = context 555 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = context 557 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = context 559 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000; context 560 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = v->urgent_watermark * 1000; context 567 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = context 569 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = context 571 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = context 573 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000; context 574 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = v->urgent_watermark * 1000; context 588 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns = context 590 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns = context 592 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns = context 594 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000; context 595 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = v->urgent_watermark * 1000; context 608 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = context 610 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = context 612 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = context 614 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000; context 615 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = v->urgent_watermark * 1000; context 617 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a; context 618 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a; context 621 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a; context 694 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct dc_state *context) context 700 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->stream_count >= 2) context 703 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c if (context->stream_count == 1 && context 705 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c hack_force_pipe_split(v, context->streams[0]->timing.pix_clk_100hz); context 721 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct dc_state *context, context 731 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct dcn_bw_internal_vars *v = &context->dcn_bw_vars; context 866 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 1029 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c hack_bounding_box(v, &dc->debug, context); context 1035 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c && context->stream_count == 1 context 1050 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = v->sr_enter_plus_exit_time; context 1051 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.dml.soc.sr_exit_time_us = v->sr_exit_time; context 1117 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = context 1119 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = context 1121 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = context 1123 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000; context 1124 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = v->urgent_watermark * 1000; context 1125 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a; context 1126 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a; context 1127 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a; context 1129 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.fclk_khz = (int)(bw_consumed * 1000000 / context 1132 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.fclk_khz = (int)(bw_consumed * 1000000 / 32); context 1134 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = (int)(v->dcf_clk_deep_sleep * 1000); context 1135 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dcfclk_khz = (int)(v->dcfclk * 1000); context 1137 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dispclk_khz = (int)(v->dispclk * 1000); context 1139 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dispclk_khz = (int)(dc->dcn_soc->max_dispclk_vmax0p9 * 1000); context 1141 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c if (context->bw_ctx.bw.dcn.clk.dispclk_khz < context 1143 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dispclk_khz = context 1147 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.dppclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz / context 1149 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.phyclk_khz = v->phyclk_per_state[v->voltage_level]; context 1152 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context 1156 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context 1160 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context 1164 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context 1172 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 1233 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c hsplit_pipe = find_idle_secondary_pipe(&context->res_ctx, pool, pipe); context 1235 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c split_stream_across_pipes(&context->res_ctx, pool, pipe, hsplit_pipe); context 1266 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = context 1268 drivers/gpu/drm/amd/display/dc/calcs/dcn_calcs.c context->bw_ctx.dml.soc.sr_exit_time_us = dc->dcn_soc->sr_exit_time; context 47 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c struct dc_state *context) context 52 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c for (i = 0; i < context->stream_count; i++) { context 53 drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c const struct dc_stream_state *stream = context->streams[i]; context 165 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c uint32_t dce_get_max_pixel_clock_for_all_paths(struct dc_state *context) context 171 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 196 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c struct dc_state *context) context 201 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c int max_pix_clk = dce_get_max_pixel_clock_for_all_paths(context); context 208 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c if (context->bw_ctx.bw.dce.dispclk_khz > context 218 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c < context->bw_ctx.bw.dce.dispclk_khz) context 383 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c struct dc_state *context) context 385 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg; context 387 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c pp_display_cfg->avail_mclk_switch_time_us = dce110_get_min_vblank_time_us(context); context 389 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c dce110_fill_display_configs(context, pp_display_cfg); context 396 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c struct dc_state *context, context 401 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 407 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context); context 419 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c dce_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context); context 37 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.h struct dc_state *context); context 39 drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.h uint32_t dce_get_max_pixel_clock_for_all_paths(struct dc_state *context); context 92 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context) context 97 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c for (j = 0; j < context->stream_count; j++) { context 98 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c struct dc_stream_state *stream = context->streams[j]; context 120 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c const struct dc_state *context, context 126 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c for (j = 0; j < context->stream_count; j++) { context 129 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c const struct dc_stream_state *stream = context->streams[j]; context 135 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c if (stream == context->res_ctx.pipe_ctx[k].stream) { context 136 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c pipe_ctx = &context->res_ctx.pipe_ctx[k]; context 174 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c struct dc_state *context) context 176 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg; context 183 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.all_displays_in_sync; context 185 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.nbp_state_change_enable == false; context 187 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.cpuc_state_change_enable == false; context 189 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.cpup_state_change_enable == false; context 191 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.blackout_recovery_time_us; context 198 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c if (ASICREV_IS_VEGA20_P(dc->ctx->asic_id.hw_internal_rev) && (context->stream_count >= 2)) { context 204 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c pp_display_cfg->min_memory_clock_khz = context->bw_ctx.bw.dce.yclk_khz context 210 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.sclk_khz); context 219 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c pp_display_cfg->min_dcfclock_khz = (context->stream_count > 4) ? context 223 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c = context->bw_ctx.bw.dce.sclk_deep_sleep_khz; context 226 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c dce110_get_min_vblank_time_us(context); context 232 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c dce110_fill_display_configs(context, pp_display_cfg); context 237 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c &context->streams[0]->timing; context 249 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c struct dc_state *context, context 254 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 260 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context); context 269 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c context->bw_ctx.bw.dce.dispclk_khz = dce_set_clock(clk_mgr_base, patched_disp_clk); context 272 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context); context 34 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h const struct dc_state *context, context 40 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h struct dc_state *context); context 42 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context); context 194 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c struct dc_state *context, context 199 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 205 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context); context 217 drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context); context 85 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c struct dc_state *context, context 90 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c int max_pix_clk = dce_get_max_pixel_clock_for_all_paths(context); context 91 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 119 drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context); context 126 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c struct dc_state *context, context 132 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk; context 144 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c display_count = clk_mgr_helper_get_active_display_cnt(dc, context); context 103 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_state *context) context 110 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c if (!context->res_ctx.pipe_ctx[i].plane_state) context 113 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst; context 114 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz; context 187 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_state *context, context 191 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk; context 210 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c display_count = clk_mgr_helper_get_active_display_cnt(dc, context); context 284 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c if (!context->res_ctx.pipe_ctx[i].plane_state) context 287 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst; context 288 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz; context 301 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c if (!context->res_ctx.pipe_ctx[i].plane_state) context 304 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst; context 305 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz; context 320 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_state *context, context 325 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk; context 399 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c struct dc_state *context, context 405 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c clock_cfg->max_clock_khz = context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz; context 408 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c clock_cfg->bw_requirequired_clock_khz = context->bw_ctx.bw.dcn.clk.bw_dispclk_khz; context 411 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c clock_cfg->max_clock_khz = context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz; context 414 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c clock_cfg->bw_requirequired_clock_khz = context->bw_ctx.bw.dcn.clk.bw_dppclk_khz; context 30 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h struct dc_state *context, context 34 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h struct dc_state *context, context 37 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h struct dc_state *context); context 49 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h struct dc_state *context, context 56 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c struct dc_state *context, context 60 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk; context 69 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c display_count = clk_mgr_helper_get_active_display_cnt(dc, context); context 116 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c dcn20_update_clocks_update_dpp_dto(clk_mgr, context); context 123 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c dcn20_update_clocks_update_dpp_dto(clk_mgr, context); context 732 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context) context 743 drivers/gpu/drm/amd/display/dc/core/dc.c static void disable_dangling_plane(struct dc *dc, struct dc_state *context) context 759 drivers/gpu/drm/amd/display/dc/core/dc.c for (j = 0; j < context->stream_count; j++) { context 760 drivers/gpu/drm/amd/display/dc/core/dc.c if (old_stream == context->streams[j]) { context 955 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context) context 959 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->stream_count != dc->current_state->stream_count) context 963 drivers/gpu/drm/amd/display/dc/core/dc.c if (dc->current_state->streams[i] != context->streams[i]) context 1028 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context, context 1037 drivers/gpu/drm/amd/display/dc/core/dc.c if (context != NULL) context 1038 drivers/gpu/drm/amd/display/dc/core/dc.c pipe = &context->res_ctx.pipe_ctx[i]; context 1055 drivers/gpu/drm/amd/display/dc/core/dc.c static enum dc_status dc_commit_state_no_check(struct dc *dc, struct dc_state *context) context 1063 drivers/gpu/drm/amd/display/dc/core/dc.c disable_dangling_plane(dc, context); context 1065 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) context 1066 drivers/gpu/drm/amd/display/dc/core/dc.c dc_streams[i] = context->streams[i]; context 1069 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.enable_accelerated_mode(dc, context); context 1071 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) { context 1072 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->streams[i]->apply_seamless_boot_optimization) context 1077 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.prepare_bandwidth(dc, context); context 1082 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) { context 1083 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->streams[i]->mode_changed) context 1087 drivers/gpu/drm/amd/display/dc/core/dc.c dc, context->streams[i], context 1088 drivers/gpu/drm/amd/display/dc/core/dc.c context->stream_status[i].plane_count, context 1089 drivers/gpu/drm/amd/display/dc/core/dc.c context); /* use new pipe config in new context */ context 1094 drivers/gpu/drm/amd/display/dc/core/dc.c pipe = &context->res_ctx.pipe_ctx[i]; context 1098 drivers/gpu/drm/amd/display/dc/core/dc.c result = dc->hwss.apply_ctx_to_hw(dc, context); context 1103 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->stream_count > 1 && !dc->debug.disable_timing_sync) { context 1104 drivers/gpu/drm/amd/display/dc/core/dc.c enable_timing_multisync(dc, context); context 1105 drivers/gpu/drm/amd/display/dc/core/dc.c program_timing_sync(dc, context); context 1109 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) { context 1110 drivers/gpu/drm/amd/display/dc/core/dc.c const struct dc_link *link = context->streams[i]->link; context 1112 drivers/gpu/drm/amd/display/dc/core/dc.c if (!context->streams[i]->mode_changed) context 1116 drivers/gpu/drm/amd/display/dc/core/dc.c dc, context->streams[i], context 1117 drivers/gpu/drm/amd/display/dc/core/dc.c context->stream_status[i].plane_count, context 1118 drivers/gpu/drm/amd/display/dc/core/dc.c context); context 1125 drivers/gpu/drm/amd/display/dc/core/dc.c pipe = &context->res_ctx.pipe_ctx[k]; context 1127 drivers/gpu/drm/amd/display/dc/core/dc.c for (l = 0 ; pipe && l < context->stream_count; l++) { context 1128 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->streams[l] && context 1129 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[l] == pipe->stream && context 1136 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->timing.h_addressable, context 1137 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->timing.v_addressable, context 1138 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->timing.h_total, context 1139 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->timing.v_total, context 1140 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->timing.pix_clk_100hz / 10); context 1143 drivers/gpu/drm/amd/display/dc/core/dc.c dc_enable_stereo(dc, context, dc_streams, context->stream_count); context 1147 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.optimize_bandwidth(dc, context); context 1149 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) context 1150 drivers/gpu/drm/amd/display/dc/core/dc.c context->streams[i]->mode_changed = false; context 1152 drivers/gpu/drm/amd/display/dc/core/dc.c memset(&context->commit_hints, 0, sizeof(context->commit_hints)); context 1156 drivers/gpu/drm/amd/display/dc/core/dc.c dc->current_state = context; context 1163 drivers/gpu/drm/amd/display/dc/core/dc.c bool dc_commit_state(struct dc *dc, struct dc_state *context) context 1168 drivers/gpu/drm/amd/display/dc/core/dc.c if (false == context_changed(dc, context)) context 1172 drivers/gpu/drm/amd/display/dc/core/dc.c __func__, context->stream_count); context 1174 drivers/gpu/drm/amd/display/dc/core/dc.c for (i = 0; i < context->stream_count; i++) { context 1175 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_stream_state *stream = context->streams[i]; context 1180 drivers/gpu/drm/amd/display/dc/core/dc.c result = dc_commit_state_no_check(dc, context); context 1185 drivers/gpu/drm/amd/display/dc/core/dc.c static bool is_flip_pending_in_pipes(struct dc *dc, struct dc_state *context) context 1191 drivers/gpu/drm/amd/display/dc/core/dc.c pipe = &context->res_ctx.pipe_ctx[i]; context 1208 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context = dc->current_state; context 1215 drivers/gpu/drm/amd/display/dc/core/dc.c if (is_flip_pending_in_pipes(dc, context)) context 1219 drivers/gpu/drm/amd/display/dc/core/dc.c if (context->res_ctx.pipe_ctx[i].stream == NULL || context 1220 drivers/gpu/drm/amd/display/dc/core/dc.c context->res_ctx.pipe_ctx[i].plane_state == NULL) { context 1221 drivers/gpu/drm/amd/display/dc/core/dc.c context->res_ctx.pipe_ctx[i].pipe_idx = i; context 1222 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.disable_plane(dc, &context->res_ctx.pipe_ctx[i]); context 1227 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.optimize_bandwidth(dc, context); context 1233 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context = kvzalloc(sizeof(struct dc_state), context 1236 drivers/gpu/drm/amd/display/dc/core/dc.c if (!context) context 1243 drivers/gpu/drm/amd/display/dc/core/dc.c memcpy(&context->bw_ctx.dml, &dc->dml, sizeof(struct display_mode_lib)); context 1246 drivers/gpu/drm/amd/display/dc/core/dc.c kref_init(&context->refcount); context 1248 drivers/gpu/drm/amd/display/dc/core/dc.c return context; context 1289 drivers/gpu/drm/amd/display/dc/core/dc.c void dc_retain_state(struct dc_state *context) context 1291 drivers/gpu/drm/amd/display/dc/core/dc.c kref_get(&context->refcount); context 1296 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context = container_of(kref, struct dc_state, refcount); context 1297 drivers/gpu/drm/amd/display/dc/core/dc.c dc_resource_state_destruct(context); context 1298 drivers/gpu/drm/amd/display/dc/core/dc.c kvfree(context); context 1301 drivers/gpu/drm/amd/display/dc/core/dc.c void dc_release_state(struct dc_state *context) context 1303 drivers/gpu/drm/amd/display/dc/core/dc.c kref_put(&context->refcount, dc_state_free); context 1356 drivers/gpu/drm/amd/display/dc/core/dc.c const struct dc_state *context, context 1362 drivers/gpu/drm/amd/display/dc/core/dc.c const struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 1519 drivers/gpu/drm/amd/display/dc/core/dc.c const struct dc_state *context = dc->current_state; context 1529 drivers/gpu/drm/amd/display/dc/core/dc.c if (!is_surface_in_context(context, u->surface)) { context 1799 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context, context 1884 drivers/gpu/drm/amd/display/dc/core/dc.c if (!dc->res_pool->funcs->validate_bandwidth(dc, context, context 1897 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context) context 1903 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 1999 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context) context 2019 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.prepare_bandwidth(dc, context); context 2020 drivers/gpu/drm/amd/display/dc/core/dc.c context_clock_trace(dc, context); context 2025 drivers/gpu/drm/amd/display/dc/core/dc.c commit_planes_do_stream_update(dc, stream, stream_update, update_type, context); context 2032 drivers/gpu/drm/amd/display/dc/core/dc.c dc->hwss.apply_ctx_for_surface(dc, stream, 0, context); context 2042 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 2062 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 2090 drivers/gpu/drm/amd/display/dc/core/dc.c stream_get_status(context, pipe_ctx->stream); context 2093 drivers/gpu/drm/amd/display/dc/core/dc.c dc, pipe_ctx->stream, stream_status->plane_count, context); context 2111 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 2130 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 2156 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j]; context 2178 drivers/gpu/drm/amd/display/dc/core/dc.c struct dc_state *context; context 2183 drivers/gpu/drm/amd/display/dc/core/dc.c context = dc->current_state; context 2195 drivers/gpu/drm/amd/display/dc/core/dc.c context = dc_create_state(dc); context 2196 drivers/gpu/drm/amd/display/dc/core/dc.c if (context == NULL) { context 2201 drivers/gpu/drm/amd/display/dc/core/dc.c dc_resource_state_copy_construct(state, context); context 2204 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *new_pipe = &context->res_ctx.pipe_ctx[i]; context 2221 drivers/gpu/drm/amd/display/dc/core/dc.c &context->res_ctx.pipe_ctx[j]; context 2231 drivers/gpu/drm/amd/display/dc/core/dc.c copy_stream_update_to_stream(dc, context, stream, stream_update); context 2240 drivers/gpu/drm/amd/display/dc/core/dc.c context); context 2242 drivers/gpu/drm/amd/display/dc/core/dc.c if (dc->current_state != context) { context 2246 drivers/gpu/drm/amd/display/dc/core/dc.c dc->current_state = context; context 2250 drivers/gpu/drm/amd/display/dc/core/dc.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 348 drivers/gpu/drm/amd/display/dc/core/dc_debug.c struct dc_state *context) context 354 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dispclk_khz, context 355 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dppclk_khz, context 356 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dcfclk_khz, context 357 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz, context 358 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.fclk_khz, context 359 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.socclk_khz); context 362 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dispclk_khz, context 363 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dppclk_khz, context 364 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dcfclk_khz, context 365 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz, context 366 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.fclk_khz, context 367 drivers/gpu/drm/amd/display/dc/core/dc_debug.c context->bw_ctx.bw.dcn.clk.socclk_khz); context 1054 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1059 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (context->res_ctx.pipe_ctx[i].plane_state != NULL && context 1060 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->res_ctx.pipe_ctx[i].stream != NULL) context 1061 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i])) context 1164 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context, context 1169 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct resource_context *res_ctx = &context->res_ctx; context 1190 drivers/gpu/drm/amd/display/dc/core/dc_resource.c return pool->funcs->acquire_idle_pipe_for_layer(context, pool, head_pipe->stream); context 1234 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1241 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) context 1242 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (context->streams[i] == stream) { context 1243 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream_status = &context->stream_status[i]; context 1258 drivers/gpu/drm/amd/display/dc/core/dc_resource.c head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 1269 drivers/gpu/drm/amd/display/dc/core/dc_resource.c tail_pipe = resource_get_tail_pipe(&context->res_ctx, head_pipe); context 1272 drivers/gpu/drm/amd/display/dc/core/dc_resource.c free_pipe = acquire_free_pipe_for_head(context, pool, head_pipe); context 1276 drivers/gpu/drm/amd/display/dc/core/dc_resource.c int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream); context 1278 drivers/gpu/drm/amd/display/dc/core/dc_resource.c free_pipe = &context->res_ctx.pipe_ctx[pipe_idx]; context 1312 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1318 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) context 1319 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (context->streams[i] == stream) { context 1320 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream_status = &context->stream_status[i]; context 1331 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1383 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1389 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) context 1390 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (context->streams[i] == stream) { context 1391 drivers/gpu/drm/amd/display/dc/core/dc_resource.c stream_status = &context->stream_status[i]; context 1406 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context)) context 1417 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1431 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context)) context 1442 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1453 drivers/gpu/drm/amd/display/dc/core/dc_resource.c return add_all_planes_for_stream(dc, stream, &set, 1, context); context 1787 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context) context 1791 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) { context 1792 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_stream_state *stream_has_pll = context->streams[i]; context 1907 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context, context 1939 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, context 1945 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream); context 1949 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream); context 1952 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL) context 1955 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx]; context 1959 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, pool, stream); context 1965 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, pool, context 1974 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id); context 1982 drivers/gpu/drm/amd/display/dc/core/dc_resource.c update_audio_usage(&context->res_ctx, pool, context 1990 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) context 1991 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (context->streams[i] == stream) { context 1992 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst; context 1993 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id; context 1994 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].audio_inst = context 2429 drivers/gpu/drm/amd/display/dc/core/dc_resource.c void dc_resource_state_destruct(struct dc_state *context) context 2433 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (i = 0; i < context->stream_count; i++) { context 2434 drivers/gpu/drm/amd/display/dc/core/dc_resource.c for (j = 0; j < context->stream_status[i].plane_count; j++) context 2436 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].plane_states[j]); context 2438 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].plane_count = 0; context 2439 drivers/gpu/drm/amd/display/dc/core/dc_resource.c dc_stream_release(context->streams[i]); context 2440 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->streams[i] = NULL; context 2533 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct dc_state *context, context 2539 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, stream); context 2552 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, context 2558 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, context 2566 drivers/gpu/drm/amd/display/dc/core/dc_resource.c &context->res_ctx, pool, context 902 drivers/gpu/drm/amd/display/dc/dc.h void dc_resource_state_destruct(struct dc_state *context); context 913 drivers/gpu/drm/amd/display/dc/dc.h bool dc_commit_state(struct dc *dc, struct dc_state *context); context 918 drivers/gpu/drm/amd/display/dc/dc.h void dc_retain_state(struct dc_state *context); context 919 drivers/gpu/drm/amd/display/dc/dc.h void dc_release_state(struct dc_state *context); context 316 drivers/gpu/drm/amd/display/dc/dc_stream.h struct dc_state *context); context 322 drivers/gpu/drm/amd/display/dc/dc_stream.h struct dc_state *context); context 327 drivers/gpu/drm/amd/display/dc/dc_stream.h struct dc_state *context); context 334 drivers/gpu/drm/amd/display/dc/dc_stream.h struct dc_state *context); context 365 drivers/gpu/drm/amd/display/dc/dc_stream.h struct dc_state *context, context 184 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c static uint32_t get_max_pixel_clock_for_all_paths(struct dc_state *context) context 190 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 215 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context) context 220 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int max_pix_clk = get_max_pixel_clock_for_all_paths(context); context 227 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c if (context->bw_ctx.bw.dce.dispclk_khz > context 237 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c < context->bw_ctx.bw.dce.dispclk_khz) context 492 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c const struct dc_state *context, context 498 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c for (j = 0; j < context->stream_count; j++) { context 501 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c const struct dc_stream_state *stream = context->streams[j]; context 507 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c if (stream == context->res_ctx.pipe_ctx[k].stream) { context 508 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c pipe_ctx = &context->res_ctx.pipe_ctx[k]; context 544 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c static uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context) context 549 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c for (j = 0; j < context->stream_count; j++) { context 550 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_stream_state *stream = context->streams[j]; context 596 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context) context 598 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg; context 600 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c pp_display_cfg->avail_mclk_switch_time_us = dce110_get_min_vblank_time_us(context); context 602 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce110_fill_display_configs(context, pp_display_cfg); context 610 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context) context 612 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg; context 615 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.all_displays_in_sync; context 617 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.nbp_state_change_enable == false; context 619 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.cpuc_state_change_enable == false; context 621 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.cpup_state_change_enable == false; context 623 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.blackout_recovery_time_us; context 625 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c pp_display_cfg->min_memory_clock_khz = context->bw_ctx.bw.dce.yclk_khz context 630 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.sclk_khz); context 639 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c pp_display_cfg->min_dcfclock_khz = (context->stream_count > 4)? context 643 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c = context->bw_ctx.bw.dce.sclk_deep_sleep_khz; context 646 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce110_get_min_vblank_time_us(context); context 652 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce110_fill_display_configs(context, pp_display_cfg); context 657 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c &context->streams[0]->timing; context 669 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context, context 674 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 680 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context); context 692 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce_pplib_apply_display_requirements(clk_mgr->ctx->dc, context); context 696 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context, context 701 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 707 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context); context 716 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c context->bw_ctx.bw.dce.dispclk_khz = dce_set_clock(clk_mgr, patched_disp_clk); context 719 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context); context 723 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context, context 728 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 734 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context); context 746 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context); context 750 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c struct dc_state *context, context 755 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int max_pix_clk = get_max_pixel_clock_for_all_paths(context); context 756 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz; context 784 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context); context 111 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c struct dc_state *context) context 113 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool); context 117 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c context, context 123 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c struct dc_state *context) context 125 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool); context 129 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.c context, context 38 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.h struct dc_state *context); context 42 drivers/gpu/drm/amd/display/dc/dce100/dce100_hw_sequencer.h struct dc_state *context); context 750 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c struct dc_state *context, context 753 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 767 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c struct dc_state *context, context 774 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c if (context->res_ctx.pipe_ctx[i].stream) context 780 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c context->bw_ctx.bw.dce.dispclk_khz = 681000; context 781 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c context->bw_ctx.bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER_CZ; context 783 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c context->bw_ctx.bw.dce.dispclk_khz = 0; context 784 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c context->bw_ctx.bw.dce.yclk_khz = 0; context 791 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c struct dc_state *context) context 795 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c for (i = 0; i < context->stream_count; i++) { context 796 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c if (context->stream_status[i].plane_count == 0) context 799 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c if (context->stream_status[i].plane_count > 1) context 802 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c if (context->stream_status[i].plane_states[0]->format context 812 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c struct dc_state *context) context 814 drivers/gpu/drm/amd/display/dc/dce100/dce100_resource.c if (!dce100_validate_surface_sets(context)) context 1271 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context, context 1326 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context, context 1343 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c build_audio_output(context, pipe_ctx, &audio_output); context 1367 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dc->hwss.enable_stream_timing(pipe_ctx, context, dc); context 1417 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c core_link_enable_stream(context, pipe_ctx); context 1521 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c static struct dc_stream_state *get_edp_stream(struct dc_state *context) context 1525 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c for (i = 0; i < context->stream_count; i++) { context 1526 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (context->streams[i]->signal == SIGNAL_TYPE_EDP) context 1527 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c return context->streams[i]; context 1546 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 1570 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c void dce110_enable_accelerated_mode(struct dc *dc, struct dc_state *context) context 1573 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_link *edp_link_with_sink = get_edp_link_with_sink(dc, context); context 1581 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dc->hwss.init_pipes(dc, context); context 1583 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c edp_stream = get_edp_stream(context); context 1605 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c for (i = 0; i < context->stream_count; i++) { context 1606 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (context->streams[i]->apply_seamless_boot_optimization) { context 1648 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 1654 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1664 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[num_pipes], context 1665 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[num_pipes], context 1666 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.stutter_entry_wm_ns[num_pipes], context 1667 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.urgent_wm_ns[num_pipes], context 1673 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[num_pipes], context 1674 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[num_pipes], context 1675 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->bw_ctx.bw.dce.urgent_wm_ns[num_pipes], context 1791 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context, context 1796 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct resource_context *res_ctx = &context->res_ctx; context 1807 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (context->stream_count != 1) context 1856 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 1860 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (should_enable_fbc(dc, context, &pipe_idx)) { context 1864 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx]; context 1880 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 1889 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1936 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (old_clk && 0 == resource_get_clock_source_reference(&context->res_ctx, context 1950 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 1974 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1988 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c build_audio_output(context, pipe_ctx, &audio_output); context 2002 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2016 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c build_audio_output(context, pipe_ctx, &audio_output); context 2031 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 2039 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dc->hwss.reset_hw_ctx_wrap(dc, context); context 2042 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (context->stream_count <= 0) context 2052 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2072 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dce110_setup_audio_dto(dc, context); context 2077 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2095 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context, context 2361 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c static void init_pipes(struct dc *dc, struct dc_state *context) context 2437 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 2441 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool); context 2445 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context, context 2451 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 2455 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dce110_set_displaymarks(dc, context); context 2459 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context, context 2580 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct dc_state *context) context 2591 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2602 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2613 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c context->stream_count); context 2624 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2634 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c enable_fbc(dc, context); context 40 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h struct dc_state *context); context 58 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h void dce110_enable_accelerated_mode(struct dc *dc, struct dc_state *context); context 68 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h struct dc_state *context); context 72 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.h struct dc_state *context); context 869 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct dc_state *context, context 872 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 892 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct dc_state *context, context 905 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->res_ctx.pipe_ctx, context 907 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c &context->bw_ctx.bw.dce)) context 913 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->streams[0]->timing.h_addressable, context 914 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->streams[0]->timing.v_addressable, context 915 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->streams[0]->timing.pix_clk_100hz / 10); context 918 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c &context->bw_ctx.bw.dce, sizeof(context->bw_ctx.bw.dce))) { context 932 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].b_mark, context 933 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].a_mark, context 934 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[0].b_mark, context 935 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[0].a_mark, context 936 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].b_mark, context 937 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].a_mark, context 938 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].b_mark, context 939 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].a_mark, context 940 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[1].b_mark, context 941 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[1].a_mark, context 942 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].b_mark, context 943 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].a_mark, context 944 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].b_mark, context 945 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].a_mark, context 946 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[2].b_mark, context 947 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[2].a_mark, context 948 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].b_mark, context 949 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].a_mark, context 950 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.stutter_mode_enable, context 951 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.cpuc_state_change_enable, context 952 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.cpup_state_change_enable, context 953 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.nbp_state_change_enable, context 954 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.all_displays_in_sync, context 955 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.dispclk_khz, context 956 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.sclk_khz, context 957 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.sclk_deep_sleep_khz, context 958 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.yclk_khz, context 959 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->bw_ctx.bw.dce.blackout_recovery_time_us); context 975 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct dc_state *context) context 979 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c for (i = 0; i < context->stream_count; i++) { context 980 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c if (context->stream_status[i].plane_count == 0) context 983 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c if (context->stream_status[i].plane_count > 2) context 986 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c for (j = 0; j < context->stream_status[i].plane_count; j++) { context 988 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->stream_status[i].plane_states[j]; context 1007 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c if (context->streams[i]->timing.pixel_encoding context 1021 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct dc_state *context) context 1023 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c if (!dce110_validate_surface_sets(context)) context 1049 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct dc_state *context, context 1054 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c struct resource_context *res_ctx = &context->res_ctx; context 1102 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c context->stream_count); context 797 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct dc_state *context, context 800 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 814 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct dc_state *context, context 827 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->res_ctx.pipe_ctx, context 829 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c &context->bw_ctx.bw.dce)) context 838 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c &context->bw_ctx.bw.dce, sizeof(context->bw_ctx.bw.dce))) { context 852 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].b_mark, context 853 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].a_mark, context 854 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[0].b_mark, context 855 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[0].a_mark, context 856 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].b_mark, context 857 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].a_mark, context 858 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].b_mark, context 859 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].a_mark, context 860 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[1].b_mark, context 861 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[1].a_mark, context 862 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].b_mark, context 863 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].a_mark, context 864 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].b_mark, context 865 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].a_mark, context 866 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[2].b_mark, context 867 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.urgent_wm_ns[2].a_mark, context 868 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].b_mark, context 869 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].a_mark, context 870 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.stutter_mode_enable, context 871 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.cpuc_state_change_enable, context 872 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.cpup_state_change_enable, context 873 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.nbp_state_change_enable, context 874 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.all_displays_in_sync, context 875 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.dispclk_khz, context 876 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.sclk_khz, context 877 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.sclk_deep_sleep_khz, context 878 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.yclk_khz, context 879 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c context->bw_ctx.bw.dce.blackout_recovery_time_us); context 886 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct dc_state *context, context 892 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c &context->res_ctx, stream); context 903 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c &context->res_ctx, dc->res_pool, context 910 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c &context->res_ctx, context 918 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct dc_state *context) context 922 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c for (i = 0; i < context->stream_count; i++) { context 923 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c if (context->stream_status[i].plane_count == 0) context 926 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c if (context->stream_status[i].plane_count > 1) context 929 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c if (context->stream_status[i].plane_states[0]->format context 958 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c struct dc_state *context) context 960 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.c if (!dce112_validate_surface_sets(context)) context 42 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.h struct dc_state *context, context 47 drivers/gpu/drm/amd/display/dc/dce112/dce112_resource.h struct dc_state *context, context 801 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c struct dc_state *context, context 808 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c if (context->res_ctx.pipe_ctx[i].stream) context 814 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c context->bw_ctx.bw.dce.dispclk_khz = 681000; context 815 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c context->bw_ctx.bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER_CZ; context 817 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c context->bw_ctx.bw.dce.dispclk_khz = 0; context 818 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c context->bw_ctx.bw.dce.yclk_khz = 0; context 825 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c struct dc_state *context) context 829 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c for (i = 0; i < context->stream_count; i++) { context 830 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c if (context->stream_status[i].plane_count == 0) context 833 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c if (context->stream_status[i].plane_count > 1) context 836 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c if (context->stream_status[i].plane_states[0]->format context 846 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c struct dc_state *context) context 848 drivers/gpu/drm/amd/display/dc/dce80/dce80_resource.c if (!dce80_validate_surface_sets(context)) context 733 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context, context 819 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 1076 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c static void dcn10_init_pipes(struct dc *dc, struct dc_state *context) context 1081 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c for (i = 0; i < context->stream_count; i++) { context 1082 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (context->streams[i]->apply_seamless_boot_optimization) { context 1090 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1115 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1129 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1307 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 1315 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1828 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2282 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2295 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c bool should_divided_by_2 = context->bw_ctx.bw.dcn.clk.dppclk_khz <= context 2473 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2476 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dcn10_enable_plane(dc, pipe_ctx, context); context 2478 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c update_dchubp_dpp(dc, pipe_ctx, context); context 2500 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2520 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dcn10_program_pipe(dc, pipe_ctx, context); context 2523 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c program_all_pipe_in_tree(dc, pipe_ctx->bottom_pipe, context); context 2528 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context, context 2534 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2554 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2562 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c find_top_pipe_for_stream(dc, context, stream); context 2579 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c lock_all_pipes(dc, context, true); context 2596 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2626 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c program_all_pipe_in_tree(dc, top_pipe_to_program, context); context 2631 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dc->hwss.program_all_writeback_pipes_in_tree(dc, stream, context); context 2635 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2648 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c lock_all_pipes(dc, context, false); context 2661 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dc->hwss.optimize_bandwidth(dc, context); context 2669 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c static void dcn10_stereo_hw_frame_pack_wa(struct dc *dc, struct dc_state *context) context 2673 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c for (i = 0; i < context->stream_count; i++) { context 2674 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (context->streams[i]->timing.timing_3d_format context 2687 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2695 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (context->stream_count == 0) context 2696 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context->bw_ctx.bw.dcn.clk.phyclk_khz = 0; context 2700 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context, context 2705 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c &context->bw_ctx.bw.dcn.watermarks, context 2708 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dcn10_stereo_hw_frame_pack_wa(dc, context); context 2719 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context) context 2727 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (context->stream_count == 0) context 2728 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context->bw_ctx.bw.dcn.clk.phyclk_khz = 0; context 2732 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context, context 2737 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c &context->bw_ctx.bw.dcn.watermarks, context 2740 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dcn10_stereo_hw_frame_pack_wa(dc, context); context 3105 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context, context 3113 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 3263 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context = dc->current_state; context 3265 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_clocks *current_clocks = &context->bw_ctx.bw.dcn.clk; context 3269 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context, clock_type, &clock_cfg); context 3293 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c context, true); context 3302 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct dc_state *context = dc->current_state; context 3305 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dc->clk_mgr->funcs->get_clock(dc->clk_mgr, context, clock_type, clock_cfg); context 35 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.h const struct dc_state *context, context 47 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.h struct dc_state *context); context 79 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.h struct dc_state *context); context 83 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.h struct dc_state *context, context 91 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.h struct dc_state *context, context 1039 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c struct dc_state *context, context 1042 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 1089 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c struct dc_state *context, context 1093 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c struct resource_context *res_ctx = &context->res_ctx; context 1147 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c static enum dc_status dcn10_validate_global(struct dc *dc, struct dc_state *context) context 1155 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c for (i = 0; i < context->stream_count; i++) { context 1156 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c if (context->stream_status[i].plane_count == 0) context 1159 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c if (context->stream_status[i].plane_count > 2) context 1162 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c for (j = 0; j < context->stream_status[i].plane_count; j++) { context 1164 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c context->stream_status[i].plane_states[j]; context 525 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context, context 826 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c static void dcn20_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx) context 928 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1004 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1007 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c context->commit_hints.full_update_needed ? 1 : pipe_ctx->plane_state->update_flags.bits.full_update; context 1010 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_enable_plane(dc, pipe_ctx, context); context 1012 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c update_dchubp_dpp(dc, pipe_ctx, context); context 1034 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1052 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dc->hwss.update_odm(dc, context, pipe_ctx); context 1056 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_program_pipe(dc, pipe_ctx, context); context 1060 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_program_all_pipe_in_tree(dc, pipe_ctx->bottom_pipe, context); context 1063 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_program_all_pipe_in_tree(dc, pipe_ctx->next_odm_pipe, context); context 1149 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1157 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c find_top_pipe_for_stream(dc, context, stream); context 1167 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1183 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c lock_all_pipes(dc, context, true); context 1194 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1224 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_program_all_pipe_in_tree(dc, top_pipe_to_program, context); context 1228 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dc->hwss.program_all_writeback_pipes_in_tree(dc, stream, context); context 1232 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1246 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c lock_all_pipes(dc, context, false); context 1274 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1280 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c context, context 1285 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c &context->bw_ctx.bw.dcn.watermarks, context 1292 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1298 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c &context->bw_ctx.bw.dcn.watermarks, context 1304 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c context, context 1310 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1315 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (!dc->res_pool->funcs->validate_bandwidth(dc, context, false)) context 1319 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dc->hwss.prepare_bandwidth(dc, context); context 1323 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1359 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1376 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c mcif_wb->funcs->config_mcif_arb(mcif_wb, &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[wb_info->dwb_pipe_inst]); context 1627 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1695 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context) context 1703 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 1998 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct dc_state *context = dc->current_state; context 2057 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 2094 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i]; context 35 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.h struct dc_state *context, context 51 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.h struct dc_state *context); context 55 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.h struct dc_state *context); context 59 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.h struct dc_state *context); context 1497 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c enum dc_status dcn20_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc_stream_state *stream) context 1500 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(&context->res_ctx, stream); context 2201 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct dc_state *context, context 2213 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2217 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].wb_enabled == false) context 2221 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[dwb_pipe]; context 2223 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.out_format == dwb_scaler_mode_yuv420) { context 2224 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.output_depth == DWB_OUTPUT_PIXEL_DEPTH_8BPC) context 2232 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c wb_arb_params->cli_watermark[k] = get_wm_writeback_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2233 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c wb_arb_params->pstate_watermark[k] = get_wm_writeback_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2235 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c wb_arb_params->time_per_pixel = 16.0 / context->res_ctx.pipe_ctx[i].stream->phy_pix_clk; /* 4 bit fraction, ms */ context 2366 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct dc_state *context, context 2375 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c bool odm_capable = context->bw_ctx.dml.ip.odm_capable; context 2390 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 2408 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c release_dsc(&context->res_ctx, dc->res_pool, &odm_pipe->stream_res.dsc); context 2421 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 2444 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, pipes); context 2447 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, pipes); context 2456 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.ip.odm_capable = 0; context 2458 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt); context 2460 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.ip.odm_capable = odm_capable; context 2464 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (vlevel <= context->bw_ctx.dml.soc.num_states) context 2465 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!dcn20_validate_dsc(dc, context)) { context 2467 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c vlevel = context->bw_ctx.dml.soc.num_states + 1; context 2471 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (vlevel > context->bw_ctx.dml.soc.num_states && odm_capable) context 2472 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt); context 2474 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (vlevel > context->bw_ctx.dml.soc.num_states) context 2477 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if ((context->stream_count > split_threshold && dc->current_state->stream_count <= split_threshold) context 2478 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c || (context->stream_count <= split_threshold && dc->current_state->stream_count > split_threshold)) context 2479 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->commit_hints.full_update_needed = true; context 2487 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 2513 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->stream_count > split_threshold) context 2518 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2520 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c for (; vlevel_unsplit <= context->bw_ctx.dml.soc.num_states; vlevel_unsplit++) context 2521 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.dml.vba.NoOfDPP[vlevel_unsplit][0][pipe_idx] == 1) context 2527 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i]; context 2539 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx] = true; context 2540 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel][pipe_idx] = true; context 2542 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (force_split && context->bw_ctx.dml.vba.NoOfDPP[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx] == 1) context 2543 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx] /= 2; context 2544 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!pipe->top_pipe && !pipe->plane_state && context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) { context 2545 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe); context 2548 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, dc->res_pool, context 2552 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dcn20_build_mapped_resource(dc, context, pipe->stream); context 2570 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (avoid_split && vlevel_unsplit <= context->bw_ctx.dml.soc.num_states && !force_split && !need_split3d) { context 2573 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.maxMpcComb = 0; context 2575 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c need_split = context->bw_ctx.dml.vba.NoOfDPP[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx] == 2; context 2579 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c && context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) context 2585 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe); context 2590 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) { context 2592 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, dc->res_pool, context 2595 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dcn20_build_mapped_resource(dc, context, pipe->stream); context 2598 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, dc->res_pool, context 2609 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (failed_non_odm_dsc && !dcn20_validate_dsc(dc, context)) { context 2610 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states] = context 2629 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct dc *dc, struct dc_state *context, context 2638 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2642 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.vba.RequiredDISPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb]; context 2646 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx]; context 2647 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_idx] == pipe_idx) context 2649 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel][pipe_idx]; context 2655 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_split_from[i]]; context 2656 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_split_from[i]] == pipe_split_from[i]) context 2658 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel][pipe_split_from[i]]; context 2664 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz; context 2665 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[pipe_cnt].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz; context 2678 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, pipes); context 2681 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx, pipes); context 2687 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].dcfclk_mhz; context 2688 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz; context 2693 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[1].dcfclk_mhz; context 2694 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[1].socclk_mhz; context 2696 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2697 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2698 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2699 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2700 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2704 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].dcfclk_mhz; context 2705 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].socclk_mhz; context 2707 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2708 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2709 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2710 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2711 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2715 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].dcfclk_mhz; context 2716 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].socclk_mhz; context 2718 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2719 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2720 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2721 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2722 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2725 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].dcfclk_mhz; context 2726 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz; context 2727 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2728 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2729 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2730 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2731 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000; context 2735 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c struct dc *dc, struct dc_state *context, context 2744 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dc->res_pool->funcs->set_mcif_arb_params(dc, context, pipes, pipe_cnt); context 2746 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dispclk_khz = context->bw_ctx.dml.vba.DISPCLK * 1000; context 2747 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dcfclk_khz = context->bw_ctx.dml.vba.DCFCLK * 1000; context 2748 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000; context 2749 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16; context 2750 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = context->bw_ctx.dml.vba.DCFCLKDeepSleep * 1000; context 2751 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.fclk_khz = 0; context 2752 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.p_state_change_support = context 2753 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] context 2755 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dppclk_khz = 0; context 2763 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2770 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vstartup_start = context->bw_ctx.dml.vba.VStartup[pipe_idx_unsplit]; context 2771 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vupdate_offset = context->bw_ctx.dml.vba.VUpdateOffsetPix[pipe_idx_unsplit]; context 2772 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vupdate_width = context->bw_ctx.dml.vba.VUpdateWidthPix[pipe_idx_unsplit]; context 2773 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst->vready_offset = context->bw_ctx.dml.vba.VReadyOffsetPix[pipe_idx_unsplit]; context 2785 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst_j->vstartup_start = context->bw_ctx.dml.vba.VStartup[pipe_idx_unsplit]; context 2786 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst_j->vupdate_offset = context->bw_ctx.dml.vba.VUpdateOffsetPix[pipe_idx_unsplit]; context 2787 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst_j->vupdate_width = context->bw_ctx.dml.vba.VUpdateWidthPix[pipe_idx_unsplit]; context 2788 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dst_j->vready_offset = context->bw_ctx.dml.vba.VReadyOffsetPix[pipe_idx_unsplit]; context 2799 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2801 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.bw.dcn.clk.dppclk_khz < pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000) context 2802 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.dppclk_khz = pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000; context 2803 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz = context 2806 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->res_ctx.pipe_ctx[i].pipe_dlg_param = pipes[pipe_idx].pipe.dest; context 2810 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz; context 2811 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz; context 2812 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dppclk_mhz * 1000; context 2813 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dispclk_mhz * 1000; context 2816 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c bool cstate_en = context->bw_ctx.dml.vba.PrefetchMode[vlevel][context->bw_ctx.dml.vba.maxMpcComb] != 2; context 2818 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 2821 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.funcs.rq_dlg_get_dlg_reg(&context->bw_ctx.dml, context 2822 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx.pipe_ctx[i].dlg_regs, context 2823 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx.pipe_ctx[i].ttu_regs, context 2828 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.p_state_change_support, context 2831 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.funcs.rq_dlg_get_rq_reg(&context->bw_ctx.dml, context 2832 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c &context->res_ctx.pipe_ctx[i].rq_regs, context 2838 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c static bool dcn20_validate_bandwidth_internal(struct dc *dc, struct dc_state *context, context 2853 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c out = dcn20_fast_validate_bw(dc, context, pipes, &pipe_cnt, pipe_split_from, &vlevel); context 2868 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dcn20_calculate_wm(dc, context, pipes, &pipe_cnt, pipe_split_from, vlevel); context 2869 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel); context 2877 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states])); context 2891 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c bool dcn20_validate_bandwidth(struct dc *dc, struct dc_state *context, context 2897 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c double p_state_latency_us = context->bw_ctx.dml.soc.dram_clock_change_latency_us; context 2900 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c return dcn20_validate_bandwidth_internal(dc, context, true); context 2904 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c voltage_supported = dcn20_validate_bandwidth_internal(dc, context, false); context 2905 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c full_pstate_supported = context->bw_ctx.bw.dcn.clk.p_state_change_support; context 2907 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (context->bw_ctx.dml.soc.dummy_pstate_latency_us == 0 || context 2909 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.p_state_change_support = true; context 2914 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.soc.dram_clock_change_latency_us = context->bw_ctx.dml.soc.dummy_pstate_latency_us; context 2916 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c voltage_supported = dcn20_validate_bandwidth_internal(dc, context, false); context 2917 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c dummy_pstate_supported = context->bw_ctx.bw.dcn.clk.p_state_change_support; context 2920 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.bw.dcn.clk.p_state_change_support = false; context 2928 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c memcpy(&context->bw_ctx.dml, &dc->dml, sizeof(struct display_mode_lib)); context 2929 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c context->bw_ctx.dml.soc.dram_clock_change_latency_us = p_state_latency_us; context 115 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct dc_state *context, context 118 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h bool dcn20_validate_bandwidth(struct dc *dc, struct dc_state *context, bool fast_validate); context 121 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct dc_state *context, context 127 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h struct dc *dc, struct dc_state *context, context 132 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h enum dc_status dcn20_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc_stream_state *stream); context 980 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c struct dc *dc, struct dc_state *context, context 994 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c if (!context->res_ctx.pipe_ctx[i].stream) context 998 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.vba.RequiredDISPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb]; context 1002 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx]; context 1003 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_idx] == pipe_idx) context 1005 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel_req][pipe_idx]; context 1011 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb][pipe_split_from[i]]; context 1012 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_split_from[i]] == pipe_split_from[i]) context 1014 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel_req][pipe_split_from[i]]; context 1024 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->res_ctx, pipes); context 1027 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->res_ctx, pipes); context 1041 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.d, context 1042 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->bw_ctx.dml, pipes, pipe_cnt); context 1046 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.c, context 1047 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->bw_ctx.dml, pipes, pipe_cnt); context 1051 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.b, context 1052 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->bw_ctx.dml, pipes, pipe_cnt); context 1057 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.a, context 1058 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c &context->bw_ctx.dml, pipes, pipe_cnt); context 1062 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c bool dcn21_validate_bandwidth(struct dc *dc, struct dc_state *context, context 1077 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c out = dcn20_fast_validate_bw(dc, context, pipes, &pipe_cnt, pipe_split_from, &vlevel); context 1092 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c dcn21_calculate_wm(dc, context, pipes, &pipe_cnt, pipe_split_from, vlevel); context 1093 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel); context 1101 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states])); context 101 drivers/gpu/drm/amd/display/dc/inc/core_types.h struct dc_state *context, context 111 drivers/gpu/drm/amd/display/dc/inc/core_types.h struct dc_state *context); context 114 drivers/gpu/drm/amd/display/dc/inc/core_types.h struct dc_state *context, context 144 drivers/gpu/drm/amd/display/dc/inc/core_types.h struct dc_state *context, context 624 drivers/gpu/drm/amd/display/dc/inc/dcn_calcs.h struct dc_state *context, context 171 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h struct dc_state *context, context 180 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h struct dc_state *context, context 296 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h struct dc_state *context); context 93 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h void (*init_pipes)(struct dc *dc, struct dc_state *context); context 96 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc *dc, struct dc_state *context); context 99 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc *dc, struct dc_state *context); context 105 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 166 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h void (*enable_accelerated_mode)(struct dc *dc, struct dc_state *context); context 227 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 230 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 235 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 252 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context, context 317 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h void (*update_odm)(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx); context 321 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 325 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 329 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct dc_state *context); context 91 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context, context 98 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context); context 137 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context, context 152 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context, context 161 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context, context 166 drivers/gpu/drm/amd/display/dc/inc/resource.h struct dc_state *context, context 63 drivers/gpu/drm/amd/display/include/logger_interface.h struct dc_state *context); context 41 drivers/gpu/drm/amd/powerplay/inc/ppinterrupt.h void *context; /* Pointer to callback function context */ context 1372 drivers/gpu/drm/drm_gem.c if (entry->context != fence->context) context 575 drivers/gpu/drm/drm_ioc32.c int context; /**< Context handle */ context 598 drivers/gpu/drm/drm_ioc32.c d.context = d32.context; context 47 drivers/gpu/drm/drm_lock.c static int drm_lock_take(struct drm_lock_data *lock_data, unsigned int context); context 60 drivers/gpu/drm/drm_lock.c unsigned int context) context 71 drivers/gpu/drm/drm_lock.c new = context | _DRM_LOCK_HELD | context 79 drivers/gpu/drm/drm_lock.c if (_DRM_LOCKING_CONTEXT(old) == context) { context 81 drivers/gpu/drm/drm_lock.c if (context != DRM_KERNEL_CONTEXT) { context 83 drivers/gpu/drm/drm_lock.c context); context 89 drivers/gpu/drm/drm_lock.c if ((_DRM_LOCKING_CONTEXT(new)) == context && (new & _DRM_LOCK_HELD)) { context 109 drivers/gpu/drm/drm_lock.c unsigned int context) context 117 drivers/gpu/drm/drm_lock.c new = context | _DRM_LOCK_HELD; context 124 drivers/gpu/drm/drm_lock.c unsigned int context) context 144 drivers/gpu/drm/drm_lock.c if (_DRM_LOCK_IS_HELD(old) && _DRM_LOCKING_CONTEXT(old) != context) { context 146 drivers/gpu/drm/drm_lock.c context, _DRM_LOCKING_CONTEXT(old)); context 177 drivers/gpu/drm/drm_lock.c if (lock->context == DRM_KERNEL_CONTEXT) { context 179 drivers/gpu/drm/drm_lock.c task_pid_nr(current), lock->context); context 184 drivers/gpu/drm/drm_lock.c lock->context, task_pid_nr(current), context 201 drivers/gpu/drm/drm_lock.c if (drm_lock_take(&master->lock, lock->context)) { context 222 drivers/gpu/drm/drm_lock.c DRM_DEBUG("%d %s\n", lock->context, context 230 drivers/gpu/drm/drm_lock.c dev->sigdata.context = lock->context; context 238 drivers/gpu/drm/drm_lock.c lock->context); context 265 drivers/gpu/drm/drm_lock.c if (lock->context == DRM_KERNEL_CONTEXT) { context 267 drivers/gpu/drm/drm_lock.c task_pid_nr(current), lock->context); context 271 drivers/gpu/drm/drm_lock.c if (drm_legacy_lock_free(&master->lock, lock->context)) { context 1377 drivers/gpu/drm/drm_syncobj.c if (iter->context != fence->context) { context 64 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c struct etnaviv_iommu_context *context, context 68 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c return etnaviv_iommu_get_suballoc_va(context, mapping, memory_base, context 72 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c void etnaviv_cmdbuf_suballoc_unmap(struct etnaviv_iommu_context *context, context 75 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c etnaviv_iommu_put_suballoc_va(context, mapping); context 31 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.h struct etnaviv_iommu_context *context, context 34 drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.h void etnaviv_cmdbuf_suballoc_unmap(struct etnaviv_iommu_context *context, context 226 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_iommu_context *context) context 231 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (mapping->context == context) context 270 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (mapping->context == mmu_context) context 308 drivers/gpu/drm/etnaviv/etnaviv_gem.c mapping->context = mmu_context; context 533 drivers/gpu/drm/etnaviv/etnaviv_gem.c struct etnaviv_iommu_context *context = mapping->context; context 537 drivers/gpu/drm/etnaviv/etnaviv_gem.c if (context) { context 538 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_iommu_unmap_gem(context, mapping); context 539 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_iommu_context_put(context); context 28 drivers/gpu/drm/etnaviv/etnaviv_gem.h struct etnaviv_iommu_context *context; context 28 drivers/gpu/drm/etnaviv/etnaviv_iommu.c to_v1_context(struct etnaviv_iommu_context *context) context 30 drivers/gpu/drm/etnaviv/etnaviv_iommu.c return container_of(context, struct etnaviv_iommuv1_context, base); context 33 drivers/gpu/drm/etnaviv/etnaviv_iommu.c static void etnaviv_iommuv1_free(struct etnaviv_iommu_context *context) context 35 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); context 37 drivers/gpu/drm/etnaviv/etnaviv_iommu.c drm_mm_takedown(&context->mm); context 39 drivers/gpu/drm/etnaviv/etnaviv_iommu.c dma_free_wc(context->global->dev, PT_SIZE, v1_context->pgtable_cpu, context 42 drivers/gpu/drm/etnaviv/etnaviv_iommu.c context->global->v1.shared_context = NULL; context 47 drivers/gpu/drm/etnaviv/etnaviv_iommu.c static int etnaviv_iommuv1_map(struct etnaviv_iommu_context *context, context 51 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); context 62 drivers/gpu/drm/etnaviv/etnaviv_iommu.c static size_t etnaviv_iommuv1_unmap(struct etnaviv_iommu_context *context, context 65 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); context 71 drivers/gpu/drm/etnaviv/etnaviv_iommu.c v1_context->pgtable_cpu[index] = context->global->bad_page_dma; context 76 drivers/gpu/drm/etnaviv/etnaviv_iommu.c static size_t etnaviv_iommuv1_dump_size(struct etnaviv_iommu_context *context) context 81 drivers/gpu/drm/etnaviv/etnaviv_iommu.c static void etnaviv_iommuv1_dump(struct etnaviv_iommu_context *context, context 84 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); context 90 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommu_context *context) context 92 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommuv1_context *v1_context = to_v1_context(context); context 96 drivers/gpu/drm/etnaviv/etnaviv_iommu.c gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_RA, context->global->memory_base); context 97 drivers/gpu/drm/etnaviv/etnaviv_iommu.c gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_FE, context->global->memory_base); context 98 drivers/gpu/drm/etnaviv/etnaviv_iommu.c gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_TX, context->global->memory_base); context 99 drivers/gpu/drm/etnaviv/etnaviv_iommu.c gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PEZ, context->global->memory_base); context 100 drivers/gpu/drm/etnaviv/etnaviv_iommu.c gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PE, context->global->memory_base); context 126 drivers/gpu/drm/etnaviv/etnaviv_iommu.c struct etnaviv_iommu_context *context; context 136 drivers/gpu/drm/etnaviv/etnaviv_iommu.c context = global->v1.shared_context; context 137 drivers/gpu/drm/etnaviv/etnaviv_iommu.c etnaviv_iommu_context_get(context); context 139 drivers/gpu/drm/etnaviv/etnaviv_iommu.c return context; context 156 drivers/gpu/drm/etnaviv/etnaviv_iommu.c context = &v1_context->base; context 157 drivers/gpu/drm/etnaviv/etnaviv_iommu.c context->global = global; context 158 drivers/gpu/drm/etnaviv/etnaviv_iommu.c kref_init(&context->refcount); context 159 drivers/gpu/drm/etnaviv/etnaviv_iommu.c mutex_init(&context->lock); context 160 drivers/gpu/drm/etnaviv/etnaviv_iommu.c INIT_LIST_HEAD(&context->mappings); context 161 drivers/gpu/drm/etnaviv/etnaviv_iommu.c drm_mm_init(&context->mm, GPU_MEM_START, PT_ENTRIES * SZ_4K); context 162 drivers/gpu/drm/etnaviv/etnaviv_iommu.c context->global->v1.shared_context = context; context 166 drivers/gpu/drm/etnaviv/etnaviv_iommu.c return context; context 42 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c to_v2_context(struct etnaviv_iommu_context *context) context 44 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c return container_of(context, struct etnaviv_iommuv2_context, base); context 47 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c static void etnaviv_iommuv2_free(struct etnaviv_iommu_context *context) context 49 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 52 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c drm_mm_takedown(&context->mm); context 56 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c dma_free_wc(context->global->dev, SZ_4K, context 61 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu, context 64 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c clear_bit(v2_context->id, context->global->v2.pta_alloc); context 92 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c static int etnaviv_iommuv2_map(struct etnaviv_iommu_context *context, context 96 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 121 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c static size_t etnaviv_iommuv2_unmap(struct etnaviv_iommu_context *context, context 124 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *etnaviv_domain = to_v2_context(context); context 138 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c static size_t etnaviv_iommuv2_dump_size(struct etnaviv_iommu_context *context) context 140 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 151 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c static void etnaviv_iommuv2_dump(struct etnaviv_iommu_context *context, void *buf) context 153 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 166 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommu_context *context) context 168 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 177 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c (u32)context->global->bad_page_dma); context 186 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommu_context *context) context 188 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 196 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c lower_32_bits(context->global->v2.pta_dma)); context 198 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c upper_32_bits(context->global->v2.pta_dma)); context 202 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c lower_32_bits(context->global->bad_page_dma)); context 204 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c lower_32_bits(context->global->bad_page_dma)); context 207 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c upper_32_bits(context->global->bad_page_dma)) | context 209 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c upper_32_bits(context->global->bad_page_dma))); context 211 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c context->global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma | context 223 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context) context 225 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 230 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context) context 232 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommuv2_context *v2_context = to_v2_context(context); context 237 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommu_context *context) context 241 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c etnaviv_iommuv2_restore_nonsec(gpu, context); context 244 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c etnaviv_iommuv2_restore_sec(gpu, context); context 265 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c struct etnaviv_iommu_context *context; context 292 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c context = &v2_context->base; context 293 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c context->global = global; context 294 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c kref_init(&context->refcount); context 295 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c mutex_init(&context->lock); context 296 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c INIT_LIST_HEAD(&context->mappings); context 297 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c drm_mm_init(&context->mm, SZ_4K, (u64)SZ_1G * 4 - SZ_4K); context 299 drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c return context; context 16 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static void etnaviv_context_unmap(struct etnaviv_iommu_context *context, context 29 drivers/gpu/drm/etnaviv/etnaviv_mmu.c unmapped_page = context->global->ops->unmap(context, iova, context 39 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static int etnaviv_context_map(struct etnaviv_iommu_context *context, context 55 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = context->global->ops->map(context, iova, paddr, pgsize, context 67 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_context_unmap(context, orig_iova, orig_size - size); context 72 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static int etnaviv_iommu_map(struct etnaviv_iommu_context *context, u32 iova, context 79 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (!context || !sgt) context 88 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_context_map(context, da, pa, bytes, prot); context 103 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_context_unmap(context, da, bytes); context 109 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static void etnaviv_iommu_unmap(struct etnaviv_iommu_context *context, u32 iova, context 119 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_context_unmap(context, da, bytes); context 129 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static void etnaviv_iommu_remove_mapping(struct etnaviv_iommu_context *context, context 134 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_iommu_unmap(context, mapping->vram_node.start, context 139 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static int etnaviv_iommu_find_iova(struct etnaviv_iommu_context *context, context 146 drivers/gpu/drm/etnaviv/etnaviv_mmu.c lockdep_assert_held(&context->lock); context 154 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = drm_mm_insert_node_in_range(&context->mm, node, context 160 drivers/gpu/drm/etnaviv/etnaviv_mmu.c drm_mm_scan_init(&scan, &context->mm, size, 0, 0, mode); context 164 drivers/gpu/drm/etnaviv/etnaviv_mmu.c list_for_each_entry(free, &context->mappings, mmu_node) { context 206 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_iommu_remove_mapping(context, m); context 207 drivers/gpu/drm/etnaviv/etnaviv_mmu.c m->context = NULL; context 223 drivers/gpu/drm/etnaviv/etnaviv_mmu.c static int etnaviv_iommu_insert_exact(struct etnaviv_iommu_context *context, context 226 drivers/gpu/drm/etnaviv/etnaviv_mmu.c return drm_mm_insert_node_in_range(&context->mm, node, size, 0, 0, va, context 230 drivers/gpu/drm/etnaviv/etnaviv_mmu.c int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context, context 240 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_lock(&context->lock); context 243 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (context->global->version == ETNAVIV_IOMMU_V1 && context 250 drivers/gpu/drm/etnaviv/etnaviv_mmu.c list_add_tail(&mapping->mmu_node, &context->mappings); context 259 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_iommu_insert_exact(context, node, context 262 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_iommu_find_iova(context, node, context 268 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_iommu_map(context, node->start, sgt, etnaviv_obj->base.size, context 276 drivers/gpu/drm/etnaviv/etnaviv_mmu.c list_add_tail(&mapping->mmu_node, &context->mappings); context 277 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->flush_seq++; context 279 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 284 drivers/gpu/drm/etnaviv/etnaviv_mmu.c void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context, context 289 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_lock(&context->lock); context 292 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (mapping->vram_node.mm == &context->mm) context 293 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_iommu_remove_mapping(context, mapping); context 296 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->flush_seq++; context 297 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 302 drivers/gpu/drm/etnaviv/etnaviv_mmu.c struct etnaviv_iommu_context *context = context 305 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_cmdbuf_suballoc_unmap(context, &context->cmdbuf_mapping); context 307 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->global->ops->free(context); context 309 drivers/gpu/drm/etnaviv/etnaviv_mmu.c void etnaviv_iommu_context_put(struct etnaviv_iommu_context *context) context 311 drivers/gpu/drm/etnaviv/etnaviv_mmu.c kref_put(&context->refcount, etnaviv_iommu_context_free); context 351 drivers/gpu/drm/etnaviv/etnaviv_mmu.c struct etnaviv_iommu_context *context) context 353 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->global->ops->restore(gpu, context); context 356 drivers/gpu/drm/etnaviv/etnaviv_mmu.c int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *context, context 361 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_lock(&context->lock); context 365 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 375 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (context->global->version == ETNAVIV_IOMMU_V1) { context 381 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_iommu_find_iova(context, node, size); context 383 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 388 drivers/gpu/drm/etnaviv/etnaviv_mmu.c ret = etnaviv_context_map(context, node->start, paddr, size, context 392 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 396 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->flush_seq++; context 399 drivers/gpu/drm/etnaviv/etnaviv_mmu.c list_add_tail(&mapping->mmu_node, &context->mappings); context 402 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 407 drivers/gpu/drm/etnaviv/etnaviv_mmu.c void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *context, context 412 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_lock(&context->lock); context 415 drivers/gpu/drm/etnaviv/etnaviv_mmu.c if (mapping->use > 0 || context->global->version == ETNAVIV_IOMMU_V1) { context 416 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 420 drivers/gpu/drm/etnaviv/etnaviv_mmu.c etnaviv_context_unmap(context, node->start, node->size); context 422 drivers/gpu/drm/etnaviv/etnaviv_mmu.c mutex_unlock(&context->lock); context 425 drivers/gpu/drm/etnaviv/etnaviv_mmu.c size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *context) context 427 drivers/gpu/drm/etnaviv/etnaviv_mmu.c return context->global->ops->dump_size(context); context 430 drivers/gpu/drm/etnaviv/etnaviv_mmu.c void etnaviv_iommu_dump(struct etnaviv_iommu_context *context, void *buf) context 432 drivers/gpu/drm/etnaviv/etnaviv_mmu.c context->global->ops->dump(context, buf); context 25 drivers/gpu/drm/etnaviv/etnaviv_mmu.h int (*map)(struct etnaviv_iommu_context *context, unsigned long iova, context 27 drivers/gpu/drm/etnaviv/etnaviv_mmu.h size_t (*unmap)(struct etnaviv_iommu_context *context, unsigned long iova, context 89 drivers/gpu/drm/etnaviv/etnaviv_mmu.h int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context, context 92 drivers/gpu/drm/etnaviv/etnaviv_mmu.h void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context, context 121 drivers/gpu/drm/etnaviv/etnaviv_mmu.h u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context); context 122 drivers/gpu/drm/etnaviv/etnaviv_mmu.h unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context); context 21 drivers/gpu/drm/exynos/exynos_drm_crtc.h void *context); context 2125 drivers/gpu/drm/i915/display/intel_cdclk.c const char *context) context 2128 drivers/gpu/drm/i915/display/intel_cdclk.c context, cdclk_state->cdclk, cdclk_state->vco, context 44 drivers/gpu/drm/i915/display/intel_cdclk.h const char *context); context 12072 drivers/gpu/drm/i915/display/intel_display.c const char *context) context 12083 drivers/gpu/drm/i915/display/intel_display.c yesno(pipe_config->base.enable), context); context 178 drivers/gpu/drm/i915/display/intel_overlay.c struct intel_context *context; context 229 drivers/gpu/drm/i915/display/intel_overlay.c rq = i915_request_create(overlay->context); context 1361 drivers/gpu/drm/i915/display/intel_overlay.c overlay->context = dev_priv->engine[RCS0]->kernel_context; context 1362 drivers/gpu/drm/i915/display/intel_overlay.c GEM_BUG_ON(!overlay->context); context 1207 drivers/gpu/drm/i915/gem/i915_gem_context.c struct intel_sseu *context) context 1224 drivers/gpu/drm/i915/gem/i915_gem_context.c if (overflows_type(user->slice_mask, context->slice_mask) || context 1225 drivers/gpu/drm/i915/gem/i915_gem_context.c overflows_type(user->subslice_mask, context->subslice_mask) || context 1227 drivers/gpu/drm/i915/gem/i915_gem_context.c context->min_eus_per_subslice) || context 1229 drivers/gpu/drm/i915/gem/i915_gem_context.c context->max_eus_per_subslice)) context 1242 drivers/gpu/drm/i915/gem/i915_gem_context.c context->slice_mask = user->slice_mask; context 1243 drivers/gpu/drm/i915/gem/i915_gem_context.c context->subslice_mask = user->subslice_mask; context 1244 drivers/gpu/drm/i915/gem/i915_gem_context.c context->min_eus_per_subslice = user->min_eus_per_subslice; context 1245 drivers/gpu/drm/i915/gem/i915_gem_context.c context->max_eus_per_subslice = user->max_eus_per_subslice; context 1251 drivers/gpu/drm/i915/gem/i915_gem_context.c unsigned int req_s = hweight8(context->slice_mask); context 1252 drivers/gpu/drm/i915/gem/i915_gem_context.c unsigned int req_ss = hweight8(context->subslice_mask); context 225 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct intel_context *context; /* logical state for the request */ context 703 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_gem_evict_vm(eb->context->vm); context 777 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c vma = i915_vma_instance(obj, eb->context->vm, NULL); context 1176 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c rq = i915_request_create(eb->context); context 2258 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c eb->context = ce; context 2272 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct intel_context *ce = eb->context; context 2643 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c eb.request = i915_request_create(eb.context); context 365 drivers/gpu/drm/i915/gt/intel_breadcrumbs.c rq->fence.context, rq->fence.seqno, context 1153 drivers/gpu/drm/i915/gt/intel_engine_cs.c rq->fence.context, rq->fence.seqno, context 183 drivers/gpu/drm/i915/gt/intel_lrc.c struct intel_context context; context 596 drivers/gpu/drm/i915/gt/intel_lrc.c struct virtual_engine *ve = container_of(ce, typeof(*ve), context); context 719 drivers/gpu/drm/i915/gt/intel_lrc.c ports[0]->fence.context, context 724 drivers/gpu/drm/i915/gt/intel_lrc.c ports[1] ? ports[1]->fence.context : 0, context 902 drivers/gpu/drm/i915/gt/intel_lrc.c inflight = intel_context_inflight(&ve->context); context 917 drivers/gpu/drm/i915/gt/intel_lrc.c if (!list_empty(&ve->context.signal_link)) { context 918 drivers/gpu/drm/i915/gt/intel_lrc.c list_move_tail(&ve->context.signal_link, context 1102 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.context, context 1129 drivers/gpu/drm/i915/gt/intel_lrc.c last->fence.context, context 1185 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(rq->hw_context != &ve->context); context 1201 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, context 1217 drivers/gpu/drm/i915/gt/intel_lrc.c u32 *regs = ve->context.lrc_reg_state; context 1220 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(READ_ONCE(ve->context.inflight)); context 1223 drivers/gpu/drm/i915/gt/intel_lrc.c if (!list_empty(&ve->context.signals)) context 3421 drivers/gpu/drm/i915/gt/intel_lrc.c container_of(kref, typeof(*ve), context.ref); context 3426 drivers/gpu/drm/i915/gt/intel_lrc.c GEM_BUG_ON(ve->context.inflight); context 3445 drivers/gpu/drm/i915/gt/intel_lrc.c if (ve->context.state) context 3446 drivers/gpu/drm/i915/gt/intel_lrc.c __execlists_context_fini(&ve->context); context 3447 drivers/gpu/drm/i915/gt/intel_lrc.c intel_context_fini(&ve->context); context 3475 drivers/gpu/drm/i915/gt/intel_lrc.c virtual_update_register_offsets(ve->context.lrc_reg_state, context 3481 drivers/gpu/drm/i915/gt/intel_lrc.c struct virtual_engine *ve = container_of(ce, typeof(*ve), context); context 3495 drivers/gpu/drm/i915/gt/intel_lrc.c struct virtual_engine *ve = container_of(ce, typeof(*ve), context); context 3506 drivers/gpu/drm/i915/gt/intel_lrc.c struct virtual_engine *ve = container_of(ce, typeof(*ve), context); context 3544 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, rq->fence.seqno, context 3638 drivers/gpu/drm/i915/gt/intel_lrc.c rq->fence.context, context 3768 drivers/gpu/drm/i915/gt/intel_lrc.c intel_context_init(&ve->context, ctx, &ve->base); context 3835 drivers/gpu/drm/i915/gt/intel_lrc.c err = __execlists_context_alloc(&ve->context, siblings[0]); context 3839 drivers/gpu/drm/i915/gt/intel_lrc.c __set_bit(CONTEXT_ALLOC_BIT, &ve->context.flags); context 3841 drivers/gpu/drm/i915/gt/intel_lrc.c return &ve->context; context 3844 drivers/gpu/drm/i915/gt/intel_lrc.c intel_context_put(&ve->context); context 121 drivers/gpu/drm/i915/gt/intel_reset.c rq->fence.context, context 719 drivers/gpu/drm/i915/gt/intel_reset.c engine->name, request->fence.context, request->fence.seqno); context 56 drivers/gpu/drm/i915/gt/intel_timeline.h u64 context, u32 seqno) context 58 drivers/gpu/drm/i915/gt/intel_timeline.h return i915_syncmap_set(&tl->sync, context, seqno); context 64 drivers/gpu/drm/i915/gt/intel_timeline.h return __intel_timeline_sync_set(tl, fence->context, fence->seqno); context 68 drivers/gpu/drm/i915/gt/intel_timeline.h u64 context, u32 seqno) context 70 drivers/gpu/drm/i915/gt/intel_timeline.h return i915_syncmap_is_later(&tl->sync, context, seqno); context 76 drivers/gpu/drm/i915/gt/intel_timeline.h return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno); context 111 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return hws->node.start + offset_in_page(sizeof(u32)*rq->fence.context); context 272 drivers/gpu/drm/i915/gt/selftest_hangcheck.c return READ_ONCE(h->seqno[rq->fence.context % (PAGE_SIZE/sizeof(u32))]); context 699 drivers/gpu/drm/i915/gt/selftest_hangcheck.c rq->fence.context, context 1754 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.context, context 1759 drivers/gpu/drm/i915/gt/selftest_lrc.c request[nc]->fence.context, context 1902 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.context, context 1907 drivers/gpu/drm/i915/gt/selftest_lrc.c request[n]->fence.context, context 11 drivers/gpu/drm/i915/gt/selftests/mock_timeline.c void mock_timeline_init(struct intel_timeline *timeline, u64 context) context 14 drivers/gpu/drm/i915/gt/selftests/mock_timeline.c timeline->fence_context = context; context 12 drivers/gpu/drm/i915/gt/selftests/mock_timeline.h void mock_timeline_init(struct intel_timeline *timeline, u64 context); context 459 drivers/gpu/drm/i915/i915_gpu_error.c prefix, erq->pid, erq->context, erq->seqno, context 547 drivers/gpu/drm/i915/i915_gpu_error.c error_print_context(m, " Active context: ", &ee->context); context 687 drivers/gpu/drm/i915/i915_gpu_error.c ee->context.comm, context 688 drivers/gpu/drm/i915/i915_gpu_error.c ee->context.pid); context 746 drivers/gpu/drm/i915/i915_gpu_error.c if (ee->context.pid) context 748 drivers/gpu/drm/i915/i915_gpu_error.c ee->context.comm, context 749 drivers/gpu/drm/i915/i915_gpu_error.c ee->context.pid); context 1175 drivers/gpu/drm/i915/i915_gpu_error.c erq->context = request->fence.context; context 1388 drivers/gpu/drm/i915/i915_gpu_error.c error->simulated |= record_context(&ee->context, request); context 1610 drivers/gpu/drm/i915/i915_gpu_error.c error->engine->context.comm, context 1611 drivers/gpu/drm/i915/i915_gpu_error.c error->engine->context.pid); context 125 drivers/gpu/drm/i915/i915_gpu_error.h } context; context 146 drivers/gpu/drm/i915/i915_gpu_error.h u32 context; context 228 drivers/gpu/drm/i915/i915_request.c rq->fence.context, rq->fence.seqno, context 324 drivers/gpu/drm/i915/i915_request.c rq->fence.context, rq->fence.seqno, context 387 drivers/gpu/drm/i915/i915_request.c request->fence.context, request->fence.seqno, context 479 drivers/gpu/drm/i915/i915_request.c request->fence.context, request->fence.seqno, context 967 drivers/gpu/drm/i915/i915_request.c if (fence->context == rq->fence.context) context 971 drivers/gpu/drm/i915/i915_request.c if (fence->context && context 985 drivers/gpu/drm/i915/i915_request.c if (fence->context) context 1194 drivers/gpu/drm/i915/i915_request.c engine->name, rq->fence.context, rq->fence.seqno); context 681 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; context 710 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; context 755 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; context 786 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; context 851 drivers/gpu/drm/i915/i915_trace.h __entry->ctx = rq->fence.context; context 385 drivers/gpu/drm/i915/selftests/i915_request.c rq->fence.context, rq->fence.seqno, context 401 drivers/gpu/drm/i915/selftests/i915_request.c rq->fence.context, rq->fence.seqno); context 163 drivers/gpu/drm/i915/selftests/i915_syncmap.c static int check_one(struct i915_syncmap **sync, u64 context, u32 seqno) context 167 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(sync, context, seqno); context 173 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, (*sync)->height, (*sync)->prefix); context 179 drivers/gpu/drm/i915/selftests/i915_syncmap.c context); context 193 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (!i915_syncmap_is_later(sync, context, seqno)) { context 195 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, seqno); context 217 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = i915_prandom_u64_state(&prng); context 225 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = check_one(&sync, context, context 238 drivers/gpu/drm/i915/selftests/i915_syncmap.c static int check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno) context 242 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(sync, context, seqno); context 248 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, (*sync)->height, (*sync)->prefix); context 254 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, (*sync)->bitmap, hweight32((*sync)->bitmap)); context 262 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (!i915_syncmap_is_later(sync, context, seqno)) { context 264 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, seqno); context 297 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = BIT_ULL(order); context 300 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = check_leaf(&sync, context, 0); context 321 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (__sync_child(join)[__sync_branch_idx(join, context)] != sync) { context 346 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = step * BIT_ULL(order); context 348 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(&sync, context, 0); context 354 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, order, step, sync->height, sync->prefix); context 363 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = step * BIT_ULL(order); context 365 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (!i915_syncmap_is_later(&sync, context, 0)) { context 367 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, order, step); context 373 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (i915_syncmap_is_later(&sync, context + idx, 0)) { context 375 drivers/gpu/drm/i915/selftests/i915_syncmap.c context + idx, order, step); context 385 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = step * BIT_ULL(order); context 387 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (!i915_syncmap_is_later(&sync, context, 0)) { context 389 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, order, step); context 414 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = i915_prandom_u64_state(&prng) & ~MASK; context 417 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (i915_syncmap_is_later(&sync, context, 0)) /* Skip repeats */ context 421 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(&sync, context + idx, 0); context 427 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, sync->height, sync->prefix); context 434 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, idx, context 469 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = idx * BIT_ULL(order) + idx; context 471 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(&sync, context, 0); context 477 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, order, idx, context 562 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = i915_prandom_u64_state(&prng); context 564 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(&sync, context, 0); context 582 drivers/gpu/drm/i915/selftests/i915_syncmap.c u64 context = i915_prandom_u64_state(&ctx); context 584 drivers/gpu/drm/i915/selftests/i915_syncmap.c if (i915_syncmap_is_later(&sync, context, seqno) != expect) { context 586 drivers/gpu/drm/i915/selftests/i915_syncmap.c context, last_seqno, seqno, expect); context 591 drivers/gpu/drm/i915/selftests/i915_syncmap.c err = i915_syncmap_set(&sync, context, seqno); context 71 drivers/gpu/drm/i915/selftests/igt_spinner.c return hws->node.start + seqno_offset(rq->fence.context); context 173 drivers/gpu/drm/i915/selftests/igt_spinner.c u32 *seqno = spin->seqno + seqno_offset(rq->fence.context); context 21 drivers/gpu/drm/lima/lima_ctx.c err = lima_sched_context_init(dev->pipe + i, ctx->context + i, &ctx->guilty); context 34 drivers/gpu/drm/lima/lima_ctx.c lima_sched_context_fini(dev->pipe + i, ctx->context + i); context 45 drivers/gpu/drm/lima/lima_ctx.c lima_sched_context_fini(ctx->dev->pipe + i, ctx->context + i); context 14 drivers/gpu/drm/lima/lima_ctx.h struct lima_sched_context context[lima_pipe_num]; context 276 drivers/gpu/drm/lima/lima_gem.c submit->task, submit->ctx->context + submit->pipe, context 295 drivers/gpu/drm/lima/lima_gem.c submit->ctx->context + submit->pipe, submit->task); context 109 drivers/gpu/drm/lima/lima_sched.c struct lima_sched_context *context, context 122 drivers/gpu/drm/lima/lima_sched.c err = drm_sched_job_init(&task->base, &context->base, vm); context 159 drivers/gpu/drm/lima/lima_sched.c struct lima_sched_context *context, context 164 drivers/gpu/drm/lima/lima_sched.c return drm_sched_entity_init(&context->base, &rq, 1, guilty); context 168 drivers/gpu/drm/lima/lima_sched.c struct lima_sched_context *context) context 170 drivers/gpu/drm/lima/lima_sched.c drm_sched_entity_fini(&context->base); context 173 drivers/gpu/drm/lima/lima_sched.c struct dma_fence *lima_sched_context_queue_task(struct lima_sched_context *context, context 178 drivers/gpu/drm/lima/lima_sched.c drm_sched_entity_push_job(&task->base, &context->base); context 76 drivers/gpu/drm/lima/lima_sched.h struct lima_sched_context *context, context 82 drivers/gpu/drm/lima/lima_sched.h struct lima_sched_context *context, context 85 drivers/gpu/drm/lima/lima_sched.h struct lima_sched_context *context); context 86 drivers/gpu/drm/lima/lima_sched.h struct dma_fence *lima_sched_context_queue_task(struct lima_sched_context *context, context 739 drivers/gpu/drm/meson/meson_dw_hdmi.c static int meson_dw_hdmi_reg_read(void *context, unsigned int reg, context 742 drivers/gpu/drm/meson/meson_dw_hdmi.c struct meson_dw_hdmi *dw_hdmi = context; context 750 drivers/gpu/drm/meson/meson_dw_hdmi.c static int meson_dw_hdmi_reg_write(void *context, unsigned int reg, context 753 drivers/gpu/drm/meson/meson_dw_hdmi.c struct meson_dw_hdmi *dw_hdmi = context; context 24 drivers/gpu/drm/msm/msm_fence.c fctx->context = dma_fence_context_alloc(1); context 135 drivers/gpu/drm/msm/msm_fence.c fctx->context, ++fctx->last_fence); context 15 drivers/gpu/drm/msm/msm_fence.h unsigned context; context 713 drivers/gpu/drm/msm/msm_gem.c if (fence && (fence->context != fctx->context)) { context 726 drivers/gpu/drm/msm/msm_gem.c if (fence->context != fctx->context) { context 455 drivers/gpu/drm/msm/msm_gem_submit.c if (!dma_fence_match_context(in_fence, ring->fctx->context)) context 26 drivers/gpu/drm/nouveau/include/nvkm/core/ramht.h int chid, int addr, u32 handle, u32 context); context 82 drivers/gpu/drm/nouveau/nouveau_fence.c if (fence->context < drm->chan.context_base || context 83 drivers/gpu/drm/nouveau/nouveau_fence.c fence->context >= drm->chan.context_base + drm->chan.nr) context 177 drivers/gpu/drm/nouveau/nouveau_fence.c fctx->context = chan->drm->chan.context_base + chan->chid; context 212 drivers/gpu/drm/nouveau/nouveau_fence.c &fctx->lock, fctx->context, ++fctx->sequence); context 215 drivers/gpu/drm/nouveau/nouveau_fence.c &fctx->lock, fctx->context, ++fctx->sequence); context 44 drivers/gpu/drm/nouveau/nouveau_fence.h u32 context; context 61 drivers/gpu/drm/nouveau/nvkm/core/ramht.c int chid, int addr, u32 handle, u32 context) context 88 drivers/gpu/drm/nouveau/nvkm/core/ramht.c if (addr < 0) context |= inst << -addr; context 89 drivers/gpu/drm/nouveau/nvkm/core/ramht.c else context |= inst >> addr; context 94 drivers/gpu/drm/nouveau/nvkm/core/ramht.c nvkm_wo32(ramht->gpuobj, (co << 3) + 4, context); context 108 drivers/gpu/drm/nouveau/nvkm/core/ramht.c int chid, int addr, u32 handle, u32 context) context 119 drivers/gpu/drm/nouveau/nvkm/core/ramht.c addr, handle, context); context 179 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c u32 context; context 183 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_SW : context = 0x00000000; break; context 184 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_GR : context = 0x00100000; break; context 186 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_MSPPP : context = 0x00200000; break; context 188 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_CE0 : context = 0x00300000; break; context 190 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_MSPDEC: context = 0x00400000; break; context 193 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_VIC : context = 0x00500000; break; context 195 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c case NVKM_ENGINE_MSVLD : context = 0x00600000; break; context 201 drivers/gpu/drm/nouveau/nvkm/engine/fifo/chang84.c return nvkm_ramht_insert(chan->ramht, object, 0, 4, handle, context); context 163 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c u32 context; context 167 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c case NVKM_ENGINE_SW : context = 0x00000000; break; context 168 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c case NVKM_ENGINE_GR : context = 0x00100000; break; context 169 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c case NVKM_ENGINE_MPEG : context = 0x00200000; break; context 175 drivers/gpu/drm/nouveau/nvkm/engine/fifo/channv50.c return nvkm_ramht_insert(chan->ramht, object, 0, 4, handle, context); context 52 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c u32 context = 0x80000000 | chan->base.chid << 24; context 58 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c case NVKM_ENGINE_SW : context |= 0x00000000; break; context 59 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c case NVKM_ENGINE_GR : context |= 0x00010000; break; context 60 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c case NVKM_ENGINE_MPEG : context |= 0x00020000; break; context 68 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv04.c handle, context); context 148 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c u32 context = chan->base.chid << 23; context 154 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c case NVKM_ENGINE_SW : context |= 0x00000000; break; context 155 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c case NVKM_ENGINE_GR : context |= 0x00100000; break; context 156 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c case NVKM_ENGINE_MPEG : context |= 0x00200000; break; context 164 drivers/gpu/drm/nouveau/nvkm/engine/fifo/dmanv40.c handle, context); context 39 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c struct context *ctx = info; context 53 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c struct context *ctx = info; context 98 drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c struct context ctx = { .outp = (u32 *)(bios->data + pdcb) }; context 290 drivers/gpu/drm/panel/panel-ilitek-ili9322.c static int ili9322_regmap_spi_write(void *context, const void *data, context 293 drivers/gpu/drm/panel/panel-ilitek-ili9322.c struct device *dev = context; context 305 drivers/gpu/drm/panel/panel-ilitek-ili9322.c static int ili9322_regmap_spi_read(void *context, const void *reg, context 308 drivers/gpu/drm/panel/panel-ilitek-ili9322.c struct device *dev = context; context 100 drivers/gpu/drm/qxl/qxl_release.c fence->context & ~0xf0000000, sc); context 90 drivers/gpu/drm/scheduler/gpu_scheduler_trace.h __entry->ctx = fence->context; context 403 drivers/gpu/drm/scheduler/sched_entity.c if (fence->context == entity->fence_context || context 404 drivers/gpu/drm/scheduler/sched_entity.c fence->context == entity->fence_context + 1) { context 176 drivers/gpu/drm/scheduler/sched_main.c if (fence->context == entity->fence_context) context 336 drivers/gpu/drm/scheduler/sched_main.c if (bad->s_fence->scheduled.context == context 489 drivers/gpu/drm/scheduler/sched_main.c guilty_context = s_job->s_fence->scheduled.context; context 492 drivers/gpu/drm/scheduler/sched_main.c if (found_guilty && s_job->s_fence->scheduled.context == guilty_context) context 266 drivers/gpu/drm/tegra/drm.c static void tegra_drm_context_free(struct tegra_drm_context *context) context 268 drivers/gpu/drm/tegra/drm.c context->client->ops->close_channel(context); context 269 drivers/gpu/drm/tegra/drm.c kfree(context); context 325 drivers/gpu/drm/tegra/drm.c int tegra_drm_submit(struct tegra_drm_context *context, context 329 drivers/gpu/drm/tegra/drm.c struct host1x_client *client = &context->client->base; context 355 drivers/gpu/drm/tegra/drm.c job = host1x_job_alloc(context->channel, args->num_cmdbufs, context 472 drivers/gpu/drm/tegra/drm.c job->is_addr_reg = context->client->ops->is_addr_reg; context 473 drivers/gpu/drm/tegra/drm.c job->is_valid_class = context->client->ops->is_valid_class; context 481 drivers/gpu/drm/tegra/drm.c err = host1x_job_pin(job, context->client->base.dev); context 587 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context) context 591 drivers/gpu/drm/tegra/drm.c err = client->ops->open_channel(client, context); context 595 drivers/gpu/drm/tegra/drm.c err = idr_alloc(&fpriv->contexts, context, 1, 0, GFP_KERNEL); context 597 drivers/gpu/drm/tegra/drm.c client->ops->close_channel(context); context 601 drivers/gpu/drm/tegra/drm.c context->client = client; context 602 drivers/gpu/drm/tegra/drm.c context->id = err; context 613 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context; context 617 drivers/gpu/drm/tegra/drm.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 618 drivers/gpu/drm/tegra/drm.c if (!context) context 625 drivers/gpu/drm/tegra/drm.c err = tegra_client_open(fpriv, client, context); context 629 drivers/gpu/drm/tegra/drm.c args->context = context->id; context 634 drivers/gpu/drm/tegra/drm.c kfree(context); context 645 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context; context 650 drivers/gpu/drm/tegra/drm.c context = idr_find(&fpriv->contexts, args->context); context 651 drivers/gpu/drm/tegra/drm.c if (!context) { context 656 drivers/gpu/drm/tegra/drm.c idr_remove(&fpriv->contexts, context->id); context 657 drivers/gpu/drm/tegra/drm.c tegra_drm_context_free(context); context 669 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context; context 675 drivers/gpu/drm/tegra/drm.c context = idr_find(&fpriv->contexts, args->context); context 676 drivers/gpu/drm/tegra/drm.c if (!context) { context 681 drivers/gpu/drm/tegra/drm.c if (args->index >= context->client->base.num_syncpts) { context 686 drivers/gpu/drm/tegra/drm.c syncpt = context->client->base.syncpts[args->index]; context 699 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context; context 704 drivers/gpu/drm/tegra/drm.c context = idr_find(&fpriv->contexts, args->context); context 705 drivers/gpu/drm/tegra/drm.c if (!context) { context 710 drivers/gpu/drm/tegra/drm.c err = context->client->ops->submit(context, args, drm, file); context 722 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context; context 729 drivers/gpu/drm/tegra/drm.c context = idr_find(&fpriv->contexts, args->context); context 730 drivers/gpu/drm/tegra/drm.c if (!context) { context 735 drivers/gpu/drm/tegra/drm.c if (args->syncpt >= context->client->base.num_syncpts) { context 740 drivers/gpu/drm/tegra/drm.c syncpt = context->client->base.syncpts[args->syncpt]; context 943 drivers/gpu/drm/tegra/drm.c struct tegra_drm_context *context = p; context 945 drivers/gpu/drm/tegra/drm.c tegra_drm_context_free(context); context 71 drivers/gpu/drm/tegra/drm.h struct tegra_drm_context *context); context 72 drivers/gpu/drm/tegra/drm.h void (*close_channel)(struct tegra_drm_context *context); context 75 drivers/gpu/drm/tegra/drm.h int (*submit)(struct tegra_drm_context *context, context 80 drivers/gpu/drm/tegra/drm.h int tegra_drm_submit(struct tegra_drm_context *context, context 103 drivers/gpu/drm/tegra/gr2d.c struct tegra_drm_context *context) context 107 drivers/gpu/drm/tegra/gr2d.c context->channel = host1x_channel_get(gr2d->channel); context 108 drivers/gpu/drm/tegra/gr2d.c if (!context->channel) context 114 drivers/gpu/drm/tegra/gr2d.c static void gr2d_close_channel(struct tegra_drm_context *context) context 116 drivers/gpu/drm/tegra/gr2d.c host1x_channel_put(context->channel); context 111 drivers/gpu/drm/tegra/gr3d.c struct tegra_drm_context *context) context 115 drivers/gpu/drm/tegra/gr3d.c context->channel = host1x_channel_get(gr3d->channel); context 116 drivers/gpu/drm/tegra/gr3d.c if (!context->channel) context 122 drivers/gpu/drm/tegra/gr3d.c static void gr3d_close_channel(struct tegra_drm_context *context) context 124 drivers/gpu/drm/tegra/gr3d.c host1x_channel_put(context->channel); context 284 drivers/gpu/drm/tegra/vic.c struct tegra_drm_context *context) context 301 drivers/gpu/drm/tegra/vic.c context->channel = host1x_channel_get(vic->channel); context 302 drivers/gpu/drm/tegra/vic.c if (!context->channel) { context 314 drivers/gpu/drm/tegra/vic.c static void vic_close_channel(struct tegra_drm_context *context) context 316 drivers/gpu/drm/tegra/vic.c struct vic *vic = to_vic(context->client); context 318 drivers/gpu/drm/tegra/vic.c host1x_channel_put(context->channel); context 132 drivers/gpu/drm/udl/udl_main.c struct urb_node *unode = urb->context; context 202 drivers/gpu/drm/via/via_drv.h extern int via_init_context(struct drm_device *dev, int context); context 203 drivers/gpu/drm/via/via_drv.h extern int via_final_context(struct drm_device *dev, int context); context 220 drivers/gpu/drm/via/via_drv.h extern void via_release_futex(drm_via_private_t *dev_priv, int context); context 79 drivers/gpu/drm/via/via_mm.c int via_final_context(struct drm_device *dev, int context) context 83 drivers/gpu/drm/via/via_mm.c via_release_futex(dev_priv, context); context 49 drivers/gpu/drm/via/via_video.c void via_release_futex(drm_via_private_t *dev_priv, int context) context 59 drivers/gpu/drm/via/via_video.c if ((_DRM_LOCKING_CONTEXT(*lock) == context)) { context 96 drivers/gpu/drm/virtio/virtgpu_drv.h uint64_t context; context 83 drivers/gpu/drm/virtio/virtgpu_fence.c dma_fence_init(&fence->f, &virtio_fence_ops, &drv->lock, drv->context, 0); context 146 drivers/gpu/drm/virtio/virtgpu_ioctl.c if (!dma_fence_match_context(in_fence, vgdev->fence_drv.context)) context 143 drivers/gpu/drm/virtio/virtgpu_kms.c vgdev->fence_drv.context = dma_fence_context_alloc(1); context 537 drivers/gpu/drm/vmwgfx/device_include/svga_reg.h SVGACBContext context; /* Must be zero */ context 555 drivers/gpu/drm/vmwgfx/device_include/svga_reg.h SVGACBContext context; /* Must be zero */ context 572 drivers/gpu/drm/vmwgfx/device_include/svga_reg.h SVGACBContext context; context 600 drivers/gpu/drm/vmwgfx/device_include/svga_reg.h SVGACBContext context; context 612 drivers/gpu/drm/vmwgfx/device_include/svga_reg.h SVGACBContext context; context 196 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context, context 198 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context); context 1167 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context) context 1175 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c cmd.body.context = SVGA_CB_CONTEXT_0 + context; context 1191 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context, context 1201 drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c cmd.body.context = SVGA_CB_CONTEXT_0 + context; context 425 drivers/gpu/drm/vmwgfx/vmwgfx_drv.h struct vmw_resource *context; context 173 drivers/greybus/es2.c struct usb_ctrlrequest *dr = urb->context; context 818 drivers/greybus/es2.c struct gb_host_device *hd = urb->context; context 861 drivers/greybus/es2.c struct gb_message *message = urb->context; context 1031 drivers/greybus/es2.c struct es2_ap_dev *es2 = urb->context; context 315 drivers/hid/hid-hyperv.c static void mousevsc_on_channel_callback(void *context) context 317 drivers/hid/hid-hyperv.c struct hv_device *device = context; context 98 drivers/hid/hid-u2fzero.c struct u2fzero_transfer_context *ctx = urb->context; context 116 drivers/hid/hid-u2fzero.c dev->urb->context = &ctx; context 273 drivers/hid/usbhid/hid-core.c struct hid_device *hid = urb->context; context 284 drivers/hid/usbhid/hid-core.c hid_input_report(urb->context, HID_INPUT_REPORT, context 431 drivers/hid/usbhid/hid-core.c struct hid_device *hid = urb->context; context 478 drivers/hid/usbhid/hid-core.c struct hid_device *hid = urb->context; context 486 drivers/hid/usbhid/hid-core.c hid_input_report(urb->context, context 102 drivers/hid/usbhid/usbkbd.c struct usb_kbd *kbd = urb->context; context 193 drivers/hid/usbhid/usbkbd.c struct usb_kbd *kbd = urb->context; context 51 drivers/hid/usbhid/usbmouse.c struct usb_mouse *mouse = urb->context; context 211 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 218 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 283 drivers/hsi/clients/cmt_speech.c msg->context = hi; context 296 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 330 drivers/hsi/clients/cmt_speech.c rxmsg->context = hi; context 339 drivers/hsi/clients/cmt_speech.c txmsg->context = hi; context 428 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 460 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 511 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 575 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 602 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 678 drivers/hsi/clients/cmt_speech.c struct cs_hsi_iface *hi = msg->context; context 193 drivers/hsi/clients/ssi_protocol.c skb = msg->context; context 194 drivers/hsi/clients/ssi_protocol.c pr_debug("free data: msg %p context %p skb %p\n", msg, msg->context, context 212 drivers/hsi/clients/ssi_protocol.c msg->context = skb; context 555 drivers/hsi/clients/ssi_protocol.c ssip_free_data(msg->context); context 565 drivers/hsi/clients/ssi_protocol.c data = msg->context; context 590 drivers/hsi/clients/ssi_protocol.c skb = dmsg->context; context 591 drivers/hsi/clients/ssi_protocol.c msg->context = dmsg; context 653 drivers/hsi/clients/ssi_protocol.c skb = msg->context; context 446 drivers/hsi/controllers/omap_ssi_port.c msg->ttype, msg->context); context 116 drivers/hv/channel.c void (*onchannelcallback)(void *context), void *context) context 140 drivers/hv/channel.c newchannel->channel_callback_context = context; context 242 drivers/hv/channel.c void (*onchannelcallback)(void *context), void *context) context 244 drivers/hv/channel.c return __vmbus_open(newchannel, NULL, 0, onchannelcallback, context); context 254 drivers/hv/channel.c void (*onchannelcallback)(void *context), void *context) context 264 drivers/hv/channel.c onchannelcallback, context); context 1364 drivers/hv/channel_mgmt.c void vmbus_onmessage(void *context) context 1366 drivers/hv/channel_mgmt.c struct hv_message *msg = context; context 1380 drivers/hv/hv_balloon.c static void balloon_onchannelcallback(void *context); context 1472 drivers/hv/hv_balloon.c static void balloon_onchannelcallback(void *context) context 1474 drivers/hv/hv_balloon.c struct hv_device *dev = context; context 225 drivers/hv/hv_fcopy.c void hv_fcopy_onchannelcallback(void *context) context 227 drivers/hv/hv_fcopy.c struct vmbus_channel *channel = context; context 634 drivers/hv/hv_kvp.c void hv_kvp_onchannelcallback(void *context) context 636 drivers/hv/hv_kvp.c struct vmbus_channel *channel = context; context 287 drivers/hv/hv_snapshot.c void hv_vss_onchannelcallback(void *context) context 289 drivers/hv/hv_snapshot.c struct vmbus_channel *channel = context; context 78 drivers/hv/hv_util.c static void shutdown_onchannelcallback(void *context); context 86 drivers/hv/hv_util.c static void timesync_onchannelcallback(void *context); context 93 drivers/hv/hv_util.c static void heartbeat_onchannelcallback(void *context); context 126 drivers/hv/hv_util.c static void shutdown_onchannelcallback(void *context) context 128 drivers/hv/hv_util.c struct vmbus_channel *channel = context; context 276 drivers/hv/hv_util.c static void timesync_onchannelcallback(void *context) context 278 drivers/hv/hv_util.c struct vmbus_channel *channel = context; context 337 drivers/hv/hv_util.c static void heartbeat_onchannelcallback(void *context) context 339 drivers/hv/hv_util.c struct vmbus_channel *channel = context; context 355 drivers/hv/hyperv_vmbus.h void hv_kvp_onchannelcallback(void *context); context 359 drivers/hv/hyperv_vmbus.h void hv_vss_onchannelcallback(void *context); context 363 drivers/hv/hyperv_vmbus.h void hv_fcopy_onchannelcallback(void *context); context 346 drivers/hwmon/aspeed-pwm-tacho.c static int regmap_aspeed_pwm_tacho_reg_write(void *context, unsigned int reg, context 349 drivers/hwmon/aspeed-pwm-tacho.c void __iomem *regs = (void __iomem *)context; context 355 drivers/hwmon/aspeed-pwm-tacho.c static int regmap_aspeed_pwm_tacho_reg_read(void *context, unsigned int reg, context 358 drivers/hwmon/aspeed-pwm-tacho.c void __iomem *regs = (void __iomem *)context; context 1184 drivers/i2c/busses/i2c-i801.c void *context, context 348 drivers/i2c/busses/i2c-scmi.c void *context, void **return_value) context 352 drivers/i2c/busses/i2c-scmi.c struct acpi_smbus_cmi *smbus_cmi = context; context 64 drivers/iio/chemical/bme680_spi.c static int bme680_regmap_spi_write(void *context, const void *data, context 67 drivers/iio/chemical/bme680_spi.c struct bme680_spi_bus_context *ctx = context; context 87 drivers/iio/chemical/bme680_spi.c static int bme680_regmap_spi_read(void *context, const void *reg, context 90 drivers/iio/chemical/bme680_spi.c struct bme680_spi_bus_context *ctx = context; context 14 drivers/iio/pressure/bmp280-spi.c static int bmp280_regmap_spi_write(void *context, const void *data, context 17 drivers/iio/pressure/bmp280-spi.c struct device *dev = context; context 31 drivers/iio/pressure/bmp280-spi.c static int bmp280_regmap_spi_read(void *context, const void *reg, context 34 drivers/iio/pressure/bmp280-spi.c struct device *dev = context; context 61 drivers/infiniband/core/addr.c void *context; context 63 drivers/infiniband/core/addr.c struct rdma_dev_addr *addr, void *context); context 644 drivers/infiniband/core/addr.c req->addr, req->context); context 664 drivers/infiniband/core/addr.c struct rdma_dev_addr *addr, void *context), context 665 drivers/infiniband/core/addr.c bool resolve_by_gid_attr, void *context) context 692 drivers/infiniband/core/addr.c req->context = context; context 803 drivers/infiniband/core/addr.c struct rdma_dev_addr *addr, void *context) context 805 drivers/infiniband/core/addr.c ((struct resolve_cb_context *)context)->status = status; context 806 drivers/infiniband/core/addr.c complete(&((struct resolve_cb_context *)context)->comp); context 89 drivers/infiniband/core/cache.c void *context; context 358 drivers/infiniband/core/cache.c ret = attr->device->ops.add_gid(attr, &entry->context); context 407 drivers/infiniband/core/cache.c ib_dev->ops.del_gid(&entry->attr, &entry->context); context 736 drivers/infiniband/core/cache.c void *context) context 758 drivers/infiniband/core/cache.c if (filter(gid, &entry->attr, context)) { context 369 drivers/infiniband/core/cm.c m->context[0] = cm_id_priv; context 405 drivers/infiniband/core/cm.c if (msg->context[0]) context 406 drivers/infiniband/core/cm.c cm_deref_id(msg->context[0]); context 850 drivers/infiniband/core/cm.c void *context) context 863 drivers/infiniband/core/cm.c cm_id_priv->id.context = context; context 1222 drivers/infiniband/core/cm.c if (cm_id->cm_handler != cm_handler || cm_id->context) { context 1466 drivers/infiniband/core/cm.c cm_id_priv->msg->context[1] = (void *) (unsigned long) IB_CM_REQ_SENT; context 1972 drivers/infiniband/core/cm.c cm_id_priv->id.context = listen_cm_id_priv->id.context; context 2120 drivers/infiniband/core/cm.c msg->context[1] = (void *) (unsigned long) IB_CM_REP_SENT; context 2504 drivers/infiniband/core/cm.c msg->context[1] = (void *) (unsigned long) IB_CM_DREQ_SENT; context 3061 drivers/infiniband/core/cm.c cm_id_priv->msg->context[1] = (void *) (unsigned long) context 3160 drivers/infiniband/core/cm.c msg->context[1] = (void *) (unsigned long) IB_CM_ESTABLISHED; context 3510 drivers/infiniband/core/cm.c msg->context[1] = (void *) (unsigned long) IB_CM_SIDR_REQ_SENT; context 3600 drivers/infiniband/core/cm.c cm_id_priv->id.context = cur_cm_id_priv->id.context; context 3736 drivers/infiniband/core/cm.c cm_id_priv = msg->context[0]; context 3740 drivers/infiniband/core/cm.c state = (enum ib_cm_state) (unsigned long) msg->context[1]; context 3789 drivers/infiniband/core/cm.c port = mad_agent->context; context 3798 drivers/infiniband/core/cm.c if (!msg->context[0] && (attr_index != CM_REJ_COUNTER)) context 3814 drivers/infiniband/core/cm.c if (msg->context[0] && msg->context[1]) context 3993 drivers/infiniband/core/cm.c struct cm_port *port = mad_agent->context; context 367 drivers/infiniband/core/cma.c void *context; context 877 drivers/infiniband/core/cma.c void *context, enum rdma_ucm_port_space ps, context 889 drivers/infiniband/core/cma.c id_priv->id.context = context; context 1869 drivers/infiniband/core/cma.c cma_deref_id(id_priv->id.context); context 1923 drivers/infiniband/core/cma.c struct rdma_id_private *id_priv = cm_id->context; context 2019 drivers/infiniband/core/cma.c listen_id->event_handler, listen_id->context, context 2079 drivers/infiniband/core/cma.c id = __rdma_create_id(net, listen_id->event_handler, listen_id->context, context 2183 drivers/infiniband/core/cma.c cm_id->context = conn_id; context 2267 drivers/infiniband/core/cma.c struct rdma_id_private *id_priv = iw_id->context; context 2347 drivers/infiniband/core/cma.c listen_id = cm_id->context; context 2356 drivers/infiniband/core/cma.c listen_id->id.context, context 2382 drivers/infiniband/core/cma.c cm_id->context = conn_id; context 2461 drivers/infiniband/core/cma.c struct rdma_id_private *id_priv = id->context; context 2463 drivers/infiniband/core/cma.c id->context = id_priv->id.context; context 2554 drivers/infiniband/core/cma.c void *context) context 2556 drivers/infiniband/core/cma.c struct cma_work *work = context; context 3026 drivers/infiniband/core/cma.c struct rdma_dev_addr *dev_addr, void *context) context 3028 drivers/infiniband/core/cma.c struct rdma_id_private *id_priv = context; context 3674 drivers/infiniband/core/cma.c struct rdma_id_private *id_priv = cm_id->context; context 4152 drivers/infiniband/core/cma.c struct cma_multicast *mc = multicast->context; context 4178 drivers/infiniband/core/cma.c event.param.ud.private_data = mc->context; context 4310 drivers/infiniband/core/cma.c mc->multicast.ib->context = mc; context 4425 drivers/infiniband/core/cma.c u8 join_state, void *context) context 4444 drivers/infiniband/core/cma.c mc->context = context; context 244 drivers/infiniband/core/iwcm.c void *context) context 255 drivers/infiniband/core/iwcm.c cm_id_priv->id.context = context; context 781 drivers/infiniband/core/iwcm.c listen_id_priv->id.context); context 232 drivers/infiniband/core/mad.c void *context, context 394 drivers/infiniband/core/mad.c mad_agent_priv->agent.context = context; context 542 drivers/infiniband/core/mad.c void *context) context 578 drivers/infiniband/core/mad.c mad_snoop_priv->agent.context = context; context 1404 drivers/infiniband/core/mad.c void *context) context 177 drivers/infiniband/core/mad_rmpp.c msg->context[0] = ah; context 211 drivers/infiniband/core/mad_rmpp.c if (mad_send_wc->send_buf->context[0] == mad_send_wc->send_buf->ah) context 125 drivers/infiniband/core/multicast.c void *context); context 127 drivers/infiniband/core/multicast.c void *context); context 512 drivers/infiniband/core/multicast.c void *context) context 514 drivers/infiniband/core/multicast.c struct mcast_group *group = context; context 546 drivers/infiniband/core/multicast.c void *context) context 548 drivers/infiniband/core/multicast.c struct mcast_group *group = context; context 613 drivers/infiniband/core/multicast.c void *context) context 633 drivers/infiniband/core/multicast.c member->multicast.context = context; context 564 drivers/infiniband/core/nldev.c cq->uobject->context->res.id)) context 637 drivers/infiniband/core/nldev.c pd->uobject->context->res.id)) context 156 drivers/infiniband/core/rdma_core.c WARN_ON(!uobj->context); context 160 drivers/infiniband/core/rdma_core.c uobj->context = NULL; context 290 drivers/infiniband/core/rdma_core.c uobj->context = ucontext; context 422 drivers/infiniband/core/rdma_core.c attrs->context = uobj->context; context 446 drivers/infiniband/core/rdma_core.c ret = ib_rdmacg_try_charge(&uobj->cg_obj, uobj->context->device, context 506 drivers/infiniband/core/rdma_core.c attrs->context = ret->context; context 512 drivers/infiniband/core/rdma_core.c ib_rdmacg_uncharge(&uobj->cg_obj, uobj->context->device, context 538 drivers/infiniband/core/rdma_core.c ib_rdmacg_uncharge(&uobj->cg_obj, uobj->context->device, context 775 drivers/infiniband/core/rdma_core.c .context = uobj->context, context 848 drivers/infiniband/core/rdma_core.c attrs.context = obj->context; context 58 drivers/infiniband/core/sa.h void *context), context 59 drivers/infiniband/core/sa.h void *context, struct ib_sa_query **sa_query); context 128 drivers/infiniband/core/sa_query.c void *context; context 134 drivers/infiniband/core/sa_query.c void *context; context 141 drivers/infiniband/core/sa_query.c void *context; context 147 drivers/infiniband/core/sa_query.c void *context; context 153 drivers/infiniband/core/sa_query.c void *context; context 752 drivers/infiniband/core/sa_query.c struct sa_path_rec *sa_rec = query->mad_buf->context[1]; context 759 drivers/infiniband/core/sa_query.c query->mad_buf->context[1] = NULL; context 1375 drivers/infiniband/core/sa_query.c query->mad_buf->context[0] = query; context 1486 drivers/infiniband/core/sa_query.c query->callback(status, &rec, query->context); context 1499 drivers/infiniband/core/sa_query.c query->callback(status, &opa, query->context); context 1501 drivers/infiniband/core/sa_query.c query->callback(status, &rec, query->context); context 1505 drivers/infiniband/core/sa_query.c query->callback(status, NULL, query->context); context 1549 drivers/infiniband/core/sa_query.c void *context), context 1550 drivers/infiniband/core/sa_query.c void *context, context 1600 drivers/infiniband/core/sa_query.c query->context = context; context 1626 drivers/infiniband/core/sa_query.c query->sa_query.mad_buf->context[1] = (query->conv_pr) ? context 1659 drivers/infiniband/core/sa_query.c query->callback(status, &rec, query->context); context 1661 drivers/infiniband/core/sa_query.c query->callback(status, NULL, query->context); context 1703 drivers/infiniband/core/sa_query.c void *context), context 1704 drivers/infiniband/core/sa_query.c void *context, context 1737 drivers/infiniband/core/sa_query.c query->context = context; context 1782 drivers/infiniband/core/sa_query.c query->callback(status, &rec, query->context); context 1784 drivers/infiniband/core/sa_query.c query->callback(status, NULL, query->context); context 1800 drivers/infiniband/core/sa_query.c void *context), context 1801 drivers/infiniband/core/sa_query.c void *context, context 1829 drivers/infiniband/core/sa_query.c query->context = context; context 1874 drivers/infiniband/core/sa_query.c query->callback(status, &rec, query->context); context 1876 drivers/infiniband/core/sa_query.c query->callback(status, NULL, query->context); context 1891 drivers/infiniband/core/sa_query.c void *context), context 1892 drivers/infiniband/core/sa_query.c void *context, context 1926 drivers/infiniband/core/sa_query.c query->context = context; context 1989 drivers/infiniband/core/sa_query.c static void ib_classportinfo_cb(void *context) context 1991 drivers/infiniband/core/sa_query.c struct ib_classport_info_context *cb_ctx = context; context 2045 drivers/infiniband/core/sa_query.c query->callback(query->context); context 2056 drivers/infiniband/core/sa_query.c void (*callback)(void *context), context 2057 drivers/infiniband/core/sa_query.c void *context, context 2081 drivers/infiniband/core/sa_query.c query->context = context; context 2164 drivers/infiniband/core/sa_query.c struct ib_sa_query *query = mad_send_wc->send_buf->context[0]; context 2202 drivers/infiniband/core/sa_query.c query = send_buf->context[0]; context 305 drivers/infiniband/core/ucma.c struct ucma_context *ctx = cm_id->context; context 344 drivers/infiniband/core/ucma.c struct ucma_context *ctx = cm_id->context; context 437 drivers/infiniband/core/ucma.c ctx->cm_id->context = ctx; context 196 drivers/infiniband/core/umem.c struct ib_ucontext *context; context 212 drivers/infiniband/core/umem.c context = container_of(udata, struct uverbs_attr_bundle, driver_udata) context 213 drivers/infiniband/core/umem.c ->context; context 214 drivers/infiniband/core/umem.c if (!context) context 237 drivers/infiniband/core/umem.c umem->ibdev = context->device; context 292 drivers/infiniband/core/umem.c dma_get_max_seg_size(context->device->dma_device), context 300 drivers/infiniband/core/umem.c umem->nmap = ib_dma_map_sg_attrs(context->device, context 315 drivers/infiniband/core/umem.c __ib_umem_release(context->device, umem, 0); context 296 drivers/infiniband/core/umem_odp.c struct ib_ucontext *context = context 298 drivers/infiniband/core/umem_odp.c ->context; context 306 drivers/infiniband/core/umem_odp.c if (!context) context 308 drivers/infiniband/core/umem_odp.c if (WARN_ON_ONCE(!context->device->ops.invalidate_range)) context 315 drivers/infiniband/core/umem_odp.c umem->ibdev = context->device; context 389 drivers/infiniband/core/umem_odp.c struct ib_ucontext *context; context 396 drivers/infiniband/core/umem_odp.c context = container_of(udata, struct uverbs_attr_bundle, driver_udata) context 397 drivers/infiniband/core/umem_odp.c ->context; context 398 drivers/infiniband/core/umem_odp.c if (!context) context 402 drivers/infiniband/core/umem_odp.c WARN_ON_ONCE(!context->device->ops.invalidate_range)) context 409 drivers/infiniband/core/umem_odp.c umem_odp->umem.ibdev = context->device; context 212 drivers/infiniband/core/user_mad.c struct ib_umad_file *file = agent->context; context 213 drivers/infiniband/core/user_mad.c struct ib_umad_packet *packet = send_wc->send_buf->context[0]; context 232 drivers/infiniband/core/user_mad.c struct ib_umad_file *file = agent->context; context 581 drivers/infiniband/core/user_mad.c packet->msg->context[0] = packet; context 244 drivers/infiniband/core/uverbs_cmd.c attrs->context = ucontext; context 598 drivers/infiniband/core/uverbs_ioctl.c pbundle->bundle.context = NULL; /* only valid if bundle has uobject */ context 704 drivers/infiniband/core/uverbs_main.c bundle.context = NULL; /* only valid if bundle has uobject */ context 57 drivers/infiniband/core/uverbs_std_types_counters.c struct ib_device *ib_dev = attrs->context->device; context 67 drivers/infiniband/core/uverbs_std_types_cq.c struct ib_device *ib_dev = attrs->context->device; context 58 drivers/infiniband/core/uverbs_std_types_dm.c struct ib_device *ib_dev = attrs->context->device; context 75 drivers/infiniband/core/uverbs_std_types_dm.c dm = ib_dev->ops.alloc_dm(ib_dev, attrs->context, &attr, attrs); context 313 drivers/infiniband/core/uverbs_std_types_flow_action.c struct ib_device *ib_dev = attrs->context->device; context 662 drivers/infiniband/core/verbs.c void *context) context 664 drivers/infiniband/core/verbs.c struct find_gid_index_context *ctx = context; context 683 drivers/infiniband/core/verbs.c struct find_gid_index_context context = {.vlan_id = vlan_id, context 687 drivers/infiniband/core/verbs.c &context); context 1049 drivers/infiniband/core/verbs.c static void __ib_shared_qp_event_handler(struct ib_event *event, void *context) context 1051 drivers/infiniband/core/verbs.c struct ib_qp *qp = context; context 304 drivers/infiniband/hw/bnxt_re/ib_verbs.c int bnxt_re_del_gid(const struct ib_gid_attr *attr, void **context) context 314 drivers/infiniband/hw/bnxt_re/ib_verbs.c ctx = *context; context 357 drivers/infiniband/hw/bnxt_re/ib_verbs.c int bnxt_re_add_gid(const struct ib_gid_attr *attr, void **context) context 376 drivers/infiniband/hw/bnxt_re/ib_verbs.c *context = ctx_tbl[tbl_idx]; context 392 drivers/infiniband/hw/bnxt_re/ib_verbs.c *context = ctx; context 158 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_del_gid(const struct ib_gid_attr *attr, void **context); context 159 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_add_gid(const struct ib_gid_attr *attr, void **context); context 212 drivers/infiniband/hw/bnxt_re/ib_verbs.h void bnxt_re_dealloc_ucontext(struct ib_ucontext *context); context 213 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_mmap(struct ib_ucontext *context, struct vm_area_struct *vma); context 65 drivers/infiniband/hw/cxgb3/iwch_provider.c static void iwch_dealloc_ucontext(struct ib_ucontext *context) context 67 drivers/infiniband/hw/cxgb3/iwch_provider.c struct iwch_dev *rhp = to_iwch_dev(context->device); context 68 drivers/infiniband/hw/cxgb3/iwch_provider.c struct iwch_ucontext *ucontext = to_iwch_ucontext(context); context 71 drivers/infiniband/hw/cxgb3/iwch_provider.c pr_debug("%s context %p\n", __func__, context); context 81 drivers/infiniband/hw/cxgb3/iwch_provider.c struct iwch_ucontext *context = to_iwch_ucontext(ucontext); context 85 drivers/infiniband/hw/cxgb3/iwch_provider.c cxio_init_ucontext(&rhp->rdev, &context->uctx); context 86 drivers/infiniband/hw/cxgb3/iwch_provider.c INIT_LIST_HEAD(&context->mmaps); context 87 drivers/infiniband/hw/cxgb3/iwch_provider.c spin_lock_init(&context->mmap_lock); context 237 drivers/infiniband/hw/cxgb3/iwch_provider.c static int iwch_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 254 drivers/infiniband/hw/cxgb3/iwch_provider.c rdev_p = &(to_iwch_dev(context->device)->rdev); context 255 drivers/infiniband/hw/cxgb3/iwch_provider.c ucontext = to_iwch_ucontext(context); context 61 drivers/infiniband/hw/cxgb4/provider.c static void c4iw_dealloc_ucontext(struct ib_ucontext *context) context 63 drivers/infiniband/hw/cxgb4/provider.c struct c4iw_ucontext *ucontext = to_c4iw_ucontext(context); context 67 drivers/infiniband/hw/cxgb4/provider.c pr_debug("context %p\n", context); context 79 drivers/infiniband/hw/cxgb4/provider.c struct c4iw_ucontext *context = to_c4iw_ucontext(ucontext); context 86 drivers/infiniband/hw/cxgb4/provider.c c4iw_init_dev_ucontext(&rhp->rdev, &context->uctx); context 87 drivers/infiniband/hw/cxgb4/provider.c INIT_LIST_HEAD(&context->mmaps); context 88 drivers/infiniband/hw/cxgb4/provider.c spin_lock_init(&context->mmap_lock); context 102 drivers/infiniband/hw/cxgb4/provider.c spin_lock(&context->mmap_lock); context 103 drivers/infiniband/hw/cxgb4/provider.c uresp.status_page_key = context->key; context 104 drivers/infiniband/hw/cxgb4/provider.c context->key += PAGE_SIZE; context 105 drivers/infiniband/hw/cxgb4/provider.c spin_unlock(&context->mmap_lock); context 115 drivers/infiniband/hw/cxgb4/provider.c insert_mmap(context, mm); context 124 drivers/infiniband/hw/cxgb4/provider.c static int c4iw_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 140 drivers/infiniband/hw/cxgb4/provider.c rdev = &(to_c4iw_dev(context->device)->rdev); context 141 drivers/infiniband/hw/cxgb4/provider.c ucontext = to_c4iw_ucontext(context); context 1043 drivers/infiniband/hw/hfi1/chip.c unsigned int context, u64 err_status); context 1046 drivers/infiniband/hw/hfi1/chip.c unsigned int context, u64 err_status); context 1048 drivers/infiniband/hw/hfi1/chip.c unsigned int context, u64 err_status); context 1201 drivers/infiniband/hw/hfi1/chip.c u64 (*rw_cntr)(const struct cntr_entry *, void *context, int vl, context 1394 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1396 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1411 drivers/infiniband/hw/hfi1/chip.c void *context, int idx, int mode, u64 data) context 1413 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1421 drivers/infiniband/hw/hfi1/chip.c void *context, int idx, int mode, u64 data) context 1423 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1431 drivers/infiniband/hw/hfi1/chip.c void *context, int idx, int mode, u64 data) context 1433 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1441 drivers/infiniband/hw/hfi1/chip.c void *context, int idx, int mode, context 1444 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1451 drivers/infiniband/hw/hfi1/chip.c static u64 dev_access_u64_csr(const struct cntr_entry *entry, void *context, context 1454 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1472 drivers/infiniband/hw/hfi1/chip.c static u64 dc_access_lcb_cntr(const struct cntr_entry *entry, void *context, context 1475 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1496 drivers/infiniband/hw/hfi1/chip.c static u64 port_access_u32_csr(const struct cntr_entry *entry, void *context, context 1499 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1507 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1509 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1546 drivers/infiniband/hw/hfi1/chip.c static u64 access_sw_link_dn_cnt(const struct cntr_entry *entry, void *context, context 1549 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1556 drivers/infiniband/hw/hfi1/chip.c static u64 access_sw_link_up_cnt(const struct cntr_entry *entry, void *context, context 1559 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1567 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1570 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; context 1578 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1580 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; context 1595 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1598 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1608 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1610 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = context; context 1655 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1657 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1664 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1666 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1673 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1675 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1681 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1683 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1689 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1691 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1697 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1699 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1705 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1707 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = context; context 1713 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1715 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1723 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1726 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1732 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1735 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1741 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1744 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1750 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 1753 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1759 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1762 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1769 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1771 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1777 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 1780 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1786 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1789 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1795 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1798 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1804 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 1807 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1814 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1816 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1822 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 1825 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1831 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1834 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1845 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1847 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1857 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1860 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1866 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1869 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1875 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1878 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1884 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1887 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1893 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1896 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1903 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1905 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1912 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1914 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1920 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 1923 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1929 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1932 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1938 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1940 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1946 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1949 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1955 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1958 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1965 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1967 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1974 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 1976 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1982 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1985 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 1991 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 1994 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2000 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2003 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2009 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2012 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2018 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2021 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2027 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2030 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2037 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2039 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2045 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2048 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2054 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2057 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2063 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2066 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2072 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2075 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2081 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2084 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2090 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2093 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2099 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2102 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2109 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2111 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2118 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2120 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2127 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2129 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2136 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2138 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2145 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2147 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2153 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2156 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2163 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2165 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2171 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2174 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2180 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2183 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2190 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2192 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2198 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2201 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2207 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2210 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2216 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2219 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2229 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2232 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2238 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2241 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2247 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2250 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2256 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2259 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2265 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2268 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2274 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2277 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2283 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2286 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2292 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2295 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2301 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2304 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2311 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2313 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2320 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2322 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2328 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2331 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2337 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2340 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2346 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2349 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2355 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2358 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2364 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2367 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2373 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2376 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2382 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2385 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2392 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2394 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2401 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2403 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2410 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2412 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2419 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2421 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2428 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2430 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2437 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2439 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2446 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2448 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2455 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2457 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2464 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2466 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2473 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2475 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2482 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2484 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2491 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2493 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2500 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2502 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2508 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2511 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2517 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2520 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2526 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2529 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2535 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2538 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2544 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2547 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2554 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2556 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2563 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2565 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2572 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2574 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2581 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2583 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2590 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2592 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2599 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2601 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2608 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2610 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2617 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2619 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2626 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2628 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2634 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2637 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2643 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2646 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2653 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2655 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2662 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2664 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2670 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2673 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2679 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2682 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2688 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2691 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2697 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2700 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2706 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2709 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2715 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2718 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2724 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2727 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2734 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2736 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2743 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2745 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2751 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2754 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2760 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2763 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2769 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2772 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2778 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2781 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2787 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2790 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2796 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2799 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2810 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2812 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2819 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2821 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2828 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2830 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2837 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2839 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2845 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2848 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2854 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2857 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2863 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2866 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2873 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2875 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2881 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2884 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2890 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2893 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2899 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2902 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2909 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2911 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2918 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2920 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2926 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2929 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2935 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2938 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2944 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2947 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2953 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 2956 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2963 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2965 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2971 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2974 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2980 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 2983 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2990 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 2992 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 2999 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3001 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3008 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3010 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3017 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3019 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3026 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3028 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3035 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3037 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3044 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3046 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3053 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3055 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3062 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3064 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3070 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3073 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3079 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3082 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3088 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3091 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3097 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3100 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3106 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3109 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3115 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3118 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3124 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3127 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3138 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3140 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3147 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3149 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3155 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3158 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3164 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3167 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3178 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3180 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3187 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3189 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3195 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3198 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3204 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3207 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3214 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3216 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3222 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3225 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3231 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3234 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3240 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3243 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3249 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3252 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3258 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3261 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3267 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3270 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3276 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3279 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3285 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3288 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3294 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3297 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3303 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3306 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3312 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3315 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3321 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3324 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3330 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3333 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3339 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3342 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3348 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3351 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3358 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3360 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3366 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3369 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3376 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3378 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3385 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3387 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3394 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3396 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3403 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3405 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3412 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3414 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3421 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3423 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3430 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3432 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3439 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3441 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3448 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3450 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3457 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3459 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3466 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3468 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3475 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3477 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3484 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3486 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3493 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3495 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3502 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3504 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3511 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3513 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3520 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3522 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3529 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3531 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3538 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3540 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3547 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3549 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3556 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3558 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3565 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3567 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3574 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3576 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3583 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3585 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3592 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3594 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3601 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3603 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3609 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3612 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3618 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3621 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3627 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3630 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3636 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3639 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3646 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3648 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3654 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3657 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3663 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3666 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3673 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3675 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3682 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3684 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3690 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3693 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3700 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3702 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3708 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3711 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3718 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3720 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3726 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3729 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3736 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3738 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3745 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3747 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3758 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3760 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3766 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3769 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3775 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3778 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3789 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3791 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3797 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3800 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3807 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3809 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3815 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3818 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3824 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3827 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3838 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3840 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3847 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3849 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3856 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3858 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3864 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3867 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3873 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3876 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3883 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3885 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3892 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3894 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3901 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3903 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3909 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3912 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3918 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3921 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3927 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3930 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3936 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3939 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3945 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3948 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3954 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3957 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3963 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 3966 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3973 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3975 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3981 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, context 3984 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3990 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 3992 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 3998 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4001 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4007 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4010 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4017 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) context 4019 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4025 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4028 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4034 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4037 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4043 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4046 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4052 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, context 4055 drivers/infiniband/hw/hfi1/chip.c struct hfi1_devdata *dd = (struct hfi1_devdata *)context; context 4075 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) \ context 4077 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; \ context 4089 drivers/infiniband/hw/hfi1/chip.c void *context, int vl, int mode, u64 data) \ context 4091 drivers/infiniband/hw/hfi1/chip.c struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; \ context 5904 drivers/infiniband/hw/hfi1/chip.c u32 context, context 5913 drivers/infiniband/hw/hfi1/chip.c reg = read_kctxt_csr(dd, context, eri->status); context 5916 drivers/infiniband/hw/hfi1/chip.c write_kctxt_csr(dd, context, eri->clear, reg); context 5918 drivers/infiniband/hw/hfi1/chip.c eri->handler(dd, context, reg); context 5929 drivers/infiniband/hw/hfi1/chip.c mask = read_kctxt_csr(dd, context, eri->mask); context 5931 drivers/infiniband/hw/hfi1/chip.c write_kctxt_csr(dd, context, eri->mask, mask); context 12222 drivers/infiniband/hw/hfi1/chip.c u64 *psval, void *context, int vl) context 12234 drivers/infiniband/hw/hfi1/chip.c val = entry->rw_cntr(entry, context, vl, CNTR_MODE_R, 0); context 12274 drivers/infiniband/hw/hfi1/chip.c u64 *psval, void *context, int vl, u64 data) context 12288 drivers/infiniband/hw/hfi1/chip.c val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W, context 12292 drivers/infiniband/hw/hfi1/chip.c val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W, context 12296 drivers/infiniband/hw/hfi1/chip.c val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W, data); context 345 drivers/infiniband/hw/hfi1/init.c struct hfi1_ctxtdata **context) context 364 drivers/infiniband/hw/hfi1/init.c *context = NULL; context 488 drivers/infiniband/hw/hfi1/init.c *context = rcd; context 493 drivers/infiniband/hw/hfi1/init.c *context = NULL; context 444 drivers/infiniband/hw/hfi1/pio.c int ret, i, j, context; context 470 drivers/infiniband/hw/hfi1/pio.c context = 0; context 477 drivers/infiniband/hw/hfi1/pio.c &dd->send_contexts[context]; context 482 drivers/infiniband/hw/hfi1/pio.c context++; context 500 drivers/infiniband/hw/hfi1/pio.c u32 context; context 507 drivers/infiniband/hw/hfi1/pio.c context = chip_send_contexts(dd) - index - 1; context 508 drivers/infiniband/hw/hfi1/pio.c dd->hw_to_sw[context] = index; context 510 drivers/infiniband/hw/hfi1/pio.c *hw_context = context; context 537 drivers/infiniband/hw/hfi1/pio.c static inline u32 group_context(u32 context, u32 group) context 539 drivers/infiniband/hw/hfi1/pio.c return (context >> group) << group; context 1667 drivers/infiniband/hw/hfi1/tid_rdma.c void *context, int vl, int mode, u64 data) context 1669 drivers/infiniband/hw/hfi1/tid_rdma.c struct hfi1_devdata *dd = context; context 242 drivers/infiniband/hw/hfi1/tid_rdma.h void *context, int vl, int mode, u64 data); context 97 drivers/infiniband/hw/hns/hns_roce_cmd.c *context = &hr_dev->cmd.context[token & hr_dev->cmd.token_mask]; context 99 drivers/infiniband/hw/hns/hns_roce_cmd.c if (token != context->token) context 102 drivers/infiniband/hw/hns/hns_roce_cmd.c context->result = (status == HNS_ROCE_CMD_SUCCESS) ? 0 : (-EIO); context 103 drivers/infiniband/hw/hns/hns_roce_cmd.c context->out_param = out_param; context 104 drivers/infiniband/hw/hns/hns_roce_cmd.c complete(&context->done); context 114 drivers/infiniband/hw/hns/hns_roce_cmd.c struct hns_roce_cmd_context *context; context 120 drivers/infiniband/hw/hns/hns_roce_cmd.c context = &cmd->context[cmd->free_head]; context 121 drivers/infiniband/hw/hns/hns_roce_cmd.c context->token += cmd->token_mask + 1; context 122 drivers/infiniband/hw/hns/hns_roce_cmd.c cmd->free_head = context->next; context 125 drivers/infiniband/hw/hns/hns_roce_cmd.c init_completion(&context->done); context 129 drivers/infiniband/hw/hns/hns_roce_cmd.c context->token, 1); context 138 drivers/infiniband/hw/hns/hns_roce_cmd.c if (!wait_for_completion_timeout(&context->done, context 145 drivers/infiniband/hw/hns/hns_roce_cmd.c ret = context->result; context 153 drivers/infiniband/hw/hns/hns_roce_cmd.c context->next = cmd->free_head; context 154 drivers/infiniband/hw/hns/hns_roce_cmd.c cmd->free_head = context - cmd->context; context 234 drivers/infiniband/hw/hns/hns_roce_cmd.c hr_cmd->context = kmalloc_array(hr_cmd->max_cmds, context 235 drivers/infiniband/hw/hns/hns_roce_cmd.c sizeof(*hr_cmd->context), context 237 drivers/infiniband/hw/hns/hns_roce_cmd.c if (!hr_cmd->context) context 241 drivers/infiniband/hw/hns/hns_roce_cmd.c hr_cmd->context[i].token = i; context 242 drivers/infiniband/hw/hns/hns_roce_cmd.c hr_cmd->context[i].next = i + 1; context 245 drivers/infiniband/hw/hns/hns_roce_cmd.c hr_cmd->context[hr_cmd->max_cmds - 1].next = -1; context 261 drivers/infiniband/hw/hns/hns_roce_cmd.c kfree(hr_cmd->context); context 308 drivers/infiniband/hw/hns/hns_roce_cq.c struct hns_roce_ucontext *context = rdma_udata_to_drv_context( context 327 drivers/infiniband/hw/hns/hns_roce_cq.c ret = hns_roce_db_map_user(context, udata, ucmd.db_addr, context 388 drivers/infiniband/hw/hns/hns_roce_cq.c struct hns_roce_ucontext *context = rdma_udata_to_drv_context( context 393 drivers/infiniband/hw/hns/hns_roce_cq.c hns_roce_db_unmap_user(context, &hr_cq->db); context 11 drivers/infiniband/hw/hns/hns_roce_db.c int hns_roce_db_map_user(struct hns_roce_ucontext *context, context 20 drivers/infiniband/hw/hns/hns_roce_db.c mutex_lock(&context->page_mutex); context 22 drivers/infiniband/hw/hns/hns_roce_db.c list_for_each_entry(page, &context->page_list, list) context 41 drivers/infiniband/hw/hns/hns_roce_db.c list_add(&page->list, &context->page_list); context 51 drivers/infiniband/hw/hns/hns_roce_db.c mutex_unlock(&context->page_mutex); context 56 drivers/infiniband/hw/hns/hns_roce_db.c void hns_roce_db_unmap_user(struct hns_roce_ucontext *context, context 59 drivers/infiniband/hw/hns/hns_roce_db.c mutex_lock(&context->page_mutex); context 68 drivers/infiniband/hw/hns/hns_roce_db.c mutex_unlock(&context->page_mutex); context 614 drivers/infiniband/hw/hns/hns_roce_device.h struct hns_roce_cmd_context *context; context 1266 drivers/infiniband/hw/hns/hns_roce_device.h int hns_roce_db_map_user(struct hns_roce_ucontext *context, context 1269 drivers/infiniband/hw/hns/hns_roce_device.h void hns_roce_db_unmap_user(struct hns_roce_ucontext *context, context 2490 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_qp_context *context, context 2558 drivers/infiniband/hw/hns/hns_roce_hw_v1.c memcpy(mailbox->buf, context, sizeof(*context)); context 2574 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_sqp_context *context; context 2583 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 2584 drivers/infiniband/hw/hns/hns_roce_hw_v1.c if (!context) context 2596 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_4, context 2600 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_4, context 2604 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_4, QP1C_BYTES_4_PD_M, context 2607 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->sq_rq_bt_l = cpu_to_le32((u32)(dma_handle)); context 2608 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_12, context 2613 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_16, QP1C_BYTES_16_RQ_HEAD_M, context 2615 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_16, QP1C_BYTES_16_PORT_NUM_M, context 2617 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qp1c_bytes_16, context 2620 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qp1c_bytes_16, QP1C_BYTES_16_RQ_BA_FLG_S, context 2622 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qp1c_bytes_16, QP1C_BYTES_16_SQ_BA_FLG_S, context 2624 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qp1c_bytes_16, QP1C_BYTES_16_QP1_ERR_S, context 2627 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_20, QP1C_BYTES_20_SQ_HEAD_M, context 2629 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_20, QP1C_BYTES_20_PKEY_IDX_M, context 2633 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->cur_rq_wqe_ba_l = context 2636 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_28, context 2640 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_28, context 2644 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_32, context 2648 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_32, context 2653 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->cur_sq_wqe_ba_l = cpu_to_le32((u32)mtts[0]); context 2655 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_40, context 2659 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qp1c_bytes_40, context 2666 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->phy_port * sizeof(*context)); context 2668 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_4), addr); context 2669 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->sq_rq_bt_l), addr + 1); context 2670 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_12), addr + 2); context 2671 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_16), addr + 3); context 2672 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_20), addr + 4); context 2673 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->cur_rq_wqe_ba_l), addr + 5); context 2674 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_28), addr + 6); context 2675 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_32), addr + 7); context 2676 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->cur_sq_wqe_ba_l), addr + 8); context 2677 drivers/infiniband/hw/hns/hns_roce_hw_v1.c writel(le32_to_cpu(context->qp1c_bytes_40), addr + 9); context 2682 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->phy_port * sizeof(*context)); context 2688 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->phy_port * sizeof(*context), reg_val); context 2704 drivers/infiniband/hw/hns/hns_roce_hw_v1.c kfree(context); context 2708 drivers/infiniband/hw/hns/hns_roce_hw_v1.c kfree(context); context 2719 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_qp_context *context; context 2733 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 2734 drivers/infiniband/hw/hns/hns_roce_hw_v1.c if (!context) context 2760 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2765 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2767 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2770 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2774 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2778 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2780 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2784 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2788 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2793 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_8, context 2797 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_8, context 2803 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_12, context 2808 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_12, context 2813 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_16, context 2818 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2822 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2825 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2829 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2834 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2838 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2844 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_4, context 2846 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2850 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2854 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_4, context 2859 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_8, context 2863 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_8, context 2869 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_12, context 2874 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_12, context 2879 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_12, context 2884 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_16, context 2898 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->sq_rq_bt_l = cpu_to_le32((u32)(dma_handle)); context 2899 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_24, context 2903 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_24, context 2906 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_24, context 2910 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->irrl_ba_l = cpu_to_le32((u32)(dma_handle_2)); context 2911 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_32, context 2916 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_32, context 2919 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_32, context 2922 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_32, context 2932 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_32, context 2935 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_32, context 2938 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_32, context 2944 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_36, context 2951 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_36, context 2958 drivers/infiniband/hw/hns/hns_roce_hw_v1.c memcpy(&(context->dmac_l), dmac, 4); context 2960 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_44, context 2964 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_44, context 2968 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_44, context 2973 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_48, context 2977 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_48, context 2981 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_48, context 2985 drivers/infiniband/hw/hns/hns_roce_hw_v1.c memcpy(context->dgid, grh->dgid.raw, context 2988 drivers/infiniband/hw/hns/hns_roce_hw_v1.c dev_dbg(dev, "dmac:%x :%lx\n", context->dmac_l, context 2989 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_44, context 2993 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_68, context 2997 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_68, context 3002 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->cur_rq_wqe_ba_l = context 3005 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_76, context 3009 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_76, context 3013 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->rx_rnr_time = 0; context 3015 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_84, context 3019 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_84, context 3023 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_88, context 3027 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_88, context 3029 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_88, context 3031 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_88, context 3035 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_88, context 3040 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->dma_length = 0; context 3041 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->r_key = 0; context 3042 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->va_l = 0; context 3043 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->va_h = 0; context 3045 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_108, context 3048 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_108, context 3050 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_108, context 3053 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_112, context 3056 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_112, context 3061 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3065 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3083 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->rx_cur_sq_wqe_ba_l = cpu_to_le32((u32)(mtts[0])); context 3085 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_120, context 3090 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_124, context 3093 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_124, context 3097 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_128, context 3101 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_128, context 3103 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_128, context 3107 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_128, context 3110 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_132, context 3113 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_132, context 3117 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_136, context 3121 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_136, context 3126 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_140, context 3130 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_140, context 3133 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_140, context 3136 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_148, context 3139 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_148, context 3143 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_148, context 3147 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_148, context 3151 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->rnr_retry = 0; context 3153 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3160 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3165 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3170 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3174 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3178 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3183 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3187 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_156, context 3190 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->pkt_use_len = 0; context 3192 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_164, context 3195 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_164, context 3199 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_168, context 3203 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_168, context 3206 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_168, context 3209 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_168, context 3211 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_168, context 3213 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_168, context 3215 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->sge_use_len = 0; context 3217 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_176, context 3220 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_176, context 3224 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_180, context 3227 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_180, context 3231 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context->tx_cur_sq_wqe_ba_l = cpu_to_le32((u32)(mtts[0])); context 3233 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_188, context 3237 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_bit(context->qpc_bytes_188, context 3239 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_188, context 3256 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_set_field(context->qpc_bytes_144, context 3263 drivers/infiniband/hw/hns/hns_roce_hw_v1.c to_hns_roce_state(new_state), context, context 3317 drivers/infiniband/hw/hns/hns_roce_hw_v1.c kfree(context); context 3385 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_sqp_context context; context 3397 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_4 = cpu_to_le32(roce_read(hr_dev, addr)); context 3398 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.sq_rq_bt_l = cpu_to_le32(roce_read(hr_dev, addr + 1)); context 3399 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_12 = cpu_to_le32(roce_read(hr_dev, addr + 2)); context 3400 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_16 = cpu_to_le32(roce_read(hr_dev, addr + 3)); context 3401 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_20 = cpu_to_le32(roce_read(hr_dev, addr + 4)); context 3402 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.cur_rq_wqe_ba_l = cpu_to_le32(roce_read(hr_dev, addr + 5)); context 3403 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_28 = cpu_to_le32(roce_read(hr_dev, addr + 6)); context 3404 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_32 = cpu_to_le32(roce_read(hr_dev, addr + 7)); context 3405 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.cur_sq_wqe_ba_l = cpu_to_le32(roce_read(hr_dev, addr + 8)); context 3406 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context.qp1c_bytes_40 = cpu_to_le32(roce_read(hr_dev, addr + 9)); context 3408 drivers/infiniband/hw/hns/hns_roce_hw_v1.c hr_qp->state = roce_get_field(context.qp1c_bytes_4, context 3421 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->pkey_index = roce_get_field(context.qp1c_bytes_20, context 3456 drivers/infiniband/hw/hns/hns_roce_hw_v1.c struct hns_roce_qp_context *context; context 3461 drivers/infiniband/hw/hns/hns_roce_hw_v1.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 3462 drivers/infiniband/hw/hns/hns_roce_hw_v1.c if (!context) context 3475 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ret = hns_roce_v1_query_qpc(hr_dev, hr_qp, context); context 3482 drivers/infiniband/hw/hns/hns_roce_hw_v1.c state = roce_get_field(context->qpc_bytes_144, context 3493 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->path_mtu = (enum ib_mtu)roce_get_field(context->qpc_bytes_48, context 3501 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->rq_psn = roce_get_field(context->qpc_bytes_88, context 3504 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->sq_psn = (u32)roce_get_field(context->qpc_bytes_164, context 3507 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->dest_qp_num = (u8)roce_get_field(context->qpc_bytes_36, context 3510 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->qp_access_flags = ((roce_get_bit(context->qpc_bytes_4, context 3512 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ((roce_get_bit(context->qpc_bytes_4, context 3514 drivers/infiniband/hw/hns/hns_roce_hw_v1.c ((roce_get_bit(context->qpc_bytes_4, context 3523 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_156, context 3528 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_48, context 3532 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_36, context 3536 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_44, context 3540 drivers/infiniband/hw/hns/hns_roce_hw_v1.c roce_get_field(context->qpc_bytes_48, context 3544 drivers/infiniband/hw/hns/hns_roce_hw_v1.c memcpy(grh->dgid.raw, context->dgid, context 3548 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->pkey_index = roce_get_field(context->qpc_bytes_12, context 3553 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->max_rd_atomic = 1 << roce_get_field(context->qpc_bytes_156, context 3556 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->max_dest_rd_atomic = 1 << roce_get_field(context->qpc_bytes_32, context 3559 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->min_rnr_timer = (u8)(roce_get_field(context->qpc_bytes_24, context 3562 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->timeout = (u8)(roce_get_field(context->qpc_bytes_156, context 3565 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->retry_cnt = roce_get_field(context->qpc_bytes_148, context 3568 drivers/infiniband/hw/hns/hns_roce_hw_v1.c qp_attr->rnr_retry = (u8)le32_to_cpu(context->rnr_retry); context 3587 drivers/infiniband/hw/hns/hns_roce_hw_v1.c kfree(context); context 3168 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3178 drivers/infiniband/hw/hns/hns_roce_hw_v2.c memcpy(mailbox->buf, context, sizeof(*context) * 2); context 3190 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3206 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RRE_S, context 3210 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RWE_S, context 3214 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_ATE_S, context 3220 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3224 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, context 3229 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, context 3239 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 3245 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 3259 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3271 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, V2_QPC_BYTE_4_TST_M, context 3276 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, V2_QPC_BYTE_4_SQPN_M, context 3281 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_16_buf_ba_pg_sz, V2_QPC_BYTE_16_PD_M, context 3286 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, V2_QPC_BYTE_20_RQWS_M, context 3291 drivers/infiniband/hw/hns/hns_roce_hw_v2.c set_qpc_wqe_cnt(hr_qp, context, qpc_mask); context 3294 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_VLAN_ID_M, context 3323 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_68_rq_db, context 3329 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_68_rq_db, context 3336 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->rq_db_record_addr = cpu_to_le32(hr_qp->rdb.dma >> 32); context 3339 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RQIE_S, context 3343 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_80_rnr_rx_cqn, V2_QPC_BYTE_80_RX_CQN_M, context 3348 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_76_srqn_op_en, context 3353 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, context 3443 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_172_sq_psn, V2_QPC_BYTE_172_ACK_REQ_FREQ_M, context 3452 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_172_sq_psn, V2_QPC_BYTE_172_FRE_S, 1); context 3533 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_252_err_txcqn, V2_QPC_BYTE_252_TX_CQN_M, context 3551 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3562 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, V2_QPC_BYTE_4_TST_M, context 3568 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RRE_S, context 3573 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RWE_S, context 3579 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_ATE_S, context 3585 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RRE_S, context 3590 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_RWE_S, context 3595 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, V2_QPC_BYTE_76_ATE_S, context 3601 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_16_buf_ba_pg_sz, V2_QPC_BYTE_16_PD_M, context 3606 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_80_rnr_rx_cqn, V2_QPC_BYTE_80_RX_CQN_M, context 3611 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_252_err_txcqn, V2_QPC_BYTE_252_TX_CQN_M, context 3617 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, context 3621 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_76_srqn_op_en, context 3628 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_4_sqpn_tst, V2_QPC_BYTE_4_SQPN_M, context 3634 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_56_dqpn_err, V2_QPC_BYTE_56_DQPN_M, context 3668 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3719 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->wqe_sge_ba = cpu_to_le32(wqe_sge_ba >> 3); context 3728 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_12_sq_hop, V2_QPC_BYTE_12_WQE_SGE_BA_M, context 3733 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_12_sq_hop, V2_QPC_BYTE_12_SQ_HOP_NUM_M, context 3740 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 3750 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 3759 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_16_buf_ba_pg_sz, context 3767 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_16_buf_ba_pg_sz, context 3775 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->rq_cur_blk_addr = cpu_to_le32(mtts[0] >> PAGE_ADDR_SHIFT); context 3778 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_92_srq_info, context 3786 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->rq_nxt_blk_addr = cpu_to_le32(mtts[1] >> PAGE_ADDR_SHIFT); context 3789 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_104_rq_sge, context 3797 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_132_trrl, V2_QPC_BYTE_132_TRRL_BA_M, context 3801 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->trrl_ba = cpu_to_le32(dma_handle_3 >> (16 + 4)); context 3803 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_140_raq, V2_QPC_BYTE_140_TRRL_BA_M, context 3809 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->irrl_ba = cpu_to_le32(dma_handle_2 >> 6); context 3811 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_208_irrl, V2_QPC_BYTE_208_IRRL_BA_M, context 3817 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_208_irrl, V2_QPC_BYTE_208_RMT_E2E_S, 1); context 3820 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_252_err_txcqn, V2_QPC_BYTE_252_SIG_TYPE_S, context 3831 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_28_at_fl, V2_QPC_BYTE_28_LBI_S, 1); context 3836 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_56_dqpn_err, V2_QPC_BYTE_56_DQPN_M, context 3844 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 3852 drivers/infiniband/hw/hns/hns_roce_hw_v2.c memcpy(&(context->dmac), dmac, sizeof(u32)); context 3853 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_52_udpspn_dmac, V2_QPC_BYTE_52_DMAC_M, context 3860 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_56_dqpn_err, V2_QPC_BYTE_56_LP_PKTN_INI_M, context 3866 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_MTU_M, context 3869 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_MTU_M, context 3875 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_84_rq_ci_pi, context 3893 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->rq_rnr_timer = 0; context 3902 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_168_irrl_idx, context 3914 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 3957 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->sq_cur_blk_addr = cpu_to_le32(sq_cur_blk >> PAGE_ADDR_SHIFT); context 3958 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_168_irrl_idx, context 3967 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->sq_cur_sge_blk_addr = ((ibqp->qp_type == IB_QPT_GSI) || context 3971 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_184_irrl_idx, context 3983 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->rx_sq_cur_blk_addr = context 3985 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_232_irrl_sge, context 4030 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_212_lsn, V2_QPC_BYTE_212_LSN_M, context 4059 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 4090 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_76_srqn_op_en, context 4094 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_168_irrl_idx, context 4100 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_VLAN_ID_M, context 4116 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_52_udpspn_dmac, V2_QPC_BYTE_52_UDPSPN_M, context 4123 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_20_smac_sgid_idx, context 4130 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_HOP_LIMIT_M, context 4136 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_TC_M, context 4139 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_24_mtu_tc, V2_QPC_BYTE_24_TC_M, context 4143 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_28_at_fl, V2_QPC_BYTE_28_FL_M, context 4147 drivers/infiniband/hw/hns/hns_roce_hw_v2.c memcpy(context->dgid, grh->dgid.raw, sizeof(grh->dgid.raw)); context 4149 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_28_at_fl, V2_QPC_BYTE_28_SL_M, context 4163 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 4171 drivers/infiniband/hw/hns/hns_roce_hw_v2.c modify_qp_reset_to_init(ibqp, attr, attr_mask, context, context 4174 drivers/infiniband/hw/hns/hns_roce_hw_v2.c modify_qp_init_to_init(ibqp, attr, attr_mask, context, context 4177 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = modify_qp_init_to_rtr(ibqp, attr, attr_mask, context, context 4182 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = modify_qp_rtr_to_rts(ibqp, attr, attr_mask, context, context 4202 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context, context 4210 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = hns_roce_v2_set_path(ibqp, attr, attr_mask, context, context 4218 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_28_at_fl, context 4231 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_212_lsn, context 4239 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_212_lsn, context 4249 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_244_rnr_rxack, context 4256 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_244_rnr_rxack, context 4266 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_172_sq_psn, context 4273 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_196_sq_psn, context 4280 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_220_retry_psn_msn, context 4287 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_224_retry_msg, context 4295 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_224_retry_msg, context 4303 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_244_rnr_rxack, context 4313 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_140_raq, V2_QPC_BYTE_140_RR_MAX_M, context 4321 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_208_irrl, V2_QPC_BYTE_208_SR_MAX_M, context 4330 drivers/infiniband/hw/hns/hns_roce_hw_v2.c set_access_flags(hr_qp, context, qpc_mask, attr, attr_mask); context 4333 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_80_rnr_rx_cqn, context 4344 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_108_rx_reqepsn, context 4351 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_152_raq, V2_QPC_BYTE_152_RAQ_PSN_M, context 4359 drivers/infiniband/hw/hns/hns_roce_hw_v2.c context->qkey_xrcd = cpu_to_le32(attr->qkey); context 4393 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context *context = ctx; context 4404 drivers/infiniband/hw/hns/hns_roce_hw_v2.c memset(context, 0, sizeof(*context)); context 4407 drivers/infiniband/hw/hns/hns_roce_hw_v2.c new_state, context, qpc_mask); context 4413 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_160_sq_ci_pi, context 4422 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_84_rq_ci_pi, context 4433 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = hns_roce_v2_set_opt_fields(ibqp, attr, attr_mask, context, context 4438 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_bit(context->byte_108_rx_reqepsn, V2_QPC_BYTE_108_INV_CREDIT_S, context 4444 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_set_field(context->byte_60_qpst_tempid, V2_QPC_BYTE_60_QP_ST_M, context 4527 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_v2_qp_context context = {}; context 4544 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ret = hns_roce_v2_query_qpc(hr_dev, hr_qp, &context); context 4551 drivers/infiniband/hw/hns/hns_roce_hw_v2.c state = roce_get_field(context.byte_60_qpst_tempid, context 4561 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->path_mtu = (enum ib_mtu)roce_get_field(context.byte_24_mtu_tc, context 4569 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->rq_psn = roce_get_field(context.byte_108_rx_reqepsn, context 4572 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->sq_psn = (u32)roce_get_field(context.byte_172_sq_psn, context 4575 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->dest_qp_num = (u8)roce_get_field(context.byte_56_dqpn_err, context 4578 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->qp_access_flags = ((roce_get_bit(context.byte_76_srqn_op_en, context 4580 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ((roce_get_bit(context.byte_76_srqn_op_en, context 4582 drivers/infiniband/hw/hns/hns_roce_hw_v2.c ((roce_get_bit(context.byte_76_srqn_op_en, context 4591 drivers/infiniband/hw/hns/hns_roce_hw_v2.c roce_get_field(context.byte_28_at_fl, context 4594 drivers/infiniband/hw/hns/hns_roce_hw_v2.c grh->flow_label = roce_get_field(context.byte_28_at_fl, context 4597 drivers/infiniband/hw/hns/hns_roce_hw_v2.c grh->sgid_index = roce_get_field(context.byte_20_smac_sgid_idx, context 4600 drivers/infiniband/hw/hns/hns_roce_hw_v2.c grh->hop_limit = roce_get_field(context.byte_24_mtu_tc, context 4603 drivers/infiniband/hw/hns/hns_roce_hw_v2.c grh->traffic_class = roce_get_field(context.byte_24_mtu_tc, context 4607 drivers/infiniband/hw/hns/hns_roce_hw_v2.c memcpy(grh->dgid.raw, context.dgid, sizeof(grh->dgid.raw)); context 4612 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->max_rd_atomic = 1 << roce_get_field(context.byte_208_irrl, context 4615 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->max_dest_rd_atomic = 1 << roce_get_field(context.byte_140_raq, context 4618 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->min_rnr_timer = (u8)roce_get_field(context.byte_80_rnr_rx_cqn, context 4621 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->timeout = (u8)roce_get_field(context.byte_28_at_fl, context 4624 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->retry_cnt = roce_get_field(context.byte_212_lsn, context 4627 drivers/infiniband/hw/hns/hns_roce_hw_v2.c qp_attr->rnr_retry = le32_to_cpu(context.rq_rnr_timer); context 4692 drivers/infiniband/hw/hns/hns_roce_hw_v2.c struct hns_roce_ucontext *context = context 4699 drivers/infiniband/hw/hns/hns_roce_hw_v2.c hns_roce_db_unmap_user(context, &hr_qp->sdb); context 4702 drivers/infiniband/hw/hns/hns_roce_hw_v2.c hns_roce_db_unmap_user(context, &hr_qp->rdb); context 76 drivers/infiniband/hw/hns/hns_roce_main.c static int hns_roce_add_gid(const struct ib_gid_attr *attr, void **context) context 90 drivers/infiniband/hw/hns/hns_roce_main.c static int hns_roce_del_gid(const struct ib_gid_attr *attr, void **context) context 314 drivers/infiniband/hw/hns/hns_roce_main.c struct hns_roce_ucontext *context = to_hr_ucontext(uctx); context 323 drivers/infiniband/hw/hns/hns_roce_main.c ret = hns_roce_uar_alloc(hr_dev, &context->uar); context 328 drivers/infiniband/hw/hns/hns_roce_main.c INIT_LIST_HEAD(&context->page_list); context 329 drivers/infiniband/hw/hns/hns_roce_main.c mutex_init(&context->page_mutex); context 339 drivers/infiniband/hw/hns/hns_roce_main.c hns_roce_uar_free(hr_dev, &context->uar); context 347 drivers/infiniband/hw/hns/hns_roce_main.c struct hns_roce_ucontext *context = to_hr_ucontext(ibcontext); context 349 drivers/infiniband/hw/hns/hns_roce_main.c hns_roce_uar_free(to_hr_dev(ibcontext->device), &context->uar); context 352 drivers/infiniband/hw/hns/hns_roce_main.c static int hns_roce_mmap(struct ib_ucontext *context, context 355 drivers/infiniband/hw/hns/hns_roce_main.c struct hns_roce_dev *hr_dev = to_hr_dev(context->device); context 359 drivers/infiniband/hw/hns/hns_roce_main.c return rdma_user_mmap_io(context, vma, context 360 drivers/infiniband/hw/hns/hns_roce_main.c to_hr_ucontext(context)->uar.pfn, context 372 drivers/infiniband/hw/hns/hns_roce_main.c return rdma_user_mmap_io(context, vma, context 13 drivers/infiniband/hw/hns/hns_roce_restrack.c struct hns_roce_v2_cq_context *context) context 16 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_4_pg_ceqn, context 22 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_4_pg_ceqn, context 28 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_8_cqn, context 34 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_16_hop_addr, context 41 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_28_cq_pi, context 48 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_32_cq_ci, context 55 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_56_cqe_period_maxcnt, context 62 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_56_cqe_period_maxcnt, context 68 drivers/infiniband/hw/hns/hns_roce_restrack.c roce_get_field(context->byte_52_cqe_cnt, context 85 drivers/infiniband/hw/hns/hns_roce_restrack.c struct hns_roce_v2_cq_context *context; context 92 drivers/infiniband/hw/hns/hns_roce_restrack.c context = kzalloc(sizeof(struct hns_roce_v2_cq_context), GFP_KERNEL); context 93 drivers/infiniband/hw/hns/hns_roce_restrack.c if (!context) context 96 drivers/infiniband/hw/hns/hns_roce_restrack.c ret = hr_dev->dfx->query_cqc_info(hr_dev, hr_cq->cqn, (int *)context); context 106 drivers/infiniband/hw/hns/hns_roce_restrack.c if (hns_roce_fill_cq(msg, context)) { context 112 drivers/infiniband/hw/hns/hns_roce_restrack.c kfree(context); context 119 drivers/infiniband/hw/hns/hns_roce_restrack.c kfree(context); context 169 drivers/infiniband/hw/i40iw/i40iw_cm.h u32 context; context 159 drivers/infiniband/hw/i40iw/i40iw_verbs.c static void i40iw_dealloc_ucontext(struct ib_ucontext *context) context 169 drivers/infiniband/hw/i40iw/i40iw_verbs.c static int i40iw_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 175 drivers/infiniband/hw/i40iw/i40iw_verbs.c ucontext = to_ucontext(context); context 290 drivers/infiniband/hw/mlx4/alias_GUID.c void *context) context 293 drivers/infiniband/hw/mlx4/alias_GUID.c struct mlx4_alias_guid_work_context *cb_ctx = context; context 302 drivers/infiniband/hw/mlx4/alias_GUID.c if (!context) context 186 drivers/infiniband/hw/mlx4/cq.c struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context( context 223 drivers/infiniband/hw/mlx4/cq.c uar = &context->uar; context 275 drivers/infiniband/hw/mlx4/cq.c mlx4_ib_db_unmap_user(context, &cq->db); context 50 drivers/infiniband/hw/mlx4/doorbell.c struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context( context 53 drivers/infiniband/hw/mlx4/doorbell.c mutex_lock(&context->db_page_mutex); context 55 drivers/infiniband/hw/mlx4/doorbell.c list_for_each_entry(page, &context->db_page_list, list) context 74 drivers/infiniband/hw/mlx4/doorbell.c list_add(&page->list, &context->db_page_list); context 82 drivers/infiniband/hw/mlx4/doorbell.c mutex_unlock(&context->db_page_mutex); context 87 drivers/infiniband/hw/mlx4/doorbell.c void mlx4_ib_db_unmap_user(struct mlx4_ib_ucontext *context, struct mlx4_db *db) context 89 drivers/infiniband/hw/mlx4/doorbell.c mutex_lock(&context->db_page_mutex); context 97 drivers/infiniband/hw/mlx4/doorbell.c mutex_unlock(&context->db_page_mutex); context 1026 drivers/infiniband/hw/mlx4/mad.c if (mad_send_wc->send_buf->context[0]) context 1027 drivers/infiniband/hw/mlx4/mad.c rdma_destroy_ah(mad_send_wc->send_buf->context[0], 0); context 256 drivers/infiniband/hw/mlx4/main.c static int mlx4_ib_add_gid(const struct ib_gid_attr *attr, void **context) context 273 drivers/infiniband/hw/mlx4/main.c if (!context) context 297 drivers/infiniband/hw/mlx4/main.c *context = port_gid_table->gids[free].ctx; context 308 drivers/infiniband/hw/mlx4/main.c *context = ctx; context 316 drivers/infiniband/hw/mlx4/main.c *context = NULL; context 331 drivers/infiniband/hw/mlx4/main.c *context = NULL; context 341 drivers/infiniband/hw/mlx4/main.c static int mlx4_ib_del_gid(const struct ib_gid_attr *attr, void **context) context 343 drivers/infiniband/hw/mlx4/main.c struct gid_cache_context *ctx = *context; context 1097 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_ucontext *context = to_mucontext(uctx); context 1118 drivers/infiniband/hw/mlx4/main.c err = mlx4_uar_alloc(to_mdev(ibdev)->dev, &context->uar); context 1122 drivers/infiniband/hw/mlx4/main.c INIT_LIST_HEAD(&context->db_page_list); context 1123 drivers/infiniband/hw/mlx4/main.c mutex_init(&context->db_page_mutex); context 1125 drivers/infiniband/hw/mlx4/main.c INIT_LIST_HEAD(&context->wqn_ranges_list); context 1126 drivers/infiniband/hw/mlx4/main.c mutex_init(&context->wqn_ranges_mutex); context 1134 drivers/infiniband/hw/mlx4/main.c mlx4_uar_free(to_mdev(ibdev)->dev, &context->uar); context 1143 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_ucontext *context = to_mucontext(ibcontext); context 1145 drivers/infiniband/hw/mlx4/main.c mlx4_uar_free(to_mdev(ibcontext->device)->dev, &context->uar); context 1152 drivers/infiniband/hw/mlx4/main.c static int mlx4_ib_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 1154 drivers/infiniband/hw/mlx4/main.c struct mlx4_ib_dev *dev = to_mdev(context->device); context 1158 drivers/infiniband/hw/mlx4/main.c return rdma_user_mmap_io(context, vma, context 1159 drivers/infiniband/hw/mlx4/main.c to_mucontext(context)->uar.pfn, context 1167 drivers/infiniband/hw/mlx4/main.c context, vma, context 1168 drivers/infiniband/hw/mlx4/main.c to_mucontext(context)->uar.pfn + context 1181 drivers/infiniband/hw/mlx4/main.c context, vma, context 728 drivers/infiniband/hw/mlx4/mlx4_ib.h void mlx4_ib_db_unmap_user(struct mlx4_ib_ucontext *context, struct mlx4_db *db); context 780 drivers/infiniband/hw/mlx4/qp.c static int mlx4_ib_alloc_wqn(struct mlx4_ib_ucontext *context, context 783 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_dev *dev = to_mdev(context->ibucontext.device); context 787 drivers/infiniband/hw/mlx4/qp.c mutex_lock(&context->wqn_ranges_mutex); context 789 drivers/infiniband/hw/mlx4/qp.c range = list_first_entry_or_null(&context->wqn_ranges_list, context 808 drivers/infiniband/hw/mlx4/qp.c list_add(&range->list, &context->wqn_ranges_list); context 825 drivers/infiniband/hw/mlx4/qp.c mutex_unlock(&context->wqn_ranges_mutex); context 830 drivers/infiniband/hw/mlx4/qp.c static void mlx4_ib_release_wqn(struct mlx4_ib_ucontext *context, context 833 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_dev *dev = to_mdev(context->ibucontext.device); context 836 drivers/infiniband/hw/mlx4/qp.c mutex_lock(&context->wqn_ranges_mutex); context 855 drivers/infiniband/hw/mlx4/qp.c mutex_unlock(&context->wqn_ranges_mutex); context 864 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context( context 941 drivers/infiniband/hw/mlx4/qp.c err = mlx4_ib_alloc_wqn(context, qp, range_size, &qpn); context 978 drivers/infiniband/hw/mlx4/qp.c mlx4_ib_release_wqn(context, qp, 0); context 980 drivers/infiniband/hw/mlx4/qp.c mlx4_ib_db_unmap_user(context, &qp->db); context 999 drivers/infiniband/hw/mlx4/qp.c struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context( context 1277 drivers/infiniband/hw/mlx4/qp.c mlx4_ib_db_unmap_user(context, &qp->db); context 1951 drivers/infiniband/hw/mlx4/qp.c struct mlx4_qp_context *context) context 1958 drivers/infiniband/hw/mlx4/qp.c context->pri_path.sched_queue = MLX4_IB_DEFAULT_SCHED_QUEUE | ((qp->port - 1) << 6); context 1965 drivers/infiniband/hw/mlx4/qp.c context->pri_path.grh_mylmc = 0x80 | (u8) smac_index; context 2110 drivers/infiniband/hw/mlx4/qp.c static void fill_qp_rss_context(struct mlx4_qp_context *context, context 2115 drivers/infiniband/hw/mlx4/qp.c rss_context = (void *)context + offsetof(struct mlx4_qp_context, context 2147 drivers/infiniband/hw/mlx4/qp.c struct mlx4_qp_context *context; context 2182 drivers/infiniband/hw/mlx4/qp.c context = kzalloc(sizeof *context, GFP_KERNEL); context 2183 drivers/infiniband/hw/mlx4/qp.c if (!context) context 2186 drivers/infiniband/hw/mlx4/qp.c context->flags = cpu_to_be32((to_mlx4_state(new_state) << 28) | context 2190 drivers/infiniband/hw/mlx4/qp.c context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11); context 2195 drivers/infiniband/hw/mlx4/qp.c context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11); context 2198 drivers/infiniband/hw/mlx4/qp.c context->flags |= cpu_to_be32(MLX4_QP_PM_REARM << 11); context 2201 drivers/infiniband/hw/mlx4/qp.c context->flags |= cpu_to_be32(MLX4_QP_PM_ARMED << 11); context 2207 drivers/infiniband/hw/mlx4/qp.c context->param3 |= cpu_to_be32(1 << 25); context 2210 drivers/infiniband/hw/mlx4/qp.c context->param3 |= cpu_to_be32(1 << 29); context 2213 drivers/infiniband/hw/mlx4/qp.c context->mtu_msgmax = (IB_MTU_4096 << 5) | 11; context 2215 drivers/infiniband/hw/mlx4/qp.c context->mtu_msgmax = (MLX4_RAW_QP_MTU << 5) | MLX4_RAW_QP_MSGMAX; context 2218 drivers/infiniband/hw/mlx4/qp.c context->mtu_msgmax = (IB_MTU_4096 << 5) | context 2221 drivers/infiniband/hw/mlx4/qp.c context->mtu_msgmax = (IB_MTU_4096 << 5) | 13; context 2228 drivers/infiniband/hw/mlx4/qp.c context->mtu_msgmax = (attr->path_mtu << 5) | context 2234 drivers/infiniband/hw/mlx4/qp.c context->rq_size_stride = ilog2(qp->rq.wqe_cnt) << 3; context 2235 drivers/infiniband/hw/mlx4/qp.c context->rq_size_stride |= qp->rq.wqe_shift - 4; context 2239 drivers/infiniband/hw/mlx4/qp.c context->sq_size_stride = ilog2(qp->sq.wqe_cnt) << 3; context 2240 drivers/infiniband/hw/mlx4/qp.c context->sq_size_stride |= qp->sq.wqe_shift - 4; context 2246 drivers/infiniband/hw/mlx4/qp.c context->sq_size_stride |= !!qp->sq_no_prefetch << 7; context 2247 drivers/infiniband/hw/mlx4/qp.c context->xrcd = cpu_to_be32((u32) qp->xrcdn); context 2249 drivers/infiniband/hw/mlx4/qp.c context->param3 |= cpu_to_be32(1 << 30); context 2253 drivers/infiniband/hw/mlx4/qp.c context->usr_page = cpu_to_be32( context 2256 drivers/infiniband/hw/mlx4/qp.c context->usr_page = cpu_to_be32( context 2260 drivers/infiniband/hw/mlx4/qp.c context->remote_qpn = cpu_to_be32(attr->dest_qp_num); context 2265 drivers/infiniband/hw/mlx4/qp.c mlx4_set_sched(&context->pri_path, attr->port_num); context 2281 drivers/infiniband/hw/mlx4/qp.c context->pri_path.counter_index = counter_index; context 2284 drivers/infiniband/hw/mlx4/qp.c context->pri_path.fl |= context 2286 drivers/infiniband/hw/mlx4/qp.c context->pri_path.vlan_control |= context 2290 drivers/infiniband/hw/mlx4/qp.c context->pri_path.counter_index = context 2303 drivers/infiniband/hw/mlx4/qp.c context->rlkey_roce_mode |= (qpc_roce_mode << 6); context 2309 drivers/infiniband/hw/mlx4/qp.c context->pri_path.disable_pkey_check = 0x40; context 2310 drivers/infiniband/hw/mlx4/qp.c context->pri_path.pkey_index = attr->pkey_index; context 2331 drivers/infiniband/hw/mlx4/qp.c if (mlx4_set_path(dev, attr, attr_mask, qp, &context->pri_path, context 2346 drivers/infiniband/hw/mlx4/qp.c context->rlkey_roce_mode |= (qpc_roce_mode << 6); context 2352 drivers/infiniband/hw/mlx4/qp.c context->pri_path.ackto |= attr->timeout << 3; context 2366 drivers/infiniband/hw/mlx4/qp.c &context->alt_path, context 2370 drivers/infiniband/hw/mlx4/qp.c context->alt_path.pkey_index = attr->alt_pkey_index; context 2371 drivers/infiniband/hw/mlx4/qp.c context->alt_path.ackto = attr->alt_timeout << 3; context 2375 drivers/infiniband/hw/mlx4/qp.c context->pd = cpu_to_be32(pd->pdn); context 2378 drivers/infiniband/hw/mlx4/qp.c context->params1 = cpu_to_be32(MLX4_IB_ACK_REQ_FREQ << 28); context 2384 drivers/infiniband/hw/mlx4/qp.c context->cqn_send = cpu_to_be32(send_cq->mcq.cqn); context 2385 drivers/infiniband/hw/mlx4/qp.c context->cqn_recv = cpu_to_be32(recv_cq->mcq.cqn); context 2389 drivers/infiniband/hw/mlx4/qp.c context->params1 |= cpu_to_be32(1 << 11); context 2392 drivers/infiniband/hw/mlx4/qp.c context->params1 |= cpu_to_be32(attr->rnr_retry << 13); context 2397 drivers/infiniband/hw/mlx4/qp.c context->params1 |= cpu_to_be32(attr->retry_cnt << 16); context 2403 drivers/infiniband/hw/mlx4/qp.c context->params1 |= context 2409 drivers/infiniband/hw/mlx4/qp.c context->next_send_psn = cpu_to_be32(attr->sq_psn); context 2413 drivers/infiniband/hw/mlx4/qp.c context->params2 |= context 2419 drivers/infiniband/hw/mlx4/qp.c context->params2 |= to_mlx4_access_flags(qp, attr, attr_mask); context 2424 drivers/infiniband/hw/mlx4/qp.c context->params2 |= cpu_to_be32(MLX4_QP_BIT_RIC); context 2427 drivers/infiniband/hw/mlx4/qp.c context->rnr_nextrecvpsn |= cpu_to_be32(attr->min_rnr_timer << 24); context 2431 drivers/infiniband/hw/mlx4/qp.c context->rnr_nextrecvpsn |= cpu_to_be32(attr->rq_psn); context 2437 drivers/infiniband/hw/mlx4/qp.c context->qkey = cpu_to_be32(IB_QP_SET_QKEY); context 2449 drivers/infiniband/hw/mlx4/qp.c context->qkey = cpu_to_be32(attr->qkey); context 2455 drivers/infiniband/hw/mlx4/qp.c context->srqn = cpu_to_be32(1 << 24 | context 2461 drivers/infiniband/hw/mlx4/qp.c context->db_rec_addr = cpu_to_be64(qp->db.dma); context 2467 drivers/infiniband/hw/mlx4/qp.c context->pri_path.sched_queue = (qp->port - 1) << 6; context 2471 drivers/infiniband/hw/mlx4/qp.c context->pri_path.sched_queue |= MLX4_IB_DEFAULT_QP0_SCHED_QUEUE; context 2473 drivers/infiniband/hw/mlx4/qp.c context->pri_path.fl = 0x80; context 2476 drivers/infiniband/hw/mlx4/qp.c context->pri_path.fl = 0x80; context 2477 drivers/infiniband/hw/mlx4/qp.c context->pri_path.sched_queue |= MLX4_IB_DEFAULT_SCHED_QUEUE; context 2483 drivers/infiniband/hw/mlx4/qp.c context->pri_path.feup = 1 << 7; /* don't fsm */ context 2488 drivers/infiniband/hw/mlx4/qp.c err = handle_eth_ud_smac_index(dev, qp, context); context 2500 drivers/infiniband/hw/mlx4/qp.c context->pri_path.ackto = (context->pri_path.ackto & 0xf8) | context 2505 drivers/infiniband/hw/mlx4/qp.c context->srqn = cpu_to_be32(7 << 28); context 2514 drivers/infiniband/hw/mlx4/qp.c context->pri_path.ackto = MLX4_IB_LINK_TYPE_ETH; context 2528 drivers/infiniband/hw/mlx4/qp.c context->rlkey_roce_mode |= (1 << 4); context 2554 drivers/infiniband/hw/mlx4/qp.c fill_qp_rss_context(context, qp); context 2555 drivers/infiniband/hw/mlx4/qp.c context->flags |= cpu_to_be32(1 << MLX4_RSS_QPC_FLAG_OFFSET); context 2559 drivers/infiniband/hw/mlx4/qp.c to_mlx4_state(new_state), context, optpar, context 2645 drivers/infiniband/hw/mlx4/qp.c kfree(context); context 4027 drivers/infiniband/hw/mlx4/qp.c struct mlx4_qp_context context; context 4041 drivers/infiniband/hw/mlx4/qp.c err = mlx4_qp_query(dev->dev, &qp->mqp, &context); context 4047 drivers/infiniband/hw/mlx4/qp.c mlx4_state = be32_to_cpu(context.flags) >> 28; context 4051 drivers/infiniband/hw/mlx4/qp.c qp_attr->path_mtu = context.mtu_msgmax >> 5; context 4053 drivers/infiniband/hw/mlx4/qp.c to_ib_mig_state((be32_to_cpu(context.flags) >> 11) & 0x3); context 4054 drivers/infiniband/hw/mlx4/qp.c qp_attr->qkey = be32_to_cpu(context.qkey); context 4055 drivers/infiniband/hw/mlx4/qp.c qp_attr->rq_psn = be32_to_cpu(context.rnr_nextrecvpsn) & 0xffffff; context 4056 drivers/infiniband/hw/mlx4/qp.c qp_attr->sq_psn = be32_to_cpu(context.next_send_psn) & 0xffffff; context 4057 drivers/infiniband/hw/mlx4/qp.c qp_attr->dest_qp_num = be32_to_cpu(context.remote_qpn) & 0xffffff; context 4059 drivers/infiniband/hw/mlx4/qp.c to_ib_qp_access_flags(be32_to_cpu(context.params2)); context 4062 drivers/infiniband/hw/mlx4/qp.c to_rdma_ah_attr(dev, &qp_attr->ah_attr, &context.pri_path); context 4063 drivers/infiniband/hw/mlx4/qp.c to_rdma_ah_attr(dev, &qp_attr->alt_ah_attr, &context.alt_path); context 4064 drivers/infiniband/hw/mlx4/qp.c qp_attr->alt_pkey_index = context.alt_path.pkey_index & 0x7f; context 4069 drivers/infiniband/hw/mlx4/qp.c qp_attr->pkey_index = context.pri_path.pkey_index & 0x7f; context 4073 drivers/infiniband/hw/mlx4/qp.c qp_attr->port_num = context.pri_path.sched_queue & 0x40 ? 2 : 1; context 4078 drivers/infiniband/hw/mlx4/qp.c qp_attr->max_rd_atomic = 1 << ((be32_to_cpu(context.params1) >> 21) & 0x7); context 4081 drivers/infiniband/hw/mlx4/qp.c 1 << ((be32_to_cpu(context.params2) >> 21) & 0x7); context 4083 drivers/infiniband/hw/mlx4/qp.c (be32_to_cpu(context.rnr_nextrecvpsn) >> 24) & 0x1f; context 4084 drivers/infiniband/hw/mlx4/qp.c qp_attr->timeout = context.pri_path.ackto >> 3; context 4085 drivers/infiniband/hw/mlx4/qp.c qp_attr->retry_cnt = (be32_to_cpu(context.params1) >> 16) & 0x7; context 4086 drivers/infiniband/hw/mlx4/qp.c qp_attr->rnr_retry = (be32_to_cpu(context.params1) >> 13) & 0x7; context 4087 drivers/infiniband/hw/mlx4/qp.c qp_attr->alt_timeout = context.alt_path.ackto >> 3; context 716 drivers/infiniband/hw/mlx5/cq.c struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context( context 742 drivers/infiniband/hw/mlx5/cq.c err = mlx5_ib_db_map_user(context, udata, ucmd.db_addr, &cq->db); context 766 drivers/infiniband/hw/mlx5/cq.c *index = context->bfregi.sys_pages[0]; context 808 drivers/infiniband/hw/mlx5/cq.c MLX5_SET(create_cq_in, *cqb, uid, context->devx_uid); context 815 drivers/infiniband/hw/mlx5/cq.c mlx5_ib_db_unmap_user(context, &cq->db); context 824 drivers/infiniband/hw/mlx5/cq.c struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context( context 827 drivers/infiniband/hw/mlx5/cq.c mlx5_ib_db_unmap_user(context, &cq->db); context 1660 drivers/infiniband/hw/mlx5/devx.c static void devx_query_callback(int status, struct mlx5_async_work *context) context 1663 drivers/infiniband/hw/mlx5/devx.c container_of(context, struct devx_async_data, cb_work); context 46 drivers/infiniband/hw/mlx5/doorbell.c int mlx5_ib_db_map_user(struct mlx5_ib_ucontext *context, context 53 drivers/infiniband/hw/mlx5/doorbell.c mutex_lock(&context->db_page_mutex); context 55 drivers/infiniband/hw/mlx5/doorbell.c list_for_each_entry(page, &context->db_page_list, list) context 74 drivers/infiniband/hw/mlx5/doorbell.c list_add(&page->list, &context->db_page_list); context 82 drivers/infiniband/hw/mlx5/doorbell.c mutex_unlock(&context->db_page_mutex); context 87 drivers/infiniband/hw/mlx5/doorbell.c void mlx5_ib_db_unmap_user(struct mlx5_ib_ucontext *context, struct mlx5_db *db) context 89 drivers/infiniband/hw/mlx5/doorbell.c mutex_lock(&context->db_page_mutex); context 97 drivers/infiniband/hw/mlx5/doorbell.c mutex_unlock(&context->db_page_mutex); context 619 drivers/infiniband/hw/mlx5/main.c __always_unused void **context) context 626 drivers/infiniband/hw/mlx5/main.c __always_unused void **context) context 1646 drivers/infiniband/hw/mlx5/main.c static int allocate_uars(struct mlx5_ib_dev *dev, struct mlx5_ib_ucontext *context) context 1652 drivers/infiniband/hw/mlx5/main.c bfregi = &context->bfregi; context 1675 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context) context 1680 drivers/infiniband/hw/mlx5/main.c bfregi = &context->bfregi; context 1773 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context = to_mucontext(uctx); context 1839 drivers/infiniband/hw/mlx5/main.c bfregi = &context->bfregi; context 1863 drivers/infiniband/hw/mlx5/main.c err = allocate_uars(dev, context); context 1871 drivers/infiniband/hw/mlx5/main.c context->devx_uid = err; context 1874 drivers/infiniband/hw/mlx5/main.c err = mlx5_ib_alloc_transport_domain(dev, &context->tdn, context 1875 drivers/infiniband/hw/mlx5/main.c context->devx_uid); context 1885 drivers/infiniband/hw/mlx5/main.c INIT_LIST_HEAD(&context->db_page_list); context 1886 drivers/infiniband/hw/mlx5/main.c mutex_init(&context->db_page_mutex); context 1956 drivers/infiniband/hw/mlx5/main.c context->cqe_version = resp.cqe_version; context 1957 drivers/infiniband/hw/mlx5/main.c context->lib_caps = req.lib_caps; context 1958 drivers/infiniband/hw/mlx5/main.c print_lib_caps(dev, context->lib_caps); context 1963 drivers/infiniband/hw/mlx5/main.c atomic_set(&context->tx_port_affinity, context 1971 drivers/infiniband/hw/mlx5/main.c mlx5_ib_dealloc_transport_domain(dev, context->tdn, context->devx_uid); context 1974 drivers/infiniband/hw/mlx5/main.c mlx5_ib_devx_destroy(dev, context->devx_uid); context 1977 drivers/infiniband/hw/mlx5/main.c deallocate_uars(dev, context); context 1991 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context = to_mucontext(ibcontext); context 1995 drivers/infiniband/hw/mlx5/main.c bfregi = &context->bfregi; context 1996 drivers/infiniband/hw/mlx5/main.c mlx5_ib_dealloc_transport_domain(dev, context->tdn, context->devx_uid); context 1998 drivers/infiniband/hw/mlx5/main.c if (context->devx_uid) context 1999 drivers/infiniband/hw/mlx5/main.c mlx5_ib_devx_destroy(dev, context->devx_uid); context 2001 drivers/infiniband/hw/mlx5/main.c deallocate_uars(dev, context); context 2060 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context) context 2082 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context) context 2084 drivers/infiniband/hw/mlx5/main.c struct mlx5_bfreg_info *bfregi = &context->bfregi; context 2167 drivers/infiniband/hw/mlx5/main.c err = rdma_user_mmap_io(&context->ibucontext, vma, pfn, PAGE_SIZE, context 2192 drivers/infiniband/hw/mlx5/main.c static int dm_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 2194 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *mctx = to_mucontext(context); context 2195 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_dev *dev = to_mdev(context->device); context 2209 drivers/infiniband/hw/mlx5/main.c return rdma_user_mmap_io(context, vma, pfn, map_size, context 2215 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context = to_mucontext(ibcontext); context 2226 drivers/infiniband/hw/mlx5/main.c return uar_mmap(dev, command, vma, context); context 2246 drivers/infiniband/hw/mlx5/main.c return rdma_user_mmap_io(&context->ibucontext, vma, pfn, context 2250 drivers/infiniband/hw/mlx5/main.c return mlx5_ib_mmap_clock_info_page(dev, vma, context); context 2365 drivers/infiniband/hw/mlx5/main.c struct ib_ucontext *context, context 2394 drivers/infiniband/hw/mlx5/main.c err = handle_alloc_dm_memic(context, dm, context 2399 drivers/infiniband/hw/mlx5/main.c err = handle_alloc_dm_sw_icm(context, dm, context 2404 drivers/infiniband/hw/mlx5/main.c err = handle_alloc_dm_sw_icm(context, dm, context 2476 drivers/infiniband/hw/mlx5/main.c struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context( context 2479 drivers/infiniband/hw/mlx5/main.c uid = context ? context->devx_uid : 0; context 6935 drivers/infiniband/hw/mlx5/main.c static void mlx5_ib_remove(struct mlx5_core_dev *mdev, void *context) context 6940 drivers/infiniband/hw/mlx5/main.c if (MLX5_ESWITCH_MANAGER(mdev) && context == mdev) { context 6946 drivers/infiniband/hw/mlx5/main.c mpi = context; context 6956 drivers/infiniband/hw/mlx5/main.c dev = context; context 1022 drivers/infiniband/hw/mlx5/mlx5_ib.h struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context( context 1025 drivers/infiniband/hw/mlx5/mlx5_ib.h return to_mdev(context->ibucontext.device); context 1099 drivers/infiniband/hw/mlx5/mlx5_ib.h int mlx5_ib_db_map_user(struct mlx5_ib_ucontext *context, context 1102 drivers/infiniband/hw/mlx5/mlx5_ib.h void mlx5_ib_db_unmap_user(struct mlx5_ib_ucontext *context, struct mlx5_db *db); context 1241 drivers/infiniband/hw/mlx5/mlx5_ib.h struct ib_ucontext *context, context 87 drivers/infiniband/hw/mlx5/mr.c static void reg_mr_callback(int status, struct mlx5_async_work *context) context 90 drivers/infiniband/hw/mlx5/mr.c container_of(context, struct mlx5_ib_mr, cb_work); context 808 drivers/infiniband/hw/mlx5/mr.c struct mlx5_ib_umr_context *context = context 811 drivers/infiniband/hw/mlx5/mr.c context->status = wc->status; context 812 drivers/infiniband/hw/mlx5/mr.c complete(&context->done); context 815 drivers/infiniband/hw/mlx5/mr.c static inline void mlx5_ib_init_umr_context(struct mlx5_ib_umr_context *context) context 817 drivers/infiniband/hw/mlx5/mr.c context->cqe.done = mlx5_ib_umr_done; context 818 drivers/infiniband/hw/mlx5/mr.c context->status = -1; context 819 drivers/infiniband/hw/mlx5/mr.c init_completion(&context->done); context 781 drivers/infiniband/hw/mlx5/qp.c struct mlx5_ib_ucontext *context = context 790 drivers/infiniband/hw/mlx5/qp.c mlx5_ib_db_unmap_user(context, &rwq->db); context 862 drivers/infiniband/hw/mlx5/qp.c struct mlx5_ib_ucontext *context; context 882 drivers/infiniband/hw/mlx5/qp.c context = rdma_udata_to_drv_context(udata, struct mlx5_ib_ucontext, context 885 drivers/infiniband/hw/mlx5/qp.c uar_index = bfregn_to_uar_index(dev, &context->bfregi, context 899 drivers/infiniband/hw/mlx5/qp.c bfregn = alloc_bfreg(dev, &context->bfregi); context 906 drivers/infiniband/hw/mlx5/qp.c uar_index = bfregn_to_uar_index(dev, &context->bfregi, bfregn, context 950 drivers/infiniband/hw/mlx5/qp.c resp->bfreg_index = adjust_bfregn(dev, &context->bfregi, bfregn); context 955 drivers/infiniband/hw/mlx5/qp.c err = mlx5_ib_db_map_user(context, udata, ucmd.db_addr, &qp->db); context 971 drivers/infiniband/hw/mlx5/qp.c mlx5_ib_db_unmap_user(context, &qp->db); context 981 drivers/infiniband/hw/mlx5/qp.c mlx5_ib_free_bfreg(dev, &context->bfregi, bfregn); context 989 drivers/infiniband/hw/mlx5/qp.c struct mlx5_ib_ucontext *context = context 995 drivers/infiniband/hw/mlx5/qp.c mlx5_ib_db_unmap_user(context, &qp->db); context 1003 drivers/infiniband/hw/mlx5/qp.c mlx5_ib_free_bfreg(dev, &context->bfregi, qp->bfregn); context 3390 drivers/infiniband/hw/mlx5/qp.c struct mlx5_qp_context context = {}; context 3400 drivers/infiniband/hw/mlx5/qp.c context.qp_counter_set_usr_page &= cpu_to_be32(0xffffff); context 3401 drivers/infiniband/hw/mlx5/qp.c context.qp_counter_set_usr_page |= cpu_to_be32(set_id << 24); context 3405 drivers/infiniband/hw/mlx5/qp.c &context, &base->mqp); context 3456 drivers/infiniband/hw/mlx5/qp.c struct mlx5_qp_context *context; context 3471 drivers/infiniband/hw/mlx5/qp.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 3472 drivers/infiniband/hw/mlx5/qp.c if (!context) context 3476 drivers/infiniband/hw/mlx5/qp.c context->flags = cpu_to_be32(mlx5_st << 16); context 3479 drivers/infiniband/hw/mlx5/qp.c context->flags |= cpu_to_be32(MLX5_QP_PM_MIGRATED << 11); context 3483 drivers/infiniband/hw/mlx5/qp.c context->flags |= cpu_to_be32(MLX5_QP_PM_MIGRATED << 11); context 3486 drivers/infiniband/hw/mlx5/qp.c context->flags |= cpu_to_be32(MLX5_QP_PM_REARM << 11); context 3489 drivers/infiniband/hw/mlx5/qp.c context->flags |= cpu_to_be32(MLX5_QP_PM_ARMED << 11); context 3506 drivers/infiniband/hw/mlx5/qp.c context->flags |= cpu_to_be32(tx_affinity << 24); context 3512 drivers/infiniband/hw/mlx5/qp.c context->mtu_msgmax = (IB_MTU_256 << 5) | 8; context 3516 drivers/infiniband/hw/mlx5/qp.c context->mtu_msgmax = (IB_MTU_4096 << 5) | 12; context 3524 drivers/infiniband/hw/mlx5/qp.c context->mtu_msgmax = (attr->path_mtu << 5) | context 3529 drivers/infiniband/hw/mlx5/qp.c context->log_pg_sz_remote_qpn = cpu_to_be32(attr->dest_qp_num); context 3532 drivers/infiniband/hw/mlx5/qp.c context->pri_path.pkey_index = cpu_to_be16(attr->pkey_index); context 3537 drivers/infiniband/hw/mlx5/qp.c context->pri_path.port = qp->port; context 3540 drivers/infiniband/hw/mlx5/qp.c context->pri_path.port = attr->port_num; context 3543 drivers/infiniband/hw/mlx5/qp.c err = mlx5_set_path(dev, qp, &attr->ah_attr, &context->pri_path, context 3551 drivers/infiniband/hw/mlx5/qp.c context->pri_path.ackto_lt |= attr->timeout << 3; context 3555 drivers/infiniband/hw/mlx5/qp.c &context->alt_path, context 3566 drivers/infiniband/hw/mlx5/qp.c context->flags_pd = cpu_to_be32(pd ? pd->pdn : to_mpd(dev->devr.p0)->pdn); context 3567 drivers/infiniband/hw/mlx5/qp.c context->cqn_send = send_cq ? cpu_to_be32(send_cq->mcq.cqn) : 0; context 3568 drivers/infiniband/hw/mlx5/qp.c context->cqn_recv = recv_cq ? cpu_to_be32(recv_cq->mcq.cqn) : 0; context 3569 drivers/infiniband/hw/mlx5/qp.c context->params1 = cpu_to_be32(MLX5_IB_ACK_REQ_FREQ << 28); context 3572 drivers/infiniband/hw/mlx5/qp.c context->params1 |= cpu_to_be32(attr->rnr_retry << 13); context 3575 drivers/infiniband/hw/mlx5/qp.c context->params1 |= cpu_to_be32(attr->retry_cnt << 16); context 3579 drivers/infiniband/hw/mlx5/qp.c context->params1 |= context 3584 drivers/infiniband/hw/mlx5/qp.c context->next_send_psn = cpu_to_be32(attr->sq_psn); context 3588 drivers/infiniband/hw/mlx5/qp.c context->params2 |= context 3599 drivers/infiniband/hw/mlx5/qp.c context->params2 |= access_flags; context 3603 drivers/infiniband/hw/mlx5/qp.c context->rnr_nextrecvpsn |= cpu_to_be32(attr->min_rnr_timer << 24); context 3606 drivers/infiniband/hw/mlx5/qp.c context->rnr_nextrecvpsn |= cpu_to_be32(attr->rq_psn); context 3609 drivers/infiniband/hw/mlx5/qp.c context->qkey = cpu_to_be32(attr->qkey); context 3612 drivers/infiniband/hw/mlx5/qp.c context->db_rec_addr = cpu_to_be64(qp->db.dma); context 3626 drivers/infiniband/hw/mlx5/qp.c context->qp_counter_set_usr_page |= context 3631 drivers/infiniband/hw/mlx5/qp.c context->sq_crq_size |= cpu_to_be16(1 << 4); context 3634 drivers/infiniband/hw/mlx5/qp.c context->deth_sqpn = cpu_to_be32(1); context 3692 drivers/infiniband/hw/mlx5/qp.c err = mlx5_core_qp_modify(dev->mdev, op, optpar, context, context 3740 drivers/infiniband/hw/mlx5/qp.c kfree(context); context 5631 drivers/infiniband/hw/mlx5/qp.c struct mlx5_qp_context *context; context 5646 drivers/infiniband/hw/mlx5/qp.c context = (struct mlx5_qp_context *)MLX5_ADDR_OF(query_qp_out, outb, qpc); context 5648 drivers/infiniband/hw/mlx5/qp.c mlx5_state = be32_to_cpu(context->flags) >> 28; context 5651 drivers/infiniband/hw/mlx5/qp.c qp_attr->path_mtu = context->mtu_msgmax >> 5; context 5653 drivers/infiniband/hw/mlx5/qp.c to_ib_mig_state((be32_to_cpu(context->flags) >> 11) & 0x3); context 5654 drivers/infiniband/hw/mlx5/qp.c qp_attr->qkey = be32_to_cpu(context->qkey); context 5655 drivers/infiniband/hw/mlx5/qp.c qp_attr->rq_psn = be32_to_cpu(context->rnr_nextrecvpsn) & 0xffffff; context 5656 drivers/infiniband/hw/mlx5/qp.c qp_attr->sq_psn = be32_to_cpu(context->next_send_psn) & 0xffffff; context 5657 drivers/infiniband/hw/mlx5/qp.c qp_attr->dest_qp_num = be32_to_cpu(context->log_pg_sz_remote_qpn) & 0xffffff; context 5659 drivers/infiniband/hw/mlx5/qp.c to_ib_qp_access_flags(be32_to_cpu(context->params2)); context 5662 drivers/infiniband/hw/mlx5/qp.c to_rdma_ah_attr(dev, &qp_attr->ah_attr, &context->pri_path); context 5663 drivers/infiniband/hw/mlx5/qp.c to_rdma_ah_attr(dev, &qp_attr->alt_ah_attr, &context->alt_path); context 5665 drivers/infiniband/hw/mlx5/qp.c be16_to_cpu(context->alt_path.pkey_index); context 5670 drivers/infiniband/hw/mlx5/qp.c qp_attr->pkey_index = be16_to_cpu(context->pri_path.pkey_index); context 5671 drivers/infiniband/hw/mlx5/qp.c qp_attr->port_num = context->pri_path.port; context 5676 drivers/infiniband/hw/mlx5/qp.c qp_attr->max_rd_atomic = 1 << ((be32_to_cpu(context->params1) >> 21) & 0x7); context 5679 drivers/infiniband/hw/mlx5/qp.c 1 << ((be32_to_cpu(context->params2) >> 21) & 0x7); context 5681 drivers/infiniband/hw/mlx5/qp.c (be32_to_cpu(context->rnr_nextrecvpsn) >> 24) & 0x1f; context 5682 drivers/infiniband/hw/mlx5/qp.c qp_attr->timeout = context->pri_path.ackto_lt >> 3; context 5683 drivers/infiniband/hw/mlx5/qp.c qp_attr->retry_cnt = (be32_to_cpu(context->params1) >> 16) & 0x7; context 5684 drivers/infiniband/hw/mlx5/qp.c qp_attr->rnr_retry = (be32_to_cpu(context->params1) >> 13) & 0x7; context 5685 drivers/infiniband/hw/mlx5/qp.c qp_attr->alt_timeout = context->alt_path.ackto_lt >> 3; context 392 drivers/infiniband/hw/mthca/mthca_cmd.c struct mthca_cmd_context *context = context 393 drivers/infiniband/hw/mthca/mthca_cmd.c &dev->cmd.context[token & dev->cmd.token_mask]; context 396 drivers/infiniband/hw/mthca/mthca_cmd.c if (token != context->token) context 399 drivers/infiniband/hw/mthca/mthca_cmd.c context->result = 0; context 400 drivers/infiniband/hw/mthca/mthca_cmd.c context->status = status; context 401 drivers/infiniband/hw/mthca/mthca_cmd.c context->out_param = out_param; context 403 drivers/infiniband/hw/mthca/mthca_cmd.c complete(&context->done); context 416 drivers/infiniband/hw/mthca/mthca_cmd.c struct mthca_cmd_context *context; context 422 drivers/infiniband/hw/mthca/mthca_cmd.c context = &dev->cmd.context[dev->cmd.free_head]; context 423 drivers/infiniband/hw/mthca/mthca_cmd.c context->token += dev->cmd.token_mask + 1; context 424 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.free_head = context->next; context 427 drivers/infiniband/hw/mthca/mthca_cmd.c init_completion(&context->done); context 432 drivers/infiniband/hw/mthca/mthca_cmd.c op, context->token, 1); context 436 drivers/infiniband/hw/mthca/mthca_cmd.c if (!wait_for_completion_timeout(&context->done, timeout)) { context 441 drivers/infiniband/hw/mthca/mthca_cmd.c err = context->result; context 445 drivers/infiniband/hw/mthca/mthca_cmd.c if (context->status) { context 447 drivers/infiniband/hw/mthca/mthca_cmd.c op, context->status); context 448 drivers/infiniband/hw/mthca/mthca_cmd.c err = mthca_status_to_errno(context->status); context 452 drivers/infiniband/hw/mthca/mthca_cmd.c *out_param = context->out_param; context 460 drivers/infiniband/hw/mthca/mthca_cmd.c context->next = dev->cmd.free_head; context 461 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.free_head = context - dev->cmd.context; context 562 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.context = kmalloc_array(dev->cmd.max_cmds, context 565 drivers/infiniband/hw/mthca/mthca_cmd.c if (!dev->cmd.context) context 569 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.context[i].token = i; context 570 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.context[i].next = i + 1; context 573 drivers/infiniband/hw/mthca/mthca_cmd.c dev->cmd.context[dev->cmd.max_cmds - 1].next = -1; context 604 drivers/infiniband/hw/mthca/mthca_cmd.c kfree(dev->cmd.context); context 128 drivers/infiniband/hw/mthca/mthca_dev.h struct mthca_cmd_context *context; context 309 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = to_mucontext(uctx); context 321 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_uar_alloc(to_mdev(ibdev), &context->uar); context 325 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab = mthca_init_user_db_tab(to_mdev(ibdev)); context 326 drivers/infiniband/hw/mthca/mthca_provider.c if (IS_ERR(context->db_tab)) { context 327 drivers/infiniband/hw/mthca/mthca_provider.c err = PTR_ERR(context->db_tab); context 328 drivers/infiniband/hw/mthca/mthca_provider.c mthca_uar_free(to_mdev(ibdev), &context->uar); context 333 drivers/infiniband/hw/mthca/mthca_provider.c mthca_cleanup_user_db_tab(to_mdev(ibdev), &context->uar, context->db_tab); context 334 drivers/infiniband/hw/mthca/mthca_provider.c mthca_uar_free(to_mdev(ibdev), &context->uar); context 338 drivers/infiniband/hw/mthca/mthca_provider.c context->reg_mr_warned = 0; context 343 drivers/infiniband/hw/mthca/mthca_provider.c static void mthca_dealloc_ucontext(struct ib_ucontext *context) context 345 drivers/infiniband/hw/mthca/mthca_provider.c mthca_cleanup_user_db_tab(to_mdev(context->device), &to_mucontext(context)->uar, context 346 drivers/infiniband/hw/mthca/mthca_provider.c to_mucontext(context)->db_tab); context 347 drivers/infiniband/hw/mthca/mthca_provider.c mthca_uar_free(to_mdev(context->device), &to_mucontext(context)->uar); context 350 drivers/infiniband/hw/mthca/mthca_provider.c static int mthca_mmap_uar(struct ib_ucontext *context, context 359 drivers/infiniband/hw/mthca/mthca_provider.c to_mucontext(context)->uar.pfn, context 411 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = rdma_udata_to_drv_context( context 423 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_map_user_db(to_mdev(ibsrq->device), &context->uar, context 424 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.db_index, context 438 drivers/infiniband/hw/mthca/mthca_provider.c mthca_unmap_user_db(to_mdev(ibsrq->device), &context->uar, context 439 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.db_index); context 444 drivers/infiniband/hw/mthca/mthca_provider.c if (context && ib_copy_to_udata(udata, &srq->srqn, sizeof(__u32))) { context 455 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = context 461 drivers/infiniband/hw/mthca/mthca_provider.c mthca_unmap_user_db(to_mdev(srq->device), &context->uar, context 462 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, to_msrq(srq)->db_index); context 472 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = rdma_udata_to_drv_context( context 496 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_map_user_db(to_mdev(pd->device), &context->uar, context 497 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 504 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_map_user_db(to_mdev(pd->device), &context->uar, context 505 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 509 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 510 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 529 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 530 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 533 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 534 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 584 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = context 591 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 592 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 595 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 596 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 614 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = rdma_udata_to_drv_context( context 627 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_map_user_db(to_mdev(ibdev), &context->uar, context 628 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.set_db_index, context 633 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_map_user_db(to_mdev(ibdev), &context->uar, context 634 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.arm_db_index, context 651 drivers/infiniband/hw/mthca/mthca_provider.c err = mthca_init_cq(to_mdev(ibdev), nent, context, context 669 drivers/infiniband/hw/mthca/mthca_provider.c mthca_unmap_user_db(to_mdev(ibdev), &context->uar, context 670 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.arm_db_index); context 674 drivers/infiniband/hw/mthca/mthca_provider.c mthca_unmap_user_db(to_mdev(ibdev), &context->uar, context 675 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, ucmd.set_db_index); context 804 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = context 811 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 812 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 815 drivers/infiniband/hw/mthca/mthca_provider.c &context->uar, context 816 drivers/infiniband/hw/mthca/mthca_provider.c context->db_tab, context 859 drivers/infiniband/hw/mthca/mthca_provider.c struct mthca_ucontext *context = rdma_udata_to_drv_context( context 869 drivers/infiniband/hw/mthca/mthca_provider.c if (!context->reg_mr_warned) { context 874 drivers/infiniband/hw/mthca/mthca_provider.c ++context->reg_mr_warned; context 162 drivers/infiniband/hw/mthca/mthca_qp.c struct mthca_qp_context context; context 438 drivers/infiniband/hw/mthca/mthca_qp.c struct mthca_qp_context *context; context 461 drivers/infiniband/hw/mthca/mthca_qp.c context = &qp_param->context; context 462 drivers/infiniband/hw/mthca/mthca_qp.c mthca_state = be32_to_cpu(context->flags) >> 28; context 466 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->path_mtu = context->mtu_msgmax >> 5; context 468 drivers/infiniband/hw/mthca/mthca_qp.c to_ib_mig_state((be32_to_cpu(context->flags) >> 11) & 0x3); context 469 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->qkey = be32_to_cpu(context->qkey); context 470 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->rq_psn = be32_to_cpu(context->rnr_nextrecvpsn) & 0xffffff; context 471 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->sq_psn = be32_to_cpu(context->next_send_psn) & 0xffffff; context 472 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->dest_qp_num = be32_to_cpu(context->remote_qpn) & 0xffffff; context 474 drivers/infiniband/hw/mthca/mthca_qp.c to_ib_qp_access_flags(be32_to_cpu(context->params2)); context 477 drivers/infiniband/hw/mthca/mthca_qp.c to_rdma_ah_attr(dev, &qp_attr->ah_attr, &context->pri_path); context 478 drivers/infiniband/hw/mthca/mthca_qp.c to_rdma_ah_attr(dev, &qp_attr->alt_ah_attr, &context->alt_path); context 480 drivers/infiniband/hw/mthca/mthca_qp.c be32_to_cpu(context->alt_path.port_pkey) & 0x7f; context 485 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->pkey_index = be32_to_cpu(context->pri_path.port_pkey) & 0x7f; context 487 drivers/infiniband/hw/mthca/mthca_qp.c (be32_to_cpu(context->pri_path.port_pkey) >> 24) & 0x3; context 492 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->max_rd_atomic = 1 << ((be32_to_cpu(context->params1) >> 21) & 0x7); context 495 drivers/infiniband/hw/mthca/mthca_qp.c 1 << ((be32_to_cpu(context->params2) >> 21) & 0x7); context 497 drivers/infiniband/hw/mthca/mthca_qp.c (be32_to_cpu(context->rnr_nextrecvpsn) >> 24) & 0x1f; context 498 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->timeout = context->pri_path.ackto >> 3; context 499 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->retry_cnt = (be32_to_cpu(context->params1) >> 16) & 0x7; context 500 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->rnr_retry = context->pri_path.rnr_retry >> 5; context 501 drivers/infiniband/hw/mthca/mthca_qp.c qp_attr->alt_timeout = context->alt_path.ackto >> 3; context 564 drivers/infiniband/hw/mthca/mthca_qp.c struct mthca_ucontext *context = rdma_udata_to_drv_context( context 578 drivers/infiniband/hw/mthca/mthca_qp.c qp_context = &qp_param->context; context 627 drivers/infiniband/hw/mthca/mthca_qp.c qp_context->usr_page = cpu_to_be32(context->uar.index); context 100 drivers/infiniband/hw/mthca/mthca_srq.c struct mthca_tavor_srq_context *context, context 106 drivers/infiniband/hw/mthca/mthca_srq.c memset(context, 0, sizeof *context); context 108 drivers/infiniband/hw/mthca/mthca_srq.c context->wqe_base_ds = cpu_to_be64(1 << (srq->wqe_shift - 4)); context 109 drivers/infiniband/hw/mthca/mthca_srq.c context->state_pd = cpu_to_be32(pd->pd_num); context 110 drivers/infiniband/hw/mthca/mthca_srq.c context->lkey = cpu_to_be32(srq->mr.ibmr.lkey); context 113 drivers/infiniband/hw/mthca/mthca_srq.c context->uar = cpu_to_be32(ucontext->uar.index); context 115 drivers/infiniband/hw/mthca/mthca_srq.c context->uar = cpu_to_be32(dev->driver_uar.index); context 121 drivers/infiniband/hw/mthca/mthca_srq.c struct mthca_arbel_srq_context *context, context 128 drivers/infiniband/hw/mthca/mthca_srq.c memset(context, 0, sizeof *context); context 136 drivers/infiniband/hw/mthca/mthca_srq.c context->state_logsize_srqn = cpu_to_be32(logsize << 24 | srq->srqn); context 137 drivers/infiniband/hw/mthca/mthca_srq.c context->lkey = cpu_to_be32(srq->mr.ibmr.lkey); context 138 drivers/infiniband/hw/mthca/mthca_srq.c context->db_index = cpu_to_be32(srq->db_index); context 139 drivers/infiniband/hw/mthca/mthca_srq.c context->logstride_usrpage = cpu_to_be32((srq->wqe_shift - 4) << 29); context 141 drivers/infiniband/hw/mthca/mthca_srq.c context->logstride_usrpage |= cpu_to_be32(ucontext->uar.index); context 143 drivers/infiniband/hw/mthca/mthca_srq.c context->logstride_usrpage |= cpu_to_be32(dev->driver_uar.index); context 144 drivers/infiniband/hw/mthca/mthca_srq.c context->eq_pd = cpu_to_be32(MTHCA_EQ_ASYNC << 24 | pd->pd_num); context 545 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c int ocrdma_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 547 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c struct ocrdma_ucontext *ucontext = get_ocrdma_ucontext(context); context 548 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c struct ocrdma_dev *dev = get_ocrdma_dev(context->device); context 653 drivers/infiniband/hw/qedr/main.c static void qedr_unaffiliated_event(void *context, u8 event_code) context 658 drivers/infiniband/hw/qedr/main.c static void qedr_affiliated_event(void *context, u8 e_code, void *fw_handle) context 664 drivers/infiniband/hw/qedr/main.c struct qedr_dev *dev = (struct qedr_dev *)context; context 814 drivers/infiniband/hw/qedr/main.c events.context = dev; context 104 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_mpa_request(void *context, struct qed_iwarp_cm_event_params *params) context 106 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_listener *listener = (struct qedr_iw_listener *)context; context 139 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_issue_event(void *context, context 143 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 162 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_close_event(void *context, struct qed_iwarp_cm_event_params *params) context 164 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 167 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_issue_event(context, params, IW_CM_EVENT_CLOSE); context 173 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(void *context, context 177 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 245 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_disconnect_event(void *context, context 249 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 271 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_passive_complete(void *context, context 274 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 288 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_issue_event(context, params, IW_CM_EVENT_ESTABLISHED); context 291 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_close_event(context, params); context 295 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_active_complete(void *context, context 298 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 301 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_issue_event(context, params, IW_CM_EVENT_CONNECT_REPLY); context 308 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_mpa_reply(void *context, struct qed_iwarp_cm_event_params *params) context 310 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 320 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_event_handler(void *context, struct qed_iwarp_cm_event_params *params) context 322 drivers/infiniband/hw/qedr/qedr_iw_cm.c struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context; context 327 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_mpa_request(context, params); context 330 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_mpa_reply(context, params); context 333 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_passive_complete(context, params); context 336 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_active_complete(context, params); context 339 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_disconnect_event(context, params); context 342 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_close_event(context, params); context 345 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 349 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 353 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 357 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_ACCESS_ERR, context 361 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 365 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 369 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_ACCESS_ERR, context 373 drivers/infiniband/hw/qedr/qedr_iw_cm.c qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL, context 387 drivers/infiniband/hw/qedr/verbs.c int qedr_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 389 drivers/infiniband/hw/qedr/verbs.c struct qedr_ucontext *ucontext = get_qedr_ucontext(context); context 390 drivers/infiniband/hw/qedr/verbs.c struct qedr_dev *dev = get_qedr_dev(context->device); context 460 drivers/infiniband/hw/qedr/verbs.c struct qedr_ucontext *context = rdma_udata_to_drv_context( context 470 drivers/infiniband/hw/qedr/verbs.c pd->uctx = context; context 259 drivers/infiniband/hw/usnic/usnic_ib_sysfs.c static QPN_ATTR_RO(context); context 648 drivers/infiniband/hw/usnic/usnic_ib_verbs.c struct usnic_ib_ucontext *context = to_ucontext(uctx); context 652 drivers/infiniband/hw/usnic/usnic_ib_verbs.c INIT_LIST_HEAD(&context->qp_grp_list); context 654 drivers/infiniband/hw/usnic/usnic_ib_verbs.c list_add_tail(&context->link, &us_ibdev->ctx_list); context 662 drivers/infiniband/hw/usnic/usnic_ib_verbs.c struct usnic_ib_ucontext *context = to_uucontext(ibcontext); context 667 drivers/infiniband/hw/usnic/usnic_ib_verbs.c WARN_ON_ONCE(!list_empty(&context->qp_grp_list)); context 668 drivers/infiniband/hw/usnic/usnic_ib_verbs.c list_del(&context->link); context 672 drivers/infiniband/hw/usnic/usnic_ib_verbs.c int usnic_ib_mmap(struct ib_ucontext *context, context 675 drivers/infiniband/hw/usnic/usnic_ib_verbs.c struct usnic_ib_ucontext *uctx = to_ucontext(context); context 686 drivers/infiniband/hw/usnic/usnic_ib_verbs.c us_ibdev = to_usdev(context->device); context 70 drivers/infiniband/hw/usnic/usnic_ib_verbs.h int usnic_ib_mmap(struct ib_ucontext *context, context 117 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c struct pvrdma_ucontext *context = rdma_udata_to_drv_context( context 182 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c cmd->ctx_handle = context ? context->ctx_handle : 0; context 200 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c cq->uar = &context->uar; context 65 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c static int pvrdma_add_gid(const struct ib_gid_attr *attr, void **context); context 66 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c static int pvrdma_del_gid(const struct ib_gid_attr *attr, void **context); context 642 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c static int pvrdma_add_gid(const struct ib_gid_attr *attr, void **context) context 678 drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c static int pvrdma_del_gid(const struct ib_gid_attr *attr, void **context) context 317 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c struct pvrdma_ucontext *context = to_vucontext(uctx); context 328 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c context->dev = vdev; context 329 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c ret = pvrdma_uar_alloc(vdev, &context->uar); context 335 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c cmd->pfn = context->uar.pfn; context 337 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c cmd->pfn64 = context->uar.pfn; context 347 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c context->ctx_handle = resp->ctx_handle; context 353 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c pvrdma_uar_free(vdev, &context->uar); context 354 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c pvrdma_dealloc_ucontext(&context->ibucontext); context 361 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c pvrdma_uar_free(vdev, &context->uar); context 371 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c struct pvrdma_ucontext *context = to_vucontext(ibcontext); context 377 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c cmd->ctx_handle = context->ctx_handle; context 379 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c ret = pvrdma_cmd_post(context->dev, &req, NULL, 0); context 381 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c dev_warn(&context->dev->pdev->dev, context 385 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c pvrdma_uar_free(to_vdev(ibcontext->device), &context->uar); context 397 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c struct pvrdma_ucontext *context = to_vucontext(ibcontext); context 402 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c dev_dbg(&context->dev->pdev->dev, "create mmap region\n"); context 405 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c dev_warn(&context->dev->pdev->dev, context 413 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c if (io_remap_pfn_range(vma, start, context->uar.pfn, size, context 438 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c struct pvrdma_ucontext *context = rdma_udata_to_drv_context( context 446 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c cmd->ctx_handle = context ? context->ctx_handle : 0; context 398 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h int pvrdma_mmap(struct ib_ucontext *context, struct vm_area_struct *vma); context 400 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h void pvrdma_dealloc_ucontext(struct ib_ucontext *context); context 75 drivers/infiniband/sw/rdmavt/mmap.c struct rvt_dev_info *rdi = ib_to_rvt(ip->context->device); context 111 drivers/infiniband/sw/rdmavt/mmap.c int rvt_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 113 drivers/infiniband/sw/rdmavt/mmap.c struct rvt_dev_info *rdi = ib_to_rvt(context->device); context 128 drivers/infiniband/sw/rdmavt/mmap.c if (context != ip->context || (__u64)offset != ip->offset) context 182 drivers/infiniband/sw/rdmavt/mmap.c ip->context = context 184 drivers/infiniband/sw/rdmavt/mmap.c ->context; context 55 drivers/infiniband/sw/rdmavt/mmap.h int rvt_mmap(struct ib_ucontext *context, struct vm_area_struct *vma); context 307 drivers/infiniband/sw/rdmavt/vt.c static void rvt_dealloc_ucontext(struct ib_ucontext *context) context 86 drivers/infiniband/sw/rxe/rxe_loc.h struct ib_ucontext *context; context 98 drivers/infiniband/sw/rxe/rxe_loc.h int rxe_mmap(struct ib_ucontext *context, struct vm_area_struct *vma); context 49 drivers/infiniband/sw/rxe/rxe_mmap.c struct rxe_dev *rxe = to_rdev(ip->context->device); context 91 drivers/infiniband/sw/rxe/rxe_mmap.c int rxe_mmap(struct ib_ucontext *context, struct vm_area_struct *vma) context 93 drivers/infiniband/sw/rxe/rxe_mmap.c struct rxe_dev *rxe = to_rdev(context->device); context 106 drivers/infiniband/sw/rxe/rxe_mmap.c if (context != ip->context || (__u64)offset != ip->info.offset) context 170 drivers/infiniband/sw/rxe/rxe_mmap.c ip->context = context 172 drivers/infiniband/sw/rxe/rxe_mmap.c ->context; context 445 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct net_device *dev = cm_id->context; context 457 drivers/infiniband/ulp/ipoib/ipoib_cm.c cm_id->context = p; context 517 drivers/infiniband/ulp/ipoib/ipoib_cm.c p = cm_id->context; context 988 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct ipoib_cm_tx *p = cm_id->context; context 1256 drivers/infiniband/ulp/ipoib/ipoib_cm.c struct ipoib_cm_tx *tx = cm_id->context; context 370 drivers/infiniband/ulp/ipoib/ipoib_multicast.c struct ipoib_mcast *mcast = multicast->context; context 47 drivers/infiniband/ulp/iser/iser_verbs.c static void iser_qp_event_callback(struct ib_event *cause, void *context) context 642 drivers/infiniband/ulp/iser/iser_verbs.c iser_conn = (struct iser_conn *)cma_id->context; context 697 drivers/infiniband/ulp/iser/iser_verbs.c iser_conn = (struct iser_conn *)cma_id->context; context 743 drivers/infiniband/ulp/iser/iser_verbs.c struct iser_conn *iser_conn = (struct iser_conn *)cma_id->context; context 786 drivers/infiniband/ulp/iser/iser_verbs.c iser_conn = (struct iser_conn *)cma_id->context; context 809 drivers/infiniband/ulp/iser/iser_verbs.c struct iser_conn *iser_conn = (struct iser_conn *)cma_id->context; context 823 drivers/infiniband/ulp/iser/iser_verbs.c struct iser_conn *iser_conn = (struct iser_conn *)cma_id->context; context 840 drivers/infiniband/ulp/iser/iser_verbs.c iser_conn = (struct iser_conn *)cma_id->context; context 843 drivers/infiniband/ulp/iser/iser_verbs.c event->status, cma_id->context, cma_id); context 65 drivers/infiniband/ulp/isert/ib_isert.c isert_qp_event_callback(struct ib_event *e, void *context) context 67 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = context; context 495 drivers/infiniband/ulp/isert/ib_isert.c struct isert_np *isert_np = cma_id->context; context 510 drivers/infiniband/ulp/isert/ib_isert.c cma_id, cma_id->context); context 596 drivers/infiniband/ulp/isert/ib_isert.c struct isert_np *isert_np = cma_id->context; context 634 drivers/infiniband/ulp/isert/ib_isert.c struct isert_np *isert_np = isert_conn->cm_id->context; context 749 drivers/infiniband/ulp/isert/ib_isert.c struct isert_np *isert_np = cma_id->context; context 755 drivers/infiniband/ulp/isert/ib_isert.c event->status, cma_id, cma_id->context); context 758 drivers/infiniband/ulp/isert/ib_isert.c return isert_np_cma_handler(cma_id->context, event->event); context 992 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1103 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1844 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1895 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1910 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1932 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1951 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1969 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 1987 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2022 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2178 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2226 drivers/infiniband/ulp/isert/ib_isert.c ret = isert_rdma_rw_ctx_post(isert_cmd, conn->context, context 2262 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2317 drivers/infiniband/ulp/isert/ib_isert.c isert_dbg("id %p context %p\n", id, id->context); context 2412 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2493 drivers/infiniband/ulp/isert/ib_isert.c conn->context = isert_conn; context 2626 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 2644 drivers/infiniband/ulp/isert/ib_isert.c struct isert_conn *isert_conn = conn->context; context 635 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c port = mad_agent->context; context 274 drivers/infiniband/ulp/srp/ib_srp.c static void srp_qp_event(struct ib_event *event, void *context) context 2706 drivers/infiniband/ulp/srp/ib_srp.c struct srp_rdma_ch *ch = cm_id->context; context 2818 drivers/infiniband/ulp/srp/ib_srp.c struct srp_rdma_ch *ch = cm_id->context; context 475 drivers/infiniband/ulp/srpt/ib_srpt.c struct srpt_port *sport = (struct srpt_port *)mad_agent->context; context 2238 drivers/infiniband/ulp/srpt/ib_srpt.c ib_cm_id->context = ch; context 2242 drivers/infiniband/ulp/srpt/ib_srpt.c rdma_cm_id->context = ch; context 2474 drivers/infiniband/ulp/srpt/ib_srpt.c rdma_cm_id->context = NULL; context 2476 drivers/infiniband/ulp/srpt/ib_srpt.c ib_cm_id->context = NULL; context 2521 drivers/infiniband/ulp/srpt/ib_srpt.c return srpt_cm_req_recv(cm_id->context, cm_id, NULL, param->port, context 2629 drivers/infiniband/ulp/srpt/ib_srpt.c struct srpt_rdma_ch *ch = cm_id->context; context 2681 drivers/infiniband/ulp/srpt/ib_srpt.c struct srpt_rdma_ch *ch = cm_id->context; context 140 drivers/input/joystick/iforce/iforce-usb.c struct iforce_usb *iforce_usb = urb->context; context 174 drivers/input/joystick/iforce/iforce-usb.c struct iforce_usb *iforce_usb = urb->context; context 32 drivers/input/joystick/pxrc.c struct pxrc *pxrc = urb->context; context 908 drivers/input/joystick/xpad.c struct usb_xpad *xpad = urb->context; context 1042 drivers/input/joystick/xpad.c struct usb_xpad *xpad = urb->context; context 228 drivers/input/keyboard/adp5588-keys.c gpio_data->context); context 249 drivers/input/keyboard/adp5588-keys.c gpio_data->context); context 545 drivers/input/keyboard/adp5589-keys.c gpio_data->context); context 566 drivers/input/keyboard/adp5589-keys.c gpio_data->context); context 600 drivers/input/keyboard/applespi.c message->context = applespi; context 726 drivers/input/keyboard/applespi.c static void applespi_async_write_complete(void *context) context 728 drivers/input/keyboard/applespi.c struct applespi_data *applespi = context; context 1528 drivers/input/keyboard/applespi.c static void applespi_async_read_complete(void *context) context 1530 drivers/input/keyboard/applespi.c struct applespi_data *applespi = context; context 1548 drivers/input/keyboard/applespi.c static u32 applespi_notify(acpi_handle gpe_device, u32 gpe, void *context) context 1550 drivers/input/keyboard/applespi.c struct applespi_data *applespi = context; context 425 drivers/input/misc/ati_remote2.c struct ati_remote2 *ar2 = urb->context; context 454 drivers/input/misc/ati_remote2.c struct ati_remote2 *ar2 = urb->context; context 363 drivers/input/misc/cm109.c struct cm109_dev *dev = urb->context; context 434 drivers/input/misc/cm109.c struct cm109_dev *dev = urb->context; context 913 drivers/input/misc/ims-pcu.c void *context) context 915 drivers/input/misc/ims-pcu.c struct ims_pcu *pcu = context; context 1476 drivers/input/misc/ims-pcu.c struct ims_pcu *pcu = urb->context; context 374 drivers/input/misc/keyspan_remote.c struct usb_keyspan *dev = urb->context; context 88 drivers/input/misc/powermate.c struct powermate_device *pm = urb->context; context 196 drivers/input/misc/powermate.c struct powermate_device *pm = urb->context; context 412 drivers/input/misc/yealink.c struct yealink_dev *yld = urb->context; context 450 drivers/input/misc/yealink.c struct yealink_dev *yld = urb->context; context 447 drivers/input/mouse/appletouch.c struct atp *dev = urb->context; context 520 drivers/input/mouse/appletouch.c struct atp *dev = urb->context; context 660 drivers/input/mouse/appletouch.c struct atp *dev = urb->context; context 701 drivers/input/mouse/bcm5974.c struct bcm5974 *dev = urb->context; context 732 drivers/input/mouse/bcm5974.c struct bcm5974 *dev = urb->context; context 190 drivers/input/mouse/synaptics_usb.c struct synusb *synusb = urb->context; context 238 drivers/input/serio/hyperv-keyboard.c static void hv_kbd_on_channel_callback(void *context) context 241 drivers/input/serio/hyperv-keyboard.c struct hv_device *hv_dev = context; context 41 drivers/input/tablet/acecad.c struct usb_acecad *acecad = urb->context; context 414 drivers/input/tablet/aiptek.c struct aiptek *aiptek = urb->context; context 629 drivers/input/tablet/gtco.c struct gtco *device = urbinfo->context; context 254 drivers/input/tablet/hanwang.c struct hanwang *hanwang = urb->context; context 33 drivers/input/tablet/kbtab.c struct kbtab *kbtab = urb->context; context 177 drivers/input/tablet/pegasus_notetaker.c struct pegasus *pegasus = urb->context; context 644 drivers/input/touchscreen/ad7877.c m->context = ts; context 992 drivers/input/touchscreen/ads7846.c m->context = ts; context 1037 drivers/input/touchscreen/ads7846.c m->context = ts; context 1082 drivers/input/touchscreen/ads7846.c m->context = ts; context 1113 drivers/input/touchscreen/ads7846.c m->context = ts; context 1146 drivers/input/touchscreen/ads7846.c m->context = ts; context 1489 drivers/input/touchscreen/usbtouchscreen.c struct usbtouch_usb *usbtouch = urb->context; context 2088 drivers/iommu/dmar.c void *context, void **retval) context 215 drivers/iommu/intel-iommu-debugfs.c struct context_entry *context; context 235 drivers/iommu/intel-iommu-debugfs.c context = iommu_context_addr(iommu, bus, devfn, 0); context 236 drivers/iommu/intel-iommu-debugfs.c if (!context) context 239 drivers/iommu/intel-iommu-debugfs.c if (!context_present(context)) context 245 drivers/iommu/intel-iommu-debugfs.c tbl_wlk.ctx_entry = context; context 249 drivers/iommu/intel-iommu-debugfs.c pasid_dir_ptr = context->lo & VTD_PAGE_MASK; context 250 drivers/iommu/intel-iommu-debugfs.c pasid_dir_size = get_pasid_dir_size(context); context 211 drivers/iommu/intel-iommu.c static inline void context_clear_pasid_enable(struct context_entry *context) context 213 drivers/iommu/intel-iommu.c context->lo &= ~(1ULL << 11); context 216 drivers/iommu/intel-iommu.c static inline bool context_pasid_enabled(struct context_entry *context) context 218 drivers/iommu/intel-iommu.c return !!(context->lo & (1ULL << 11)); context 221 drivers/iommu/intel-iommu.c static inline void context_set_copied(struct context_entry *context) context 223 drivers/iommu/intel-iommu.c context->hi |= (1ull << 3); context 226 drivers/iommu/intel-iommu.c static inline bool context_copied(struct context_entry *context) context 228 drivers/iommu/intel-iommu.c return !!(context->hi & (1ULL << 3)); context 231 drivers/iommu/intel-iommu.c static inline bool __context_present(struct context_entry *context) context 233 drivers/iommu/intel-iommu.c return (context->lo & 1); context 236 drivers/iommu/intel-iommu.c bool context_present(struct context_entry *context) context 238 drivers/iommu/intel-iommu.c return context_pasid_enabled(context) ? context 239 drivers/iommu/intel-iommu.c __context_present(context) : context 240 drivers/iommu/intel-iommu.c __context_present(context) && !context_copied(context); context 243 drivers/iommu/intel-iommu.c static inline void context_set_present(struct context_entry *context) context 245 drivers/iommu/intel-iommu.c context->lo |= 1; context 248 drivers/iommu/intel-iommu.c static inline void context_set_fault_enable(struct context_entry *context) context 250 drivers/iommu/intel-iommu.c context->lo &= (((u64)-1) << 2) | 1; context 253 drivers/iommu/intel-iommu.c static inline void context_set_translation_type(struct context_entry *context, context 256 drivers/iommu/intel-iommu.c context->lo &= (((u64)-1) << 4) | 3; context 257 drivers/iommu/intel-iommu.c context->lo |= (value & 3) << 2; context 260 drivers/iommu/intel-iommu.c static inline void context_set_address_root(struct context_entry *context, context 263 drivers/iommu/intel-iommu.c context->lo &= ~VTD_PAGE_MASK; context 264 drivers/iommu/intel-iommu.c context->lo |= value & VTD_PAGE_MASK; context 267 drivers/iommu/intel-iommu.c static inline void context_set_address_width(struct context_entry *context, context 270 drivers/iommu/intel-iommu.c context->hi |= value & 7; context 273 drivers/iommu/intel-iommu.c static inline void context_set_domain_id(struct context_entry *context, context 276 drivers/iommu/intel-iommu.c context->hi |= (value & ((1 << 16) - 1)) << 8; context 284 drivers/iommu/intel-iommu.c static inline void context_clear_entry(struct context_entry *context) context 286 drivers/iommu/intel-iommu.c context->lo = 0; context 287 drivers/iommu/intel-iommu.c context->hi = 0; context 700 drivers/iommu/intel-iommu.c struct context_entry *context; context 712 drivers/iommu/intel-iommu.c context = phys_to_virt(*entry & VTD_PAGE_MASK); context 718 drivers/iommu/intel-iommu.c context = alloc_pgtable_page(iommu->node); context 719 drivers/iommu/intel-iommu.c if (!context) context 722 drivers/iommu/intel-iommu.c __iommu_flush_cache(iommu, (void *)context, CONTEXT_SIZE); context 723 drivers/iommu/intel-iommu.c phy_addr = virt_to_phys((void *)context); context 727 drivers/iommu/intel-iommu.c return &context[devfn]; context 840 drivers/iommu/intel-iommu.c struct context_entry *context; context 845 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, bus, devfn, 0); context 846 drivers/iommu/intel-iommu.c if (context) context 847 drivers/iommu/intel-iommu.c ret = context_present(context); context 856 drivers/iommu/intel-iommu.c struct context_entry *context; context 863 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, i, 0, 0); context 864 drivers/iommu/intel-iommu.c if (context) context 865 drivers/iommu/intel-iommu.c free_pgtable_page(context); context 870 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, i, 0x80, 0); context 871 drivers/iommu/intel-iommu.c if (context) context 872 drivers/iommu/intel-iommu.c free_pgtable_page(context); context 1946 drivers/iommu/intel-iommu.c context_set_sm_rid2pasid(struct context_entry *context, unsigned long pasid) context 1948 drivers/iommu/intel-iommu.c context->hi |= pasid & ((1 << 20) - 1); context 1949 drivers/iommu/intel-iommu.c context->hi |= (1 << 20); context 1956 drivers/iommu/intel-iommu.c static inline void context_set_sm_dte(struct context_entry *context) context 1958 drivers/iommu/intel-iommu.c context->lo |= (1 << 2); context 1965 drivers/iommu/intel-iommu.c static inline void context_set_sm_pre(struct context_entry *context) context 1967 drivers/iommu/intel-iommu.c context->lo |= (1 << 4); context 1981 drivers/iommu/intel-iommu.c struct context_entry *context; context 1999 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, bus, devfn, 1); context 2000 drivers/iommu/intel-iommu.c if (!context) context 2004 drivers/iommu/intel-iommu.c if (context_present(context)) context 2016 drivers/iommu/intel-iommu.c if (context_copied(context)) { context 2017 drivers/iommu/intel-iommu.c u16 did_old = context_domain_id(context); context 2029 drivers/iommu/intel-iommu.c context_clear_entry(context); context 2038 drivers/iommu/intel-iommu.c context->lo = (u64)virt_to_phys(table->table) | context 2042 drivers/iommu/intel-iommu.c context_set_sm_rid2pasid(context, PASID_RID2PASID); context 2050 drivers/iommu/intel-iommu.c context_set_sm_dte(context); context 2052 drivers/iommu/intel-iommu.c context_set_sm_pre(context); context 2057 drivers/iommu/intel-iommu.c context_set_domain_id(context, did); context 2077 drivers/iommu/intel-iommu.c context_set_address_root(context, virt_to_phys(pgd)); context 2078 drivers/iommu/intel-iommu.c context_set_address_width(context, agaw); context 2085 drivers/iommu/intel-iommu.c context_set_address_width(context, iommu->msagaw); context 2088 drivers/iommu/intel-iommu.c context_set_translation_type(context, translation); context 2091 drivers/iommu/intel-iommu.c context_set_fault_enable(context); context 2092 drivers/iommu/intel-iommu.c context_set_present(context); context 2093 drivers/iommu/intel-iommu.c domain_flush_cache(domain, context, sizeof(*context)); context 2375 drivers/iommu/intel-iommu.c struct context_entry *context; context 2382 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, bus, devfn, 0); context 2383 drivers/iommu/intel-iommu.c if (!context) { context 2387 drivers/iommu/intel-iommu.c did_old = context_domain_id(context); context 2388 drivers/iommu/intel-iommu.c context_clear_entry(context); context 2389 drivers/iommu/intel-iommu.c __iommu_flush_cache(iommu, context, sizeof(*context)); context 5735 drivers/iommu/intel-iommu.c struct context_entry *context; context 5753 drivers/iommu/intel-iommu.c context = iommu_context_addr(iommu, info->bus, info->devfn, 0); context 5754 drivers/iommu/intel-iommu.c if (WARN_ON(!context)) context 5757 drivers/iommu/intel-iommu.c ctx_lo = context[0].lo; context 5761 drivers/iommu/intel-iommu.c context[0].lo = ctx_lo; context 145 drivers/irqchip/qcom-irq-combiner.c static acpi_status count_registers_cb(struct acpi_resource *ares, void *context) context 147 drivers/irqchip/qcom-irq-combiner.c int *count = context; context 176 drivers/irqchip/qcom-irq-combiner.c static acpi_status get_registers_cb(struct acpi_resource *ares, void *context) context 178 drivers/irqchip/qcom-irq-combiner.c struct get_registers_context *ctx = context; context 105 drivers/isdn/hardware/mISDN/hfcsusb.c struct hfcsusb *hw = (struct hfcsusb *) urb->context; context 940 drivers/isdn/hardware/mISDN/hfcsusb.c usb_complete_t complete, void *context) context 945 drivers/isdn/hardware/mISDN/hfcsusb.c complete, context); context 963 drivers/isdn/hardware/mISDN/hfcsusb.c struct iso_urb *context_iso_urb = (struct iso_urb *) urb->context; context 1064 drivers/isdn/hardware/mISDN/hfcsusb.c (usb_complete_t)rx_iso_complete, urb->context); context 1086 drivers/isdn/hardware/mISDN/hfcsusb.c struct usb_fifo *fifo = (struct usb_fifo *) urb->context; context 1157 drivers/isdn/hardware/mISDN/hfcsusb.c struct iso_urb *context_iso_urb = (struct iso_urb *) urb->context; context 1226 drivers/isdn/hardware/mISDN/hfcsusb.c (usb_complete_t)tx_iso_complete, urb->context); context 193 drivers/leds/leds-lp55xx-common.c static void lp55xx_firmware_loaded(const struct firmware *fw, void *context) context 195 drivers/leds/leds-lp55xx-common.c struct lp55xx_chip *chip = context; context 250 drivers/md/dm-bio-prison-v1.c void *context, context 256 drivers/md/dm-bio-prison-v1.c visit_fn(context, cell); context 102 drivers/md/dm-bio-prison-v1.h void *context, struct dm_bio_prison_cell *cell); context 547 drivers/md/dm-bufio.c static void dmio_complete(unsigned long error, void *context) context 549 drivers/md/dm-bufio.c struct dm_buffer *b = context; context 562 drivers/md/dm-bufio.c .notify.context = b, context 293 drivers/md/dm-cache-metadata.c vt.context = NULL; context 1150 drivers/md/dm-cache-metadata.c load_discard_fn fn, void *context) context 1171 drivers/md/dm-cache-metadata.c r = fn(context, cmd->discard_block_size, to_dblock(b), context 1188 drivers/md/dm-cache-metadata.c r = fn(context, cmd->discard_block_size, to_dblock(b), false); context 1198 drivers/md/dm-cache-metadata.c load_discard_fn fn, void *context) context 1203 drivers/md/dm-cache-metadata.c r = __load_discards(cmd, fn, context); context 1274 drivers/md/dm-cache-metadata.c void *context; context 1325 drivers/md/dm-cache-metadata.c load_mapping_fn fn, void *context) context 1351 drivers/md/dm-cache-metadata.c r = fn(context, oblock, to_cblock(cb), dirty, context 1367 drivers/md/dm-cache-metadata.c load_mapping_fn fn, void *context) context 1393 drivers/md/dm-cache-metadata.c r = fn(context, oblock, to_cblock(cb), dirty, context 1406 drivers/md/dm-cache-metadata.c load_mapping_fn fn, void *context) context 1446 drivers/md/dm-cache-metadata.c fn, context); context 1450 drivers/md/dm-cache-metadata.c fn, context); context 1495 drivers/md/dm-cache-metadata.c load_mapping_fn fn, void *context) context 1500 drivers/md/dm-cache-metadata.c r = __load_mappings(cmd, policy, fn, context); context 1506 drivers/md/dm-cache-metadata.c static int __dump_mapping(void *context, uint64_t cblock, void *leaf) context 1585 drivers/md/dm-cache-metadata.c static int is_dirty_callback(uint32_t index, bool *value, void *context) context 1587 drivers/md/dm-cache-metadata.c unsigned long *bits = context; context 1690 drivers/md/dm-cache-metadata.c static int get_hint(uint32_t index, void *value_le, void *context) context 1693 drivers/md/dm-cache-metadata.c struct dm_cache_policy *policy = context; context 79 drivers/md/dm-cache-metadata.h typedef int (*load_discard_fn)(void *context, sector_t discard_block_size, context 82 drivers/md/dm-cache-metadata.h load_discard_fn fn, void *context); context 90 drivers/md/dm-cache-metadata.h typedef int (*load_mapping_fn)(void *context, dm_oblock_t oblock, context 96 drivers/md/dm-cache-metadata.h void *context); context 148 drivers/md/dm-cache-target.c blk_status_t (*commit_op)(void *context); context 155 drivers/md/dm-cache-target.c void (*issue_op)(struct bio *bio, void *context); context 912 drivers/md/dm-cache-target.c static void issue_op(struct bio *bio, void *context) context 914 drivers/md/dm-cache-target.c struct cache *cache = context; context 1176 drivers/md/dm-cache-target.c static void copy_complete(int read_err, unsigned long write_err, void *context) context 1178 drivers/md/dm-cache-target.c struct dm_cache_migration *mg = container_of(context, struct dm_cache_migration, k); context 1841 drivers/md/dm-cache-target.c static blk_status_t commit_op(void *context) context 1843 drivers/md/dm-cache-target.c struct cache *cache = context; context 2884 drivers/md/dm-cache-target.c static int load_mapping(void *context, dm_oblock_t oblock, dm_cblock_t cblock, context 2888 drivers/md/dm-cache-target.c struct cache *cache = context; context 2957 drivers/md/dm-cache-target.c static int load_discard(void *context, sector_t discard_block_size, context 2960 drivers/md/dm-cache-target.c struct discard_load_info *li = context; context 762 drivers/md/dm-clone-target.c static void hydration_kcopyd_callback(int read_err, unsigned long write_err, void *context) context 766 drivers/md/dm-clone-target.c struct dm_clone_region_hydration *tmp, *hd = context; context 43 drivers/md/dm-delay.c struct delay_c *context; context 253 drivers/md/dm-delay.c delayed->context = dc; context 365 drivers/md/dm-era-target.c static void ws_inc(void *context, const void *value) context 367 drivers/md/dm-era-target.c struct era_metadata *md = context; context 377 drivers/md/dm-era-target.c static void ws_dec(void *context, const void *value) context 379 drivers/md/dm-era-target.c struct era_metadata *md = context; context 389 drivers/md/dm-era-target.c static int ws_eq(void *context, const void *value1, const void *value2) context 401 drivers/md/dm-era-target.c vt->context = md; context 412 drivers/md/dm-era-target.c vt.context = NULL; context 125 drivers/md/dm-exception-store.h void *context; context 774 drivers/md/dm-integrity.c static void complete_journal_op(void *context) context 776 drivers/md/dm-integrity.c struct journal_completion *comp = context; context 927 drivers/md/dm-integrity.c static void complete_journal_io(unsigned long error, void *context) context 929 drivers/md/dm-integrity.c struct journal_completion *comp = context; context 962 drivers/md/dm-integrity.c io_req.notify.context = comp; context 1079 drivers/md/dm-integrity.c io_req.notify.context = data; context 2132 drivers/md/dm-integrity.c static void complete_copy_from_journal(unsigned long error, void *context) context 2134 drivers/md/dm-integrity.c struct journal_io *io = context; context 38 drivers/md/dm-io.c void *context; context 118 drivers/md/dm-io.c void *context = io->context; context 125 drivers/md/dm-io.c fn(error_bits, context); context 424 drivers/md/dm-io.c static void sync_io_complete(unsigned long error, void *context) context 426 drivers/md/dm-io.c struct sync_io *sio = context; context 451 drivers/md/dm-io.c io->context = &sio; context 468 drivers/md/dm-io.c struct dpages *dp, io_notify_fn fn, void *context) context 474 drivers/md/dm-io.c fn(1, context); context 483 drivers/md/dm-io.c io->context = context; context 554 drivers/md/dm-io.c io_req->notify.context); context 371 drivers/md/dm-kcopyd.c void *context; context 489 drivers/md/dm-kcopyd.c void *context = job->context; context 505 drivers/md/dm-kcopyd.c fn(read_err, write_err, context); context 515 drivers/md/dm-kcopyd.c static void complete_io(unsigned long error, void *context) context 517 drivers/md/dm-kcopyd.c struct kcopyd_job *job = (struct kcopyd_job *) context; context 560 drivers/md/dm-kcopyd.c .notify.context = job, context 690 drivers/md/dm-kcopyd.c void *context) context 695 drivers/md/dm-kcopyd.c struct kcopyd_job *sub_job = (struct kcopyd_job *) context; context 739 drivers/md/dm-kcopyd.c sub_job->context = sub_job; context 776 drivers/md/dm-kcopyd.c unsigned int flags, dm_kcopyd_notify_fn fn, void *context) context 841 drivers/md/dm-kcopyd.c job->context = context; context 856 drivers/md/dm-kcopyd.c unsigned flags, dm_kcopyd_notify_fn fn, void *context) context 858 drivers/md/dm-kcopyd.c dm_kcopyd_copy(kc, NULL, num_dests, dests, flags, fn, context); context 863 drivers/md/dm-kcopyd.c dm_kcopyd_notify_fn fn, void *context) context 872 drivers/md/dm-kcopyd.c job->context = context; context 320 drivers/md/dm-log-userspace-base.c log->context = lc; context 328 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 355 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 366 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 383 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 394 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 413 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 441 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 566 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 651 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 680 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 716 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 742 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 774 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 800 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 837 drivers/md/dm-log-userspace-base.c struct log_c *lc = log->context; context 511 drivers/md/dm-log.c log->context = lc; context 531 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 568 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 589 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 651 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 657 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 664 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 670 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 683 drivers/md/dm-log.c struct log_c *lc = log->context; context 722 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 728 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 735 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 758 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 772 drivers/md/dm-log.c struct log_c *lc = (struct log_c *) log->context; context 785 drivers/md/dm-log.c struct log_c *lc = log->context; context 805 drivers/md/dm-log.c struct log_c *lc = log->context; context 26 drivers/md/dm-path-selector.h void *context; context 62 drivers/md/dm-queue-length.c ps->context = s; context 78 drivers/md/dm-queue-length.c struct selector *s = ps->context; context 83 drivers/md/dm-queue-length.c ps->context = NULL; context 114 drivers/md/dm-queue-length.c struct selector *s = ps->context; context 162 drivers/md/dm-queue-length.c struct selector *s = ps->context; context 173 drivers/md/dm-queue-length.c struct selector *s = ps->context; context 189 drivers/md/dm-queue-length.c struct selector *s = ps->context; context 92 drivers/md/dm-raid1.c static void wakeup_mirrord(void *context) context 94 drivers/md/dm-raid1.c struct mirror_set *ms = context; context 116 drivers/md/dm-raid1.c static void wakeup_all_recovery_waiters(void *context) context 137 drivers/md/dm-raid1.c static void dispatch_bios(void *context, struct bio_list *bio_list) context 139 drivers/md/dm-raid1.c struct mirror_set *ms = context; context 297 drivers/md/dm-raid1.c void *context) context 299 drivers/md/dm-raid1.c struct dm_region *reg = context; context 505 drivers/md/dm-raid1.c static void read_callback(unsigned long error, void *context) context 507 drivers/md/dm-raid1.c struct bio *bio = context; context 543 drivers/md/dm-raid1.c .notify.context = bio, context 596 drivers/md/dm-raid1.c static void write_callback(unsigned long error, void *context) context 599 drivers/md/dm-raid1.c struct bio *bio = (struct bio *) context; context 657 drivers/md/dm-raid1.c .notify.context = bio, context 89 drivers/md/dm-region-hash.c void *context; context 93 drivers/md/dm-region-hash.c void (*dispatch_bios)(void *context, struct bio_list *bios); context 96 drivers/md/dm-region-hash.c void (*wakeup_workers)(void *context); context 99 drivers/md/dm-region-hash.c void (*wakeup_all_recovery_waiters)(void *context); context 137 drivers/md/dm-region-hash.c return reg->rh->context; context 162 drivers/md/dm-region-hash.c void *context, void (*dispatch_bios)(void *context, context 164 drivers/md/dm-region-hash.c void (*wakeup_workers)(void *context), context 165 drivers/md/dm-region-hash.c void (*wakeup_all_recovery_waiters)(void *context), context 190 drivers/md/dm-region-hash.c rh->context = context; context 379 drivers/md/dm-region-hash.c rh->dispatch_bios(rh->context, ®->delayed_bios); context 381 drivers/md/dm-region-hash.c rh->wakeup_all_recovery_waiters(rh->context); context 580 drivers/md/dm-region-hash.c rh->wakeup_workers(rh->context); context 638 drivers/md/dm-region-hash.c rh->wakeup_all_recovery_waiters(rh->context); context 673 drivers/md/dm-region-hash.c rh->wakeup_workers(rh->context); context 718 drivers/md/dm-region-hash.c rh->wakeup_workers(rh->context); context 73 drivers/md/dm-round-robin.c ps->context = s; context 79 drivers/md/dm-round-robin.c struct selector *s = ps->context; context 84 drivers/md/dm-round-robin.c ps->context = NULL; context 116 drivers/md/dm-round-robin.c struct selector *s = ps->context; context 160 drivers/md/dm-round-robin.c struct selector *s = ps->context; context 171 drivers/md/dm-round-robin.c struct selector *s = ps->context; context 184 drivers/md/dm-round-robin.c struct selector *s = ps->context; context 58 drivers/md/dm-service-time.c ps->context = s; context 74 drivers/md/dm-service-time.c struct selector *s = ps->context; context 79 drivers/md/dm-service-time.c ps->context = NULL; context 111 drivers/md/dm-service-time.c struct selector *s = ps->context; context 175 drivers/md/dm-service-time.c struct selector *s = ps->context; context 186 drivers/md/dm-service-time.c struct selector *s = ps->context; context 276 drivers/md/dm-service-time.c struct selector *s = ps->context; context 96 drivers/md/dm-snap-persistent.c void *context; context 572 drivers/md/dm-snap-persistent.c return (struct pstore *) store->context; context 724 drivers/md/dm-snap-persistent.c cb->context = callback_context; context 759 drivers/md/dm-snap-persistent.c cb->callback(cb->context, ps->valid); context 894 drivers/md/dm-snap-persistent.c store->context = ps; context 28 drivers/md/dm-snap-transient.c kfree(store->context); context 42 drivers/md/dm-snap-transient.c struct transient_c *tc = store->context; context 68 drivers/md/dm-snap-transient.c *sectors_allocated = ((struct transient_c *) store->context)->next_free; context 82 drivers/md/dm-snap-transient.c store->context = tc; context 815 drivers/md/dm-snap.c static int dm_add_exception(void *context, chunk_t old, chunk_t new) context 818 drivers/md/dm-snap.c struct dm_snapshot *s = context; context 1015 drivers/md/dm-snap.c void *context); context 1124 drivers/md/dm-snap.c static void merge_callback(int read_err, unsigned long write_err, void *context) context 1126 drivers/md/dm-snap.c struct dm_snapshot *s = context; context 1636 drivers/md/dm-snap.c static void pending_complete(void *context, int success) context 1638 drivers/md/dm-snap.c struct dm_snap_pending_exception *pe = context; context 1737 drivers/md/dm-snap.c static void copy_callback(int read_err, unsigned long write_err, void *context) context 1739 drivers/md/dm-snap.c struct dm_snap_pending_exception *pe = context; context 1910 drivers/md/dm-snap.c static void zero_callback(int read_err, unsigned long write_err, void *context) context 1912 drivers/md/dm-snap.c struct bio *bio = context; context 1320 drivers/md/dm-table.c void (*fn)(void *), void *context) context 1324 drivers/md/dm-table.c t->event_context = context; context 314 drivers/md/dm-thin-metadata.c static void data_block_inc(void *context, const void *value_le) context 316 drivers/md/dm-thin-metadata.c struct dm_space_map *sm = context; context 326 drivers/md/dm-thin-metadata.c static void data_block_dec(void *context, const void *value_le) context 328 drivers/md/dm-thin-metadata.c struct dm_space_map *sm = context; context 338 drivers/md/dm-thin-metadata.c static int data_block_equal(void *context, const void *value1_le, const void *value2_le) context 352 drivers/md/dm-thin-metadata.c static void subtree_inc(void *context, const void *value) context 354 drivers/md/dm-thin-metadata.c struct dm_btree_info *info = context; context 363 drivers/md/dm-thin-metadata.c static void subtree_dec(void *context, const void *value) context 365 drivers/md/dm-thin-metadata.c struct dm_btree_info *info = context; context 375 drivers/md/dm-thin-metadata.c static int subtree_equal(void *context, const void *value1_le, const void *value2_le) context 458 drivers/md/dm-thin-metadata.c pmd->info.value_type.context = pmd->data_sm; context 469 drivers/md/dm-thin-metadata.c pmd->tl_info.value_type.context = &pmd->bl_info; context 477 drivers/md/dm-thin-metadata.c pmd->bl_info.value_type.context = pmd->data_sm; context 485 drivers/md/dm-thin-metadata.c pmd->details_info.value_type.context = NULL; context 2057 drivers/md/dm-thin-metadata.c void *context) context 2062 drivers/md/dm-thin-metadata.c r = dm_sm_register_threshold_callback(pmd->metadata_sm, threshold, fn, context); context 2070 drivers/md/dm-thin-metadata.c void *context) context 2074 drivers/md/dm-thin-metadata.c pmd->pre_commit_context = context; context 220 drivers/md/dm-thin-metadata.h void *context); context 234 drivers/md/dm-thin-metadata.h typedef int (*dm_pool_pre_commit_fn)(void *context); context 238 drivers/md/dm-thin-metadata.h void *context); context 476 drivers/md/dm-thin.c void *context, context 479 drivers/md/dm-thin.c dm_cell_visit_release(pool->prison, fn, context, cell); context 854 drivers/md/dm-thin.c static void copy_complete(int read_err, unsigned long write_err, void *context) context 856 drivers/md/dm-thin.c struct dm_thin_new_mapping *m = context; context 907 drivers/md/dm-thin.c static void __inc_remap_and_issue_cell(void *context, context 910 drivers/md/dm-thin.c struct remap_info *info = context; context 1820 drivers/md/dm-thin.c static void __remap_and_issue_shared_cell(void *context, context 1823 drivers/md/dm-thin.c struct remap_info *info = context; context 3203 drivers/md/dm-thin.c static void metadata_low_callback(void *context) context 3205 drivers/md/dm-thin.c struct pool *pool = context; context 3224 drivers/md/dm-thin.c static int metadata_pre_commit_callback(void *context) context 3226 drivers/md/dm-thin.c struct pool_c *pt = context; context 434 drivers/md/dm-writecache.c static void writecache_notify_io(unsigned long error, void *context) context 436 drivers/md/dm-writecache.c struct io_notify *endio = context; context 487 drivers/md/dm-writecache.c req.notify.context = &endio; context 98 drivers/md/dm-zoned-reclaim.c void *context) context 100 drivers/md/dm-zoned-reclaim.c struct dmz_reclaim *zrc = context; context 2093 drivers/md/dm.c static void event_callback(void *context) context 2097 drivers/md/dm.c struct mapped_device *md = (struct mapped_device *) context; context 53 drivers/md/dm.h void (*fn)(void *), void *context); context 116 drivers/md/persistent-data/dm-array.c fn(info->value_type.context, element_at(info, ab, i)); context 188 drivers/md/persistent-data/dm-array.c vt->inc(vt->context, value); context 212 drivers/md/persistent-data/dm-array.c vt->dec(vt->context, element_at(info, ab, i - 1)); context 576 drivers/md/persistent-data/dm-array.c static void block_inc(void *context, const void *value) context 579 drivers/md/persistent-data/dm-array.c struct dm_array_info *info = context; context 585 drivers/md/persistent-data/dm-array.c static void block_dec(void *context, const void *value) context 593 drivers/md/persistent-data/dm-array.c struct dm_array_info *info = context; context 624 drivers/md/persistent-data/dm-array.c static int block_equal(void *context, const void *value1, const void *value2) context 641 drivers/md/persistent-data/dm-array.c bvt->context = info; context 699 drivers/md/persistent-data/dm-array.c value_fn fn, void *context, unsigned base, unsigned new_nr) context 709 drivers/md/persistent-data/dm-array.c r = fn(base + i, element_at(info, ab, i), context); context 714 drivers/md/persistent-data/dm-array.c vt->inc(vt->context, element_at(info, ab, i)); context 722 drivers/md/persistent-data/dm-array.c uint32_t size, value_fn fn, void *context) context 742 drivers/md/persistent-data/dm-array.c r = populate_ablock_with_values(info, ab, fn, context, context 824 drivers/md/persistent-data/dm-array.c (!vt->equal || !vt->equal(vt->context, old_value, value))) { context 825 drivers/md/persistent-data/dm-array.c vt->dec(vt->context, old_value); context 827 drivers/md/persistent-data/dm-array.c vt->inc(vt->context, value); context 851 drivers/md/persistent-data/dm-array.c int (*fn)(void *context, uint64_t key, void *leaf); context 852 drivers/md/persistent-data/dm-array.c void *context; context 855 drivers/md/persistent-data/dm-array.c static int walk_ablock(void *context, uint64_t *keys, void *leaf) context 857 drivers/md/persistent-data/dm-array.c struct walk_info *wi = context; context 874 drivers/md/persistent-data/dm-array.c r = wi->fn(wi->context, keys[0] * max_entries + i, context 887 drivers/md/persistent-data/dm-array.c void *context) context 893 drivers/md/persistent-data/dm-array.c wi.context = context; context 129 drivers/md/persistent-data/dm-array.h typedef int (*value_fn)(uint32_t index, void *value_le, void *context); context 131 drivers/md/persistent-data/dm-array.h uint32_t size, value_fn fn, void *context); context 180 drivers/md/persistent-data/dm-array.h int (*fn)(void *context, uint64_t key, void *leaf), context 181 drivers/md/persistent-data/dm-array.h void *context); context 19 drivers/md/persistent-data/dm-bitset.c .context = NULL, context 45 drivers/md/persistent-data/dm-bitset.c void *context; context 48 drivers/md/persistent-data/dm-bitset.c static int pack_bits(uint32_t index, void *value, void *context) context 51 drivers/md/persistent-data/dm-bitset.c struct packer_context *p = context; context 57 drivers/md/persistent-data/dm-bitset.c r = p->fn(index * 64 + bit, &bv, p->context); context 73 drivers/md/persistent-data/dm-bitset.c uint32_t size, bit_value_fn fn, void *context) context 78 drivers/md/persistent-data/dm-bitset.c p.context = context; context 107 drivers/md/persistent-data/dm-bitset.h typedef int (*bit_value_fn)(uint32_t index, bool *value, void *context); context 109 drivers/md/persistent-data/dm-bitset.h uint32_t size, bit_value_fn fn, void *context); context 546 drivers/md/persistent-data/dm-btree-remove.c info->value_type.dec(info->value_type.context, context 655 drivers/md/persistent-data/dm-btree-remove.c info->value_type.dec(info->value_type.context, context 245 drivers/md/persistent-data/dm-btree-spine.c static void le64_inc(void *context, const void *value_le) context 247 drivers/md/persistent-data/dm-btree-spine.c struct dm_transaction_manager *tm = context; context 254 drivers/md/persistent-data/dm-btree-spine.c static void le64_dec(void *context, const void *value_le) context 256 drivers/md/persistent-data/dm-btree-spine.c struct dm_transaction_manager *tm = context; context 263 drivers/md/persistent-data/dm-btree-spine.c static int le64_equal(void *context, const void *value1_le, const void *value2_le) context 275 drivers/md/persistent-data/dm-btree-spine.c vt->context = tm; context 82 drivers/md/persistent-data/dm-btree.c vt->inc(vt->context, value_ptr(n, i)); context 325 drivers/md/persistent-data/dm-btree.c info->value_type.dec(info->value_type.context, context 827 drivers/md/persistent-data/dm-btree.c info->value_type.context, context 830 drivers/md/persistent-data/dm-btree.c info->value_type.dec(info->value_type.context, context 950 drivers/md/persistent-data/dm-btree.c int (*fn)(void *context, uint64_t *keys, void *leaf), context 951 drivers/md/persistent-data/dm-btree.c void *context) context 968 drivers/md/persistent-data/dm-btree.c r = walk_node(info, value64(n, i), fn, context); context 973 drivers/md/persistent-data/dm-btree.c r = fn(context, &keys, value_ptr(n, i)); context 985 drivers/md/persistent-data/dm-btree.c int (*fn)(void *context, uint64_t *keys, void *leaf), context 986 drivers/md/persistent-data/dm-btree.c void *context) context 989 drivers/md/persistent-data/dm-btree.c return walk_node(info, root, fn, context); context 41 drivers/md/persistent-data/dm-btree.h void *context; context 61 drivers/md/persistent-data/dm-btree.h void (*inc)(void *context, const void *value); context 68 drivers/md/persistent-data/dm-btree.h void (*dec)(void *context, const void *value); context 75 drivers/md/persistent-data/dm-btree.h int (*equal)(void *context, const void *value1, const void *value2); context 176 drivers/md/persistent-data/dm-btree.h int (*fn)(void *context, uint64_t *keys, void *leaf), context 177 drivers/md/persistent-data/dm-btree.h void *context); context 411 drivers/md/persistent-data/dm-space-map-common.c int (*mutator)(void *context, uint32_t old, uint32_t *new), context 412 drivers/md/persistent-data/dm-space-map-common.c void *context, enum allocation_event *ev) context 446 drivers/md/persistent-data/dm-space-map-common.c r = mutator(context, old, &ref_count); context 498 drivers/md/persistent-data/dm-space-map-common.c static int set_ref_count(void *context, uint32_t old, uint32_t *new) context 500 drivers/md/persistent-data/dm-space-map-common.c *new = *((uint32_t *) context); context 510 drivers/md/persistent-data/dm-space-map-common.c static int inc_ref_count(void *context, uint32_t old, uint32_t *new) context 521 drivers/md/persistent-data/dm-space-map-common.c static int dec_ref_count(void *context, uint32_t old, uint32_t *new) context 29 drivers/md/persistent-data/dm-space-map-metadata.c void *context; context 39 drivers/md/persistent-data/dm-space-map-metadata.c dm_sm_threshold_fn fn, void *context) context 44 drivers/md/persistent-data/dm-space-map-metadata.c t->context = context; context 61 drivers/md/persistent-data/dm-space-map-metadata.c t->fn(t->context); context 515 drivers/md/persistent-data/dm-space-map-metadata.c void *context) context 519 drivers/md/persistent-data/dm-space-map-metadata.c set_threshold(&smm->threshold, threshold, fn, context); context 12 drivers/md/persistent-data/dm-space-map.h typedef void (*dm_sm_threshold_fn)(void *context); context 72 drivers/md/persistent-data/dm-space-map.h void *context); context 148 drivers/md/persistent-data/dm-space-map.h void *context) context 151 drivers/md/persistent-data/dm-space-map.h return sm->register_threshold_callback(sm, threshold, fn, context); context 44 drivers/media/common/siano/smscoreapi.c void *context; context 593 drivers/media/common/siano/smscoreapi.c client->onremove_handler(client->context); context 720 drivers/media/common/siano/smscoreapi.c dev->context = params->context; context 756 drivers/media/common/siano/smscoreapi.c rc = coredev->sendrequest_handler(coredev->context, buffer, size); context 844 drivers/media/common/siano/smscoreapi.c coredev->sendrequest_handler(coredev->context, &mtu_msg, context 857 drivers/media/common/siano/smscoreapi.c coredev->sendrequest_handler(coredev->context, &crys_msg, context 924 drivers/media/common/siano/smscoreapi.c rc = coredev->preload_handler(coredev->context); context 1014 drivers/media/common/siano/smscoreapi.c rc = coredev->sendrequest_handler(coredev->context, msg, context 1034 drivers/media/common/siano/smscoreapi.c return coredev->postload_handler(coredev->context); context 1175 drivers/media/common/siano/smscoreapi.c : loadfirmware_handler(coredev->context, fw_buf, context 1382 drivers/media/common/siano/smscoreapi.c coredev->detectmode_handler(coredev->context, context 1386 drivers/media/common/siano/smscoreapi.c rc = coredev->setmode_handler(coredev->context, mode); context 1515 drivers/media/common/siano/smscoreapi.c rc = client->onresponse_handler(client->context, cb); context 1735 drivers/media/common/siano/smscoreapi.c newclient->context = params->context; context 1741 drivers/media/common/siano/smscoreapi.c pr_debug("%p %d %d\n", params->context, params->data_type, context 1770 drivers/media/common/siano/smscoreapi.c pr_debug("%p\n", client->context); context 1815 drivers/media/common/siano/smscoreapi.c return coredev->sendrequest_handler(coredev->context, buffer, size); context 1863 drivers/media/common/siano/smscoreapi.c return coredev->sendrequest_handler(coredev->context, context 1887 drivers/media/common/siano/smscoreapi.c return coredev->sendrequest_handler(coredev->context, context 102 drivers/media/common/siano/smscoreapi.h typedef int (*setmode_t)(void *context, int mode); context 103 drivers/media/common/siano/smscoreapi.h typedef void (*detectmode_t)(void *context, int *mode); context 104 drivers/media/common/siano/smscoreapi.h typedef int (*sendrequest_t)(void *context, void *buffer, size_t size); context 105 drivers/media/common/siano/smscoreapi.h typedef int (*loadfirmware_t)(void *context, void *buffer, size_t size); context 106 drivers/media/common/siano/smscoreapi.h typedef int (*preload_t)(void *context); context 107 drivers/media/common/siano/smscoreapi.h typedef int (*postload_t)(void *context); context 109 drivers/media/common/siano/smscoreapi.h typedef int (*onresponse_t)(void *context, struct smscore_buffer_t *cb); context 110 drivers/media/common/siano/smscoreapi.h typedef void (*onremove_t)(void *context); context 140 drivers/media/common/siano/smscoreapi.h void *context; context 149 drivers/media/common/siano/smscoreapi.h void *context; context 167 drivers/media/common/siano/smscoreapi.h void *context; context 505 drivers/media/common/siano/smsdvb-main.c static int smsdvb_onresponse(void *context, struct smscore_buffer_t *cb) context 507 drivers/media/common/siano/smsdvb-main.c struct smsdvb_client_t *client = (struct smsdvb_client_t *) context; context 631 drivers/media/common/siano/smsdvb-main.c static void smsdvb_onremove(void *context) context 635 drivers/media/common/siano/smsdvb-main.c smsdvb_unregister_client((struct smsdvb_client_t *) context); context 1141 drivers/media/common/siano/smsdvb-main.c params.context = client; context 1362 drivers/media/dvb-frontends/af9013.c static int af9013_regmap_write(void *context, const void *data, size_t count) context 1364 drivers/media/dvb-frontends/af9013.c struct i2c_client *client = context; context 1400 drivers/media/dvb-frontends/af9013.c static int af9013_regmap_read(void *context, const void *reg_buf, context 1403 drivers/media/dvb-frontends/af9013.c struct i2c_client *client = context; context 6268 drivers/media/dvb-frontends/drxk_hard.c void *context) context 6270 drivers/media/dvb-frontends/drxk_hard.c struct drxk_state *state = context; context 690 drivers/media/dvb-frontends/rtl2830.c static int rtl2830_regmap_read(void *context, const void *reg_buf, context 693 drivers/media/dvb-frontends/rtl2830.c struct i2c_client *client = context; context 719 drivers/media/dvb-frontends/rtl2830.c static int rtl2830_regmap_write(void *context, const void *data, size_t count) context 721 drivers/media/dvb-frontends/rtl2830.c struct i2c_client *client = context; context 742 drivers/media/dvb-frontends/rtl2830.c static int rtl2830_regmap_gather_write(void *context, const void *reg, context 746 drivers/media/dvb-frontends/rtl2830.c struct i2c_client *client = context; context 219 drivers/media/dvb-frontends/rtl2832_sdr.c struct rtl2832_sdr_dev *dev = urb->context; context 78 drivers/media/firewire/firedtv-fw.c struct fw_iso_context *context; context 94 drivers/media/firewire/firedtv-fw.c return fw_iso_context_queue(ctx->context, &p, &ctx->buffer, context 98 drivers/media/firewire/firedtv-fw.c static void handle_iso(struct fw_iso_context *context, u32 cycle, context 128 drivers/media/firewire/firedtv-fw.c fw_iso_context_queue_flush(ctx->context); context 142 drivers/media/firewire/firedtv-fw.c ctx->context = fw_iso_context_create(device->card, context 145 drivers/media/firewire/firedtv-fw.c if (IS_ERR(ctx->context)) { context 146 drivers/media/firewire/firedtv-fw.c err = PTR_ERR(ctx->context); context 167 drivers/media/firewire/firedtv-fw.c err = fw_iso_context_start(ctx->context, -1, 0, context 178 drivers/media/firewire/firedtv-fw.c fw_iso_context_destroy(ctx->context); context 189 drivers/media/firewire/firedtv-fw.c fw_iso_context_stop(ctx->context); context 191 drivers/media/firewire/firedtv-fw.c fw_iso_context_destroy(ctx->context); context 88 drivers/media/mmc/siano/smssdio.c static int smssdio_sendrequest(void *context, void *buffer, size_t size) context 93 drivers/media/mmc/siano/smssdio.c smsdev = context; context 258 drivers/media/mmc/siano/smssdio.c params.context = smsdev; context 196 drivers/media/pci/saa7164/saa7164-types.h u8 *context; context 2747 drivers/media/platform/coda/coda-common.c static void coda_fw_callback(const struct firmware *fw, void *context); context 2765 drivers/media/platform/coda/coda-common.c static void coda_fw_callback(const struct firmware *fw, void *context) context 2767 drivers/media/platform/coda/coda-common.c struct coda_dev *dev = context; context 377 drivers/media/platform/exynos4-is/fimc-is.c static void fimc_is_load_firmware(const struct firmware *fw, void *context) context 379 drivers/media/platform/exynos4-is/fimc-is.c struct fimc_is *is = context; context 372 drivers/media/platform/meson/ao-cec-g12a.c static int meson_ao_cec_g12a_read(void *context, unsigned int addr, context 375 drivers/media/platform/meson/ao-cec-g12a.c struct meson_ao_cec_g12a_device *ao_cec = context; context 396 drivers/media/platform/meson/ao-cec-g12a.c static int meson_ao_cec_g12a_write(void *context, unsigned int addr, context 399 drivers/media/platform/meson/ao-cec-g12a.c struct meson_ao_cec_g12a_device *ao_cec = context; context 1060 drivers/media/platform/ti-vpe/vpdma.c static void vpdma_firmware_cb(const struct firmware *f, void *context) context 1062 drivers/media/platform/ti-vpe/vpdma.c struct vpdma_data *vpdma = context; context 364 drivers/media/radio/si470x/radio-si470x-usb.c struct si470x_device *radio = urb->context; context 385 drivers/media/rc/ati_remote.c struct ati_remote *ati_remote = urb->context; context 481 drivers/media/rc/ati_remote.c struct ati_remote *ati_remote = urb->context; context 654 drivers/media/rc/ati_remote.c struct ati_remote *ati_remote = urb->context; context 95 drivers/media/rc/igorplugusb.c struct igorplugusb *ir = urb->context; context 154 drivers/media/rc/iguanair.c ir = urb->context; context 182 drivers/media/rc/iguanair.c struct iguanair *ir = urb->context; context 1046 drivers/media/rc/imon.c ictx = (struct imon_context *)urb->context; context 1713 drivers/media/rc/imon.c ictx = (struct imon_context *)urb->context; context 1754 drivers/media/rc/imon.c ictx = (struct imon_context *)urb->context; context 96 drivers/media/rc/imon_raw.c struct imon *imon = urb->context; context 795 drivers/media/rc/mceusb.c complete(urb->context); context 1367 drivers/media/rc/mceusb.c ir = urb->context; context 691 drivers/media/rc/redrat3.c struct redrat3_dev *rr3 = urb->context; context 888 drivers/media/rc/redrat3.c struct redrat3_dev *rr3 = urb->context; context 907 drivers/media/rc/redrat3.c struct redrat3_dev *rr3 = urb->context; context 200 drivers/media/rc/streamzap.c sz = urb->context; context 83 drivers/media/rc/ttusbir.c struct ttusbir *tt = urb->context; context 159 drivers/media/rc/ttusbir.c struct ttusbir *tt = urb->context; context 250 drivers/media/rc/ttusbir.c urb->context = tt; context 91 drivers/media/rc/xbox_remote.c struct xbox_remote *xbox_remote = urb->context; context 119 drivers/media/rc/xbox_remote.c struct xbox_remote *xbox_remote = urb->context; context 1361 drivers/media/tuners/tuner-xc2028.c void *context) context 1363 drivers/media/tuners/tuner-xc2028.c struct dvb_frontend *fe = context; context 269 drivers/media/usb/airspy/airspy.c struct airspy *s = urb->context; context 213 drivers/media/usb/as102/as102_usb_drv.c struct as102_dev_t *as102_dev = urb->context; context 450 drivers/media/usb/as102/as10x_cmd.h union as10x_fw_context context; context 34 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(pcmd->body.context.req)); context 37 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT); context 38 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.tag = cpu_to_le16(tag); context 39 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.type = cpu_to_le16(GET_CONTEXT_DATA); context 45 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(pcmd->body.context.req) context 48 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(prsp->body.context.rsp) context 63 drivers/media/usb/as102/as10x_cmd_cfg.c *pvalue = le32_to_cpu((__force __le32)prsp->body.context.rsp.reg_val.u.value32); context 90 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(pcmd->body.context.req)); context 93 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT); context 95 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.reg_val.u.value32 = (__force u32)cpu_to_le32(value); context 96 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.tag = cpu_to_le16(tag); context 97 drivers/media/usb/as102/as10x_cmd_cfg.c pcmd->body.context.req.type = cpu_to_le16(SET_CONTEXT_DATA); context 103 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(pcmd->body.context.req) context 106 drivers/media/usb/as102/as10x_cmd_cfg.c sizeof(prsp->body.context.rsp) context 185 drivers/media/usb/as102/as10x_cmd_cfg.c err = prsp->body.context.rsp.error; context 188 drivers/media/usb/as102/as10x_cmd_cfg.c (le16_to_cpu(prsp->body.context.rsp.proc_id) == proc_id)) { context 110 drivers/media/usb/au0828/au0828-dvb.c struct au0828_dev *dev = purb->context; context 112 drivers/media/usb/au0828/au0828-video.c struct au0828_dmaqueue *dma_q = urb->context; context 494 drivers/media/usb/au0828/au0828-video.c struct au0828_dmaqueue *dma_q = urb->context; context 379 drivers/media/usb/b2c2/flexcop-usb.c struct flexcop_usb *fc_usb = urb->context; context 466 drivers/media/usb/b2c2/flexcop-usb.c urb->context = fc_usb; context 200 drivers/media/usb/cpia2/cpia2_usb.c struct camera_data *cam = (struct camera_data *) urb->context; context 688 drivers/media/usb/cpia2/cpia2_usb.c urb->context = cam; context 1349 drivers/media/usb/cx231xx/cx231xx-417.c struct cx231xx_dmaqueue *dma_q = urb->context; context 1375 drivers/media/usb/cx231xx/cx231xx-417.c struct cx231xx_dmaqueue *dma_q = urb->context; context 83 drivers/media/usb/cx231xx/cx231xx-audio.c struct cx231xx *dev = urb->context; context 175 drivers/media/usb/cx231xx/cx231xx-audio.c struct cx231xx *dev = urb->context; context 292 drivers/media/usb/cx231xx/cx231xx-audio.c urb->context = dev; context 353 drivers/media/usb/cx231xx/cx231xx-audio.c urb->context = dev; context 786 drivers/media/usb/cx231xx/cx231xx-core.c struct cx231xx_dmaqueue *dma_q = urb->context; context 832 drivers/media/usb/cx231xx/cx231xx-core.c struct cx231xx_dmaqueue *dma_q = urb->context; context 73 drivers/media/usb/cx231xx/cx231xx-vbi.c struct cx231xx_dmaqueue *dma_q = urb->context; context 292 drivers/media/usb/cx231xx/cx231xx-vbi.c struct cx231xx_dmaqueue *dma_q = urb->context; context 263 drivers/media/usb/cx231xx/cx231xx-video.c struct cx231xx_dmaqueue *dma_q = urb->context; context 360 drivers/media/usb/cx231xx/cx231xx-video.c struct cx231xx_dmaqueue *dma_q = urb->context; context 1294 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_regmap_write(void *context, const void *data, size_t count) context 1296 drivers/media/usb/dvb-usb-v2/af9015.c struct dvb_usb_device *d = context; context 1314 drivers/media/usb/dvb-usb-v2/af9015.c static int af9015_regmap_read(void *context, const void *reg_buf, context 1317 drivers/media/usb/dvb-usb-v2/af9015.c struct dvb_usb_device *d = context; context 269 drivers/media/usb/dvb-usb-v2/lmedm04.c struct dvb_usb_adapter *adap = lme_urb->context; context 21 drivers/media/usb/dvb-usb-v2/usb_urb.c struct usb_data_stream *stream = urb->context; context 184 drivers/media/usb/dvb-usb-v2/usb_urb.c urb->context = stream; context 632 drivers/media/usb/dvb-usb/cxusb-analog.c struct dvb_usb_device *dvbdev = u->context; context 839 drivers/media/usb/dvb-usb/cxusb-analog.c surb->context = dvbdev; context 710 drivers/media/usb/dvb-usb/dib0700_core.c struct dvb_usb_device *d = purb->context; context 17 drivers/media/usb/dvb-usb/usb-urb.c struct usb_data_stream *stream = urb->context; context 192 drivers/media/usb/dvb-usb/usb-urb.c urb->context = stream; context 80 drivers/media/usb/em28xx/em28xx-audio.c struct em28xx *dev = urb->context; context 881 drivers/media/usb/em28xx/em28xx-audio.c urb->context = dev; context 779 drivers/media/usb/em28xx/em28xx-core.c struct em28xx *dev = urb->context; context 747 drivers/media/usb/go7007/go7007-usb.c struct go7007 *go = (struct go7007 *)urb->context; context 787 drivers/media/usb/go7007/go7007-usb.c struct go7007 *go = (struct go7007 *)urb->context; context 810 drivers/media/usb/go7007/go7007-usb.c struct go7007 *go = (struct go7007 *)urb->context; context 98 drivers/media/usb/gspca/benq.c urb->context = gspca_dev; context 141 drivers/media/usb/gspca/benq.c struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context; context 82 drivers/media/usb/gspca/gspca.c struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context; context 323 drivers/media/usb/gspca/gspca.c struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context; context 336 drivers/media/usb/gspca/gspca.c struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context; context 710 drivers/media/usb/gspca/gspca.c urb->context = gspca_dev; context 219 drivers/media/usb/gspca/konica.c urb->context = gspca_dev; context 256 drivers/media/usb/gspca/konica.c struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context; context 483 drivers/media/usb/hackrf/hackrf.c struct hackrf_dev *dev = urb->context; context 528 drivers/media/usb/hackrf/hackrf.c struct hackrf_dev *dev = urb->context; context 71 drivers/media/usb/hdpvr/hdpvr-video.c struct hdpvr_buffer *buf = (struct hdpvr_buffer *)urb->context; context 365 drivers/media/usb/msi2500/msi2500.c struct msi2500_dev *dev = (struct msi2500_dev *)urb->context; context 517 drivers/media/usb/msi2500/msi2500.c urb->context = dev; context 3535 drivers/media/usb/pvrusb2/pvrusb2-hdw.c struct pvr2_hdw *hdw = urb->context; context 3544 drivers/media/usb/pvrusb2/pvrusb2-hdw.c struct pvr2_hdw *hdw = urb->context; context 417 drivers/media/usb/pvrusb2/pvrusb2-io.c struct pvr2_buffer *bp = urb->context; context 276 drivers/media/usb/pwc/pwc-if.c struct pwc_device *pdev = (struct pwc_device *)urb->context; context 465 drivers/media/usb/pwc/pwc-if.c urb->context = pdev; context 494 drivers/media/usb/s2255/s2255drv.c struct s2255_fw *data = urb->context; context 2041 drivers/media/usb/s2255/s2255drv.c pipe_info = purb->context; context 88 drivers/media/usb/siano/smsusb.c struct smsusb_urb_t *surb = (struct smsusb_urb_t *) urb->context; context 206 drivers/media/usb/siano/smsusb.c static int smsusb_sendrequest(void *context, void *buffer, size_t size) context 208 drivers/media/usb/siano/smsusb.c struct smsusb_device_t *dev = (struct smsusb_device_t *) context; context 301 drivers/media/usb/siano/smsusb.c static void smsusb1_detectmode(void *context, int *mode) context 304 drivers/media/usb/siano/smsusb.c ((struct smsusb_device_t *) context)->udev->product; context 323 drivers/media/usb/siano/smsusb.c static int smsusb1_setmode(void *context, int mode) context 333 drivers/media/usb/siano/smsusb.c return smsusb_sendrequest(context, &msg, sizeof(msg)); context 447 drivers/media/usb/siano/smsusb.c params.context = dev; context 298 drivers/media/usb/stk1160/stk1160-video.c struct stk1160 *dev = urb->context; context 479 drivers/media/usb/stk1160/stk1160-video.c urb->context = dev; context 317 drivers/media/usb/stkwebcam/stk-webcam.c dev = (struct stk_camera *) urb->context; context 472 drivers/media/usb/stkwebcam/stk-webcam.c urb->context = dev; context 76 drivers/media/usb/tm6000/tm6000-dvb.c struct tm6000_core *dev = urb->context; context 186 drivers/media/usb/tm6000/tm6000-input.c struct tm6000_core *dev = urb->context; context 118 drivers/media/usb/tm6000/tm6000-video.c struct tm6000_dmaqueue *dma_q = urb->context; context 286 drivers/media/usb/tm6000/tm6000-video.c struct tm6000_dmaqueue *dma_q = urb->context; context 371 drivers/media/usb/tm6000/tm6000-video.c struct tm6000_dmaqueue *dma_q = urb->context; context 417 drivers/media/usb/tm6000/tm6000-video.c struct tm6000_dmaqueue *dma_q = urb->context; context 747 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c struct ttusb *ttusb = urb->context; context 846 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c urb->context = ttusb; context 250 drivers/media/usb/ttusb-dec/ttusb_dec.c struct ttusb_dec *dec = urb->context; context 798 drivers/media/usb/ttusb-dec/ttusb_dec.c struct ttusb_dec *dec = urb->context; context 852 drivers/media/usb/ttusb-dec/ttusb_dec.c urb->context = dec; context 124 drivers/media/usb/usbtv/usbtv-audio.c struct usbtv *chip = urb->context; context 460 drivers/media/usb/usbtv/usbtv-video.c struct usbtv *usbtv = (struct usbtv *)ip->context; context 506 drivers/media/usb/usbtv/usbtv-video.c ip->context = usbtv; context 1263 drivers/media/usb/usbvision/usbvision-core.c struct usb_usbvision *usbvision = urb->context; context 1399 drivers/media/usb/usbvision/usbvision-core.c struct usb_usbvision *usbvision = (struct usb_usbvision *)urb->context; context 2299 drivers/media/usb/usbvision/usbvision-core.c urb->context = usbvision; context 160 drivers/media/usb/uvc/uvc_status.c struct uvc_device *dev = urb->context; context 196 drivers/media/usb/uvc/uvc_status.c struct uvc_device *dev = urb->context; context 1495 drivers/media/usb/uvc/uvc_video.c struct uvc_urb *uvc_urb = urb->context; context 1728 drivers/media/usb/uvc/uvc_video.c urb->context = uvc_urb; context 959 drivers/media/usb/zr364xx/zr364xx.c pipe_info = purb->context; context 35 drivers/mfd/atmel-hlcdc.c static int regmap_atmel_hlcdc_reg_write(void *context, unsigned int reg, context 38 drivers/mfd/atmel-hlcdc.c struct atmel_hlcdc_regmap *hregmap = context; context 53 drivers/mfd/atmel-hlcdc.c static int regmap_atmel_hlcdc_reg_read(void *context, unsigned int reg, context 56 drivers/mfd/atmel-hlcdc.c struct atmel_hlcdc_regmap *hregmap = context; context 241 drivers/mfd/dln2.c struct dln2_dev *dln2 = urb->context; context 270 drivers/mfd/intel_soc_pmic_bxtwc.c static int regmap_ipc_byte_reg_read(void *context, unsigned int reg, context 277 drivers/mfd/intel_soc_pmic_bxtwc.c struct intel_soc_pmic *pmic = context; context 303 drivers/mfd/intel_soc_pmic_bxtwc.c static int regmap_ipc_byte_reg_write(void *context, unsigned int reg, context 309 drivers/mfd/intel_soc_pmic_bxtwc.c struct intel_soc_pmic *pmic = context; context 71 drivers/mfd/intel_soc_pmic_chtwc.c static int cht_wc_byte_reg_read(void *context, unsigned int reg, context 74 drivers/mfd/intel_soc_pmic_chtwc.c struct i2c_client *client = context; context 93 drivers/mfd/intel_soc_pmic_chtwc.c static int cht_wc_byte_reg_write(void *context, unsigned int reg, context 96 drivers/mfd/intel_soc_pmic_chtwc.c struct i2c_client *client = context; context 74 drivers/mfd/intel_soc_pmic_mrfld.c static int bcove_ipc_byte_reg_read(void *context, unsigned int reg, context 88 drivers/mfd/intel_soc_pmic_mrfld.c static int bcove_ipc_byte_reg_write(void *context, unsigned int reg, context 61 drivers/mfd/mc13xxx-spi.c static int mc13xxx_spi_read(void *context, const void *reg, size_t reg_size, context 67 drivers/mfd/mc13xxx-spi.c struct device *dev = context; context 90 drivers/mfd/mc13xxx-spi.c static int mc13xxx_spi_write(void *context, const void *data, size_t count) context 92 drivers/mfd/mc13xxx-spi.c struct device *dev = context; context 189 drivers/mfd/retu-mfd.c static int retu_regmap_read(void *context, const void *reg, size_t reg_size, context 193 drivers/mfd/retu-mfd.c struct device *dev = context; context 206 drivers/mfd/retu-mfd.c static int retu_regmap_write(void *context, const void *data, size_t count) context 210 drivers/mfd/retu-mfd.c struct device *dev = context; context 191 drivers/mfd/si476x-prop.c static int si476x_core_regmap_write(void *context, unsigned int reg, context 194 drivers/mfd/si476x-prop.c return si476x_core_cmd_set_property(context, reg, val); context 197 drivers/mfd/si476x-prop.c static int si476x_core_regmap_read(void *context, unsigned int reg, context 200 drivers/mfd/si476x-prop.c struct si476x_core *core = context; context 112 drivers/mfd/sprd-sc27xx-spi.c static int sprd_pmic_spi_write(void *context, const void *data, size_t count) context 114 drivers/mfd/sprd-sc27xx-spi.c struct device *dev = context; context 120 drivers/mfd/sprd-sc27xx-spi.c static int sprd_pmic_spi_read(void *context, context 124 drivers/mfd/sprd-sc27xx-spi.c struct device *dev = context; context 511 drivers/mfd/tps65010.c int status = board->teardown(client, board->context); context 646 drivers/mfd/tps65010.c status = board->setup(client, board->context); context 192 drivers/misc/cxl/cxllib.c attr->pid = mm->context.id; context 681 drivers/misc/cxl/native.c pid = ctx->mm->context.id; context 349 drivers/misc/eeprom/eeprom_93xx46.c static void select_assert(void *context) context 351 drivers/misc/eeprom/eeprom_93xx46.c struct eeprom_93xx46_dev *edev = context; context 356 drivers/misc/eeprom/eeprom_93xx46.c static void select_deassert(void *context) context 358 drivers/misc/eeprom/eeprom_93xx46.c struct eeprom_93xx46_dev *edev = context; context 65 drivers/misc/lattice-ecp3-config.c static void firmware_load(const struct firmware *fw, void *context) context 67 drivers/misc/lattice-ecp3-config.c struct spi_device *spi = (struct spi_device *)context; context 276 drivers/misc/mic/vop/vop_main.c bool context, context 289 drivers/misc/mic/vop/vop_main.c return __vring_new_virtqueue(index, vring, vdev, weak_barriers, context, context 7 drivers/misc/ocxl/context.c int ocxl_context_alloc(struct ocxl_context **context, struct ocxl_afu *afu, context 45 drivers/misc/ocxl/context.c *context = ctx; context 82 drivers/misc/ocxl/context.c pidr = mm->context.id; context 238 drivers/misc/ocxl/link.c WARN_ON(pe_data->mm->context.id != pid); context 68 drivers/misc/pvpanic.c pvpanic_walk_resources(struct acpi_resource *res, void *context) context 110 drivers/misc/vexpress-syscfg.c static int vexpress_syscfg_read(void *context, unsigned int index, context 113 drivers/misc/vexpress-syscfg.c struct vexpress_syscfg_func *func = context; context 118 drivers/misc/vexpress-syscfg.c static int vexpress_syscfg_write(void *context, unsigned int index, context 121 drivers/misc/vexpress-syscfg.c struct vexpress_syscfg_func *func = context; context 139 drivers/misc/vexpress-syscfg.c void *context) context 142 drivers/misc/vexpress-syscfg.c struct vexpress_syscfg *syscfg = context; context 219 drivers/misc/vexpress-syscfg.c static void vexpress_syscfg_regmap_exit(struct regmap *regmap, void *context) context 221 drivers/misc/vexpress-syscfg.c struct vexpress_syscfg *syscfg = context; context 1391 drivers/misc/vmw_balloon.c b->vmci_doorbell.context, context 44 drivers/misc/vmw_vmci/vmci_context.c static void ctx_signal_notify(struct vmci_ctx *context) context 46 drivers/misc/vmw_vmci/vmci_context.c *context->notify = true; context 49 drivers/misc/vmw_vmci/vmci_context.c static void ctx_clear_notify(struct vmci_ctx *context) context 51 drivers/misc/vmw_vmci/vmci_context.c *context->notify = false; context 58 drivers/misc/vmw_vmci/vmci_context.c static void ctx_clear_notify_call(struct vmci_ctx *context) context 60 drivers/misc/vmw_vmci/vmci_context.c if (context->pending_datagrams == 0 && context 61 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_get_size(context->pending_doorbell_array) == 0) context 62 drivers/misc/vmw_vmci/vmci_context.c ctx_clear_notify(context); context 69 drivers/misc/vmw_vmci/vmci_context.c void vmci_ctx_check_signal_notify(struct vmci_ctx *context) context 71 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 72 drivers/misc/vmw_vmci/vmci_context.c if (context->pending_datagrams) context 73 drivers/misc/vmw_vmci/vmci_context.c ctx_signal_notify(context); context 74 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 85 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 107 drivers/misc/vmw_vmci/vmci_context.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 108 drivers/misc/vmw_vmci/vmci_context.c if (!context) { context 114 drivers/misc/vmw_vmci/vmci_context.c kref_init(&context->kref); context 115 drivers/misc/vmw_vmci/vmci_context.c spin_lock_init(&context->lock); context 116 drivers/misc/vmw_vmci/vmci_context.c INIT_LIST_HEAD(&context->list_item); context 117 drivers/misc/vmw_vmci/vmci_context.c INIT_LIST_HEAD(&context->datagram_queue); context 118 drivers/misc/vmw_vmci/vmci_context.c INIT_LIST_HEAD(&context->notifier_list); context 121 drivers/misc/vmw_vmci/vmci_context.c init_waitqueue_head(&context->host_context.wait_queue); context 123 drivers/misc/vmw_vmci/vmci_context.c context->queue_pair_array = context 125 drivers/misc/vmw_vmci/vmci_context.c if (!context->queue_pair_array) { context 130 drivers/misc/vmw_vmci/vmci_context.c context->doorbell_array = context 132 drivers/misc/vmw_vmci/vmci_context.c if (!context->doorbell_array) { context 137 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array = context 139 drivers/misc/vmw_vmci/vmci_context.c if (!context->pending_doorbell_array) { context 144 drivers/misc/vmw_vmci/vmci_context.c context->user_version = user_version; context 146 drivers/misc/vmw_vmci/vmci_context.c context->priv_flags = priv_flags; context 149 drivers/misc/vmw_vmci/vmci_context.c context->cred = get_cred(cred); context 151 drivers/misc/vmw_vmci/vmci_context.c context->notify = &ctx_dummy_notify; context 152 drivers/misc/vmw_vmci/vmci_context.c context->notify_page = NULL; context 168 drivers/misc/vmw_vmci/vmci_context.c context->cid = cid; context 170 drivers/misc/vmw_vmci/vmci_context.c list_add_tail_rcu(&context->list_item, &ctx_list.head); context 173 drivers/misc/vmw_vmci/vmci_context.c return context; context 176 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->doorbell_array); context 178 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->queue_pair_array); context 180 drivers/misc/vmw_vmci/vmci_context.c kfree(context); context 188 drivers/misc/vmw_vmci/vmci_context.c void vmci_ctx_destroy(struct vmci_ctx *context) context 191 drivers/misc/vmw_vmci/vmci_context.c list_del_rcu(&context->list_item); context 195 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 262 drivers/misc/vmw_vmci/vmci_context.c ev.msg.hdr.dst.context); context 278 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 280 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(cid); context 281 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL) context 284 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 286 drivers/misc/vmw_vmci/vmci_context.c *pending = context->pending_datagrams; context 287 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 288 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 299 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 310 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(cid); context 311 drivers/misc/vmw_vmci/vmci_context.c if (!context) { context 320 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 328 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 339 drivers/misc/vmw_vmci/vmci_context.c if (context->datagram_queue_size + vmci_dg_size >= context 345 drivers/misc/vmw_vmci/vmci_context.c context->datagram_queue_size + vmci_dg_size >= context 347 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 348 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 354 drivers/misc/vmw_vmci/vmci_context.c list_add(&dq_entry->list_item, &context->datagram_queue); context 355 drivers/misc/vmw_vmci/vmci_context.c context->pending_datagrams++; context 356 drivers/misc/vmw_vmci/vmci_context.c context->datagram_queue_size += vmci_dg_size; context 357 drivers/misc/vmw_vmci/vmci_context.c ctx_signal_notify(context); context 358 drivers/misc/vmw_vmci/vmci_context.c wake_up(&context->host_context.wait_queue); context 359 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 360 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 372 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 377 drivers/misc/vmw_vmci/vmci_context.c list_for_each_entry_rcu(context, &ctx_list.head, list_item) { context 378 drivers/misc/vmw_vmci/vmci_context.c if (context->cid == cid) { context 393 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *c, *context = NULL; context 409 drivers/misc/vmw_vmci/vmci_context.c context = c; context 410 drivers/misc/vmw_vmci/vmci_context.c kref_get(&context->kref); context 416 drivers/misc/vmw_vmci/vmci_context.c return context; context 426 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context = container_of(kref, struct vmci_ctx, kref); context 435 drivers/misc/vmw_vmci/vmci_context.c ctx_fire_notification(context->cid, context->priv_flags); context 442 drivers/misc/vmw_vmci/vmci_context.c temp_handle = vmci_handle_arr_get_entry(context->queue_pair_array, 0); context 445 drivers/misc/vmw_vmci/vmci_context.c context) < VMCI_SUCCESS) { context 452 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_remove_entry(context->queue_pair_array, context 456 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_get_entry(context->queue_pair_array, 0); context 464 drivers/misc/vmw_vmci/vmci_context.c &context->datagram_queue, list_item) { context 472 drivers/misc/vmw_vmci/vmci_context.c &context->notifier_list, node) { context 477 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->queue_pair_array); context 478 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->doorbell_array); context 479 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->pending_doorbell_array); context 480 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_unset_notify(context); context 481 drivers/misc/vmw_vmci/vmci_context.c if (context->cred) context 482 drivers/misc/vmw_vmci/vmci_context.c put_cred(context->cred); context 483 drivers/misc/vmw_vmci/vmci_context.c kfree(context); context 495 drivers/misc/vmw_vmci/vmci_context.c void vmci_ctx_put(struct vmci_ctx *context) context 497 drivers/misc/vmw_vmci/vmci_context.c kref_put(&context->kref, ctx_free_ctx); context 508 drivers/misc/vmw_vmci/vmci_context.c int vmci_ctx_dequeue_datagram(struct vmci_ctx *context, context 517 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 518 drivers/misc/vmw_vmci/vmci_context.c if (context->pending_datagrams == 0) { context 519 drivers/misc/vmw_vmci/vmci_context.c ctx_clear_notify_call(context); context 520 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 525 drivers/misc/vmw_vmci/vmci_context.c list_item = context->datagram_queue.next; context 533 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 540 drivers/misc/vmw_vmci/vmci_context.c context->pending_datagrams--; context 541 drivers/misc/vmw_vmci/vmci_context.c context->datagram_queue_size -= dq_entry->dg_size; context 542 drivers/misc/vmw_vmci/vmci_context.c if (context->pending_datagrams == 0) { context 543 drivers/misc/vmw_vmci/vmci_context.c ctx_clear_notify_call(context); context 551 drivers/misc/vmw_vmci/vmci_context.c list_item = context->datagram_queue.next; context 562 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 576 drivers/misc/vmw_vmci/vmci_context.c void vmci_ctx_unset_notify(struct vmci_ctx *context) context 580 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 582 drivers/misc/vmw_vmci/vmci_context.c notify_page = context->notify_page; context 583 drivers/misc/vmw_vmci/vmci_context.c context->notify = &ctx_dummy_notify; context 584 drivers/misc/vmw_vmci/vmci_context.c context->notify_page = NULL; context 586 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 600 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 605 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 606 drivers/misc/vmw_vmci/vmci_context.c if (!context) context 616 drivers/misc/vmw_vmci/vmci_context.c if (context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED) { context 630 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 632 drivers/misc/vmw_vmci/vmci_context.c if (context->n_notifiers < VMCI_MAX_CONTEXTS) { context 633 drivers/misc/vmw_vmci/vmci_context.c list_for_each_entry(n, &context->notifier_list, node) { context 645 drivers/misc/vmw_vmci/vmci_context.c &context->notifier_list); context 646 drivers/misc/vmw_vmci/vmci_context.c context->n_notifiers++; context 654 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 657 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 667 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 672 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 673 drivers/misc/vmw_vmci/vmci_context.c if (!context) context 678 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 680 drivers/misc/vmw_vmci/vmci_context.c &context->notifier_list, node) { context 683 drivers/misc/vmw_vmci/vmci_context.c context->n_notifiers--; context 688 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 695 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 700 drivers/misc/vmw_vmci/vmci_context.c static int vmci_ctx_get_chkpt_notifiers(struct vmci_ctx *context, context 708 drivers/misc/vmw_vmci/vmci_context.c if (context->n_notifiers == 0) { context 714 drivers/misc/vmw_vmci/vmci_context.c data_size = context->n_notifiers * sizeof(*notifiers); context 724 drivers/misc/vmw_vmci/vmci_context.c list_for_each_entry(entry, &context->notifier_list, node) context 725 drivers/misc/vmw_vmci/vmci_context.c notifiers[i++] = entry->handle.context; context 732 drivers/misc/vmw_vmci/vmci_context.c static int vmci_ctx_get_chkpt_doorbells(struct vmci_ctx *context, context 738 drivers/misc/vmw_vmci/vmci_context.c n_doorbells = vmci_handle_arr_get_size(context->doorbell_array); context 752 drivers/misc/vmw_vmci/vmci_context.c context->doorbell_array, i); context 772 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 775 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 776 drivers/misc/vmw_vmci/vmci_context.c if (!context) context 779 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 783 drivers/misc/vmw_vmci/vmci_context.c result = vmci_ctx_get_chkpt_notifiers(context, buf_size, pbuf); context 798 drivers/misc/vmw_vmci/vmci_context.c result = vmci_ctx_get_chkpt_doorbells(context, buf_size, pbuf); context 807 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 808 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 864 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 867 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 868 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL) context 871 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 873 drivers/misc/vmw_vmci/vmci_context.c *db_handle_array = context->pending_doorbell_array; context 874 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array = context 876 drivers/misc/vmw_vmci/vmci_context.c if (!context->pending_doorbell_array) { context 877 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array = *db_handle_array; context 883 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 884 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 900 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context = vmci_ctx_get(context_id); context 902 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 914 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array); context 922 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array); context 924 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_destroy(context->pending_doorbell_array); context 925 drivers/misc/vmw_vmci/vmci_context.c context->pending_doorbell_array = db_handle_array; context 928 drivers/misc/vmw_vmci/vmci_context.c ctx_clear_notify_call(context); context 930 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 931 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 946 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 952 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 953 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL) context 956 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 957 drivers/misc/vmw_vmci/vmci_context.c if (!vmci_handle_arr_has_entry(context->doorbell_array, handle)) context 958 drivers/misc/vmw_vmci/vmci_context.c result = vmci_handle_arr_append_entry(&context->doorbell_array, context 963 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 964 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 975 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 981 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 982 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL) context 985 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 987 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_remove_entry(context->doorbell_array, handle); context 988 drivers/misc/vmw_vmci/vmci_context.c vmci_handle_arr_remove_entry(context->pending_doorbell_array, handle); context 989 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 991 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 1003 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 1009 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 1010 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL) context 1013 drivers/misc/vmw_vmci/vmci_context.c spin_lock(&context->lock); context 1015 drivers/misc/vmw_vmci/vmci_context.c struct vmci_handle_arr *arr = context->doorbell_array; context 1019 drivers/misc/vmw_vmci/vmci_context.c struct vmci_handle_arr *arr = context->pending_doorbell_array; context 1022 drivers/misc/vmw_vmci/vmci_context.c spin_unlock(&context->lock); context 1024 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 1050 drivers/misc/vmw_vmci/vmci_context.c dst_context = vmci_ctx_get(handle.context); context 1052 drivers/misc/vmw_vmci/vmci_context.c pr_devel("Invalid context (ID=0x%x)\n", handle.context); context 1056 drivers/misc/vmw_vmci/vmci_context.c if (src_cid != handle.context) { context 1060 drivers/misc/vmw_vmci/vmci_context.c VMCI_CONTEXT_IS_VM(handle.context)) { context 1062 drivers/misc/vmw_vmci/vmci_context.c src_cid, handle.context); context 1070 drivers/misc/vmw_vmci/vmci_context.c handle.context, handle.resource); context 1085 drivers/misc/vmw_vmci/vmci_context.c if (handle.context == VMCI_HOST_CONTEXT_ID) { context 1117 drivers/misc/vmw_vmci/vmci_context.c bool vmci_ctx_supports_host_qp(struct vmci_ctx *context) context 1119 drivers/misc/vmw_vmci/vmci_context.c return context && context->user_version >= VMCI_VERSION_HOSTQP; context 1126 drivers/misc/vmw_vmci/vmci_context.c int vmci_ctx_qp_create(struct vmci_ctx *context, struct vmci_handle handle) context 1130 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL || vmci_handle_is_invalid(handle)) context 1133 drivers/misc/vmw_vmci/vmci_context.c if (!vmci_handle_arr_has_entry(context->queue_pair_array, handle)) context 1135 drivers/misc/vmw_vmci/vmci_context.c &context->queue_pair_array, handle); context 1146 drivers/misc/vmw_vmci/vmci_context.c int vmci_ctx_qp_destroy(struct vmci_ctx *context, struct vmci_handle handle) context 1150 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL || vmci_handle_is_invalid(handle)) context 1153 drivers/misc/vmw_vmci/vmci_context.c hndl = vmci_handle_arr_remove_entry(context->queue_pair_array, handle); context 1163 drivers/misc/vmw_vmci/vmci_context.c bool vmci_ctx_qp_exists(struct vmci_ctx *context, struct vmci_handle handle) context 1165 drivers/misc/vmw_vmci/vmci_context.c if (context == NULL || vmci_handle_is_invalid(handle)) context 1168 drivers/misc/vmw_vmci/vmci_context.c return vmci_handle_arr_has_entry(context->queue_pair_array, handle); context 1181 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context; context 1183 drivers/misc/vmw_vmci/vmci_context.c context = vmci_ctx_get(context_id); context 1184 drivers/misc/vmw_vmci/vmci_context.c if (!context) context 1187 drivers/misc/vmw_vmci/vmci_context.c flags = context->priv_flags; context 1188 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 1207 drivers/misc/vmw_vmci/vmci_context.c struct vmci_ctx *context = vmci_ctx_get(context_id); context 1208 drivers/misc/vmw_vmci/vmci_context.c if (context) { context 1209 drivers/misc/vmw_vmci/vmci_context.c if (context->cred) context 1210 drivers/misc/vmw_vmci/vmci_context.c is_owner = uid_eq(context->cred->uid, uid); context 1211 drivers/misc/vmw_vmci/vmci_context.c vmci_ctx_put(context); context 129 drivers/misc/vmw_vmci/vmci_context.h void vmci_ctx_destroy(struct vmci_ctx *context); context 131 drivers/misc/vmw_vmci/vmci_context.h bool vmci_ctx_supports_host_qp(struct vmci_ctx *context); context 133 drivers/misc/vmw_vmci/vmci_context.h int vmci_ctx_dequeue_datagram(struct vmci_ctx *context, context 137 drivers/misc/vmw_vmci/vmci_context.h void vmci_ctx_put(struct vmci_ctx *context); context 147 drivers/misc/vmw_vmci/vmci_context.h int vmci_ctx_qp_create(struct vmci_ctx *context, struct vmci_handle handle); context 148 drivers/misc/vmw_vmci/vmci_context.h int vmci_ctx_qp_destroy(struct vmci_ctx *context, struct vmci_handle handle); context 149 drivers/misc/vmw_vmci/vmci_context.h bool vmci_ctx_qp_exists(struct vmci_ctx *context, struct vmci_handle handle); context 151 drivers/misc/vmw_vmci/vmci_context.h void vmci_ctx_check_signal_notify(struct vmci_ctx *context); context 152 drivers/misc/vmw_vmci/vmci_context.h void vmci_ctx_unset_notify(struct vmci_ctx *context); context 167 drivers/misc/vmw_vmci/vmci_context.h static inline u32 vmci_ctx_get_id(struct vmci_ctx *context) context 169 drivers/misc/vmw_vmci/vmci_context.h if (!context) context 171 drivers/misc/vmw_vmci/vmci_context.h return context->cid; context 92 drivers/misc/vmw_vmci/vmci_datagram.c handle.context, handle.resource, result); context 166 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->dst.context == VMCI_HYPERVISOR_CONTEXT_ID) context 170 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->src.context != context_id) { context 172 drivers/misc/vmw_vmci/vmci_datagram.c context_id, dg->src.context, dg->src.resource); context 181 drivers/misc/vmw_vmci/vmci_datagram.c dg->src.context, dg->src.resource); context 186 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->dst.context == VMCI_HOST_CONTEXT_ID) { context 191 drivers/misc/vmw_vmci/vmci_datagram.c if (dg->src.context == VMCI_HYPERVISOR_CONTEXT_ID && context 200 drivers/misc/vmw_vmci/vmci_datagram.c dg->dst.context, dg->dst.resource); context 217 drivers/misc/vmw_vmci/vmci_datagram.c dg->src.context == VMCI_HOST_CONTEXT_ID) { context 253 drivers/misc/vmw_vmci/vmci_datagram.c if (context_id != dg->dst.context) { context 256 drivers/misc/vmw_vmci/vmci_datagram.c (dg->dst.context))) { context 265 drivers/misc/vmw_vmci/vmci_datagram.c context_id, dg->dst.context); context 275 drivers/misc/vmw_vmci/vmci_datagram.c retval = vmci_ctx_enqueue_datagram(dg->dst.context, new_dg); context 332 drivers/misc/vmw_vmci/vmci_datagram.c dg->src.context, dg->dst.context, retval); context 363 drivers/misc/vmw_vmci/vmci_datagram.c dg->dst.context, dg->dst.resource); context 467 drivers/misc/vmw_vmci/vmci_datagram.c handle.context, handle.resource); context 87 drivers/misc/vmw_vmci/vmci_doorbell.c if (priv_flags == NULL || handle.context == VMCI_INVALID_ID) context 90 drivers/misc/vmw_vmci/vmci_doorbell.c if (handle.context == VMCI_HOST_CONTEXT_ID) { context 102 drivers/misc/vmw_vmci/vmci_doorbell.c } else if (handle.context == VMCI_HYPERVISOR_CONTEXT_ID) { context 109 drivers/misc/vmw_vmci/vmci_doorbell.c *priv_flags = vmci_context_get_priv_flags(handle.context); context 299 drivers/misc/vmw_vmci/vmci_doorbell.c handle.context, handle.resource); context 307 drivers/misc/vmw_vmci/vmci_doorbell.c handle.context, handle.resource); context 449 drivers/misc/vmw_vmci/vmci_doorbell.c if (handle->context == VMCI_HOST_CONTEXT_ID || context 451 drivers/misc/vmw_vmci/vmci_doorbell.c vmci_get_context_id() == handle->context)) { context 457 drivers/misc/vmw_vmci/vmci_doorbell.c handle->context, handle->resource); context 479 drivers/misc/vmw_vmci/vmci_doorbell.c new_handle.context, new_handle.resource, result); context 525 drivers/misc/vmw_vmci/vmci_doorbell.c handle.context, handle.resource); context 553 drivers/misc/vmw_vmci/vmci_doorbell.c handle.context, handle.resource, result); context 292 drivers/misc/vmw_vmci/vmci_guest.c if (dg->src.context == VMCI_HYPERVISOR_CONTEXT_ID && context 85 drivers/misc/vmw_vmci/vmci_host.c struct vmci_ctx *context; context 138 drivers/misc/vmw_vmci/vmci_host.c vmci_ctx_destroy(vmci_host_dev->context); context 139 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context = NULL; context 163 drivers/misc/vmw_vmci/vmci_host.c struct vmci_ctx *context = vmci_host_dev->context; context 169 drivers/misc/vmw_vmci/vmci_host.c poll_wait(filp, &context->host_context.wait_queue, context 172 drivers/misc/vmw_vmci/vmci_host.c spin_lock(&context->lock); context 173 drivers/misc/vmw_vmci/vmci_host.c if (context->pending_datagrams > 0 || context 175 drivers/misc/vmw_vmci/vmci_host.c context->pending_doorbell_array) > 0) { context 178 drivers/misc/vmw_vmci/vmci_host.c spin_unlock(&context->lock); context 216 drivers/misc/vmw_vmci/vmci_host.c static int vmci_host_setup_notify(struct vmci_ctx *context, context 221 drivers/misc/vmw_vmci/vmci_host.c if (context->notify_page) { context 237 drivers/misc/vmw_vmci/vmci_host.c retval = get_user_pages_fast(uva, 1, FOLL_WRITE, &context->notify_page); context 239 drivers/misc/vmw_vmci/vmci_host.c context->notify_page = NULL; context 246 drivers/misc/vmw_vmci/vmci_host.c context->notify = kmap(context->notify_page) + (uva & (PAGE_SIZE - 1)); context 247 drivers/misc/vmw_vmci/vmci_host.c vmci_ctx_check_signal_notify(context); context 314 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context = vmci_ctx_create(init_block.cid, context 319 drivers/misc/vmw_vmci/vmci_host.c if (IS_ERR(vmci_host_dev->context)) { context 320 drivers/misc/vmw_vmci/vmci_host.c retval = PTR_ERR(vmci_host_dev->context); context 329 drivers/misc/vmw_vmci/vmci_host.c init_block.cid = vmci_ctx_get_id(vmci_host_dev->context); context 331 drivers/misc/vmw_vmci/vmci_host.c vmci_ctx_destroy(vmci_host_dev->context); context 332 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context = NULL; context 391 drivers/misc/vmw_vmci/vmci_host.c dg->dst.context, dg->dst.resource, context 392 drivers/misc/vmw_vmci/vmci_host.c dg->src.context, dg->src.resource, context 396 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 421 drivers/misc/vmw_vmci/vmci_host.c recv_info.result = vmci_ctx_dequeue_datagram(vmci_host_dev->context, context 465 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context); context 490 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context); context 496 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context); context 531 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context, context 539 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context, 0); context 596 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context); context 640 drivers/misc/vmw_vmci/vmci_host.c vmci_host_dev->context); context 666 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 689 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 713 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 750 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 785 drivers/misc/vmw_vmci/vmci_host.c vmci_host_setup_notify(vmci_host_dev->context, context 788 drivers/misc/vmw_vmci/vmci_host.c vmci_ctx_unset_notify(vmci_host_dev->context); context 816 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 875 drivers/misc/vmw_vmci/vmci_host.c cid = vmci_ctx_get_id(vmci_host_dev->context); context 910 drivers/misc/vmw_vmci/vmci_queue_pair.c handle.context, handle.resource, result); context 1194 drivers/misc/vmw_vmci/vmci_queue_pair.c if (queue_pair_entry->qp.handle.context != context_id || context 1281 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context, context 1286 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 1300 drivers/misc/vmw_vmci/vmci_queue_pair.c if (handle.context != context_id && handle.context != peer) context 1317 drivers/misc/vmw_vmci/vmci_queue_pair.c if (vmci_ctx_get_id(context) == VMCI_HOST_CONTEXT_ID && !is_local) { context 1343 drivers/misc/vmw_vmci/vmci_queue_pair.c !!(context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED); context 1411 drivers/misc/vmw_vmci/vmci_queue_pair.c handle.context, handle.resource, result); context 1423 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_qp_create(context, entry->qp.handle); context 1509 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context, context 1514 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 1540 drivers/misc/vmw_vmci/vmci_queue_pair.c if ((context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED) && context 1565 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!vmci_ctx_supports_host_qp(context)) context 1659 drivers/misc/vmw_vmci/vmci_queue_pair.c entry->create_id, entry->qp.handle.context, context 1675 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_qp_create(context, entry->qp.handle); context 1694 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context, context 1700 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 1709 drivers/misc/vmw_vmci/vmci_queue_pair.c !context || context_id == VMCI_INVALID_ID || context 1710 drivers/misc/vmw_vmci/vmci_queue_pair.c handle.context == VMCI_INVALID_ID) { context 1724 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!is_local && vmci_ctx_qp_exists(context, handle)) { context 1726 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 1739 drivers/misc/vmw_vmci/vmci_queue_pair.c context, wakeup_cb, client_data, ent); context 1745 drivers/misc/vmw_vmci/vmci_queue_pair.c context, wakeup_cb, client_data, ent); context 1773 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context; context 1784 drivers/misc/vmw_vmci/vmci_queue_pair.c context = vmci_ctx_get(VMCI_HOST_CONTEXT_ID); context 1788 drivers/misc/vmw_vmci/vmci_queue_pair.c produce_size, consume_size, NULL, context, context 1811 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_put(context); context 1856 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context; context 1858 drivers/misc/vmw_vmci/vmci_queue_pair.c context = vmci_ctx_get(VMCI_HOST_CONTEXT_ID); context 1860 drivers/misc/vmw_vmci/vmci_queue_pair.c result = vmci_qp_broker_detach(handle, context); context 1862 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_put(context); context 1928 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context) context 1932 drivers/misc/vmw_vmci/vmci_queue_pair.c page_store, context, NULL, NULL, NULL, NULL); context 1954 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context) context 1958 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 1960 drivers/misc/vmw_vmci/vmci_queue_pair.c if (vmci_handle_is_invalid(handle) || !context || context 1974 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!vmci_ctx_qp_exists(context, handle)) { context 1976 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2030 drivers/misc/vmw_vmci/vmci_queue_pair.c entry->create_id, entry->qp.handle.context, context 2070 drivers/misc/vmw_vmci/vmci_queue_pair.c int vmci_qp_broker_detach(struct vmci_handle handle, struct vmci_ctx *context) context 2073 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 2078 drivers/misc/vmw_vmci/vmci_queue_pair.c if (vmci_handle_is_invalid(handle) || !context || context 2085 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!vmci_ctx_qp_exists(context, handle)) { context 2087 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2095 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2137 drivers/misc/vmw_vmci/vmci_queue_pair.c handle.context, handle.resource, context 2174 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_qp_destroy(context, handle); context 2185 drivers/misc/vmw_vmci/vmci_queue_pair.c vmci_ctx_qp_destroy(context, handle); context 2201 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context, context 2205 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 2208 drivers/misc/vmw_vmci/vmci_queue_pair.c if (vmci_handle_is_invalid(handle) || !context || context 2214 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!vmci_ctx_qp_exists(context, handle)) { context 2216 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2224 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2310 drivers/misc/vmw_vmci/vmci_queue_pair.c struct vmci_ctx *context, context 2314 drivers/misc/vmw_vmci/vmci_queue_pair.c const u32 context_id = vmci_ctx_get_id(context); context 2317 drivers/misc/vmw_vmci/vmci_queue_pair.c if (vmci_handle_is_invalid(handle) || !context || context 2323 drivers/misc/vmw_vmci/vmci_queue_pair.c if (!vmci_ctx_qp_exists(context, handle)) { context 2325 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2333 drivers/misc/vmw_vmci/vmci_queue_pair.c context_id, handle.context, handle.resource); context 2348 drivers/misc/vmw_vmci/vmci_queue_pair.c handle.context, handle.resource, result); context 146 drivers/misc/vmw_vmci/vmci_queue_pair.h struct vmci_ctx *context); context 149 drivers/misc/vmw_vmci/vmci_queue_pair.h struct vmci_ctx *context); context 150 drivers/misc/vmw_vmci/vmci_queue_pair.h int vmci_qp_broker_detach(struct vmci_handle handle, struct vmci_ctx *context); context 161 drivers/misc/vmw_vmci/vmci_queue_pair.h struct vmci_ctx *context, u64 guest_mem); context 163 drivers/misc/vmw_vmci/vmci_queue_pair.h struct vmci_ctx *context, u32 gid); context 47 drivers/misc/vmw_vmci/vmci_resource.c u32 cid = r->handle.context; context 52 drivers/misc/vmw_vmci/vmci_resource.c (cid == handle.context || cid == VMCI_INVALID_ID || context 53 drivers/misc/vmw_vmci/vmci_resource.c handle.context == VMCI_INVALID_ID)) { context 110 drivers/misc/vmw_vmci/vmci_resource.c handle.resource = vmci_resource_find_id(handle.context, context 42 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == dst->context) context 46 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_HYPERVISOR_CONTEXT_ID == dst->context) { context 64 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_HOST_CONTEXT_ID == src->context) context 75 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == src->context && context 77 drivers/misc/vmw_vmci/vmci_route.c src->context = vmci_get_context_id(); context 85 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_HOST_CONTEXT_ID == dst->context) { context 94 drivers/misc/vmw_vmci/vmci_route.c if (src->context == VMCI_HYPERVISOR_CONTEXT_ID) { context 113 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == src->context) context 114 drivers/misc/vmw_vmci/vmci_route.c src->context = vmci_get_context_id(); context 130 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == src->context) { context 139 drivers/misc/vmw_vmci/vmci_route.c src->context = VMCI_HOST_CONTEXT_ID; context 153 drivers/misc/vmw_vmci/vmci_route.c if (vmci_ctx_exists(dst->context)) { context 154 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == src->context) { context 165 drivers/misc/vmw_vmci/vmci_route.c src->context = VMCI_HOST_CONTEXT_ID; context 166 drivers/misc/vmw_vmci/vmci_route.c } else if (VMCI_CONTEXT_IS_VM(src->context) && context 167 drivers/misc/vmw_vmci/vmci_route.c src->context != dst->context) { context 209 drivers/misc/vmw_vmci/vmci_route.c if (VMCI_INVALID_ID == src->context) context 210 drivers/misc/vmw_vmci/vmci_route.c src->context = vmci_get_context_id(); context 165 drivers/mmc/host/ushc.c struct ushc_data *ushc = urb->context; context 199 drivers/mmc/host/ushc.c struct ushc_data *ushc = urb->context; context 209 drivers/mmc/host/ushc.c struct ushc_data *ushc = urb->context; context 217 drivers/mmc/host/ushc.c struct ushc_data *ushc = urb->context; context 447 drivers/mmc/host/vub300.c struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context; context 455 drivers/mmc/host/vub300.c struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context; context 793 drivers/mmc/host/vub300.c struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context; context 822 drivers/mmc/host/vub300.c struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context; context 1394 drivers/mmc/host/vub300.c complete((struct completion *)urb->context); context 1411 drivers/mmc/host/vub300.c vub300->urb->context = &done; context 189 drivers/net/can/m_can/tcan4x5x.c static int regmap_spi_gather_write(void *context, const void *reg, context 193 drivers/net/can/m_can/tcan4x5x.c struct device *dev = context; context 211 drivers/net/can/m_can/tcan4x5x.c static int tcan4x5x_regmap_write(void *context, const void *data, size_t count) context 216 drivers/net/can/m_can/tcan4x5x.c return regmap_spi_gather_write(context, reg, 4, val, count - 4); context 219 drivers/net/can/m_can/tcan4x5x.c static int regmap_spi_async_write(void *context, context 232 drivers/net/can/m_can/tcan4x5x.c static int tcan4x5x_regmap_read(void *context, context 236 drivers/net/can/m_can/tcan4x5x.c struct device *dev = context; context 262 drivers/net/can/usb/ems_usb.c struct ems_usb *dev = urb->context; context 408 drivers/net/can/usb/ems_usb.c struct ems_usb *dev = urb->context; context 494 drivers/net/can/usb/ems_usb.c struct ems_tx_urb_context *context = urb->context; context 498 drivers/net/can/usb/ems_usb.c BUG_ON(!context); context 500 drivers/net/can/usb/ems_usb.c dev = context->dev; context 519 drivers/net/can/usb/ems_usb.c netdev->stats.tx_bytes += context->dlc; context 521 drivers/net/can/usb/ems_usb.c can_get_echo_skb(netdev, context->echo_index); context 524 drivers/net/can/usb/ems_usb.c context->echo_index = MAX_TX_URBS; context 730 drivers/net/can/usb/ems_usb.c struct ems_tx_urb_context *context = NULL; context 777 drivers/net/can/usb/ems_usb.c context = &dev->tx_contexts[i]; context 786 drivers/net/can/usb/ems_usb.c if (!context) { context 795 drivers/net/can/usb/ems_usb.c context->dev = dev; context 796 drivers/net/can/usb/ems_usb.c context->echo_index = i; context 797 drivers/net/can/usb/ems_usb.c context->dlc = cf->can_dlc; context 800 drivers/net/can/usb/ems_usb.c size, ems_usb_write_bulk_callback, context); context 804 drivers/net/can/usb/ems_usb.c can_put_echo_skb(skb, netdev, context->echo_index); context 810 drivers/net/can/usb/ems_usb.c can_free_echo_skb(netdev, context->echo_index); context 349 drivers/net/can/usb/esd_usb2.c struct esd_tx_urb_context *context; context 354 drivers/net/can/usb/esd_usb2.c context = &priv->tx_contexts[msg->msg.txdone.hnd & (MAX_TX_URBS - 1)]; context 358 drivers/net/can/usb/esd_usb2.c stats->tx_bytes += context->dlc; context 359 drivers/net/can/usb/esd_usb2.c can_get_echo_skb(netdev, context->echo_index); context 362 drivers/net/can/usb/esd_usb2.c can_free_echo_skb(netdev, context->echo_index); context 366 drivers/net/can/usb/esd_usb2.c context->echo_index = MAX_TX_URBS; context 374 drivers/net/can/usb/esd_usb2.c struct esd_usb2 *dev = urb->context; context 453 drivers/net/can/usb/esd_usb2.c struct esd_tx_urb_context *context = urb->context; context 458 drivers/net/can/usb/esd_usb2.c WARN_ON(!context); context 460 drivers/net/can/usb/esd_usb2.c priv = context->priv; context 705 drivers/net/can/usb/esd_usb2.c struct esd_tx_urb_context *context = NULL; context 756 drivers/net/can/usb/esd_usb2.c context = &priv->tx_contexts[i]; context 764 drivers/net/can/usb/esd_usb2.c if (!context) { context 770 drivers/net/can/usb/esd_usb2.c context->priv = priv; context 771 drivers/net/can/usb/esd_usb2.c context->echo_index = i; context 772 drivers/net/can/usb/esd_usb2.c context->dlc = cf->can_dlc; context 779 drivers/net/can/usb/esd_usb2.c esd_usb2_write_bulk_callback, context); context 785 drivers/net/can/usb/esd_usb2.c can_put_echo_skb(skb, netdev, context->echo_index); context 795 drivers/net/can/usb/esd_usb2.c can_free_echo_skb(netdev, context->echo_index); context 292 drivers/net/can/usb/gs_usb.c struct gs_usb *usbcan = urb->context; context 449 drivers/net/can/usb/gs_usb.c struct gs_tx_context *txc = urb->context; context 205 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c struct net_device *netdev = urb->context; context 269 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c struct kvaser_usb *dev = urb->context; context 482 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c struct kvaser_usb_tx_urb_context *context = urb->context; context 486 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c if (WARN_ON(!context)) context 489 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c priv = context->priv; context 507 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c struct kvaser_usb_tx_urb_context *context = NULL; context 528 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context = &priv->tx_contexts[i]; context 530 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context->echo_index = i; context 541 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c if (!context) { context 548 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c buf = dev->ops->dev_frame_to_cmd(priv, skb, &context->dlc, &cmd_len, context 549 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context->echo_index); context 555 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context->echo_index = dev->max_tx_urbs; context 563 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context->priv = priv; context 565 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c can_put_echo_skb(skb, netdev, context->echo_index); context 571 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context); context 578 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c can_free_echo_skb(netdev, context->echo_index); context 579 drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c context->echo_index = dev->max_tx_urbs; context 1094 drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c struct kvaser_usb_tx_urb_context *context; context 1118 drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c context = &priv->tx_contexts[transid % dev->max_tx_urbs]; context 1123 drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c stats->tx_bytes += can_dlc2len(context->dlc); context 1128 drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c can_get_echo_skb(priv->netdev, context->echo_index); context 1129 drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c context->echo_index = dev->max_tx_urbs; context 546 drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c struct kvaser_usb_tx_urb_context *context; context 567 drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c context = &priv->tx_contexts[tid % dev->max_tx_urbs]; context 593 drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c stats->tx_bytes += context->dlc; context 597 drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c can_get_echo_skb(priv->netdev, context->echo_index); context 598 drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c context->echo_index = dev->max_tx_urbs; context 221 drivers/net/can/usb/mcba_usb.c struct mcba_usb_ctx *ctx = urb->context; context 569 drivers/net/can/usb/mcba_usb.c struct mcba_priv *priv = urb->context; context 268 drivers/net/can/usb/peak_usb/pcan_usb.c struct pcan_usb *pdev = urb->context; context 167 drivers/net/can/usb/peak_usb/pcan_usb_core.c struct peak_usb_device *dev = urb->context; context 232 drivers/net/can/usb/peak_usb/pcan_usb_core.c struct peak_tx_urb_context *context = urb->context; context 236 drivers/net/can/usb/peak_usb/pcan_usb_core.c BUG_ON(!context); context 238 drivers/net/can/usb/peak_usb/pcan_usb_core.c dev = context->dev; context 251 drivers/net/can/usb/peak_usb/pcan_usb_core.c netdev->stats.tx_bytes += context->data_len; context 270 drivers/net/can/usb/peak_usb/pcan_usb_core.c can_get_echo_skb(netdev, context->echo_index); context 271 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->echo_index = PCAN_USB_MAX_TX_URBS; context 285 drivers/net/can/usb/peak_usb/pcan_usb_core.c struct peak_tx_urb_context *context = NULL; context 298 drivers/net/can/usb/peak_usb/pcan_usb_core.c context = dev->tx_contexts + i; context 302 drivers/net/can/usb/peak_usb/pcan_usb_core.c if (!context) { context 307 drivers/net/can/usb/peak_usb/pcan_usb_core.c urb = context->urb; context 319 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->echo_index = i; context 322 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->data_len = cfd->len; context 326 drivers/net/can/usb/peak_usb/pcan_usb_core.c can_put_echo_skb(skb, netdev, context->echo_index); context 332 drivers/net/can/usb/peak_usb/pcan_usb_core.c can_free_echo_skb(netdev, context->echo_index); context 337 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->echo_index = PCAN_USB_MAX_TX_URBS; context 427 drivers/net/can/usb/peak_usb/pcan_usb_core.c struct peak_tx_urb_context *context; context 445 drivers/net/can/usb/peak_usb/pcan_usb_core.c context = dev->tx_contexts + i; context 446 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->dev = dev; context 447 drivers/net/can/usb/peak_usb/pcan_usb_core.c context->urb = urb; context 452 drivers/net/can/usb/peak_usb/pcan_usb_core.c peak_usb_write_bulk_callback, context); context 470 drivers/net/can/usb/peak_usb/pcan_usb_pro.c peak_usb_restart_complete(urb->context); context 640 drivers/net/can/usb/ucan.c struct ucan_urb_context *context; context 660 drivers/net/can/usb/ucan.c context = &up->context_array[echo_index]; context 661 drivers/net/can/usb/ucan.c dlc = READ_ONCE(context->dlc); context 666 drivers/net/can/usb/ucan.c if (!ucan_release_context(up, context)) context 689 drivers/net/can/usb/ucan.c struct ucan_priv *up = urb->context; context 818 drivers/net/can/usb/ucan.c struct ucan_urb_context *context = urb->context; context 821 drivers/net/can/usb/ucan.c if (WARN_ON_ONCE(!context)) context 830 drivers/net/can/usb/ucan.c up = context->up; context 846 drivers/net/can/usb/ucan.c can_free_echo_skb(up->netdev, context - up->context_array); context 852 drivers/net/can/usb/ucan.c if (!ucan_release_context(up, context)) context 1048 drivers/net/can/usb/ucan.c struct ucan_urb_context *context, context 1089 drivers/net/can/usb/ucan.c context->dlc = cf->can_dlc; context 1097 drivers/net/can/usb/ucan.c m, mlen, ucan_write_bulk_callback, context); context 1118 drivers/net/can/usb/ucan.c struct ucan_urb_context *context; context 1127 drivers/net/can/usb/ucan.c context = ucan_alloc_context(up); context 1128 drivers/net/can/usb/ucan.c echo_index = context - up->context_array; context 1130 drivers/net/can/usb/ucan.c if (WARN_ON_ONCE(!context)) context 1134 drivers/net/can/usb/ucan.c urb = ucan_prepare_tx_urb(up, context, cf, echo_index); context 1152 drivers/net/can/usb/ucan.c if (!ucan_release_context(up, context)) context 1182 drivers/net/can/usb/ucan.c if (!ucan_release_context(up, context)) context 501 drivers/net/can/usb/usb_8dev.c struct usb_8dev_priv *priv = urb->context; context 563 drivers/net/can/usb/usb_8dev.c struct usb_8dev_tx_urb_context *context = urb->context; context 567 drivers/net/can/usb/usb_8dev.c BUG_ON(!context); context 569 drivers/net/can/usb/usb_8dev.c priv = context->priv; context 586 drivers/net/can/usb/usb_8dev.c netdev->stats.tx_bytes += context->dlc; context 588 drivers/net/can/usb/usb_8dev.c can_get_echo_skb(netdev, context->echo_index); context 593 drivers/net/can/usb/usb_8dev.c context->echo_index = MAX_TX_URBS; context 607 drivers/net/can/usb/usb_8dev.c struct usb_8dev_tx_urb_context *context = NULL; context 646 drivers/net/can/usb/usb_8dev.c context = &priv->tx_contexts[i]; context 654 drivers/net/can/usb/usb_8dev.c if (!context) context 657 drivers/net/can/usb/usb_8dev.c context->priv = priv; context 658 drivers/net/can/usb/usb_8dev.c context->echo_index = i; context 659 drivers/net/can/usb/usb_8dev.c context->dlc = cf->can_dlc; context 663 drivers/net/can/usb/usb_8dev.c buf, size, usb_8dev_write_bulk_callback, context); context 667 drivers/net/can/usb/usb_8dev.c can_put_echo_skb(skb, netdev, context->echo_index); context 694 drivers/net/can/usb/usb_8dev.c can_free_echo_skb(netdev, context->echo_index); context 2694 drivers/net/ethernet/amd/xgbe/xgbe-drv.c unsigned int last, error, context_next, context; context 2744 drivers/net/ethernet/amd/xgbe/xgbe-drv.c context = XGMAC_GET_BITS(packet->attributes, context 2760 drivers/net/ethernet/amd/xgbe/xgbe-drv.c if (!context) { context 1020 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c void hw_atl_rpo_outer_vlan_tag_mode_set(void *context, context 1023 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c aq_hw_write_reg_bit(context, HW_ATL_RPO_OUTER_VL_INS_MODE_ADR, context 1029 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context) context 1031 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c return aq_hw_read_reg_bit(context, HW_ATL_RPO_OUTER_VL_INS_MODE_ADR, context 494 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h void hw_atl_rpo_outer_vlan_tag_mode_set(void *context, context 497 drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context); context 1648 drivers/net/ethernet/broadcom/bnx2x/bnx2x.h struct hw_context context[ILT_MAX_L2_LINES]; context 7975 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c ilt->lines[cdu_ilt_start + i].page = bp->context[i].vcxt; context 7977 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bp->context[i].cxt_mapping; context 7978 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c ilt->lines[cdu_ilt_start + i].size = bp->context[i].size; context 8278 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c BNX2X_PCI_FREE(bp->context[i].vcxt, bp->context[i].cxt_mapping, context 8279 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bp->context[i].size); context 8368 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bp->context[i].size = min(CDU_ILT_PAGE_SZ, context 8370 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bp->context[i].vcxt = BNX2X_PCI_ALLOC(&bp->context[i].cxt_mapping, context 8371 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bp->context[i].size); context 8372 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c if (!bp->context[i].vcxt) context 8374 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c allocated += bp->context[i].size; context 8765 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c &bp->context[cxt_index].vcxt[cxt_offset].eth; context 14583 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c &bp->context[cxt_index]. context 1345 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c struct hw_dma *cxt = &bp->vfdb->context[i]; context 329 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.h struct hw_dma context[BNX2X_VF_CIDS/ILT_PAGE_CIDS]; context 330 drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.h #define BP_VF_CXT_PAGE(bp, i) (&(bp)->vfdb->context[i]) context 3583 drivers/net/ethernet/broadcom/cnic.c u32 l5_cid, struct cnic_sock **csk, void *context) context 3609 drivers/net/ethernet/broadcom/cnic.c csk1->context = context; context 262 drivers/net/ethernet/broadcom/cnic_if.h void *context; context 1409 drivers/net/ethernet/cavium/thunder/thunder_bgx.c u32 lvl, void *context, void **rv) context 1411 drivers/net/ethernet/cavium/thunder/thunder_bgx.c struct bgx *bgx = context; context 1429 drivers/net/ethernet/cavium/thunder/thunder_bgx.c void *context, void **ret_val) context 1432 drivers/net/ethernet/cavium/thunder/thunder_bgx.c struct bgx *bgx = context; context 1016 drivers/net/ethernet/emulex/benet/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, valid, req->context, 1); context 1018 drivers/net/ethernet/emulex/benet/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, size, req->context, 0); context 1019 drivers/net/ethernet/emulex/benet/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, count, req->context, context 1021 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(req->context, sizeof(req->context)); context 1170 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 1207 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 1248 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 1282 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 1313 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 1326 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 2361 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 2372 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 3898 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 3926 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(req->context, sizeof(req->context)); context 3953 drivers/net/ethernet/emulex/benet/be_cmds.c ctxt = &req->context; context 3969 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_cpu_to_le(req->context, sizeof(req->context)); context 3976 drivers/net/ethernet/emulex/benet/be_cmds.c be_dws_le_to_cpu(&resp->context, sizeof(resp->context)); context 3978 drivers/net/ethernet/emulex/benet/be_cmds.c pvid, &resp->context); context 3983 drivers/net/ethernet/emulex/benet/be_cmds.c port_fwd_type, &resp->context); context 3987 drivers/net/ethernet/emulex/benet/be_cmds.c spoofchk, &resp->context); context 392 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_eq_context) / 8]; context 495 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_cq_context_be) / 8]; context 557 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_mcc_context_be) / 8]; context 566 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_mcc_context_v1) / 8]; context 1470 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_lancer_write_obj_context) / 8]; context 1867 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_set_hsw_context) / 8]; context 1891 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_get_hsw_req_context) / 8]; context 1896 drivers/net/ethernet/emulex/benet/be_cmds.h u8 context[sizeof(struct amap_get_hsw_resp_context) / 8]; context 767 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV4); context 768 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV6); context 769 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->ipv6_ext = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV6_EXT); context 770 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->tcp_ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, TCP_IPV4); context 771 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->tcp_ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, TCP_IPV6); context 772 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->tcp_ipv6_ext = HINIC_RSS_TYPE_GET(ctx_tbl.context, context 774 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->udp_ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, UDP_IPV4); context 775 drivers/net/ethernet/huawei/hinic/hinic_port.c rss_type->udp_ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, UDP_IPV6); context 286 drivers/net/ethernet/huawei/hinic/hinic_port.h u32 context; context 1703 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c u32 context = 0; context 1709 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c ret = mvpp22_rss_context_create(port, &context); context 1713 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c table = mvpp22_rss_table_get(port->priv, context); context 1717 drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c port->rss_ctx[0] = context; context 342 drivers/net/ethernet/mellanox/mlx4/cmd.c struct mlx4_cmd_context *context; context 350 drivers/net/ethernet/mellanox/mlx4/cmd.c context = &cmd->context[cmd->free_head]; context 351 drivers/net/ethernet/mellanox/mlx4/cmd.c context->token += cmd->token_mask + 1; context 352 drivers/net/ethernet/mellanox/mlx4/cmd.c cmd->free_head = context->next; context 355 drivers/net/ethernet/mellanox/mlx4/cmd.c reinit_completion(&context->done); context 365 drivers/net/ethernet/mellanox/mlx4/cmd.c if (!wait_for_completion_timeout(&context->done, context 372 drivers/net/ethernet/mellanox/mlx4/cmd.c err = context->result; context 373 drivers/net/ethernet/mellanox/mlx4/cmd.c if (err && context->fw_status != CMD_STAT_MULTI_FUNC_REQ) { context 375 drivers/net/ethernet/mellanox/mlx4/cmd.c vhcr_cmd, context->fw_status); context 376 drivers/net/ethernet/mellanox/mlx4/cmd.c if (mlx4_closing_cmd_fatal_error(op, context->fw_status)) context 399 drivers/net/ethernet/mellanox/mlx4/cmd.c context->next = cmd->free_head; context 400 drivers/net/ethernet/mellanox/mlx4/cmd.c cmd->free_head = context - cmd->context; context 663 drivers/net/ethernet/mellanox/mlx4/cmd.c struct mlx4_cmd_context *context = context 664 drivers/net/ethernet/mellanox/mlx4/cmd.c &priv->cmd.context[token & priv->cmd.token_mask]; context 667 drivers/net/ethernet/mellanox/mlx4/cmd.c if (token != context->token) context 670 drivers/net/ethernet/mellanox/mlx4/cmd.c context->fw_status = status; context 671 drivers/net/ethernet/mellanox/mlx4/cmd.c context->result = mlx4_status_to_errno(status); context 672 drivers/net/ethernet/mellanox/mlx4/cmd.c context->out_param = out_param; context 674 drivers/net/ethernet/mellanox/mlx4/cmd.c complete(&context->done); context 682 drivers/net/ethernet/mellanox/mlx4/cmd.c struct mlx4_cmd_context *context; context 690 drivers/net/ethernet/mellanox/mlx4/cmd.c context = &cmd->context[cmd->free_head]; context 691 drivers/net/ethernet/mellanox/mlx4/cmd.c context->token += cmd->token_mask + 1; context 692 drivers/net/ethernet/mellanox/mlx4/cmd.c cmd->free_head = context->next; context 702 drivers/net/ethernet/mellanox/mlx4/cmd.c reinit_completion(&context->done); context 705 drivers/net/ethernet/mellanox/mlx4/cmd.c in_modifier, op_modifier, op, context->token, 1); context 711 drivers/net/ethernet/mellanox/mlx4/cmd.c wait_for_completion_interruptible_timeout(&context->done, context 714 drivers/net/ethernet/mellanox/mlx4/cmd.c context->fw_status = 0; context 715 drivers/net/ethernet/mellanox/mlx4/cmd.c context->out_param = 0; context 716 drivers/net/ethernet/mellanox/mlx4/cmd.c context->result = 0; context 719 drivers/net/ethernet/mellanox/mlx4/cmd.c ret_wait = (long)wait_for_completion_timeout(&context->done, context 734 drivers/net/ethernet/mellanox/mlx4/cmd.c err = context->result; context 744 drivers/net/ethernet/mellanox/mlx4/cmd.c context->fw_status == CMD_STAT_BAD_SIZE) context 746 drivers/net/ethernet/mellanox/mlx4/cmd.c op, context->fw_status); context 749 drivers/net/ethernet/mellanox/mlx4/cmd.c op, context->fw_status); context 752 drivers/net/ethernet/mellanox/mlx4/cmd.c else if (mlx4_closing_cmd_fatal_error(op, context->fw_status)) context 759 drivers/net/ethernet/mellanox/mlx4/cmd.c *out_param = context->out_param; context 766 drivers/net/ethernet/mellanox/mlx4/cmd.c context->next = cmd->free_head; context 767 drivers/net/ethernet/mellanox/mlx4/cmd.c cmd->free_head = context - cmd->context; context 2629 drivers/net/ethernet/mellanox/mlx4/cmd.c priv->cmd.context = kmalloc_array(priv->cmd.max_cmds, context 2632 drivers/net/ethernet/mellanox/mlx4/cmd.c if (!priv->cmd.context) context 2639 drivers/net/ethernet/mellanox/mlx4/cmd.c priv->cmd.context[i].token = i; context 2640 drivers/net/ethernet/mellanox/mlx4/cmd.c priv->cmd.context[i].next = i + 1; context 2645 drivers/net/ethernet/mellanox/mlx4/cmd.c init_completion(&priv->cmd.context[i].done); context 2648 drivers/net/ethernet/mellanox/mlx4/cmd.c priv->cmd.context[priv->cmd.max_cmds - 1].next = -1; context 2684 drivers/net/ethernet/mellanox/mlx4/cmd.c kfree(priv->cmd.context); context 2685 drivers/net/ethernet/mellanox/mlx4/cmd.c priv->cmd.context = NULL; context 2753 drivers/net/ethernet/mellanox/mlx4/cmd.c struct mlx4_cmd_context *context; context 2757 drivers/net/ethernet/mellanox/mlx4/cmd.c if (priv->cmd.context) { context 2759 drivers/net/ethernet/mellanox/mlx4/cmd.c context = &priv->cmd.context[i]; context 2760 drivers/net/ethernet/mellanox/mlx4/cmd.c context->fw_status = CMD_STAT_INTERNAL_ERR; context 2761 drivers/net/ethernet/mellanox/mlx4/cmd.c context->result = context 2763 drivers/net/ethernet/mellanox/mlx4/cmd.c complete(&context->done); context 42 drivers/net/ethernet/mellanox/mlx4/en_resources.c int user_prio, struct mlx4_qp_context *context) context 47 drivers/net/ethernet/mellanox/mlx4/en_resources.c memset(context, 0, sizeof(*context)); context 48 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->flags = cpu_to_be32(7 << 16 | rss << MLX4_RSS_QPC_FLAG_OFFSET); context 49 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pd = cpu_to_be32(mdev->priv_pdn); context 50 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->mtu_msgmax = 0xff; context 52 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->rq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4); context 54 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->sq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4); context 56 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->params2 |= cpu_to_be32(MLX4_QP_BIT_FPP); context 59 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->sq_size_stride = ilog2(TXBB_SIZE) - 4; context 61 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->usr_page = cpu_to_be32(mlx4_to_hw_uar_index(mdev->dev, context 63 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->local_qpn = cpu_to_be32(qpn); context 64 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.ackto = 1 & 0x07; context 65 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.sched_queue = 0x83 | (priv->port - 1) << 6; context 68 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.sched_queue |= user_prio << 3; context 69 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.feup = MLX4_FEUP_FORCE_ETH_UP; context 71 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.counter_index = priv->counter_index; context 72 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->cqn_send = cpu_to_be32(cqn); context 73 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->cqn_recv = cpu_to_be32(cqn); context 76 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.counter_index != context 80 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.fl |= MLX4_FL_ETH_SRC_CHECK_MC_LB; context 81 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->pri_path.control |= MLX4_CTRL_ETH_SRC_CHECK_IF_COUNTER; context 83 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->db_rec_addr = cpu_to_be64(priv->res.db.dma << 2); context 85 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->param3 |= cpu_to_be32(1 << 30); context 90 drivers/net/ethernet/mellanox/mlx4/en_resources.c context->srqn = cpu_to_be32(7 << 28); /* this fills bits 30:28 */ context 1062 drivers/net/ethernet/mellanox/mlx4/en_rx.c struct mlx4_qp_context *context; context 1065 drivers/net/ethernet/mellanox/mlx4/en_rx.c context = kmalloc(sizeof(*context), GFP_KERNEL); context 1066 drivers/net/ethernet/mellanox/mlx4/en_rx.c if (!context) context 1076 drivers/net/ethernet/mellanox/mlx4/en_rx.c memset(context, 0, sizeof(*context)); context 1078 drivers/net/ethernet/mellanox/mlx4/en_rx.c qpn, ring->cqn, -1, context); context 1079 drivers/net/ethernet/mellanox/mlx4/en_rx.c context->db_rec_addr = cpu_to_be64(ring->wqres.db.dma); context 1083 drivers/net/ethernet/mellanox/mlx4/en_rx.c context->param3 |= cpu_to_be32(1 << 29); context 1091 drivers/net/ethernet/mellanox/mlx4/en_rx.c err = mlx4_qp_to_ready(mdev->dev, &ring->wqres.mtt, context, qp, state); context 1098 drivers/net/ethernet/mellanox/mlx4/en_rx.c kfree(context); context 1139 drivers/net/ethernet/mellanox/mlx4/en_rx.c struct mlx4_qp_context context; context 1195 drivers/net/ethernet/mellanox/mlx4/en_rx.c priv->rx_ring[0]->cqn, -1, &context); context 1202 drivers/net/ethernet/mellanox/mlx4/en_rx.c ptr = ((void *) &context) + offsetof(struct mlx4_qp_context, pri_path) context 1232 drivers/net/ethernet/mellanox/mlx4/en_rx.c err = mlx4_qp_to_ready(mdev->dev, &priv->res.mtt, &context, context 3024 drivers/net/ethernet/mellanox/mlx4/fw.c struct mlx4_set_port_general_context *context; context 3031 drivers/net/ethernet/mellanox/mlx4/fw.c context = mailbox->buf; context 3033 drivers/net/ethernet/mellanox/mlx4/fw.c context->flags2 |= SET_PORT_GEN_PHV_VALID; context 3035 drivers/net/ethernet/mellanox/mlx4/fw.c context->phv_en |= SET_PORT_GEN_PHV_EN; context 88 drivers/net/ethernet/mellanox/mlx4/fw_qos.c struct mlx4_set_port_prio2tc_context *context; context 97 drivers/net/ethernet/mellanox/mlx4/fw_qos.c context = mailbox->buf; context 100 drivers/net/ethernet/mellanox/mlx4/fw_qos.c context->prio2tc[i >> 1] = prio2tc[i] << 4 | prio2tc[i + 1]; context 115 drivers/net/ethernet/mellanox/mlx4/fw_qos.c struct mlx4_set_port_scheduler_context *context; context 124 drivers/net/ethernet/mellanox/mlx4/fw_qos.c context = mailbox->buf; context 127 drivers/net/ethernet/mellanox/mlx4/fw_qos.c struct mlx4_port_scheduler_tc_cfg_be *tc = &context->tc[i]; context 45 drivers/net/ethernet/mellanox/mlx4/intf.c void *context; context 61 drivers/net/ethernet/mellanox/mlx4/intf.c dev_ctx->context = intf->add(&priv->dev); context 63 drivers/net/ethernet/mellanox/mlx4/intf.c if (dev_ctx->context) { context 68 drivers/net/ethernet/mellanox/mlx4/intf.c intf->activate(&priv->dev, dev_ctx->context); context 84 drivers/net/ethernet/mellanox/mlx4/intf.c intf->remove(&priv->dev, dev_ctx->context); context 168 drivers/net/ethernet/mellanox/mlx4/intf.c dev_ctx->intf->remove(dev, dev_ctx->context); context 169 drivers/net/ethernet/mellanox/mlx4/intf.c dev_ctx->context = dev_ctx->intf->add(dev); context 193 drivers/net/ethernet/mellanox/mlx4/intf.c dev_ctx->intf->event(dev, dev_ctx->context, type, param); context 259 drivers/net/ethernet/mellanox/mlx4/intf.c result = dev_ctx->intf->get_dev(dev, dev_ctx->context, port); context 637 drivers/net/ethernet/mellanox/mlx4/mlx4.h struct mlx4_cmd_context *context; context 752 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h struct mlx4_qp_context *context); context 1612 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_general_context *context; context 1619 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1620 drivers/net/ethernet/mellanox/mlx4/port.c context->flags = SET_PORT_GEN_ALL_VALID; context 1621 drivers/net/ethernet/mellanox/mlx4/port.c context->mtu = cpu_to_be16(mtu); context 1622 drivers/net/ethernet/mellanox/mlx4/port.c context->pptx = (pptx * (!pfctx)) << 7; context 1623 drivers/net/ethernet/mellanox/mlx4/port.c context->pfctx = pfctx; context 1624 drivers/net/ethernet/mellanox/mlx4/port.c context->pprx = (pprx * (!pfcrx)) << 7; context 1625 drivers/net/ethernet/mellanox/mlx4/port.c context->pfcrx = pfcrx; context 1628 drivers/net/ethernet/mellanox/mlx4/port.c context->flags |= SET_PORT_ROCE_2_FLAGS; context 1629 drivers/net/ethernet/mellanox/mlx4/port.c context->roce_mode |= context 1646 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_rqp_calc_context *context; context 1658 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1659 drivers/net/ethernet/mellanox/mlx4/port.c context->base_qpn = cpu_to_be32(base_qpn); context 1660 drivers/net/ethernet/mellanox/mlx4/port.c context->n_mac = dev->caps.log_num_macs; context 1661 drivers/net/ethernet/mellanox/mlx4/port.c context->promisc = cpu_to_be32(promisc << SET_PORT_PROMISC_SHIFT | context 1663 drivers/net/ethernet/mellanox/mlx4/port.c context->mcast = cpu_to_be32(m_promisc << SET_PORT_MC_PROMISC_SHIFT | context 1665 drivers/net/ethernet/mellanox/mlx4/port.c context->intra_no_vlan = 0; context 1666 drivers/net/ethernet/mellanox/mlx4/port.c context->no_vlan = MLX4_NO_VLAN_IDX; context 1667 drivers/net/ethernet/mellanox/mlx4/port.c context->intra_vlan_miss = 0; context 1668 drivers/net/ethernet/mellanox/mlx4/port.c context->vlan_miss = MLX4_VLAN_MISS_IDX; context 1683 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_general_context *context; context 1690 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1691 drivers/net/ethernet/mellanox/mlx4/port.c context->flags2 |= MLX4_FLAG2_V_USER_MTU_MASK; context 1692 drivers/net/ethernet/mellanox/mlx4/port.c context->user_mtu = cpu_to_be16(user_mtu); context 1707 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_general_context *context; context 1714 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1715 drivers/net/ethernet/mellanox/mlx4/port.c context->flags2 |= MLX4_FLAG2_V_USER_MAC_MASK; context 1716 drivers/net/ethernet/mellanox/mlx4/port.c memcpy(context->user_mac, user_mac, sizeof(context->user_mac)); context 1731 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_general_context *context; context 1738 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1739 drivers/net/ethernet/mellanox/mlx4/port.c context->flags2 |= MLX4_FLAG2_V_IGNORE_FCS_MASK; context 1741 drivers/net/ethernet/mellanox/mlx4/port.c context->ignore_fcs |= MLX4_IGNORE_FCS_MASK; context 1743 drivers/net/ethernet/mellanox/mlx4/port.c context->ignore_fcs &= ~MLX4_IGNORE_FCS_MASK; context 1774 drivers/net/ethernet/mellanox/mlx4/port.c struct mlx4_set_port_vxlan_context *context; context 1779 drivers/net/ethernet/mellanox/mlx4/port.c context = mailbox->buf; context 1780 drivers/net/ethernet/mellanox/mlx4/port.c memset(context, 0, sizeof(*context)); context 1782 drivers/net/ethernet/mellanox/mlx4/port.c context->modify_flags = VXLAN_ENABLE_MODIFY | VXLAN_STEERING_MODIFY; context 1784 drivers/net/ethernet/mellanox/mlx4/port.c context->enable_flags = VXLAN_ENABLE; context 1785 drivers/net/ethernet/mellanox/mlx4/port.c context->steering = steering; context 89 drivers/net/ethernet/mellanox/mlx4/qp.c struct mlx4_qp_context *context, context 165 drivers/net/ethernet/mellanox/mlx4/qp.c context->mtt_base_addr_h = mtt_addr >> 32; context 166 drivers/net/ethernet/mellanox/mlx4/qp.c context->mtt_base_addr_l = cpu_to_be32(mtt_addr & 0xffffffff); context 167 drivers/net/ethernet/mellanox/mlx4/qp.c context->log_page_size = mtt->page_shift - MLX4_ICM_PAGE_SHIFT; context 173 drivers/net/ethernet/mellanox/mlx4/qp.c context->roce_entropy = context 177 drivers/net/ethernet/mellanox/mlx4/qp.c memcpy(mailbox->buf + 8, context, sizeof(*context)); context 210 drivers/net/ethernet/mellanox/mlx4/qp.c struct mlx4_qp_context *context, context 214 drivers/net/ethernet/mellanox/mlx4/qp.c return __mlx4_qp_modify(dev, mtt, cur_state, new_state, context, context 894 drivers/net/ethernet/mellanox/mlx4/qp.c struct mlx4_qp_context *context) context 907 drivers/net/ethernet/mellanox/mlx4/qp.c memcpy(context, mailbox->buf + 8, sizeof(*context)); context 915 drivers/net/ethernet/mellanox/mlx4/qp.c struct mlx4_qp_context *context, context 928 drivers/net/ethernet/mellanox/mlx4/qp.c context->flags &= cpu_to_be32(~(0xf << 28)); context 929 drivers/net/ethernet/mellanox/mlx4/qp.c context->flags |= cpu_to_be32(states[i + 1] << 28); context 931 drivers/net/ethernet/mellanox/mlx4/qp.c context->params2 &= ~cpu_to_be32(MLX4_QP_BIT_FPP); context 933 drivers/net/ethernet/mellanox/mlx4/qp.c context, 0, 0, qp); context 949 drivers/net/ethernet/mellanox/mlx4/qp.c struct mlx4_qp_context context; context 954 drivers/net/ethernet/mellanox/mlx4/qp.c err = mlx4_qp_query(dev, &qp, &context); context 956 drivers/net/ethernet/mellanox/mlx4/qp.c u32 dest_qpn = be32_to_cpu(context.remote_qpn) & 0xffffff; context 2935 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context) context 2944 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c context->qkey = cpu_to_be32(qkey); context 3761 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3762 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 3888 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3890 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c err = adjust_qp_sched_queue(dev, slave, context, inbox); context 3899 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 3910 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3912 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c err = adjust_qp_sched_queue(dev, slave, context, inbox); context 3921 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 3932 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3933 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c int err = adjust_qp_sched_queue(dev, slave, context, inbox); context 3936 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 3947 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3949 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c err = adjust_qp_sched_queue(dev, slave, context, inbox); context 3956 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 3969 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c struct mlx4_qp_context *context = inbox->buf + 8; context 3971 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c err = adjust_qp_sched_queue(dev, slave, context, inbox); context 3978 drivers/net/ethernet/mellanox/mlx4/resource_tracker.c adjust_proxy_tun_qkey(dev, vhcr, context); context 69 drivers/net/ethernet/mellanox/mlx5/core/accel/ipsec.c void mlx5_accel_esp_free_hw_context(void *context) context 71 drivers/net/ethernet/mellanox/mlx5/core/accel/ipsec.c mlx5_fpga_ipsec_delete_sa_ctx(context); context 54 drivers/net/ethernet/mellanox/mlx5/core/accel/ipsec.h void mlx5_accel_esp_free_hw_context(void *context); context 74 drivers/net/ethernet/mellanox/mlx5/core/accel/ipsec.h static inline void mlx5_accel_esp_free_hw_context(void *context) context 77 drivers/net/ethernet/mellanox/mlx5/core/cmd.c void *context, int page_queue) context 91 drivers/net/ethernet/mellanox/mlx5/core/cmd.c ent->context = context; context 872 drivers/net/ethernet/mellanox/mlx5/core/cmd.c ent->callback(-EAGAIN, ent->context); context 1019 drivers/net/ethernet/mellanox/mlx5/core/cmd.c void *context, int page_queue, u8 *status, context 1032 drivers/net/ethernet/mellanox/mlx5/core/cmd.c ent = alloc_cmd(cmd, in, out, uout, uout_size, callback, context, context 1470 drivers/net/ethernet/mellanox/mlx5/core/cmd.c void *context; context 1537 drivers/net/ethernet/mellanox/mlx5/core/cmd.c context = ent->context; context 1555 drivers/net/ethernet/mellanox/mlx5/core/cmd.c callback(err, context); context 1674 drivers/net/ethernet/mellanox/mlx5/core/cmd.c int out_size, mlx5_cmd_cbk_t callback, void *context, context 1719 drivers/net/ethernet/mellanox/mlx5/core/cmd.c err = mlx5_cmd_invoke(dev, inb, outb, out, out_size, callback, context, context 44 drivers/net/ethernet/mellanox/mlx5/core/dev.c void *context; context 68 drivers/net/ethernet/mellanox/mlx5/core/dev.c dev_ctx->context = intf->add(dev); context 69 drivers/net/ethernet/mellanox/mlx5/core/dev.c if (dev_ctx->context) { context 79 drivers/net/ethernet/mellanox/mlx5/core/dev.c if (!dev_ctx->context) context 108 drivers/net/ethernet/mellanox/mlx5/core/dev.c intf->remove(dev, dev_ctx->context); context 125 drivers/net/ethernet/mellanox/mlx5/core/dev.c if (intf->attach(dev, dev_ctx->context)) context 131 drivers/net/ethernet/mellanox/mlx5/core/dev.c dev_ctx->context = intf->add(dev); context 132 drivers/net/ethernet/mellanox/mlx5/core/dev.c if (!dev_ctx->context) context 161 drivers/net/ethernet/mellanox/mlx5/core/dev.c intf->detach(dev, dev_ctx->context); context 166 drivers/net/ethernet/mellanox/mlx5/core/dev.c intf->remove(dev, dev_ctx->context); context 225 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c void *context) context 228 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c struct mlx5e_err_ctx *err_ctx = context; context 138 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c void *context) context 141 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c struct mlx5e_err_ctx *err_ctx = context; context 101 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c static int mlx5e_tls_get_sync_data(struct mlx5e_tls_offload_context_tx *context, context 109 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c spin_lock_irqsave(&context->base.lock, flags); context 110 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c record = tls_get_record(&context->base, tcp_seq, &info->rcd_sn); context 139 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c spin_unlock_irqrestore(&context->base.lock, flags); context 188 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c mlx5e_tls_handle_ooo(struct mlx5e_tls_offload_context_tx *context, context 203 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c if (mlx5e_tls_get_sync_data(context, tcp_seq, &info)) { context 226 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c if (unlikely(mlx5e_tls_add_metadata(skb, context->swid))) { context 239 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c context->expected_seq = tcp_seq + skb->len - headln; context 266 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c struct mlx5e_tls_offload_context_tx *context; context 289 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c context = mlx5e_get_tls_tx_context(tls_ctx); context 290 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c expected_seq = context->expected_seq; context 293 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c skb = mlx5e_tls_handle_ooo(context, sq, skb, wqe, pi, priv->tls); context 297 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c if (unlikely(mlx5e_tls_add_metadata(skb, context->swid))) { context 304 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls_rxtx.c context->expected_seq = skb_seq + datalen; context 145 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c struct mlx5_fpga_ipsec_cmd_context *context; context 148 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = container_of(buf, struct mlx5_fpga_ipsec_cmd_context, context 152 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->status = MLX5_FPGA_IPSEC_CMD_SEND_FAIL; context 153 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c complete(&context->complete); context 176 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c struct mlx5_fpga_ipsec_cmd_context *context; context 191 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = list_first_entry_or_null(&fdev->ipsec->pending_cmds, context 194 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (context) context 195 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c list_del(&context->list); context 198 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (!context) { context 202 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c mlx5_fpga_dbg(fdev, "Handling response for %p\n", context); context 205 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->status_code = syndrome_to_errno(syndrome); context 206 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->status = MLX5_FPGA_IPSEC_CMD_COMPLETE; context 207 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c memcpy(&context->resp, resp, sizeof(*resp)); context 209 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (context->status_code) context 213 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c complete(&context->complete); context 219 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c struct mlx5_fpga_ipsec_cmd_context *context; context 230 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = kzalloc(sizeof(*context) + cmd_size, GFP_ATOMIC); context 231 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (!context) context 234 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->status = MLX5_FPGA_IPSEC_CMD_PENDING; context 235 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->dev = fdev; context 236 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->buf.complete = mlx5_fpga_ipsec_send_complete; context 237 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c init_completion(&context->complete); context 238 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c memcpy(&context->command, cmd, cmd_size); context 239 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->buf.sg[0].size = cmd_size; context 240 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->buf.sg[0].data = &context->command; context 243 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c res = mlx5_fpga_sbu_conn_sendmsg(fdev->ipsec->conn, &context->buf); context 245 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c list_add_tail(&context->list, &fdev->ipsec->pending_cmds); context 250 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c kfree(context); context 255 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c return context; context 260 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c struct mlx5_fpga_ipsec_cmd_context *context = ctx; context 265 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c res = wait_for_completion_timeout(&context->complete, timeout); context 267 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c mlx5_fpga_warn(context->dev, "Failure waiting for IPSec command response\n"); context 271 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (context->status == MLX5_FPGA_IPSEC_CMD_COMPLETE) context 272 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c res = context->status_code; context 420 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c struct mlx5_fpga_ipsec_cmd_context *context; context 426 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = mlx5_fpga_ipsec_cmd_exec(mdev, &cmd, sizeof(cmd)); context 427 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if (IS_ERR(context)) context 428 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c return PTR_ERR(context); context 430 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c err = mlx5_fpga_ipsec_cmd_wait(context); context 434 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c if ((context->resp.flags & cmd.flags) != cmd.flags) { context 435 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c mlx5_fpga_err(context->dev, "Failed to set capabilities. cmd 0x%08x vs resp 0x%08x\n", context 437 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context->resp.flags); context 442 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c kfree(context); context 678 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c void *context; context 698 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = ERR_PTR(-EINVAL); context 703 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = fpga_xfrm->sa_ctx; context 717 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = ERR_PTR(-EEXIST); context 728 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c context = ERR_PTR(err); context 751 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c return context; context 845 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c void mlx5_fpga_ipsec_delete_sa_ctx(void *context) context 848 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c ((struct mlx5_fpga_ipsec_sa_ctx *)context)->fpga_xfrm; context 50 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.h void mlx5_fpga_ipsec_delete_sa_ctx(void *context); context 164 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c struct mlx5_qp_context *context; context 168 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 169 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c if (!context) context 172 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c context->flags = cpu_to_be32(MLX5_QP_PM_MIGRATED << 11); context 173 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c context->pri_path.port = 1; context 174 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c context->pri_path.pkey_index = cpu_to_be16(ipriv->pkey_index); context 175 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c context->qkey = cpu_to_be32(IB_DEFAULT_Q_KEY); context 177 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c ret = mlx5_core_qp_modify(mdev, MLX5_CMD_OP_RST2INIT_QP, 0, context, qp); context 182 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c memset(context, 0, sizeof(*context)); context 184 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c ret = mlx5_core_qp_modify(mdev, MLX5_CMD_OP_INIT2RTR_QP, 0, context, qp); context 190 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c ret = mlx5_core_qp_modify(mdev, MLX5_CMD_OP_RTR2RTS_QP, 0, context, qp); context 196 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c kfree(context); context 200 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c mlx5_core_qp_modify(mdev, MLX5_CMD_OP_2ERR_QP, 0, &context, qp); context 201 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c kfree(context); context 209 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c struct mlx5_qp_context context; context 212 drivers/net/ethernet/mellanox/mlx5/core/ipoib/ipoib.c err = mlx5_core_qp_modify(mdev, MLX5_CMD_OP_2RST_QP, 0, &context, context 53 drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c int mlx5_hv_register_invalidate(struct mlx5_core_dev *dev, void *context, context 54 drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c void (*block_invalidate)(void *context, context 57 drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c return hyperv_reg_block_invalidate(dev->pdev, context, context 16 drivers/net/ethernet/mellanox/mlx5/core/lib/hv.h int mlx5_hv_register_invalidate(struct mlx5_core_dev *dev, void *context, context 17 drivers/net/ethernet/mellanox/mlx5/core/lib/hv.h void (*block_invalidate)(void *context, context 96 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.c void mlx5_hv_vhca_invalidate(void *context, u64 block_mask) context 98 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.c struct mlx5_hv_vhca *hv_vhca = (struct mlx5_hv_vhca *)context; context 36 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h void mlx5_hv_vhca_invalidate(void *context, u64 block_mask); context 46 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h void *context); context 74 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h static inline void mlx5_hv_vhca_invalidate(void *context, context 87 drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h void *context) context 137 drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.h void *context, u32 *element_id); context 139 drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.h void *context, u32 element_id, context 54 drivers/net/ethernet/mellanox/mlx5/core/mr.c struct mlx5_async_work *context) context 73 drivers/net/ethernet/mellanox/mlx5/core/mr.c callback, context); context 51 drivers/net/ethernet/microchip/encx24j600-regmap.c static void regmap_lock_mutex(void *context) context 53 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 58 drivers/net/ethernet/microchip/encx24j600-regmap.c static void regmap_unlock_mutex(void *context) context 60 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 65 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_sfr_read(void *context, u8 reg, u8 *val, context 68 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 173 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_sfr_write(void *context, u8 reg, u8 *val, context 176 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 193 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_reg_update_bits(void *context, unsigned int reg, context 197 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 225 drivers/net/ethernet/microchip/encx24j600-regmap.c int regmap_encx24j600_spi_write(void *context, u8 reg, const u8 *data, context 228 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 238 drivers/net/ethernet/microchip/encx24j600-regmap.c int regmap_encx24j600_spi_read(void *context, u8 reg, u8 *data, size_t count) context 240 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 249 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_write(void *context, const void *data, context 258 drivers/net/ethernet/microchip/encx24j600-regmap.c return regmap_encx24j600_spi_write(context, reg, dout, len); context 263 drivers/net/ethernet/microchip/encx24j600-regmap.c return regmap_encx24j600_sfr_write(context, reg, dout, len); context 266 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_read(void *context, context 278 drivers/net/ethernet/microchip/encx24j600-regmap.c return regmap_encx24j600_spi_read(context, reg, val, val_size); context 285 drivers/net/ethernet/microchip/encx24j600-regmap.c return regmap_encx24j600_sfr_read(context, reg, val, val_size); context 350 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_phy_reg_read(void *context, unsigned int reg, context 353 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 388 drivers/net/ethernet/microchip/encx24j600-regmap.c static int regmap_encx24j600_phy_reg_write(void *context, unsigned int reg, context 391 drivers/net/ethernet/microchip/encx24j600-regmap.c struct encx24j600_context *ctx = context; context 433 drivers/net/ethernet/microchip/encx24j600_hw.h int regmap_encx24j600_spi_write(void *context, u8 reg, const u8 *data, context 435 drivers/net/ethernet/microchip/encx24j600_hw.h int regmap_encx24j600_spi_read(void *context, u8 reg, u8 *data, size_t count); context 140 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_intr_software_isr(void *context) context 142 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = context; context 153 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_tx_isr(void *context, u32 int_sts, u32 flags) context 155 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_tx *tx = context; context 195 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_rx_isr(void *context, u32 int_sts, u32 flags) context 197 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_rx *rx = context; context 236 drivers/net/ethernet/microchip/lan743x_main.c static void lan743x_intr_shared_isr(void *context, u32 int_sts, u32 flags) context 238 drivers/net/ethernet/microchip/lan743x_main.c struct lan743x_adapter *adapter = context; context 322 drivers/net/ethernet/microchip/lan743x_main.c vector->handler(vector->context, context 375 drivers/net/ethernet/microchip/lan743x_main.c void *context) context 386 drivers/net/ethernet/microchip/lan743x_main.c vector->context = context; context 394 drivers/net/ethernet/microchip/lan743x_main.c vector->context = NULL; context 409 drivers/net/ethernet/microchip/lan743x_main.c vector->context = NULL; context 574 drivers/net/ethernet/microchip/lan743x_main.h typedef void(*lan743x_vector_handler)(void *context, u32 int_sts, u32 flags); context 600 drivers/net/ethernet/microchip/lan743x_main.h void *context; context 768 drivers/net/ethernet/microchip/lan743x_ptp.c void lan743x_ptp_isr(void *context) context 770 drivers/net/ethernet/microchip/lan743x_ptp.c struct lan743x_adapter *adapter = (struct lan743x_adapter *)context; context 28 drivers/net/ethernet/microchip/lan743x_ptp.h void lan743x_ptp_isr(void *context); context 506 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c u32 reg_sz, u32 context, void *dest) context 516 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c result = nfp_cpp_writel(cpp, cpp_id, csr_ctx_ptr_offs, context); context 3110 drivers/net/ethernet/qlogic/qed/qed_iwarp.c events.affiliated_event(events.context, context 3119 drivers/net/ethernet/qlogic/qed/qed_iwarp.c events.affiliated_event(events.context, context 3127 drivers/net/ethernet/qlogic/qed/qed_iwarp.c p_hwfn->p_rdma_info->events.context, context 431 drivers/net/ethernet/qlogic/qed/qed_rdma.c events->context = params->events->context; context 86 drivers/net/ethernet/qlogic/qed/qed_roce.c events.affiliated_event(events.context, fw_event_code, context 91 drivers/net/ethernet/qlogic/qed/qed_roce.c events.affiliated_event(events.context, fw_event_code, context 2231 drivers/net/ethernet/sfc/ef10.c struct efx_msi_context *context = dev_id; context 2232 drivers/net/ethernet/sfc/ef10.c struct efx_nic *efx = context->efx; context 2239 drivers/net/ethernet/sfc/ef10.c if (context->index == efx->irq_level) context 2243 drivers/net/ethernet/sfc/ef10.c efx_schedule_channel_irq(efx->channel[context->index]); context 2651 drivers/net/ethernet/sfc/ef10.c static int efx_ef10_get_rss_flags(struct efx_nic *efx, u32 context, u32 *flags) context 2677 drivers/net/ethernet/sfc/ef10.c MCDI_SET_DWORD(inbuf, RSS_CONTEXT_GET_FLAGS_IN_RSS_CONTEXT_ID, context); context 2771 drivers/net/ethernet/sfc/ef10.c static int efx_ef10_free_rss_context(struct efx_nic *efx, u32 context) context 2776 drivers/net/ethernet/sfc/ef10.c context); context 2781 drivers/net/ethernet/sfc/ef10.c static int efx_ef10_populate_rss_table(struct efx_nic *efx, u32 context, context 2789 drivers/net/ethernet/sfc/ef10.c context); context 2809 drivers/net/ethernet/sfc/ef10.c context); context 1587 drivers/net/ethernet/sfc/falcon/farch.c struct ef4_msi_context *context = dev_id; context 1588 drivers/net/ethernet/sfc/falcon/farch.c struct ef4_nic *efx = context->efx; context 1600 drivers/net/ethernet/sfc/falcon/farch.c if (context->index == efx->irq_level) { context 1608 drivers/net/ethernet/sfc/falcon/farch.c ef4_schedule_channel_irq(efx->channel[context->index]); context 1596 drivers/net/ethernet/sfc/farch.c struct efx_msi_context *context = dev_id; context 1597 drivers/net/ethernet/sfc/farch.c struct efx_nic *efx = context->efx; context 1609 drivers/net/ethernet/sfc/farch.c if (context->index == efx->irq_level) { context 1617 drivers/net/ethernet/sfc/farch.c efx_schedule_channel_irq(efx->channel[context->index]); context 1113 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c unsigned int context_next, context; context 1169 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c context = XLGMAC_GET_REG_BITS( context 1186 drivers/net/ethernet/synopsys/dwc-xlgmac-net.c if (!context) { context 97 drivers/net/ethernet/ti/netcp_core.c void (*txtstamp)(void *context, struct sk_buff *skb); context 2538 drivers/net/ethernet/ti/netcp_ethss.c static void gbe_txtstamp(void *context, struct sk_buff *skb) context 2540 drivers/net/ethernet/ti/netcp_ethss.c struct gbe_intf *gbe_intf = context; context 3103 drivers/net/ethernet/via/via-velocity.c static void velocity_save_context(struct velocity_info *vptr, struct velocity_context *context) context 3110 drivers/net/ethernet/via/via-velocity.c *((u32 *) (context->mac_reg + i)) = readl(ptr + i); context 3113 drivers/net/ethernet/via/via-velocity.c *((u32 *) (context->mac_reg + i)) = readl(ptr + i); context 3116 drivers/net/ethernet/via/via-velocity.c *((u32 *) (context->mac_reg + i)) = readl(ptr + i); context 3137 drivers/net/ethernet/via/via-velocity.c velocity_save_context(vptr, &vptr->context); context 3144 drivers/net/ethernet/via/via-velocity.c velocity_save_context(vptr, &vptr->context); context 3163 drivers/net/ethernet/via/via-velocity.c static void velocity_restore_context(struct velocity_info *vptr, struct velocity_context *context) context 3170 drivers/net/ethernet/via/via-velocity.c writel(*((u32 *) (context->mac_reg + i)), ptr + i); context 3175 drivers/net/ethernet/via/via-velocity.c writeb(~(*((u8 *) (context->mac_reg + i))), ptr + i + 4); context 3177 drivers/net/ethernet/via/via-velocity.c writeb(*((u8 *) (context->mac_reg + i)), ptr + i); context 3181 drivers/net/ethernet/via/via-velocity.c writel(*((u32 *) (context->mac_reg + i)), ptr + i); context 3184 drivers/net/ethernet/via/via-velocity.c writel(*((u32 *) (context->mac_reg + i)), ptr + i); context 3187 drivers/net/ethernet/via/via-velocity.c writeb(*((u8 *) (context->mac_reg + i)), ptr + i); context 3210 drivers/net/ethernet/via/via-velocity.c velocity_restore_context(vptr, &vptr->context); context 1483 drivers/net/ethernet/via/via-velocity.h struct velocity_context context; context 1510 drivers/net/fjes/fjes_main.c void *context, void **return_value) context 1513 drivers/net/fjes/fjes_main.c bool *found = context; context 198 drivers/net/hyperv/hyperv_net.h void netvsc_channel_cb(void *context); context 1340 drivers/net/hyperv/netvsc.c void netvsc_channel_cb(void *context) context 1342 drivers/net/hyperv/netvsc.c struct netvsc_channel *nvchan = context; context 72 drivers/net/ieee802154/at86rf230.c void (*complete)(void *context); context 115 drivers/net/ieee802154/at86rf230.c const u8 state, void (*complete)(void *context)); context 338 drivers/net/ieee802154/at86rf230.c at86rf230_async_error_recover_complete(void *context) context 340 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 350 drivers/net/ieee802154/at86rf230.c at86rf230_async_error_recover(void *context) context 352 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 374 drivers/net/ieee802154/at86rf230.c void (*complete)(void *context)) context 390 drivers/net/ieee802154/at86rf230.c void (*complete)(void *context)) context 403 drivers/net/ieee802154/at86rf230.c at86rf230_async_state_assert(void *context) context 405 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 458 drivers/net/ieee802154/at86rf230.c ctx->complete(context); context 475 drivers/net/ieee802154/at86rf230.c at86rf230_async_state_delay(void *context) context 477 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 566 drivers/net/ieee802154/at86rf230.c at86rf230_async_state_change_start(void *context) context 568 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 584 drivers/net/ieee802154/at86rf230.c ctx->complete(context); context 601 drivers/net/ieee802154/at86rf230.c const u8 state, void (*complete)(void *context)) context 611 drivers/net/ieee802154/at86rf230.c at86rf230_sync_state_change_complete(void *context) context 613 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 642 drivers/net/ieee802154/at86rf230.c at86rf230_tx_complete(void *context) context 644 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 652 drivers/net/ieee802154/at86rf230.c at86rf230_tx_on(void *context) context 654 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 662 drivers/net/ieee802154/at86rf230.c at86rf230_tx_trac_check(void *context) context 664 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 696 drivers/net/ieee802154/at86rf230.c at86rf230_rx_read_frame_complete(void *context) context 698 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 724 drivers/net/ieee802154/at86rf230.c at86rf230_rx_trac_check(void *context) context 726 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 761 drivers/net/ieee802154/at86rf230.c at86rf230_irq_trx_end(void *context) context 763 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 777 drivers/net/ieee802154/at86rf230.c at86rf230_irq_status(void *context) context 779 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 802 drivers/net/ieee802154/at86rf230.c state->msg.context = state; context 842 drivers/net/ieee802154/at86rf230.c at86rf230_write_frame_complete(void *context) context 844 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 857 drivers/net/ieee802154/at86rf230.c at86rf230_write_frame(void *context) context 859 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 880 drivers/net/ieee802154/at86rf230.c at86rf230_xmit_tx_on(void *context) context 882 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 890 drivers/net/ieee802154/at86rf230.c at86rf230_xmit_start(void *context) context 892 drivers/net/ieee802154/at86rf230.c struct at86rf230_state_change *ctx = context; context 205 drivers/net/ieee802154/atusb.c struct sk_buff *skb = urb->context; context 227 drivers/net/ieee802154/atusb.c urb->context = NULL; context 283 drivers/net/ieee802154/atusb.c struct sk_buff *skb = urb->context; context 315 drivers/net/ieee802154/atusb.c urb->context = NULL; /* skb is gone */ context 321 drivers/net/ieee802154/atusb.c struct sk_buff *skb = urb->context; context 329 drivers/net/ieee802154/atusb.c urb->context = NULL; context 352 drivers/net/ieee802154/atusb.c kfree_skb(urb->context); context 840 drivers/net/ieee802154/ca8210.c static void ca8210_spi_transfer_complete(void *context) context 842 drivers/net/ieee802154/ca8210.c struct cas_control *cas_ctl = context; context 953 drivers/net/ieee802154/ca8210.c cas_ctl->msg.context = cas_ctl; context 445 drivers/net/ieee802154/mcr20a.c mcr20a_write_tx_buf_complete(void *context) context 447 drivers/net/ieee802154/mcr20a.c struct mcr20a_local *lp = context; context 782 drivers/net/ieee802154/mcr20a.c mcr20a_handle_rx_read_buf_complete(void *context) context 784 drivers/net/ieee802154/mcr20a.c struct mcr20a_local *lp = context; context 815 drivers/net/ieee802154/mcr20a.c mcr20a_handle_rx_read_len_complete(void *context) context 817 drivers/net/ieee802154/mcr20a.c struct mcr20a_local *lp = context; context 883 drivers/net/ieee802154/mcr20a.c mcr20a_irq_clean_complete(void *context) context 885 drivers/net/ieee802154/mcr20a.c struct mcr20a_local *lp = context; context 932 drivers/net/ieee802154/mcr20a.c static void mcr20a_irq_status_complete(void *context) context 935 drivers/net/ieee802154/mcr20a.c struct mcr20a_local *lp = context; context 1020 drivers/net/ieee802154/mcr20a.c lp->tx_buf_msg.context = lp; context 1038 drivers/net/ieee802154/mcr20a.c lp->reg_msg.context = lp; context 1051 drivers/net/ieee802154/mcr20a.c lp->rx_buf_msg.context = lp; context 1071 drivers/net/ieee802154/mcr20a.c lp->irq_msg.context = lp; context 505 drivers/net/ieee802154/mrf24j40.c static int mrf24j40_long_regmap_write(void *context, const void *data, context 508 drivers/net/ieee802154/mrf24j40.c struct spi_device *spi = context; context 525 drivers/net/ieee802154/mrf24j40.c mrf24j40_long_regmap_read(void *context, const void *reg, size_t reg_size, context 528 drivers/net/ieee802154/mrf24j40.c struct spi_device *spi = context; context 540 drivers/net/ieee802154/mrf24j40.c static void write_tx_buf_complete(void *context) context 542 drivers/net/ieee802154/mrf24j40.c struct mrf24j40 *devrec = context; context 752 drivers/net/ieee802154/mrf24j40.c static void mrf24j40_handle_rx_read_buf_complete(void *context) context 754 drivers/net/ieee802154/mrf24j40.c struct mrf24j40 *devrec = context; context 779 drivers/net/ieee802154/mrf24j40.c static void mrf24j40_handle_rx_read_buf(void *context) context 781 drivers/net/ieee802154/mrf24j40.c struct mrf24j40 *devrec = context; context 800 drivers/net/ieee802154/mrf24j40.c static void mrf24j40_handle_rx_read_len(void *context) context 802 drivers/net/ieee802154/mrf24j40.c struct mrf24j40 *devrec = context; context 1020 drivers/net/ieee802154/mrf24j40.c static void mrf24j40_intstat_complete(void *context) context 1022 drivers/net/ieee802154/mrf24j40.c struct mrf24j40 *devrec = context; context 1184 drivers/net/ieee802154/mrf24j40.c devrec->tx_msg.context = devrec; context 1195 drivers/net/ieee802154/mrf24j40.c devrec->tx_post_msg.context = devrec; context 1205 drivers/net/ieee802154/mrf24j40.c devrec->rx_msg.context = devrec; context 1212 drivers/net/ieee802154/mrf24j40.c devrec->rx_buf_msg.context = devrec; context 1228 drivers/net/ieee802154/mrf24j40.c devrec->irq_msg.context = devrec; context 141 drivers/net/phy/at803x.c struct at803x_context *context) context 143 drivers/net/phy/at803x.c context->bmcr = phy_read(phydev, MII_BMCR); context 144 drivers/net/phy/at803x.c context->advertise = phy_read(phydev, MII_ADVERTISE); context 145 drivers/net/phy/at803x.c context->control1000 = phy_read(phydev, MII_CTRL1000); context 146 drivers/net/phy/at803x.c context->int_enable = phy_read(phydev, AT803X_INTR_ENABLE); context 147 drivers/net/phy/at803x.c context->smart_speed = phy_read(phydev, AT803X_SMART_SPEED); context 148 drivers/net/phy/at803x.c context->led_control = phy_read(phydev, AT803X_LED_CONTROL); context 153 drivers/net/phy/at803x.c const struct at803x_context *context) context 155 drivers/net/phy/at803x.c phy_write(phydev, MII_BMCR, context->bmcr); context 156 drivers/net/phy/at803x.c phy_write(phydev, MII_ADVERTISE, context->advertise); context 157 drivers/net/phy/at803x.c phy_write(phydev, MII_CTRL1000, context->control1000); context 158 drivers/net/phy/at803x.c phy_write(phydev, AT803X_INTR_ENABLE, context->int_enable); context 159 drivers/net/phy/at803x.c phy_write(phydev, AT803X_SMART_SPEED, context->smart_speed); context 160 drivers/net/phy/at803x.c phy_write(phydev, AT803X_LED_CONTROL, context->led_control); context 324 drivers/net/phy/at803x.c struct at803x_context context; context 326 drivers/net/phy/at803x.c at803x_context_save(phydev, &context); context 333 drivers/net/phy/at803x.c at803x_context_restore(phydev, &context); context 280 drivers/net/phy/mdio-xgene.c void *context, void **ret) context 282 drivers/net/phy/mdio-xgene.c struct mii_bus *mdio = context; context 214 drivers/net/usb/catc.c struct catc *catc = urb->context; context 280 drivers/net/usb/catc.c struct catc *catc = urb->context; context 374 drivers/net/usb/catc.c struct catc *catc = urb->context; context 500 drivers/net/usb/catc.c struct catc *catc = urb->context; context 85 drivers/net/usb/cdc-phonet.c struct sk_buff *skb = req->context; context 139 drivers/net/usb/cdc-phonet.c struct net_device *dev = req->context; context 43 drivers/net/usb/cdc_eem.c dev_kfree_skb(urb->context); context 743 drivers/net/usb/hso.c struct hso_net *odev = urb->context; context 985 drivers/net/usb/hso.c struct hso_net *odev = urb->context; context 1181 drivers/net/usb/hso.c struct hso_serial *serial = urb->context; context 1450 drivers/net/usb/hso.c struct hso_serial *serial = urb->context; context 1837 drivers/net/usb/hso.c struct hso_shared_int *shared_int = urb->context; context 1909 drivers/net/usb/hso.c struct hso_serial *serial = urb->context; context 1960 drivers/net/usb/hso.c struct hso_serial *serial = urb->context; context 171 drivers/net/usb/ipheth.c dev = urb->context; context 221 drivers/net/usb/ipheth.c dev = urb->context; context 490 drivers/net/usb/kaweth.c struct kaweth_device *kaweth = u->context; context 575 drivers/net/usb/kaweth.c struct kaweth_device *kaweth = urb->context; context 759 drivers/net/usb/kaweth.c struct kaweth_device *kaweth = urb->context; context 1211 drivers/net/usb/kaweth.c struct usb_api_data *awd = (struct usb_api_data *)urb->context; context 1230 drivers/net/usb/kaweth.c urb->context = &awd; context 2795 drivers/net/usb/lan78xx.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 3249 drivers/net/usb/lan78xx.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 3591 drivers/net/usb/lan78xx.c struct lan78xx_net *dev = urb->context; context 4123 drivers/net/usb/lan78xx.c skb = (struct sk_buff *)res->context; context 115 drivers/net/usb/pegasus.c struct usb_ctrlrequest *req = (struct usb_ctrlrequest *)urb->context; context 459 drivers/net/usb/pegasus.c pegasus_t *pegasus = urb->context; context 606 drivers/net/usb/pegasus.c pegasus_t *pegasus = urb->context; context 642 drivers/net/usb/pegasus.c pegasus_t *pegasus = urb->context; context 717 drivers/net/usb/r8152.c struct r8152 *context; context 725 drivers/net/usb/r8152.c struct r8152 *context; context 1353 drivers/net/usb/r8152.c agg = urb->context; context 1357 drivers/net/usb/r8152.c tp = agg->context; context 1414 drivers/net/usb/r8152.c agg = urb->context; context 1418 drivers/net/usb/r8152.c tp = agg->context; context 1459 drivers/net/usb/r8152.c tp = urb->context; context 1558 drivers/net/usb/r8152.c rx_agg->context = tp; context 1655 drivers/net/usb/r8152.c tp->tx_info[i].context = tp; context 189 drivers/net/usb/rtl8150.c struct async_req *req = (struct async_req *)urb->context; context 392 drivers/net/usb/rtl8150.c dev = urb->context; context 461 drivers/net/usb/rtl8150.c dev = urb->context; context 481 drivers/net/usb/rtl8150.c dev = urb->context; context 173 drivers/net/usb/usbnet.c struct usbnet *dev = urb->context; context 577 drivers/net/usb/usbnet.c struct sk_buff *skb = (struct sk_buff *) urb->context; context 1243 drivers/net/usb/usbnet.c struct sk_buff *skb = (struct sk_buff *) urb->context; context 1895 drivers/net/usb/usbnet.c skb = (struct sk_buff *)res->context; context 2100 drivers/net/usb/usbnet.c struct usb_ctrlrequest *req = (struct usb_ctrlrequest *)urb->context; context 231 drivers/net/wimax/i2400m/usb-fw.c complete(urb->context); context 126 drivers/net/wimax/i2400m/usb-notif.c struct i2400mu *i2400mu = urb->context; context 94 drivers/net/wireless/ath/ar5523/ar5523.c struct ar5523 *ar = urb->context; context 223 drivers/net/wireless/ath/ar5523/ar5523.c struct ar5523_tx_cmd *cmd = urb->context; context 526 drivers/net/wireless/ath/ar5523/ar5523.c struct ar5523_rx_data *data = urb->context; context 738 drivers/net/wireless/ath/ar5523/ar5523.c struct sk_buff *skb = urb->context; context 127 drivers/net/wireless/ath/ath10k/usb.c struct ath10k_urb_context *urb_context = urb->context; context 184 drivers/net/wireless/ath/ath10k/usb.c struct ath10k_urb_context *urb_context = urb->context; context 901 drivers/net/wireless/ath/ath6kl/core.h void ath6kl_tx_complete(struct htc_target *context, context 53 drivers/net/wireless/ath/ath6kl/hif.c int ath6kl_hif_rw_comp_handler(void *context, int status) context 55 drivers/net/wireless/ath/ath6kl/hif.c struct htc_packet *packet = context; context 61 drivers/net/wireless/ath/ath6kl/hif.c packet->completion(packet->context, packet); context 275 drivers/net/wireless/ath/ath6kl/hif.h int ath6kl_hif_rw_comp_handler(void *context, int status); context 322 drivers/net/wireless/ath/ath6kl/htc.h struct htc_target *context; context 635 drivers/net/wireless/ath/ath6kl/htc.h static inline void set_htc_pkt_info(struct htc_packet *packet, void *context, context 639 drivers/net/wireless/ath/ath6kl/htc.h packet->pkt_cntxt = context; context 652 drivers/net/wireless/ath/ath6kl/htc.h static inline void set_htc_rxpkt_info(struct htc_packet *packet, void *context, context 656 drivers/net/wireless/ath/ath6kl/htc.h packet->pkt_cntxt = context; context 637 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->context = target; context 931 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->completion(packet->context, packet); context 1567 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_ctrl_rx(struct htc_target *context, struct htc_packet *packets) context 1575 drivers/net/wireless/ath/ath6kl/htc_mbox.c reclaim_rx_ctrl_buf(context, packets); context 1589 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_rxbuf(context, packets, &context->endpoint[0]); context 1160 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_rxctrl_complete(struct htc_target *context, context 450 drivers/net/wireless/ath/ath6kl/sdio.c void *context; context 456 drivers/net/wireless/ath/ath6kl/sdio.c context = req->packet; context 458 drivers/net/wireless/ath/ath6kl/sdio.c ath6kl_hif_rw_comp_handler(context, status); context 1227 drivers/net/wireless/ath/ath6kl/sdio.c void *context; context 1244 drivers/net/wireless/ath/ath6kl/sdio.c context = req->packet; context 1246 drivers/net/wireless/ath/ath6kl/sdio.c ath6kl_hif_rw_comp_handler(context, -ECANCELED); context 499 drivers/net/wireless/ath/ath6kl/usb.c struct ath6kl_urb_context *urb_context = urb->context; context 556 drivers/net/wireless/ath/ath6kl/usb.c struct ath6kl_urb_context *urb_context = urb->context; context 103 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c static void owl_fw_cb(const struct firmware *fw, void *context) context 105 drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c struct pci_dev *pdev = (struct pci_dev *)context; context 75 drivers/net/wireless/ath/ath9k/hif_usb.c struct cmd_buf *cmd = (struct cmd_buf *)urb->context; context 139 drivers/net/wireless/ath/ath9k/hif_usb.c struct cmd_buf *cmd = (struct cmd_buf *)urb->context; context 257 drivers/net/wireless/ath/ath9k/hif_usb.c struct tx_buf *tx_buf = (struct tx_buf *) urb->context; context 646 drivers/net/wireless/ath/ath9k/hif_usb.c struct rx_buf *rx_buf = (struct rx_buf *)urb->context; context 693 drivers/net/wireless/ath/ath9k/hif_usb.c struct rx_buf *rx_buf = (struct rx_buf *)urb->context; context 732 drivers/net/wireless/ath/ath9k/hif_usb.c urb->context = NULL; context 755 drivers/net/wireless/ath/ath9k/hif_usb.c urb->context = NULL; context 1116 drivers/net/wireless/ath/ath9k/hif_usb.c static void ath9k_hif_usb_firmware_cb(const struct firmware *fw, void *context); context 1188 drivers/net/wireless/ath/ath9k/hif_usb.c static void ath9k_hif_usb_firmware_cb(const struct firmware *fw, void *context) context 1190 drivers/net/wireless/ath/ath9k/hif_usb.c struct hif_device_usb *hif_dev = context; context 172 drivers/net/wireless/ath/carl9170/usb.c dev_kfree_skb_irq(urb->context); context 181 drivers/net/wireless/ath/carl9170/usb.c carl9170_tx_callback(ar, (void *)urb->context); context 241 drivers/net/wireless/ath/carl9170/usb.c struct ar9170 *ar = urb->context; context 279 drivers/net/wireless/ath/carl9170/usb.c struct ar9170 *ar = urb->context; context 372 drivers/net/wireless/ath/carl9170/usb.c struct sk_buff *skb = (void *)urb->context; context 400 drivers/net/wireless/ath/carl9170/usb.c struct ar9170 *ar = (struct ar9170 *)urb->context; context 562 drivers/net/wireless/ath/carl9170/usb.c struct sk_buff *skb = (void *)urb->context; context 1022 drivers/net/wireless/ath/carl9170/usb.c void *context) context 1024 drivers/net/wireless/ath/carl9170/usb.c struct ar9170 *ar = context; context 1200 drivers/net/wireless/atmel/at76c50x-usb.c struct at76_priv *priv = urb->context; context 1551 drivers/net/wireless/atmel/at76c50x-usb.c struct at76_priv *priv = urb->context; context 1731 drivers/net/wireless/atmel/at76c50x-usb.c struct at76_priv *priv = urb->context; context 2177 drivers/net/wireless/broadcom/b43/main.c static void b43_fw_cb(const struct firmware *firmware, void *context) context 2179 drivers/net/wireless/broadcom/b43/main.c struct b43_request_fw_context *ctx = context; context 1485 drivers/net/wireless/broadcom/b43legacy/main.c static void b43legacy_fw_cb(const struct firmware *firmware, void *context) context 1487 drivers/net/wireless/broadcom/b43legacy/main.c struct b43legacy_wldev *dev = context; context 225 drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c (struct brcmf_usbdev_info *)urb->context; context 237 drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c (struct brcmf_usbdev_info *)urb->context; context 465 drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c struct brcmf_usbreq *req = (struct brcmf_usbreq *)urb->context; context 487 drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c struct brcmf_usbreq *req = (struct brcmf_usbreq *)urb->context; context 704 drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c (struct brcmf_usbdev_info *)urb->context; context 1294 drivers/net/wireless/cisco/airo.c static int RxSeqValid (struct airo_info *ai,miccntx *context,int mcast,u32 micSeq); context 1295 drivers/net/wireless/cisco/airo.c static void MoveWindow(miccntx *context, u32 micSeq); context 1296 drivers/net/wireless/cisco/airo.c static void emmh32_setseed(emmh32_context *context, u8 *pkey, int keylen, context 1298 drivers/net/wireless/cisco/airo.c static void emmh32_init(emmh32_context *context); context 1299 drivers/net/wireless/cisco/airo.c static void emmh32_update(emmh32_context *context, u8 *pOctets, int len); context 1300 drivers/net/wireless/cisco/airo.c static void emmh32_final(emmh32_context *context, u8 digest[4]); context 1400 drivers/net/wireless/cisco/airo.c miccntx *context; context 1406 drivers/net/wireless/cisco/airo.c context = &ai->mod[0].mCtx; context 1408 drivers/net/wireless/cisco/airo.c context = &ai->mod[0].uCtx; context 1410 drivers/net/wireless/cisco/airo.c if (!context->valid) context 1418 drivers/net/wireless/cisco/airo.c mic->seq = htonl(context->tx); context 1419 drivers/net/wireless/cisco/airo.c context->tx += 2; context 1421 drivers/net/wireless/cisco/airo.c emmh32_init(&context->seed); // Mic the packet context 1422 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed,frame->da,ETH_ALEN * 2); // DA,SA context 1423 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed,(u8*)&mic->typelen,10); // Type/Length and Snap context 1424 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed,(u8*)&mic->seq,sizeof(mic->seq)); //SEQ context 1425 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed,(u8*)(frame + 1),payLen); //payload context 1426 drivers/net/wireless/cisco/airo.c emmh32_final(&context->seed, (u8*)&mic->mic); context 1458 drivers/net/wireless/cisco/airo.c miccntx *context; context 1496 drivers/net/wireless/cisco/airo.c context = mcast ? &ai->mod[i].mCtx : &ai->mod[i].uCtx; context 1499 drivers/net/wireless/cisco/airo.c if (!context->valid) { context 1509 drivers/net/wireless/cisco/airo.c emmh32_init(&context->seed); context 1510 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed, eth->da, ETH_ALEN*2); context 1511 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed, (u8 *)&mic->typelen, sizeof(mic->typelen)+sizeof(mic->u.snap)); context 1512 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed, (u8 *)&mic->seq,sizeof(mic->seq)); context 1513 drivers/net/wireless/cisco/airo.c emmh32_update(&context->seed, (u8 *)(eth + 1),payLen); context 1515 drivers/net/wireless/cisco/airo.c emmh32_final(&context->seed, digest); context 1525 drivers/net/wireless/cisco/airo.c if (RxSeqValid(ai, context, mcast, micSEQ) == SUCCESS) { context 1558 drivers/net/wireless/cisco/airo.c static int RxSeqValid (struct airo_info *ai,miccntx *context,int mcast,u32 micSeq) context 1568 drivers/net/wireless/cisco/airo.c context->window = (micSeq > 33) ? micSeq : 33; context 1569 drivers/net/wireless/cisco/airo.c context->rx = 0; // Reset rx context 1573 drivers/net/wireless/cisco/airo.c context->window = (micSeq > 33) ? micSeq : 33; // Move window context 1574 drivers/net/wireless/cisco/airo.c context->rx = 0; // Reset rx context 1578 drivers/net/wireless/cisco/airo.c seq = micSeq - (context->window - 33); context 1586 drivers/net/wireless/cisco/airo.c MoveWindow(context,micSeq); context 1594 drivers/net/wireless/cisco/airo.c if (!(context->rx & index)) { context 1597 drivers/net/wireless/cisco/airo.c context->rx |= index; context 1599 drivers/net/wireless/cisco/airo.c MoveWindow(context,micSeq); context 1606 drivers/net/wireless/cisco/airo.c static void MoveWindow(miccntx *context, u32 micSeq) context 1611 drivers/net/wireless/cisco/airo.c if (micSeq > context->window) { context 1612 drivers/net/wireless/cisco/airo.c shift = (micSeq - context->window) >> 1; context 1616 drivers/net/wireless/cisco/airo.c context->rx >>= shift; context 1618 drivers/net/wireless/cisco/airo.c context->rx = 0; context 1620 drivers/net/wireless/cisco/airo.c context->window = micSeq; //Move window context 1630 drivers/net/wireless/cisco/airo.c context->accum += (u64)(val) * be32_to_cpu(context->coeff[coeff_position++]); context 1633 drivers/net/wireless/cisco/airo.c static void emmh32_setseed(emmh32_context *context, u8 *pkey, int keylen, context 1646 drivers/net/wireless/cisco/airo.c memset(context->coeff, 0, sizeof(context->coeff)); context 1647 drivers/net/wireless/cisco/airo.c sg_init_one(&sg, context->coeff, sizeof(context->coeff)); context 1651 drivers/net/wireless/cisco/airo.c skcipher_request_set_crypt(req, &sg, &sg, sizeof(context->coeff), iv); context 1658 drivers/net/wireless/cisco/airo.c static void emmh32_init(emmh32_context *context) context 1661 drivers/net/wireless/cisco/airo.c context->accum = 0; context 1662 drivers/net/wireless/cisco/airo.c context->position = 0; context 1666 drivers/net/wireless/cisco/airo.c static void emmh32_update(emmh32_context *context, u8 *pOctets, int len) context 1672 drivers/net/wireless/cisco/airo.c coeff_position = context->position >> 2; context 1675 drivers/net/wireless/cisco/airo.c byte_position = context->position & 3; context 1680 drivers/net/wireless/cisco/airo.c context->part.d8[byte_position++] = *pOctets++; context 1681 drivers/net/wireless/cisco/airo.c context->position++; context 1684 drivers/net/wireless/cisco/airo.c MIC_ACCUM(ntohl(context->part.d32)); context 1690 drivers/net/wireless/cisco/airo.c context->position += 4; context 1698 drivers/net/wireless/cisco/airo.c context->part.d8[byte_position++] = *pOctets++; context 1699 drivers/net/wireless/cisco/airo.c context->position++; context 1708 drivers/net/wireless/cisco/airo.c static void emmh32_final(emmh32_context *context, u8 digest[4]) context 1716 drivers/net/wireless/cisco/airo.c coeff_position = context->position >> 2; context 1719 drivers/net/wireless/cisco/airo.c byte_position = context->position & 3; context 1722 drivers/net/wireless/cisco/airo.c val = ntohl(context->part.d32); context 1727 drivers/net/wireless/cisco/airo.c sum = context->accum; context 4669 drivers/net/wireless/intel/iwlegacy/4965-mac.c void *context); context 4766 drivers/net/wireless/intel/iwlegacy/4965-mac.c il4965_ucode_callback(const struct firmware *ucode_raw, void *context) context 4768 drivers/net/wireless/intel/iwlegacy/4965-mac.c struct il_priv *il = context; context 240 drivers/net/wireless/intel/iwlwifi/dvm/tx.c static int iwl_sta_id_or_broadcast(struct iwl_rxon_context *context, context 246 drivers/net/wireless/intel/iwlwifi/dvm/tx.c return context->bcast_sta_id; context 211 drivers/net/wireless/intel/iwlwifi/iwl-drv.c void *context); context 1318 drivers/net/wireless/intel/iwlwifi/iwl-drv.c static void iwl_req_fw_callback(const struct firmware *ucode_raw, void *context) context 1320 drivers/net/wireless/intel/iwlwifi/iwl-drv.c struct iwl_drv *drv = context; context 231 drivers/net/wireless/intel/iwlwifi/mvm/ops.c enum iwl_rx_handler_context context; context 236 drivers/net/wireless/intel/iwlwifi/mvm/ops.c { .cmd_id = _cmd_id, .fn = _fn, .context = _context } context 238 drivers/net/wireless/intel/iwlwifi/mvm/ops.c { .cmd_id = WIDE_ID(_grp, _cmd), .fn = _fn, .context = _context } context 910 drivers/net/wireless/intel/iwlwifi/mvm/ops.c enum iwl_rx_handler_context context; context 945 drivers/net/wireless/intel/iwlwifi/mvm/ops.c if (entry->context == RX_HANDLER_ASYNC_LOCKED) context 950 drivers/net/wireless/intel/iwlwifi/mvm/ops.c if (entry->context == RX_HANDLER_ASYNC_LOCKED) context 1011 drivers/net/wireless/intel/iwlwifi/mvm/ops.c if (rx_h->context == RX_HANDLER_SYNC) { context 1025 drivers/net/wireless/intel/iwlwifi/mvm/ops.c entry->context = rx_h->context; context 485 drivers/net/wireless/intersil/hostap/hostap_hw.c long context, u16 resp0, context 487 drivers/net/wireless/intersil/hostap/hostap_hw.c long context) context 513 drivers/net/wireless/intersil/hostap/hostap_hw.c entry->context = context; context 690 drivers/net/wireless/intersil/hostap/hostap_hw.c entry->callback(dev, entry->context, entry->resp0, context 1682 drivers/net/wireless/intersil/hostap/hostap_hw.c static void prism2_transmit_cb(struct net_device *dev, long context, context 1687 drivers/net/wireless/intersil/hostap/hostap_hw.c int idx = (int) context; context 555 drivers/net/wireless/intersil/hostap/hostap_wlan.h void (*callback)(struct net_device *dev, long context, u16 resp0, context 557 drivers/net/wireless/intersil/hostap/hostap_wlan.h long context; context 522 drivers/net/wireless/intersil/orinoco/orinoco_usb.c struct request_context *ctx = urb->context; context 1394 drivers/net/wireless/intersil/orinoco/orinoco_usb.c struct ezusb_priv *upriv = (struct ezusb_priv *) urb->context; context 491 drivers/net/wireless/intersil/p54/p54pci.c void *context) context 493 drivers/net/wireless/intersil/p54/p54pci.c struct p54p_priv *priv = context; context 142 drivers/net/wireless/intersil/p54/p54usb.c struct sk_buff *skb = (struct sk_buff *) urb->context; context 174 drivers/net/wireless/intersil/p54/p54usb.c urb->context = skb; context 197 drivers/net/wireless/intersil/p54/p54usb.c struct sk_buff *skb = urb->context; context 920 drivers/net/wireless/intersil/p54/p54usb.c void *context) context 922 drivers/net/wireless/intersil/p54/p54usb.c struct p54u_priv *priv = context; context 32 drivers/net/wireless/marvell/libertas/firmware.c void (*cb)(const struct firmware *fw, void *context)) context 45 drivers/net/wireless/marvell/libertas/firmware.c static void main_firmware_cb(const struct firmware *firmware, void *context) context 47 drivers/net/wireless/marvell/libertas/firmware.c struct lbs_private *priv = context; context 64 drivers/net/wireless/marvell/libertas/firmware.c static void helper_firmware_cb(const struct firmware *firmware, void *context) context 66 drivers/net/wireless/marvell/libertas/firmware.c struct lbs_private *priv = context; context 87 drivers/net/wireless/marvell/libertas/if_usb.c struct if_usb_card *cardp = (struct if_usb_card *) urb->context; context 491 drivers/net/wireless/marvell/libertas/if_usb.c struct if_usb_card *cardp = urb->context; context 655 drivers/net/wireless/marvell/libertas/if_usb.c struct if_usb_card *cardp = urb->context; context 462 drivers/net/wireless/marvell/libertas_tf/if_usb.c struct if_usb_card *cardp = urb->context; context 626 drivers/net/wireless/marvell/libertas_tf/if_usb.c struct if_usb_card *cardp = urb->context; context 518 drivers/net/wireless/marvell/mwifiex/main.c static int _mwifiex_fw_dpc(const struct firmware *firmware, void *context) context 522 drivers/net/wireless/marvell/mwifiex/main.c struct mwifiex_adapter *adapter = context; context 681 drivers/net/wireless/marvell/mwifiex/main.c static void mwifiex_fw_dpc(const struct firmware *firmware, void *context) context 683 drivers/net/wireless/marvell/mwifiex/main.c _mwifiex_fw_dpc(firmware, context); context 2418 drivers/net/wireless/marvell/mwifiex/pcie.c static irqreturn_t mwifiex_pcie_interrupt(int irq, void *context) context 2420 drivers/net/wireless/marvell/mwifiex/pcie.c struct mwifiex_msix_context *ctx = context; context 165 drivers/net/wireless/marvell/mwifiex/usb.c struct urb_context *context = (struct urb_context *)urb->context; context 166 drivers/net/wireless/marvell/mwifiex/usb.c struct mwifiex_adapter *adapter = context->adapter; context 167 drivers/net/wireless/marvell/mwifiex/usb.c struct sk_buff *skb = context->skb; context 178 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep == context->ep) context 189 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep != context->ep) context 198 drivers/net/wireless/marvell/mwifiex/usb.c status = mwifiex_usb_recv(adapter, skb, context->ep); context 210 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep == context->ep) context 218 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep != context->ep) context 231 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep != context->ep) context 238 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep == context->ep) context 243 drivers/net/wireless/marvell/mwifiex/usb.c if (card->rx_cmd_ep == context->ep) { context 244 drivers/net/wireless/marvell/mwifiex/usb.c mwifiex_usb_submit_rx_urb(context, size); context 247 drivers/net/wireless/marvell/mwifiex/usb.c mwifiex_usb_submit_rx_urb(context, size); context 249 drivers/net/wireless/marvell/mwifiex/usb.c context->skb = NULL; context 258 drivers/net/wireless/marvell/mwifiex/usb.c struct urb_context *context = (struct urb_context *)(urb->context); context 259 drivers/net/wireless/marvell/mwifiex/usb.c struct mwifiex_adapter *adapter = context->adapter; context 267 drivers/net/wireless/marvell/mwifiex/usb.c if (context->ep == card->tx_cmd_ep) { context 275 drivers/net/wireless/marvell/mwifiex/usb.c mwifiex_write_data_complete(adapter, context->skb, 0, context 279 drivers/net/wireless/marvell/mwifiex/usb.c if (context->ep == port->tx_data_ep) { context 807 drivers/net/wireless/marvell/mwifiex/usb.c struct urb_context *context, context 814 drivers/net/wireless/marvell/mwifiex/usb.c context->adapter = adapter; context 815 drivers/net/wireless/marvell/mwifiex/usb.c context->ep = ep; context 816 drivers/net/wireless/marvell/mwifiex/usb.c context->skb = skb_send; context 817 drivers/net/wireless/marvell/mwifiex/usb.c tx_urb = context->urb; context 824 drivers/net/wireless/marvell/mwifiex/usb.c (void *)context, card->tx_cmd_interval); context 829 drivers/net/wireless/marvell/mwifiex/usb.c mwifiex_usb_tx_complete, (void *)context); context 959 drivers/net/wireless/marvell/mwifiex/usb.c struct urb_context *context = NULL; context 1069 drivers/net/wireless/marvell/mwifiex/usb.c context = &port->tx_data_list[port->tx_data_ix++]; context 1071 drivers/net/wireless/marvell/mwifiex/usb.c context, skb_send); context 1099 drivers/net/wireless/marvell/mwifiex/usb.c context = &port->tx_data_list[port->tx_data_ix++]; context 1101 drivers/net/wireless/marvell/mwifiex/usb.c context, skb_send); context 1169 drivers/net/wireless/marvell/mwifiex/usb.c struct urb_context *context = NULL; context 1187 drivers/net/wireless/marvell/mwifiex/usb.c context = &card->tx_cmd; context 1219 drivers/net/wireless/marvell/mwifiex/usb.c context = &port->tx_data_list[port->tx_data_ix++]; context 1222 drivers/net/wireless/marvell/mwifiex/usb.c return mwifiex_usb_construct_send_urb(adapter, port, ep, context, skb); context 521 drivers/net/wireless/marvell/mwl8k.c static void mwl8k_fw_state_machine(const struct firmware *fw, void *context); context 5727 drivers/net/wireless/marvell/mwl8k.c static void mwl8k_fw_state_machine(const struct firmware *fw, void *context) context 5729 drivers/net/wireless/marvell/mwl8k.c struct mwl8k_priv *priv = context; context 373 drivers/net/wireless/mediatek/mt76/usb.c void *context) context 387 drivers/net/wireless/mediatek/mt76/usb.c urb->context = context; context 497 drivers/net/wireless/mediatek/mt76/usb.c struct mt76_dev *dev = urb->context; context 739 drivers/net/wireless/mediatek/mt76/usb.c struct mt76_queue_entry *e = urb->context; context 184 drivers/net/wireless/mediatek/mt7601u/dma.c struct mt7601u_dev *dev = urb->context; context 231 drivers/net/wireless/mediatek/mt7601u/dma.c struct mt7601u_tx_queue *q = urb->context; context 56 drivers/net/wireless/mediatek/mt7601u/usb.c usb_complete_t complete_fn, void *context) context 68 drivers/net/wireless/mediatek/mt7601u/usb.c complete_fn, context); context 82 drivers/net/wireless/mediatek/mt7601u/usb.c struct completion *cmpl = urb->context; context 61 drivers/net/wireless/mediatek/mt7601u/usb.h usb_complete_t complete_fn, void *context); context 1294 drivers/net/wireless/ralink/rt2x00/rt2500usb.c struct queue_entry *entry = (struct queue_entry *)urb->context; context 177 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c struct rt2x00_async_read_data *rd = urb->context; context 267 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c struct queue_entry *entry = (struct queue_entry *)urb->context; context 371 drivers/net/wireless/ralink/rt2x00/rt2x00usb.c struct queue_entry *entry = (struct queue_entry *)urb->context; context 118 drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c kfree(urb->context); context 188 drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 327 drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 403 drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c urb->context = skb; context 462 drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c struct ieee80211_hw *hw = (struct ieee80211_hw *)urb->context; context 4701 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 5100 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c skb = (struct sk_buff *)rx_urb->urb.context; context 5147 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c skb = (struct sk_buff *)rx_urb->urb.context; context 5359 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 5368 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c rx_urb->urb.context = NULL; context 5415 drivers/net/wireless/realtek/rtl8xxxu/rtl8xxxu_core.c struct rtl8xxxu_priv *priv = (struct rtl8xxxu_priv *)urb->context; context 72 drivers/net/wireless/realtek/rtlwifi/core.c static void rtl_fw_do_work(const struct firmware *firmware, void *context, context 75 drivers/net/wireless/realtek/rtlwifi/core.c struct ieee80211_hw *hw = context; context 114 drivers/net/wireless/realtek/rtlwifi/core.c void rtl_fw_cb(const struct firmware *firmware, void *context) context 116 drivers/net/wireless/realtek/rtlwifi/core.c rtl_fw_do_work(firmware, context, false); context 120 drivers/net/wireless/realtek/rtlwifi/core.c void rtl_wowlan_fw_cb(const struct firmware *firmware, void *context) context 122 drivers/net/wireless/realtek/rtlwifi/core.c rtl_fw_do_work(firmware, context, true); context 54 drivers/net/wireless/realtek/rtlwifi/core.h void rtl_fw_cb(const struct firmware *firmware, void *context); context 55 drivers/net/wireless/realtek/rtlwifi/core.h void rtl_wowlan_fw_cb(const struct firmware *firmware, void *context); context 62 drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c static void rtl92se_fw_cb(const struct firmware *firmware, void *context) context 64 drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c struct ieee80211_hw *hw = context; context 592 drivers/net/wireless/realtek/rtlwifi/usb.c struct rtl_usb *rtlusb = (struct rtl_usb *)_urb->context; context 831 drivers/net/wireless/realtek/rtlwifi/usb.c skb = (struct sk_buff *)_urb->context; context 861 drivers/net/wireless/realtek/rtlwifi/usb.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 876 drivers/net/wireless/realtek/rtw88/main.c static void rtw_load_firmware_cb(const struct firmware *firmware, void *context) context 878 drivers/net/wireless/realtek/rtw88/main.c struct rtw_dev *rtwdev = context; context 263 drivers/net/wireless/rsi/rsi_91x_usb.c struct rx_usb_ctrl_block *rx_cb = urb->context; context 6550 drivers/net/wireless/ti/wlcore/main.c static void wlcore_nvs_cb(const struct firmware *fw, void *context) context 6552 drivers/net/wireless/ti/wlcore/main.c struct wl1271 *wl = context; context 123 drivers/net/wireless/zydas/zd1201.c struct zd1201 *zd = urb->context; context 187 drivers/net/wireless/zydas/zd1201.c struct zd1201 *zd = urb->context; context 194 drivers/net/wireless/zydas/zd1201.c struct zd1201 *zd = urb->context; context 488 drivers/net/wireless/zydas/zd1211rw/zd_mac.c struct ieee80211_hw * hw = zd_usb_to_hw(urb->context); context 360 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb *usb = urb->context; context 375 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb *usb = urb->context; context 386 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_mac *mac = zd_hw_mac(zd_usb_to_hw(urb->context)); context 462 drivers/net/wireless/zydas/zd1211rw/zd_usb.c usb = urb->context; context 678 drivers/net/wireless/zydas/zd1211rw/zd_usb.c usb = urb->context; context 967 drivers/net/wireless/zydas/zd1211rw/zd_usb.c skb = (struct sk_buff *)urb->context; context 1779 drivers/net/wireless/zydas/zd1211rw/zd_usb.c struct zd_usb *usb = urb->context; context 358 drivers/nfc/microread/microread.c static void microread_im_transceive_cb(void *context, struct sk_buff *skb, context 361 drivers/nfc/microread/microread.c struct microread_info *info = context; context 70 drivers/nfc/nfcmrvl/usb.c struct nfcmrvl_usb_drv_data *drv_data = urb->context; context 159 drivers/nfc/nfcmrvl/usb.c struct sk_buff *skb = urb->context; context 61 drivers/nfc/pn533/usb.c struct pn533_usb_phy *phy = urb->context; context 86 drivers/nfc/pn533/usb.c struct pn533_usb_phy *phy = urb->context; context 355 drivers/nfc/pn533/usb.c struct pn533_acr122_poweron_rdr_arg *arg = urb->context; context 385 drivers/nfc/pn533/usb.c cntx = phy->in_urb->context; /* backup context */ context 388 drivers/nfc/pn533/usb.c phy->in_urb->context = &arg; context 410 drivers/nfc/pn533/usb.c phy->in_urb->context = cntx; /* restore context */ context 417 drivers/nfc/pn533/usb.c struct pn533_usb_phy *phy = urb->context; context 574 drivers/nfc/pn544/pn544.c static void pn544_hci_data_exchange_cb(void *context, struct sk_buff *skb, context 577 drivers/nfc/pn544/pn544.c struct pn544_hci_info *info = context; context 16 drivers/nfc/pn544/pn544.h typedef int (*fw_download_t)(void *context, const char *firmware_name, context 615 drivers/nfc/port100.c struct port100 *dev = urb->context; context 667 drivers/nfc/port100.c struct port100 *dev = urb->context; context 930 drivers/nfc/port100.c struct port100 *dev = urb->context; context 737 drivers/nfc/st21nfca/core.c static void st21nfca_hci_data_exchange_cb(void *context, struct sk_buff *skb, context 740 drivers/nfc/st21nfca/core.c struct st21nfca_hci_info *info = context; context 467 drivers/nfc/st21nfca/dep.c static void st21nfca_im_recv_atr_res_cb(void *context, struct sk_buff *skb, context 470 drivers/nfc/st21nfca/dep.c struct st21nfca_hci_info *info = context; context 578 drivers/nfc/st21nfca/dep.c static void st21nfca_im_recv_dep_res_cb(void *context, struct sk_buff *skb, context 581 drivers/nfc/st21nfca/dep.c struct st21nfca_hci_info *info = context; context 233 drivers/nvme/host/rdma.c static void nvme_rdma_qp_event(struct ib_event *event, void *context) context 1638 drivers/nvme/host/rdma.c struct nvme_rdma_queue *queue = cm_id->context; context 917 drivers/nvme/target/rdma.c struct nvmet_port *port = cm_id->context; context 1285 drivers/nvme/target/rdma.c queue->port = cm_id->context; context 1421 drivers/nvme/target/rdma.c port = cm_id->context; context 162 drivers/nvmem/bcm-ocotp.c static int bcm_otpc_read(void *context, unsigned int offset, void *val, context 165 drivers/nvmem/bcm-ocotp.c struct otpc_priv *priv = context; context 193 drivers/nvmem/bcm-ocotp.c static int bcm_otpc_write(void *context, unsigned int offset, void *val, context 196 drivers/nvmem/bcm-ocotp.c struct otpc_priv *priv = context; context 33 drivers/nvmem/imx-iim.c static int imx_iim_read(void *context, unsigned int offset, context 36 drivers/nvmem/imx-iim.c struct iim_priv *iim = context; context 71 drivers/nvmem/imx-ocotp-scu.c static int imx_scu_ocotp_read(void *context, unsigned int offset, context 74 drivers/nvmem/imx-ocotp-scu.c struct ocotp_priv *priv = context; context 119 drivers/nvmem/imx-ocotp.c static int imx_ocotp_read(void *context, unsigned int offset, context 122 drivers/nvmem/imx-ocotp.c struct ocotp_priv *priv = context; context 243 drivers/nvmem/imx-ocotp.c static int imx_ocotp_write(void *context, unsigned int offset, void *val, context 246 drivers/nvmem/imx-ocotp.c struct ocotp_priv *priv = context; context 90 drivers/nvmem/lpc18xx_eeprom.c static int lpc18xx_eeprom_gather_write(void *context, unsigned int reg, context 93 drivers/nvmem/lpc18xx_eeprom.c struct lpc18xx_eeprom_dev *eeprom = context; context 129 drivers/nvmem/lpc18xx_eeprom.c static int lpc18xx_eeprom_read(void *context, unsigned int offset, context 132 drivers/nvmem/lpc18xx_eeprom.c struct lpc18xx_eeprom_dev *eeprom = context; context 41 drivers/nvmem/lpc18xx_otp.c static int lpc18xx_otp_read(void *context, unsigned int offset, context 44 drivers/nvmem/lpc18xx_otp.c struct lpc18xx_otp *otp = context; context 17 drivers/nvmem/meson-efuse.c static int meson_efuse_read(void *context, unsigned int offset, context 24 drivers/nvmem/meson-efuse.c static int meson_efuse_write(void *context, unsigned int offset, context 136 drivers/nvmem/meson-mx-efuse.c static int meson_mx_efuse_read(void *context, unsigned int offset, context 139 drivers/nvmem/meson-mx-efuse.c struct meson_mx_efuse *efuse = context; context 18 drivers/nvmem/mtk-efuse.c static int mtk_reg_read(void *context, context 21 drivers/nvmem/mtk-efuse.c struct mtk_efuse_priv *priv = context; context 31 drivers/nvmem/mtk-efuse.c static int mtk_reg_write(void *context, context 34 drivers/nvmem/mtk-efuse.c struct mtk_efuse_priv *priv = context; context 58 drivers/nvmem/mxs-ocotp.c static int mxs_ocotp_read(void *context, unsigned int offset, context 61 drivers/nvmem/mxs-ocotp.c struct mxs_ocotp *otp = context; context 17 drivers/nvmem/qfprom.c static int qfprom_reg_read(void *context, context 20 drivers/nvmem/qfprom.c struct qfprom_priv *priv = context; context 55 drivers/nvmem/rockchip-efuse.c static int rockchip_rk3288_efuse_read(void *context, unsigned int offset, context 58 drivers/nvmem/rockchip-efuse.c struct rockchip_efuse_chip *efuse = context; context 95 drivers/nvmem/rockchip-efuse.c static int rockchip_rk3328_efuse_read(void *context, unsigned int offset, context 98 drivers/nvmem/rockchip-efuse.c struct rockchip_efuse_chip *efuse = context; context 150 drivers/nvmem/rockchip-efuse.c static int rockchip_rk3399_efuse_read(void *context, unsigned int offset, context 153 drivers/nvmem/rockchip-efuse.c struct rockchip_efuse_chip *efuse = context; context 106 drivers/nvmem/sc27xx-efuse.c static int sc27xx_efuse_read(void *context, u32 offset, void *val, size_t bytes) context 108 drivers/nvmem/sc27xx-efuse.c struct sc27xx_efuse *efuse = context; context 52 drivers/nvmem/snvs_lpgpr.c static int snvs_lpgpr_write(void *context, unsigned int offset, void *val, context 55 drivers/nvmem/snvs_lpgpr.c struct snvs_lpgpr_priv *priv = context; context 78 drivers/nvmem/snvs_lpgpr.c static int snvs_lpgpr_read(void *context, unsigned int offset, void *val, context 81 drivers/nvmem/snvs_lpgpr.c struct snvs_lpgpr_priv *priv = context; context 37 drivers/nvmem/stm32-romem.c static int stm32_romem_read(void *context, unsigned int offset, void *buf, context 40 drivers/nvmem/stm32-romem.c struct stm32_romem_priv *priv = context; context 68 drivers/nvmem/stm32-romem.c static int stm32_bsec_read(void *context, unsigned int offset, void *buf, context 71 drivers/nvmem/stm32-romem.c struct stm32_romem_priv *priv = context; context 115 drivers/nvmem/stm32-romem.c static int stm32_bsec_write(void *context, unsigned int offset, void *buf, context 118 drivers/nvmem/stm32-romem.c struct stm32_romem_priv *priv = context; context 40 drivers/nvmem/sunxi_sid.c static int sunxi_sid_read(void *context, unsigned int offset, context 43 drivers/nvmem/sunxi_sid.c struct sunxi_sid *sid = context; context 81 drivers/nvmem/sunxi_sid.c static int sun8i_sid_read_by_reg(void *context, unsigned int offset, context 84 drivers/nvmem/sunxi_sid.c struct sunxi_sid *sid = context; context 19 drivers/nvmem/uniphier-efuse.c static int uniphier_reg_read(void *context, context 22 drivers/nvmem/uniphier-efuse.c struct uniphier_efuse_priv *priv = context; context 146 drivers/nvmem/vf610-ocotp.c static int vf610_ocotp_read(void *context, unsigned int offset, context 149 drivers/nvmem/vf610-ocotp.c struct vf610_ocotp *ocotp = context; context 21 drivers/nvmem/zynqmp_nvmem.c static int zynqmp_nvmem_read(void *context, unsigned int offset, context 26 drivers/nvmem/zynqmp_nvmem.c struct zynqmp_nvmem_data *priv = context; context 42 drivers/pci/controller/pci-hyperv-intf.c int hyperv_reg_block_invalidate(struct pci_dev *dev, void *context, context 43 drivers/pci/controller/pci-hyperv-intf.c void (*block_invalidate)(void *context, context 49 drivers/pci/controller/pci-hyperv-intf.c return hvpci_block_ops.reg_blk_invalidate(dev, context, context 264 drivers/pci/controller/pci-hyperv.c void (*completion_func)(void *context, struct pci_response *resp, context 535 drivers/pci/controller/pci-hyperv.c void (*block_invalidate)(void *context, u64 block_mask); context 550 drivers/pci/controller/pci-hyperv.c static void hv_pci_onchannelcallback(void *context); context 562 drivers/pci/controller/pci-hyperv.c static void hv_pci_generic_compl(void *context, struct pci_response *resp, context 565 drivers/pci/controller/pci-hyperv.c struct hv_pci_compl *comp_pkt = context; context 895 drivers/pci/controller/pci-hyperv.c static void hv_pci_read_config_compl(void *context, struct pci_response *resp, context 898 drivers/pci/controller/pci-hyperv.c struct hv_read_config_compl *comp = context; context 994 drivers/pci/controller/pci-hyperv.c static void hv_pci_write_config_compl(void *context, struct pci_response *resp, context 997 drivers/pci/controller/pci-hyperv.c struct hv_pci_compl *comp_pkt = context; context 1082 drivers/pci/controller/pci-hyperv.c int hv_register_block_invalidate(struct pci_dev *pdev, void *context, context 1083 drivers/pci/controller/pci-hyperv.c void (*block_invalidate)(void *context, context 1096 drivers/pci/controller/pci-hyperv.c hpdev->invalidate_context = context; context 1285 drivers/pci/controller/pci-hyperv.c static void hv_pci_compose_compl(void *context, struct pci_response *resp, context 1288 drivers/pci/controller/pci-hyperv.c struct compose_comp_ctxt *comp_pkt = context; context 1831 drivers/pci/controller/pci-hyperv.c static void q_resource_requirements(void *context, struct pci_response *resp, context 1834 drivers/pci/controller/pci-hyperv.c struct q_res_req_compl *completion = context; context 2240 drivers/pci/controller/pci-hyperv.c static void hv_pci_onchannelcallback(void *context) context 2244 drivers/pci/controller/pci-hyperv.c struct hv_pcibus_device *hbus = context; context 182 drivers/pci/hotplug/acpi_pcihp.c check_hotplug(acpi_handle handle, u32 lvl, void *context, void **rv) context 184 drivers/pci/hotplug/acpi_pcihp.c int *found = (int *)context; context 61 drivers/pci/hotplug/acpiphp.h struct acpiphp_context *context; context 50 drivers/pci/hotplug/acpiphp_glue.c static void hotplug_event(u32 type, struct acpiphp_context *context); context 61 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 63 drivers/pci/hotplug/acpiphp_glue.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 64 drivers/pci/hotplug/acpiphp_glue.c if (!context) context 67 drivers/pci/hotplug/acpiphp_glue.c context->refcount = 1; context 68 drivers/pci/hotplug/acpiphp_glue.c context->hp.notify = acpiphp_hotplug_notify; context 69 drivers/pci/hotplug/acpiphp_glue.c context->hp.fixup = acpiphp_post_dock_fixup; context 70 drivers/pci/hotplug/acpiphp_glue.c acpi_set_hp_context(adev, &context->hp); context 71 drivers/pci/hotplug/acpiphp_glue.c return context; context 82 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 87 drivers/pci/hotplug/acpiphp_glue.c context = to_acpiphp_context(adev->hp); context 88 drivers/pci/hotplug/acpiphp_glue.c context->refcount++; context 89 drivers/pci/hotplug/acpiphp_glue.c return context; context 100 drivers/pci/hotplug/acpiphp_glue.c static void acpiphp_put_context(struct acpiphp_context *context) context 102 drivers/pci/hotplug/acpiphp_glue.c if (--context->refcount) context 105 drivers/pci/hotplug/acpiphp_glue.c WARN_ON(context->bridge); context 106 drivers/pci/hotplug/acpiphp_glue.c context->hp.self->hp = NULL; context 107 drivers/pci/hotplug/acpiphp_glue.c kfree(context); context 122 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 125 drivers/pci/hotplug/acpiphp_glue.c context = acpiphp_get_context(adev); context 126 drivers/pci/hotplug/acpiphp_glue.c if (!context || context->func.parent->is_going_away) { context 130 drivers/pci/hotplug/acpiphp_glue.c get_bridge(context->func.parent); context 131 drivers/pci/hotplug/acpiphp_glue.c acpiphp_put_context(context); context 133 drivers/pci/hotplug/acpiphp_glue.c return context; context 136 drivers/pci/hotplug/acpiphp_glue.c static void acpiphp_let_context_go(struct acpiphp_context *context) context 138 drivers/pci/hotplug/acpiphp_glue.c put_bridge(context->func.parent); context 143 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 159 drivers/pci/hotplug/acpiphp_glue.c context = bridge->context; context 161 drivers/pci/hotplug/acpiphp_glue.c if (context) { context 163 drivers/pci/hotplug/acpiphp_glue.c put_bridge(context->func.parent); context 164 drivers/pci/hotplug/acpiphp_glue.c context->bridge = NULL; context 165 drivers/pci/hotplug/acpiphp_glue.c acpiphp_put_context(context); context 183 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context = acpiphp_grab_context(adev); context 187 drivers/pci/hotplug/acpiphp_glue.c if (!context) context 190 drivers/pci/hotplug/acpiphp_glue.c bus = context->func.slot->bus; context 208 drivers/pci/hotplug/acpiphp_glue.c acpiphp_let_context_go(context); context 222 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 247 drivers/pci/hotplug/acpiphp_glue.c context = acpiphp_init_context(adev); context 248 drivers/pci/hotplug/acpiphp_glue.c if (!context) { context 253 drivers/pci/hotplug/acpiphp_glue.c newfunc = &context->func; context 276 drivers/pci/hotplug/acpiphp_glue.c acpiphp_put_context(context); context 772 drivers/pci/hotplug/acpiphp_glue.c static void hotplug_event(u32 type, struct acpiphp_context *context) context 774 drivers/pci/hotplug/acpiphp_glue.c acpi_handle handle = context->hp.self->handle; context 775 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_func *func = &context->func; context 780 drivers/pci/hotplug/acpiphp_glue.c bridge = context->bridge; context 828 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 830 drivers/pci/hotplug/acpiphp_glue.c context = acpiphp_grab_context(adev); context 831 drivers/pci/hotplug/acpiphp_glue.c if (!context) context 834 drivers/pci/hotplug/acpiphp_glue.c hotplug_event(type, context); context 835 drivers/pci/hotplug/acpiphp_glue.c acpiphp_let_context_go(context); context 888 drivers/pci/hotplug/acpiphp_glue.c struct acpiphp_context *context; context 896 drivers/pci/hotplug/acpiphp_glue.c context = acpiphp_get_context(adev); context 897 drivers/pci/hotplug/acpiphp_glue.c if (!context) context 900 drivers/pci/hotplug/acpiphp_glue.c bridge->context = context; context 901 drivers/pci/hotplug/acpiphp_glue.c context->bridge = bridge; context 903 drivers/pci/hotplug/acpiphp_glue.c get_bridge(context->func.parent); context 84 drivers/pci/hotplug/acpiphp_ibm.c static void ibm_handle_events(acpi_handle handle, u32 event, void *context); context 90 drivers/pci/hotplug/acpiphp_ibm.c u32 lvl, void *context, void **rv); context 251 drivers/pci/hotplug/acpiphp_ibm.c static void ibm_handle_events(acpi_handle handle, u32 event, void *context) context 255 drivers/pci/hotplug/acpiphp_ibm.c struct notification *note = context; context 385 drivers/pci/hotplug/acpiphp_ibm.c u32 lvl, void *context, void **rv) context 387 drivers/pci/hotplug/acpiphp_ibm.c acpi_handle *phandle = (acpi_handle *)context; context 61 drivers/pci/pci-acpi.c static acpi_status acpi_match_rc(acpi_handle handle, u32 lvl, void *context, context 64 drivers/pci/pci-acpi.c u16 *segment = context; context 826 drivers/pci/pci-acpi.c static void pci_acpi_wake_bus(struct acpi_device_wakeup_context *context) context 831 drivers/pci/pci-acpi.c adev = container_of(context, struct acpi_device, wakeup.context); context 840 drivers/pci/pci-acpi.c static void pci_acpi_wake_dev(struct acpi_device_wakeup_context *context) context 844 drivers/pci/pci-acpi.c pci_dev = to_pci_dev(context->dev); context 1211 drivers/pci/pcie/aer.c static irqreturn_t aer_isr(int irq, void *context) context 1213 drivers/pci/pcie/aer.c struct pcie_device *dev = (struct pcie_device *)context; context 1232 drivers/pci/pcie/aer.c static irqreturn_t aer_irq(int irq, void *context) context 1234 drivers/pci/pcie/aer.c struct pcie_device *pdev = (struct pcie_device *)context; context 49 drivers/pci/pcie/bw_notification.c static irqreturn_t pcie_bw_notification_irq(int irq, void *context) context 51 drivers/pci/pcie/bw_notification.c struct pcie_device *srv = context; context 67 drivers/pci/pcie/bw_notification.c static irqreturn_t pcie_bw_notification_handler(int irq, void *context) context 69 drivers/pci/pcie/bw_notification.c struct pcie_device *srv = context; context 227 drivers/pci/pcie/dpc.c static irqreturn_t dpc_handler(int irq, void *context) context 230 drivers/pci/pcie/dpc.c struct dpc_dev *dpc = context; context 267 drivers/pci/pcie/dpc.c static irqreturn_t dpc_irq(int irq, void *context) context 269 drivers/pci/pcie/dpc.c struct dpc_dev *dpc = (struct dpc_dev *)context; context 264 drivers/pci/pcie/pme.c static irqreturn_t pcie_pme_irq(int irq, void *context) context 271 drivers/pci/pcie/pme.c port = ((struct pcie_device *)context)->port; context 272 drivers/pci/pcie/pme.c data = get_service_data((struct pcie_device *)context); context 99 drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c static int phy_g12a_usb3_pcie_cr_bus_read(void *context, unsigned int addr, context 102 drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c struct phy_g12a_usb3_pcie_priv *priv = context; context 132 drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c static int phy_g12a_usb3_pcie_cr_bus_write(void *context, unsigned int addr, context 135 drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c struct phy_g12a_usb3_pcie_priv *priv = context; context 117 drivers/pinctrl/intel/pinctrl-intel.c struct intel_pinctrl_context context; context 1358 drivers/pinctrl/intel/pinctrl-intel.c pctrl->context.pads = pads; context 1359 drivers/pinctrl/intel/pinctrl-intel.c pctrl->context.communities = communities; context 1527 drivers/pinctrl/intel/pinctrl-intel.c pads = pctrl->context.pads; context 1546 drivers/pinctrl/intel/pinctrl-intel.c communities = pctrl->context.communities; context 1620 drivers/pinctrl/intel/pinctrl-intel.c pads = pctrl->context.pads; context 1656 drivers/pinctrl/intel/pinctrl-intel.c communities = pctrl->context.communities; context 314 drivers/pinctrl/pinctrl-mcp23s08.c static int mcp23sxx_spi_write(void *context, const void *data, size_t count) context 316 drivers/pinctrl/pinctrl-mcp23s08.c struct mcp23s08 *mcp = context; context 329 drivers/pinctrl/pinctrl-mcp23s08.c static int mcp23sxx_spi_gather_write(void *context, context 333 drivers/pinctrl/pinctrl-mcp23s08.c struct mcp23s08 *mcp = context; context 348 drivers/pinctrl/pinctrl-mcp23s08.c static int mcp23sxx_spi_read(void *context, const void *reg, size_t reg_size, context 351 drivers/pinctrl/pinctrl-mcp23s08.c struct mcp23s08 *mcp = context; context 997 drivers/pinctrl/pinctrl-sx150x.c static int sx150x_regmap_reg_read(void *context, unsigned int reg, context 1001 drivers/pinctrl/pinctrl-sx150x.c struct sx150x_pinctrl *pctl = context; context 1050 drivers/pinctrl/pinctrl-sx150x.c static int sx150x_regmap_reg_write(void *context, unsigned int reg, context 1054 drivers/pinctrl/pinctrl-sx150x.c struct sx150x_pinctrl *pctl = context; context 532 drivers/platform/chrome/cros_ec_lpc.c void *context, void **retval) context 534 drivers/platform/chrome/cros_ec_lpc.c *(bool *)context = true; context 286 drivers/platform/olpc/olpc-xo175-ec.c priv->msg.context = priv; context 1706 drivers/platform/x86/acer-wmi.c static void acer_wmi_notify(u32 value, void *context) context 559 drivers/platform/x86/apple-gmux.c static void gmux_notify_handler(acpi_handle device, u32 value, void *context) context 562 drivers/platform/x86/apple-gmux.c struct pnp_dev *pnp = (struct pnp_dev *)context; context 2013 drivers/platform/x86/asus-wmi.c static void asus_wmi_notify(u32 value, void *context) context 2015 drivers/platform/x86/asus-wmi.c struct asus_wmi *asus = context; context 245 drivers/platform/x86/dell-rbtn.c static void ACPI_SYSTEM_XFACE rbtn_clear_suspended_flag(void *context) context 247 drivers/platform/x86/dell-rbtn.c struct rbtn_data *rbtn_data = context; context 146 drivers/platform/x86/dell-smbios-wmi.c static int dell_smbios_wmi_probe(struct wmi_device *wdev, const void *context) context 47 drivers/platform/x86/dell-smo8800.c void *context) context 58 drivers/platform/x86/dell-smo8800.c *((u32 *)context) = irq->interrupts[0]; context 73 drivers/platform/x86/dell-wmi-aio.c static void dell_wmi_aio_notify(u32 value, void *context) context 102 drivers/platform/x86/dell-wmi-descriptor.c const void *context) context 664 drivers/platform/x86/dell-wmi.c static int dell_wmi_probe(struct wmi_device *wdev, const void *context) context 82 drivers/platform/x86/dell_rbu.c static int context; context 522 drivers/platform/x86/dell_rbu.c static void callbackfn_rbu(const struct firmware *fw, void *context) context 602 drivers/platform/x86/dell_rbu.c &rbu_device->dev, GFP_KERNEL, &context, context 516 drivers/platform/x86/hp-wmi.c static void hp_wmi_notify(u32 value, void *context) context 279 drivers/platform/x86/hp_accel.c lis3lv02d_get_resource(struct acpi_resource *resource, void *context) context 283 drivers/platform/x86/hp_accel.c u32 *device_irq = context; context 169 drivers/platform/x86/huawei-wmi.c static int huawei_wmi_probe(struct wmi_device *wdev, const void *context) context 937 drivers/platform/x86/ideapad-laptop.c static void ideapad_wmi_notify(u32 value, void *context) context 941 drivers/platform/x86/ideapad-laptop.c ideapad_input_report(context, value); context 341 drivers/platform/x86/intel-hid.c static void notify_handler(acpi_handle handle, u32 event, void *context) context 343 drivers/platform/x86/intel-hid.c struct platform_device *device = context; context 557 drivers/platform/x86/intel-hid.c check_acpi_dev(acpi_handle handle, u32 lvl, void *context, void **rv) context 559 drivers/platform/x86/intel-hid.c const struct acpi_device_id *ids = context; context 74 drivers/platform/x86/intel-vbtn.c static void notify_handler(acpi_handle handle, u32 event, void *context) context 76 drivers/platform/x86/intel-vbtn.c struct platform_device *device = context; context 245 drivers/platform/x86/intel-vbtn.c check_acpi_dev(acpi_handle handle, u32 lvl, void *context, void **rv) context 247 drivers/platform/x86/intel-vbtn.c const struct acpi_device_id *ids = context; context 60 drivers/platform/x86/intel-wmi-thunderbolt.c const void *context) context 402 drivers/platform/x86/intel_menlow.c void *context, void **rv) context 197 drivers/platform/x86/lg-laptop.c static void wmi_notify(u32 value, void *context) context 202 drivers/platform/x86/lg-laptop.c long data = (long)context; context 1761 drivers/platform/x86/mlx-platform.c mlxplat_mlxcpld_reg_read(void *context, unsigned int reg, unsigned int *val) context 1763 drivers/platform/x86/mlx-platform.c struct mlxplat_mlxcpld_regmap_context *ctx = context; context 1770 drivers/platform/x86/mlx-platform.c mlxplat_mlxcpld_reg_write(void *context, unsigned int reg, unsigned int val) context 1772 drivers/platform/x86/mlx-platform.c struct mlxplat_mlxcpld_regmap_context *ctx = context; context 171 drivers/platform/x86/msi-wmi.c static void msi_wmi_notify(u32 value, void *context) context 1283 drivers/platform/x86/sony-laptop.c void *context, void **return_value) context 4334 drivers/platform/x86/sony-laptop.c sony_pic_read_possible_resource(struct acpi_resource *resource, void *context) context 4337 drivers/platform/x86/sony-laptop.c struct sony_pic_dev *dev = (struct sony_pic_dev *)context; context 720 drivers/platform/x86/thinkpad_acpi.c u32 level, void *context, void **return_value) context 723 drivers/platform/x86/thinkpad_acpi.c if (!strcmp(context, "video")) { context 35 drivers/platform/x86/toshiba-wmi.c static void toshiba_wmi_notify(u32 value, void *context) context 49 drivers/platform/x86/wmi-bmof.c static int wmi_bmof_probe(struct wmi_device *wdev, const void *context) context 148 drivers/platform/x86/wmi.c return id->context; context 473 drivers/platform/x86/wmi.c static void wmi_notify_debug(u32 value, void *context) context 1274 drivers/platform/x86/wmi.c void *context) context 19 drivers/platform/x86/xiaomi-wmi.c .context = &(const unsigned int){key} context 26 drivers/platform/x86/xiaomi-wmi.c int xiaomi_wmi_probe(struct wmi_device *wdev, const void *context) context 30 drivers/platform/x86/xiaomi-wmi.c if (wdev == NULL || context == NULL) context 44 drivers/platform/x86/xiaomi-wmi.c data->key_code = *((const unsigned int *)context); context 287 drivers/pnp/pnpacpi/core.c u32 lvl, void *context, context 1518 drivers/rapidio/devices/rio_mport_cdev.c static int rio_mport_pw_handler(struct rio_mport *mport, void *context, context 1521 drivers/rapidio/devices/rio_mport_cdev.c struct mport_dev *md = context; context 38 drivers/rapidio/rio.c int (*pwcback)(struct rio_mport *mport, void *context, context 40 drivers/rapidio/rio.c void *context; context 562 drivers/rapidio/rio.c int rio_add_mport_pw_handler(struct rio_mport *mport, void *context, context 564 drivers/rapidio/rio.c void *context, union rio_pw_msg *msg, int step)) context 572 drivers/rapidio/rio.c pwrite->context = context; context 589 drivers/rapidio/rio.c int rio_del_mport_pw_handler(struct rio_mport *mport, void *context, context 591 drivers/rapidio/rio.c void *context, union rio_pw_msg *msg, int step)) context 598 drivers/rapidio/rio.c if (pwrite->pwcback == pwcback && pwrite->context == context) { context 1212 drivers/rapidio/rio.c pwrite->pwcback(mport, pwrite->context, pw_msg, 0); context 185 drivers/rapidio/rio_cm.c void *context; context 961 drivers/rapidio/rio_cm.c ch->context = NULL; context 1228 drivers/rapidio/rio_cm.c static int riocm_ch_bind(u16 ch_id, u8 mport_id, void *context) context 1264 drivers/rapidio/rio_cm.c ch->context = context; context 1439 drivers/remoteproc/remoteproc_core.c static void rproc_auto_boot_callback(const struct firmware *fw, void *context) context 1441 drivers/remoteproc/remoteproc_core.c struct rproc *rproc = context; context 1138 drivers/rtc/rtc-cmos.c static u32 rtc_handler(void *context) context 1140 drivers/rtc/rtc-cmos.c struct device *dev = context; context 159 drivers/rtc/rtc-meson.c static int meson_rtc_serial_bus_reg_read(void *context, unsigned int reg, context 162 drivers/rtc/rtc-meson.c struct meson_rtc *rtc = context; context 178 drivers/rtc/rtc-meson.c static int meson_rtc_serial_bus_reg_write(void *context, unsigned int reg, context 181 drivers/rtc/rtc-meson.c struct meson_rtc *rtc = context; context 258 drivers/rtc/rtc-meson.c static int meson_rtc_regmem_read(void *context, unsigned int offset, context 261 drivers/rtc/rtc-meson.c struct meson_rtc *rtc = context; context 270 drivers/rtc/rtc-meson.c static int meson_rtc_regmem_write(void *context, unsigned int offset, context 273 drivers/rtc/rtc-meson.c struct meson_rtc *rtc = context; context 212 drivers/rtc/rtc-palmas.c static irqreturn_t palmas_rtc_interrupt(int irq, void *context) context 214 drivers/rtc/rtc-palmas.c struct palmas_rtc *palmas_rtc = context; context 550 drivers/rtc/rtc-pcf2127.c static int pcf2127_i2c_write(void *context, const void *data, size_t count) context 552 drivers/rtc/rtc-pcf2127.c struct device *dev = context; context 563 drivers/rtc/rtc-pcf2127.c static int pcf2127_i2c_gather_write(void *context, context 567 drivers/rtc/rtc-pcf2127.c struct device *dev = context; context 592 drivers/rtc/rtc-pcf2127.c static int pcf2127_i2c_read(void *context, const void *reg, size_t reg_size, context 595 drivers/rtc/rtc-pcf2127.c struct device *dev = context; context 520 drivers/scsi/aacraid/aachba.c static void get_container_name_callback(void *context, struct fib * fibptr) context 525 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 621 drivers/scsi/aacraid/aachba.c static void _aac_probe_container2(void * context, struct fib * fibptr) context 625 drivers/scsi/aacraid/aachba.c struct scsi_cmnd * scsicmd = (struct scsi_cmnd *)context; context 680 drivers/scsi/aacraid/aachba.c static void _aac_probe_container1(void * context, struct fib * fibptr) context 692 drivers/scsi/aacraid/aachba.c _aac_probe_container2(context, fibptr); context 696 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 728 drivers/scsi/aacraid/aachba.c _aac_probe_container2(context, fibptr); context 976 drivers/scsi/aacraid/aachba.c static void get_container_serial_callback(void *context, struct fib * fibptr) context 983 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 1209 drivers/scsi/aacraid/aachba.c static void io_callback(void *context, struct fib * fibptr); context 1559 drivers/scsi/aacraid/aachba.c static void aac_srb_callback(void *context, struct fib * fibptr); context 2307 drivers/scsi/aacraid/aachba.c static void io_callback(void *context, struct fib * fibptr) context 2314 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 2600 drivers/scsi/aacraid/aachba.c static void synchronize_callback(void *context, struct fib *fibptr) context 2605 drivers/scsi/aacraid/aachba.c cmd = context; context 2762 drivers/scsi/aacraid/aachba.c static void aac_start_stop_callback(void *context, struct fib *fibptr) context 2764 drivers/scsi/aacraid/aachba.c struct scsi_cmnd *scsicmd = context; context 3470 drivers/scsi/aacraid/aachba.c static void aac_srb_callback(void *context, struct fib * fibptr) context 3475 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 3762 drivers/scsi/aacraid/aachba.c void aac_hba_callback(void *context, struct fib *fibptr) context 3770 drivers/scsi/aacraid/aachba.c scsicmd = (struct scsi_cmnd *) context; context 2683 drivers/scsi/aacraid/aacraid.h void aac_fib_free(struct fib * context); context 2684 drivers/scsi/aacraid/aacraid.h void aac_fib_init(struct fib * context); context 2686 drivers/scsi/aacraid/aacraid.h int aac_fib_send(u16 command, struct fib * context, unsigned long size, int priority, int wait, int reply, fib_callback callback, void *ctxt); context 2687 drivers/scsi/aacraid/aacraid.h int aac_hba_send(u8 command, struct fib *context, context 2691 drivers/scsi/aacraid/aacraid.h int aac_fib_complete(struct fib * context); context 2692 drivers/scsi/aacraid/aacraid.h void aac_hba_callback(void *context, struct fib *fibptr); context 177 drivers/scsi/aacraid/commctrl.c struct aac_fib_context * context; context 208 drivers/scsi/aacraid/commctrl.c context = list_entry(entry, struct aac_fib_context, next); context 209 drivers/scsi/aacraid/commctrl.c if (context->unique == fibctx->unique) { context 227 drivers/scsi/aacraid/dpcsup.c static void aac_aif_callback(void *context, struct fib * fibptr) context 234 drivers/scsi/aacraid/dpcsup.c fibctx = (struct fib *)context; context 852 drivers/scsi/aacraid/linit.c void aac_tmf_callback(void *context, struct fib *fibptr) context 856 drivers/scsi/aacraid/linit.c struct aac_hba_map_info *info = context; context 502 drivers/scsi/aic94xx/aic94xx_sas.h __le16 context; /* Clear nexus context */ context 754 drivers/scsi/be2iscsi/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, func, req->context, context 756 drivers/scsi/be2iscsi/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, valid, req->context, 1); context 757 drivers/scsi/be2iscsi/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, size, req->context, 0); context 758 drivers/scsi/be2iscsi/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, count, req->context, context 760 drivers/scsi/be2iscsi/be_cmds.c AMAP_SET_BITS(struct amap_eq_context, delaymult, req->context, context 762 drivers/scsi/be2iscsi/be_cmds.c be_dws_cpu_to_le(req->context, sizeof(req->context)); context 784 drivers/scsi/be2iscsi/be_cmds.c void *ctxt = &req->context; context 824 drivers/scsi/be2iscsi/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 866 drivers/scsi/be2iscsi/be_cmds.c ctxt = &req->context; context 885 drivers/scsi/be2iscsi/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 984 drivers/scsi/be2iscsi/be_cmds.c void *ctxt = &req->context; context 1032 drivers/scsi/be2iscsi/be_cmds.c be_dws_cpu_to_le(ctxt, sizeof(req->context)); context 317 drivers/scsi/be2iscsi/be_cmds.h u8 context[sizeof(struct amap_eq_context) / 8]; /* dw[4] */ context 617 drivers/scsi/be2iscsi/be_cmds.h u8 context[sizeof(struct amap_cq_context) / 8]; context 652 drivers/scsi/be2iscsi/be_cmds.h u8 context[sizeof(struct amap_mcc_context) / 8]; context 911 drivers/scsi/be2iscsi/be_cmds.h struct be_default_pdu_context context; context 511 drivers/scsi/bnx2fc/bnx2fc.h void bnx2fc_indicate_kcqe(void *context, struct kcqe *kcq[], context 859 drivers/scsi/bnx2fc/bnx2fc_fcoe.c static void bnx2fc_indicate_netevent(void *context, unsigned long event, context 862 drivers/scsi/bnx2fc/bnx2fc_fcoe.c struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context; context 1303 drivers/scsi/bnx2fc/bnx2fc_hwi.c void bnx2fc_indicate_kcqe(void *context, struct kcqe *kcq[], context 1306 drivers/scsi/bnx2fc/bnx2fc_hwi.c struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context; context 2474 drivers/scsi/bnx2i/bnx2i_hwi.c static void bnx2i_indicate_kcqe(void *context, struct kcqe *kcqe[], context 2477 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_hba *hba = context; context 2529 drivers/scsi/bnx2i/bnx2i_hwi.c static void bnx2i_indicate_netevent(void *context, unsigned long event, context 2532 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_hba *hba = context; context 2570 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context; context 2592 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context; context 2608 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context; context 2625 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context; context 2642 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context; context 2654 drivers/scsi/bnx2i/bnx2i_hwi.c static int bnx2i_send_nl_mesg(void *context, u32 msg_type, context 2657 drivers/scsi/bnx2i/bnx2i_hwi.c struct bnx2i_hba *hba = context; context 565 drivers/scsi/csiostor/csio_lnode.c csio_ln_fdmi_start(struct csio_lnode *ln, void *context) context 568 drivers/scsi/csiostor/csio_lnode.c struct csio_rnode *fdmi_rn = (struct csio_rnode *)context; context 360 drivers/scsi/cxlflash/ocxl_hw.c pid = current->mm->context.id; context 621 drivers/scsi/dpt_i2o.c static void *adpt_ioctl_from_context(adpt_hba *pHba, u32 context) context 624 drivers/scsi/dpt_i2o.c return (void *)(unsigned long)context; context 626 drivers/scsi/dpt_i2o.c void *p = pHba->ioctl_reply_context[context]; context 627 drivers/scsi/dpt_i2o.c pHba->ioctl_reply_context[context] = NULL; context 1264 drivers/scsi/dpt_i2o.c static void adpt_i2o_post_wait_complete(u32 context, int status) context 1280 drivers/scsi/dpt_i2o.c context &= 0x7fff; context 1284 drivers/scsi/dpt_i2o.c if(p1->id == context) { context 1293 drivers/scsi/dpt_i2o.c printk(KERN_DEBUG"dpti: Could Not find task %d in wait queue\n",context); context 2090 drivers/scsi/dpt_i2o.c u32 context; context 2140 drivers/scsi/dpt_i2o.c context = readl(reply+8); context 2141 drivers/scsi/dpt_i2o.c if(context & 0x40000000){ // IOCTL context 2148 drivers/scsi/dpt_i2o.c if(context & 0x80000000){ // Post wait message context 2155 drivers/scsi/dpt_i2o.c if(!(context & 0x40000000)) { context 2163 drivers/scsi/dpt_i2o.c printk(KERN_WARNING"%s: Apparent SCSI cmd in Post Wait Context - cmd=%p context=%x\n", pHba->name, cmd, context); context 2166 drivers/scsi/dpt_i2o.c adpt_i2o_post_wait_complete(context, status); context 995 drivers/scsi/esas2r/esas2r.h void esas2r_adapter_tasklet(unsigned long context); context 75 drivers/scsi/esas2r/esas2r_ioctl.c void *context; context 267 drivers/scsi/esas2r/esas2r_ioctl.c if (!(*bi->callback)(a, rq, &sgc, bi->context)) { context 295 drivers/scsi/esas2r/esas2r_ioctl.c struct esas2r_sg_context *sgc, void *context) context 392 drivers/scsi/esas2r/esas2r_ioctl.c struct esas2r_sg_context *sgc, void *context) context 394 drivers/scsi/esas2r/esas2r_ioctl.c struct atto_csmi *ci = (struct atto_csmi *)context; context 607 drivers/scsi/esas2r/esas2r_ioctl.c struct esas2r_request *rq, void *context) context 609 drivers/scsi/esas2r/esas2r_ioctl.c struct atto_csmi *ci = (struct atto_csmi *)context; context 657 drivers/scsi/esas2r/esas2r_ioctl.c bi.context = ci; context 747 drivers/scsi/esas2r/esas2r_ioctl.c void *context) context 1204 drivers/scsi/esas2r/esas2r_ioctl.c struct esas2r_request *rq, void *context) context 1240 drivers/scsi/esas2r/esas2r_ioctl.c bi.context = NULL; context 1542 drivers/scsi/esas2r/esas2r_main.c void esas2r_adapter_tasklet(unsigned long context) context 1544 drivers/scsi/esas2r/esas2r_main.c struct esas2r_adapter *a = (struct esas2r_adapter *)context; context 1326 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c u32 irq_value, context, port_id, link_rate; context 1340 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c context = hisi_sas_read32(hisi_hba, PHY_CONTEXT); context 1341 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c if (context & 1 << phy_no) { context 1400 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c u32 context; context 1402 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c context = hisi_sas_read32(hisi_hba, PHY_CONTEXT); context 1403 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c if (context & (1 << phy_no)) context 1439 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c u32 context, port_id, link_rate; context 1462 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c context = hisi_sas_read32(hisi_hba, PHY_CONTEXT); context 1463 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c if (context & (1 << phy_no)) { context 102 drivers/scsi/hptiop.c if (readl(&p->context)) context 105 drivers/scsi/hptiop.c writel(1, &p->context); context 305 drivers/scsi/hptiop.c writel(0, &req->context); context 312 drivers/scsi/hptiop.c if (readl(&req->context)) context 438 drivers/scsi/hptiop.c req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_GET_CONFIG<<5); context 515 drivers/scsi/hptiop.c req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_SET_CONFIG<<5); context 538 drivers/scsi/hptiop.c req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_SET_CONFIG<<5); context 724 drivers/scsi/hptiop.c req->header.context, tag); context 806 drivers/scsi/hptiop.c readl(&req->context), tag); context 813 drivers/scsi/hptiop.c (readl(&req->context) | context 879 drivers/scsi/hptiop.c reqhdr->context = cpu_to_le32(IOPMU_QUEUE_ADDR_HOST_BIT | context 907 drivers/scsi/hptiop.c reqhdr->context = cpu_to_le32(_req->index<<8 | context 934 drivers/scsi/hptiop.c reqhdr->context = cpu_to_le32(IOPMU_QUEUE_ADDR_HOST_BIT | context 148 drivers/scsi/hptiop.h __le32 context; /* host context */ context 956 drivers/scsi/isci/remote_device.c u32 context; context 958 drivers/scsi/isci/remote_device.c context = request | context 963 drivers/scsi/isci/remote_device.c sci_controller_post_request(iport->owning_controller, context); context 146 drivers/scsi/lpfc/lpfc.h struct lpfc_nvmet_rcv_ctx *context; context 166 drivers/scsi/lpfc/lpfc.h void *context; context 1560 drivers/scsi/lpfc/lpfc_ct.c uint8_t retry, uint32_t context) context 1614 drivers/scsi/lpfc/lpfc_ct.c context); context 1656 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.gid.Fc4Type = context; context 1668 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.gid.PortType = context; context 1680 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.gff.PortId = cpu_to_be32(context); context 1687 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.gft.PortId = cpu_to_be32(context); context 1776 drivers/scsi/lpfc/lpfc_ct.c (context == FC_TYPE_NVME)) { context 1784 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.rff.type_code = context; context 1788 drivers/scsi/lpfc/lpfc_ct.c (context == FC_TYPE_FCP)) context 1789 drivers/scsi/lpfc/lpfc_ct.c CtReq->un.rff.type_code = context; context 1798 drivers/scsi/lpfc/lpfc_ct.c (context == FC_TYPE_NVME) ? "NVME" : "FCP", context 1136 drivers/scsi/lpfc/lpfc_hw4.h struct eq_context context; context 1178 drivers/scsi/lpfc/lpfc_hw4.h uint32_t context[2]; context 1260 drivers/scsi/lpfc/lpfc_hw4.h struct cq_context context; context 1578 drivers/scsi/lpfc/lpfc_hw4.h struct rq_context context; context 1630 drivers/scsi/lpfc/lpfc_hw4.h struct rq_context context; context 1696 drivers/scsi/lpfc/lpfc_hw4.h struct mq_context context; context 1747 drivers/scsi/lpfc/lpfc_hw4.h struct mq_context context; context 12524 drivers/scsi/lpfc/lpfc_init.c lpfc_write_firmware(const struct firmware *fw, void *context) context 12526 drivers/scsi/lpfc/lpfc_init.c struct lpfc_hba *phba = (struct lpfc_hba *)context; context 372 drivers/scsi/lpfc/lpfc_nvmet.c struct lpfc_nvmet_rcv_ctx *ctxp = ctx_buf->context; context 431 drivers/scsi/lpfc/lpfc_nvmet.c ctxp = (struct lpfc_nvmet_rcv_ctx *)ctx_buf->context; context 1262 drivers/scsi/lpfc/lpfc_nvmet.c kfree(ctx_buf->context); context 1370 drivers/scsi/lpfc/lpfc_nvmet.c ctx_buf->context = kzalloc(sizeof(*ctx_buf->context), context 1372 drivers/scsi/lpfc/lpfc_nvmet.c if (!ctx_buf->context) { context 1379 drivers/scsi/lpfc/lpfc_nvmet.c ctx_buf->context->ctxbuf = ctx_buf; context 1380 drivers/scsi/lpfc/lpfc_nvmet.c ctx_buf->context->state = LPFC_NVMET_STE_FREE; context 1384 drivers/scsi/lpfc/lpfc_nvmet.c kfree(ctx_buf->context); context 1403 drivers/scsi/lpfc/lpfc_nvmet.c kfree(ctx_buf->context); context 2044 drivers/scsi/lpfc/lpfc_nvmet.c struct lpfc_nvmet_rcv_ctx *ctxp = ctx_buf->context; context 2319 drivers/scsi/lpfc/lpfc_nvmet.c ctxp = (struct lpfc_nvmet_rcv_ctx *)ctx_buf->context; context 5172 drivers/scsi/lpfc/lpfc_scsi.c uint64_t lun_id, lpfc_ctx_cmd context) context 5178 drivers/scsi/lpfc/lpfc_scsi.c cnt = lpfc_sli_sum_iocb(vport, tgt_id, lun_id, context); context 5182 drivers/scsi/lpfc/lpfc_scsi.c tgt_id, lun_id, context); context 5186 drivers/scsi/lpfc/lpfc_scsi.c cnt = lpfc_sli_sum_iocb(vport, tgt_id, lun_id, context); context 5191 drivers/scsi/lpfc/lpfc_scsi.c ((context == LPFC_CTX_LUN) ? "LUN" : context 5192 drivers/scsi/lpfc/lpfc_scsi.c ((context == LPFC_CTX_TGT) ? "TGT" : context 5193 drivers/scsi/lpfc/lpfc_scsi.c ((context == LPFC_CTX_HOST) ? "HOST" : "Unknown"))), context 14778 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_size, &eq_create->u.request.context, context 14780 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_valid, &eq_create->u.request.context, 1); context 14786 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_autovalid, &eq_create->u.request.context, context 14792 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_delay_multi, &eq_create->u.request.context, context 14805 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_count, &eq_create->u.request.context, context 14809 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_count, &eq_create->u.request.context, context 14813 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_count, &eq_create->u.request.context, context 14817 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_count, &eq_create->u.request.context, context 14821 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_eq_context_count, &eq_create->u.request.context, context 14907 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_event, &cq_create->u.request.context, 1); context 14908 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_valid, &cq_create->u.request.context, 1); context 14914 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_eq_id_2, &cq_create->u.request.context, context 14916 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_autovalid, &cq_create->u.request.context, context 14919 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_eq_id, &cq_create->u.request.context, context 14927 drivers/scsi/lpfc/lpfc_sli.c cq_create->u.request.context.lpfc_cq_context_count = context 14930 drivers/scsi/lpfc/lpfc_sli.c &cq_create->u.request.context, context 14947 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_count, &cq_create->u.request.context, context 14951 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_count, &cq_create->u.request.context, context 14955 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_cq_context_count, &cq_create->u.request.context, context 15284 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_cq_id, &mq_create->u.request.context, context 15286 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_valid, &mq_create->u.request.context, 1); context 15289 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context, context 15293 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context, context 15297 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context, context 15301 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context, context 15376 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_valid, &mq_create_ext->u.request.context, 1); context 15383 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_mq_context_cq_id, &mq_create_ext->u.request.context, context 15397 drivers/scsi/lpfc/lpfc_sli.c &mq_create_ext->u.request.context, context 15402 drivers/scsi/lpfc/lpfc_sli.c &mq_create_ext->u.request.context, context 15407 drivers/scsi/lpfc/lpfc_sli.c &mq_create_ext->u.request.context, context 15412 drivers/scsi/lpfc/lpfc_sli.c &mq_create_ext->u.request.context, context 15793 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15795 drivers/scsi/lpfc/lpfc_sli.c rq_create->u.request.context.buffer_size = LPFC_HDR_BUF_SIZE; context 15797 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15800 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15815 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15820 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15825 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15830 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15834 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_rq_context_buf_size, &rq_create->u.request.context, context 15837 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_rq_context_cq_id, &rq_create->u.request.context, context 15928 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, hrq->entry_count); context 15930 drivers/scsi/lpfc/lpfc_sli.c rq_create->u.request.context.buffer_size = context 15933 drivers/scsi/lpfc/lpfc_sli.c rq_create->u.request.context.buffer_size = context 15935 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_rq_context_rqe_size, &rq_create->u.request.context, context 15937 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_rq_context_page_size, &rq_create->u.request.context, context 15952 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15957 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15962 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15967 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15973 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15977 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 15980 drivers/scsi/lpfc/lpfc_sli.c bf_set(lpfc_rq_context_cq_id, &rq_create->u.request.context, context 16120 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 16123 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 16126 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 16129 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 16132 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 16135 drivers/scsi/lpfc/lpfc_sli.c &rq_create->u.request.context, context 1738 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1806 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1850 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1879 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1902 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1930 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1957 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 1984 drivers/scsi/megaraid/megaraid_sas.h __le32 context; /*08h */ context 2573 drivers/scsi/megaraid/megaraid_sas.h } context; context 355 drivers/scsi/megaraid/megaraid_sas_base.c cmd->frame->io.context = cpu_to_le32(cmd->index); context 2209 drivers/scsi/megaraid/megaraid_sas_base.c u32 context; context 2225 drivers/scsi/megaraid/megaraid_sas_base.c context = le32_to_cpu(instance->reply_queue[consumer]); context 2226 drivers/scsi/megaraid/megaraid_sas_base.c if (context >= instance->max_fw_cmds) { context 2228 drivers/scsi/megaraid/megaraid_sas_base.c context); context 2232 drivers/scsi/megaraid/megaraid_sas_base.c cmd = instance->cmd_list[context]; context 4271 drivers/scsi/megaraid/megaraid_sas_base.c cmd->frame->io.context = cpu_to_le32(cmd->index); context 5353 drivers/scsi/megaraid/megaraid_sas_base.c __le32 context; context 5377 drivers/scsi/megaraid/megaraid_sas_base.c context = init_frame->context; context 5380 drivers/scsi/megaraid/megaraid_sas_base.c init_frame->context = context; context 8069 drivers/scsi/megaraid/megaraid_sas_base.c cmd->frame->hdr.context = cpu_to_le32(cmd->index); context 3833 drivers/scsi/megaraid/megaraid_sas_fusion.c mfi_cmd->context.smid = cmd->index; context 3882 drivers/scsi/megaraid/megaraid_sas_fusion.c index = cmd->context.smid; context 4244 drivers/scsi/megaraid/megaraid_sas_fusion.c smid = le16_to_cpu(cmd_mfi->context.smid); context 4441 drivers/scsi/megaraid/megaraid_sas_fusion.c cmd_mfi->context.smid = cmd_fusion->index; context 356 drivers/scsi/mpt3sas/mpt3sas_ctl.c event_log[i].context = ioc->event_context++; context 222 drivers/scsi/mpt3sas/mpt3sas_ctl.h uint32_t context; context 25 drivers/scsi/qedf/drv_fcoe_fw_funcs.c struct e4_fcoe_task_context *ctx = task_params->context; context 118 drivers/scsi/qedf/drv_fcoe_fw_funcs.c struct e4_fcoe_task_context *ctx = task_params->context; context 13 drivers/scsi/qedf/drv_fcoe_fw_funcs.h struct e4_fcoe_task_context *context; context 631 drivers/scsi/qedf/qedf_io.c io_req->task_params->context = task_ctx; context 710 drivers/scsi/qedf/qedf_io.c io_req->task_params->context = task_ctx; context 1081 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 1155 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 1526 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 1666 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 1783 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 2143 drivers/scsi/qedi/qedi_fw.c task_params.context = fw_task_ctx; context 205 drivers/scsi/qedi/qedi_fw_api.c struct e4_iscsi_task_context *context; context 210 drivers/scsi/qedi/qedi_fw_api.c context = task_params->context; context 211 drivers/scsi/qedi/qedi_fw_api.c val_byte = context->mstorm_ag_context.cdu_validation; context 212 drivers/scsi/qedi/qedi_fw_api.c memset(context, 0, sizeof(*context)); context 213 drivers/scsi/qedi/qedi_fw_api.c context->mstorm_ag_context.cdu_validation = val_byte; context 216 drivers/scsi/qedi/qedi_fw_api.c ARRAY_SIZE(context->ystorm_st_context.pdu_hdr.data.data); context 219 drivers/scsi/qedi/qedi_fw_api.c context->ystorm_st_context.pdu_hdr.data.data[index] = val; context 222 drivers/scsi/qedi/qedi_fw_api.c context->mstorm_st_context.task_type = task_type; context 223 drivers/scsi/qedi/qedi_fw_api.c context->mstorm_ag_context.task_cid = context 226 drivers/scsi/qedi/qedi_fw_api.c SET_FIELD(context->ustorm_ag_context.flags1, context 229 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_st_context.task_type = task_type; context 230 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_st_context.cq_rss_number = task_params->cq_rss_number; context 231 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_ag_context.icid = cpu_to_le16(task_params->conn_icid); context 274 drivers/scsi/qedi/qedi_fw_api.c void set_rw_exp_data_acked_and_cont_len(struct e4_iscsi_task_context *context, context 286 drivers/scsi/qedi/qedi_fw_api.c SET_FIELD(context->ustorm_st_context.flags2, context 298 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_ag_context.exp_data_acked = context 307 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_ag_context.exp_data_acked = val; context 310 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_ag_context.exp_data_acked = context 317 drivers/scsi/qedi/qedi_fw_api.c context->ustorm_ag_context.exp_cont_len = val; context 473 drivers/scsi/qedi/qedi_fw_api.c static void set_local_completion_context(struct e4_iscsi_task_context *context) context 475 drivers/scsi/qedi/qedi_fw_api.c SET_FIELD(context->ystorm_st_context.state.flags, context 477 drivers/scsi/qedi/qedi_fw_api.c SET_FIELD(context->ustorm_st_context.flags, context 501 drivers/scsi/qedi/qedi_fw_api.c cxt = task_params->context; context 563 drivers/scsi/qedi/qedi_fw_api.c init_ustorm_task_contexts(&task_params->context->ustorm_st_context, context 564 drivers/scsi/qedi/qedi_fw_api.c &task_params->context->ustorm_ag_context, context 569 drivers/scsi/qedi/qedi_fw_api.c set_rw_exp_data_acked_and_cont_len(task_params->context, conn_params, context 576 drivers/scsi/qedi/qedi_fw_api.c init_rtdif_task_context(&task_params->context->rdif_context, context 577 drivers/scsi/qedi/qedi_fw_api.c &task_params->context->tdif_context, context 620 drivers/scsi/qedi/qedi_fw_api.c cxt = task_params->context; context 662 drivers/scsi/qedi/qedi_fw_api.c cxt = task_params->context; context 669 drivers/scsi/qedi/qedi_fw_api.c set_local_completion_context(task_params->context); context 708 drivers/scsi/qedi/qedi_fw_api.c cxt = task_params->context; context 763 drivers/scsi/qedi/qedi_fw_api.c cxt = task_params->context; context 13 drivers/scsi/qedi/qedi_fw_iscsi.h struct e4_iscsi_task_context *context; context 62 drivers/scsi/qedi/qedi_main.c static int qedi_iscsi_event_cb(void *context, u8 fw_event_code, void *fw_handle) context 69 drivers/scsi/qedi/qedi_main.c if (!context || !fw_handle) { context 74 drivers/scsi/qedi/qedi_main.c qedi = (struct qedi_ctx *)context; context 1174 drivers/scsi/qla2xxx/qla_bsg.c ql84_mgmt->mgmt.mgmtp.u.info.context); context 106 drivers/scsi/qla2xxx/qla_bsg.h uint32_t context; context 1044 drivers/scsi/smartpqi/smartpqi.h void *context); context 1045 drivers/scsi/smartpqi/smartpqi.h void *context; context 2973 drivers/scsi/smartpqi/smartpqi_init.c io_request->context); context 3983 drivers/scsi/smartpqi/smartpqi_init.c void *context) context 3985 drivers/scsi/smartpqi/smartpqi_init.c struct completion *waiting = context; context 4080 drivers/scsi/smartpqi/smartpqi_init.c io_request->context = &wait; context 4917 drivers/scsi/smartpqi/smartpqi_init.c void *context) context 5078 drivers/scsi/smartpqi/smartpqi_init.c void *context) context 5188 drivers/scsi/smartpqi/smartpqi_init.c void *context) context 5608 drivers/scsi/smartpqi/smartpqi_init.c void *context) context 5610 drivers/scsi/smartpqi/smartpqi_init.c struct completion *waiting = context; context 5649 drivers/scsi/smartpqi/smartpqi_init.c io_request->context = &wait; context 7806 drivers/scsi/smartpqi/smartpqi_init.c io_request->context); context 402 drivers/scsi/storvsc_drv.c static void storvsc_on_channel_callback(void *context); context 1150 drivers/scsi/storvsc_drv.c static void storvsc_on_channel_callback(void *context) context 1152 drivers/scsi/storvsc_drv.c struct vmbus_channel *channel = (struct vmbus_channel *)context; context 204 drivers/scsi/vmw_pvscsi.c pvscsi_get_context(const struct pvscsi_adapter *adapter, u64 context) context 206 drivers/scsi/vmw_pvscsi.c return &adapter->cmd_map[context - 1]; context 266 drivers/scsi/vmw_pvscsi.c cmd.context = pvscsi_map_context(adapter, ctx); context 543 drivers/scsi/vmw_pvscsi.c ctx = pvscsi_get_context(adapter, e->context); context 751 drivers/scsi/vmw_pvscsi.c e->context = pvscsi_map_context(adapter, ctx); context 172 drivers/scsi/vmw_pvscsi.h u64 context; context 324 drivers/scsi/vmw_pvscsi.h u64 context; context 377 drivers/scsi/vmw_pvscsi.h u64 context; context 1172 drivers/soc/mediatek/mtk-pmic-wrap.c static int pwrap_regmap_read(void *context, u32 adr, u32 *rdata) context 1174 drivers/soc/mediatek/mtk-pmic-wrap.c return pwrap_read(context, adr, rdata); context 1177 drivers/soc/mediatek/mtk-pmic-wrap.c static int pwrap_regmap_write(void *context, u32 adr, u32 wdata) context 1179 drivers/soc/mediatek/mtk-pmic-wrap.c return pwrap_write(context, adr, wdata); context 323 drivers/spi/spi-mpc512x-psc.c m->complete(m->context); context 246 drivers/spi/spi-mpc52xx-psc.c m->complete(m->context); context 237 drivers/spi/spi-mpc52xx.c ms->message->complete(ms->message->context); context 292 drivers/spi/spi-mpc52xx.c ms->message->complete(ms->message->context); context 312 drivers/spi/spi-sh.c mesg->complete(mesg->context); context 331 drivers/spi/spi-sh.c mesg->complete(mesg->context); context 104 drivers/spi/spi-slave-system-control.c priv->msg.context = priv; context 78 drivers/spi/spi-slave-time.c priv->msg.context = priv; context 357 drivers/spi/spi-topcliff-pch.c data->current_msg->complete(data->current_msg->context); context 589 drivers/spi/spi-topcliff-pch.c pmsg->complete(pmsg->context); context 642 drivers/spi/spi-topcliff-pch.c data->current_msg->complete(data->current_msg->context); context 674 drivers/spi/spi-topcliff-pch.c pmsg->complete(pmsg->context); context 1138 drivers/spi/spi-topcliff-pch.c pmsg->complete(pmsg->context); context 1206 drivers/spi/spi-topcliff-pch.c (data->current_msg->context); context 267 drivers/spi/spi-txx9.c m->complete(m->context); context 1534 drivers/spi/spi.c mesg->complete(mesg->context); context 3424 drivers/spi/spi.c message->context = &done; context 3462 drivers/spi/spi.c message->context = NULL; context 142 drivers/staging/comedi/comedi_pci.c unsigned long context) context 144 drivers/staging/comedi/comedi_pci.c return comedi_auto_config(&pcidev->dev, driver, context); context 35 drivers/staging/comedi/comedi_pci.h unsigned long context); context 65 drivers/staging/comedi/comedi_usb.c unsigned long context) context 67 drivers/staging/comedi/comedi_usb.c return comedi_auto_config(&intf->dev, driver, context); context 20 drivers/staging/comedi/comedi_usb.h struct comedi_driver *driver, unsigned long context); context 445 drivers/staging/comedi/comedidev.h int (*auto_attach)(struct comedi_device *dev, unsigned long context); context 992 drivers/staging/comedi/comedidev.h struct comedi_insn *insn, unsigned long context), context 993 drivers/staging/comedi/comedidev.h unsigned long context); context 1026 drivers/staging/comedi/comedidev.h unsigned long context), context 1027 drivers/staging/comedi/comedidev.h unsigned long context); context 1036 drivers/staging/comedi/comedidev.h struct comedi_driver *driver, unsigned long context); context 294 drivers/staging/comedi/drivers.c unsigned long context), context 295 drivers/staging/comedi/drivers.c unsigned long context) context 301 drivers/staging/comedi/drivers.c ret = cb(dev, s, insn, context); context 843 drivers/staging/comedi/drivers.c unsigned long context), context 844 drivers/staging/comedi/drivers.c unsigned long context) context 854 drivers/staging/comedi/drivers.c ret = cb(dev, fw->data, fw->size, context); context 1033 drivers/staging/comedi/drivers.c struct comedi_driver *driver, unsigned long context) context 1067 drivers/staging/comedi/drivers.c ret = driver->auto_attach(dev, context); context 195 drivers/staging/comedi/drivers/8255_pci.c unsigned long context) context 203 drivers/staging/comedi/drivers/8255_pci.c if (context < ARRAY_SIZE(pci_8255_boards)) context 204 drivers/staging/comedi/drivers/8255_pci.c board = &pci_8255_boards[context]; context 745 drivers/staging/comedi/drivers/addi_apci_1500.c unsigned long context) context 108 drivers/staging/comedi/drivers/addi_apci_1516.c unsigned long context) context 116 drivers/staging/comedi/drivers/addi_apci_1516.c if (context < ARRAY_SIZE(apci1516_boardtypes)) context 117 drivers/staging/comedi/drivers/addi_apci_1516.c board = &apci1516_boardtypes[context]; context 89 drivers/staging/comedi/drivers/addi_apci_16xx.c unsigned long context) context 99 drivers/staging/comedi/drivers/addi_apci_16xx.c if (context < ARRAY_SIZE(apci16xx_boardtypes)) context 100 drivers/staging/comedi/drivers/addi_apci_16xx.c board = &apci16xx_boardtypes[context]; context 693 drivers/staging/comedi/drivers/addi_apci_3120.c unsigned long context) context 750 drivers/staging/comedi/drivers/addi_apci_3120.c unsigned long context) context 965 drivers/staging/comedi/drivers/addi_apci_3120.c unsigned long context) context 974 drivers/staging/comedi/drivers/addi_apci_3120.c if (context < ARRAY_SIZE(apci3120_boardtypes)) context 975 drivers/staging/comedi/drivers/addi_apci_3120.c board = &apci3120_boardtypes[context]; context 1008 drivers/staging/comedi/drivers/addi_apci_3120.c context == BOARD_APCI3001) context 422 drivers/staging/comedi/drivers/addi_apci_3xxx.c unsigned long context) context 599 drivers/staging/comedi/drivers/addi_apci_3xxx.c unsigned long context) context 754 drivers/staging/comedi/drivers/addi_apci_3xxx.c unsigned long context) context 764 drivers/staging/comedi/drivers/addi_apci_3xxx.c if (context < ARRAY_SIZE(apci3xxx_boardtypes)) context 765 drivers/staging/comedi/drivers/addi_apci_3xxx.c board = &apci3xxx_boardtypes[context]; context 45 drivers/staging/comedi/drivers/adl_pci6208.c unsigned long context) context 147 drivers/staging/comedi/drivers/adl_pci7x3x.c unsigned long context) context 156 drivers/staging/comedi/drivers/adl_pci7x3x.c if (context < ARRAY_SIZE(adl_pci7x3x_boards)) context 157 drivers/staging/comedi/drivers/adl_pci7x3x.c board = &adl_pci7x3x_boards[context]; context 519 drivers/staging/comedi/drivers/adl_pci9111.c unsigned long context) context 1305 drivers/staging/comedi/drivers/adl_pci9118.c unsigned long context) context 1667 drivers/staging/comedi/drivers/adl_pci9118.c unsigned long context) context 1672 drivers/staging/comedi/drivers/adl_pci9118.c if (context < ARRAY_SIZE(pci9118_boards)) context 1673 drivers/staging/comedi/drivers/adl_pci9118.c board = &pci9118_boards[context]; context 96 drivers/staging/comedi/drivers/adq12b.c unsigned long context) context 290 drivers/staging/comedi/drivers/adv_pci1710.c unsigned long context) context 746 drivers/staging/comedi/drivers/adv_pci1710.c unsigned long context) context 755 drivers/staging/comedi/drivers/adv_pci1710.c if (context < ARRAY_SIZE(boardtypes)) context 756 drivers/staging/comedi/drivers/adv_pci1710.c board = &boardtypes[context]; context 113 drivers/staging/comedi/drivers/adv_pci1720.c unsigned long context) context 76 drivers/staging/comedi/drivers/adv_pci1724.c unsigned long context) context 340 drivers/staging/comedi/drivers/adv_pci1760.c unsigned long context) context 345 drivers/staging/comedi/drivers/adv_pci_dio.c unsigned long context) context 353 drivers/staging/comedi/drivers/adv_pci_dio.c if (context < ARRAY_SIZE(boardtypes)) context 354 drivers/staging/comedi/drivers/adv_pci_dio.c board = &boardtypes[context]; context 363 drivers/staging/comedi/drivers/adv_pci_dio.c if (context == TYPE_PCI1736) context 368 drivers/staging/comedi/drivers/adv_pci_dio.c pci_dio_reset(dev, context); context 102 drivers/staging/comedi/drivers/aio_aio12_8.c unsigned long context) context 711 drivers/staging/comedi/drivers/amplc_pci230.c unsigned long context) context 48 drivers/staging/comedi/drivers/c6xdigio.c static int c6xdigio_chk_status(struct comedi_device *dev, unsigned long context) context 55 drivers/staging/comedi/drivers/c6xdigio.c if ((status & 0x80) != context) context 129 drivers/staging/comedi/drivers/cb_das16_cs.c unsigned long context) context 342 drivers/staging/comedi/drivers/cb_das16_cs.c unsigned long context) context 313 drivers/staging/comedi/drivers/cb_pcidas.c unsigned long context) context 471 drivers/staging/comedi/drivers/cb_pcidas.c unsigned long context) context 1248 drivers/staging/comedi/drivers/cb_pcidas.c unsigned long context) context 1257 drivers/staging/comedi/drivers/cb_pcidas.c if (context < ARRAY_SIZE(cb_pcidas_boards)) context 1258 drivers/staging/comedi/drivers/cb_pcidas.c board = &cb_pcidas_boards[context]; context 1734 drivers/staging/comedi/drivers/cb_pcidas64.c unsigned long context) context 3974 drivers/staging/comedi/drivers/cb_pcidas64.c unsigned long context) context 3982 drivers/staging/comedi/drivers/cb_pcidas64.c if (context < ARRAY_SIZE(pcidas64_boards)) context 3983 drivers/staging/comedi/drivers/cb_pcidas64.c board = &pcidas64_boards[context]; context 328 drivers/staging/comedi/drivers/cb_pcidda.c unsigned long context) context 337 drivers/staging/comedi/drivers/cb_pcidda.c if (context < ARRAY_SIZE(cb_pcidda_boards)) context 338 drivers/staging/comedi/drivers/cb_pcidda.c board = &cb_pcidda_boards[context]; context 160 drivers/staging/comedi/drivers/cb_pcimdas.c unsigned long context) context 308 drivers/staging/comedi/drivers/daqboard2000.c struct comedi_insn *insn, unsigned long context) context 313 drivers/staging/comedi/drivers/daqboard2000.c if (status & context) context 384 drivers/staging/comedi/drivers/daqboard2000.c struct comedi_insn *insn, unsigned long context) context 532 drivers/staging/comedi/drivers/daqboard2000.c size_t len, unsigned long context) context 678 drivers/staging/comedi/drivers/daqboard2000.c static int db2k_auto_attach(struct comedi_device *dev, unsigned long context) context 686 drivers/staging/comedi/drivers/daqboard2000.c if (context >= ARRAY_SIZE(db2k_boardtypes)) context 688 drivers/staging/comedi/drivers/daqboard2000.c board = &db2k_boardtypes[context]; context 158 drivers/staging/comedi/drivers/das08.c unsigned long context) context 51 drivers/staging/comedi/drivers/das08_cs.c unsigned long context) context 812 drivers/staging/comedi/drivers/das16.c unsigned long context) context 312 drivers/staging/comedi/drivers/das16m1.c unsigned long context) context 930 drivers/staging/comedi/drivers/das1800.c unsigned long context) context 368 drivers/staging/comedi/drivers/das6402.c unsigned long context) context 511 drivers/staging/comedi/drivers/das800.c unsigned long context) context 193 drivers/staging/comedi/drivers/dmm32at.c unsigned long context) context 197 drivers/staging/comedi/drivers/dmm32at.c status = inb(dev->iobase + context); context 441 drivers/staging/comedi/drivers/dmm32at.c unsigned long context) context 462 drivers/staging/comedi/drivers/dt2811.c unsigned long context) context 58 drivers/staging/comedi/drivers/dt2814.c unsigned long context) context 61 drivers/staging/comedi/drivers/dt2815.c unsigned long context) context 66 drivers/staging/comedi/drivers/dt2815.c if (status == context) context 589 drivers/staging/comedi/drivers/dt282x.c unsigned long context) context 594 drivers/staging/comedi/drivers/dt282x.c switch (context) { context 612 drivers/staging/comedi/drivers/dt3000.c unsigned long context) context 620 drivers/staging/comedi/drivers/dt3000.c if (context < ARRAY_SIZE(dt3k_boardtypes)) context 621 drivers/staging/comedi/drivers/dt3000.c board = &dt3k_boardtypes[context]; context 753 drivers/staging/comedi/drivers/dt9812.c unsigned long context) context 52 drivers/staging/comedi/drivers/dyna_pci10xx.c unsigned long context) context 91 drivers/staging/comedi/drivers/icp_multi.c unsigned long context) context 144 drivers/staging/comedi/drivers/icp_multi.c unsigned long context) context 171 drivers/staging/comedi/drivers/ii_pci20kc.c unsigned long context) context 421 drivers/staging/comedi/drivers/jr3_pci.c unsigned long context) context 671 drivers/staging/comedi/drivers/jr3_pci.c unsigned long context) context 684 drivers/staging/comedi/drivers/jr3_pci.c if (context < ARRAY_SIZE(jr3_pci_boards)) context 685 drivers/staging/comedi/drivers/jr3_pci.c board = &jr3_pci_boards[context]; context 310 drivers/staging/comedi/drivers/me4000.c unsigned long context) context 450 drivers/staging/comedi/drivers/me4000.c unsigned long context) context 1096 drivers/staging/comedi/drivers/me4000.c unsigned long context) context 1104 drivers/staging/comedi/drivers/me4000.c if (context < ARRAY_SIZE(me4000_boards)) context 1105 drivers/staging/comedi/drivers/me4000.c board = &me4000_boards[context]; context 218 drivers/staging/comedi/drivers/me_daq.c unsigned long context) context 341 drivers/staging/comedi/drivers/me_daq.c unsigned long context) context 428 drivers/staging/comedi/drivers/me_daq.c unsigned long context) context 436 drivers/staging/comedi/drivers/me_daq.c if (context < ARRAY_SIZE(me_boards)) context 437 drivers/staging/comedi/drivers/me_daq.c board = &me_boards[context]; context 110 drivers/staging/comedi/drivers/mf6x4.c unsigned long context) context 179 drivers/staging/comedi/drivers/mf6x4.c static int mf6x4_auto_attach(struct comedi_device *dev, unsigned long context) context 187 drivers/staging/comedi/drivers/mf6x4.c if (context < ARRAY_SIZE(mf6x4_boards)) context 188 drivers/staging/comedi/drivers/mf6x4.c board = &mf6x4_boards[context]; context 191 drivers/staging/comedi/drivers/mpc624.c unsigned long context) context 84 drivers/staging/comedi/drivers/multiq3.c unsigned long context) context 89 drivers/staging/comedi/drivers/multiq3.c if (status & context) context 371 drivers/staging/comedi/drivers/ni_6527.c unsigned long context) context 379 drivers/staging/comedi/drivers/ni_6527.c if (context < ARRAY_SIZE(ni6527_boards)) context 380 drivers/staging/comedi/drivers/ni_6527.c board = &ni6527_boards[context]; context 629 drivers/staging/comedi/drivers/ni_65xx.c unsigned long context) context 637 drivers/staging/comedi/drivers/ni_65xx.c if (context < ARRAY_SIZE(ni_65xx_boards)) context 638 drivers/staging/comedi/drivers/ni_65xx.c board = &ni_65xx_boards[context]; context 998 drivers/staging/comedi/drivers/ni_660x.c unsigned long context) context 1011 drivers/staging/comedi/drivers/ni_660x.c if (context < ARRAY_SIZE(ni_660x_boards)) context 1012 drivers/staging/comedi/drivers/ni_660x.c board = &ni_660x_boards[context]; context 157 drivers/staging/comedi/drivers/ni_670x.c unsigned long context) context 166 drivers/staging/comedi/drivers/ni_670x.c if (context < ARRAY_SIZE(ni_670x_boards)) context 167 drivers/staging/comedi/drivers/ni_670x.c board = &ni_670x_boards[context]; context 568 drivers/staging/comedi/drivers/ni_at_a2150.c unsigned long context) context 454 drivers/staging/comedi/drivers/ni_atmio16d.c unsigned long context) context 116 drivers/staging/comedi/drivers/ni_daq_700.c unsigned long context) context 210 drivers/staging/comedi/drivers/ni_daq_700.c unsigned long context) context 31 drivers/staging/comedi/drivers/ni_daq_dio24.c unsigned long context) context 215 drivers/staging/comedi/drivers/ni_labpc_common.c unsigned long context) context 1147 drivers/staging/comedi/drivers/ni_labpc_common.c unsigned long context) context 56 drivers/staging/comedi/drivers/ni_labpc_cs.c unsigned long context) context 68 drivers/staging/comedi/drivers/ni_labpc_pci.c unsigned long context) context 74 drivers/staging/comedi/drivers/ni_labpc_pci.c if (context < ARRAY_SIZE(labpc_pci_boards)) context 75 drivers/staging/comedi/drivers/ni_labpc_pci.c board = &labpc_pci_boards[context]; context 148 drivers/staging/comedi/drivers/ni_mio_cs.c unsigned long context) context 758 drivers/staging/comedi/drivers/ni_pcidio.c unsigned long context) context 761 drivers/staging/comedi/drivers/ni_pcidio.c int fpga_index = context; context 881 drivers/staging/comedi/drivers/ni_pcidio.c unsigned long context) context 890 drivers/staging/comedi/drivers/ni_pcidio.c if (context < ARRAY_SIZE(nidio_boards)) context 891 drivers/staging/comedi/drivers/ni_pcidio.c board = &nidio_boards[context]; context 1291 drivers/staging/comedi/drivers/ni_pcimio.c unsigned long context) context 1299 drivers/staging/comedi/drivers/ni_pcimio.c if (context < ARRAY_SIZE(ni_boards)) context 1300 drivers/staging/comedi/drivers/ni_pcimio.c board = &ni_boards[context]; context 508 drivers/staging/comedi/drivers/ni_usb6501.c unsigned long context) context 238 drivers/staging/comedi/drivers/pcl711.c unsigned long context) context 615 drivers/staging/comedi/drivers/pcl812.c unsigned long context) context 200 drivers/staging/comedi/drivers/pcl816.c unsigned long context) context 414 drivers/staging/comedi/drivers/pcl818.c unsigned long context) context 57 drivers/staging/comedi/drivers/pcmad.c unsigned long context) context 520 drivers/staging/comedi/drivers/pcmmio.c unsigned long context) context 603 drivers/staging/comedi/drivers/pcmmio.c unsigned long context) context 285 drivers/staging/comedi/drivers/quatech_daqp_cs.c unsigned long context) context 621 drivers/staging/comedi/drivers/quatech_daqp_cs.c unsigned long context) context 699 drivers/staging/comedi/drivers/quatech_daqp_cs.c unsigned long context) context 522 drivers/staging/comedi/drivers/rtd520.c unsigned long context) context 997 drivers/staging/comedi/drivers/rtd520.c unsigned long context) context 1209 drivers/staging/comedi/drivers/rtd520.c unsigned long context) context 1217 drivers/staging/comedi/drivers/rtd520.c if (context < ARRAY_SIZE(rtd520_boards)) context 1218 drivers/staging/comedi/drivers/rtd520.c board = &rtd520_boards[context]; context 136 drivers/staging/comedi/drivers/rti800.c unsigned long context) context 419 drivers/staging/comedi/drivers/s526.c unsigned long context) context 424 drivers/staging/comedi/drivers/s526.c if (status & context) { context 426 drivers/staging/comedi/drivers/s526.c outw(context, dev->iobase + S526_INT_STATUS_REG); context 231 drivers/staging/comedi/drivers/s626.c unsigned long context) context 321 drivers/staging/comedi/drivers/s626.c unsigned long context) context 325 drivers/staging/comedi/drivers/s626.c switch (context) { context 1475 drivers/staging/comedi/drivers/s626.c unsigned long context) context 290 drivers/staging/comedi/drivers/usbdux.c struct comedi_device *dev = urb->context; context 437 drivers/staging/comedi/drivers/usbdux.c struct comedi_device *dev = urb->context; context 495 drivers/staging/comedi/drivers/usbdux.c urb->context = dev; context 1152 drivers/staging/comedi/drivers/usbdux.c struct comedi_device *dev = urb->context; context 1367 drivers/staging/comedi/drivers/usbdux.c unsigned long context) context 1463 drivers/staging/comedi/drivers/usbdux.c urb->context = dev; context 1485 drivers/staging/comedi/drivers/usbdux.c urb->context = dev; context 260 drivers/staging/comedi/drivers/usbduxfast.c struct comedi_device *dev = urb->context; context 843 drivers/staging/comedi/drivers/usbduxfast.c unsigned long context) context 252 drivers/staging/comedi/drivers/usbduxsigma.c struct comedi_device *dev = urb->context; context 391 drivers/staging/comedi/drivers/usbduxsigma.c struct comedi_device *dev = urb->context; context 447 drivers/staging/comedi/drivers/usbduxsigma.c urb->context = dev; context 1014 drivers/staging/comedi/drivers/usbduxsigma.c struct comedi_device *dev = urb->context; context 1261 drivers/staging/comedi/drivers/usbduxsigma.c unsigned long context) context 1355 drivers/staging/comedi/drivers/usbduxsigma.c urb->context = NULL; context 1377 drivers/staging/comedi/drivers/usbduxsigma.c urb->context = NULL; context 781 drivers/staging/comedi/drivers/vmk80xx.c unsigned long context) context 788 drivers/staging/comedi/drivers/vmk80xx.c if (context < ARRAY_SIZE(vmk80xx_boardinfo)) context 789 drivers/staging/comedi/drivers/vmk80xx.c board = &vmk80xx_boardinfo[context]; context 142 drivers/staging/emxx_udc/emxx_udc.c udc = (struct nbu2ss_udc *)_req->context; context 171 drivers/staging/emxx_udc/emxx_udc.c udc->ep0_req.req.context = udc; context 796 drivers/staging/gdm724x/gdm_lte.c static int rx_complete(void *arg, void *data, int len, int context) context 47 drivers/staging/gdm724x/gdm_lte.h int context), context 48 drivers/staging/gdm724x/gdm_lte.h void *cb_data, int context); context 249 drivers/staging/gdm724x/gdm_mux.c struct mux_rx *r = urb->context; context 328 drivers/staging/gdm724x/gdm_mux.c struct mux_tx *t = urb->context; context 53 drivers/staging/gdm724x/gdm_usb.c void *data, int len, int context), context 55 drivers/staging/gdm724x/gdm_usb.c int context); context 455 drivers/staging/gdm724x/gdm_usb.c struct usb_rx *r = urb->context; context 481 drivers/staging/gdm724x/gdm_usb.c void *data, int len, int context), context 483 drivers/staging/gdm724x/gdm_usb.c int context) context 522 drivers/staging/gdm724x/gdm_usb.c if (context == KERNEL_THREAD) context 541 drivers/staging/gdm724x/gdm_usb.c struct usb_tx *t = urb->context; context 61 drivers/staging/gdm724x/gdm_usb.h int (*callback)(void *cb_data, void *data, int len, int context); context 94 drivers/staging/gdm724x/gdm_usb.h int (*rx_cb)(void *cb_data, void *data, int len, int context); context 287 drivers/staging/isdn/gigaset/bas-gigaset.c (unsigned long) urb->context, context 477 drivers/staging/isdn/gigaset/bas-gigaset.c struct inbuf_t *inbuf = urb->context; context 609 drivers/staging/isdn/gigaset/bas-gigaset.c struct cardstate *cs = urb->context; context 671 drivers/staging/isdn/gigaset/bas-gigaset.c struct cardstate *cs = urb->context; context 872 drivers/staging/isdn/gigaset/bas-gigaset.c bcs = urb->context; context 937 drivers/staging/isdn/gigaset/bas-gigaset.c ucx = urb->context; context 1517 drivers/staging/isdn/gigaset/bas-gigaset.c struct bas_cardstate *ucs = urb->context; context 1776 drivers/staging/isdn/gigaset/bas-gigaset.c struct cardstate *cs = urb->context; context 344 drivers/staging/isdn/gigaset/usb-gigaset.c struct cardstate *cs = urb->context; context 395 drivers/staging/isdn/gigaset/usb-gigaset.c struct cardstate *cs = urb->context; context 2810 drivers/staging/media/allegro-dvt/allegro-core.c static void allegro_fw_callback(const struct firmware *fw, void *context) context 2812 drivers/staging/media/allegro-dvt/allegro-core.c struct allegro_dev *dev = context; context 111 drivers/staging/most/core.c struct most_channel *c = mbo->context; context 822 drivers/staging/most/core.c struct most_channel *c = mbo->context; context 846 drivers/staging/most/core.c struct most_channel *c = mbo->context; context 923 drivers/staging/most/core.c c = mbo->context; context 970 drivers/staging/most/core.c mbo->context = c; context 1010 drivers/staging/most/core.c if (WARN_ONCE(!mbo || !mbo->context, context 1029 drivers/staging/most/core.c c = mbo->context; context 1114 drivers/staging/most/core.c struct most_channel *c = mbo->context; context 1137 drivers/staging/most/core.c struct most_channel *c = mbo->context; context 177 drivers/staging/most/core.h void *context; context 338 drivers/staging/most/usb/usb.c struct mbo *mbo = urb->context; context 487 drivers/staging/most/usb/usb.c struct mbo *mbo = urb->context; context 516 drivers/staging/ralink-gdma/ralink-gdma.c unsigned long flags, void *context) context 138 drivers/staging/rtl8188eu/core/rtw_cmd.c int rtw_cmd_thread(void *context) context 144 drivers/staging/rtl8188eu/core/rtw_cmd.c struct adapter *padapter = context; context 55 drivers/staging/rtl8188eu/include/rtw_cmd.h int rtw_cmd_thread(void *context); context 230 drivers/staging/rtl8188eu/include/rtw_mlme.h int event_thread(void *context); context 350 drivers/staging/rtl8188eu/os_dep/usb_ops_linux.c struct recv_buf *precvbuf = (struct recv_buf *)purb->context; context 573 drivers/staging/rtl8188eu/os_dep/usb_ops_linux.c struct xmit_buf *pxmitbuf = (struct xmit_buf *)purb->context; context 841 drivers/staging/rtl8192u/r8192U_core.c struct sk_buff *skb = (struct sk_buff *)urb->context; context 883 drivers/staging/rtl8192u/r8192U_core.c urb->context = skb; context 974 drivers/staging/rtl8192u/r8192U_core.c struct sk_buff *skb = (struct sk_buff *)tx_urb->context; context 32 drivers/staging/rtl8712/hal_init.c static void rtl871x_load_fw_cb(const struct firmware *firmware, void *context) context 34 drivers/staging/rtl8712/hal_init.c struct _adapter *adapter = context; context 290 drivers/staging/rtl8712/rtl8712_cmd.c int r8712_cmd_thread(void *context) context 297 drivers/staging/rtl8712/rtl8712_cmd.c struct _adapter *padapter = context; context 86 drivers/staging/rtl8712/rtl871x_cmd.h int r8712_cmd_thread(void *context); context 143 drivers/staging/rtl8712/usb_ops_linux.c struct io_queue *pio_q = (struct io_queue *)purb->context; context 185 drivers/staging/rtl8712/usb_ops_linux.c struct recv_buf *precvbuf = (struct recv_buf *)purb->context; context 331 drivers/staging/rtl8712/usb_ops_linux.c struct xmit_frame *pxmitframe = (struct xmit_frame *)purb->context; context 401 drivers/staging/rtl8723bs/core/rtw_cmd.c int rtw_cmd_thread(void *context) context 410 drivers/staging/rtl8723bs/core/rtw_cmd.c struct adapter *padapter = context; context 6868 drivers/staging/rtl8723bs/core/rtw_mlme_ext.c p->func(p->context); context 2981 drivers/staging/rtl8723bs/core/rtw_xmit.c int rtw_xmit_thread(void *context) context 2988 drivers/staging/rtl8723bs/core/rtw_xmit.c padapter = context; context 474 drivers/staging/rtl8723bs/hal/rtl8723bs_xmit.c int rtl8723bs_xmit_thread(void *context) context 482 drivers/staging/rtl8723bs/hal/rtl8723bs_xmit.c padapter = context; context 444 drivers/staging/rtl8723bs/include/rtl8723b_xmit.h int rtl8723bs_xmit_thread(void *context); context 130 drivers/staging/rtl8723bs/include/rtw_cmd.h int rtw_cmd_thread(void *context); context 799 drivers/staging/rtl8723bs/include/rtw_cmd.h void *context; context 487 drivers/staging/rtl8723bs/include/rtw_mlme.h int event_thread(void *context); context 511 drivers/staging/rtl8723bs/include/rtw_xmit.h int rtw_xmit_thread(void *context); context 698 drivers/staging/rts5208/rtsx_chip.h void *context; context 415 drivers/staging/unisys/include/iochannel.h void *context; context 421 drivers/staging/unisys/include/iochannel.h void *context; context 491 drivers/staging/unisys/visornic/visornic_main.c devdata->cmdrsp_rcv->net.enbdis.context = netdev; context 1061 drivers/staging/unisys/visornic/visornic_main.c cmdrsp->net.enbdis.context = netdev; context 1696 drivers/staging/unisys/visornic/visornic_main.c cmdrsp->net.enbdis.context; context 618 drivers/staging/uwb/hwa-rc.c struct hwarc *hwarc = urb->context; context 39 drivers/staging/uwb/i1480/dfu/dfu.c const char *cmd, u8 context, u8 expected_type, context 44 drivers/staging/uwb/i1480/dfu/dfu.c if (rceb->bEventContext != context) { context 48 drivers/staging/uwb/i1480/dfu/dfu.c rceb->bEventContext, context); context 86 drivers/staging/uwb/i1480/dfu/dfu.c u8 context; context 91 drivers/staging/uwb/i1480/dfu/dfu.c get_random_bytes(&context, 1); context 92 drivers/staging/uwb/i1480/dfu/dfu.c } while (context == 0x00 || context == 0xff); context 93 drivers/staging/uwb/i1480/dfu/dfu.c cmd->bCommandContext = context; context 133 drivers/staging/uwb/i1480/dfu/dfu.c result = i1480_rceb_check(i1480, i1480->evt_buf, cmd_name, context, context 186 drivers/staging/uwb/i1480/dfu/usb.c struct i1480 *i1480 = urb->context; context 95 drivers/staging/uwb/neh.c u8 context; context 162 drivers/staging/uwb/neh.c neh->context = result; context 172 drivers/staging/uwb/neh.c if (neh->context == 0) context 174 drivers/staging/uwb/neh.c if (test_bit(neh->context, rc->ctx_bm) == 0) { context 176 drivers/staging/uwb/neh.c neh->context); context 179 drivers/staging/uwb/neh.c clear_bit(neh->context, rc->ctx_bm); context 180 drivers/staging/uwb/neh.c neh->context = 0; context 223 drivers/staging/uwb/neh.c cmd->bCommandContext = neh->context; context 279 drivers/staging/uwb/neh.c if (neh->context) context 295 drivers/staging/uwb/neh.c && neh->context == rceb->bEventContext; context 565 drivers/staging/uwb/neh.c if (neh->context) context 170 drivers/staging/uwb/uwbd.c u8 type, context; context 175 drivers/staging/uwb/uwbd.c context = evt->notif.rceb->bEventContext; context 193 drivers/staging/uwb/uwbd.c type, event, context, result); context 90 drivers/staging/vc04_services/bcm2835-camera/mmal-msg.h u32 context; /* a u32 per message context */ context 399 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c m.h.context = msg_context->handle; context 583 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c if (!msg->h.context) { context 590 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c msg->h.context); context 593 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c msg->h.context); context 665 drivers/staging/vc04_services/bcm2835-camera/mmal-vchiq.c msg->h.context = msg_context->handle; context 731 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c static ssize_t vchiq_ioc_copy_element_data(void *context, void *dest, context 734 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c struct vchiq_io_copy_callback_context *cc = context; context 780 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c struct vchiq_io_copy_callback_context context; context 784 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.element = elements; context 785 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.element_offset = 0; context 786 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.elements_to_go = count; context 796 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c &context, total_size); context 2078 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c struct dump_context *context = (struct dump_context *)dump_context; context 2080 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (context->actual < context->space) { context 2083 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (context->offset > 0) { context 2084 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c int skip_bytes = min(len, (int)context->offset); context 2088 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context->offset -= skip_bytes; context 2089 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (context->offset > 0) context 2092 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c copy_bytes = min(len, (int)(context->space - context->actual)); context 2095 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (copy_to_user(context->buf + context->actual, str, context 2097 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context->actual = -EFAULT; context 2098 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context->actual += copy_bytes; context 2107 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (copy_to_user(context->buf + context->actual - 1, context 2109 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context->actual = -EFAULT; context 2208 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c struct dump_context context; context 2210 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.buf = buf; context 2211 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.actual = 0; context 2212 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.space = count; context 2213 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c context.offset = *ppos; context 2215 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c vchiq_dump_state(&context, &g_state); context 2217 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c *ppos += context.actual; context 2219 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c return context.actual; context 741 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c void *context, void *dest, context 744 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c memcpy(dest + offset, context + offset, maxsize); context 750 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c ssize_t (*copy_callback)(void *context, void *dest, context 752 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c void *context, context 763 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_callback(context, dest + pos, context 785 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c ssize_t (*copy_callback)(void *context, void *dest, context 787 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c void *context, size_t size, int flags) context 919 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_message_data(copy_callback, context, context 985 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_message_data(copy_callback, context, context 1033 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c ssize_t (*copy_callback)(void *context, void *dest, context 1035 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c void *context, int size, int is_blocking) context 1070 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_message_data(copy_callback, context, context 3108 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c ssize_t (*copy_callback)(void *context, void *dest, context 3110 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c void *context, context 3137 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_callback, context, size, 1); context 3144 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_core.c copy_callback, context, size, 1); context 113 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_if.h ssize_t (*copy_callback)(void *context, void *dest, context 115 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_if.h void *context, context 103 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c ssize_t (*copy_callback)(void *context, void *dest, context 105 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c void *context, context 114 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c context, context 132 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c vchi_queue_kernel_message_callback(void *context, context 137 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c memcpy(dest, context + offset, maxsize); context 158 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c vchi_queue_user_message_callback(void *context, context 163 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_shim.c struct vchi_queue_user_message_context *copycontext = context; context 834 drivers/staging/vt6655/device_main.c struct vnt_td_info *context, u8 tsr0, u8 tsr1) context 843 drivers/staging/vt6655/device_main.c if (!context) context 846 drivers/staging/vt6655/device_main.c if (!context->skb) context 849 drivers/staging/vt6655/device_main.c fifo_head = (struct vnt_tx_fifo_head *)context->buf; context 853 drivers/staging/vt6655/device_main.c info = IEEE80211_SKB_CB(context->skb); context 60 drivers/staging/vt6656/int.c struct vnt_usb_send_context *context; context 69 drivers/staging/vt6656/int.c context = priv->tx_context[pkt_no]; context 71 drivers/staging/vt6656/int.c if (!context->skb) context 74 drivers/staging/vt6656/int.c info = IEEE80211_SKB_CB(context->skb); context 77 drivers/staging/vt6656/int.c if (context->fb_option && !(tsr & (TSR_TMO | TSR_RETRYTMO))) { context 87 drivers/staging/vt6656/int.c if (context->fb_option == AUTO_FB_0) context 89 drivers/staging/vt6656/int.c else if (context->fb_option == AUTO_FB_1) context 109 drivers/staging/vt6656/int.c ieee80211_tx_status_irqsafe(priv->hw, context->skb); context 111 drivers/staging/vt6656/int.c context->in_use = false; context 70 drivers/staging/vt6656/rxtx.c struct vnt_usb_send_context *context = NULL; context 79 drivers/staging/vt6656/rxtx.c context = priv->tx_context[ii]; context 80 drivers/staging/vt6656/rxtx.c if (!context->in_use) { context 81 drivers/staging/vt6656/rxtx.c context->in_use = true; context 82 drivers/staging/vt6656/rxtx.c memset(context->data, 0, context 85 drivers/staging/vt6656/rxtx.c context->hdr = NULL; context 87 drivers/staging/vt6656/rxtx.c return context; context 208 drivers/staging/vt6656/rxtx.c static __le16 vnt_get_rtscts_duration_le(struct vnt_usb_send_context *context, context 211 drivers/staging/vt6656/rxtx.c struct vnt_private *priv = context->priv; context 213 drivers/staging/vt6656/rxtx.c u32 frame_length = context->frame_len; context 214 drivers/staging/vt6656/rxtx.c u8 need_ack = context->need_ack; context 995 drivers/staging/vt6656/rxtx.c struct vnt_usb_send_context *context; context 1003 drivers/staging/vt6656/rxtx.c context = vnt_get_free_context(priv); context 1004 drivers/staging/vt6656/rxtx.c if (!context) { context 1010 drivers/staging/vt6656/rxtx.c context->skb = skb; context 1014 drivers/staging/vt6656/rxtx.c beacon_buffer = (struct vnt_beacon_buffer *)&context->data[0]; context 1066 drivers/staging/vt6656/rxtx.c beacon_buffer->pkt_no = context->pkt_no; context 1069 drivers/staging/vt6656/rxtx.c context->type = CONTEXT_BEACON_PACKET; context 1070 drivers/staging/vt6656/rxtx.c context->buf_len = count + 4; /* USB header */ context 1074 drivers/staging/vt6656/rxtx.c if (vnt_tx_context(priv, context) != STATUS_PENDING) context 1075 drivers/staging/vt6656/rxtx.c ieee80211_free_txskb(priv->hw, context->skb); context 144 drivers/staging/vt6656/usbpipe.c struct vnt_private *priv = urb->context; context 211 drivers/staging/vt6656/usbpipe.c struct vnt_rcb *rcb = urb->context; context 283 drivers/staging/vt6656/usbpipe.c struct vnt_usb_send_context *context = urb->context; context 284 drivers/staging/vt6656/usbpipe.c struct vnt_private *priv = context->priv; context 288 drivers/staging/vt6656/usbpipe.c dev_dbg(&priv->usb->dev, "Write %d bytes\n", context->buf_len); context 293 drivers/staging/vt6656/usbpipe.c context->in_use = false; context 301 drivers/staging/vt6656/usbpipe.c if (context->type == CONTEXT_DATA_PACKET) context 304 drivers/staging/vt6656/usbpipe.c if (urb->status || context->type == CONTEXT_BEACON_PACKET) { context 305 drivers/staging/vt6656/usbpipe.c if (context->skb) context 306 drivers/staging/vt6656/usbpipe.c ieee80211_free_txskb(priv->hw, context->skb); context 308 drivers/staging/vt6656/usbpipe.c context->in_use = false; context 313 drivers/staging/vt6656/usbpipe.c struct vnt_usb_send_context *context) context 316 drivers/staging/vt6656/usbpipe.c struct urb *urb = context->urb; context 319 drivers/staging/vt6656/usbpipe.c context->in_use = false; context 326 drivers/staging/vt6656/usbpipe.c context->data, context 327 drivers/staging/vt6656/usbpipe.c context->buf_len, context 329 drivers/staging/vt6656/usbpipe.c context); context 335 drivers/staging/vt6656/usbpipe.c context->in_use = false; context 37 drivers/staging/vt6656/usbpipe.h struct vnt_usb_send_context *context); context 303 drivers/staging/wlan-ng/hfa384x_usb.c pr_debug("urb->context=0x%08x\n", (unsigned int)urb->context); context 2915 drivers/staging/wlan-ng/hfa384x_usb.c struct wlandevice *wlandev = urb->context; context 3479 drivers/staging/wlan-ng/hfa384x_usb.c struct wlandevice *wlandev = urb->context; context 3551 drivers/staging/wlan-ng/hfa384x_usb.c struct hfa384x *hw = urb->context; context 209 drivers/staging/wusbcore/wa-nep.c struct wahc *wa = urb->context; context 431 drivers/staging/wusbcore/wa-xfer.c struct wa_xfer_abort_buffer *b = urb->context; context 734 drivers/staging/wusbcore/wa-xfer.c struct wa_seg *seg = urb->context; context 871 drivers/staging/wusbcore/wa-xfer.c struct wa_seg *seg = urb->context; context 944 drivers/staging/wusbcore/wa-xfer.c struct wa_seg *seg = urb->context; context 2211 drivers/staging/wusbcore/wa-xfer.c buf_in_urb->context = seg; context 2264 drivers/staging/wusbcore/wa-xfer.c buf_in_urb->context = seg; context 2584 drivers/staging/wusbcore/wa-xfer.c struct wa_seg *seg = urb->context; context 2753 drivers/staging/wusbcore/wa-xfer.c struct wahc *wa = urb->context; context 510 drivers/target/iscsi/cxgbit/cxgbit_cm.c conn->context = csk; context 756 drivers/target/iscsi/cxgbit/cxgbit_cm.c __cxgbit_free_conn(conn->context); context 233 drivers/target/iscsi/cxgbit/cxgbit_ddp.c struct cxgbit_sock *csk = conn->context; context 271 drivers/target/iscsi/cxgbit/cxgbit_ddp.c struct cxgbit_sock *csk = conn->context; context 534 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 553 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 591 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 734 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 794 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 877 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 1615 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 1631 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cxgbit_sock *csk = conn->context; context 174 drivers/thermal/intel/int340x_thermal/int3400_thermal.c struct acpi_osc_context context = { context 183 drivers/thermal/intel/int340x_thermal/int3400_thermal.c context.cap.pointer = buf; context 185 drivers/thermal/intel/int340x_thermal/int3400_thermal.c status = acpi_run_osc(handle, &context); context 187 drivers/thermal/intel/int340x_thermal/int3400_thermal.c ret = *((u32 *)(context.ret.pointer + 4)); context 193 drivers/thermal/intel/int340x_thermal/int3400_thermal.c kfree(context.ret.pointer); context 744 drivers/tty/serial/ifx6x60.c ifx_dev->spi_msg.context = ifx_dev; context 1127 drivers/tty/serial/mxs-auart.c static irqreturn_t mxs_auart_irq_handle(int irq, void *context) context 1130 drivers/tty/serial/mxs-auart.c struct mxs_auart_port *s = context; context 667 drivers/tty/serial/rp2.c static void rp2_fw_cb(const struct firmware *fw, void *context) context 669 drivers/tty/serial/rp2.c struct rp2_card *card = context; context 156 drivers/tty/serial/serial_mctrl_gpio.c static irqreturn_t mctrl_gpio_irq_handle(int irq, void *context) context 158 drivers/tty/serial/serial_mctrl_gpio.c struct mctrl_gpios *gpios = context; context 1162 drivers/tty/serial/ucc_uart.c static void uart_firmware_cont(const struct firmware *fw, void *context) context 1165 drivers/tty/serial/ucc_uart.c struct device *dev = context; context 93 drivers/uio/uio_hv_generic.c static void hv_uio_channel_cb(void *context) context 95 drivers/uio/uio_hv_generic.c struct vmbus_channel *chan = context; context 576 drivers/usb/atm/cxacru.c complete(urb->context); context 596 drivers/usb/atm/speedtch.c struct speedtch_instance_data *instance = int_urb->context; context 600 drivers/usb/atm/ueagle-atm.c void *context) context 602 drivers/usb/atm/ueagle-atm.c struct usb_device *usb = context; context 2088 drivers/usb/atm/ueagle-atm.c struct uea_softc *sc = urb->context; context 215 drivers/usb/atm/usbatm.c struct usbatm_channel *channel = urb->context; context 245 drivers/usb/atm/usbatm.c struct usbatm_channel *channel = urb->context; context 921 drivers/usb/chipidea/udc.c struct ci_hdrc *ci = req->context; context 957 drivers/usb/chipidea/udc.c ci->status->context = ci; context 343 drivers/usb/class/cdc-acm.c struct acm *acm = urb->context; context 474 drivers/usb/class/cdc-acm.c struct acm_rb *rb = urb->context; context 552 drivers/usb/class/cdc-acm.c struct acm_wb *wb = urb->context; context 745 drivers/usb/class/cdc-acm.c wb = urb->context; context 1636 drivers/usb/class/cdc-acm.c acm_start_wb(acm, urb->context); context 147 drivers/usb/class/cdc-wdm.c desc = urb->context; context 160 drivers/usb/class/cdc-wdm.c struct wdm_device *desc = urb->context; context 238 drivers/usb/class/cdc-wdm.c desc = urb->context; context 293 drivers/usb/class/usblp.c struct usblp *usblp = urb->context; context 317 drivers/usb/class/usblp.c struct usblp *usblp = urb->context; context 741 drivers/usb/class/usbtmc.c struct usbtmc_file_data *file_data = urb->context; context 1042 drivers/usb/class/usbtmc.c struct usbtmc_file_data *file_data = urb->context; context 2236 drivers/usb/class/usbtmc.c struct usbtmc_device_data *data = urb->context; context 602 drivers/usb/core/devio.c struct async *as = urb->context; context 1795 drivers/usb/core/devio.c as->urb->context = as; context 2059 drivers/usb/core/devio.c ps->disccontext.sival_int = ds.context; context 2178 drivers/usb/core/devio.c ps->disccontext.sival_ptr = ds.context; context 712 drivers/usb/core/hub.c struct usb_hub *hub = urb->context; context 35 drivers/usb/core/message.c struct api_context *ctx = urb->context; context 55 drivers/usb/core/message.c urb->context = &ctx; context 273 drivers/usb/core/message.c struct usb_sg_request *io = urb->context; context 418 drivers/usb/core/message.c urb->context = io; context 512 drivers/usb/dwc2/hcd.c static void dwc2_host_hub_info(struct dwc2_hsotg *hsotg, void *context, context 515 drivers/usb/dwc2/hcd.c struct urb *urb = context; context 3992 drivers/usb/dwc2/hcd.c struct dwc2_tt *dwc2_host_get_tt_info(struct dwc2_hsotg *hsotg, void *context, context 3995 drivers/usb/dwc2/hcd.c struct urb *urb = context; context 4055 drivers/usb/dwc2/hcd.c int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context) context 4057 drivers/usb/dwc2/hcd.c struct urb *urb = context; context 807 drivers/usb/dwc2/hcd.h void *context, gfp_t mem_flags, context 812 drivers/usb/dwc2/hcd.h int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context); context 1395 drivers/usb/gadget/composite.c if (!req->context) context 1398 drivers/usb/gadget/composite.c cdev = req->context; context 1602 drivers/usb/gadget/composite.c req->context = cdev; context 1853 drivers/usb/gadget/composite.c req->context = cdev; context 1976 drivers/usb/gadget/composite.c req->context = cdev; context 2119 drivers/usb/gadget/composite.c cdev->req->context = cdev; context 2164 drivers/usb/gadget/composite.c cdev->os_desc_req->context = cdev; context 2418 drivers/usb/gadget/composite.c req->context = cdev; context 554 drivers/usb/gadget/function/f_acm.c struct f_acm *acm = req->context; context 672 drivers/usb/gadget/function/f_acm.c acm->notify_req->context = acm; context 445 drivers/usb/gadget/function/f_ecm.c struct f_ecm *ecm = req->context; context 776 drivers/usb/gadget/function/f_ecm.c ecm->notify_req->context = ecm; context 325 drivers/usb/gadget/function/f_eem.c struct sk_buff *skb = (struct sk_buff *)req->context; context 448 drivers/usb/gadget/function/f_eem.c req->context = skb2; context 270 drivers/usb/gadget/function/f_fs.c struct ffs_data *ffs = req->context; context 709 drivers/usb/gadget/function/f_fs.c if (likely(req->context)) { context 712 drivers/usb/gadget/function/f_fs.c complete(req->context); context 852 drivers/usb/gadget/function/f_fs.c struct ffs_io_data *io_data = req->context; context 1071 drivers/usb/gadget/function/f_fs.c req->context = &done; context 1118 drivers/usb/gadget/function/f_fs.c req->context = io_data; context 1857 drivers/usb/gadget/function/f_fs.c ffs->ep0req->context = ffs; context 392 drivers/usb/gadget/function/f_hid.c req->context = hidg; context 463 drivers/usb/gadget/function/f_hid.c struct f_hidg *hidg = (struct f_hidg *) req->context; context 696 drivers/usb/gadget/function/f_hid.c req->context = hidg; context 248 drivers/usb/gadget/function/f_loopback.c struct usb_request *in_req = req->context; context 259 drivers/usb/gadget/function/f_loopback.c req = req->context; context 290 drivers/usb/gadget/function/f_loopback.c req->context); context 339 drivers/usb/gadget/function/f_loopback.c in_req->context = out_req; context 340 drivers/usb/gadget/function/f_loopback.c out_req->context = in_req; context 446 drivers/usb/gadget/function/f_mass_storage.c struct fsg_buffhd *bh = req->context; context 462 drivers/usb/gadget/function/f_mass_storage.c struct fsg_buffhd *bh = req->context; context 499 drivers/usb/gadget/function/f_mass_storage.c req->context = NULL; context 2276 drivers/usb/gadget/function/f_mass_storage.c bh->inreq->context = bh->outreq->context = bh; context 628 drivers/usb/gadget/function/f_ncm.c struct f_ncm *ncm = req->context; context 658 drivers/usb/gadget/function/f_ncm.c struct usb_function *f = req->context; context 662 drivers/usb/gadget/function/f_ncm.c req->context = NULL; context 755 drivers/usb/gadget/function/f_ncm.c req->context = f; context 1487 drivers/usb/gadget/function/f_ncm.c ncm->notify_req->context = ncm; context 204 drivers/usb/gadget/function/f_phonet.c struct sk_buff *skb = req->context; context 244 drivers/usb/gadget/function/f_phonet.c req->context = skb; context 303 drivers/usb/gadget/function/f_phonet.c req->context = page; context 315 drivers/usb/gadget/function/f_phonet.c struct page *page = req->context; context 411 drivers/usb/gadget/function/f_rndis.c struct f_rndis *rndis = req->context; context 450 drivers/usb/gadget/function/f_rndis.c struct f_rndis *rndis = req->context; context 488 drivers/usb/gadget/function/f_rndis.c req->context = rndis; context 505 drivers/usb/gadget/function/f_rndis.c req->context = rndis; context 774 drivers/usb/gadget/function/f_rndis.c rndis->notify_req->context = rndis; context 61 drivers/usb/gadget/function/f_tcm.c struct usbg_cmd *cmd = req->context; context 83 drivers/usb/gadget/function/f_tcm.c fu->bot_status.req->context = cmd; context 91 drivers/usb/gadget/function/f_tcm.c struct usbg_cmd *cmd = req->context; context 138 drivers/usb/gadget/function/f_tcm.c req->context = cmd; context 166 drivers/usb/gadget/function/f_tcm.c fu->bot_status.req->context = cmd; context 194 drivers/usb/gadget/function/f_tcm.c struct usbg_cmd *cmd = req->context; context 234 drivers/usb/gadget/function/f_tcm.c fu->bot_req_in->context = cmd; context 273 drivers/usb/gadget/function/f_tcm.c fu->bot_req_out->context = cmd; context 292 drivers/usb/gadget/function/f_tcm.c struct f_uas *fu = req->context; context 337 drivers/usb/gadget/function/f_tcm.c fu->cmd.req->context = fu; context 536 drivers/usb/gadget/function/f_tcm.c stream->req_in->context = cmd; context 557 drivers/usb/gadget/function/f_tcm.c stream->req_status->context = cmd; context 565 drivers/usb/gadget/function/f_tcm.c struct usbg_cmd *cmd = req->context; context 622 drivers/usb/gadget/function/f_tcm.c stream->req_status->context = cmd; context 656 drivers/usb/gadget/function/f_tcm.c stream->req_status->context = cmd; context 699 drivers/usb/gadget/function/f_tcm.c stream->req_status->context = cmd; context 721 drivers/usb/gadget/function/f_tcm.c struct f_uas *fu = req->context; context 777 drivers/usb/gadget/function/f_tcm.c fu->cmd.req->context = fu; context 954 drivers/usb/gadget/function/f_tcm.c struct usbg_cmd *cmd = req->context; context 996 drivers/usb/gadget/function/f_tcm.c req->context = cmd; context 323 drivers/usb/gadget/function/f_uac1_legacy.c struct f_audio *audio = req->context; context 360 drivers/usb/gadget/function/f_uac1_legacy.c struct f_audio *audio = req->context; context 412 drivers/usb/gadget/function/f_uac1_legacy.c req->context = audio; context 448 drivers/usb/gadget/function/f_uac1_legacy.c req->context = audio; context 624 drivers/usb/gadget/function/f_uac1_legacy.c req->context = audio; context 207 drivers/usb/gadget/function/f_uvc.c struct uvc_device *uvc = req->context; context 733 drivers/usb/gadget/function/f_uvc.c uvc->control_req->context = uvc; context 85 drivers/usb/gadget/function/u_audio.c struct uac_req *ur = req->context; context 394 drivers/usb/gadget/function/u_audio.c req->context = &prm->ureq[i]; context 470 drivers/usb/gadget/function/u_audio.c req->context = &prm->ureq[i]; context 236 drivers/usb/gadget/function/u_ether.c req->context = skb; context 255 drivers/usb/gadget/function/u_ether.c struct sk_buff *skb = req->context, *skb2; context 440 drivers/usb/gadget/function/u_ether.c struct sk_buff *skb = req->context; context 563 drivers/usb/gadget/function/u_ether.c req->context = skb; context 178 drivers/usb/gadget/function/uvc_video.c struct uvc_video *video = req->context; context 273 drivers/usb/gadget/function/uvc_video.c video->req[i]->context = video; context 275 drivers/usb/gadget/legacy/inode.c if (!req->context) context 281 drivers/usb/gadget/legacy/inode.c complete ((struct completion *)req->context); context 337 drivers/usb/gadget/legacy/inode.c req->context = &done; context 481 drivers/usb/gadget/legacy/inode.c struct kiocb *iocb = req->context; context 555 drivers/usb/gadget/legacy/inode.c req->context = iocb; context 1363 drivers/usb/gadget/legacy/inode.c req->context = NULL; context 1671 drivers/usb/gadget/legacy/inode.c dev->req->context = NULL; context 726 drivers/usb/gadget/udc/dummy_hcd.c req->req.context = dum; context 734 drivers/usb/host/ehci-hub.c struct completion *done = urb->context; context 794 drivers/usb/host/ehci-hub.c urb->context = done; context 273 drivers/usb/image/mdc800.c struct mdc800_data* mdc800=urb->context; context 365 drivers/usb/image/mdc800.c struct mdc800_data* mdc800=urb->context; context 383 drivers/usb/image/mdc800.c struct mdc800_data* mdc800=urb->context; context 187 drivers/usb/image/microtek.c MTS_DEBUG("transfer = 0x%x context = 0x%x\n",(int)transfer,(int)context ); \ context 188 drivers/usb/image/microtek.c MTS_DEBUG("status = 0x%x data-length = 0x%x sent = 0x%x\n",transfer->status,(int)context->data_length, (int)transfer->actual_length ); \ context 189 drivers/usb/image/microtek.c mts_debug_dump(context->instance);\ context 204 drivers/usb/image/microtek.c struct mts_transfer_context* context = (struct mts_transfer_context*)transfer->context; \ context 381 drivers/usb/image/microtek.c context->instance->usb_dev, context 386 drivers/usb/image/microtek.c context context 392 drivers/usb/image/microtek.c context->srb->result = DID_ERROR << 16; context 403 drivers/usb/image/microtek.c if ( likely(context->final_callback != NULL) ) context 404 drivers/usb/image/microtek.c context->final_callback(context->srb); context 411 drivers/usb/image/microtek.c context->srb->result &= MTS_SCSI_ERR_MASK; context 412 drivers/usb/image/microtek.c context->srb->result |= (unsigned)(*context->scsi_status)<<1; context 424 drivers/usb/image/microtek.c usb_rcvbulkpipe(context->instance->usb_dev, context 425 drivers/usb/image/microtek.c context->instance->ep_response), context 426 drivers/usb/image/microtek.c context->scsi_status, context 437 drivers/usb/image/microtek.c if ( context->data_length != transfer->actual_length ) { context 438 drivers/usb/image/microtek.c scsi_set_resid(context->srb, context->data_length - context 441 drivers/usb/image/microtek.c context->srb->result = (status == -ENOENT ? DID_ABORT : DID_ERROR)<<16; context 458 drivers/usb/image/microtek.c context->srb->result = DID_ABORT<<16; context 463 drivers/usb/image/microtek.c context->srb->result = DID_ERROR<<16; context 470 drivers/usb/image/microtek.c if (context->srb->cmnd[0] == REQUEST_SENSE) { context 472 drivers/usb/image/microtek.c context->data_pipe, context 473 drivers/usb/image/microtek.c context->srb->sense_buffer, context 474 drivers/usb/image/microtek.c context->data_length, context 476 drivers/usb/image/microtek.c } else { if ( context->data ) { context 478 drivers/usb/image/microtek.c context->data_pipe, context 479 drivers/usb/image/microtek.c context->data, context 480 drivers/usb/image/microtek.c context->data_length, context 481 drivers/usb/image/microtek.c scsi_sg_count(context->srb) > 1 ? context 494 drivers/usb/image/microtek.c MTS_DEBUG("Processing fragment %d of %d\n", context->fragment, context 495 drivers/usb/image/microtek.c scsi_sg_count(context->srb)); context 498 drivers/usb/image/microtek.c context->srb->result = (status == -ENOENT ? DID_ABORT : DID_ERROR)<<16; context 502 drivers/usb/image/microtek.c context->curr_sg = sg_next(context->curr_sg); context 504 drivers/usb/image/microtek.c context->data_pipe, context 505 drivers/usb/image/microtek.c sg_virt(context->curr_sg), context 506 drivers/usb/image/microtek.c context->curr_sg->length, context 507 drivers/usb/image/microtek.c sg_is_last(context->curr_sg) ? context 530 drivers/usb/image/microtek.c desc->context.instance = desc; context 531 drivers/usb/image/microtek.c desc->context.srb = srb; context 534 drivers/usb/image/microtek.c desc->context.data = NULL; context 535 drivers/usb/image/microtek.c desc->context.data_length = 0; context 538 drivers/usb/image/microtek.c desc->context.curr_sg = scsi_sglist(srb); context 539 drivers/usb/image/microtek.c desc->context.data = sg_virt(desc->context.curr_sg); context 540 drivers/usb/image/microtek.c desc->context.data_length = desc->context.curr_sg->length; context 561 drivers/usb/image/microtek.c desc->context.data_pipe = pipe; context 597 drivers/usb/image/microtek.c &desc->context context 602 drivers/usb/image/microtek.c desc->context.final_callback = callback; context 738 drivers/usb/image/microtek.c new_desc->context.scsi_status = kmalloc(1, GFP_KERNEL); context 739 drivers/usb/image/microtek.c if (!new_desc->context.scsi_status) context 780 drivers/usb/image/microtek.c kfree(new_desc->context.scsi_status); context 800 drivers/usb/image/microtek.c kfree(desc->context.scsi_status); context 45 drivers/usb/image/microtek.h struct mts_transfer_context context; context 158 drivers/usb/misc/adutux.c struct adu_device *dev = urb->context; context 203 drivers/usb/misc/adutux.c struct adu_device *dev = urb->context; context 79 drivers/usb/misc/appledisplay.c struct appledisplay *pdata = urb->context; context 317 drivers/usb/misc/chaoskey.c struct chaoskey *dev = urb->context; context 718 drivers/usb/misc/ftdi-elan.c struct usb_ftdi *ftdi = urb->context; context 158 drivers/usb/misc/iowarrior.c struct iowarrior *dev = urb->context; context 231 drivers/usb/misc/iowarrior.c dev = urb->context; context 221 drivers/usb/misc/ldusb.c struct ld_usb *dev = urb->context; context 282 drivers/usb/misc/ldusb.c struct ld_usb *dev = urb->context; context 708 drivers/usb/misc/legousbtower.c struct lego_usb_tower *dev = urb->context; context 767 drivers/usb/misc/legousbtower.c struct lego_usb_tower *dev = urb->context; context 387 drivers/usb/misc/lvstest.c struct lvs_rh *lvs = urb->context; context 193 drivers/usb/misc/sisusbvga/sisusb.c struct sisusb_urb_context *context = urb->context; context 196 drivers/usb/misc/sisusbvga/sisusb.c if (!context) context 199 drivers/usb/misc/sisusbvga/sisusb.c sisusb = context->sisusb; context 205 drivers/usb/misc/sisusbvga/sisusb.c if (context->actual_length) context 206 drivers/usb/misc/sisusbvga/sisusb.c *(context->actual_length) += urb->actual_length; context 209 drivers/usb/misc/sisusbvga/sisusb.c sisusb->urbstatus[context->urbindex] &= ~SU_URB_BUSY; context 268 drivers/usb/misc/sisusbvga/sisusb.c struct sisusb_usb_data *sisusb = urb->context; context 201 drivers/usb/misc/usblcd.c dev = urb->context; context 254 drivers/usb/misc/usbtest.c complete(urb->context); context 469 drivers/usb/misc/usbtest.c urb->context = &completion; context 1089 drivers/usb/misc/usbtest.c struct ctrl_ctx *ctx = urb->context; context 1205 drivers/usb/misc/usbtest.c struct ctrl_ctx context; context 1211 drivers/usb/misc/usbtest.c spin_lock_init(&context.lock); context 1212 drivers/usb/misc/usbtest.c context.dev = dev; context 1213 drivers/usb/misc/usbtest.c init_completion(&context.complete); context 1214 drivers/usb/misc/usbtest.c context.count = param->sglen * param->iterations; context 1215 drivers/usb/misc/usbtest.c context.pending = 0; context 1216 drivers/usb/misc/usbtest.c context.status = -ENOMEM; context 1217 drivers/usb/misc/usbtest.c context.param = param; context 1218 drivers/usb/misc/usbtest.c context.last = -1; context 1365 drivers/usb/misc/usbtest.c context.status = -EINVAL; context 1381 drivers/usb/misc/usbtest.c u->context = &context; context 1386 drivers/usb/misc/usbtest.c context.urb = urb; context 1387 drivers/usb/misc/usbtest.c spin_lock_irq(&context.lock); context 1389 drivers/usb/misc/usbtest.c context.status = usb_submit_urb(urb[i], GFP_ATOMIC); context 1390 drivers/usb/misc/usbtest.c if (context.status != 0) { context 1392 drivers/usb/misc/usbtest.c i, context.status); context 1393 drivers/usb/misc/usbtest.c context.count = context.pending; context 1396 drivers/usb/misc/usbtest.c context.pending++; context 1398 drivers/usb/misc/usbtest.c spin_unlock_irq(&context.lock); context 1403 drivers/usb/misc/usbtest.c if (context.pending > 0) context 1404 drivers/usb/misc/usbtest.c wait_for_completion(&context.complete); context 1415 drivers/usb/misc/usbtest.c return context.status; context 1431 drivers/usb/misc/usbtest.c complete(urb->context); context 1445 drivers/usb/misc/usbtest.c urb->context = &completion; context 1537 drivers/usb/misc/usbtest.c struct queued_ctx *ctx = urb->context; context 1928 drivers/usb/misc/usbtest.c struct transfer_context *ctx = urb->context; context 2041 drivers/usb/misc/usbtest.c struct transfer_context context; context 2054 drivers/usb/misc/usbtest.c memset(&context, 0, sizeof(context)); context 2055 drivers/usb/misc/usbtest.c context.count = param->iterations * param->sglen; context 2056 drivers/usb/misc/usbtest.c context.dev = dev; context 2057 drivers/usb/misc/usbtest.c context.is_iso = !!desc; context 2058 drivers/usb/misc/usbtest.c init_completion(&context.done); context 2059 drivers/usb/misc/usbtest.c spin_lock_init(&context.lock); context 2064 drivers/usb/misc/usbtest.c if (context.is_iso) context 2076 drivers/usb/misc/usbtest.c urbs[i]->context = &context; context 2080 drivers/usb/misc/usbtest.c if (context.is_iso) { context 2102 drivers/usb/misc/usbtest.c spin_lock_irq(&context.lock); context 2104 drivers/usb/misc/usbtest.c ++context.pending; context 2109 drivers/usb/misc/usbtest.c spin_unlock_irq(&context.lock); context 2115 drivers/usb/misc/usbtest.c context.pending--; context 2116 drivers/usb/misc/usbtest.c context.submit_error = 1; context 2120 drivers/usb/misc/usbtest.c spin_unlock_irq(&context.lock); context 2122 drivers/usb/misc/usbtest.c wait_for_completion(&context.done); context 2135 drivers/usb/misc/usbtest.c else if (context.submit_error) context 2137 drivers/usb/misc/usbtest.c else if (context.errors > context 2138 drivers/usb/misc/usbtest.c (context.is_iso ? context.packet_count / 10 : 0)) context 102 drivers/usb/misc/uss720.c rq = urb->context; context 78 drivers/usb/misc/yurex.c struct usb_yurex *dev = urb->context; context 128 drivers/usb/misc/yurex.c struct usb_yurex *dev = urb->context; context 2506 drivers/usb/musb/musb_core.c musb->context.frame = musb_readw(musb_base, MUSB_FRAME); context 2507 drivers/usb/musb/musb_core.c musb->context.testmode = musb_readb(musb_base, MUSB_TESTMODE); context 2508 drivers/usb/musb/musb_core.c musb->context.busctl = musb_readb(musb_base, MUSB_ULPI_BUSCONTROL); context 2509 drivers/usb/musb/musb_core.c musb->context.power = musb_readb(musb_base, MUSB_POWER); context 2510 drivers/usb/musb/musb_core.c musb->context.intrusbe = musb_readb(musb_base, MUSB_INTRUSBE); context 2511 drivers/usb/musb/musb_core.c musb->context.index = musb_readb(musb_base, MUSB_INDEX); context 2512 drivers/usb/musb/musb_core.c musb->context.devctl = musb_readb(musb_base, MUSB_DEVCTL); context 2526 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txmaxp = context 2528 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txcsr = context 2530 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxmaxp = context 2532 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxcsr = context 2536 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfifoadd = context 2538 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfifoadd = context 2540 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfifosz = context 2542 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfifosz = context 2546 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txtype = context 2548 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txinterval = context 2550 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxtype = context 2552 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxinterval = context 2555 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfunaddr = context 2557 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txhubaddr = context 2559 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txhubport = context 2562 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfunaddr = context 2564 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxhubaddr = context 2566 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxhubport = context 2578 drivers/usb/musb/musb_core.c musb_writew(musb_base, MUSB_FRAME, musb->context.frame); context 2579 drivers/usb/musb/musb_core.c musb_writeb(musb_base, MUSB_TESTMODE, musb->context.testmode); context 2580 drivers/usb/musb/musb_core.c musb_writeb(musb_base, MUSB_ULPI_BUSCONTROL, musb->context.busctl); context 2585 drivers/usb/musb/musb_core.c musb->context.power &= ~(MUSB_POWER_SUSPENDM | MUSB_POWER_RESUME); context 2586 drivers/usb/musb/musb_core.c power |= musb->context.power; context 2591 drivers/usb/musb/musb_core.c musb_writeb(musb_base, MUSB_INTRUSBE, musb->context.intrusbe); context 2592 drivers/usb/musb/musb_core.c if (musb->context.devctl & MUSB_DEVCTL_SESSION) context 2593 drivers/usb/musb/musb_core.c musb_writeb(musb_base, MUSB_DEVCTL, musb->context.devctl); context 2608 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txmaxp); context 2610 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txcsr); context 2612 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxmaxp); context 2614 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxcsr); context 2618 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfifosz); context 2620 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfifosz); context 2622 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfifoadd); context 2624 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfifoadd); context 2628 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txtype); context 2630 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txinterval); context 2632 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxtype); context 2635 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxinterval); context 2637 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txfunaddr); context 2639 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txhubaddr); context 2641 drivers/usb/musb/musb_core.c musb->context.index_regs[i].txhubport); context 2644 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxfunaddr); context 2646 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxhubaddr); context 2648 drivers/usb/musb/musb_core.c musb->context.index_regs[i].rxhubport); context 2650 drivers/usb/musb/musb_core.c musb_writeb(musb_base, MUSB_INDEX, musb->context.index); context 2718 drivers/usb/musb/musb_core.c if ((devctl & mask) != (musb->context.devctl & mask)) context 275 drivers/usb/musb/musb_core.h struct musb_context_registers context; context 297 drivers/usb/musb/musb_debugfs.c musb->context.devctl |= MUSB_DEVCTL_SESSION; context 111 drivers/usb/musb/musb_dsps.c struct dsps_context context; context 988 drivers/usb/musb/musb_dsps.c glue->context.control = musb_readl(mbase, wrp->control); context 989 drivers/usb/musb/musb_dsps.c glue->context.epintr = musb_readl(mbase, wrp->epintr_set); context 990 drivers/usb/musb/musb_dsps.c glue->context.coreintr = musb_readl(mbase, wrp->coreintr_set); context 991 drivers/usb/musb/musb_dsps.c glue->context.phy_utmi = musb_readl(mbase, wrp->phy_utmi); context 992 drivers/usb/musb/musb_dsps.c glue->context.mode = musb_readl(mbase, wrp->mode); context 993 drivers/usb/musb/musb_dsps.c glue->context.tx_mode = musb_readl(mbase, wrp->tx_mode); context 994 drivers/usb/musb/musb_dsps.c glue->context.rx_mode = musb_readl(mbase, wrp->rx_mode); context 1014 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->control, glue->context.control); context 1015 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->epintr_set, glue->context.epintr); context 1016 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->coreintr_set, glue->context.coreintr); context 1017 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->phy_utmi, glue->context.phy_utmi); context 1018 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->mode, glue->context.mode); context 1019 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->tx_mode, glue->context.tx_mode); context 1020 drivers/usb/musb/musb_dsps.c musb_writel(mbase, wrp->rx_mode, glue->context.rx_mode); context 527 drivers/usb/musb/omap2430.c musb->context.otg_interfsel = musb_readl(musb->mregs, context 551 drivers/usb/musb/omap2430.c musb->context.otg_interfsel); context 119 drivers/usb/serial/aircable.c struct usb_serial_port *port = urb->context; context 529 drivers/usb/serial/ark3116.c struct usb_serial_port *port = urb->context; context 592 drivers/usb/serial/ark3116.c struct usb_serial_port *port = urb->context; context 173 drivers/usb/serial/belkin_sa.c struct usb_serial_port *port = urb->context; context 238 drivers/usb/serial/belkin_sa.c struct usb_serial_port *port = urb->context; context 584 drivers/usb/serial/ch341.c struct usb_serial_port *port = urb->context; context 253 drivers/usb/serial/cyberjack.c struct usb_serial_port *port = urb->context; context 309 drivers/usb/serial/cyberjack.c struct usb_serial_port *port = urb->context; context 355 drivers/usb/serial/cyberjack.c struct usb_serial_port *port = urb->context; context 1027 drivers/usb/serial/cypress_m8.c struct usb_serial_port *port = urb->context; context 1179 drivers/usb/serial/cypress_m8.c struct usb_serial_port *port = urb->context; context 982 drivers/usb/serial/digi_acceleport.c struct usb_serial_port *port = urb->context; context 1320 drivers/usb/serial/digi_acceleport.c struct usb_serial_port *port = urb->context; context 1381 drivers/usb/serial/digi_acceleport.c struct usb_serial_port *port = urb->context; context 1472 drivers/usb/serial/digi_acceleport.c struct usb_serial_port *port = urb->context; context 289 drivers/usb/serial/f81232.c struct usb_serial_port *port = urb->context; context 327 drivers/usb/serial/f81232.c struct usb_serial_port *port = urb->context; context 1267 drivers/usb/serial/f81534.c port = urb->context; context 1291 drivers/usb/serial/f81534.c struct usb_serial_port *port = urb->context; context 2576 drivers/usb/serial/ftdi_sio.c struct usb_serial_port *port = urb->context; context 949 drivers/usb/serial/garmin_gps.c struct usb_serial_port *port = urb->context; context 1163 drivers/usb/serial/garmin_gps.c struct usb_serial_port *port = urb->context; context 1211 drivers/usb/serial/garmin_gps.c struct usb_serial_port *port = urb->context; context 347 drivers/usb/serial/generic.c struct usb_serial_port *port = urb->context; context 372 drivers/usb/serial/generic.c struct usb_serial_port *port = urb->context; context 437 drivers/usb/serial/generic.c struct usb_serial_port *port = urb->context; context 645 drivers/usb/serial/io_edgeport.c struct edgeport_serial *edge_serial = urb->context; context 759 drivers/usb/serial/io_edgeport.c struct edgeport_serial *edge_serial = urb->context; context 820 drivers/usb/serial/io_edgeport.c struct edgeport_port *edge_port = urb->context; context 848 drivers/usb/serial/io_edgeport.c struct edgeport_port *edge_port = urb->context; context 1626 drivers/usb/serial/io_ti.c struct edgeport_serial *edge_serial = urb->context; context 1729 drivers/usb/serial/io_ti.c struct edgeport_port *edge_port = urb->context; context 1809 drivers/usb/serial/io_ti.c struct usb_serial_port *port = urb->context; context 1933 drivers/usb/serial/io_ti.c urb->context = edge_serial; context 1953 drivers/usb/serial/io_ti.c urb->context = edge_port; context 315 drivers/usb/serial/ir-usb.c struct usb_serial_port *port = urb->context; context 359 drivers/usb/serial/ir-usb.c struct usb_serial_port *port = urb->context; context 160 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 213 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 243 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 353 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 374 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 572 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 636 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 715 drivers/usb/serial/iuu_phoenix.c struct usb_serial_port *port = urb->context; context 799 drivers/usb/serial/keyspan.c port = urb->context; context 848 drivers/usb/serial/keyspan.c port = urb->context; context 864 drivers/usb/serial/keyspan.c port = urb->context; context 884 drivers/usb/serial/keyspan.c serial = urb->context; context 938 drivers/usb/serial/keyspan.c port = urb->context; context 952 drivers/usb/serial/keyspan.c port = urb->context; context 982 drivers/usb/serial/keyspan.c port = urb->context; context 1003 drivers/usb/serial/keyspan.c serial = urb->context; context 1057 drivers/usb/serial/keyspan.c serial = urb->context; context 1086 drivers/usb/serial/keyspan.c serial = urb->context; context 1150 drivers/usb/serial/keyspan.c port = urb->context; context 1194 drivers/usb/serial/keyspan.c serial = urb->context; context 1279 drivers/usb/serial/keyspan.c port = urb->context; context 1344 drivers/usb/serial/keyspan.c serial = urb->context; context 1388 drivers/usb/serial/keyspan.c port = urb->context; context 1410 drivers/usb/serial/keyspan.c serial = urb->context; context 1460 drivers/usb/serial/keyspan.c serial = urb->context; context 136 drivers/usb/serial/keyspan_pda.c struct usb_serial_port *port = urb->context; context 558 drivers/usb/serial/keyspan_pda.c struct usb_serial_port *port = urb->context; context 375 drivers/usb/serial/kl5kusb105.c struct usb_serial_port *port = urb->context; context 268 drivers/usb/serial/kobil_sct.c struct usb_serial_port *port = urb->context; context 394 drivers/usb/serial/mct_u232.c priv->read_urb->context = port; context 510 drivers/usb/serial/mct_u232.c struct usb_serial_port *port = urb->context; context 109 drivers/usb/serial/metro-usb.c struct usb_serial_port *port = urb->context; context 341 drivers/usb/serial/mos7720.c struct urbtracker *urbtrack = urb->context; context 900 drivers/usb/serial/mos7720.c struct usb_serial_port *port = urb->context; context 931 drivers/usb/serial/mos7720.c port = urb->context; context 964 drivers/usb/serial/mos7720.c mos7720_port = urb->context; context 450 drivers/usb/serial/mos7840.c mos7840_port = urb->context; context 626 drivers/usb/serial/mos7840.c serial = urb->context; context 742 drivers/usb/serial/mos7840.c mos7840_port = urb->context; context 807 drivers/usb/serial/mos7840.c mos7840_port = urb->context; context 472 drivers/usb/serial/mxuport.c struct usb_serial_port *port = urb->context; context 519 drivers/usb/serial/mxuport.c struct usb_serial_port *port = urb->context; context 563 drivers/usb/serial/mxuport.c struct usb_serial_port *port = urb->context; context 29 drivers/usb/serial/navman.c struct usb_serial_port *port = urb->context; context 140 drivers/usb/serial/omninet.c struct usb_serial_port *port = urb->context; context 70 drivers/usb/serial/opticon.c struct usb_serial_port *port = urb->context; context 154 drivers/usb/serial/opticon.c struct usb_serial_port *port = urb->context; context 2146 drivers/usb/serial/option.c struct usb_serial_port *port = urb->context; context 643 drivers/usb/serial/oti6858.c struct usb_serial_port *port = urb->context; context 767 drivers/usb/serial/oti6858.c struct usb_serial_port *port = urb->context; context 798 drivers/usb/serial/oti6858.c struct usb_serial_port *port = urb->context; context 952 drivers/usb/serial/pl2303.c struct usb_serial_port *port = urb->context; context 991 drivers/usb/serial/pl2303.c struct usb_serial_port *port = urb->context; context 513 drivers/usb/serial/quatech2.c serial = urb->context; context 603 drivers/usb/serial/quatech2.c port = urb->context; context 617 drivers/usb/serial/quatech2.c struct usb_serial *serial = urb->context; context 187 drivers/usb/serial/safe_serial.c struct usb_serial_port *port = urb->context; context 408 drivers/usb/serial/sierra.c struct usb_serial_port *port = urb->context; context 551 drivers/usb/serial/sierra.c port = urb->context; context 584 drivers/usb/serial/sierra.c struct usb_serial_port *port = urb->context; context 497 drivers/usb/serial/ssu100.c struct usb_serial_port *port = urb->context; context 34 drivers/usb/serial/symbolserial.c struct usb_serial_port *port = urb->context; context 677 drivers/usb/serial/ti_usb_3410_5052.c urb->context = tdev; context 751 drivers/usb/serial/ti_usb_3410_5052.c urb->context = tport; context 1110 drivers/usb/serial/ti_usb_3410_5052.c struct ti_device *tdev = urb->context; context 1192 drivers/usb/serial/ti_usb_3410_5052.c struct ti_port *tport = urb->context; context 1251 drivers/usb/serial/ti_usb_3410_5052.c struct ti_port *tport = urb->context; context 1496 drivers/usb/serial/ti_usb_3410_5052.c urb->context = tport; context 60 drivers/usb/serial/usb_debug.c struct usb_serial_port *port = urb->context; context 267 drivers/usb/serial/usb_wwan.c port = urb->context; context 307 drivers/usb/serial/usb_wwan.c port = urb->context; context 272 drivers/usb/serial/visor.c struct usb_serial_port *port = urb->context; context 492 drivers/usb/serial/whiteheat.c struct usb_serial_port *command_port = urb->context; context 90 drivers/usb/storage/onetouch.c struct usb_onetouch *onetouch = urb->context; context 103 drivers/usb/storage/transport.c struct completion *urb_done_ptr = urb->context; context 129 drivers/usb/storage/transport.c us->current_urb->context = &urb_done; context 302 drivers/usb/storage/uas.c struct Scsi_Host *shost = urb->context; context 396 drivers/usb/storage/uas.c struct scsi_cmnd *cmnd = urb->context; context 165 drivers/usb/usb-skeleton.c dev = urb->context; context 337 drivers/usb/usb-skeleton.c dev = urb->context; context 132 drivers/usb/usbip/stub_rx.c struct stub_priv *priv = (struct stub_priv *) urb->context; context 150 drivers/usb/usbip/stub_rx.c struct stub_priv *priv = (struct stub_priv *) urb->context; context 576 drivers/usb/usbip/stub_rx.c priv->urbs[i]->context = (void *) priv; context 42 drivers/usb/usbip/stub_tx.c struct stub_priv *priv = (struct stub_priv *) urb->context; context 119 drivers/usb/usbip/stub_tx.c struct stub_priv *priv = (struct stub_priv *) urb->context; context 133 drivers/video/fbdev/mmp/hw/mmp_spi.c m->complete(m->context); context 28 drivers/video/fbdev/omap2/omapfb/vrfb.c #define SMS_ROT_CONTROL(context) (0x0 + 0x10 * context) context 29 drivers/video/fbdev/omap2/omapfb/vrfb.c #define SMS_ROT_SIZE(context) (0x4 + 0x10 * context) context 30 drivers/video/fbdev/omap2/omapfb/vrfb.c #define SMS_ROT_PHYSICAL_BA(context) (0x8 + 0x10 * context) context 176 drivers/video/fbdev/omap2/omapfb/vrfb.c u8 ctx = vrfb->context; context 252 drivers/video/fbdev/omap2/omapfb/vrfb.c int ctx = vrfb->context; context 272 drivers/video/fbdev/omap2/omapfb/vrfb.c vrfb->context = 0xff; context 305 drivers/video/fbdev/omap2/omapfb/vrfb.c vrfb->context = ctx; context 1778 drivers/video/fbdev/smscufx.c struct urb_node *unode = urb->context; context 1813 drivers/video/fbdev/udlfb.c struct urb_node *unode = urb->context; context 852 drivers/virtio/virtio_ring.c bool context, context 894 drivers/virtio/virtio_ring.c vq = __vring_new_virtqueue(index, vring, vdev, weak_barriers, context, context 1560 drivers/virtio/virtio_ring.c bool context, context 1618 drivers/virtio/virtio_ring.c !context; context 2057 drivers/virtio/virtio_ring.c bool context, context 2092 drivers/virtio/virtio_ring.c !context; context 2138 drivers/virtio/virtio_ring.c bool context, context 2147 drivers/virtio/virtio_ring.c context, notify, callback, name); context 2151 drivers/virtio/virtio_ring.c context, notify, callback, name); context 2161 drivers/virtio/virtio_ring.c bool context, context 2173 drivers/virtio/virtio_ring.c return __vring_new_virtqueue(index, vring, vdev, weak_barriers, context, context 168 drivers/watchdog/pcwd_usb.c (struct usb_pcwd_private *)urb->context; context 338 drivers/xen/xen-acpi-cpuhotplug.c u32 lvl, void *context, void **rv) context 341 drivers/xen/xen-acpi-cpuhotplug.c int *action = context; context 95 drivers/xen/xen-acpi-memhotplug.c acpi_memory_get_resource(struct acpi_resource *resource, void *context) context 97 drivers/xen/xen-acpi-memhotplug.c struct acpi_memory_device *mem_device = context; context 325 drivers/xen/xen-acpi-processor.c read_acpi_id(acpi_handle handle, u32 lvl, void *context, void **rv) context 364 fs/binfmt_elf_fdpic.c current->mm->context.exec_fdpic_loadmap = 0; context 365 fs/binfmt_elf_fdpic.c current->mm->context.interp_fdpic_loadmap = 0; context 433 fs/binfmt_elf_fdpic.c current->mm->context.end_brk = current->mm->start_brk; context 570 fs/binfmt_elf_fdpic.c current->mm->context.exec_fdpic_loadmap = (unsigned long) sp; context 583 fs/binfmt_elf_fdpic.c current->mm->context.interp_fdpic_loadmap = (unsigned long) sp; context 1374 fs/binfmt_elf_fdpic.c prstatus->pr_exec_fdpic_loadmap = p->mm->context.exec_fdpic_loadmap; context 1375 fs/binfmt_elf_fdpic.c prstatus->pr_interp_fdpic_loadmap = p->mm->context.interp_fdpic_loadmap; context 733 fs/binfmt_flat.c current->mm->context.end_brk = memp + memp_size - stack_len; context 977 fs/binfmt_flat.c ((current->mm->context.end_brk + stack_len + 3) & ~3) - 4; context 111 fs/cachefiles/internal.h void *context; context 102 fs/ceph/addr.c snapc = ceph_get_snap_context(capsnap->context); context 500 fs/ceph/addr.c capsnap->context, capsnap->dirty_pages); context 505 fs/ceph/addr.c if (snapc && capsnap->context != page_snapc) context 524 fs/ceph/addr.c snapc = ceph_get_snap_context(capsnap->context); context 558 fs/ceph/addr.c if (capsnap->context == snapc) { context 2870 fs/ceph/caps.c ceph_put_snap_context(capsnap->context); context 2995 fs/ceph/caps.c if (capsnap->context == snapc) { context 3015 fs/ceph/caps.c inode, capsnap, capsnap->context->seq, context 3528 fs/ceph/caps.c ceph_put_snap_context(capsnap->context); context 877 fs/ceph/file.c snapc = ceph_get_snap_context(capsnap->context); context 1524 fs/ceph/file.c snapc = ceph_get_snap_context(capsnap->context); context 561 fs/ceph/snap.c capsnap->context = old_snapc; context 620 fs/ceph/snap.c capsnap->context, capsnap->context->seq, context 628 fs/ceph/snap.c inode, capsnap, capsnap->context, context 629 fs/ceph/snap.c capsnap->context->seq, ceph_cap_string(capsnap->dirty), context 197 fs/ceph/super.h struct ceph_snap_context *context; context 234 fs/cifs/smb2transport.c struct kvec context, __u8 *key, unsigned int key_size) context 288 fs/cifs/smb2transport.c context.iov_base, context.iov_len); context 316 fs/cifs/smb2transport.c struct kvec context; context 332 fs/cifs/smb2transport.c ptriplet->signing.context, ses->smb3signingkey, context 338 fs/cifs/smb2transport.c ptriplet->encryption.context, ses->smb3encryptionkey, context 344 fs/cifs/smb2transport.c ptriplet->decryption.context, context 380 fs/cifs/smb2transport.c d->context.iov_base = "SmbSign"; context 381 fs/cifs/smb2transport.c d->context.iov_len = 8; context 386 fs/cifs/smb2transport.c d->context.iov_base = "ServerIn "; context 387 fs/cifs/smb2transport.c d->context.iov_len = 10; context 392 fs/cifs/smb2transport.c d->context.iov_base = "ServerOut"; context 393 fs/cifs/smb2transport.c d->context.iov_len = 10; context 408 fs/cifs/smb2transport.c d->context.iov_base = ses->preauth_sha_hash; context 409 fs/cifs/smb2transport.c d->context.iov_len = 64; context 414 fs/cifs/smb2transport.c d->context.iov_base = ses->preauth_sha_hash; context 415 fs/cifs/smb2transport.c d->context.iov_len = 64; context 420 fs/cifs/smb2transport.c d->context.iov_base = ses->preauth_sha_hash; context 421 fs/cifs/smb2transport.c d->context.iov_len = 64; context 178 fs/cifs/smbdirect.c struct smbd_connection *info = id->context; context 238 fs/cifs/smbdirect.c smbd_qp_async_error_upcall(struct ib_event *event, void *context) context 240 fs/cifs/smbdirect.c struct smbd_connection *info = context; context 294 fs/crypto/fscrypt_private.h extern int fscrypt_hkdf_expand(struct fscrypt_hkdf *hkdf, u8 context, context 115 fs/crypto/hkdf.c int fscrypt_hkdf_expand(struct fscrypt_hkdf *hkdf, u8 context, context 133 fs/crypto/hkdf.c prefix[8] = context; context 44 fs/exportfs/expfs.c int (*acceptable)(void *context, struct dentry *dentry), context 45 fs/exportfs/expfs.c void *context) context 50 fs/exportfs/expfs.c if (acceptable(context, result)) context 60 fs/exportfs/expfs.c if (dentry != result && acceptable(context, dentry)) { context 422 fs/exportfs/expfs.c int (*acceptable)(void *, struct dentry *), void *context) context 464 fs/exportfs/expfs.c if (!acceptable(context, result)) { context 485 fs/exportfs/expfs.c alias = find_acceptable_alias(result, acceptable, context); context 553 fs/exportfs/expfs.c alias = find_acceptable_alias(result, acceptable, context); context 134 fs/fhandle.c static int vfs_dentry_acceptable(void *context, struct dentry *dentry) context 331 fs/fscache/internal.h void *fscache_get_context(struct fscache_cookie *cookie, void *context) context 334 fs/fscache/internal.h cookie->def->get_context(cookie->netfs_data, context); context 335 fs/fscache/internal.h return context; context 342 fs/fscache/internal.h void fscache_put_context(struct fscache_cookie *cookie, void *context) context 345 fs/fscache/internal.h cookie->def->put_context(cookie->netfs_data, context); context 293 fs/fscache/page.c if (op->context) context 294 fs/fscache/page.c fscache_put_context(op->cookie, op->context); context 306 fs/fscache/page.c void *context) context 326 fs/fscache/page.c op->context = context; context 333 fs/fscache/page.c if (context) context 334 fs/fscache/page.c fscache_get_context(op->cookie, context); context 432 fs/fscache/page.c void *context, context 459 fs/fscache/page.c end_io_func, context); context 563 fs/fscache/page.c void *context, context 590 fs/fscache/page.c op = fscache_alloc_retrieval(cookie, mapping, end_io_func, context); context 208 fs/nfs/filelayout/filelayout.c err = filelayout_async_handle_error(task, hdr->args.context->state, context 269 fs/nfs/filelayout/filelayout.c if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) { context 286 fs/nfs/filelayout/filelayout.c if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context, context 320 fs/nfs/filelayout/filelayout.c err = filelayout_async_handle_error(task, hdr->args.context->state, context 369 fs/nfs/filelayout/filelayout.c if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) { context 384 fs/nfs/filelayout/filelayout.c if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context, context 1330 fs/nfs/flexfilelayout/flexfilelayout.c err = ff_layout_async_handle_error(task, hdr->args.context->state, context 1415 fs/nfs/flexfilelayout/flexfilelayout.c if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) { context 1503 fs/nfs/flexfilelayout/flexfilelayout.c err = ff_layout_async_handle_error(task, hdr->args.context->state, context 1592 fs/nfs/flexfilelayout/flexfilelayout.c if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) { context 109 fs/nfs/fscache-index.c static void nfs_fh_get_context(void *cookie_netfs_data, void *context) context 111 fs/nfs/fscache-index.c get_nfs_open_context(context); context 119 fs/nfs/fscache-index.c static void nfs_fh_put_context(void *cookie_netfs_data, void *context) context 121 fs/nfs/fscache-index.c if (context) context 122 fs/nfs/fscache-index.c put_nfs_open_context(context); context 383 fs/nfs/fscache.c void *context, context 388 fs/nfs/fscache.c page, context, error); context 396 fs/nfs/fscache.c error = nfs_readpage_async(context, page->mapping->host, page); context 5157 fs/nfs/nfs4proc.c .state = hdr->args.context->state, context 5179 fs/nfs/nfs4proc.c args->context, context 5220 fs/nfs/nfs4proc.c if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context, context 5224 fs/nfs/nfs4proc.c if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) context 5238 fs/nfs/nfs4proc.c .state = hdr->args.context->state, context 5262 fs/nfs/nfs4proc.c args->context, context 1532 fs/nfs/nfs4trace.h hdr->args.context->state; context 1592 fs/nfs/nfs4trace.h hdr->args.context->state; context 538 fs/nfs/pagelist.c if (hdr->args.context) context 539 fs/nfs/pagelist.c put_nfs_open_context(hdr->args.context); context 578 fs/nfs/pagelist.c hdr->args.context = get_nfs_open_context(nfs_req_openctx(req)); context 2613 fs/nfs/pnfs.c set_bit(NFS_CONTEXT_RESEND_WRITES, &hdr->args.context->flags); context 250 fs/nfs/pnfs_nfs.c WARN_ON_ONCE(data->context); context 2037 fs/nfs/super.c memset(data->context, 0, sizeof(data->context)); context 2107 fs/nfs/super.c if (data->context[0]){ context 2110 fs/nfs/super.c data->context[NFS_MAX_CONTEXT_LEN] = '\0'; context 2111 fs/nfs/super.c rc = security_add_mnt_opt("context", data->context, context 2112 fs/nfs/super.c strlen(data->context), &args->lsm_opts); context 1684 fs/nfs/write.c put_nfs_open_context(data->context); context 1770 fs/nfs/write.c data->context = get_nfs_open_context(ctx); context 2307 fs/nfsd/nfs4xdr.c void *context, int len) context 2321 fs/nfsd/nfs4xdr.c p = xdr_encode_opaque(p, context, len); context 2327 fs/nfsd/nfs4xdr.c void *context, int len) context 2424 fs/nfsd/nfs4xdr.c void *context = NULL; context 2483 fs/nfsd/nfs4xdr.c &context, &contextlen); context 2913 fs/nfsd/nfs4xdr.c status = nfsd4_encode_security_label(xdr, rqstp, context, context 2926 fs/nfsd/nfs4xdr.c if (context) context 2927 fs/nfsd/nfs4xdr.c security_release_secctx(context, contextlen); context 52 fs/ocfs2/move_extents.c struct ocfs2_move_extents_context *context, context 57 fs/ocfs2/move_extents.c struct inode *inode = context->inode; context 62 fs/ocfs2/move_extents.c u64 ino = ocfs2_metadata_cache_owner(context->et.et_ci); context 78 fs/ocfs2/move_extents.c path = ocfs2_new_path_from_et(&context->et); context 111 fs/ocfs2/move_extents.c context->et.et_root_bh, context 118 fs/ocfs2/move_extents.c ret = ocfs2_split_extent(handle, &context->et, path, index, context 119 fs/ocfs2/move_extents.c &replace_rec, context->meta_ac, context 120 fs/ocfs2/move_extents.c &context->dealloc); context 126 fs/ocfs2/move_extents.c ocfs2_journal_dirty(handle, context->et.et_root_bh); context 128 fs/ocfs2/move_extents.c context->new_phys_cpos = new_p_cpos; context 138 fs/ocfs2/move_extents.c len, context->meta_ac, context 139 fs/ocfs2/move_extents.c &context->dealloc, 1); context 207 fs/ocfs2/move_extents.c static int ocfs2_defrag_extent(struct ocfs2_move_extents_context *context, context 210 fs/ocfs2/move_extents.c int ret, credits = 0, extra_blocks = 0, partial = context->partial; context 212 fs/ocfs2/move_extents.c struct inode *inode = context->inode; context 222 fs/ocfs2/move_extents.c BUG_ON(!context->refcount_loc); context 224 fs/ocfs2/move_extents.c ret = ocfs2_lock_refcount_tree(osb, context->refcount_loc, 1, context 232 fs/ocfs2/move_extents.c context->refcount_loc, context 243 fs/ocfs2/move_extents.c ret = ocfs2_lock_meta_allocator_move_extents(inode, &context->et, context 245 fs/ocfs2/move_extents.c &context->meta_ac, context 278 fs/ocfs2/move_extents.c ret = ocfs2_reserve_clusters(osb, *len, &context->data_ac); context 291 fs/ocfs2/move_extents.c ret = __ocfs2_claim_clusters(handle, context->data_ac, 1, *len, context 307 fs/ocfs2/move_extents.c context->range->me_flags &= ~OCFS2_MOVE_EXT_FL_COMPLETE; context 317 fs/ocfs2/move_extents.c ret = __ocfs2_move_extent(handle, context, cpos, new_len, phys_cpos, context 329 fs/ocfs2/move_extents.c ret = ocfs2_cow_sync_writeback(inode->i_sb, context->inode, cpos, *len); context 334 fs/ocfs2/move_extents.c if (need_free && context->data_ac) { context 335 fs/ocfs2/move_extents.c struct ocfs2_alloc_context *data_ac = context->data_ac; context 337 fs/ocfs2/move_extents.c if (context->data_ac->ac_which == OCFS2_AC_USE_LOCAL) context 353 fs/ocfs2/move_extents.c if (context->data_ac) { context 354 fs/ocfs2/move_extents.c ocfs2_free_alloc_context(context->data_ac); context 355 fs/ocfs2/move_extents.c context->data_ac = NULL; context 358 fs/ocfs2/move_extents.c if (context->meta_ac) { context 359 fs/ocfs2/move_extents.c ocfs2_free_alloc_context(context->meta_ac); context 360 fs/ocfs2/move_extents.c context->meta_ac = NULL; context 573 fs/ocfs2/move_extents.c static int ocfs2_move_extent(struct ocfs2_move_extents_context *context, context 579 fs/ocfs2/move_extents.c struct inode *inode = context->inode; context 588 fs/ocfs2/move_extents.c context->range->me_threshold); context 595 fs/ocfs2/move_extents.c BUG_ON(!context->refcount_loc); context 597 fs/ocfs2/move_extents.c ret = ocfs2_lock_refcount_tree(osb, context->refcount_loc, 1, context 605 fs/ocfs2/move_extents.c context->refcount_loc, context 616 fs/ocfs2/move_extents.c ret = ocfs2_lock_meta_allocator_move_extents(inode, &context->et, context 618 fs/ocfs2/move_extents.c &context->meta_ac, context 683 fs/ocfs2/move_extents.c ret = __ocfs2_move_extent(handle, context, cpos, len, phys_cpos, context 710 fs/ocfs2/move_extents.c ret = ocfs2_cow_sync_writeback(inode->i_sb, context->inode, cpos, len); context 728 fs/ocfs2/move_extents.c if (context->meta_ac) { context 729 fs/ocfs2/move_extents.c ocfs2_free_alloc_context(context->meta_ac); context 730 fs/ocfs2/move_extents.c context->meta_ac = NULL; context 770 fs/ocfs2/move_extents.c struct ocfs2_move_extents_context *context) context 776 fs/ocfs2/move_extents.c struct inode *inode = context->inode; context 778 fs/ocfs2/move_extents.c struct ocfs2_move_extents *range = context->range; context 787 fs/ocfs2/move_extents.c context->refcount_loc = le64_to_cpu(di->i_refcount_loc); context 789 fs/ocfs2/move_extents.c ocfs2_init_dinode_extent_tree(&context->et, INODE_CACHE(inode), di_bh); context 790 fs/ocfs2/move_extents.c ocfs2_init_dealloc_ctxt(&context->dealloc); context 798 fs/ocfs2/move_extents.c do_defrag = context->auto_defrag; context 868 fs/ocfs2/move_extents.c ret = ocfs2_defrag_extent(context, cpos, phys_cpos, context 871 fs/ocfs2/move_extents.c ret = ocfs2_move_extent(context, cpos, phys_cpos, context 883 fs/ocfs2/move_extents.c context->clusters_moved += alloc_size; context 894 fs/ocfs2/move_extents.c context->clusters_moved); context 896 fs/ocfs2/move_extents.c context->new_phys_cpos); context 899 fs/ocfs2/move_extents.c ocfs2_run_deallocs(osb, &context->dealloc); context 904 fs/ocfs2/move_extents.c static int ocfs2_move_extents(struct ocfs2_move_extents_context *context) context 908 fs/ocfs2/move_extents.c struct inode *inode = context->inode; context 938 fs/ocfs2/move_extents.c status = __ocfs2_move_extents_range(di_bh, context); context 991 fs/ocfs2/move_extents.c struct ocfs2_move_extents_context *context; context 1010 fs/ocfs2/move_extents.c context = kzalloc(sizeof(struct ocfs2_move_extents_context), GFP_NOFS); context 1011 fs/ocfs2/move_extents.c if (!context) { context 1017 fs/ocfs2/move_extents.c context->inode = inode; context 1018 fs/ocfs2/move_extents.c context->file = filp; context 1033 fs/ocfs2/move_extents.c context->range = ⦥ context 1036 fs/ocfs2/move_extents.c context->auto_defrag = 1; context 1049 fs/ocfs2/move_extents.c context->partial = 1; context 1063 fs/ocfs2/move_extents.c status = ocfs2_move_extents(context); context 1076 fs/ocfs2/move_extents.c kfree(context); context 57 fs/ocfs2/refcounttree.c int (*get_clusters)(struct ocfs2_cow_context *context, context 3112 fs/ocfs2/refcounttree.c struct ocfs2_cow_context *context, context 3118 fs/ocfs2/refcounttree.c struct ocfs2_caching_info *ci = context->data_et.et_ci; context 3126 fs/ocfs2/refcounttree.c ret = context->cow_duplicate_clusters(handle, context->inode, context 3134 fs/ocfs2/refcounttree.c ret = ocfs2_clear_ext_refcount(handle, &context->data_et, context 3136 fs/ocfs2/refcounttree.c context->meta_ac, &context->dealloc); context 3193 fs/ocfs2/refcounttree.c static int ocfs2_di_get_clusters(struct ocfs2_cow_context *context, context 3198 fs/ocfs2/refcounttree.c return ocfs2_get_clusters(context->inode, v_cluster, p_cluster, context 3203 fs/ocfs2/refcounttree.c struct ocfs2_cow_context *context, context 3213 fs/ocfs2/refcounttree.c struct ocfs2_caching_info *ref_ci = &context->ref_tree->rf_ci; context 3220 fs/ocfs2/refcounttree.c &context->data_et, context 3222 fs/ocfs2/refcounttree.c context->ref_root_bh, context 3223 fs/ocfs2/refcounttree.c &context->meta_ac, context 3224 fs/ocfs2/refcounttree.c &context->data_ac, &credits); context 3230 fs/ocfs2/refcounttree.c if (context->post_refcount) context 3231 fs/ocfs2/refcounttree.c credits += context->post_refcount->credits; context 3233 fs/ocfs2/refcounttree.c credits += context->extra_credits; context 3244 fs/ocfs2/refcounttree.c ret = ocfs2_get_refcount_rec(ref_ci, context->ref_root_bh, context 3267 fs/ocfs2/refcounttree.c &context->data_et, context 3270 fs/ocfs2/refcounttree.c context->meta_ac, context 3271 fs/ocfs2/refcounttree.c &context->dealloc); context 3280 fs/ocfs2/refcounttree.c context->data_ac, context 3288 fs/ocfs2/refcounttree.c ret = ocfs2_replace_clusters(handle, context, context 3299 fs/ocfs2/refcounttree.c context->ref_root_bh, context 3301 fs/ocfs2/refcounttree.c context->meta_ac, context 3302 fs/ocfs2/refcounttree.c &context->dealloc, delete); context 3316 fs/ocfs2/refcounttree.c if (context->post_refcount && context->post_refcount->func) { context 3317 fs/ocfs2/refcounttree.c ret = context->post_refcount->func(context->inode, handle, context 3318 fs/ocfs2/refcounttree.c context->post_refcount->para); context 3329 fs/ocfs2/refcounttree.c if (context->get_clusters == ocfs2_di_get_clusters) { context 3330 fs/ocfs2/refcounttree.c ret = ocfs2_cow_sync_writeback(sb, context->inode, cpos, context 3340 fs/ocfs2/refcounttree.c if (context->data_ac) { context 3341 fs/ocfs2/refcounttree.c ocfs2_free_alloc_context(context->data_ac); context 3342 fs/ocfs2/refcounttree.c context->data_ac = NULL; context 3344 fs/ocfs2/refcounttree.c if (context->meta_ac) { context 3345 fs/ocfs2/refcounttree.c ocfs2_free_alloc_context(context->meta_ac); context 3346 fs/ocfs2/refcounttree.c context->meta_ac = NULL; context 3353 fs/ocfs2/refcounttree.c static int ocfs2_replace_cow(struct ocfs2_cow_context *context) context 3356 fs/ocfs2/refcounttree.c struct inode *inode = context->inode; context 3357 fs/ocfs2/refcounttree.c u32 cow_start = context->cow_start, cow_len = context->cow_len; context 3367 fs/ocfs2/refcounttree.c ocfs2_init_dealloc_ctxt(&context->dealloc); context 3370 fs/ocfs2/refcounttree.c ret = context->get_clusters(context, cow_start, &p_cluster, context 3382 fs/ocfs2/refcounttree.c ret = ocfs2_make_clusters_writable(inode->i_sb, context, context 3394 fs/ocfs2/refcounttree.c if (ocfs2_dealloc_has_cluster(&context->dealloc)) { context 3396 fs/ocfs2/refcounttree.c ocfs2_run_deallocs(osb, &context->dealloc); context 3417 fs/ocfs2/refcounttree.c struct ocfs2_cow_context *context = NULL; context 3435 fs/ocfs2/refcounttree.c context = kzalloc(sizeof(struct ocfs2_cow_context), GFP_NOFS); context 3436 fs/ocfs2/refcounttree.c if (!context) { context 3449 fs/ocfs2/refcounttree.c context->inode = inode; context 3450 fs/ocfs2/refcounttree.c context->cow_start = cow_start; context 3451 fs/ocfs2/refcounttree.c context->cow_len = cow_len; context 3452 fs/ocfs2/refcounttree.c context->ref_tree = ref_tree; context 3453 fs/ocfs2/refcounttree.c context->ref_root_bh = ref_root_bh; context 3454 fs/ocfs2/refcounttree.c context->cow_duplicate_clusters = ocfs2_duplicate_clusters_by_page; context 3455 fs/ocfs2/refcounttree.c context->get_clusters = ocfs2_di_get_clusters; context 3457 fs/ocfs2/refcounttree.c ocfs2_init_dinode_extent_tree(&context->data_et, context 3460 fs/ocfs2/refcounttree.c ret = ocfs2_replace_cow(context); context 3474 fs/ocfs2/refcounttree.c kfree(context); context 3518 fs/ocfs2/refcounttree.c static int ocfs2_xattr_value_get_clusters(struct ocfs2_cow_context *context, context 3523 fs/ocfs2/refcounttree.c struct inode *inode = context->inode; context 3524 fs/ocfs2/refcounttree.c struct ocfs2_xattr_value_root *xv = context->cow_object; context 3629 fs/ocfs2/refcounttree.c struct ocfs2_cow_context *context = NULL; context 3644 fs/ocfs2/refcounttree.c context = kzalloc(sizeof(struct ocfs2_cow_context), GFP_NOFS); context 3645 fs/ocfs2/refcounttree.c if (!context) { context 3651 fs/ocfs2/refcounttree.c context->inode = inode; context 3652 fs/ocfs2/refcounttree.c context->cow_start = cow_start; context 3653 fs/ocfs2/refcounttree.c context->cow_len = cow_len; context 3654 fs/ocfs2/refcounttree.c context->ref_tree = ref_tree; context 3655 fs/ocfs2/refcounttree.c context->ref_root_bh = ref_root_bh; context 3656 fs/ocfs2/refcounttree.c context->cow_object = xv; context 3658 fs/ocfs2/refcounttree.c context->cow_duplicate_clusters = ocfs2_duplicate_clusters_by_jbd; context 3660 fs/ocfs2/refcounttree.c context->extra_credits = context 3662 fs/ocfs2/refcounttree.c context->get_clusters = ocfs2_xattr_value_get_clusters; context 3663 fs/ocfs2/refcounttree.c context->post_refcount = post; context 3665 fs/ocfs2/refcounttree.c ocfs2_init_xattr_value_extent_tree(&context->data_et, context 3668 fs/ocfs2/refcounttree.c ret = ocfs2_replace_cow(context); context 3673 fs/ocfs2/refcounttree.c kfree(context); context 71 fs/xfs/libxfs/xfs_attr_leaf.h struct xfs_attr_list_context *context); context 94 fs/xfs/scrub/attr.c struct xfs_attr_list_context context; context 108 fs/xfs/scrub/attr.c struct xfs_attr_list_context *context, context 118 fs/xfs/scrub/attr.c sx = container_of(context, struct xchk_xattr, context); context 121 fs/xfs/scrub/attr.c context->seen_enough = error; context 127 fs/xfs/scrub/attr.c xchk_ino_set_preen(sx->sc, context->dp->i_ino); context 146 fs/xfs/scrub/attr.c context->seen_enough = error; context 155 fs/xfs/scrub/attr.c args.geo = context->dp->i_mount->m_attr_geo; context 157 fs/xfs/scrub/attr.c args.dp = context->dp; context 161 fs/xfs/scrub/attr.c args.trans = context->tp; context 165 fs/xfs/scrub/attr.c error = xfs_attr_get_ilocked(context->dp, &args); context 174 fs/xfs/scrub/attr.c context->seen_enough = 1; context 494 fs/xfs/scrub/attr.c sx.context.dp = sc->ip; context 495 fs/xfs/scrub/attr.c sx.context.cursor = &cursor; context 496 fs/xfs/scrub/attr.c sx.context.resynch = 1; context 497 fs/xfs/scrub/attr.c sx.context.put_listent = xchk_xattr_listent; context 498 fs/xfs/scrub/attr.c sx.context.tp = sc->tp; context 499 fs/xfs/scrub/attr.c sx.context.flags = ATTR_INCOMPLETE; context 518 fs/xfs/scrub/attr.c error = xfs_attr_list_int_ilocked(&sx.context); context 523 fs/xfs/scrub/attr.c if (sx.context.seen_enough < 0) context 524 fs/xfs/scrub/attr.c error = sx.context.seen_enough; context 52 fs/xfs/xfs_attr_list.c xfs_attr_shortform_list(xfs_attr_list_context_t *context) context 61 fs/xfs/xfs_attr_list.c ASSERT(context != NULL); context 62 fs/xfs/xfs_attr_list.c dp = context->dp; context 69 fs/xfs/xfs_attr_list.c cursor = context->cursor; context 72 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_sf(context); context 83 fs/xfs/xfs_attr_list.c if (context->bufsize == 0 || context 85 fs/xfs/xfs_attr_list.c (dp->i_afp->if_bytes + sf->hdr.count * 16) < context->bufsize)) { context 87 fs/xfs/xfs_attr_list.c context->put_listent(context, context 96 fs/xfs/xfs_attr_list.c if (context->seen_enough) context 100 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_sf_all(context); context 105 fs/xfs/xfs_attr_list.c if (context->bufsize == 0) context 125 fs/xfs/xfs_attr_list.c context->dp->i_mount, sfe, context 177 fs/xfs/xfs_attr_list.c context->put_listent(context, context 182 fs/xfs/xfs_attr_list.c if (context->seen_enough) context 197 fs/xfs/xfs_attr_list.c struct xfs_attr_list_context *context, context 204 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; context 206 fs/xfs/xfs_attr_list.c struct xfs_trans *tp = context->tp; context 250 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_node_descend(context, context 278 fs/xfs/xfs_attr_list.c struct xfs_attr_list_context *context) context 285 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; context 289 fs/xfs/xfs_attr_list.c trace_xfs_attr_node_list(context); context 291 fs/xfs/xfs_attr_list.c cursor = context->cursor; context 301 fs/xfs/xfs_attr_list.c error = xfs_da3_node_read(context->tp, dp, cursor->blkno, -1, context 312 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_wrong_blk(context); context 313 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 324 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_wrong_blk(context); context 325 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 329 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_wrong_blk(context); context 330 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 335 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_wrong_blk(context); context 336 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 348 fs/xfs/xfs_attr_list.c error = xfs_attr_node_list_lookup(context, cursor, &bp); context 361 fs/xfs/xfs_attr_list.c xfs_attr3_leaf_list_int(bp, context); context 363 fs/xfs/xfs_attr_list.c if (context->seen_enough || leafhdr.forw == 0) context 366 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 367 fs/xfs/xfs_attr_list.c error = xfs_attr3_leaf_read(context->tp, dp, cursor->blkno, -1, &bp); context 371 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 381 fs/xfs/xfs_attr_list.c struct xfs_attr_list_context *context) context 389 fs/xfs/xfs_attr_list.c struct xfs_mount *mp = context->dp->i_mount; context 391 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_leaf(context); context 397 fs/xfs/xfs_attr_list.c cursor = context->cursor; context 403 fs/xfs/xfs_attr_list.c if (context->resynch) { context 407 fs/xfs/xfs_attr_list.c if (cursor->offset == context->dupcnt) { context 408 fs/xfs/xfs_attr_list.c context->dupcnt = 0; context 411 fs/xfs/xfs_attr_list.c context->dupcnt++; context 414 fs/xfs/xfs_attr_list.c context->dupcnt = 0; context 419 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_notfound(context); context 426 fs/xfs/xfs_attr_list.c context->resynch = 0; context 441 fs/xfs/xfs_attr_list.c !(context->flags & ATTR_INCOMPLETE)) context 460 fs/xfs/xfs_attr_list.c context->put_listent(context, entry->flags, context 462 fs/xfs/xfs_attr_list.c if (context->seen_enough) context 466 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_leaf_end(context); context 474 fs/xfs/xfs_attr_list.c xfs_attr_leaf_list(xfs_attr_list_context_t *context) context 479 fs/xfs/xfs_attr_list.c trace_xfs_attr_leaf_list(context); context 481 fs/xfs/xfs_attr_list.c context->cursor->blkno = 0; context 482 fs/xfs/xfs_attr_list.c error = xfs_attr3_leaf_read(context->tp, context->dp, 0, -1, &bp); context 486 fs/xfs/xfs_attr_list.c xfs_attr3_leaf_list_int(bp, context); context 487 fs/xfs/xfs_attr_list.c xfs_trans_brelse(context->tp, bp); context 493 fs/xfs/xfs_attr_list.c struct xfs_attr_list_context *context) context 495 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; context 505 fs/xfs/xfs_attr_list.c return xfs_attr_shortform_list(context); context 507 fs/xfs/xfs_attr_list.c return xfs_attr_leaf_list(context); context 508 fs/xfs/xfs_attr_list.c return xfs_attr_node_list(context); context 513 fs/xfs/xfs_attr_list.c xfs_attr_list_context_t *context) context 516 fs/xfs/xfs_attr_list.c xfs_inode_t *dp = context->dp; context 525 fs/xfs/xfs_attr_list.c error = xfs_attr_list_int_ilocked(context); context 543 fs/xfs/xfs_attr_list.c xfs_attr_list_context_t *context, context 549 fs/xfs/xfs_attr_list.c struct attrlist *alist = (struct attrlist *)context->alist; context 553 fs/xfs/xfs_attr_list.c ASSERT(!context->seen_enough); context 554 fs/xfs/xfs_attr_list.c ASSERT(!(context->flags & ATTR_KERNOVAL)); context 555 fs/xfs/xfs_attr_list.c ASSERT(context->count >= 0); context 556 fs/xfs/xfs_attr_list.c ASSERT(context->count < (ATTR_MAX_VALUELEN/8)); context 557 fs/xfs/xfs_attr_list.c ASSERT(context->firstu >= sizeof(*alist)); context 558 fs/xfs/xfs_attr_list.c ASSERT(context->firstu <= context->bufsize); context 563 fs/xfs/xfs_attr_list.c if (((context->flags & ATTR_SECURE) == 0) != context 566 fs/xfs/xfs_attr_list.c if (((context->flags & ATTR_ROOT) == 0) != context 571 fs/xfs/xfs_attr_list.c context->count * sizeof(alist->al_offset[0]); context 572 fs/xfs/xfs_attr_list.c context->firstu -= ATTR_ENTSIZE(namelen); context 573 fs/xfs/xfs_attr_list.c if (context->firstu < arraytop) { context 574 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_full(context); context 576 fs/xfs/xfs_attr_list.c context->seen_enough = 1; context 580 fs/xfs/xfs_attr_list.c aep = (attrlist_ent_t *)&context->alist[context->firstu]; context 584 fs/xfs/xfs_attr_list.c alist->al_offset[context->count++] = context->firstu; context 585 fs/xfs/xfs_attr_list.c alist->al_count = context->count; context 586 fs/xfs/xfs_attr_list.c trace_xfs_attr_list_add(context); context 605 fs/xfs/xfs_attr_list.c xfs_attr_list_context_t context; context 633 fs/xfs/xfs_attr_list.c memset(&context, 0, sizeof(context)); context 634 fs/xfs/xfs_attr_list.c context.dp = dp; context 635 fs/xfs/xfs_attr_list.c context.cursor = cursor; context 636 fs/xfs/xfs_attr_list.c context.resynch = 1; context 637 fs/xfs/xfs_attr_list.c context.flags = flags; context 638 fs/xfs/xfs_attr_list.c context.alist = buffer; context 639 fs/xfs/xfs_attr_list.c context.bufsize = (bufsize & ~(sizeof(int)-1)); /* align */ context 640 fs/xfs/xfs_attr_list.c context.firstu = context.bufsize; context 641 fs/xfs/xfs_attr_list.c context.put_listent = xfs_attr_put_listent; context 643 fs/xfs/xfs_attr_list.c alist = (struct attrlist *)context.alist; context 646 fs/xfs/xfs_attr_list.c alist->al_offset[0] = context.bufsize; context 648 fs/xfs/xfs_attr_list.c error = xfs_attr_list_int(&context); context 130 fs/xfs/xfs_ioctl.c void *context, context 120 fs/xfs/xfs_xattr.c struct xfs_attr_list_context *context, context 129 fs/xfs/xfs_xattr.c if (context->count < 0 || context->seen_enough) context 132 fs/xfs/xfs_xattr.c if (!context->alist) context 135 fs/xfs/xfs_xattr.c arraytop = context->count + prefix_len + namelen + 1; context 136 fs/xfs/xfs_xattr.c if (arraytop > context->firstu) { context 137 fs/xfs/xfs_xattr.c context->count = -1; /* insufficient space */ context 138 fs/xfs/xfs_xattr.c context->seen_enough = 1; context 141 fs/xfs/xfs_xattr.c offset = (char *)context->alist + context->count; context 149 fs/xfs/xfs_xattr.c context->count += prefix_len + namelen + 1; context 155 fs/xfs/xfs_xattr.c struct xfs_attr_list_context *context, context 164 fs/xfs/xfs_xattr.c ASSERT(context->count >= 0); context 172 fs/xfs/xfs_xattr.c context, XATTR_SYSTEM_PREFIX, context 180 fs/xfs/xfs_xattr.c context, XATTR_SYSTEM_PREFIX, context 204 fs/xfs/xfs_xattr.c __xfs_xattr_put_listent(context, prefix, prefix_len, name, context 215 fs/xfs/xfs_xattr.c struct xfs_attr_list_context context; context 223 fs/xfs/xfs_xattr.c memset(&context, 0, sizeof(context)); context 224 fs/xfs/xfs_xattr.c context.dp = XFS_I(inode); context 225 fs/xfs/xfs_xattr.c context.cursor = &cursor; context 226 fs/xfs/xfs_xattr.c context.resynch = 1; context 227 fs/xfs/xfs_xattr.c context.alist = size ? data : NULL; context 228 fs/xfs/xfs_xattr.c context.bufsize = size; context 229 fs/xfs/xfs_xattr.c context.firstu = context.bufsize; context 230 fs/xfs/xfs_xattr.c context.put_listent = xfs_xattr_put_listent; context 232 fs/xfs/xfs_xattr.c error = xfs_attr_list_int(&context); context 235 fs/xfs/xfs_xattr.c if (context.count < 0) context 238 fs/xfs/xfs_xattr.c return context.count; context 310 include/acpi/acpi_bus.h void (*func)(struct acpi_device_wakeup_context *context); context 320 include/acpi/acpi_bus.h struct acpi_device_wakeup_context context; context 612 include/acpi/acpi_bus.h void (*func)(struct acpi_device_wakeup_context *context)); context 624 include/acpi/acpi_bus.h void (*func)(struct acpi_device_wakeup_context *context)) context 229 include/acpi/acpiosxf.h void *context); context 248 include/acpi/acpiosxf.h acpi_osd_exec_callback function, void *context); context 495 include/acpi/acpixf.h handler, void *context)) context 511 include/acpi/acpixf.h void *context, context 516 include/acpi/acpixf.h void *context, context 588 include/acpi/acpixf.h void *context)) context 595 include/acpi/acpixf.h void *context)) context 602 include/acpi/acpixf.h *context)) context 614 include/acpi/acpixf.h void *context)) context 622 include/acpi/acpixf.h void *context)) context 633 include/acpi/acpixf.h void *context)) context 648 include/acpi/acpixf.h void *context)) context 773 include/acpi/acpixf.h void *context); context 799 include/acpi/acpixf.h void *context)) context 803 include/acpi/acpixf.h user_function, void *context)) context 1025 include/acpi/actypes.h (ACPI_SYSTEM_XFACE * acpi_osd_handler) (void *context); context 1028 include/acpi/actypes.h (ACPI_SYSTEM_XFACE * acpi_osd_exec_callback) (void *context); context 1034 include/acpi/actypes.h u32 (*acpi_sci_handler) (void *context); context 1039 include/acpi/actypes.h u32 event_number, void *context); context 1045 include/acpi/actypes.h u32(*acpi_event_handler) (void *context); context 1048 include/acpi/actypes.h u32 (*acpi_gpe_handler) (acpi_handle gpe_device, u32 gpe_number, void *context); context 1051 include/acpi/actypes.h void (*acpi_notify_handler) (acpi_handle device, u32 value, void *context); context 1065 include/acpi/actypes.h u32 aml_offset, void *context); context 1070 include/acpi/actypes.h acpi_status (*acpi_table_handler) (u32 event, void *table, void *context); context 1112 include/acpi/actypes.h void *context, void **return_value); context 111 include/clocksource/timer-ti-dm.h struct timer_regs context; context 323 include/clocksource/timer-ti-dm.h timer->context.tsicr = OMAP_TIMER_CTRL_POSTED; context 351 include/drm/drm_device.h int context; context 777 include/drm/drm_drv.h int (*context_dtor) (struct drm_device *dev, int context); context 65 include/drm/drm_legacy.h int context; /**< Kernel queue for this buffer */ context 145 include/linux/acpi.h int (*create_thread)(acpi_osd_exec_callback function, void *context); context 163 include/linux/acpi.h int acpi_debugger_create_thread(acpi_osd_exec_callback function, void *context); context 185 include/linux/acpi.h void *context) context 365 include/linux/acpi.h typedef void (*wmi_notify_handler) (u32 value, void *context); context 477 include/linux/acpi.h int wake_irq, bool (*wakeup)(void *context), void *context); context 479 include/linux/acpi.h bool (*wakeup)(void *context), void *context); context 488 include/linux/acpi.h acpi_status acpi_run_osc(acpi_handle handle, struct acpi_osc_context *context); context 16 include/linux/asn1_ber_bytecode.h typedef int (*asn1_action_t)(void *context, context 16 include/linux/asn1_decoder.h void *context, context 162 include/linux/clk/ti.h u32 context; context 14 include/linux/compiler_types.h # define __must_hold(x) __attribute__((context(x,1,1))) context 15 include/linux/compiler_types.h # define __acquires(x) __attribute__((context(x,0,1))) context 16 include/linux/compiler_types.h # define __releases(x) __attribute__((context(x,1,0))) context 25 include/linux/dm-dirty-log.h void *context; context 28 include/linux/dm-io.h typedef void (*io_notify_fn)(unsigned long error, void *context); context 52 include/linux/dm-io.h void *context; /* Passed to callback */ context 63 include/linux/dm-kcopyd.h void *context); context 67 include/linux/dm-kcopyd.h unsigned flags, dm_kcopyd_notify_fn fn, void *context); context 81 include/linux/dm-kcopyd.h dm_kcopyd_notify_fn fn, void *context); context 86 include/linux/dm-kcopyd.h unsigned flags, dm_kcopyd_notify_fn fn, void *context); context 36 include/linux/dm-region-hash.h void *context, void (*dispatch_bios)(void *context, context 38 include/linux/dm-region-hash.h void (*wakeup_workers)(void *context), context 39 include/linux/dm-region-hash.h void (*wakeup_all_recovery_waiters)(void *context), context 79 include/linux/dma-fence-array.h u64 context, unsigned seqno, context 82 include/linux/dma-fence-array.h bool dma_fence_match_context(struct dma_fence *fence, u64 context); context 91 include/linux/dma-fence.h u64 context; context 267 include/linux/dma-fence.h spinlock_t *lock, u64 context, u64 seqno); context 462 include/linux/dma-fence.h if (WARN_ON(f1->context != f2->context)) context 480 include/linux/dma-fence.h if (WARN_ON(f1->context != f2->context)) context 581 include/linux/dma-fence.h __ff->context, __ff->seqno, ##args); \ context 587 include/linux/dma-fence.h pr_warn("f %llu#%llu: " fmt, __ff->context, __ff->seqno,\ context 594 include/linux/dma-fence.h pr_err("f %llu#%llu: " fmt, __ff->context, __ff->seqno, \ context 782 include/linux/dmaengine.h unsigned long flags, void *context); context 219 include/linux/exportfs.h void *context); context 434 include/linux/firewire.h typedef void (*fw_iso_callback_t)(struct fw_iso_context *context, context 437 include/linux/firewire.h typedef void (*fw_iso_mc_callback_t)(struct fw_iso_context *context, context 49 include/linux/firmware.h const char *name, struct device *device, gfp_t gfp, void *context, context 50 include/linux/firmware.h void (*cont)(const struct firmware *fw, void *context)); context 74 include/linux/firmware.h const char *name, struct device *device, gfp_t gfp, void *context, context 75 include/linux/firmware.h void (*cont)(const struct firmware *fw, void *context)) context 148 include/linux/fscache-cache.h void *context; /* netfs read context (pinned) */ context 481 include/linux/fscache-cache.h op->end_io_func(page, op->context, error); context 51 include/linux/fscache.h void *context, context 97 include/linux/fscache.h void (*get_context)(void *cookie_netfs_data, void *context); context 103 include/linux/fscache.h void (*put_context)(void *cookie_netfs_data, void *context); context 543 include/linux/fscache.h void *context, context 548 include/linux/fscache.h context, gfp); context 594 include/linux/fscache.h void *context, context 600 include/linux/fscache.h context, gfp); context 59 include/linux/fsl/bestcomm/bestcomm_priv.h u32 context; context 189 include/linux/hsi/hsi.h void *context; context 50 include/linux/hw_breakpoint.h void *context, context 66 include/linux/hw_breakpoint.h void *context, context 72 include/linux/hw_breakpoint.h void *context); context 98 include/linux/hw_breakpoint.h void *context, context 110 include/linux/hw_breakpoint.h void *context, context 115 include/linux/hw_breakpoint.h void *context) { return NULL; } context 767 include/linux/hyperv.h void (*onchannel_callback)(void *context); context 1001 include/linux/hyperv.h void vmbus_onmessage(void *context); context 1069 include/linux/hyperv.h void (*onchannel_callback)(void *context), context 1070 include/linux/hyperv.h void *context); context 1078 include/linux/hyperv.h void (*onchannel_callback)(void *context), context 1079 include/linux/hyperv.h void *context); context 1489 include/linux/hyperv.h void (*callback)(void *context); context 1608 include/linux/hyperv.h int hyperv_reg_block_invalidate(struct pci_dev *dev, void *context, context 1609 include/linux/hyperv.h void (*block_invalidate)(void *context, context 1617 include/linux/hyperv.h int (*reg_blk_invalidate)(struct pci_dev *dev, void *context, context 1618 include/linux/hyperv.h void (*block_invalidate)(void *context, context 180 include/linux/input/adp5589.h void *context); context 183 include/linux/input/adp5589.h void *context); context 184 include/linux/input/adp5589.h void *context; context 700 include/linux/intel-iommu.h bool context_present(struct context_entry *context); context 199 include/linux/mfd/tps65010.h int (*setup)(struct i2c_client *client, void *context); context 200 include/linux/mfd/tps65010.h int (*teardown)(struct i2c_client *client, void *context); context 201 include/linux/mfd/tps65010.h void *context; context 59 include/linux/mlx4/driver.h void (*remove)(struct mlx4_dev *dev, void *context); context 60 include/linux/mlx4/driver.h void (*event) (struct mlx4_dev *dev, void *context, context 62 include/linux/mlx4/driver.h void * (*get_dev)(struct mlx4_dev *dev, void *context, u8 port); context 63 include/linux/mlx4/driver.h void (*activate)(struct mlx4_dev *dev, void *context); context 479 include/linux/mlx4/qp.h struct mlx4_qp_context *context, enum mlx4_qp_optpar optpar, context 483 include/linux/mlx4/qp.h struct mlx4_qp_context *context); context 486 include/linux/mlx4/qp.h struct mlx4_qp_context *context, context 743 include/linux/mlx5/driver.h typedef void (*mlx5_cmd_cbk_t)(int status, void *context); context 757 include/linux/mlx5/driver.h void *context; context 904 include/linux/mlx5/driver.h typedef void (*mlx5_async_cbk_t)(int status, struct mlx5_async_work *context); context 953 include/linux/mlx5/driver.h struct mlx5_async_work *context); context 1063 include/linux/mlx5/driver.h void (*remove)(struct mlx5_core_dev *dev, void *context); context 1064 include/linux/mlx5/driver.h int (*attach)(struct mlx5_core_dev *dev, void *context); context 1065 include/linux/mlx5/driver.h void (*detach)(struct mlx5_core_dev *dev, void *context); context 465 include/linux/mm_types.h mm_context_t context; context 827 include/linux/mod_devicetable.h const void *context; context 639 include/linux/nfs_xdr.h struct nfs_open_context *context; context 1577 include/linux/nfs_xdr.h struct nfs_open_context *context; context 923 include/linux/perf_event.h void *context); context 164 include/linux/platform_data/adp5588.h void *context); context 167 include/linux/platform_data/adp5588.h void *context); context 168 include/linux/platform_data/adp5588.h void *context; context 14 include/linux/platform_data/max732x.h void *context; /* param to setup/teardown */ context 18 include/linux/platform_data/max732x.h void *context); context 21 include/linux/platform_data/max732x.h void *context); context 20 include/linux/platform_data/pca953x.h void *context; /* param to setup/teardown */ context 24 include/linux/platform_data/pca953x.h void *context); context 27 include/linux/platform_data/pca953x.h void *context); context 38 include/linux/platform_data/pcf857x.h void *context); context 41 include/linux/platform_data/pcf857x.h void *context); context 42 include/linux/platform_data/pcf857x.h void *context; context 38 include/linux/qed/qed_iscsi_if.h typedef int (*iscsi_event_cb_t) (void *context, context 63 include/linux/qed/qed_rdma_if.h void *context; context 64 include/linux/qed/qed_rdma_if.h void (*affiliated_event)(void *context, u8 fw_event_code, context 66 include/linux/qed/qed_rdma_if.h void (*unaffiliated_event)(void *context, u8 event_code); context 507 include/linux/qed/qed_rdma_if.h typedef int (*iwarp_event_handler) (void *context, context 372 include/linux/regmap.h int (*reg_read)(void *context, unsigned int reg, unsigned int *val); context 373 include/linux/regmap.h int (*reg_write)(void *context, unsigned int reg, unsigned int val); context 448 include/linux/regmap.h typedef int (*regmap_hw_write)(void *context, const void *data, context 450 include/linux/regmap.h typedef int (*regmap_hw_gather_write)(void *context, context 453 include/linux/regmap.h typedef int (*regmap_hw_async_write)(void *context, context 457 include/linux/regmap.h typedef int (*regmap_hw_read)(void *context, context 460 include/linux/regmap.h typedef int (*regmap_hw_reg_read)(void *context, unsigned int reg, context 462 include/linux/regmap.h typedef int (*regmap_hw_reg_write)(void *context, unsigned int reg, context 464 include/linux/regmap.h typedef int (*regmap_hw_reg_update_bits)(void *context, unsigned int reg, context 467 include/linux/regmap.h typedef void (*regmap_hw_free_context)(void *context); context 88 include/linux/seqno-fence.h struct dma_buf *sync_buf, uint32_t context, context 102 include/linux/seqno-fence.h dma_fence_init(&fence->base, &seqno_fence_ops, lock, context, seqno); context 34 include/linux/spi/eeprom.h void *context; context 897 include/linux/spi/spi.h void (*complete)(void *context); context 898 include/linux/spi/spi.h void *context; context 16 include/linux/ssbi.h ssbi_reg_read(void *context, unsigned int reg, unsigned int *val) context 21 include/linux/ssbi.h ret = ssbi_read(context, reg, &v, 1); context 29 include/linux/ssbi.h ssbi_reg_write(void *context, unsigned int reg, unsigned int val) context 32 include/linux/ssbi.h return ssbi_write(context, reg, &v, 1); context 285 include/linux/uio.h int (*f)(struct kvec *vec, void *context), context 286 include/linux/uio.h void *context); context 1583 include/linux/usb.h void *context; /* (in) context for completion */ context 1612 include/linux/usb.h void *context) context 1620 include/linux/usb.h urb->context = context; context 1642 include/linux/usb.h void *context) context 1649 include/linux/usb.h urb->context = context; context 1683 include/linux/usb.h void *context, context 1691 include/linux/usb.h urb->context = context; context 114 include/linux/usb/gadget.h void *context; context 56 include/linux/usbdevice_fs.h compat_caddr_t context; context 32 include/linux/vexpress.h struct regmap * (*regmap_init)(struct device *dev, void *context); context 33 include/linux/vexpress.h void (*regmap_exit)(struct regmap *regmap, void *context); context 37 include/linux/vexpress.h struct vexpress_config_bridge_ops *ops, void *context); context 132 include/linux/vmw_vmci_defs.h u32 context; context 137 include/linux/vmw_vmci_defs.h (struct vmci_handle){ .context = _cid, .resource = _rid } context 142 include/linux/vmw_vmci_defs.h return h1.context == h2.context && h1.resource == h2.resource; context 147 include/linux/vmw_vmci_defs.h .context = VMCI_INVALID_ID, context 163 include/linux/vmw_vmci_defs.h .context = VMCI_ANON_SRC_CONTEXT_ID, context 39 include/linux/wmi.h int (*probe)(struct wmi_device *wdev, const void *context); context 138 include/misc/ocxl.h int ocxl_context_alloc(struct ocxl_context **context, struct ocxl_afu *afu, context 367 include/net/nfc/nci_core.h void nci_hci_data_received_cb(void *context, struct sk_buff *skb, int err); context 41 include/net/nfc/nfc.h typedef void (*data_exchange_cb_t)(void *context, struct sk_buff *skb, context 44 include/net/nfc/nfc.h typedef void (*se_io_cb_t)(void *context, u8 *apdu, size_t apdu_len, int err); context 198 include/net/nsh.h __be32 context[4]; context 46 include/net/rsi_91x.h void (*set_bt_context)(void *priv, void *context); context 384 include/net/tls.h struct tls_record_info *tls_get_record(struct tls_offload_context_tx *context, context 104 include/rdma/ib_addr.h struct rdma_dev_addr *addr, void *context), context 105 include/rdma/ib_addr.h bool resolve_by_gid_attr, void *context); context 55 include/rdma/ib_cache.h void *context); context 318 include/rdma/ib_cm.h void *context; context 342 include/rdma/ib_cm.h void *context); context 494 include/rdma/ib_mad.h void *context[2]; context 616 include/rdma/ib_mad.h void *context; context 721 include/rdma/ib_mad.h void *context, context 751 include/rdma/ib_mad.h void *context); context 837 include/rdma/ib_mad.h void *context); context 457 include/rdma/ib_sa.h void *context), context 458 include/rdma/ib_sa.h void *context, struct ib_sa_query **query); context 467 include/rdma/ib_sa.h void *context), context 468 include/rdma/ib_sa.h void *context, struct ib_sa_query **sa_query); context 475 include/rdma/ib_sa.h void *context; context 513 include/rdma/ib_sa.h void *context); context 574 include/rdma/ib_sa.h void *context), context 575 include/rdma/ib_sa.h void *context, struct ib_sa_query **sa_query); context 1481 include/rdma/ib_verbs.h struct ib_ucontext *context; /* associated user context */ context 2349 include/rdma/ib_verbs.h int (*add_gid)(const struct ib_gid_attr *attr, void **context); context 2358 include/rdma/ib_verbs.h int (*del_gid)(const struct ib_gid_attr *attr, void **context); context 2361 include/rdma/ib_verbs.h int (*alloc_ucontext)(struct ib_ucontext *context, context 2363 include/rdma/ib_verbs.h void (*dealloc_ucontext)(struct ib_ucontext *context); context 2364 include/rdma/ib_verbs.h int (*mmap)(struct ib_ucontext *context, struct vm_area_struct *vma); context 2464 include/rdma/ib_verbs.h struct ib_ucontext *context, context 2862 include/rdma/ib_verbs.h uobj->context->cleanup_retryable); context 84 include/rdma/iw_cm.h void *context; /* client cb context */ context 130 include/rdma/iw_cm.h iw_cm_handler cm_handler, void *context); context 130 include/rdma/rdma_cm.h void *context; context 141 include/rdma/rdma_cm.h void *context, enum rdma_ucm_port_space ps, context 161 include/rdma/rdma_cm.h #define rdma_create_id(net, event_handler, context, ps, qp_type) \ context 162 include/rdma/rdma_cm.h __rdma_create_id((net), (event_handler), (context), (ps), (qp_type), \ context 335 include/rdma/rdma_cm.h u8 join_state, void *context); context 217 include/rdma/rdma_vt.h struct ib_ucontext *context; context 655 include/rdma/uverbs_ioctl.h struct ib_ucontext *context; context 680 include/rdma/uverbs_ioctl.h ->context, \ context 131 include/rdma/uverbs_std_types.h *ib_dev = attrs->context->device; context 604 include/target/iscsi/iscsi_target_core.h void *context; context 21 include/trace/events/dma_fence.h __field(unsigned int, context) context 28 include/trace/events/dma_fence.h __entry->context = fence->context; context 33 include/trace/events/dma_fence.h __get_str(driver), __get_str(timeline), __entry->context, context 298 include/uapi/drm/drm.h int context; context 403 include/uapi/drm/drm.h int context; /**< Context handle */ context 1148 include/uapi/drm/i915_drm.h #define i915_execbuffer2_set_context_id(eb2, context) \ context 1149 include/uapi/drm/i915_drm.h (eb2).rsvd1 = context & I915_EXEC_CONTEXT_ID_MASK context 428 include/uapi/drm/radeon_drm.h drm_radeon_context_regs_t context; context 59 include/uapi/drm/sis_drm.h int context; context 195 include/uapi/drm/tegra_drm.h __u64 context; context 208 include/uapi/drm/tegra_drm.h __u64 context; context 221 include/uapi/drm/tegra_drm.h __u64 context; context 249 include/uapi/drm/tegra_drm.h __u64 context; context 418 include/uapi/drm/tegra_drm.h __u64 context; context 130 include/uapi/drm/via_drm.h __u32 context; context 46 include/uapi/linux/nfs_mount.h char context[NFS_MAX_CONTEXT_LEN + 1]; /* 6 */ context 524 include/uapi/linux/openvswitch.h __be32 context[NSH_MD1_CONTEXT_SIZE]; context 64 include/uapi/linux/usbdevice_fs.h void __user *context; context 15 include/video/omapvrfb.h u8 context; context 1043 kernel/audit.c static void audit_log_common_recv_msg(struct audit_context *context, context 1054 kernel/audit.c *ab = audit_log_start(context, GFP_KERNEL, msg_type); context 287 kernel/audit.h extern void audit_kill_trees(struct audit_context *context); context 318 kernel/audit.h #define audit_kill_trees(context) BUG() context 527 kernel/audit_tree.c static void audit_tree_log_remove_rule(struct audit_context *context, context 534 kernel/audit_tree.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_CONFIG_CHANGE); context 544 kernel/audit_tree.c static void kill_rules(struct audit_context *context, struct audit_tree *tree) context 555 kernel/audit_tree.c audit_tree_log_remove_rule(context, rule); context 977 kernel/audit_tree.c void audit_kill_trees(struct audit_context *context) context 979 kernel/audit_tree.c struct list_head *list = &context->killed_trees; context 988 kernel/audit_tree.c kill_rules(context, victim); context 847 kernel/auditsc.c static inline void audit_proctitle_free(struct audit_context *context) context 849 kernel/auditsc.c kfree(context->proctitle.value); context 850 kernel/auditsc.c context->proctitle.value = NULL; context 851 kernel/auditsc.c context->proctitle.len = 0; context 854 kernel/auditsc.c static inline void audit_free_module(struct audit_context *context) context 856 kernel/auditsc.c if (context->type == AUDIT_KERN_MODULE) { context 857 kernel/auditsc.c kfree(context->module.name); context 858 kernel/auditsc.c context->module.name = NULL; context 861 kernel/auditsc.c static inline void audit_free_names(struct audit_context *context) context 865 kernel/auditsc.c list_for_each_entry_safe(n, next, &context->names_list, list) { context 872 kernel/auditsc.c context->name_count = 0; context 873 kernel/auditsc.c path_put(&context->pwd); context 874 kernel/auditsc.c context->pwd.dentry = NULL; context 875 kernel/auditsc.c context->pwd.mnt = NULL; context 878 kernel/auditsc.c static inline void audit_free_aux(struct audit_context *context) context 882 kernel/auditsc.c while ((aux = context->aux)) { context 883 kernel/auditsc.c context->aux = aux->next; context 886 kernel/auditsc.c while ((aux = context->aux_pids)) { context 887 kernel/auditsc.c context->aux_pids = aux->next; context 894 kernel/auditsc.c struct audit_context *context; context 896 kernel/auditsc.c context = kzalloc(sizeof(*context), GFP_KERNEL); context 897 kernel/auditsc.c if (!context) context 899 kernel/auditsc.c context->state = state; context 900 kernel/auditsc.c context->prio = state == AUDIT_RECORD_CONTEXT ? ~0ULL : 0; context 901 kernel/auditsc.c INIT_LIST_HEAD(&context->killed_trees); context 902 kernel/auditsc.c INIT_LIST_HEAD(&context->names_list); context 903 kernel/auditsc.c return context; context 917 kernel/auditsc.c struct audit_context *context; context 930 kernel/auditsc.c if (!(context = audit_alloc_context(state))) { context 935 kernel/auditsc.c context->filterkey = key; context 937 kernel/auditsc.c audit_set_context(tsk, context); context 942 kernel/auditsc.c static inline void audit_free_context(struct audit_context *context) context 944 kernel/auditsc.c audit_free_module(context); context 945 kernel/auditsc.c audit_free_names(context); context 946 kernel/auditsc.c unroll_tree_refs(context, NULL, 0); context 947 kernel/auditsc.c free_tree_refs(context); context 948 kernel/auditsc.c audit_free_aux(context); context 949 kernel/auditsc.c kfree(context->filterkey); context 950 kernel/auditsc.c kfree(context->sockaddr); context 951 kernel/auditsc.c audit_proctitle_free(context); context 952 kernel/auditsc.c kfree(context); context 955 kernel/auditsc.c static int audit_log_pid_context(struct audit_context *context, pid_t pid, context 964 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_OBJ_PID); context 987 kernel/auditsc.c static void audit_log_execve_info(struct audit_context *context, context 1024 kernel/auditsc.c audit_log_format(*ab, "argc=%d", context->execve.argc); context 1096 kernel/auditsc.c *ab = audit_log_start(context, context 1153 kernel/auditsc.c } while (arg < context->execve.argc); context 1188 kernel/auditsc.c static void show_special(struct audit_context *context, int *call_panic) context 1193 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, context->type); context 1197 kernel/auditsc.c switch (context->type) { context 1199 kernel/auditsc.c int nargs = context->socketcall.nargs; context 1203 kernel/auditsc.c context->socketcall.args[i]); context 1206 kernel/auditsc.c u32 osid = context->ipc.osid; context 1209 kernel/auditsc.c from_kuid(&init_user_ns, context->ipc.uid), context 1210 kernel/auditsc.c from_kgid(&init_user_ns, context->ipc.gid), context 1211 kernel/auditsc.c context->ipc.mode); context 1223 kernel/auditsc.c if (context->ipc.has_perm) { context 1225 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, context 1231 kernel/auditsc.c context->ipc.qbytes, context 1232 kernel/auditsc.c context->ipc.perm_uid, context 1233 kernel/auditsc.c context->ipc.perm_gid, context 1234 kernel/auditsc.c context->ipc.perm_mode); context 1241 kernel/auditsc.c context->mq_open.oflag, context->mq_open.mode, context 1242 kernel/auditsc.c context->mq_open.attr.mq_flags, context 1243 kernel/auditsc.c context->mq_open.attr.mq_maxmsg, context 1244 kernel/auditsc.c context->mq_open.attr.mq_msgsize, context 1245 kernel/auditsc.c context->mq_open.attr.mq_curmsgs); context 1251 kernel/auditsc.c context->mq_sendrecv.mqdes, context 1252 kernel/auditsc.c context->mq_sendrecv.msg_len, context 1253 kernel/auditsc.c context->mq_sendrecv.msg_prio, context 1254 kernel/auditsc.c (long long) context->mq_sendrecv.abs_timeout.tv_sec, context 1255 kernel/auditsc.c context->mq_sendrecv.abs_timeout.tv_nsec); context 1259 kernel/auditsc.c context->mq_notify.mqdes, context 1260 kernel/auditsc.c context->mq_notify.sigev_signo); context 1263 kernel/auditsc.c struct mq_attr *attr = &context->mq_getsetattr.mqstat; context 1267 kernel/auditsc.c context->mq_getsetattr.mqdes, context 1272 kernel/auditsc.c audit_log_format(ab, "pid=%d", context->capset.pid); context 1273 kernel/auditsc.c audit_log_cap(ab, "cap_pi", &context->capset.cap.inheritable); context 1274 kernel/auditsc.c audit_log_cap(ab, "cap_pp", &context->capset.cap.permitted); context 1275 kernel/auditsc.c audit_log_cap(ab, "cap_pe", &context->capset.cap.effective); context 1276 kernel/auditsc.c audit_log_cap(ab, "cap_pa", &context->capset.cap.ambient); context 1279 kernel/auditsc.c audit_log_format(ab, "fd=%d flags=0x%x", context->mmap.fd, context 1280 kernel/auditsc.c context->mmap.flags); context 1283 kernel/auditsc.c audit_log_execve_info(context, &ab); context 1287 kernel/auditsc.c if (context->module.name) { context 1288 kernel/auditsc.c audit_log_untrustedstring(ab, context->module.name); context 1317 kernel/auditsc.c static void audit_log_name(struct audit_context *context, struct audit_names *n, context 1322 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_PATH); context 1341 kernel/auditsc.c audit_log_d_path(ab, " name=", &context->pwd); context 1406 kernel/auditsc.c struct audit_context *context = audit_context(); context 1409 kernel/auditsc.c if (!context || context->dummy) context 1412 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_PROCTITLE); context 1419 kernel/auditsc.c if (!context->proctitle.value) { context 1434 kernel/auditsc.c context->proctitle.value = buf; context 1435 kernel/auditsc.c context->proctitle.len = res; context 1437 kernel/auditsc.c msg = context->proctitle.value; context 1438 kernel/auditsc.c len = context->proctitle.len; context 1447 kernel/auditsc.c struct audit_context *context = audit_context(); context 1452 kernel/auditsc.c context->personality = current->personality; context 1454 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_SYSCALL); context 1458 kernel/auditsc.c context->arch, context->major); context 1459 kernel/auditsc.c if (context->personality != PER_LINUX) context 1460 kernel/auditsc.c audit_log_format(ab, " per=%lx", context->personality); context 1461 kernel/auditsc.c if (context->return_valid) context 1463 kernel/auditsc.c (context->return_valid==AUDITSC_SUCCESS)?"yes":"no", context 1464 kernel/auditsc.c context->return_code); context 1468 kernel/auditsc.c context->argv[0], context 1469 kernel/auditsc.c context->argv[1], context 1470 kernel/auditsc.c context->argv[2], context 1471 kernel/auditsc.c context->argv[3], context 1472 kernel/auditsc.c context->name_count); context 1475 kernel/auditsc.c audit_log_key(ab, context->filterkey); context 1478 kernel/auditsc.c for (aux = context->aux; aux; aux = aux->next) { context 1480 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, aux->type); context 1509 kernel/auditsc.c if (context->type) context 1510 kernel/auditsc.c show_special(context, &call_panic); context 1512 kernel/auditsc.c if (context->fds[0] >= 0) { context 1513 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_FD_PAIR); context 1516 kernel/auditsc.c context->fds[0], context->fds[1]); context 1521 kernel/auditsc.c if (context->sockaddr_len) { context 1522 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_SOCKADDR); context 1525 kernel/auditsc.c audit_log_n_hex(ab, (void *)context->sockaddr, context 1526 kernel/auditsc.c context->sockaddr_len); context 1531 kernel/auditsc.c for (aux = context->aux_pids; aux; aux = aux->next) { context 1535 kernel/auditsc.c if (audit_log_pid_context(context, axs->target_pid[i], context 1544 kernel/auditsc.c if (context->target_pid && context 1545 kernel/auditsc.c audit_log_pid_context(context, context->target_pid, context 1546 kernel/auditsc.c context->target_auid, context->target_uid, context 1547 kernel/auditsc.c context->target_sessionid, context 1548 kernel/auditsc.c context->target_sid, context->target_comm)) context 1551 kernel/auditsc.c if (context->pwd.dentry && context->pwd.mnt) { context 1552 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_CWD); context 1554 kernel/auditsc.c audit_log_d_path(ab, "cwd=", &context->pwd); context 1560 kernel/auditsc.c list_for_each_entry(n, &context->names_list, list) { context 1563 kernel/auditsc.c audit_log_name(context, n, NULL, i++, &call_panic); context 1569 kernel/auditsc.c ab = audit_log_start(context, GFP_KERNEL, AUDIT_EOE); context 1584 kernel/auditsc.c struct audit_context *context = tsk->audit_context; context 1586 kernel/auditsc.c if (!context) context 1589 kernel/auditsc.c if (!list_empty(&context->killed_trees)) context 1590 kernel/auditsc.c audit_kill_trees(context); context 1597 kernel/auditsc.c if (tsk == current && !context->dummy && context->in_syscall) { context 1598 kernel/auditsc.c context->return_valid = 0; context 1599 kernel/auditsc.c context->return_code = 0; context 1601 kernel/auditsc.c audit_filter_syscall(tsk, context, context 1603 kernel/auditsc.c audit_filter_inodes(tsk, context); context 1604 kernel/auditsc.c if (context->current_state == AUDIT_RECORD_CONTEXT) context 1609 kernel/auditsc.c audit_free_context(context); context 1631 kernel/auditsc.c struct audit_context *context = audit_context(); context 1634 kernel/auditsc.c if (!audit_enabled || !context) context 1637 kernel/auditsc.c BUG_ON(context->in_syscall || context->name_count); context 1639 kernel/auditsc.c state = context->state; context 1643 kernel/auditsc.c context->dummy = !audit_n_rules; context 1644 kernel/auditsc.c if (!context->dummy && state == AUDIT_BUILD_CONTEXT) { context 1645 kernel/auditsc.c context->prio = 0; context 1650 kernel/auditsc.c context->arch = syscall_get_arch(current); context 1651 kernel/auditsc.c context->major = major; context 1652 kernel/auditsc.c context->argv[0] = a1; context 1653 kernel/auditsc.c context->argv[1] = a2; context 1654 kernel/auditsc.c context->argv[2] = a3; context 1655 kernel/auditsc.c context->argv[3] = a4; context 1656 kernel/auditsc.c context->serial = 0; context 1657 kernel/auditsc.c context->in_syscall = 1; context 1658 kernel/auditsc.c context->current_state = state; context 1659 kernel/auditsc.c context->ppid = 0; context 1660 kernel/auditsc.c ktime_get_coarse_real_ts64(&context->ctime); context 1676 kernel/auditsc.c struct audit_context *context; context 1678 kernel/auditsc.c context = audit_context(); context 1679 kernel/auditsc.c if (!context) context 1682 kernel/auditsc.c if (!list_empty(&context->killed_trees)) context 1683 kernel/auditsc.c audit_kill_trees(context); context 1685 kernel/auditsc.c if (!context->dummy && context->in_syscall) { context 1687 kernel/auditsc.c context->return_valid = AUDITSC_SUCCESS; context 1689 kernel/auditsc.c context->return_valid = AUDITSC_FAILURE; context 1705 kernel/auditsc.c context->return_code = -EINTR; context 1707 kernel/auditsc.c context->return_code = return_code; context 1709 kernel/auditsc.c audit_filter_syscall(current, context, context 1711 kernel/auditsc.c audit_filter_inodes(current, context); context 1712 kernel/auditsc.c if (context->current_state == AUDIT_RECORD_CONTEXT) context 1716 kernel/auditsc.c context->in_syscall = 0; context 1717 kernel/auditsc.c context->prio = context->state == AUDIT_RECORD_CONTEXT ? ~0ULL : 0; context 1719 kernel/auditsc.c audit_free_module(context); context 1720 kernel/auditsc.c audit_free_names(context); context 1721 kernel/auditsc.c unroll_tree_refs(context, NULL, 0); context 1722 kernel/auditsc.c audit_free_aux(context); context 1723 kernel/auditsc.c context->aux = NULL; context 1724 kernel/auditsc.c context->aux_pids = NULL; context 1725 kernel/auditsc.c context->target_pid = 0; context 1726 kernel/auditsc.c context->target_sid = 0; context 1727 kernel/auditsc.c context->sockaddr_len = 0; context 1728 kernel/auditsc.c context->type = 0; context 1729 kernel/auditsc.c context->fds[0] = -1; context 1730 kernel/auditsc.c if (context->state != AUDIT_RECORD_CONTEXT) { context 1731 kernel/auditsc.c kfree(context->filterkey); context 1732 kernel/auditsc.c context->filterkey = NULL; context 1738 kernel/auditsc.c struct audit_context *context; context 1744 kernel/auditsc.c context = audit_context(); context 1745 kernel/auditsc.c p = context->trees; context 1746 kernel/auditsc.c count = context->tree_count; context 1752 kernel/auditsc.c if (likely(put_tree_ref(context, chunk))) context 1754 kernel/auditsc.c if (unlikely(!grow_tree_refs(context))) { context 1756 kernel/auditsc.c audit_set_auditable(context); context 1758 kernel/auditsc.c unroll_tree_refs(context, p, count); context 1761 kernel/auditsc.c put_tree_ref(context, chunk); context 1766 kernel/auditsc.c struct audit_context *context; context 1773 kernel/auditsc.c context = audit_context(); context 1774 kernel/auditsc.c p = context->trees; context 1775 kernel/auditsc.c count = context->tree_count; context 1787 kernel/auditsc.c if (unlikely(!put_tree_ref(context, chunk))) { context 1802 kernel/auditsc.c unroll_tree_refs(context, p, count); context 1806 kernel/auditsc.c if (grow_tree_refs(context)) { context 1808 kernel/auditsc.c unroll_tree_refs(context, p, count); context 1813 kernel/auditsc.c unroll_tree_refs(context, p, count); context 1814 kernel/auditsc.c audit_set_auditable(context); context 1820 kernel/auditsc.c static struct audit_names *audit_alloc_name(struct audit_context *context, context 1825 kernel/auditsc.c if (context->name_count < AUDIT_NAMES) { context 1826 kernel/auditsc.c aname = &context->preallocated_names[context->name_count]; context 1837 kernel/auditsc.c list_add_tail(&aname->list, &context->names_list); context 1839 kernel/auditsc.c context->name_count++; context 1854 kernel/auditsc.c struct audit_context *context = audit_context(); context 1857 kernel/auditsc.c list_for_each_entry(n, &context->names_list, list) { context 1877 kernel/auditsc.c struct audit_context *context = audit_context(); context 1880 kernel/auditsc.c if (!context->in_syscall) context 1883 kernel/auditsc.c n = audit_alloc_name(context, AUDIT_TYPE_UNKNOWN); context 1892 kernel/auditsc.c if (!context->pwd.dentry) context 1893 kernel/auditsc.c get_fs_pwd(current->fs, &context->pwd); context 1947 kernel/auditsc.c struct audit_context *context = audit_context(); context 1955 kernel/auditsc.c if (!context->in_syscall) context 1993 kernel/auditsc.c list_for_each_entry_reverse(n, &context->names_list, list) { context 2020 kernel/auditsc.c n = audit_alloc_name(context, AUDIT_TYPE_UNKNOWN); context 2065 kernel/auditsc.c struct audit_context *context = audit_context(); context 2073 kernel/auditsc.c if (!context->in_syscall) context 2096 kernel/auditsc.c list_for_each_entry(n, &context->names_list, list) { context 2113 kernel/auditsc.c list_for_each_entry(n, &context->names_list, list) { context 2133 kernel/auditsc.c n = audit_alloc_name(context, AUDIT_TYPE_PARENT); context 2140 kernel/auditsc.c found_child = audit_alloc_name(context, type); context 2195 kernel/auditsc.c struct audit_context *context = audit_context(); context 2198 kernel/auditsc.c memcpy(&context->mq_open.attr, attr, sizeof(struct mq_attr)); context 2200 kernel/auditsc.c memset(&context->mq_open.attr, 0, sizeof(struct mq_attr)); context 2202 kernel/auditsc.c context->mq_open.oflag = oflag; context 2203 kernel/auditsc.c context->mq_open.mode = mode; context 2205 kernel/auditsc.c context->type = AUDIT_MQ_OPEN; context 2219 kernel/auditsc.c struct audit_context *context = audit_context(); context 2220 kernel/auditsc.c struct timespec64 *p = &context->mq_sendrecv.abs_timeout; context 2227 kernel/auditsc.c context->mq_sendrecv.mqdes = mqdes; context 2228 kernel/auditsc.c context->mq_sendrecv.msg_len = msg_len; context 2229 kernel/auditsc.c context->mq_sendrecv.msg_prio = msg_prio; context 2231 kernel/auditsc.c context->type = AUDIT_MQ_SENDRECV; context 2243 kernel/auditsc.c struct audit_context *context = audit_context(); context 2246 kernel/auditsc.c context->mq_notify.sigev_signo = notification->sigev_signo; context 2248 kernel/auditsc.c context->mq_notify.sigev_signo = 0; context 2250 kernel/auditsc.c context->mq_notify.mqdes = mqdes; context 2251 kernel/auditsc.c context->type = AUDIT_MQ_NOTIFY; context 2262 kernel/auditsc.c struct audit_context *context = audit_context(); context 2263 kernel/auditsc.c context->mq_getsetattr.mqdes = mqdes; context 2264 kernel/auditsc.c context->mq_getsetattr.mqstat = *mqstat; context 2265 kernel/auditsc.c context->type = AUDIT_MQ_GETSETATTR; context 2275 kernel/auditsc.c struct audit_context *context = audit_context(); context 2276 kernel/auditsc.c context->ipc.uid = ipcp->uid; context 2277 kernel/auditsc.c context->ipc.gid = ipcp->gid; context 2278 kernel/auditsc.c context->ipc.mode = ipcp->mode; context 2279 kernel/auditsc.c context->ipc.has_perm = 0; context 2280 kernel/auditsc.c security_ipc_getsecid(ipcp, &context->ipc.osid); context 2281 kernel/auditsc.c context->type = AUDIT_IPC; context 2295 kernel/auditsc.c struct audit_context *context = audit_context(); context 2297 kernel/auditsc.c context->ipc.qbytes = qbytes; context 2298 kernel/auditsc.c context->ipc.perm_uid = uid; context 2299 kernel/auditsc.c context->ipc.perm_gid = gid; context 2300 kernel/auditsc.c context->ipc.perm_mode = mode; context 2301 kernel/auditsc.c context->ipc.has_perm = 1; context 2306 kernel/auditsc.c struct audit_context *context = audit_context(); context 2308 kernel/auditsc.c context->type = AUDIT_EXECVE; context 2309 kernel/auditsc.c context->execve.argc = bprm->argc; context 2321 kernel/auditsc.c struct audit_context *context = audit_context(); context 2325 kernel/auditsc.c context->type = AUDIT_SOCKETCALL; context 2326 kernel/auditsc.c context->socketcall.nargs = nargs; context 2327 kernel/auditsc.c memcpy(context->socketcall.args, args, nargs * sizeof(unsigned long)); context 2339 kernel/auditsc.c struct audit_context *context = audit_context(); context 2340 kernel/auditsc.c context->fds[0] = fd1; context 2341 kernel/auditsc.c context->fds[1] = fd2; context 2353 kernel/auditsc.c struct audit_context *context = audit_context(); context 2355 kernel/auditsc.c if (!context->sockaddr) { context 2359 kernel/auditsc.c context->sockaddr = p; context 2362 kernel/auditsc.c context->sockaddr_len = len; context 2363 kernel/auditsc.c memcpy(context->sockaddr, a, len); context 2369 kernel/auditsc.c struct audit_context *context = audit_context(); context 2371 kernel/auditsc.c context->target_pid = task_tgid_nr(t); context 2372 kernel/auditsc.c context->target_auid = audit_get_loginuid(t); context 2373 kernel/auditsc.c context->target_uid = task_uid(t); context 2374 kernel/auditsc.c context->target_sessionid = audit_get_sessionid(t); context 2375 kernel/auditsc.c security_task_getsecid(t, &context->target_sid); context 2376 kernel/auditsc.c memcpy(context->target_comm, t->comm, TASK_COMM_LEN); context 2445 kernel/auditsc.c struct audit_context *context = audit_context(); context 2453 kernel/auditsc.c ax->d.next = context->aux; context 2454 kernel/auditsc.c context->aux = (void *)ax; context 2486 kernel/auditsc.c struct audit_context *context = audit_context(); context 2487 kernel/auditsc.c context->capset.pid = task_tgid_nr(current); context 2488 kernel/auditsc.c context->capset.cap.effective = new->cap_effective; context 2489 kernel/auditsc.c context->capset.cap.inheritable = new->cap_effective; context 2490 kernel/auditsc.c context->capset.cap.permitted = new->cap_permitted; context 2491 kernel/auditsc.c context->capset.cap.ambient = new->cap_ambient; context 2492 kernel/auditsc.c context->type = AUDIT_CAPSET; context 2497 kernel/auditsc.c struct audit_context *context = audit_context(); context 2498 kernel/auditsc.c context->mmap.fd = fd; context 2499 kernel/auditsc.c context->mmap.flags = flags; context 2500 kernel/auditsc.c context->type = AUDIT_MMAP; context 2505 kernel/auditsc.c struct audit_context *context = audit_context(); context 2507 kernel/auditsc.c context->module.name = kstrdup(name, GFP_KERNEL); context 2508 kernel/auditsc.c if (!context->module.name) context 2510 kernel/auditsc.c context->type = AUDIT_KERN_MODULE; context 10425 kernel/events/core.c void *context, int cgroup_fd) context 10498 kernel/events/core.c context = parent_event->overflow_handler_context; context 10516 kernel/events/core.c event->overflow_handler_context = context; context 11351 kernel/events/core.c void *context) context 11365 kernel/events/core.c overflow_handler, context, -1); context 443 kernel/events/hw_breakpoint.c void *context, context 447 kernel/events/hw_breakpoint.c context); context 543 kernel/events/hw_breakpoint.c void *context) context 556 kernel/events/hw_breakpoint.c triggered, context); context 1165 kernel/ptrace.c tmp = mm->context.exec_fdpic_loadmap; context 1168 kernel/ptrace.c tmp = mm->context.interp_fdpic_loadmap; context 3708 kernel/trace/trace_events_hist.c struct snapshot_context *context = cond_data; context 3717 kernel/trace/trace_events_hist.c track_val = get_track_val(track_data->hist_data, context->elt, context 3724 kernel/trace/trace_events_hist.c memcpy(track_data->key, context->key, track_data->key_len); context 3726 kernel/trace/trace_events_hist.c elt_data = context->elt->private_data; context 3743 kernel/trace/trace_events_hist.c struct snapshot_context context; context 3745 kernel/trace/trace_events_hist.c context.elt = elt; context 3746 kernel/trace/trace_events_hist.c context.key = key; context 3748 kernel/trace/trace_events_hist.c tracing_snapshot_cond(file->tr, &context); context 168 lib/asn1_decoder.c void *context, context 346 lib/asn1_decoder.c ret = actions[act](context, hdr, tag, data + dp, len); context 439 lib/asn1_decoder.c ret = actions[act](context, hdr, 0, data + tdp, len); context 454 lib/asn1_decoder.c ret = actions[machine[pc + 1]](context, hdr, tag, data + tdp, len); context 1699 lib/iov_iter.c int (*f)(struct kvec *vec, void *context), context 1700 lib/iov_iter.c void *context) context 1710 lib/iov_iter.c err = f(&w, context); context 1714 lib/iov_iter.c err = f(&w, context);}) context 512 lib/test_firmware.c static void trigger_async_request_cb(const struct firmware *fw, void *context) context 726 lib/test_firmware.c static void trigger_batched_cb(const struct firmware *fw, void *context) context 728 lib/test_firmware.c struct test_batched_req *req = context; context 418 mm/nommu.c if (brk < mm->start_brk || brk > mm->context.end_brk) context 5878 mm/page_alloc.c unsigned long start_pfn, enum memmap_context context, context 5910 mm/page_alloc.c if (context == MEMMAP_EARLY) { context 5923 mm/page_alloc.c if (context == MEMMAP_HOTPLUG) context 241 net/9p/trans_rdma.c struct p9_client *c = id->context; context 356 net/9p/trans_rdma.c static void qp_event_handler(struct ib_event *event, void *context) context 359 net/9p/trans_rdma.c event->event, context); context 95 net/ipv4/netfilter/nf_nat_snmp_basic_main.c int snmp_version(void *context, size_t hdrlen, unsigned char tag, context 105 net/ipv4/netfilter/nf_nat_snmp_basic_main.c int snmp_helper(void *context, size_t hdrlen, unsigned char tag, context 108 net/ipv4/netfilter/nf_nat_snmp_basic_main.c struct snmp_ctx *ctx = (struct snmp_ctx *)context; context 39 net/nfc/hci/command.c static void nfc_hci_execute_cb(void *context, struct sk_buff *skb, int err) context 41 net/nfc/hci/command.c struct hcp_exec_waiter *hcp_ew = (struct hcp_exec_waiter *)context; context 699 net/nfc/hci/core.c static void hci_transceive_cb(void *context, struct sk_buff *skb, int err) context 701 net/nfc/hci/core.c struct nfc_hci_dev *hdev = context; context 402 net/nfc/nci/core.c static void nci_nfcc_loopback_cb(void *context, struct sk_buff *skb, int err) context 404 net/nfc/nci/core.c struct nci_dev *ndev = (struct nci_dev *)context; context 430 net/nfc/nci/hci.c void nci_hci_data_received_cb(void *context, context 433 net/nfc/nci/hci.c struct nci_dev *ndev = (struct nci_dev *)context; context 1458 net/nfc/netlink.c static void se_io_cb(void *context, u8 *apdu, size_t apdu_len, int err) context 1460 net/nfc/netlink.c struct se_io_ctx *ctx = context; context 138 net/nfc/rawsock.c static void rawsock_data_exchange_complete(void *context, struct sk_buff *skb, context 141 net/nfc/rawsock.c struct sock *sk = (struct sock *) context; context 612 net/openvswitch/actions.c nh->md1.context[i] = context 613 net/openvswitch/actions.c OVS_MASKED(nh->md1.context[i], key.context[i], context 614 net/openvswitch/actions.c mask.context[i]); context 616 net/openvswitch/actions.c memcpy(flow_key->nsh.context, nh->md1.context, context 617 net/openvswitch/actions.c sizeof(nh->md1.context)); context 620 net/openvswitch/actions.c memset(flow_key->nsh.context, 0, context 621 net/openvswitch/actions.c sizeof(flow_key->nsh.context)); context 511 net/openvswitch/flow.c memcpy(key->nsh.context, nh->md1.context, context 515 net/openvswitch/flow.c memset(key->nsh.context, 0, context 58 net/openvswitch/flow.h __be32 context[NSH_MD1_CONTEXT_SIZE]; context 1358 net/openvswitch/flow_netlink.c memcpy(nsh->context, md1->context, sizeof(*md1)); context 1359 net/openvswitch/flow_netlink.c memcpy(nsh_mask->context, md1_mask->context, context 1434 net/openvswitch/flow_netlink.c SW_FLOW_KEY_PUT(match, nsh.context[i], context 1435 net/openvswitch/flow_netlink.c md1->context[i], is_mask); context 1956 net/openvswitch/flow_netlink.c sizeof(nsh->context), nsh->context)) context 280 net/rds/ib_cm.c static void rds_ib_cq_comp_handler_recv(struct ib_cq *cq, void *context) context 282 net/rds/ib_cm.c struct rds_connection *conn = context; context 408 net/rds/ib_cm.c static void rds_ib_cq_comp_handler_send(struct ib_cq *cq, void *context) context 410 net/rds/ib_cm.c struct rds_connection *conn = context; context 835 net/rds/ib_cm.c BUG_ON(cm_id->context); context 839 net/rds/ib_cm.c cm_id->context = conn; context 870 net/rds/ib_cm.c struct rds_connection *conn = cm_id->context; context 378 net/rds/ib_send.c struct rds_connection *conn = ic->i_cm_id->context; context 54 net/rds/rdma_transport.c struct rds_connection *conn = cm_id->context; context 102 net/sunrpc/xprtrdma/svc_rdma_transport.c static void qp_event_handler(struct ib_event *event, void *context) context 104 net/sunrpc/xprtrdma/svc_rdma_transport.c struct svc_xprt *xprt = context; context 196 net/sunrpc/xprtrdma/svc_rdma_transport.c struct svcxprt_rdma *listen_xprt = new_cma_id->context; context 206 net/sunrpc/xprtrdma/svc_rdma_transport.c new_cma_id->context = newxprt; context 249 net/sunrpc/xprtrdma/svc_rdma_transport.c "event = %s (%d)\n", cma_id, cma_id->context, context 268 net/sunrpc/xprtrdma/svc_rdma_transport.c struct svcxprt_rdma *rdma = cma_id->context; context 116 net/sunrpc/xprtrdma/verbs.c rpcrdma_qp_event_handler(struct ib_event *event, void *context) context 118 net/sunrpc/xprtrdma/verbs.c struct rpcrdma_ep *ep = context; context 220 net/sunrpc/xprtrdma/verbs.c struct rpcrdma_xprt *r_xprt = id->context; context 580 net/tls/tls_device.c struct tls_record_info *tls_get_record(struct tls_offload_context_tx *context, context 583 net/tls/tls_device.c u64 record_sn = context->hint_record_sn; context 586 net/tls/tls_device.c info = context->retransmit_hint; context 592 net/tls/tls_device.c info = list_first_entry_or_null(&context->records_list, context 607 net/tls/tls_device.c last = list_last_entry(&context->records_list, context 614 net/tls/tls_device.c record_sn = context->unacked_record_sn; context 619 net/tls/tls_device.c list_for_each_entry_from_rcu(info, &context->records_list, list) { context 621 net/tls/tls_device.c if (!context->retransmit_hint || context 623 net/tls/tls_device.c context->retransmit_hint->end_seq)) { context 624 net/tls/tls_device.c context->hint_record_sn = record_sn; context 625 net/tls/tls_device.c context->retransmit_hint = info; context 1078 net/tls/tls_device.c struct tls_offload_context_rx *context; context 1110 net/tls/tls_device.c context = kzalloc(TLS_OFFLOAD_CONTEXT_SIZE_RX, GFP_KERNEL); context 1111 net/tls/tls_device.c if (!context) { context 1115 net/tls/tls_device.c context->resync_nh_reset = 1; context 1117 net/tls/tls_device.c ctx->priv_ctx_rx = context; context 186 net/vmw_vsock/vmci_transport.c vsock_addr_init(local, pkt->dg.dst.context, pkt->dst_port); context 187 net/vmw_vsock/vmci_transport.c vsock_addr_init(remote, pkt->dg.src.context, pkt->src_port); context 338 net/vmw_vsock/vmci_transport.c vsock_addr_init(&dst, pkt->dg.src.context, context 493 net/vmw_vsock/vmci_transport.c vsock_addr_init(&src, pkt->dg.src.context, pkt->src_port); context 637 net/vmw_vsock/vmci_transport.c if (!vmci_transport_allow_dgram(vsk, dg->src.context)) context 696 net/vmw_vsock/vmci_transport.c if (!vmci_transport_stream_allow(dg->src.context, -1) context 697 net/vmw_vsock/vmci_transport.c || vmci_transport_peer_rid(dg->src.context) != dg->src.resource) context 710 net/vmw_vsock/vmci_transport.c vsock_addr_init(&src, pkt->dg.src.context, pkt->src_port); context 711 net/vmw_vsock/vmci_transport.c vsock_addr_init(&dst, pkt->dg.dst.context, pkt->dst_port); context 754 net/vmw_vsock/vmci_transport.c if (!vmci_transport_allow_dgram(vsk, pkt->dg.src.context)) { context 911 net/vmw_vsock/vmci_transport.c vsock_sk(sk)->local_addr.svm_cid = pkt->dg.dst.context; context 970 net/vmw_vsock/vmci_transport.c vsock_sk(pending)->local_addr.svm_cid = pkt->dg.dst.context; context 1025 net/vmw_vsock/vmci_transport.c vsock_addr_init(&vpending->local_addr, pkt->dg.dst.context, context 1027 net/vmw_vsock/vmci_transport.c vsock_addr_init(&vpending->remote_addr, pkt->dg.src.context, context 1201 net/vmw_vsock/vmci_transport.c pkt->dg.src.context, context 1309 net/vmw_vsock/vmci_transport.c || pkt->dg.src.context != vsk->remote_addr.svm_cid context 1409 net/vmw_vsock/vmci_transport.c vsk->local_addr.svm_cid = pkt->dg.dst.context; context 1791 net/vmw_vsock/vmci_transport.c vsock_addr_init(vm_addr, dg->src.context, dg->src.resource); context 1017 net/wireless/reg.c static void regdb_fw_cb(const struct firmware *fw, void *context) context 1045 net/wireless/reg.c restore = context && query_regdb(context); context 1056 net/wireless/reg.c kfree(context); context 1555 security/keys/keyctl.c char *context; context 1576 security/keys/keyctl.c ret = security_key_getsecurity(key, &context); context 1590 security/keys/keyctl.c if (copy_to_user(buffer, context, buflen) != 0) context 1594 security/keys/keyctl.c kfree(context); context 396 security/selinux/hooks.c const char *fscontext, *context, *rootcontext, *defcontext; context 403 security/selinux/hooks.c kfree(opts->context); context 430 security/selinux/hooks.c A(context, true), context 713 security/selinux/hooks.c if (opts->context) { context 714 security/selinux/hooks.c rc = parse_sid(sb, opts->context, &context_sid); context 1012 security/selinux/hooks.c if (opts->context || opts->defcontext) context 1014 security/selinux/hooks.c opts->context = s; context 1027 security/selinux/hooks.c if (opts->context || opts->defcontext) context 1078 security/selinux/hooks.c char *context = NULL; context 1083 security/selinux/hooks.c &context, &len); context 1085 security/selinux/hooks.c bool has_comma = context && strchr(context, ','); context 1090 security/selinux/hooks.c seq_escape(m, context, "\"\n\\"); context 1094 security/selinux/hooks.c kfree(context); context 1373 security/selinux/hooks.c char *context; context 1378 security/selinux/hooks.c context = kmalloc(len + 1, GFP_NOFS); context 1379 security/selinux/hooks.c if (!context) context 1382 security/selinux/hooks.c context[len] = '\0'; context 1383 security/selinux/hooks.c rc = __vfs_getxattr(dentry, inode, XATTR_NAME_SELINUX, context, len); context 1385 security/selinux/hooks.c kfree(context); context 1393 security/selinux/hooks.c context = kmalloc(len + 1, GFP_NOFS); context 1394 security/selinux/hooks.c if (!context) context 1397 security/selinux/hooks.c context[len] = '\0'; context 1399 security/selinux/hooks.c context, len); context 1402 security/selinux/hooks.c kfree(context); context 1412 security/selinux/hooks.c rc = security_context_to_sid_default(&selinux_state, context, rc, sid, context 1420 security/selinux/hooks.c ino, dev, context); context 1423 security/selinux/hooks.c __func__, context, -rc, dev, ino); context 1426 security/selinux/hooks.c kfree(context); context 2702 security/selinux/hooks.c if (opts->context) { context 2703 security/selinux/hooks.c rc = parse_sid(sb, opts->context, &sid); context 2805 security/selinux/hooks.c if (src->context) { context 2806 security/selinux/hooks.c opts->context = kstrdup(src->context, GFP_KERNEL); context 2807 security/selinux/hooks.c if (!opts->context) context 2915 security/selinux/hooks.c char *context; context 2944 security/selinux/hooks.c &context, &clen); context 2947 security/selinux/hooks.c *value = context; context 3345 security/selinux/hooks.c char *context = NULL; context 3363 security/selinux/hooks.c isec->sid, &context, context 3367 security/selinux/hooks.c &context, &size); context 3372 security/selinux/hooks.c *buffer = context; context 3375 security/selinux/hooks.c kfree(context); context 3467 security/selinux/hooks.c char *context; context 3476 security/selinux/hooks.c context = kmalloc(clen, GFP_KERNEL); context 3477 security/selinux/hooks.c if (!context) context 3480 security/selinux/hooks.c rc = kernfs_xattr_get(kn_dir, XATTR_NAME_SELINUX, context, clen); context 3482 security/selinux/hooks.c kfree(context); context 3486 security/selinux/hooks.c rc = security_context_to_sid(&selinux_state, context, clen, &parent_sid, context 3488 security/selinux/hooks.c kfree(context); context 3509 security/selinux/hooks.c &context, &clen); context 3513 security/selinux/hooks.c rc = kernfs_xattr_set(kn, XATTR_NAME_SELINUX, context, clen, context 3515 security/selinux/hooks.c kfree(context); context 6603 security/selinux/hooks.c char *context = NULL; context 6608 security/selinux/hooks.c &context, &len); context 6611 security/selinux/hooks.c *_buffer = context; context 36 security/selinux/ss/context.h static inline void mls_context_init(struct context *c) context 41 security/selinux/ss/context.h static inline int mls_context_cpy(struct context *dst, struct context *src) context 61 security/selinux/ss/context.h static inline int mls_context_cpy_low(struct context *dst, struct context *src) context 81 security/selinux/ss/context.h static inline int mls_context_cpy_high(struct context *dst, struct context *src) context 98 security/selinux/ss/context.h static inline int mls_context_cmp(struct context *c1, struct context *c2) context 106 security/selinux/ss/context.h static inline void mls_context_destroy(struct context *c) context 113 security/selinux/ss/context.h static inline void context_init(struct context *c) context 118 security/selinux/ss/context.h static inline int context_cpy(struct context *dst, struct context *src) context 142 security/selinux/ss/context.h static inline void context_destroy(struct context *c) context 151 security/selinux/ss/context.h static inline int context_cmp(struct context *c1, struct context *c2) context 36 security/selinux/ss/mls.c int mls_compute_context_len(struct policydb *p, struct context *context) context 48 security/selinux/ss/mls.c int index_sens = context->range.level[l].sens; context 54 security/selinux/ss/mls.c e = &context->range.level[l].cat; context 73 security/selinux/ss/mls.c if (mls_level_eq(&context->range.level[0], context 74 security/selinux/ss/mls.c &context->range.level[1])) context 90 security/selinux/ss/mls.c struct context *context, context 108 security/selinux/ss/mls.c context->range.level[l].sens - 1)); context 114 security/selinux/ss/mls.c e = &context->range.level[l].cat; context 150 security/selinux/ss/mls.c if (mls_level_eq(&context->range.level[0], context 151 security/selinux/ss/mls.c &context->range.level[1])) context 193 security/selinux/ss/mls.c int mls_context_isvalid(struct policydb *p, struct context *c) context 237 security/selinux/ss/mls.c struct context *context, context 262 security/selinux/ss/mls.c struct context *defcon; context 271 security/selinux/ss/mls.c return mls_context_cpy(context, defcon); context 299 security/selinux/ss/mls.c context->range.level[l].sens = levdatum->level->sens; context 319 security/selinux/ss/mls.c rc = ebitmap_set_bit(&context->range.level[l].cat, context 336 security/selinux/ss/mls.c rc = ebitmap_set_bit(&context->range.level[l].cat, i, 1); context 345 security/selinux/ss/mls.c context->range.level[1].sens = context->range.level[0].sens; context 346 security/selinux/ss/mls.c rc = ebitmap_cpy(&context->range.level[1].cat, context 347 security/selinux/ss/mls.c &context->range.level[0].cat); context 361 security/selinux/ss/mls.c int mls_from_string(struct policydb *p, char *str, struct context *context, context 374 security/selinux/ss/mls.c rc = mls_context_to_sid(p, ':', tmpstr, context, context 385 security/selinux/ss/mls.c int mls_range_set(struct context *context, context 392 security/selinux/ss/mls.c context->range.level[l].sens = range->level[l].sens; context 393 security/selinux/ss/mls.c rc = ebitmap_cpy(&context->range.level[l].cat, context 403 security/selinux/ss/mls.c struct context *fromcon, struct user_datum *user, context 404 security/selinux/ss/mls.c struct context *usercon) context 449 security/selinux/ss/mls.c struct context *oldc, context 450 security/selinux/ss/mls.c struct context *newc) context 488 security/selinux/ss/mls.c struct context *scontext, context 489 security/selinux/ss/mls.c struct context *tcontext, context 492 security/selinux/ss/mls.c struct context *newcontext, context 563 security/selinux/ss/mls.c struct context *context, context 569 security/selinux/ss/mls.c secattr->attr.mls.lvl = context->range.level[0].sens - 1; context 584 security/selinux/ss/mls.c struct context *context, context 590 security/selinux/ss/mls.c context->range.level[0].sens = secattr->attr.mls.lvl + 1; context 591 security/selinux/ss/mls.c context->range.level[1].sens = context->range.level[0].sens; context 605 security/selinux/ss/mls.c struct context *context, context 613 security/selinux/ss/mls.c rc = ebitmap_netlbl_export(&context->range.level[0].cat, context 634 security/selinux/ss/mls.c struct context *context, context 642 security/selinux/ss/mls.c rc = ebitmap_netlbl_import(&context->range.level[0].cat, context 646 security/selinux/ss/mls.c memcpy(&context->range.level[1].cat, &context->range.level[0].cat, context 647 security/selinux/ss/mls.c sizeof(context->range.level[0].cat)); context 652 security/selinux/ss/mls.c ebitmap_destroy(&context->range.level[0].cat); context 28 security/selinux/ss/mls.h int mls_compute_context_len(struct policydb *p, struct context *context); context 29 security/selinux/ss/mls.h void mls_sid_to_context(struct policydb *p, struct context *context, context 31 security/selinux/ss/mls.h int mls_context_isvalid(struct policydb *p, struct context *c); context 38 security/selinux/ss/mls.h struct context *context, context 42 security/selinux/ss/mls.h int mls_from_string(struct policydb *p, char *str, struct context *context, context 45 security/selinux/ss/mls.h int mls_range_set(struct context *context, struct mls_range *range); context 49 security/selinux/ss/mls.h struct context *oldc, context 50 security/selinux/ss/mls.h struct context *newc); context 53 security/selinux/ss/mls.h struct context *scontext, context 54 security/selinux/ss/mls.h struct context *tcontext, context 57 security/selinux/ss/mls.h struct context *newcontext, context 61 security/selinux/ss/mls.h struct context *fromcon, struct user_datum *user, context 62 security/selinux/ss/mls.h struct context *usercon); context 66 security/selinux/ss/mls.h struct context *context, context 69 security/selinux/ss/mls.h struct context *context, context 72 security/selinux/ss/mls.h struct context *context, context 75 security/selinux/ss/mls.h struct context *context, context 79 security/selinux/ss/mls.h struct context *context, context 85 security/selinux/ss/mls.h struct context *context, context 91 security/selinux/ss/mls.h struct context *context, context 97 security/selinux/ss/mls.h struct context *context, context 360 security/selinux/ss/policydb.c context_destroy(&c->context[0]); context 361 security/selinux/ss/policydb.c context_destroy(&c->context[1]); context 864 security/selinux/ss/policydb.c if (!c->context[0].user) { context 877 security/selinux/ss/policydb.c rc = sidtab_set_initial(s, c->sid[0], &c->context[0]); context 915 security/selinux/ss/policydb.c int policydb_context_isvalid(struct policydb *p, struct context *c) context 1019 security/selinux/ss/policydb.c static int context_read_and_validate(struct context *c, context 2029 security/selinux/ss/policydb.c rc = context_read_and_validate(&newc->context[0], p, fp); context 2103 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2118 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2121 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[1], p, fp); context 2132 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2142 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2164 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2178 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], p, fp); context 2208 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], context 2235 security/selinux/ss/policydb.c rc = context_read_and_validate(&c->context[0], context 2709 security/selinux/ss/policydb.c static int context_write(struct policydb *p, struct context *c, context 3099 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3113 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3116 security/selinux/ss/policydb.c rc = context_write(p, &c->context[1], fp); context 3127 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3137 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3151 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3163 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3181 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3195 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 3249 security/selinux/ss/policydb.c rc = context_write(p, &c->context[0], fp); context 200 security/selinux/ss/policydb.h struct context context[2]; /* security context(s) */ context 313 security/selinux/ss/policydb.h extern int policydb_context_isvalid(struct policydb *p, struct context *c); context 90 security/selinux/ss/services.c struct context *context, context 95 security/selinux/ss/services.c struct context *scontext, context 96 security/selinux/ss/services.c struct context *tcontext, context 263 security/selinux/ss/services.c struct context *scontext, context 264 security/selinux/ss/services.c struct context *tcontext, context 265 security/selinux/ss/services.c struct context *xcontext, context 269 security/selinux/ss/services.c struct context *c; context 454 security/selinux/ss/services.c struct context *scontext, context 455 security/selinux/ss/services.c struct context *tcontext, context 533 security/selinux/ss/services.c struct context *scontext, context 534 security/selinux/ss/services.c struct context *tcontext, context 538 security/selinux/ss/services.c struct context lo_scontext; context 539 security/selinux/ss/services.c struct context lo_tcontext, *tcontextp = tcontext; context 614 security/selinux/ss/services.c struct context *scontext, context 615 security/selinux/ss/services.c struct context *tcontext, context 719 security/selinux/ss/services.c struct context *ocontext, context 720 security/selinux/ss/services.c struct context *ncontext, context 721 security/selinux/ss/services.c struct context *tcontext, context 754 security/selinux/ss/services.c struct context *ocontext; context 755 security/selinux/ss/services.c struct context *ncontext; context 756 security/selinux/ss/services.c struct context *tcontext; context 858 security/selinux/ss/services.c struct context *old_context, *new_context; context 1008 security/selinux/ss/services.c struct context *scontext, *tcontext; context 1102 security/selinux/ss/services.c struct context *scontext = NULL, *tcontext = NULL; context 1157 security/selinux/ss/services.c struct context *scontext = NULL, *tcontext = NULL; context 1209 security/selinux/ss/services.c struct context *context, context 1218 security/selinux/ss/services.c if (context->len) { context 1219 security/selinux/ss/services.c *scontext_len = context->len; context 1221 security/selinux/ss/services.c *scontext = kstrdup(context->str, GFP_ATOMIC); context 1229 security/selinux/ss/services.c *scontext_len += strlen(sym_name(p, SYM_USERS, context->user - 1)) + 1; context 1230 security/selinux/ss/services.c *scontext_len += strlen(sym_name(p, SYM_ROLES, context->role - 1)) + 1; context 1231 security/selinux/ss/services.c *scontext_len += strlen(sym_name(p, SYM_TYPES, context->type - 1)) + 1; context 1232 security/selinux/ss/services.c *scontext_len += mls_compute_context_len(p, context); context 1247 security/selinux/ss/services.c sym_name(p, SYM_USERS, context->user - 1), context 1248 security/selinux/ss/services.c sym_name(p, SYM_ROLES, context->role - 1), context 1249 security/selinux/ss/services.c sym_name(p, SYM_TYPES, context->type - 1)); context 1251 security/selinux/ss/services.c mls_sid_to_context(p, context, &scontextp); context 1274 security/selinux/ss/services.c struct context *context; context 1306 security/selinux/ss/services.c context = sidtab_search_force(sidtab, sid); context 1308 security/selinux/ss/services.c context = sidtab_search(sidtab, sid); context 1309 security/selinux/ss/services.c if (!context) { context 1315 security/selinux/ss/services.c if (only_invalid && !context->len) context 1318 security/selinux/ss/services.c rc = context_struct_to_string(policydb, context, scontext, context 1377 security/selinux/ss/services.c struct context *ctx, context 1460 security/selinux/ss/services.c struct context context; context 1497 security/selinux/ss/services.c &context, def_sid); context 1499 security/selinux/ss/services.c context.str = str; context 1500 security/selinux/ss/services.c context.len = strlen(str) + 1; context 1504 security/selinux/ss/services.c rc = sidtab_context_to_sid(sidtab, &context, sid); context 1505 security/selinux/ss/services.c context_destroy(&context); context 1577 security/selinux/ss/services.c struct context *scontext, context 1578 security/selinux/ss/services.c struct context *tcontext, context 1580 security/selinux/ss/services.c struct context *newcontext) context 1611 security/selinux/ss/services.c struct context *newcontext, context 1648 security/selinux/ss/services.c struct context *scontext = NULL, *tcontext = NULL, newcontext; context 1897 security/selinux/ss/services.c struct context *context) context 1906 security/selinux/ss/services.c if (!context_struct_to_string(policydb, context, &s, &len)) { context 1928 security/selinux/ss/services.c static int convert_context(struct context *oldc, struct context *newc, void *p) context 2024 security/selinux/ss/services.c rc = mls_range_set(newc, &oc->context[0].range); context 2284 security/selinux/ss/services.c &c->context[0], context 2331 security/selinux/ss/services.c &c->context[0], context 2378 security/selinux/ss/services.c &c->context[0], context 2420 security/selinux/ss/services.c &c->context[0], context 2425 security/selinux/ss/services.c &c->context[1], context 2516 security/selinux/ss/services.c &c->context[0], context 2556 security/selinux/ss/services.c struct context *fromcon, usercon; context 2710 security/selinux/ss/services.c rc = sidtab_context_to_sid(sidtab, &c->context[0], &c->sid[0]); context 2773 security/selinux/ss/services.c rc = sidtab_context_to_sid(sidtab, &c->context[0], context 2968 security/selinux/ss/services.c struct context *context1; context 2969 security/selinux/ss/services.c struct context *context2; context 2970 security/selinux/ss/services.c struct context newcon; context 3066 security/selinux/ss/services.c struct context *nlbl_ctx; context 3067 security/selinux/ss/services.c struct context *xfrm_ctx; context 3267 security/selinux/ss/services.c struct context au_ctxt; context 3407 security/selinux/ss/services.c struct context *ctxt; context 3589 security/selinux/ss/services.c struct context *ctx; context 3590 security/selinux/ss/services.c struct context ctx_new; context 3657 security/selinux/ss/services.c struct context *ctx; context 40 security/selinux/ss/sidtab.c int sidtab_set_initial(struct sidtab *s, u32 sid, struct context *context) context 50 security/selinux/ss/sidtab.c rc = context_cpy(&entry->context, context); context 91 security/selinux/ss/sidtab.c static struct context *sidtab_do_lookup(struct sidtab *s, u32 index, int alloc) context 128 security/selinux/ss/sidtab.c return &entry->ptr_leaf->entries[index % SIDTAB_LEAF_ENTRIES].context; context 131 security/selinux/ss/sidtab.c static struct context *sidtab_lookup(struct sidtab *s, u32 index) context 142 security/selinux/ss/sidtab.c static struct context *sidtab_lookup_initial(struct sidtab *s, u32 sid) context 144 security/selinux/ss/sidtab.c return s->isids[sid - 1].set ? &s->isids[sid - 1].context : NULL; context 147 security/selinux/ss/sidtab.c static struct context *sidtab_search_core(struct sidtab *s, u32 sid, int force) context 149 security/selinux/ss/sidtab.c struct context *context; context 153 security/selinux/ss/sidtab.c context = sidtab_lookup(s, sid - (SECINITSID_NUM + 1)); context 155 security/selinux/ss/sidtab.c context = sidtab_lookup_initial(s, sid); context 156 security/selinux/ss/sidtab.c if (context && (!context->len || force)) context 157 security/selinux/ss/sidtab.c return context; context 163 security/selinux/ss/sidtab.c struct context *sidtab_search(struct sidtab *s, u32 sid) context 168 security/selinux/ss/sidtab.c struct context *sidtab_search_force(struct sidtab *s, u32 sid) context 175 security/selinux/ss/sidtab.c struct context *context, u32 *index) context 187 security/selinux/ss/sidtab.c context, index); context 197 security/selinux/ss/sidtab.c if (context_cmp(&node->entries[i].context, context)) { context 222 security/selinux/ss/sidtab.c static int sidtab_rcache_search(struct sidtab *s, struct context *context, context 233 security/selinux/ss/sidtab.c if (context_cmp(sidtab_do_lookup(s, v, 0), context)) { context 242 security/selinux/ss/sidtab.c static int sidtab_reverse_lookup(struct sidtab *s, struct context *context, context 248 security/selinux/ss/sidtab.c struct context *dst, *dst_convert; context 251 security/selinux/ss/sidtab.c rc = sidtab_rcache_search(s, context, index); context 261 security/selinux/ss/sidtab.c context, index); context 276 security/selinux/ss/sidtab.c if (context_cmp(sidtab_do_lookup(s, count, 0), context)) { context 296 security/selinux/ss/sidtab.c rc = context_cpy(dst, context); context 312 security/selinux/ss/sidtab.c rc = convert->func(context, dst_convert, convert->args); context 322 security/selinux/ss/sidtab.c if (context->len) context 324 security/selinux/ss/sidtab.c context->str); context 338 security/selinux/ss/sidtab.c int sidtab_context_to_sid(struct sidtab *s, struct context *context, u32 *sid) context 346 security/selinux/ss/sidtab.c if (entry->set && context_cmp(context, &entry->context)) { context 352 security/selinux/ss/sidtab.c rc = sidtab_reverse_lookup(s, context, sid); context 393 security/selinux/ss/sidtab.c rc = convert->func(&esrc->ptr_leaf->entries[i].context, context 394 security/selinux/ss/sidtab.c &edst->ptr_leaf->entries[i].context, context 476 security/selinux/ss/sidtab.c context_destroy(&node->entries[i].context); context 487 security/selinux/ss/sidtab.c context_destroy(&s->isids[i].context); context 20 security/selinux/ss/sidtab.h struct context context; context 60 security/selinux/ss/sidtab.h struct context context; context 64 security/selinux/ss/sidtab.h int (*func)(struct context *oldc, struct context *newc, void *args); context 94 security/selinux/ss/sidtab.h int sidtab_set_initial(struct sidtab *s, u32 sid, struct context *context); context 95 security/selinux/ss/sidtab.h struct context *sidtab_search(struct sidtab *s, u32 sid); context 96 security/selinux/ss/sidtab.h struct context *sidtab_search_force(struct sidtab *s, u32 sid); context 100 security/selinux/ss/sidtab.h int sidtab_context_to_sid(struct sidtab *s, struct context *context, u32 *sid); context 36 sound/firewire/amdtp-stream-trace.h __entry->channel = s->context->channel; context 96 sound/firewire/amdtp-stream.c s->context = ERR_PTR(-1); context 447 sound/firewire/amdtp-stream.c err = fw_iso_context_queue(s->context, params, &s->buffer.iso_buffer, context 776 sound/firewire/amdtp-stream.c static void out_stream_callback(struct fw_iso_context *context, u32 tstamp, context 815 sound/firewire/amdtp-stream.c fw_iso_context_queue_flush(s->context); context 818 sound/firewire/amdtp-stream.c static void in_stream_callback(struct fw_iso_context *context, u32 tstamp, context 853 sound/firewire/amdtp-stream.c fw_iso_context_queue_flush(s->context); context 857 sound/firewire/amdtp-stream.c static void amdtp_stream_first_callback(struct fw_iso_context *context, context 875 sound/firewire/amdtp-stream.c context->callback.sc = in_stream_callback; context 879 sound/firewire/amdtp-stream.c context->callback.sc = out_stream_callback; context 884 sound/firewire/amdtp-stream.c context->callback.sc(context, tstamp, header_length, header, s); context 961 sound/firewire/amdtp-stream.c s->context = fw_iso_context_create(fw_parent_device(s->unit)->card, context 964 sound/firewire/amdtp-stream.c if (IS_ERR(s->context)) { context 965 sound/firewire/amdtp-stream.c err = PTR_ERR(s->context); context 1011 sound/firewire/amdtp-stream.c err = fw_iso_context_start(s->context, -1, 0, tag); context 1021 sound/firewire/amdtp-stream.c fw_iso_context_destroy(s->context); context 1022 sound/firewire/amdtp-stream.c s->context = ERR_PTR(-1); context 1056 sound/firewire/amdtp-stream.c fw_iso_context_flush_completions(s->context); context 1075 sound/firewire/amdtp-stream.c fw_iso_context_flush_completions(s->context); context 1110 sound/firewire/amdtp-stream.c fw_iso_context_stop(s->context); context 1111 sound/firewire/amdtp-stream.c fw_iso_context_destroy(s->context); context 1112 sound/firewire/amdtp-stream.c s->context = ERR_PTR(-1); context 118 sound/firewire/amdtp-stream.h struct fw_iso_context *context; context 212 sound/firewire/amdtp-stream.h return !IS_ERR(s->context); context 274 sound/firewire/bebob/bebob_maudio.c bebob->rx_stream.context = ERR_PTR(-1); context 275 sound/firewire/bebob/bebob_maudio.c bebob->tx_stream.context = ERR_PTR(-1); context 58 sound/firewire/isight.c struct fw_iso_context *context; context 165 sound/firewire/isight.c static void isight_packet(struct fw_iso_context *context, u32 cycle, context 196 sound/firewire/isight.c err = fw_iso_context_queue(isight->context, &audio_packet, context 205 sound/firewire/isight.c fw_iso_context_queue_flush(isight->context); context 317 sound/firewire/isight.c if (!isight->context) context 320 sound/firewire/isight.c fw_iso_context_stop(isight->context); context 321 sound/firewire/isight.c fw_iso_context_destroy(isight->context); context 322 sound/firewire/isight.c isight->context = NULL; context 348 sound/firewire/isight.c if (isight->context) { context 367 sound/firewire/isight.c isight->context = fw_iso_context_create(isight->device->card, context 372 sound/firewire/isight.c if (IS_ERR(isight->context)) { context 373 sound/firewire/isight.c err = PTR_ERR(isight->context); context 374 sound/firewire/isight.c isight->context = NULL; context 379 sound/firewire/isight.c err = fw_iso_context_queue(isight->context, &audio_packet, context 389 sound/firewire/isight.c err = fw_iso_context_start(isight->context, -1, 0, context 397 sound/firewire/isight.c fw_iso_context_destroy(isight->context); context 398 sound/firewire/isight.c isight->context = NULL; context 241 sound/hda/hdac_regmap.c static int hda_reg_read(void *context, unsigned int reg, unsigned int *val) context 243 sound/hda/hdac_regmap.c struct hdac_device *codec = context; context 280 sound/hda/hdac_regmap.c static int hda_reg_write(void *context, unsigned int reg, unsigned int val) context 282 sound/hda/hdac_regmap.c struct hdac_device *codec = context; context 229 sound/pci/cs46xx/cs46xx_dsp_task_types.h struct dsp_hf_save_area context; context 1979 sound/pci/hda/hda_intel.c static void azx_firmware_cb(const struct firmware *fw, void *context) context 1981 sound/pci/hda/hda_intel.c struct snd_card *card = context; context 167 sound/soc/codecs/ab8500-codec.c static int ab8500_codec_read_reg(void *context, unsigned int reg, context 170 sound/soc/codecs/ab8500-codec.c struct device *dev = context; context 182 sound/soc/codecs/ab8500-codec.c static int ab8500_codec_write_reg(void *context, unsigned int reg, context 185 sound/soc/codecs/ab8500-codec.c struct device *dev = context; context 183 sound/soc/codecs/adau1701.c static int adau1701_reg_write(void *context, unsigned int reg, context 186 sound/soc/codecs/adau1701.c struct i2c_client *client = context; context 213 sound/soc/codecs/adau1701.c static int adau1701_reg_read(void *context, unsigned int reg, context 220 sound/soc/codecs/adau1701.c struct i2c_client *client = context; context 513 sound/soc/codecs/cx2072x.c static int cx2072x_reg_write(void *context, unsigned int reg, context 529 sound/soc/codecs/cx2072x.c return cx2072x_reg_raw_write(context, reg, &raw_value, size); context 532 sound/soc/codecs/cx2072x.c static int cx2072x_reg_read(void *context, unsigned int reg, context 535 sound/soc/codecs/cx2072x.c struct i2c_client *client = context; context 475 sound/soc/codecs/jz4725b.c static int jz4725b_codec_reg_read(void *context, unsigned int reg, context 478 sound/soc/codecs/jz4725b.c struct jz_icdc *icdc = context; context 500 sound/soc/codecs/jz4725b.c static int jz4725b_codec_reg_write(void *context, unsigned int reg, context 503 sound/soc/codecs/jz4725b.c struct jz_icdc *icdc = context; context 16 sound/soc/codecs/rl6347a.c int rl6347a_hw_write(void *context, unsigned int reg, unsigned int value) context 18 sound/soc/codecs/rl6347a.c struct i2c_client *client = context; context 59 sound/soc/codecs/rl6347a.c int rl6347a_hw_read(void *context, unsigned int reg, unsigned int *value) context 61 sound/soc/codecs/rl6347a.c struct i2c_client *client = context; context 28 sound/soc/codecs/rl6347a.h int rl6347a_hw_write(void *context, unsigned int reg, unsigned int value); context 29 sound/soc/codecs/rl6347a.h int rl6347a_hw_read(void *context, unsigned int reg, unsigned int *value); context 1118 sound/soc/codecs/rt5514.c static int rt5514_i2c_read(void *context, unsigned int reg, unsigned int *val) context 1120 sound/soc/codecs/rt5514.c struct i2c_client *client = context; context 1128 sound/soc/codecs/rt5514.c static int rt5514_i2c_write(void *context, unsigned int reg, unsigned int val) context 1130 sound/soc/codecs/rt5514.c struct i2c_client *client = context; context 4789 sound/soc/codecs/rt5677.c static int rt5677_read(void *context, unsigned int reg, unsigned int *val) context 4791 sound/soc/codecs/rt5677.c struct i2c_client *client = context; context 4811 sound/soc/codecs/rt5677.c static int rt5677_write(void *context, unsigned int reg, unsigned int val) context 4813 sound/soc/codecs/rt5677.c struct i2c_client *client = context; context 84 sound/soc/codecs/sti-sas.c static int sti_sas_read_reg(void *context, unsigned int reg, context 87 sound/soc/codecs/sti-sas.c struct sti_sas_data *drvdata = context; context 98 sound/soc/codecs/sti-sas.c static int sti_sas_write_reg(void *context, unsigned int reg, context 101 sound/soc/codecs/sti-sas.c struct sti_sas_data *drvdata = context; context 165 sound/soc/codecs/tas5086.c static int tas5086_reg_write(void *context, unsigned int reg, context 168 sound/soc/codecs/tas5086.c struct i2c_client *client = context; context 193 sound/soc/codecs/tas5086.c static int tas5086_reg_read(void *context, unsigned int reg, context 196 sound/soc/codecs/tas5086.c struct i2c_client *client = context; context 76 sound/soc/codecs/tas571x.c static int tas571x_reg_write(void *context, unsigned int reg, context 79 sound/soc/codecs/tas571x.c struct i2c_client *client = context; context 102 sound/soc/codecs/tas571x.c static int tas571x_reg_read(void *context, unsigned int reg, context 105 sound/soc/codecs/tas571x.c struct i2c_client *client = context; context 64 sound/soc/codecs/uda134x.c static int uda134x_regmap_write(void *context, unsigned int reg, context 67 sound/soc/codecs/uda134x.c struct uda134x_platform_data *pd = context; context 437 sound/soc/codecs/wm0010.c xfer->m.context = xfer; context 861 sound/soc/codecs/wm8958-dsp2.c static void wm8958_enh_eq_loaded(const struct firmware *fw, void *context) context 863 sound/soc/codecs/wm8958-dsp2.c struct snd_soc_component *component = context; context 873 sound/soc/codecs/wm8958-dsp2.c static void wm8958_mbc_vss_loaded(const struct firmware *fw, void *context) context 875 sound/soc/codecs/wm8958-dsp2.c struct snd_soc_component *component = context; context 885 sound/soc/codecs/wm8958-dsp2.c static void wm8958_mbc_loaded(const struct firmware *fw, void *context) context 887 sound/soc/codecs/wm8958-dsp2.c struct snd_soc_component *component = context; context 45 sound/soc/intel/atom/sst/sst.c static irqreturn_t intel_sst_interrupt_mrfld(int irq, void *context) context 52 sound/soc/intel/atom/sst/sst.c struct intel_sst_drv *drv = (struct intel_sst_drv *) context; context 114 sound/soc/intel/atom/sst/sst.c static irqreturn_t intel_sst_irq_thread_mrfld(int irq, void *context) context 116 sound/soc/intel/atom/sst/sst.c struct intel_sst_drv *drv = (struct intel_sst_drv *) context; context 486 sound/soc/intel/atom/sst/sst.h void sst_firmware_load_cb(const struct firmware *fw, void *context); context 312 sound/soc/intel/atom/sst/sst_loader.c void sst_firmware_load_cb(const struct firmware *fw, void *context) context 314 sound/soc/intel/atom/sst/sst_loader.c struct intel_sst_drv *ctx = context; context 171 sound/soc/intel/baytrail/sst-baytrail-dsp.c static irqreturn_t sst_byt_irq(int irq, void *context) context 173 sound/soc/intel/baytrail/sst-baytrail-dsp.c struct sst_dsp *sst = (struct sst_dsp *) context; context 296 sound/soc/intel/baytrail/sst-baytrail-ipc.c static irqreturn_t sst_byt_irq_thread(int irq, void *context) context 298 sound/soc/intel/baytrail/sst-baytrail-ipc.c struct sst_dsp *sst = (struct sst_dsp *) context; context 47 sound/soc/intel/common/sst-acpi.c static void sst_acpi_fw_cb(const struct firmware *fw, void *context) context 49 sound/soc/intel/common/sst-acpi.c struct platform_device *pdev = context; context 51 sound/soc/intel/common/sst-dsp-priv.h irqreturn_t (*irq_handler)(int irq, void *context); context 362 sound/soc/intel/common/sst-dsp-priv.h struct sst_module_runtime_context *context); context 364 sound/soc/intel/common/sst-dsp-priv.h struct sst_module_runtime_context *context); context 178 sound/soc/intel/common/sst-dsp.h irqreturn_t (*thread)(int irq, void *context); context 926 sound/soc/intel/common/sst-firmware.c struct sst_module_runtime_context *context) context 936 sound/soc/intel/common/sst-firmware.c context->buffer = dma_alloc_coherent(dsp->dma_dev, context 938 sound/soc/intel/common/sst-firmware.c &context->dma_buffer, GFP_DMA | GFP_KERNEL); context 939 sound/soc/intel/common/sst-firmware.c if (!context->buffer) { context 952 sound/soc/intel/common/sst-firmware.c ret = sst_dsp_dma_copyfrom(dsp, context->dma_buffer, context 961 sound/soc/intel/common/sst-firmware.c sst_memcpy32(context->buffer, dsp->addr.lpe + context 972 sound/soc/intel/common/sst-firmware.c struct sst_module_runtime_context *context) context 984 sound/soc/intel/common/sst-firmware.c if (!context->buffer) { context 997 sound/soc/intel/common/sst-firmware.c context->dma_buffer, module->persistent_size); context 1005 sound/soc/intel/common/sst-firmware.c context->buffer, module->persistent_size); context 1008 sound/soc/intel/common/sst-firmware.c context->buffer, context->dma_buffer); context 1009 sound/soc/intel/common/sst-firmware.c context->buffer = NULL; context 212 sound/soc/intel/haswell/sst-haswell-dsp.c static irqreturn_t hsw_irq(int irq, void *context) context 214 sound/soc/intel/haswell/sst-haswell-dsp.c struct sst_dsp *sst = (struct sst_dsp *) context; context 760 sound/soc/intel/haswell/sst-haswell-ipc.c static irqreturn_t hsw_irq_thread(int irq, void *context) context 762 sound/soc/intel/haswell/sst-haswell-ipc.c struct sst_dsp *sst = (struct sst_dsp *) context; context 107 sound/soc/intel/haswell/sst-haswell-pcm.c struct sst_module_runtime_context context; context 1299 sound/soc/intel/haswell/sst-haswell-pcm.c &pcm_data->context); context 1347 sound/soc/intel/haswell/sst-haswell-pcm.c &pcm_data->context); context 292 sound/soc/intel/skylake/cnl-sst.c static irqreturn_t cnl_dsp_irq_thread_handler(int irq, void *context) context 294 sound/soc/intel/skylake/cnl-sst.c struct sst_dsp *dsp = context; context 489 sound/soc/intel/skylake/skl-sst-ipc.c irqreturn_t skl_dsp_irq_thread_handler(int irq, void *context) context 491 sound/soc/intel/skylake/skl-sst-ipc.c struct sst_dsp *dsp = context; context 108 sound/soc/intel/skylake/skl-sst-ipc.h irqreturn_t skl_dsp_irq_thread_handler(int irq, void *context); context 34 sound/soc/soc-acpi.c void *context, void **ret) context 38 sound/soc/soc-acpi.c struct snd_soc_acpi_package_context *pkg_ctx = context; context 265 sound/soc/sof/intel/bdw.c static irqreturn_t bdw_irq_handler(int irq, void *context) context 267 sound/soc/sof/intel/bdw.c struct snd_sof_dev *sdev = context; context 279 sound/soc/sof/intel/bdw.c static irqreturn_t bdw_irq_thread(int irq, void *context) context 281 sound/soc/sof/intel/bdw.c struct snd_sof_dev *sdev = context; context 164 sound/soc/sof/intel/byt.c static irqreturn_t byt_irq_handler(int irq, void *context) context 166 sound/soc/sof/intel/byt.c struct snd_sof_dev *sdev = context; context 178 sound/soc/sof/intel/byt.c static irqreturn_t byt_irq_thread(int irq, void *context) context 180 sound/soc/sof/intel/byt.c struct snd_sof_dev *sdev = context; context 30 sound/soc/sof/intel/cnl.c static irqreturn_t cnl_ipc_irq_thread(int irq, void *context) context 32 sound/soc/sof/intel/cnl.c struct snd_sof_dev *sdev = context; context 131 sound/soc/sof/intel/hda-ipc.c irqreturn_t hda_dsp_ipc_irq_thread(int irq, void *context) context 133 sound/soc/sof/intel/hda-ipc.c struct snd_sof_dev *sdev = context; context 239 sound/soc/sof/intel/hda-ipc.c irqreturn_t hda_dsp_ipc_irq_handler(int irq, void *context) context 241 sound/soc/sof/intel/hda-ipc.c struct snd_sof_dev *sdev = context; context 536 sound/soc/sof/intel/hda-stream.c irqreturn_t hda_dsp_stream_interrupt(int irq, void *context) context 538 sound/soc/sof/intel/hda-stream.c struct hdac_bus *bus = context; context 587 sound/soc/sof/intel/hda-stream.c irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context) context 589 sound/soc/sof/intel/hda-stream.c struct hdac_bus *bus = context; context 499 sound/soc/sof/intel/hda.h irqreturn_t hda_dsp_stream_interrupt(int irq, void *context); context 500 sound/soc/sof/intel/hda.h irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context); context 528 sound/soc/sof/intel/hda.h irqreturn_t hda_dsp_ipc_irq_handler(int irq, void *context); context 529 sound/soc/sof/intel/hda.h irqreturn_t hda_dsp_ipc_irq_thread(int irq, void *context); context 116 sound/soc/sof/sof-priv.h irqreturn_t (*irq_handler)(int irq, void *context); /* optional */ context 117 sound/soc/sof/sof-priv.h irqreturn_t (*irq_thread)(int irq, void *context); /* optional */ context 29 sound/soc/sunxi/sun8i-adda-pr-regmap.c static int adda_reg_read(void *context, unsigned int reg, unsigned int *val) context 31 sound/soc/sunxi/sun8i-adda-pr-regmap.c void __iomem *base = (void __iomem *)context; context 52 sound/soc/sunxi/sun8i-adda-pr-regmap.c static int adda_reg_write(void *context, unsigned int reg, unsigned int val) context 54 sound/soc/sunxi/sun8i-adda-pr-regmap.c void __iomem *base = (void __iomem *)context; context 120 sound/soc/ti/davinci-mcasp.c struct davinci_mcasp_context context; context 2163 sound/soc/ti/davinci-mcasp.c mcasp->context.xrsr_regs = devm_kcalloc(&pdev->dev, context 2166 sound/soc/ti/davinci-mcasp.c if (!mcasp->context.xrsr_regs) { context 2383 sound/soc/ti/davinci-mcasp.c struct davinci_mcasp_context *context = &mcasp->context; context 2388 sound/soc/ti/davinci-mcasp.c context->config_regs[i] = mcasp_get_reg(mcasp, context_regs[i]); context 2392 sound/soc/ti/davinci-mcasp.c context->afifo_regs[0] = mcasp_get_reg(mcasp, reg); context 2396 sound/soc/ti/davinci-mcasp.c context->afifo_regs[1] = mcasp_get_reg(mcasp, reg); context 2400 sound/soc/ti/davinci-mcasp.c context->xrsr_regs[i] = mcasp_get_reg(mcasp, context 2409 sound/soc/ti/davinci-mcasp.c struct davinci_mcasp_context *context = &mcasp->context; context 2414 sound/soc/ti/davinci-mcasp.c mcasp_set_reg(mcasp, context_regs[i], context->config_regs[i]); context 2418 sound/soc/ti/davinci-mcasp.c mcasp_set_reg(mcasp, reg, context->afifo_regs[0]); context 2422 sound/soc/ti/davinci-mcasp.c mcasp_set_reg(mcasp, reg, context->afifo_regs[1]); context 2427 sound/soc/ti/davinci-mcasp.c context->xrsr_regs[i]); context 22 sound/usb/6fire/comm.c u8 *buffer, void *context, void(*handler)(struct urb *urb)) context 28 sound/usb/6fire/comm.c urb->context = context; context 35 sound/usb/6fire/comm.c struct comm_runtime *rt = urb->context; context 172 sound/usb/6fire/comm.c urb->context = rt; context 28 sound/usb/6fire/comm.h void *context, void(*handler)(struct urb *urb)); context 24 sound/usb/6fire/midi.c struct midi_runtime *rt = urb->context; context 288 sound/usb/6fire/pcm.c struct pcm_urb *in_urb = usb_urb->context; context 377 sound/usb/6fire/pcm.c struct pcm_urb *urb = usb_urb->context; context 580 sound/usb/6fire/pcm.c urb->instance.context = urb; context 207 sound/usb/bcd2000/bcd2000.c struct bcd2000 *bcd2k = urb->context; context 225 sound/usb/bcd2000/bcd2000.c struct bcd2000 *bcd2k = urb->context; context 620 sound/usb/caiaq/audio.c struct snd_usb_caiaq_cb_info *info = urb->context; context 678 sound/usb/caiaq/audio.c struct snd_usb_caiaq_cb_info *oinfo = out->context; context 696 sound/usb/caiaq/audio.c struct snd_usb_caiaq_cb_info *info = urb->context; context 751 sound/usb/caiaq/audio.c urbs[i]->context = &cdev->data_cb_info[i]; context 149 sound/usb/caiaq/device.c struct snd_usb_caiaqdev *cdev = urb->context; context 492 sound/usb/caiaq/input.c struct snd_usb_caiaqdev *cdev = urb->context; context 152 sound/usb/caiaq/midi.c struct snd_usb_caiaqdev *cdev = urb->context; context 357 sound/usb/endpoint.c struct snd_urb_ctx *ctx = urb->context; context 782 sound/usb/endpoint.c u->urb->context = u; context 821 sound/usb/endpoint.c u->urb->context = u; context 967 sound/usb/endpoint.c prepare_outbound_urb(ep, urb->context); context 969 sound/usb/endpoint.c prepare_inbound_urb(ep, urb->context); context 1100 sound/usb/endpoint.c in_ctx = urb->context; context 306 sound/usb/hiface/pcm.c struct pcm_urb *out_urb = usb_urb->context; context 51 sound/usb/line6/capture.c urb_in->context = line6pcm; context 150 sound/usb/line6/capture.c struct snd_line6_pcm *line6pcm = (struct snd_line6_pcm *)urb->context; context 141 sound/usb/line6/driver.c struct message *msg = (struct message *)urb->context; context 286 sound/usb/line6/driver.c struct usb_line6 *line6 = (struct usb_line6 *)urb->context; context 76 sound/usb/line6/midi.c struct usb_line6 *line6 = (struct usb_line6 *)urb->context; context 201 sound/usb/line6/playback.c urb_out->context = line6pcm; context 306 sound/usb/line6/playback.c struct snd_line6_pcm *line6pcm = (struct snd_line6_pcm *)urb->context; context 257 sound/usb/midi.c struct snd_usb_midi_in_endpoint *ep = urb->context; context 281 sound/usb/midi.c struct out_urb_context *context = urb->context; context 282 sound/usb/midi.c struct snd_usb_midi_out_endpoint *ep = context->ep; context 287 sound/usb/midi.c urb_index = context - ep->urbs; context 174 sound/usb/misc/ua101.c struct ua101 *ua = urb->urb.context; context 201 sound/usb/misc/ua101.c struct ua101 *ua = urb->context; context 349 sound/usb/misc/ua101.c struct ua101 *ua = urb->context; context 423 sound/usb/misc/ua101.c struct ua101 *ua = urb->context; context 1117 sound/usb/misc/ua101.c urb->urb.context = ua; context 3333 sound/usb/mixer.c struct usb_mixer_interface *mixer = urb->context; context 191 sound/usb/mixer_quirks.c struct usb_mixer_interface *mixer = urb->context; context 1940 sound/usb/mixer_scarlett_gen2.c struct usb_mixer_interface *mixer = urb->context; context 1564 sound/usb/pcm.c struct snd_urb_ctx *ctx = urb->context; context 63 sound/usb/usx2y/usb_stream.c urb->context = sk; context 505 sound/usb/usx2y/usb_stream.c struct usb_stream_kernel *sk = urb->context; context 512 sound/usb/usx2y/usb_stream.c struct usb_stream_kernel *sk = urb->context; context 600 sound/usb/usx2y/usb_stream.c struct usb_stream_kernel *sk = urb->context; context 645 sound/usb/usx2y/usb_stream.c struct usb_stream_kernel *sk = urb->context; context 167 sound/usb/usx2y/usbusx2y.c struct usX2Ydev *usX2Y = urb->context; context 177 sound/usb/usx2y/usbusx2y.c struct usX2Ydev *usX2Y = urb->context; context 285 sound/usb/usx2y/usbusx2yaudio.c struct snd_usX2Y_substream *subs = urb->context; context 342 sound/usb/usx2y/usbusx2yaudio.c struct snd_usX2Y_substream *subs = urb->context; context 436 sound/usb/usx2y/usbusx2yaudio.c (*purb)->context = subs; context 632 sound/usb/usx2y/usbusx2yaudio.c struct usX2Ydev *usX2Y = urb->context; context 220 sound/usb/usx2y/usx2yhwdeppcm.c struct snd_usX2Y_substream *subs = urb->context; context 279 sound/usb/usx2y/usx2yhwdeppcm.c struct snd_usX2Y_substream *subs = urb->context; context 334 sound/usb/usx2y/usx2yhwdeppcm.c (*purb)->context = subs; context 7 tools/build/feature/test-libcrypto.c MD5_CTX context; context 11 tools/build/feature/test-libcrypto.c MD5_Init(&context); context 12 tools/build/feature/test-libcrypto.c MD5_Update(&context, &dat[0], sizeof(dat)); context 13 tools/build/feature/test-libcrypto.c MD5_Final(&md[0], &context); context 298 tools/include/uapi/drm/drm.h int context; context 403 tools/include/uapi/drm/drm.h int context; /**< Context handle */ context 1148 tools/include/uapi/drm/i915_drm.h #define i915_execbuffer2_set_context_id(eb2, context) \ context 1149 tools/include/uapi/drm/i915_drm.h (eb2).rsvd1 = context & I915_EXEC_CONTEXT_ID_MASK context 64 tools/include/uapi/linux/usbdevice_fs.h void __user *context; context 80 tools/lib/traceevent/event-parse.c void *context; context 5475 tools/lib/traceevent/event-parse.c event->context); context 6197 tools/lib/traceevent/event-parse.c event->context = handle->context; context 6795 tools/lib/traceevent/event-parse.c tep_event_handler_func func, void *context) context 6808 tools/lib/traceevent/event-parse.c event->context = context; context 6837 tools/lib/traceevent/event-parse.c handle->context = context; context 6844 tools/lib/traceevent/event-parse.c tep_event_handler_func func, void *context) context 6855 tools/lib/traceevent/event-parse.c if (func != handler->func || context != handler->context) context 6879 tools/lib/traceevent/event-parse.c tep_event_handler_func func, void *context) context 6889 tools/lib/traceevent/event-parse.c if (event->handler == func && event->context == context) { context 6894 tools/lib/traceevent/event-parse.c event->context = NULL; context 6902 tools/lib/traceevent/event-parse.c func, context)) context 65 tools/lib/traceevent/event-parse.h void *context); context 289 tools/lib/traceevent/event-parse.h void *context; context 498 tools/lib/traceevent/event-parse.h tep_event_handler_func func, void *context); context 501 tools/lib/traceevent/event-parse.h tep_event_handler_func func, void *context); context 127 tools/lib/traceevent/plugins/plugin_function.c struct tep_event *event, void *context) context 30 tools/lib/traceevent/plugins/plugin_hrtimer.c struct tep_event *event, void *context) context 50 tools/lib/traceevent/plugins/plugin_hrtimer.c struct tep_event *event, void *context) context 28 tools/lib/traceevent/plugins/plugin_kmem.c struct tep_event *event, void *context) context 273 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 296 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 335 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 349 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 353 tools/lib/traceevent/plugins/plugin_kvm.c return kvm_nested_vmexit_inject_handler(s, record, event, context); context 375 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 422 tools/lib/traceevent/plugins/plugin_kvm.c struct tep_event *event, void *context) context 435 tools/lib/traceevent/plugins/plugin_kvm.c return kvm_mmu_print_role(s, record, event, context); context 63 tools/lib/traceevent/plugins/plugin_mac80211.c struct tep_event *event, void *context) context 70 tools/lib/traceevent/plugins/plugin_sched_switch.c struct tep_event *event, void *context) context 99 tools/lib/traceevent/plugins/plugin_sched_switch.c struct tep_event *event, void *context) context 1554 tools/perf/builtin-top.c top.annotation_opts.context = 4; context 42 tools/perf/scripts/perl/Perf-Trace-Util/Context.c struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0))); context 46 tools/perf/scripts/perl/Perf-Trace-Util/Context.c RETVAL = common_pc(context); context 65 tools/perf/scripts/perl/Perf-Trace-Util/Context.c struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0))); context 69 tools/perf/scripts/perl/Perf-Trace-Util/Context.c RETVAL = common_flags(context); context 88 tools/perf/scripts/perl/Perf-Trace-Util/Context.c struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0))); context 92 tools/perf/scripts/perl/Perf-Trace-Util/Context.c RETVAL = common_lock_depth(context); context 26 tools/perf/scripts/python/Perf-Trace-Util/Context.c PyObject *context; context 29 tools/perf/scripts/python/Perf-Trace-Util/Context.c if (!PyArg_ParseTuple(args, "O", &context)) context 32 tools/perf/scripts/python/Perf-Trace-Util/Context.c scripting_context = _PyCapsule_GetPointer(context, NULL); context 42 tools/perf/scripts/python/Perf-Trace-Util/Context.c PyObject *context; context 45 tools/perf/scripts/python/Perf-Trace-Util/Context.c if (!PyArg_ParseTuple(args, "O", &context)) context 48 tools/perf/scripts/python/Perf-Trace-Util/Context.c scripting_context = _PyCapsule_GetPointer(context, NULL); context 58 tools/perf/scripts/python/Perf-Trace-Util/Context.c PyObject *context; context 61 tools/perf/scripts/python/Perf-Trace-Util/Context.c if (!PyArg_ParseTuple(args, "O", &context)) context 64 tools/perf/scripts/python/Perf-Trace-Util/Context.c scripting_context = _PyCapsule_GetPointer(context, NULL); context 2280 tools/perf/util/annotate.c bool context = opts->context; context 2321 tools/perf/util/annotate.c if (context && queue == NULL) { context 2333 tools/perf/util/annotate.c if (context) { context 2349 tools/perf/util/annotate.c if (!context) context 2351 tools/perf/util/annotate.c if (queue_len == context) context 93 tools/perf/util/annotate.h int context; context 41 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c cs_etm_decoder__mem_access(const void *context, context 48 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c struct cs_etm_decoder *decoder = (struct cs_etm_decoder *) context; context 506 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c if (!elem->context.ctxt_id_valid) context 509 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c tid = elem->context.context_id; context 523 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c const void *context, context 529 tools/perf/util/cs-etm-decoder/cs-etm-decoder.c struct cs_etm_decoder *decoder = (struct cs_etm_decoder *) context; context 402 tools/perf/util/evsel.c #define MOD_PRINT(context, mod) do { \ context 403 tools/perf/util/evsel.c if (!attr->exclude_##context) { \ context 146 tools/perf/util/genelf.c MD5_CTX context; context 151 tools/perf/util/genelf.c MD5_Init(&context); context 152 tools/perf/util/genelf.c MD5_Update(&context, &load_addr, sizeof(load_addr)); context 153 tools/perf/util/genelf.c MD5_Update(&context, code, csize); context 154 tools/perf/util/genelf.c MD5_Final((unsigned char *)note->build_id, &context); context 797 tools/perf/util/scripting-engines/trace-event-python.c PyObject *handler, *context, *t, *obj = NULL, *callchain; context 844 tools/perf/util/scripting-engines/trace-event-python.c context = _PyCapsule_New(scripting_context, NULL, NULL); context 847 tools/perf/util/scripting-engines/trace-event-python.c PyTuple_SetItem(t, n++, context); context 466 tools/perf/util/thread-stack.c u64 context = callchain_context(ip, kernel_start); context 475 tools/perf/util/thread-stack.c chain->ips[0] = context; context 483 tools/perf/util/thread-stack.c last_context = context; context 487 tools/perf/util/thread-stack.c context = callchain_context(ip, kernel_start); context 488 tools/perf/util/thread-stack.c if (context != last_context) { context 491 tools/perf/util/thread-stack.c chain->ips[i++] = context; context 492 tools/perf/util/thread-stack.c last_context = context; context 15 tools/perf/util/trace-event-parse.c static int get_common_field(struct scripting_context *context, context 18 tools/perf/util/trace-event-parse.c struct tep_handle *pevent = context->pevent; context 35 tools/perf/util/trace-event-parse.c return tep_read_number(pevent, context->event_data + *offset, *size); context 38 tools/perf/util/trace-event-parse.c int common_lock_depth(struct scripting_context *context) context 44 tools/perf/util/trace-event-parse.c ret = get_common_field(context, &size, &offset, context 52 tools/perf/util/trace-event-parse.c int common_flags(struct scripting_context *context) context 58 tools/perf/util/trace-event-parse.c ret = get_common_field(context, &size, &offset, context 66 tools/perf/util/trace-event-parse.c int common_pc(struct scripting_context *context) context 72 tools/perf/util/trace-event-parse.c ret = get_common_field(context, &size, &offset, context 102 tools/perf/util/trace-event.h int common_pc(struct scripting_context *context); context 103 tools/perf/util/trace-event.h int common_flags(struct scripting_context *context); context 104 tools/perf/util/trace-event.h int common_lock_depth(struct scripting_context *context); context 898 tools/power/acpi/os_specific/service_layers/osunixxf.c void *context) context 1270 tools/power/acpi/os_specific/service_layers/osunixxf.c acpi_osd_exec_callback function, void *context) context 1276 tools/power/acpi/os_specific/service_layers/osunixxf.c pthread_create(&thread, NULL, (PTHREAD_CALLBACK) function, context); context 1291 tools/power/acpi/os_specific/service_layers/osunixxf.c acpi_osd_exec_callback function, void *context) context 1294 tools/power/acpi/os_specific/service_layers/osunixxf.c function(context); context 172 tools/testing/nvdimm/test/nfit.c u32 context; context 276 tools/testing/nvdimm/test/nfit.c fw->context++; context 279 tools/testing/nvdimm/test/nfit.c nd_cmd->context = fw->context; context 281 tools/testing/nvdimm/test/nfit.c dev_dbg(dev, "%s: context issued: %#x\n", __func__, nd_cmd->context); context 312 tools/testing/nvdimm/test/nfit.c if (nd_cmd->context != fw->context) { context 314 tools/testing/nvdimm/test/nfit.c __func__, nd_cmd->context, fw->context); context 354 tools/testing/nvdimm/test/nfit.c __func__, nd_cmd->context, nd_cmd->ctrl_flags); context 358 tools/testing/nvdimm/test/nfit.c if (nd_cmd->context != fw->context) { context 360 tools/testing/nvdimm/test/nfit.c __func__, nd_cmd->context, context 361 tools/testing/nvdimm/test/nfit.c fw->context); context 401 tools/testing/nvdimm/test/nfit.c if (nd_cmd->context != fw->context) { context 403 tools/testing/nvdimm/test/nfit.c __func__, nd_cmd->context, fw->context); context 408 tools/testing/nvdimm/test/nfit.c dev_dbg(dev, "%s context: %#x\n", __func__, nd_cmd->context); context 175 tools/testing/nvdimm/test/nfit_test.h __u32 context; context 180 tools/testing/nvdimm/test/nfit_test.h __u32 context; context 191 tools/testing/nvdimm/test/nfit_test.h __u32 context; context 196 tools/testing/nvdimm/test/nfit_test.h __u32 context; context 155 tools/testing/selftests/mqueue/mq_perf_tests.c void sig_action_SIGUSR1(int signum, siginfo_t *info, void *context); context 156 tools/testing/selftests/mqueue/mq_perf_tests.c void sig_action(int signum, siginfo_t *info, void *context); context 210 tools/testing/selftests/mqueue/mq_perf_tests.c void sig_action_SIGUSR1(int signum, siginfo_t *info, void *context) context 223 tools/testing/selftests/mqueue/mq_perf_tests.c void sig_action(int signum, siginfo_t *info, void *context) context 40 tools/testing/selftests/powerpc/math/fpu_signal.c void signal_fpu_sig(int sig, siginfo_t *info, void *context) context 43 tools/testing/selftests/powerpc/math/fpu_signal.c ucontext_t *uc = context; context 43 tools/testing/selftests/powerpc/math/vmx_signal.c void signal_vmx_sig(int sig, siginfo_t *info, void *context) context 46 tools/testing/selftests/powerpc/math/vmx_signal.c ucontext_t *uc = context; context 4531 virt/kvm/kvm_main.c static int kvm_vm_worker_thread(void *context) context 4537 virt/kvm/kvm_main.c struct kvm_vm_worker_thread_context *init_context = context;