sc                 10 arch/alpha/boot/stdio.c 	const char *sc;
sc                 12 arch/alpha/boot/stdio.c 	for (sc = s; count-- && *sc != '\0'; ++sc)
sc                 14 arch/alpha/boot/stdio.c 	return sc - s;
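
The arch/alpha/boot/stdio.c lines above (and the arm and powerpc boot copies further down) all implement the same bounded string-length loop for the freestanding boot environment. A self-contained sketch of that pattern, using the hypothetical name boot_strnlen so it does not collide with the C library:

#include <stddef.h>
#include <stdio.h>

/* Bounded string length: scan at most count bytes, stop at the first NUL. */
size_t boot_strnlen(const char *s, size_t count)
{
	const char *sc;

	for (sc = s; count-- && *sc != '\0'; ++sc)
		/* nothing */;
	return sc - s;
}

int main(void)
{
	printf("%zu\n", boot_strnlen("hello", 3));	/* prints 3 */
	return 0;
}
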
sc                127 arch/alpha/kernel/signal.c 	struct sigcontext sc;
sc                149 arch/alpha/kernel/signal.c restore_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs)
sc                153 arch/alpha/kernel/signal.c 	long i, err = __get_user(regs->pc, &sc->sc_pc);
sc                159 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r0, sc->sc_regs+0);
sc                160 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r1, sc->sc_regs+1);
sc                161 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r2, sc->sc_regs+2);
sc                162 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r3, sc->sc_regs+3);
sc                163 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r4, sc->sc_regs+4);
sc                164 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r5, sc->sc_regs+5);
sc                165 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r6, sc->sc_regs+6);
sc                166 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r7, sc->sc_regs+7);
sc                167 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r8, sc->sc_regs+8);
sc                168 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r9, sc->sc_regs+9);
sc                169 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r10, sc->sc_regs+10);
sc                170 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r11, sc->sc_regs+11);
sc                171 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r12, sc->sc_regs+12);
sc                172 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r13, sc->sc_regs+13);
sc                173 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r14, sc->sc_regs+14);
sc                174 arch/alpha/kernel/signal.c 	err |= __get_user(sw->r15, sc->sc_regs+15);
sc                175 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r16, sc->sc_regs+16);
sc                176 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r17, sc->sc_regs+17);
sc                177 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r18, sc->sc_regs+18);
sc                178 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r19, sc->sc_regs+19);
sc                179 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r20, sc->sc_regs+20);
sc                180 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r21, sc->sc_regs+21);
sc                181 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r22, sc->sc_regs+22);
sc                182 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r23, sc->sc_regs+23);
sc                183 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r24, sc->sc_regs+24);
sc                184 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r25, sc->sc_regs+25);
sc                185 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r26, sc->sc_regs+26);
sc                186 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r27, sc->sc_regs+27);
sc                187 arch/alpha/kernel/signal.c 	err |= __get_user(regs->r28, sc->sc_regs+28);
sc                188 arch/alpha/kernel/signal.c 	err |= __get_user(regs->gp, sc->sc_regs+29);
sc                189 arch/alpha/kernel/signal.c 	err |= __get_user(usp, sc->sc_regs+30);
sc                193 arch/alpha/kernel/signal.c 		err |= __get_user(sw->fp[i], sc->sc_fpregs+i);
sc                194 arch/alpha/kernel/signal.c 	err |= __get_user(sw->fp[31], &sc->sc_fpcr);
sc                204 arch/alpha/kernel/signal.c do_sigreturn(struct sigcontext __user *sc)
sc                210 arch/alpha/kernel/signal.c 	if (!access_ok(sc, sizeof(*sc)))
sc                212 arch/alpha/kernel/signal.c 	if (__get_user(set.sig[0], &sc->sc_mask))
sc                217 arch/alpha/kernel/signal.c 	if (restore_sigcontext(sc, regs))
sc                271 arch/alpha/kernel/signal.c setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs, 
sc                277 arch/alpha/kernel/signal.c 	err |= __put_user(on_sig_stack((unsigned long)sc), &sc->sc_onstack);
sc                278 arch/alpha/kernel/signal.c 	err |= __put_user(mask, &sc->sc_mask);
sc                279 arch/alpha/kernel/signal.c 	err |= __put_user(regs->pc, &sc->sc_pc);
sc                280 arch/alpha/kernel/signal.c 	err |= __put_user(8, &sc->sc_ps);
sc                282 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r0 , sc->sc_regs+0);
sc                283 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r1 , sc->sc_regs+1);
sc                284 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r2 , sc->sc_regs+2);
sc                285 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r3 , sc->sc_regs+3);
sc                286 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r4 , sc->sc_regs+4);
sc                287 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r5 , sc->sc_regs+5);
sc                288 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r6 , sc->sc_regs+6);
sc                289 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r7 , sc->sc_regs+7);
sc                290 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r8 , sc->sc_regs+8);
sc                291 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r9   , sc->sc_regs+9);
sc                292 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r10  , sc->sc_regs+10);
sc                293 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r11  , sc->sc_regs+11);
sc                294 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r12  , sc->sc_regs+12);
sc                295 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r13  , sc->sc_regs+13);
sc                296 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r14  , sc->sc_regs+14);
sc                297 arch/alpha/kernel/signal.c 	err |= __put_user(sw->r15  , sc->sc_regs+15);
sc                298 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r16, sc->sc_regs+16);
sc                299 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r17, sc->sc_regs+17);
sc                300 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r18, sc->sc_regs+18);
sc                301 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r19, sc->sc_regs+19);
sc                302 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r20, sc->sc_regs+20);
sc                303 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r21, sc->sc_regs+21);
sc                304 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r22, sc->sc_regs+22);
sc                305 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r23, sc->sc_regs+23);
sc                306 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r24, sc->sc_regs+24);
sc                307 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r25, sc->sc_regs+25);
sc                308 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r26, sc->sc_regs+26);
sc                309 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r27, sc->sc_regs+27);
sc                310 arch/alpha/kernel/signal.c 	err |= __put_user(regs->r28, sc->sc_regs+28);
sc                311 arch/alpha/kernel/signal.c 	err |= __put_user(regs->gp , sc->sc_regs+29);
sc                312 arch/alpha/kernel/signal.c 	err |= __put_user(sp, sc->sc_regs+30);
sc                313 arch/alpha/kernel/signal.c 	err |= __put_user(0, sc->sc_regs+31);
sc                316 arch/alpha/kernel/signal.c 		err |= __put_user(sw->fp[i], sc->sc_fpregs+i);
sc                317 arch/alpha/kernel/signal.c 	err |= __put_user(0, sc->sc_fpregs+31);
sc                318 arch/alpha/kernel/signal.c 	err |= __put_user(sw->fp[31], &sc->sc_fpcr);
sc                320 arch/alpha/kernel/signal.c 	err |= __put_user(regs->trap_a0, &sc->sc_traparg_a0);
sc                321 arch/alpha/kernel/signal.c 	err |= __put_user(regs->trap_a1, &sc->sc_traparg_a1);
sc                322 arch/alpha/kernel/signal.c 	err |= __put_user(regs->trap_a2, &sc->sc_traparg_a2);
sc                338 arch/alpha/kernel/signal.c 	err |= setup_sigcontext(&frame->sc, regs, set->sig[0], oldsp);
sc                362 arch/alpha/kernel/signal.c 	regs->r18 = (unsigned long) &frame->sc;	/* a2: sigcontext pointer */
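
The alpha signal.c entries above show the common sigcontext idiom: every register is copied to or from the user-space frame with __put_user()/__get_user(), and the return codes are OR-ed into a single err flag that is tested once at the end. A rough userspace analogue of that error-accumulation pattern, assuming nothing from the kernel (fake_sigcontext, fake_put_user and the register layout are invented stand-ins):

#include <errno.h>
#include <stdio.h>
#include <string.h>

struct fake_sigcontext {		/* stand-in, not the kernel's struct sigcontext */
	unsigned long sc_pc;
	unsigned long sc_regs[32];
};

/* Stand-in for __put_user(): a real copy to user space can fault. */
int fake_put_user(unsigned long val, unsigned long *dst)
{
	if (!dst)
		return -EFAULT;
	*dst = val;
	return 0;
}

/* Same shape as the alpha setup_sigcontext() above: OR every result into err. */
int fill_sigcontext(struct fake_sigcontext *sc, const unsigned long *regs,
		    unsigned long pc)
{
	int err = 0;
	int i;

	err |= fake_put_user(pc, &sc->sc_pc);
	for (i = 0; i < 31; i++)
		err |= fake_put_user(regs[i], sc->sc_regs + i);
	err |= fake_put_user(0, sc->sc_regs + 31);	/* r31 reads as zero */

	return err;		/* non-zero means at least one copy failed */
}

int main(void)
{
	unsigned long regs[31];
	struct fake_sigcontext sc;

	memset(regs, 0, sizeof(regs));
	printf("err = %d\n", fill_sigcontext(&sc, regs, 0x1000UL));
	return 0;
}

Accumulating into one flag lets the fault check happen once per frame rather than after every field, which is why the kernel versions read as long unconditional runs of |= lines.
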
sc                 62 arch/arm/boot/compressed/string.c 	const char *sc = s;
sc                 64 arch/arm/boot/compressed/string.c 	while (*sc != '\0')
sc                 65 arch/arm/boot/compressed/string.c 		sc++;
sc                 66 arch/arm/boot/compressed/string.c 	return sc - s;
sc                 71 arch/arm/boot/compressed/string.c 	const char *sc;
sc                 73 arch/arm/boot/compressed/string.c 	for (sc = s; count-- && *sc != '\0'; ++sc)
sc                 75 arch/arm/boot/compressed/string.c 	return sc - s;
sc                501 arch/arm/mach-omap2/clockdomains3xxx_data.c 	struct clockdomain **sc;
sc                518 arch/arm/mach-omap2/clockdomains3xxx_data.c 		sc = (rev == OMAP3430_REV_ES1_0) ?
sc                521 arch/arm/mach-omap2/clockdomains3xxx_data.c 		clkdm_register_clkdms(sc);
sc                327 arch/arm64/kernel/signal.c 	struct sigcontext __user *const sc = &sf->uc.uc_mcontext;
sc                329 arch/arm64/kernel/signal.c 	char __user *base = (char __user *)&sc->__reserved;
sc                331 arch/arm64/kernel/signal.c 	size_t limit = sizeof(sc->__reserved);
sc                 35 arch/c6x/kernel/signal.c 			      struct sigcontext __user *sc)
sc                 40 arch/c6x/kernel/signal.c #define COPY(x)  (err |= __get_user(regs->x, &sc->sc_##x))
sc                 97 arch/c6x/kernel/signal.c static int setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
sc                102 arch/c6x/kernel/signal.c 	err |= __put_user(mask, &sc->sc_mask);
sc                105 arch/c6x/kernel/signal.c #define COPY(x) (err |= __put_user(regs->x, &sc->sc_##x))
sc                 16 arch/csky/kernel/signal.c static int restore_fpu_state(struct sigcontext __user *sc)
sc                 21 arch/csky/kernel/signal.c 	err = __copy_from_user(&user_fp, &sc->sc_user_fp, sizeof(user_fp));
sc                 28 arch/csky/kernel/signal.c static int save_fpu_state(struct sigcontext __user *sc)
sc                 34 arch/csky/kernel/signal.c 	return __copy_to_user(&sc->sc_user_fp, &user_fp, sizeof(user_fp));
sc                 52 arch/csky/kernel/signal.c 	struct sigcontext __user *sc)
sc                 57 arch/csky/kernel/signal.c 	err |= __copy_from_user(regs, &sc->sc_pt_regs, sizeof(struct pt_regs));
sc                 60 arch/csky/kernel/signal.c 	err |= restore_fpu_state(sc);
sc                100 arch/csky/kernel/signal.c 	struct sigcontext __user *sc = &frame->uc.uc_mcontext;
sc                103 arch/csky/kernel/signal.c 	err |= __copy_to_user(&sc->sc_pt_regs, regs, sizeof(struct pt_regs));
sc                104 arch/csky/kernel/signal.c 	err |= save_fpu_state(sc);
sc                133 arch/h8300/kernel/signal.c static int setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
sc                138 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er0, &sc->sc_er0);
sc                139 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er1, &sc->sc_er1);
sc                140 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er2, &sc->sc_er2);
sc                141 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er3, &sc->sc_er3);
sc                142 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er4, &sc->sc_er4);
sc                143 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er5, &sc->sc_er5);
sc                144 arch/h8300/kernel/signal.c 	err |= __put_user(regs->er6, &sc->sc_er6);
sc                145 arch/h8300/kernel/signal.c 	err |= __put_user(rdusp(),   &sc->sc_usp);
sc                146 arch/h8300/kernel/signal.c 	err |= __put_user(regs->pc,  &sc->sc_pc);
sc                147 arch/h8300/kernel/signal.c 	err |= __put_user(regs->ccr, &sc->sc_ccr);
sc                148 arch/h8300/kernel/signal.c 	err |= __put_user(mask,      &sc->sc_mask);
sc                 36 arch/hexagon/kernel/signal.c static int setup_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
sc                 41 arch/hexagon/kernel/signal.c 	err |= copy_to_user(&sc->sc_regs.r0, &regs->r00,
sc                 44 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->sa0, &sc->sc_regs.sa0);
sc                 45 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->lc0, &sc->sc_regs.lc0);
sc                 46 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->sa1, &sc->sc_regs.sa1);
sc                 47 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->lc1, &sc->sc_regs.lc1);
sc                 48 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->m0, &sc->sc_regs.m0);
sc                 49 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->m1, &sc->sc_regs.m1);
sc                 50 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->usr, &sc->sc_regs.usr);
sc                 51 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->preds, &sc->sc_regs.p3_0);
sc                 52 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->gp, &sc->sc_regs.gp);
sc                 53 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->ugp, &sc->sc_regs.ugp);
sc                 55 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->cs0, &sc->sc_regs.cs0);
sc                 56 arch/hexagon/kernel/signal.c 	err |= __put_user(regs->cs1, &sc->sc_regs.cs1);
sc                 58 arch/hexagon/kernel/signal.c 	tmp = pt_elr(regs); err |= __put_user(tmp, &sc->sc_regs.pc);
sc                 59 arch/hexagon/kernel/signal.c 	tmp = pt_cause(regs); err |= __put_user(tmp, &sc->sc_regs.cause);
sc                 60 arch/hexagon/kernel/signal.c 	tmp = pt_badva(regs); err |= __put_user(tmp, &sc->sc_regs.badva);
sc                 66 arch/hexagon/kernel/signal.c 			      struct sigcontext __user *sc)
sc                 71 arch/hexagon/kernel/signal.c 	err |= copy_from_user(&regs->r00, &sc->sc_regs.r0,
sc                 74 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->sa0, &sc->sc_regs.sa0);
sc                 75 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->lc0, &sc->sc_regs.lc0);
sc                 76 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->sa1, &sc->sc_regs.sa1);
sc                 77 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->lc1, &sc->sc_regs.lc1);
sc                 78 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->m0, &sc->sc_regs.m0);
sc                 79 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->m1, &sc->sc_regs.m1);
sc                 80 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->usr, &sc->sc_regs.usr);
sc                 81 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->preds, &sc->sc_regs.p3_0);
sc                 82 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->gp, &sc->sc_regs.gp);
sc                 83 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->ugp, &sc->sc_regs.ugp);
sc                 85 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->cs0, &sc->sc_regs.cs0);
sc                 86 arch/hexagon/kernel/signal.c 	err |= __get_user(regs->cs1, &sc->sc_regs.cs1);
sc                 88 arch/hexagon/kernel/signal.c 	err |= __get_user(tmp, &sc->sc_regs.pc); pt_set_elr(regs, tmp);
sc                197 arch/ia64/kernel/asm-offsets.c 	DEFINE(IA64_SIGFRAME_SIGCONTEXT_OFFSET, offsetof (struct sigframe, sc));
sc                 23 arch/ia64/kernel/sigframe.h 	struct sigcontext sc;
sc                 44 arch/ia64/kernel/signal.c restore_sigcontext (struct sigcontext __user *sc, struct sigscratch *scr)
sc                 53 arch/ia64/kernel/signal.c 	err  = __get_user(flags, &sc->sc_flags);
sc                 54 arch/ia64/kernel/signal.c 	err |= __get_user(nat, &sc->sc_nat);
sc                 55 arch/ia64/kernel/signal.c 	err |= __get_user(ip, &sc->sc_ip);			/* instruction pointer */
sc                 56 arch/ia64/kernel/signal.c 	err |= __get_user(cfm, &sc->sc_cfm);
sc                 57 arch/ia64/kernel/signal.c 	err |= __get_user(um, &sc->sc_um);			/* user mask */
sc                 58 arch/ia64/kernel/signal.c 	err |= __get_user(rsc, &sc->sc_ar_rsc);
sc                 59 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.ar_unat, &sc->sc_ar_unat);
sc                 60 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.ar_fpsr, &sc->sc_ar_fpsr);
sc                 61 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.ar_pfs, &sc->sc_ar_pfs);
sc                 62 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.pr, &sc->sc_pr);		/* predicates */
sc                 63 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.b0, &sc->sc_br[0]);		/* b0 (rp) */
sc                 64 arch/ia64/kernel/signal.c 	err |= __get_user(scr->pt.b6, &sc->sc_br[6]);		/* b6 */
sc                 65 arch/ia64/kernel/signal.c 	err |= __copy_from_user(&scr->pt.r1, &sc->sc_gr[1], 8);	/* r1 */
sc                 66 arch/ia64/kernel/signal.c 	err |= __copy_from_user(&scr->pt.r8, &sc->sc_gr[8], 4*8);	/* r8-r11 */
sc                 67 arch/ia64/kernel/signal.c 	err |= __copy_from_user(&scr->pt.r12, &sc->sc_gr[12], 2*8);	/* r12-r13 */
sc                 68 arch/ia64/kernel/signal.c 	err |= __copy_from_user(&scr->pt.r15, &sc->sc_gr[15], 8);	/* r15 */
sc                 82 arch/ia64/kernel/signal.c 		err |= __get_user(scr->pt.ar_ccv, &sc->sc_ar_ccv);		/* ar.ccv */
sc                 83 arch/ia64/kernel/signal.c 		err |= __get_user(scr->pt.b7, &sc->sc_br[7]);			/* b7 */
sc                 84 arch/ia64/kernel/signal.c 		err |= __get_user(scr->pt.r14, &sc->sc_gr[14]);			/* r14 */
sc                 85 arch/ia64/kernel/signal.c 		err |= __copy_from_user(&scr->pt.ar_csd, &sc->sc_ar25, 2*8); /* ar.csd & ar.ssd */
sc                 86 arch/ia64/kernel/signal.c 		err |= __copy_from_user(&scr->pt.r2, &sc->sc_gr[2], 2*8);	/* r2-r3 */
sc                 87 arch/ia64/kernel/signal.c 		err |= __copy_from_user(&scr->pt.r16, &sc->sc_gr[16], 16*8);	/* r16-r31 */
sc                 93 arch/ia64/kernel/signal.c 		err |= __copy_from_user(current->thread.fph, &sc->sc_fr[32], 96*16);
sc                112 arch/ia64/kernel/signal.c 	struct sigcontext __user *sc;
sc                116 arch/ia64/kernel/signal.c 	sc = &((struct sigframe __user *) (scr->pt.r12 + 16))->sc;
sc                135 arch/ia64/kernel/signal.c 	if (!access_ok(sc, sizeof(*sc)))
sc                138 arch/ia64/kernel/signal.c 	if (GET_SIGSET(&set, &sc->sc_mask))
sc                143 arch/ia64/kernel/signal.c 	if (restore_sigcontext(sc, scr))
sc                150 arch/ia64/kernel/signal.c 	if (restore_altstack(&sc->sc_stack))
sc                166 arch/ia64/kernel/signal.c setup_sigcontext (struct sigcontext __user *sc, sigset_t *mask, struct sigscratch *scr)
sc                173 arch/ia64/kernel/signal.c 	if (on_sig_stack((unsigned long) sc))
sc                182 arch/ia64/kernel/signal.c 		err = __copy_to_user(&sc->sc_fr[32], current->thread.fph, 96*16);
sc                187 arch/ia64/kernel/signal.c 	err |= __put_user(flags, &sc->sc_flags);
sc                188 arch/ia64/kernel/signal.c 	err |= __put_user(nat, &sc->sc_nat);
sc                189 arch/ia64/kernel/signal.c 	err |= PUT_SIGSET(mask, &sc->sc_mask);
sc                190 arch/ia64/kernel/signal.c 	err |= __put_user(cfm, &sc->sc_cfm);
sc                191 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.cr_ipsr & IA64_PSR_UM, &sc->sc_um);
sc                192 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.ar_rsc, &sc->sc_ar_rsc);
sc                193 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.ar_unat, &sc->sc_ar_unat);		/* ar.unat */
sc                194 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.ar_fpsr, &sc->sc_ar_fpsr);		/* ar.fpsr */
sc                195 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.ar_pfs, &sc->sc_ar_pfs);
sc                196 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.pr, &sc->sc_pr);			/* predicates */
sc                197 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.b0, &sc->sc_br[0]);			/* b0 (rp) */
sc                198 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.b6, &sc->sc_br[6]);			/* b6 */
sc                199 arch/ia64/kernel/signal.c 	err |= __copy_to_user(&sc->sc_gr[1], &scr->pt.r1, 8);		/* r1 */
sc                200 arch/ia64/kernel/signal.c 	err |= __copy_to_user(&sc->sc_gr[8], &scr->pt.r8, 4*8);		/* r8-r11 */
sc                201 arch/ia64/kernel/signal.c 	err |= __copy_to_user(&sc->sc_gr[12], &scr->pt.r12, 2*8);	/* r12-r13 */
sc                202 arch/ia64/kernel/signal.c 	err |= __copy_to_user(&sc->sc_gr[15], &scr->pt.r15, 8);		/* r15 */
sc                203 arch/ia64/kernel/signal.c 	err |= __put_user(scr->pt.cr_iip + ia64_psr(&scr->pt)->ri, &sc->sc_ip);
sc                207 arch/ia64/kernel/signal.c 		err |= __put_user(scr->pt.ar_ccv, &sc->sc_ar_ccv);		/* ar.ccv */
sc                208 arch/ia64/kernel/signal.c 		err |= __put_user(scr->pt.b7, &sc->sc_br[7]);			/* b7 */
sc                209 arch/ia64/kernel/signal.c 		err |= __put_user(scr->pt.r14, &sc->sc_gr[14]);			/* r14 */
sc                210 arch/ia64/kernel/signal.c 		err |= __copy_to_user(&sc->sc_ar25, &scr->pt.ar_csd, 2*8); /* ar.csd & ar.ssd */
sc                211 arch/ia64/kernel/signal.c 		err |= __copy_to_user(&sc->sc_gr[2], &scr->pt.r2, 2*8);		/* r2-r3 */
sc                212 arch/ia64/kernel/signal.c 		err |= __copy_to_user(&sc->sc_gr[16], &scr->pt.r16, 16*8);	/* r16-r31 */
sc                274 arch/ia64/kernel/signal.c 	err |= __put_user(&frame->sc, &frame->arg2);
sc                275 arch/ia64/kernel/signal.c 	err |= __put_user(new_rbs, &frame->sc.sc_rbs_base);
sc                276 arch/ia64/kernel/signal.c 	err |= __put_user(0, &frame->sc.sc_loadrs);	/* initialize to zero */
sc                281 arch/ia64/kernel/signal.c 	err |= __save_altstack(&frame->sc.sc_stack, scr->pt.r12);
sc                282 arch/ia64/kernel/signal.c 	err |= setup_sigcontext(&frame->sc, set, scr);
sc                312 arch/ia64/kernel/signal.c 	       current->comm, current->pid, ksig->sig, scr->pt.r12, frame->sc.sc_ip, frame->handler);
sc                107 arch/m68k/atari/atakeyb.c #define	IS_SYNC_CODE(sc)	((sc) >= 0x04 && (sc) <= 0xfb)
sc                 11 arch/m68k/include/asm/string.h 	const char *sc = s;
sc                 20 arch/m68k/include/asm/string.h 		: "+a" (sc), "+d" (count));
sc                 21 arch/m68k/include/asm/string.h 	return sc - s;
sc                183 arch/m68k/kernel/signal.c static inline void save_a5_state(struct sigcontext *sc, struct pt_regs *regs)
sc                207 arch/m68k/kernel/signal.c static inline void save_a5_state(struct sigcontext *sc, struct pt_regs *regs)
sc                209 arch/m68k/kernel/signal.c 	sc->sc_a5 = ((struct switch_stack *)regs - 1)->a5;
sc                233 arch/m68k/kernel/signal.c 	struct sigcontext sc;
sc                256 arch/m68k/kernel/signal.c static inline int restore_fpu_state(struct sigcontext *sc)
sc                262 arch/m68k/kernel/signal.c 	    memcpy(current->thread.fpcntl, sc->sc_fpcntl, 12);
sc                263 arch/m68k/kernel/signal.c 	    memcpy(current->thread.fp, sc->sc_fpregs, 24);
sc                267 arch/m68k/kernel/signal.c 	if (CPU_IS_060 ? sc->sc_fpstate[2] : sc->sc_fpstate[0]) {
sc                270 arch/m68k/kernel/signal.c 		 (sc->sc_fpstate[0] != fpu_version))
sc                274 arch/m68k/kernel/signal.c 		    !(sc->sc_fpstate[1] == 0x18 || sc->sc_fpstate[1] == 0xb4))
sc                277 arch/m68k/kernel/signal.c 		    !(sc->sc_fpstate[1] == 0x38 || sc->sc_fpstate[1] == 0xd4))
sc                280 arch/m68k/kernel/signal.c 		if (!(sc->sc_fpstate[1] == 0x00 ||
sc                281 arch/m68k/kernel/signal.c                       sc->sc_fpstate[1] == 0x28 ||
sc                282 arch/m68k/kernel/signal.c                       sc->sc_fpstate[1] == 0x60))
sc                285 arch/m68k/kernel/signal.c 		if (!(sc->sc_fpstate[3] == 0x00 ||
sc                286 arch/m68k/kernel/signal.c                       sc->sc_fpstate[3] == 0x60 ||
sc                287 arch/m68k/kernel/signal.c 		      sc->sc_fpstate[3] == 0xe0))
sc                290 arch/m68k/kernel/signal.c 		if (!(sc->sc_fpstate[0] == 0x00 ||
sc                291 arch/m68k/kernel/signal.c 		      sc->sc_fpstate[0] == 0x05 ||
sc                292 arch/m68k/kernel/signal.c 		      sc->sc_fpstate[0] == 0xe5))
sc                303 arch/m68k/kernel/signal.c 				  : "m" (sc->sc_fpregs[0]),
sc                304 arch/m68k/kernel/signal.c 				    "m" (sc->sc_fpcntl[0]),
sc                305 arch/m68k/kernel/signal.c 				    "m" (sc->sc_fpcntl[1]),
sc                306 arch/m68k/kernel/signal.c 				    "m" (sc->sc_fpcntl[2]));
sc                313 arch/m68k/kernel/signal.c 				  : "m" (*sc->sc_fpregs),
sc                314 arch/m68k/kernel/signal.c 				    "m" (*sc->sc_fpcntl));
sc                319 arch/m68k/kernel/signal.c 		__asm__ volatile ("frestore %0" : : "m" (*sc->sc_fpstate));
sc                324 arch/m68k/kernel/signal.c 				  : : "m" (*sc->sc_fpstate));
sc                430 arch/m68k/kernel/signal.c static inline void save_fpu_state(struct sigcontext *sc, struct pt_regs *regs)
sc                434 arch/m68k/kernel/signal.c 		memcpy(sc->sc_fpcntl, current->thread.fpcntl, 12);
sc                435 arch/m68k/kernel/signal.c 		memcpy(sc->sc_fpregs, current->thread.fp, 24);
sc                441 arch/m68k/kernel/signal.c 				  : : "m" (*sc->sc_fpstate) : "memory");
sc                446 arch/m68k/kernel/signal.c 				  : : "m" (*sc->sc_fpstate) : "memory");
sc                449 arch/m68k/kernel/signal.c 	if (CPU_IS_060 ? sc->sc_fpstate[2] : sc->sc_fpstate[0]) {
sc                450 arch/m68k/kernel/signal.c 		fpu_version = sc->sc_fpstate[0];
sc                455 arch/m68k/kernel/signal.c 			if (*(unsigned short *) sc->sc_fpstate == 0x1f38)
sc                456 arch/m68k/kernel/signal.c 				sc->sc_fpstate[0x38] |= 1 << 3;
sc                464 arch/m68k/kernel/signal.c 					  : "=m" (sc->sc_fpregs[0]),
sc                465 arch/m68k/kernel/signal.c 					    "=m" (sc->sc_fpcntl[0]),
sc                466 arch/m68k/kernel/signal.c 					    "=m" (sc->sc_fpcntl[1]),
sc                467 arch/m68k/kernel/signal.c 					    "=m" (sc->sc_fpcntl[2])
sc                475 arch/m68k/kernel/signal.c 					  : "=m" (*sc->sc_fpregs),
sc                476 arch/m68k/kernel/signal.c 					    "=m" (*sc->sc_fpcntl)
sc                556 arch/m68k/kernel/signal.c static inline int restore_fpu_state(struct sigcontext *sc)
sc                566 arch/m68k/kernel/signal.c static inline void save_fpu_state(struct sigcontext *sc, struct pt_regs *regs)
sc                793 arch/m68k/kernel/signal.c 	if (__get_user(set.sig[0], &frame->sc.sc_mask) ||
sc                801 arch/m68k/kernel/signal.c 	if (restore_sigcontext(regs, &frame->sc, frame + 1))
sc                832 arch/m68k/kernel/signal.c static void setup_sigcontext(struct sigcontext *sc, struct pt_regs *regs,
sc                835 arch/m68k/kernel/signal.c 	sc->sc_mask = mask;
sc                836 arch/m68k/kernel/signal.c 	sc->sc_usp = rdusp();
sc                837 arch/m68k/kernel/signal.c 	sc->sc_d0 = regs->d0;
sc                838 arch/m68k/kernel/signal.c 	sc->sc_d1 = regs->d1;
sc                839 arch/m68k/kernel/signal.c 	sc->sc_a0 = regs->a0;
sc                840 arch/m68k/kernel/signal.c 	sc->sc_a1 = regs->a1;
sc                841 arch/m68k/kernel/signal.c 	sc->sc_sr = regs->sr;
sc                842 arch/m68k/kernel/signal.c 	sc->sc_pc = regs->pc;
sc                843 arch/m68k/kernel/signal.c 	sc->sc_formatvec = regs->format << 12 | regs->vector;
sc                844 arch/m68k/kernel/signal.c 	save_a5_state(sc, regs);
sc                845 arch/m68k/kernel/signal.c 	save_fpu_state(sc, regs);
sc                908 arch/m68k/kernel/signal.c 	err |= __put_user(&frame->sc, &frame->psc);
sc                915 arch/m68k/kernel/signal.c 	err |= copy_to_user (&frame->sc, &context, sizeof(context));
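
The m68k restore_fpu_state() lines above check the saved FP frame's format bytes against a short list of values known to be restorable before handing the frame back to the FPU. A minimal sketch of that whitelist check, with an invented frame layout and the 0x00/0x28/0x60 codes taken from the '040 case shown above:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Accept only the frame format codes the '040 case above accepts;
 * the byte layout here is illustrative, not the real m68k one. */
bool fp_frame_format_ok(const uint8_t *fpstate)
{
	switch (fpstate[1]) {
	case 0x00:
	case 0x28:
	case 0x60:
		return true;
	default:
		return false;
	}
}

int main(void)
{
	uint8_t frame[4] = { 0x41, 0x28, 0x00, 0x00 };

	printf("%d\n", fp_frame_format_ok(frame));	/* 1: known format */
	return 0;
}
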
sc                 48 arch/microblaze/kernel/signal.c 	struct sigcontext sc;
sc                 60 arch/microblaze/kernel/signal.c 				struct sigcontext __user *sc, int *rval_p)
sc                 64 arch/microblaze/kernel/signal.c #define COPY(x)		{err |= __get_user(regs->x, &sc->regs.x); }
sc                120 arch/microblaze/kernel/signal.c setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
sc                125 arch/microblaze/kernel/signal.c #define COPY(x)		{err |= __put_user(regs->x, &sc->regs.x); }
sc                139 arch/microblaze/kernel/signal.c 	err |= __put_user(mask, &sc->oldmask);
sc                107 arch/mips/include/asm/asm-eva.h #define kernel_sc(reg, addr)		sc reg, addr
sc                 32 arch/mips/include/asm/signal.h extern int protected_save_fp_context(void __user *sc);
sc                 33 arch/mips/include/asm/signal.h extern int protected_restore_fp_context(void __user *sc);
sc                683 arch/mips/include/uapi/asm/inst.h 	__BITFIELD_FIELD(unsigned int sc : 1,
sc                 45 arch/mips/kernel/signal.c static int (*save_fp_context)(void __user *sc);
sc                 46 arch/mips/kernel/signal.c static int (*restore_fp_context)(void __user *sc);
sc                 71 arch/mips/kernel/signal.c static int copy_fp_to_sigcontext(void __user *sc)
sc                 74 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                 75 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                 90 arch/mips/kernel/signal.c static int copy_fp_from_sigcontext(void __user *sc)
sc                 93 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                 94 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                111 arch/mips/kernel/signal.c static int copy_fp_to_sigcontext(void __user *sc)
sc                116 arch/mips/kernel/signal.c static int copy_fp_from_sigcontext(void __user *sc)
sc                126 arch/mips/kernel/signal.c static int save_hw_fp_context(void __user *sc)
sc                129 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                130 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                135 arch/mips/kernel/signal.c static int restore_hw_fp_context(void __user *sc)
sc                138 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                139 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                148 arch/mips/kernel/signal.c static inline void __user *sc_to_extcontext(void __user *sc)
sc                157 arch/mips/kernel/signal.c 	uc = container_of(sc, struct ucontext, uc_mcontext);
sc                325 arch/mips/kernel/signal.c int protected_save_fp_context(void __user *sc)
sc                328 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                329 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                330 arch/mips/kernel/signal.c 	uint32_t __user *used_math = sc + abi->off_sc_used_math;
sc                353 arch/mips/kernel/signal.c 			err = save_fp_context(sc);
sc                357 arch/mips/kernel/signal.c 			err = copy_fp_to_sigcontext(sc);
sc                370 arch/mips/kernel/signal.c 	ext_sz = err = save_extcontext(sc_to_extcontext(sc));
sc                378 arch/mips/kernel/signal.c int protected_restore_fp_context(void __user *sc)
sc                381 arch/mips/kernel/signal.c 	uint64_t __user *fpregs = sc + abi->off_sc_fpregs;
sc                382 arch/mips/kernel/signal.c 	uint32_t __user *csr = sc + abi->off_sc_fpc_csr;
sc                383 arch/mips/kernel/signal.c 	uint32_t __user *used_math = sc + abi->off_sc_used_math;
sc                416 arch/mips/kernel/signal.c 			err = restore_fp_context(sc);
sc                420 arch/mips/kernel/signal.c 			err = copy_fp_from_sigcontext(sc);
sc                434 arch/mips/kernel/signal.c 		err = restore_extcontext(sc_to_extcontext(sc));
sc                439 arch/mips/kernel/signal.c int setup_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
sc                444 arch/mips/kernel/signal.c 	err |= __put_user(regs->cp0_epc, &sc->sc_pc);
sc                446 arch/mips/kernel/signal.c 	err |= __put_user(0, &sc->sc_regs[0]);
sc                448 arch/mips/kernel/signal.c 		err |= __put_user(regs->regs[i], &sc->sc_regs[i]);
sc                451 arch/mips/kernel/signal.c 	err |= __put_user(regs->acx, &sc->sc_acx);
sc                453 arch/mips/kernel/signal.c 	err |= __put_user(regs->hi, &sc->sc_mdhi);
sc                454 arch/mips/kernel/signal.c 	err |= __put_user(regs->lo, &sc->sc_mdlo);
sc                456 arch/mips/kernel/signal.c 		err |= __put_user(mfhi1(), &sc->sc_hi1);
sc                457 arch/mips/kernel/signal.c 		err |= __put_user(mflo1(), &sc->sc_lo1);
sc                458 arch/mips/kernel/signal.c 		err |= __put_user(mfhi2(), &sc->sc_hi2);
sc                459 arch/mips/kernel/signal.c 		err |= __put_user(mflo2(), &sc->sc_lo2);
sc                460 arch/mips/kernel/signal.c 		err |= __put_user(mfhi3(), &sc->sc_hi3);
sc                461 arch/mips/kernel/signal.c 		err |= __put_user(mflo3(), &sc->sc_lo3);
sc                462 arch/mips/kernel/signal.c 		err |= __put_user(rddsp(DSP_MASK), &sc->sc_dsp);
sc                470 arch/mips/kernel/signal.c 	err |= protected_save_fp_context(sc);
sc                516 arch/mips/kernel/signal.c int restore_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
sc                525 arch/mips/kernel/signal.c 	err |= __get_user(regs->cp0_epc, &sc->sc_pc);
sc                528 arch/mips/kernel/signal.c 	err |= __get_user(regs->acx, &sc->sc_acx);
sc                530 arch/mips/kernel/signal.c 	err |= __get_user(regs->hi, &sc->sc_mdhi);
sc                531 arch/mips/kernel/signal.c 	err |= __get_user(regs->lo, &sc->sc_mdlo);
sc                533 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_hi1); mthi1(treg);
sc                534 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_lo1); mtlo1(treg);
sc                535 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_hi2); mthi2(treg);
sc                536 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_lo2); mtlo2(treg);
sc                537 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_hi3); mthi3(treg);
sc                538 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_lo3); mtlo3(treg);
sc                539 arch/mips/kernel/signal.c 		err |= __get_user(treg, &sc->sc_dsp); wrdsp(treg, DSP_MASK);
sc                543 arch/mips/kernel/signal.c 		err |= __get_user(regs->regs[i], &sc->sc_regs[i]);
sc                545 arch/mips/kernel/signal.c 	return err ?: protected_restore_fp_context(sc);
sc                913 arch/mips/kernel/signal.c static int smp_save_fp_context(void __user *sc)
sc                916 arch/mips/kernel/signal.c 	       ? save_hw_fp_context(sc)
sc                917 arch/mips/kernel/signal.c 	       : copy_fp_to_sigcontext(sc);
sc                920 arch/mips/kernel/signal.c static int smp_restore_fp_context(void __user *sc)
sc                923 arch/mips/kernel/signal.c 	       ? restore_hw_fp_context(sc)
sc                924 arch/mips/kernel/signal.c 	       : copy_fp_from_sigcontext(sc);
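
In the mips signal.c entries, save_fp_context/restore_fp_context are plain function pointers (lines 45-46 of that file), so the FP part of the sigcontext can be saved either by a hardware path or by a software copy, chosen once rather than tested on every call. A small sketch of that dispatch idiom; have_hw_fpu, hw_save_fp and sw_copy_fp are illustrative names, not the kernel's:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the two strategies the MIPS code dispatches to. */
int hw_save_fp(void *sc)  { (void)sc; puts("hardware save"); return 0; }
int sw_copy_fp(void *sc)  { (void)sc; puts("software copy"); return 0; }

/* Dispatch pointer, bound once at init time rather than branched per call. */
static int (*save_fp_context)(void *sc);

void fp_save_init(bool have_hw_fpu)
{
	save_fp_context = have_hw_fpu ? hw_save_fp : sw_copy_fp;
}

int main(void)
{
	char sigcontext_buf[64];

	fp_save_init(true);
	return save_fp_context(sigcontext_buf);
}
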
sc                 53 arch/mips/kernel/signal_o32.c 			      struct sigcontext32 __user *sc)
sc                 58 arch/mips/kernel/signal_o32.c 	err |= __put_user(regs->cp0_epc, &sc->sc_pc);
sc                 60 arch/mips/kernel/signal_o32.c 	err |= __put_user(0, &sc->sc_regs[0]);
sc                 62 arch/mips/kernel/signal_o32.c 		err |= __put_user(regs->regs[i], &sc->sc_regs[i]);
sc                 64 arch/mips/kernel/signal_o32.c 	err |= __put_user(regs->hi, &sc->sc_mdhi);
sc                 65 arch/mips/kernel/signal_o32.c 	err |= __put_user(regs->lo, &sc->sc_mdlo);
sc                 67 arch/mips/kernel/signal_o32.c 		err |= __put_user(rddsp(DSP_MASK), &sc->sc_dsp);
sc                 68 arch/mips/kernel/signal_o32.c 		err |= __put_user(mfhi1(), &sc->sc_hi1);
sc                 69 arch/mips/kernel/signal_o32.c 		err |= __put_user(mflo1(), &sc->sc_lo1);
sc                 70 arch/mips/kernel/signal_o32.c 		err |= __put_user(mfhi2(), &sc->sc_hi2);
sc                 71 arch/mips/kernel/signal_o32.c 		err |= __put_user(mflo2(), &sc->sc_lo2);
sc                 72 arch/mips/kernel/signal_o32.c 		err |= __put_user(mfhi3(), &sc->sc_hi3);
sc                 73 arch/mips/kernel/signal_o32.c 		err |= __put_user(mflo3(), &sc->sc_lo3);
sc                 80 arch/mips/kernel/signal_o32.c 	err |= protected_save_fp_context(sc);
sc                 86 arch/mips/kernel/signal_o32.c 				struct sigcontext32 __user *sc)
sc                 95 arch/mips/kernel/signal_o32.c 	err |= __get_user(regs->cp0_epc, &sc->sc_pc);
sc                 96 arch/mips/kernel/signal_o32.c 	err |= __get_user(regs->hi, &sc->sc_mdhi);
sc                 97 arch/mips/kernel/signal_o32.c 	err |= __get_user(regs->lo, &sc->sc_mdlo);
sc                 99 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_hi1); mthi1(treg);
sc                100 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_lo1); mtlo1(treg);
sc                101 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_hi2); mthi2(treg);
sc                102 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_lo2); mtlo2(treg);
sc                103 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_hi3); mthi3(treg);
sc                104 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_lo3); mtlo3(treg);
sc                105 arch/mips/kernel/signal_o32.c 		err |= __get_user(treg, &sc->sc_dsp); wrdsp(treg, DSP_MASK);
sc                109 arch/mips/kernel/signal_o32.c 		err |= __get_user(regs->regs[i], &sc->sc_regs[i]);
sc                111 arch/mips/kernel/signal_o32.c 	return err ?: protected_restore_fp_context(sc);
sc               1545 arch/mips/kvm/emulate.c 			if (inst.mfmc0_format.sc) {
sc               1461 arch/mips/net/ebpf_jit.c 				emit_instr(ctx, sc, MIPS_R_T8, mem_off, dst);
sc                 26 arch/nds32/kernel/signal.c 					 struct sigcontext __user *sc)
sc                 33 arch/nds32/kernel/signal.c 	__get_user_error(used_math_flag, &sc->used_math_flag, ret);
sc                 50 arch/nds32/kernel/signal.c 	return __copy_from_user(&tsk->thread.fpu, &sc->fpu,
sc                 55 arch/nds32/kernel/signal.c 				       struct sigcontext __user *sc)
sc                 60 arch/nds32/kernel/signal.c 	__put_user_error(used_math(), &sc->used_math_flag, ret);
sc                 72 arch/nds32/kernel/signal.c 	ret = __copy_to_user(&sc->fpu, &tsk->thread.fpu,
sc                 40 arch/openrisc/kernel/signal.c 			      struct sigcontext __user *sc)
sc                 52 arch/openrisc/kernel/signal.c 	err |= __copy_from_user(regs, sc->regs.gpr, 32 * sizeof(unsigned long));
sc                 53 arch/openrisc/kernel/signal.c 	err |= __copy_from_user(&regs->pc, &sc->regs.pc, sizeof(unsigned long));
sc                 54 arch/openrisc/kernel/signal.c 	err |= __copy_from_user(&regs->sr, &sc->regs.sr, sizeof(unsigned long));
sc                106 arch/openrisc/kernel/signal.c static int setup_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc)
sc                114 arch/openrisc/kernel/signal.c 	err |= __copy_to_user(sc->regs.gpr, regs, 32 * sizeof(unsigned long));
sc                115 arch/openrisc/kernel/signal.c 	err |= __copy_to_user(&sc->regs.pc, &regs->pc, sizeof(unsigned long));
sc                116 arch/openrisc/kernel/signal.c 	err |= __copy_to_user(&sc->regs.sr, &regs->sr, sizeof(unsigned long));
sc                103 arch/parisc/boot/compressed/misc.c 	const char *sc;
sc                105 arch/parisc/boot/compressed/misc.c 	for (sc = s; *sc != '\0'; ++sc)
sc                107 arch/parisc/boot/compressed/misc.c 	return sc - s;
sc                 72 arch/parisc/kernel/signal.c restore_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs)
sc                 76 arch/parisc/kernel/signal.c 	err |= __copy_from_user(regs->gr, sc->sc_gr, sizeof(regs->gr));
sc                 77 arch/parisc/kernel/signal.c 	err |= __copy_from_user(regs->fr, sc->sc_fr, sizeof(regs->fr));
sc                 78 arch/parisc/kernel/signal.c 	err |= __copy_from_user(regs->iaoq, sc->sc_iaoq, sizeof(regs->iaoq));
sc                 79 arch/parisc/kernel/signal.c 	err |= __copy_from_user(regs->iasq, sc->sc_iasq, sizeof(regs->iasq));
sc                 80 arch/parisc/kernel/signal.c 	err |= __get_user(regs->sar, &sc->sc_sar);
sc                194 arch/parisc/kernel/signal.c setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs, int in_syscall)
sc                200 arch/parisc/kernel/signal.c 	if (on_sig_stack((unsigned long) sc))
sc                205 arch/parisc/kernel/signal.c 		err |= __put_user(regs->gr[31], &sc->sc_iaoq[0]);
sc                206 arch/parisc/kernel/signal.c 		err |= __put_user(regs->gr[31]+4, &sc->sc_iaoq[1]);
sc                207 arch/parisc/kernel/signal.c 		err |= __put_user(regs->sr[3], &sc->sc_iasq[0]);
sc                208 arch/parisc/kernel/signal.c 		err |= __put_user(regs->sr[3], &sc->sc_iasq[1]);
sc                212 arch/parisc/kernel/signal.c 		err |= __copy_to_user(sc->sc_iaoq, regs->iaoq, sizeof(regs->iaoq));
sc                213 arch/parisc/kernel/signal.c 		err |= __copy_to_user(sc->sc_iasq, regs->iasq, sizeof(regs->iasq));
sc                218 arch/parisc/kernel/signal.c 	err |= __put_user(flags, &sc->sc_flags);
sc                219 arch/parisc/kernel/signal.c 	err |= __copy_to_user(sc->sc_gr, regs->gr, sizeof(regs->gr));
sc                220 arch/parisc/kernel/signal.c 	err |= __copy_to_user(sc->sc_fr, regs->fr, sizeof(regs->fr));
sc                221 arch/parisc/kernel/signal.c 	err |= __put_user(regs->sar, &sc->sc_sar);
sc                 36 arch/parisc/kernel/signal32.c restore_sigcontext32(struct compat_sigcontext __user *sc, struct compat_regfile __user * rf,
sc                 47 arch/parisc/kernel/signal32.c 	DBG(2,"restore_sigcontext32: sc = 0x%p, rf = 0x%p, regs = 0x%p\n", sc, rf, regs);
sc                 48 arch/parisc/kernel/signal32.c 	DBG(2,"restore_sigcontext32: compat_sigcontext is %#lx bytes\n", sizeof(*sc));
sc                 50 arch/parisc/kernel/signal32.c 		err |= __get_user(compat_reg,&sc->sc_gr[regn]);
sc                 58 arch/parisc/kernel/signal32.c 	DBG(2,"restore_sigcontext32: sc->sc_fr = 0x%p (%#lx)\n",sc->sc_fr, sizeof(sc->sc_fr));
sc                 60 arch/parisc/kernel/signal32.c 	err |= __copy_from_user(regs->fr, sc->sc_fr, sizeof(regs->fr));
sc                 65 arch/parisc/kernel/signal32.c 	err |= __get_user(compat_reg, &sc->sc_iaoq[0]);
sc                 71 arch/parisc/kernel/signal32.c 			&sc->sc_iaoq[0], compat_reg);
sc                 73 arch/parisc/kernel/signal32.c 	err |= __get_user(compat_reg, &sc->sc_iaoq[1]);
sc                 79 arch/parisc/kernel/signal32.c 			&sc->sc_iaoq[1],compat_reg);	
sc                 83 arch/parisc/kernel/signal32.c 	err |= __get_user(compat_reg, &sc->sc_iasq[0]);
sc                 89 arch/parisc/kernel/signal32.c 	err |= __get_user(compat_reg, &sc->sc_iasq[1]);
sc                 97 arch/parisc/kernel/signal32.c 	err |= __get_user(compat_reg, &sc->sc_sar);
sc                115 arch/parisc/kernel/signal32.c setup_sigcontext32(struct compat_sigcontext __user *sc, struct compat_regfile __user * rf, 
sc                124 arch/parisc/kernel/signal32.c 	if (on_sig_stack((unsigned long) sc))
sc                135 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iaoq[0]);
sc                137 arch/parisc/kernel/signal32.c 				&sc->sc_iaoq[0], compat_reg);
sc                146 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iaoq[1]);
sc                148 arch/parisc/kernel/signal32.c 				&sc->sc_iaoq[1], compat_reg);
sc                156 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iasq[0]);
sc                157 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iasq[1]);		
sc                172 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iaoq[0]);
sc                174 arch/parisc/kernel/signal32.c 				&sc->sc_iaoq[0], compat_reg);
sc                181 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iaoq[1]);
sc                183 arch/parisc/kernel/signal32.c 				&sc->sc_iaoq[1], compat_reg);
sc                191 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iasq[0]);
sc                193 arch/parisc/kernel/signal32.c 				&sc->sc_iasq[0], compat_reg);
sc                201 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_iasq[1]);
sc                203 arch/parisc/kernel/signal32.c 				&sc->sc_iasq[1], compat_reg);
sc                214 arch/parisc/kernel/signal32.c 	err |= __put_user(flags, &sc->sc_flags);
sc                221 arch/parisc/kernel/signal32.c 		err |= __put_user(compat_reg, &sc->sc_gr[regn]);
sc                235 arch/parisc/kernel/signal32.c 		sizeof(regs->fr), sizeof(sc->sc_fr));
sc                236 arch/parisc/kernel/signal32.c 	err |= __copy_to_user(sc->sc_fr, regs->fr, sizeof(regs->fr));
sc                239 arch/parisc/kernel/signal32.c 	err |= __put_user(compat_reg, &sc->sc_sar);
sc                 66 arch/parisc/kernel/signal32.h long restore_sigcontext32(struct compat_sigcontext __user *sc, 
sc                 69 arch/parisc/kernel/signal32.h long setup_sigcontext32(struct compat_sigcontext __user *sc, 
sc                 66 arch/powerpc/boot/cpm-serial.c 	u16 sc;   /* Status and Control */
sc                148 arch/powerpc/boot/cpm-serial.c 	rbdf->sc = 0xa000;
sc                153 arch/powerpc/boot/cpm-serial.c 	tbdf->sc = 0x2000;
sc                168 arch/powerpc/boot/cpm-serial.c 	while (tbdf->sc & 0x8000)
sc                175 arch/powerpc/boot/cpm-serial.c 	tbdf->sc |= 0x8000;
sc                181 arch/powerpc/boot/cpm-serial.c 	return !(rbdf->sc & 0x8000);
sc                194 arch/powerpc/boot/cpm-serial.c 	rbdf->sc |= 0x8000;
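
The powerpc boot cpm-serial.c lines poll bit 0x8000 of the buffer descriptor's sc word: for transmit it marks the descriptor as owned by the controller, for receive it marks it as empty and waiting for data. A rough sketch of that ownership-bit protocol, in the shape of the putc/tstc routines above; struct cpm_bd and BD_HW_OWNED are invented names, not the boot code's layout:

#include <stdint.h>

#define BD_HW_OWNED 0x8000u	/* illustrative name for the bit polled above */

struct cpm_bd {			/* minimal descriptor sketch */
	volatile uint16_t sc;	/* status and control */
	volatile uint16_t len;	/* byte count */
	uint8_t *addr;		/* data buffer */
};

/* Transmit: wait until the controller releases the descriptor, fill it,
 * then hand it back by setting the ownership bit again. */
void bd_putc(struct cpm_bd *tbdf, uint8_t c)
{
	while (tbdf->sc & BD_HW_OWNED)
		;
	tbdf->addr[0] = c;
	tbdf->len = 1;
	tbdf->sc |= BD_HW_OWNED;
}

/* Receive test: the controller clears the bit once it has filled the buffer. */
int bd_tstc(const struct cpm_bd *rbdf)
{
	return !(rbdf->sc & BD_HW_OWNED);
}
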
sc                 13 arch/powerpc/boot/stdio.c 	const char *sc;
sc                 15 arch/powerpc/boot/stdio.c 	for (sc = s; count-- && *sc != '\0'; ++sc)
sc                 17 arch/powerpc/boot/stdio.c 	return sc - s;
sc                271 arch/powerpc/kernel/prom_init.c 	const char *sc;
sc                273 arch/powerpc/kernel/prom_init.c 	for (sc = s; *sc != '\0'; ++sc)
sc                275 arch/powerpc/kernel/prom_init.c 	return sc - s;
sc               2774 arch/powerpc/kernel/prom_init.c 	u32 ac, sc;
sc               2784 arch/powerpc/kernel/prom_init.c 	prom_getprop(prom.root, "#size-cells", &sc, sizeof(sc));
sc               2785 arch/powerpc/kernel/prom_init.c 	if ((ac != 2) || (sc != 2))
sc               1361 arch/powerpc/kernel/signal_32.c 	struct sigcontext __user *sc;
sc               1379 arch/powerpc/kernel/signal_32.c 	sc = (struct sigcontext __user *) &frame->sctx;
sc               1384 arch/powerpc/kernel/signal_32.c 	if (__put_user(to_user_ptr(ksig->ka.sa.sa_handler), &sc->handler)
sc               1385 arch/powerpc/kernel/signal_32.c 	    || __put_user(oldset->sig[0], &sc->oldmask)
sc               1387 arch/powerpc/kernel/signal_32.c 	    || __put_user((oldset->sig[0] >> 32), &sc->_unused[3])
sc               1389 arch/powerpc/kernel/signal_32.c 	    || __put_user(oldset->sig[1], &sc->_unused[3])
sc               1391 arch/powerpc/kernel/signal_32.c 	    || __put_user(to_user_ptr(&frame->mctx), &sc->regs)
sc               1392 arch/powerpc/kernel/signal_32.c 	    || __put_user(ksig->sig, &sc->signal))
sc               1428 arch/powerpc/kernel/signal_32.c 	regs->gpr[4] = (unsigned long) sc;
sc               1456 arch/powerpc/kernel/signal_32.c 	struct sigcontext __user *sc;
sc               1470 arch/powerpc/kernel/signal_32.c 	sc = &sf->sctx;
sc               1471 arch/powerpc/kernel/signal_32.c 	addr = sc;
sc               1472 arch/powerpc/kernel/signal_32.c 	if (copy_from_user(&sigctx, sc, sizeof(sigctx)))
sc                 81 arch/powerpc/kernel/signal_64.c static elf_vrreg_t __user *sigcontext_vmx_regs(struct sigcontext __user *sc)
sc                 83 arch/powerpc/kernel/signal_64.c 	return (elf_vrreg_t __user *) (((unsigned long)sc->vmx_reserve + 15) & ~0xful);
sc                 91 arch/powerpc/kernel/signal_64.c static long setup_sigcontext(struct sigcontext __user *sc,
sc                104 arch/powerpc/kernel/signal_64.c 	elf_vrreg_t __user *v_regs = sigcontext_vmx_regs(sc);
sc                116 arch/powerpc/kernel/signal_64.c 	err |= __put_user(v_regs, &sc->v_regs);
sc                140 arch/powerpc/kernel/signal_64.c 	err |= __put_user(0, &sc->v_regs);
sc                144 arch/powerpc/kernel/signal_64.c 	err |= copy_fpr_to_user(&sc->fp_regs, tsk);
sc                167 arch/powerpc/kernel/signal_64.c 	err |= __put_user(&sc->gp_regs, &sc->regs);
sc                169 arch/powerpc/kernel/signal_64.c 	err |= __copy_to_user(&sc->gp_regs, regs, GP_REGS_SIZE);
sc                170 arch/powerpc/kernel/signal_64.c 	err |= __put_user(msr, &sc->gp_regs[PT_MSR]);
sc                171 arch/powerpc/kernel/signal_64.c 	err |= __put_user(softe, &sc->gp_regs[PT_SOFTE]);
sc                172 arch/powerpc/kernel/signal_64.c 	err |= __put_user(signr, &sc->signal);
sc                173 arch/powerpc/kernel/signal_64.c 	err |= __put_user(handler, &sc->handler);
sc                175 arch/powerpc/kernel/signal_64.c 		err |=  __put_user(set->sig[0], &sc->oldmask);
sc                192 arch/powerpc/kernel/signal_64.c static long setup_tm_sigcontexts(struct sigcontext __user *sc,
sc                207 arch/powerpc/kernel/signal_64.c 	elf_vrreg_t __user *v_regs = sigcontext_vmx_regs(sc);
sc                226 arch/powerpc/kernel/signal_64.c 	err |= __put_user(v_regs, &sc->v_regs);
sc                265 arch/powerpc/kernel/signal_64.c 	err |= __put_user(0, &sc->v_regs);
sc                270 arch/powerpc/kernel/signal_64.c 	err |= copy_ckfpr_to_user(&sc->fp_regs, tsk);
sc                300 arch/powerpc/kernel/signal_64.c 	err |= __put_user(&sc->gp_regs, &sc->regs);
sc                304 arch/powerpc/kernel/signal_64.c 	err |= __copy_to_user(&sc->gp_regs,
sc                307 arch/powerpc/kernel/signal_64.c 	err |= __put_user(msr, &sc->gp_regs[PT_MSR]);
sc                308 arch/powerpc/kernel/signal_64.c 	err |= __put_user(signr, &sc->signal);
sc                309 arch/powerpc/kernel/signal_64.c 	err |= __put_user(handler, &sc->handler);
sc                311 arch/powerpc/kernel/signal_64.c 		err |=  __put_user(set->sig[0], &sc->oldmask);
sc                322 arch/powerpc/kernel/signal_64.c 			      struct sigcontext __user *sc)
sc                342 arch/powerpc/kernel/signal_64.c 	err |= __copy_from_user(regs->gpr, sc->gp_regs, sizeof(regs->gpr));
sc                343 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->nip, &sc->gp_regs[PT_NIP]);
sc                345 arch/powerpc/kernel/signal_64.c 	err |= __get_user(msr, &sc->gp_regs[PT_MSR]);
sc                348 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->orig_gpr3, &sc->gp_regs[PT_ORIG_R3]);
sc                349 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->ctr, &sc->gp_regs[PT_CTR]);
sc                350 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->link, &sc->gp_regs[PT_LNK]);
sc                351 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->xer, &sc->gp_regs[PT_XER]);
sc                352 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->ccr, &sc->gp_regs[PT_CCR]);
sc                355 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->dar, &sc->gp_regs[PT_DAR]);
sc                356 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->dsisr, &sc->gp_regs[PT_DSISR]);
sc                357 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->result, &sc->gp_regs[PT_RESULT]);
sc                362 arch/powerpc/kernel/signal_64.c 		err |=  __get_user(set->sig[0], &sc->oldmask);
sc                372 arch/powerpc/kernel/signal_64.c 	err |= __get_user(v_regs, &sc->v_regs);
sc                394 arch/powerpc/kernel/signal_64.c 	err |= copy_fpr_from_user(tsk, &sc->fp_regs);
sc                419 arch/powerpc/kernel/signal_64.c 				   struct sigcontext __user *sc,
sc                439 arch/powerpc/kernel/signal_64.c 	err |= __copy_from_user(&tsk->thread.ckpt_regs, sc->gp_regs,
sc                451 arch/powerpc/kernel/signal_64.c 	err |= __get_user(tsk->thread.tm_tfhar, &sc->gp_regs[PT_NIP]);
sc                454 arch/powerpc/kernel/signal_64.c 	err |= __get_user(msr, &sc->gp_regs[PT_MSR]);
sc                468 arch/powerpc/kernel/signal_64.c 			  &sc->gp_regs[PT_CTR]);
sc                470 arch/powerpc/kernel/signal_64.c 			  &sc->gp_regs[PT_LNK]);
sc                472 arch/powerpc/kernel/signal_64.c 			  &sc->gp_regs[PT_XER]);
sc                474 arch/powerpc/kernel/signal_64.c 			  &sc->gp_regs[PT_CCR]);
sc                480 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->dar, &sc->gp_regs[PT_DAR]);
sc                481 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->dsisr, &sc->gp_regs[PT_DSISR]);
sc                482 arch/powerpc/kernel/signal_64.c 	err |= __get_user(regs->result, &sc->gp_regs[PT_RESULT]);
sc                492 arch/powerpc/kernel/signal_64.c 	err |= __get_user(v_regs, &sc->v_regs);
sc                528 arch/powerpc/kernel/signal_64.c 	err |= copy_ckfpr_from_user(tsk, &sc->fp_regs);
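
sigcontext_vmx_regs() in the powerpc signal_64.c entries rounds the address of sc->vmx_reserve up to a 16-byte boundary so the VMX registers land on an aligned slot. The same align-up arithmetic as a standalone helper:

#include <stdint.h>
#include <stdio.h>

/* Round addr up to the next multiple of align (align must be a power of two). */
uintptr_t align_up(uintptr_t addr, uintptr_t align)
{
	return (addr + align - 1) & ~(align - 1);
}

int main(void)
{
	/* Add 15 then mask the low bits, as sigcontext_vmx_regs() does with 16. */
	printf("%#lx\n", (unsigned long)align_up(0x1009, 16));	/* 0x1010 */
	return 0;
}
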
sc                 25 arch/riscv/include/asm/asm.h #define REG_SC		__REG_SEL(sc.d, sc.w)
sc                 82 arch/riscv/kernel/signal.c 	struct sigcontext __user *sc)
sc                 86 arch/riscv/kernel/signal.c 	err = __copy_from_user(regs, &sc->sc_regs, sizeof(sc->sc_regs));
sc                 89 arch/riscv/kernel/signal.c 		err |= restore_fp_state(regs, &sc->sc_fpregs);
sc                136 arch/riscv/kernel/signal.c 	struct sigcontext __user *sc = &frame->uc.uc_mcontext;
sc                139 arch/riscv/kernel/signal.c 	err = __copy_to_user(&sc->sc_regs, regs, sizeof(sc->sc_regs));
sc                142 arch/riscv/kernel/signal.c 		err |= save_fp_state(regs, &sc->sc_fpregs);
sc                395 arch/s390/hypfs/hypfs_diag.c 	u8	sc;		/* Used subcode */
sc                422 arch/s390/hypfs/hypfs_diag.c 	d204->hdr.sc = diag204_store_sc;
sc                 54 arch/s390/include/asm/nmi.h 		u64 sc :  1; /* 17 storage error corrected */
sc                 38 arch/s390/kernel/compat_signal.c 	struct sigcontext32 sc;
sc                186 arch/s390/kernel/compat_signal.c 	if (get_compat_sigset(&set, (compat_sigset_t __user *)frame->sc.oldmask))
sc                279 arch/s390/kernel/compat_signal.c 	if (put_compat_sigset((compat_sigset_t __user *)frame->sc.oldmask,
sc                282 arch/s390/kernel/compat_signal.c 	if (__put_user(ptr_to_compat(&frame->sregs), &frame->sc.sregs))
sc                323 arch/s390/kernel/compat_signal.c 	regs->gprs[3] = (__force __u64) &frame->sc;
sc                 66 arch/s390/kernel/signal.c 	struct sigcontext sc;
sc                224 arch/s390/kernel/signal.c 	if (__copy_from_user(&set.sig, &frame->sc.oldmask, _SIGMASK_COPY_SIZE))
sc                291 arch/s390/kernel/signal.c 	struct sigcontext sc;
sc                313 arch/s390/kernel/signal.c 	memcpy(&sc.oldmask, &set->sig, _SIGMASK_COPY_SIZE);
sc                314 arch/s390/kernel/signal.c 	sc.sregs = (_sigregs __user __force *) &frame->sregs;
sc                315 arch/s390/kernel/signal.c 	if (__copy_to_user(&frame->sc, &sc, sizeof(frame->sc)))
sc                355 arch/s390/kernel/signal.c 	regs->gprs[3] = (unsigned long) &frame->sc;
sc                 65 arch/sh/kernel/signal_32.c 	struct sigcontext sc;
sc                 78 arch/sh/kernel/signal_32.c static inline int restore_sigcontext_fpu(struct sigcontext __user *sc)
sc                 86 arch/sh/kernel/signal_32.c 	return __copy_from_user(&tsk->thread.xstate->hardfpu, &sc->sc_fpregs[0],
sc                 90 arch/sh/kernel/signal_32.c static inline int save_sigcontext_fpu(struct sigcontext __user *sc,
sc                 99 arch/sh/kernel/signal_32.c 		return __put_user(0, &sc->sc_ownedfp);
sc                101 arch/sh/kernel/signal_32.c 	if (__put_user(1, &sc->sc_ownedfp))
sc                110 arch/sh/kernel/signal_32.c 	return __copy_to_user(&sc->sc_fpregs[0], &tsk->thread.xstate->hardfpu,
sc                116 arch/sh/kernel/signal_32.c restore_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc, int *r0_p)
sc                120 arch/sh/kernel/signal_32.c #define COPY(x)		err |= __get_user(regs->x, &sc->sc_##x)
sc                142 arch/sh/kernel/signal_32.c 		err |= __get_user (owned_fp, &sc->sc_ownedfp);
sc                144 arch/sh/kernel/signal_32.c 			err |= restore_sigcontext_fpu(sc);
sc                149 arch/sh/kernel/signal_32.c 	err |= __get_user(*r0_p, &sc->sc_regs[0]);
sc                166 arch/sh/kernel/signal_32.c 	if (__get_user(set.sig[0], &frame->sc.oldmask)
sc                174 arch/sh/kernel/signal_32.c 	if (restore_sigcontext(regs, &frame->sc, &r0))
sc                219 arch/sh/kernel/signal_32.c setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
sc                224 arch/sh/kernel/signal_32.c #define COPY(x)		err |= __put_user(regs->x, &sc->sc_##x)
sc                239 arch/sh/kernel/signal_32.c 	err |= save_sigcontext_fpu(sc, regs);
sc                243 arch/sh/kernel/signal_32.c 	err |= __put_user(mask, &sc->oldmask);
sc                278 arch/sh/kernel/signal_32.c 	err |= setup_sigcontext(&frame->sc, regs, set->sig[0]);
sc                313 arch/sh/kernel/signal_32.c 	regs->regs[6] = (unsigned long) &frame->sc;
sc                127 arch/sh/kernel/signal_64.c 	struct sigcontext sc;
sc                142 arch/sh/kernel/signal_64.c restore_sigcontext_fpu(struct pt_regs *regs, struct sigcontext __user *sc)
sc                147 arch/sh/kernel/signal_64.c 	err |= __get_user (fpvalid, &sc->sc_fpvalid);
sc                157 arch/sh/kernel/signal_64.c 	err |= __copy_from_user(&current->thread.xstate->hardfpu, &sc->sc_fpregs[0],
sc                164 arch/sh/kernel/signal_64.c setup_sigcontext_fpu(struct pt_regs *regs, struct sigcontext __user *sc)
sc                170 arch/sh/kernel/signal_64.c 	err |= __put_user(fpvalid, &sc->sc_fpvalid);
sc                182 arch/sh/kernel/signal_64.c 	err |= __copy_to_user(&sc->sc_fpregs[0], &current->thread.xstate->hardfpu,
sc                190 arch/sh/kernel/signal_64.c restore_sigcontext_fpu(struct pt_regs *regs, struct sigcontext __user *sc)
sc                195 arch/sh/kernel/signal_64.c setup_sigcontext_fpu(struct pt_regs *regs, struct sigcontext __user *sc)
sc                202 arch/sh/kernel/signal_64.c restore_sigcontext(struct pt_regs *regs, struct sigcontext __user *sc, long long *r2_p)
sc                208 arch/sh/kernel/signal_64.c #define COPY(x)		err |= __get_user(regs->x, &sc->sc_##x)
sc                233 arch/sh/kernel/signal_64.c         err |= __get_user(new_sr, &sc->sc_sr);
sc                243 arch/sh/kernel/signal_64.c 	err |= restore_sigcontext_fpu(regs, sc);
sc                246 arch/sh/kernel/signal_64.c 	err |= __get_user(*r2_p, &sc->sc_regs[REG_RET]);
sc                265 arch/sh/kernel/signal_64.c 	if (__get_user(set.sig[0], &frame->sc.oldmask)
sc                273 arch/sh/kernel/signal_64.c 	if (restore_sigcontext(regs, &frame->sc, &ret))
sc                322 arch/sh/kernel/signal_64.c setup_sigcontext(struct sigcontext __user *sc, struct pt_regs *regs,
sc                328 arch/sh/kernel/signal_64.c 	err |= setup_sigcontext_fpu(regs, sc);
sc                330 arch/sh/kernel/signal_64.c #define COPY(x)		err |= __put_user(regs->x, &sc->sc_##x)
sc                354 arch/sh/kernel/signal_64.c 	err |= __put_user(mask, &sc->oldmask);
sc                385 arch/sh/kernel/signal_64.c 	err |= setup_sigcontext(&frame->sc, regs, set->sig[0]);
sc                446 arch/sh/kernel/signal_64.c 	regs->regs[REG_ARG2] = (unsigned long long)(unsigned long)(signed long)&frame->sc;
sc                447 arch/sh/kernel/signal_64.c 	regs->regs[REG_ARG3] = (unsigned long long)(unsigned long)(signed long)&frame->sc;
sc                160 arch/x86/boot/string.c 	const char *sc;
sc                162 arch/x86/boot/string.c 	for (sc = s; *sc != '\0'; ++sc)
sc                164 arch/x86/boot/string.c 	return sc - s;
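
The arch/x86/boot/string.c entries above are the classic strlen() idiom: walk the string with a scan cursor named sc and return how far the cursor travelled. A minimal, stand-alone rendering of that idiom in plain C (the function name here is illustrative, not the boot code's) looks like this:

#include <stddef.h>

/* Count the characters before the terminating NUL by walking a cursor. */
static size_t strlen_sketch(const char *s)
{
	const char *sc;

	for (sc = s; *sc != '\0'; ++sc)
		/* just advance the cursor */;
	return sc - s;
}
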
sc                 51 arch/x86/ia32/ia32_signal.c 	get_user_ex(regs->x, &sc->x);		\
sc                 56 arch/x86/ia32/ia32_signal.c 	get_user_ex(tmp, &sc->seg);			\
sc                 72 arch/x86/ia32/ia32_signal.c 				   struct sigcontext_32 __user *sc)
sc                 95 arch/x86/ia32/ia32_signal.c 		get_user_ex(tmpflags, &sc->flags);
sc                100 arch/x86/ia32/ia32_signal.c 		get_user_ex(tmp, &sc->fpstate);
sc                130 arch/x86/ia32/ia32_signal.c 	if (__get_user(set.sig[0], &frame->sc.oldmask)
sc                139 arch/x86/ia32/ia32_signal.c 	if (ia32_restore_sigcontext(regs, &frame->sc))
sc                180 arch/x86/ia32/ia32_signal.c static int ia32_setup_sigcontext(struct sigcontext_32 __user *sc,
sc                187 arch/x86/ia32/ia32_signal.c 		put_user_ex(get_user_seg(gs), (unsigned int __user *)&sc->gs);
sc                188 arch/x86/ia32/ia32_signal.c 		put_user_ex(get_user_seg(fs), (unsigned int __user *)&sc->fs);
sc                189 arch/x86/ia32/ia32_signal.c 		put_user_ex(get_user_seg(ds), (unsigned int __user *)&sc->ds);
sc                190 arch/x86/ia32/ia32_signal.c 		put_user_ex(get_user_seg(es), (unsigned int __user *)&sc->es);
sc                192 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->di, &sc->di);
sc                193 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->si, &sc->si);
sc                194 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->bp, &sc->bp);
sc                195 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->sp, &sc->sp);
sc                196 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->bx, &sc->bx);
sc                197 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->dx, &sc->dx);
sc                198 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->cx, &sc->cx);
sc                199 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->ax, &sc->ax);
sc                200 arch/x86/ia32/ia32_signal.c 		put_user_ex(current->thread.trap_nr, &sc->trapno);
sc                201 arch/x86/ia32/ia32_signal.c 		put_user_ex(current->thread.error_code, &sc->err);
sc                202 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->ip, &sc->ip);
sc                203 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->cs, (unsigned int __user *)&sc->cs);
sc                204 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->flags, &sc->flags);
sc                205 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->sp, &sc->sp_at_signal);
sc                206 arch/x86/ia32/ia32_signal.c 		put_user_ex(regs->ss, (unsigned int __user *)&sc->ss);
sc                208 arch/x86/ia32/ia32_signal.c 		put_user_ex(ptr_to_compat(fpstate), &sc->fpstate);
sc                211 arch/x86/ia32/ia32_signal.c 		put_user_ex(mask, &sc->oldmask);
sc                212 arch/x86/ia32/ia32_signal.c 		put_user_ex(current->thread.cr2, &sc->cr2);
sc                279 arch/x86/ia32/ia32_signal.c 	if (ia32_setup_sigcontext(&frame->sc, fpstate, regs, set->sig[0]))
sc                 26 arch/x86/include/asm/sigframe.h 	struct sigcontext_32 sc;
sc                 17 arch/x86/include/asm/sighandling.h int setup_sigcontext(struct sigcontext __user *sc, void __user *fpstate,
sc                 51 arch/x86/kernel/signal.c 	get_user_ex(regs->x, &sc->x);			\
sc                 56 arch/x86/kernel/signal.c 	get_user_ex(tmp, &sc->seg);			\
sc                 98 arch/x86/kernel/signal.c 			      struct sigcontext __user *sc,
sc                135 arch/x86/kernel/signal.c 		get_user_ex(tmpflags, &sc->flags);
sc                139 arch/x86/kernel/signal.c 		get_user_ex(buf_val, &sc->fpstate);
sc                159 arch/x86/kernel/signal.c int setup_sigcontext(struct sigcontext __user *sc, void __user *fpstate,
sc                167 arch/x86/kernel/signal.c 		put_user_ex(get_user_gs(regs), (unsigned int __user *)&sc->gs);
sc                168 arch/x86/kernel/signal.c 		put_user_ex(regs->fs, (unsigned int __user *)&sc->fs);
sc                169 arch/x86/kernel/signal.c 		put_user_ex(regs->es, (unsigned int __user *)&sc->es);
sc                170 arch/x86/kernel/signal.c 		put_user_ex(regs->ds, (unsigned int __user *)&sc->ds);
sc                173 arch/x86/kernel/signal.c 		put_user_ex(regs->di, &sc->di);
sc                174 arch/x86/kernel/signal.c 		put_user_ex(regs->si, &sc->si);
sc                175 arch/x86/kernel/signal.c 		put_user_ex(regs->bp, &sc->bp);
sc                176 arch/x86/kernel/signal.c 		put_user_ex(regs->sp, &sc->sp);
sc                177 arch/x86/kernel/signal.c 		put_user_ex(regs->bx, &sc->bx);
sc                178 arch/x86/kernel/signal.c 		put_user_ex(regs->dx, &sc->dx);
sc                179 arch/x86/kernel/signal.c 		put_user_ex(regs->cx, &sc->cx);
sc                180 arch/x86/kernel/signal.c 		put_user_ex(regs->ax, &sc->ax);
sc                182 arch/x86/kernel/signal.c 		put_user_ex(regs->r8, &sc->r8);
sc                183 arch/x86/kernel/signal.c 		put_user_ex(regs->r9, &sc->r9);
sc                184 arch/x86/kernel/signal.c 		put_user_ex(regs->r10, &sc->r10);
sc                185 arch/x86/kernel/signal.c 		put_user_ex(regs->r11, &sc->r11);
sc                186 arch/x86/kernel/signal.c 		put_user_ex(regs->r12, &sc->r12);
sc                187 arch/x86/kernel/signal.c 		put_user_ex(regs->r13, &sc->r13);
sc                188 arch/x86/kernel/signal.c 		put_user_ex(regs->r14, &sc->r14);
sc                189 arch/x86/kernel/signal.c 		put_user_ex(regs->r15, &sc->r15);
sc                192 arch/x86/kernel/signal.c 		put_user_ex(current->thread.trap_nr, &sc->trapno);
sc                193 arch/x86/kernel/signal.c 		put_user_ex(current->thread.error_code, &sc->err);
sc                194 arch/x86/kernel/signal.c 		put_user_ex(regs->ip, &sc->ip);
sc                196 arch/x86/kernel/signal.c 		put_user_ex(regs->cs, (unsigned int __user *)&sc->cs);
sc                197 arch/x86/kernel/signal.c 		put_user_ex(regs->flags, &sc->flags);
sc                198 arch/x86/kernel/signal.c 		put_user_ex(regs->sp, &sc->sp_at_signal);
sc                199 arch/x86/kernel/signal.c 		put_user_ex(regs->ss, (unsigned int __user *)&sc->ss);
sc                201 arch/x86/kernel/signal.c 		put_user_ex(regs->flags, &sc->flags);
sc                202 arch/x86/kernel/signal.c 		put_user_ex(regs->cs, &sc->cs);
sc                203 arch/x86/kernel/signal.c 		put_user_ex(0, &sc->gs);
sc                204 arch/x86/kernel/signal.c 		put_user_ex(0, &sc->fs);
sc                205 arch/x86/kernel/signal.c 		put_user_ex(regs->ss, &sc->ss);
sc                208 arch/x86/kernel/signal.c 		put_user_ex(fpstate, (unsigned long __user *)&sc->fpstate);
sc                211 arch/x86/kernel/signal.c 		put_user_ex(mask, &sc->oldmask);
sc                212 arch/x86/kernel/signal.c 		put_user_ex(current->thread.cr2, &sc->cr2);
sc                328 arch/x86/kernel/signal.c 	if (setup_sigcontext(&frame->sc, fpstate, regs, set->sig[0]))
sc                618 arch/x86/kernel/signal.c 	if (__get_user(set.sig[0], &frame->sc.oldmask) || (_NSIG_WORDS > 1
sc                629 arch/x86/kernel/signal.c 	if (restore_sigcontext(regs, &frame->sc, 0))
sc               6168 arch/x86/kvm/mmu.c mmu_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc               6171 arch/x86/kvm/mmu.c 	int nr_to_scan = sc->nr_to_scan;
sc               6229 arch/x86/kvm/mmu.c mmu_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
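
mmu_shrink_count() and mmu_shrink_scan() above take a struct shrink_control *sc, as do the binder, i915, msm, ttm and panfrost shrinkers listed further down: the count callback reports how many objects could be reclaimed, and the scan callback frees at most sc->nr_to_scan of them. Below is a hedged skeleton of that contract; the cache it shrinks is hypothetical, and it assumes the one-argument register_shrinker() API of this kernel generation.

#include <linux/shrinker.h>

/* Hypothetical cache; only the shrinker wiring is of interest here. */
static unsigned long example_cache_objects;

static unsigned long example_shrink_count(struct shrinker *shrink,
					  struct shrink_control *sc)
{
	/* Report how many objects could be freed; 0 means nothing to do. */
	return example_cache_objects;
}

static unsigned long example_shrink_scan(struct shrinker *shrink,
					 struct shrink_control *sc)
{
	unsigned long freed = 0;

	/* Free at most sc->nr_to_scan objects (no locking, this is only a sketch). */
	while (freed < sc->nr_to_scan && example_cache_objects) {
		example_cache_objects--;
		freed++;
	}
	return freed ? freed : SHRINK_STOP;
}

static struct shrinker example_shrinker = {
	.count_objects = example_shrink_count,
	.scan_objects  = example_shrink_scan,
	.seeks         = DEFAULT_SEEKS,
};

/* Registered once elsewhere with register_shrinker(&example_shrinker). */
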
sc                120 arch/x86/mm/pageattr-test.c 	struct split_state sa, sb, sc;
sc                241 arch/x86/mm/pageattr-test.c 	failed += print_split(&sc);
sc                156 arch/x86/um/signal.c 	struct sigcontext sc;
sc                162 arch/x86/um/signal.c 	err = copy_from_user(&sc, from, sizeof(sc));
sc                166 arch/x86/um/signal.c #define GETREG(regno, regname) regs->regs.gp[HOST_##regno] = sc.regname
sc                209 arch/x86/um/signal.c 			&((struct _fpstate __user *)sc.fpstate)->_fxsr_env[0],
sc                214 arch/x86/um/signal.c 		err = convert_fxsr_from_user(&fpx, (void *)sc.fpstate);
sc                228 arch/x86/um/signal.c 		err = copy_from_user(regs->regs.fp, (void *)sc.fpstate,
sc                240 arch/x86/um/signal.c 	struct sigcontext sc;
sc                243 arch/x86/um/signal.c 	memset(&sc, 0, sizeof(struct sigcontext));
sc                245 arch/x86/um/signal.c #define PUTREG(regno, regname) sc.regname = regs->regs.gp[HOST_##regno]
sc                272 arch/x86/um/signal.c 	sc.cr2 = fi->cr2;
sc                273 arch/x86/um/signal.c 	sc.err = fi->error_code;
sc                274 arch/x86/um/signal.c 	sc.trapno = fi->trap_no;
sc                283 arch/x86/um/signal.c 	sc.oldmask = mask;
sc                284 arch/x86/um/signal.c 	sc.fpstate = (unsigned long)to_fp;
sc                286 arch/x86/um/signal.c 	err = copy_to_user(to, &sc, sizeof(struct sigcontext));
sc                342 arch/x86/um/signal.c 	struct sigcontext sc;
sc                379 arch/x86/um/signal.c 	err |= copy_sc_to_user(&frame->sc, &frame->fpstate, regs, mask->sig[0]);
sc                457 arch/x86/um/signal.c 	struct sigcontext __user *sc = &frame->sc;
sc                460 arch/x86/um/signal.c 	if (copy_from_user(&set.sig[0], &sc->oldmask, sizeof(set.sig[0])) ||
sc                466 arch/x86/um/signal.c 	if (copy_sc_from_user(&current->thread.regs, sc))
sc                136 arch/xtensa/kernel/signal.c 	struct sigcontext __user *sc = &frame->uc.uc_mcontext;
sc                140 arch/xtensa/kernel/signal.c #define COPY(x)	err |= __put_user(regs->x, &sc->sc_##x)
sc                150 arch/xtensa/kernel/signal.c 	err |= __copy_to_user (sc->sc_a, regs->areg, 16 * 4);
sc                151 arch/xtensa/kernel/signal.c 	err |= __put_user(0, &sc->sc_xtregs);
sc                167 arch/xtensa/kernel/signal.c 	err |= __put_user(err ? NULL : &frame->xtregs, &sc->sc_xtregs);
sc                175 arch/xtensa/kernel/signal.c 	struct sigcontext __user *sc = &frame->uc.uc_mcontext;
sc                180 arch/xtensa/kernel/signal.c #define COPY(x)	err |= __get_user(regs->x, &sc->sc_##x)
sc                200 arch/xtensa/kernel/signal.c 	err |= __get_user(ps, &sc->sc_ps);
sc                209 arch/xtensa/kernel/signal.c 	err |= __copy_from_user(regs->areg, sc->sc_a, 16 * 4);
sc                486 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
sc                494 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
sc                502 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
sc                579 crypto/twofish_common.c 	u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0;
sc                596 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 0, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
sc                597 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 1, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
sc                598 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 2, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
sc                599 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 3, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
sc                600 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 4, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
sc                601 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 5, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
sc                602 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 6, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
sc                603 crypto/twofish_common.c 	CALC_S (sa, sb, sc, sd, 7, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
sc                974 drivers/android/binder_alloc.c binder_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc                981 drivers/android/binder_alloc.c binder_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc                986 drivers/android/binder_alloc.c 			    NULL, sc->nr_to_scan);
sc                 51 drivers/clk/qcom/gdsc.c static int gdsc_check_status(struct gdsc *sc, enum gdsc_status status)
sc                 57 drivers/clk/qcom/gdsc.c 	if (sc->flags & POLL_CFG_GDSCR)
sc                 58 drivers/clk/qcom/gdsc.c 		reg = sc->gdscr + CFG_GDSCR_OFFSET;
sc                 59 drivers/clk/qcom/gdsc.c 	else if (sc->gds_hw_ctrl)
sc                 60 drivers/clk/qcom/gdsc.c 		reg = sc->gds_hw_ctrl;
sc                 62 drivers/clk/qcom/gdsc.c 		reg = sc->gdscr;
sc                 64 drivers/clk/qcom/gdsc.c 	ret = regmap_read(sc->regmap, reg, &val);
sc                 68 drivers/clk/qcom/gdsc.c 	if (sc->flags & POLL_CFG_GDSCR) {
sc                 87 drivers/clk/qcom/gdsc.c static int gdsc_hwctrl(struct gdsc *sc, bool en)
sc                 91 drivers/clk/qcom/gdsc.c 	return regmap_update_bits(sc->regmap, sc->gdscr, HW_CONTROL_MASK, val);
sc                 94 drivers/clk/qcom/gdsc.c static int gdsc_poll_status(struct gdsc *sc, enum gdsc_status status)
sc                100 drivers/clk/qcom/gdsc.c 		if (gdsc_check_status(sc, status))
sc                104 drivers/clk/qcom/gdsc.c 	if (gdsc_check_status(sc, status))
sc                110 drivers/clk/qcom/gdsc.c static int gdsc_toggle_logic(struct gdsc *sc, enum gdsc_status status)
sc                115 drivers/clk/qcom/gdsc.c 	ret = regmap_update_bits(sc->regmap, sc->gdscr, SW_COLLAPSE_MASK, val);
sc                120 drivers/clk/qcom/gdsc.c 	if ((sc->flags & VOTABLE) && status == GDSC_OFF) {
sc                130 drivers/clk/qcom/gdsc.c 	if (sc->gds_hw_ctrl) {
sc                144 drivers/clk/qcom/gdsc.c 	ret = gdsc_poll_status(sc, status);
sc                145 drivers/clk/qcom/gdsc.c 	WARN(ret, "%s status stuck at 'o%s'", sc->pd.name, status ? "ff" : "n");
sc                149 drivers/clk/qcom/gdsc.c static inline int gdsc_deassert_reset(struct gdsc *sc)
sc                153 drivers/clk/qcom/gdsc.c 	for (i = 0; i < sc->reset_count; i++)
sc                154 drivers/clk/qcom/gdsc.c 		sc->rcdev->ops->deassert(sc->rcdev, sc->resets[i]);
sc                158 drivers/clk/qcom/gdsc.c static inline int gdsc_assert_reset(struct gdsc *sc)
sc                162 drivers/clk/qcom/gdsc.c 	for (i = 0; i < sc->reset_count; i++)
sc                163 drivers/clk/qcom/gdsc.c 		sc->rcdev->ops->assert(sc->rcdev, sc->resets[i]);
sc                167 drivers/clk/qcom/gdsc.c static inline void gdsc_force_mem_on(struct gdsc *sc)
sc                172 drivers/clk/qcom/gdsc.c 	for (i = 0; i < sc->cxc_count; i++)
sc                173 drivers/clk/qcom/gdsc.c 		regmap_update_bits(sc->regmap, sc->cxcs[i], mask, mask);
sc                176 drivers/clk/qcom/gdsc.c static inline void gdsc_clear_mem_on(struct gdsc *sc)
sc                181 drivers/clk/qcom/gdsc.c 	for (i = 0; i < sc->cxc_count; i++)
sc                182 drivers/clk/qcom/gdsc.c 		regmap_update_bits(sc->regmap, sc->cxcs[i], mask, 0);
sc                185 drivers/clk/qcom/gdsc.c static inline void gdsc_deassert_clamp_io(struct gdsc *sc)
sc                187 drivers/clk/qcom/gdsc.c 	regmap_update_bits(sc->regmap, sc->clamp_io_ctrl,
sc                191 drivers/clk/qcom/gdsc.c static inline void gdsc_assert_clamp_io(struct gdsc *sc)
sc                193 drivers/clk/qcom/gdsc.c 	regmap_update_bits(sc->regmap, sc->clamp_io_ctrl,
sc                197 drivers/clk/qcom/gdsc.c static inline void gdsc_assert_reset_aon(struct gdsc *sc)
sc                199 drivers/clk/qcom/gdsc.c 	regmap_update_bits(sc->regmap, sc->clamp_io_ctrl,
sc                202 drivers/clk/qcom/gdsc.c 	regmap_update_bits(sc->regmap, sc->clamp_io_ctrl,
sc                207 drivers/clk/qcom/gdsc.c 	struct gdsc *sc = domain_to_gdsc(domain);
sc                210 drivers/clk/qcom/gdsc.c 	if (sc->pwrsts == PWRSTS_ON)
sc                211 drivers/clk/qcom/gdsc.c 		return gdsc_deassert_reset(sc);
sc                213 drivers/clk/qcom/gdsc.c 	if (sc->flags & SW_RESET) {
sc                214 drivers/clk/qcom/gdsc.c 		gdsc_assert_reset(sc);
sc                216 drivers/clk/qcom/gdsc.c 		gdsc_deassert_reset(sc);
sc                219 drivers/clk/qcom/gdsc.c 	if (sc->flags & CLAMP_IO) {
sc                220 drivers/clk/qcom/gdsc.c 		if (sc->flags & AON_RESET)
sc                221 drivers/clk/qcom/gdsc.c 			gdsc_assert_reset_aon(sc);
sc                222 drivers/clk/qcom/gdsc.c 		gdsc_deassert_clamp_io(sc);
sc                225 drivers/clk/qcom/gdsc.c 	ret = gdsc_toggle_logic(sc, GDSC_ON);
sc                229 drivers/clk/qcom/gdsc.c 	if (sc->pwrsts & PWRSTS_OFF)
sc                230 drivers/clk/qcom/gdsc.c 		gdsc_force_mem_on(sc);
sc                242 drivers/clk/qcom/gdsc.c 	if (sc->flags & HW_CTRL) {
sc                243 drivers/clk/qcom/gdsc.c 		ret = gdsc_hwctrl(sc, true);
sc                262 drivers/clk/qcom/gdsc.c 	struct gdsc *sc = domain_to_gdsc(domain);
sc                265 drivers/clk/qcom/gdsc.c 	if (sc->pwrsts == PWRSTS_ON)
sc                266 drivers/clk/qcom/gdsc.c 		return gdsc_assert_reset(sc);
sc                269 drivers/clk/qcom/gdsc.c 	if (sc->flags & HW_CTRL) {
sc                270 drivers/clk/qcom/gdsc.c 		ret = gdsc_hwctrl(sc, false);
sc                281 drivers/clk/qcom/gdsc.c 		ret = gdsc_poll_status(sc, GDSC_ON);
sc                286 drivers/clk/qcom/gdsc.c 	if (sc->pwrsts & PWRSTS_OFF)
sc                287 drivers/clk/qcom/gdsc.c 		gdsc_clear_mem_on(sc);
sc                289 drivers/clk/qcom/gdsc.c 	ret = gdsc_toggle_logic(sc, GDSC_OFF);
sc                293 drivers/clk/qcom/gdsc.c 	if (sc->flags & CLAMP_IO)
sc                294 drivers/clk/qcom/gdsc.c 		gdsc_assert_clamp_io(sc);
sc                299 drivers/clk/qcom/gdsc.c static int gdsc_init(struct gdsc *sc)
sc                312 drivers/clk/qcom/gdsc.c 	ret = regmap_update_bits(sc->regmap, sc->gdscr, mask, val);
sc                317 drivers/clk/qcom/gdsc.c 	if (sc->pwrsts == PWRSTS_ON) {
sc                318 drivers/clk/qcom/gdsc.c 		ret = gdsc_toggle_logic(sc, GDSC_ON);
sc                323 drivers/clk/qcom/gdsc.c 	on = gdsc_check_status(sc, GDSC_ON);
sc                331 drivers/clk/qcom/gdsc.c 	if ((sc->flags & VOTABLE) && on)
sc                332 drivers/clk/qcom/gdsc.c 		gdsc_enable(&sc->pd);
sc                335 drivers/clk/qcom/gdsc.c 	if (sc->flags & ALWAYS_ON) {
sc                337 drivers/clk/qcom/gdsc.c 			gdsc_enable(&sc->pd);
sc                339 drivers/clk/qcom/gdsc.c 		sc->pd.flags |= GENPD_FLAG_ALWAYS_ON;
sc                342 drivers/clk/qcom/gdsc.c 	if (on || (sc->pwrsts & PWRSTS_RET))
sc                343 drivers/clk/qcom/gdsc.c 		gdsc_force_mem_on(sc);
sc                345 drivers/clk/qcom/gdsc.c 		gdsc_clear_mem_on(sc);
sc                347 drivers/clk/qcom/gdsc.c 	if (!sc->pd.power_off)
sc                348 drivers/clk/qcom/gdsc.c 		sc->pd.power_off = gdsc_disable;
sc                349 drivers/clk/qcom/gdsc.c 	if (!sc->pd.power_on)
sc                350 drivers/clk/qcom/gdsc.c 		sc->pd.power_on = gdsc_enable;
sc                351 drivers/clk/qcom/gdsc.c 	pm_genpd_init(&sc->pd, NULL, !on);
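
Most of the gdsc.c entries above reduce to one pattern: poke a control bit with regmap_update_bits(), then poll a status register with regmap_read() until the GDSC reports the requested state or the wait gives up, as gdsc_poll_status()/gdsc_check_status() do. The sketch below is a simplified, hypothetical version of that poll loop; the register offset, status mask and bounded udelay() wait are illustrative stand-ins for the driver's actual constants and timeout handling.

#include <linux/bits.h>
#include <linux/delay.h>
#include <linux/errno.h>
#include <linux/regmap.h>

#define EXAMPLE_PWR_ON_MASK	BIT(31)		/* hypothetical "power is on" status bit */

static int example_poll_gdsc_on(struct regmap *regmap, unsigned int gdscr)
{
	unsigned int val;
	int i, ret;

	for (i = 0; i < 500; i++) {		/* bounded wait instead of a ktime timeout */
		ret = regmap_read(regmap, gdscr, &val);
		if (ret)
			return ret;
		if (val & EXAMPLE_PWR_ON_MASK)
			return 0;
		udelay(1);
	}
	return -ETIMEDOUT;
}
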
sc                 27 drivers/crypto/chelsio/chtls/chtls_hw.c 	struct ulptx_idata *sc;
sc                 36 drivers/crypto/chelsio/chtls/chtls_hw.c 	sc = (struct ulptx_idata *)(req + 1);
sc                 37 drivers/crypto/chelsio/chtls/chtls_hw.c 	sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_NOOP));
sc                 38 drivers/crypto/chelsio/chtls/chtls_hw.c 	sc->len = htonl(0);
sc                 46 drivers/crypto/chelsio/chtls/chtls_hw.c 	struct ulptx_idata *sc;
sc                 49 drivers/crypto/chelsio/chtls/chtls_hw.c 	wrlen = roundup(sizeof(*req) + sizeof(*sc), 16);
sc                 66 drivers/crypto/chelsio/chtls/chtls_hw.c 	struct ulptx_idata *sc;
sc                 71 drivers/crypto/chelsio/chtls/chtls_hw.c 	wrlen = roundup(sizeof(*req) + sizeof(*sc), 16);
sc                281 drivers/crypto/chelsio/chtls/chtls_io.c 	struct ulptx_idata *sc;
sc                290 drivers/crypto/chelsio/chtls/chtls_io.c 	immdlen = sizeof(*sc) + sizeof(*sc_memrd);
sc                292 drivers/crypto/chelsio/chtls/chtls_io.c 	sc = (struct ulptx_idata *)__skb_push(skb, immdlen);
sc                293 drivers/crypto/chelsio/chtls/chtls_io.c 	if (sc) {
sc                294 drivers/crypto/chelsio/chtls/chtls_io.c 		sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_NOOP));
sc                295 drivers/crypto/chelsio/chtls/chtls_io.c 		sc->len = htonl(0);
sc                296 drivers/crypto/chelsio/chtls/chtls_io.c 		sc_memrd = (struct ulptx_sc_memrd *)(sc + 1);
sc                155 drivers/firewire/core-iso.c 	ctx->callback.sc = callback;
sc               2728 drivers/firewire/ohci.c 	ctx->base.callback.sc(&ctx->base, ctx->last_timestamp,
sc               3046 drivers/gpu/drm/drm_dp_mst_topology.c 	int sc;
sc               3048 drivers/gpu/drm/drm_dp_mst_topology.c 	sc = esi[0] & 0x3f;
sc               3050 drivers/gpu/drm/drm_dp_mst_topology.c 	if (sc != mgr->sink_count) {
sc               3051 drivers/gpu/drm/drm_dp_mst_topology.c 		mgr->sink_count = sc;
sc                110 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	struct fimc_scaler	sc;
sc                738 drivers/gpu/drm/exynos/exynos_drm_fimc.c static int fimc_set_prescaler(struct fimc_context *ctx, struct fimc_scaler *sc,
sc                785 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	sc->hratio = (src_w << 14) / (dst_w << hfactor);
sc                786 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	sc->vratio = (src_h << 14) / (dst_h << vfactor);
sc                787 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	sc->up_h = (dst_w >= src_w) ? true : false;
sc                788 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	sc->up_v = (dst_h >= src_h) ? true : false;
sc                790 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			  sc->hratio, sc->vratio, sc->up_h, sc->up_v);
sc                807 drivers/gpu/drm/exynos/exynos_drm_fimc.c static void fimc_set_scaler(struct fimc_context *ctx, struct fimc_scaler *sc)
sc                812 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			  sc->range, sc->bypass, sc->up_h, sc->up_v);
sc                814 drivers/gpu/drm/exynos/exynos_drm_fimc.c 			  sc->hratio, sc->vratio);
sc                824 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (sc->range)
sc                827 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (sc->bypass)
sc                829 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (sc->up_h)
sc                831 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	if (sc->up_v)
sc                834 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg |= (EXYNOS_CISCCTRL_MAINHORRATIO((sc->hratio >> 6)) |
sc                835 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		EXYNOS_CISCCTRL_MAINVERRATIO((sc->vratio >> 6)));
sc                841 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	cfg_ext |= (EXYNOS_CIEXTEN_MAINHORRATIO_EXT(sc->hratio) |
sc                842 drivers/gpu/drm/exynos/exynos_drm_fimc.c 		EXYNOS_CIEXTEN_MAINVERRATIO_EXT(sc->vratio));
sc               1006 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	memset(&ctx->sc, 0x0, sizeof(ctx->sc));
sc               1019 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_scaler(ctx, &ctx->sc);
sc               1100 drivers/gpu/drm/exynos/exynos_drm_fimc.c 	fimc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect);
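
fimc_set_prescaler() above stores the scaling ratios as 14-bit fixed-point values: the source size is shifted left by 14 and divided by the destination size (already reduced by the pre-scale factor), so 1 << 14 represents a ratio of 1.0, and up_h/up_v simply record whether the image grows. A small stand-alone illustration of that arithmetic, with hypothetical dimensions:

#include <stdio.h>

int main(void)
{
	unsigned int src_w = 1920, dst_w = 1280;	/* hypothetical sizes */
	unsigned int hfactor = 0;			/* no horizontal pre-scaling */
	unsigned int hratio;

	/* 14-bit fixed point: 1.0 == 1 << 14 == 16384 */
	hratio = (src_w << 14) / (dst_w << hfactor);

	/* 1920/1280 == 1.5, so hratio comes out as 24576 (1.5 * 16384) */
	printf("hratio = %u (~%.3f)\n", hratio, hratio / 16384.0);
	return 0;
}
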
sc                111 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler	sc;
sc                567 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
sc                594 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		if (sc->range)
sc                599 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		if (sc->range)
sc                743 drivers/gpu/drm/exynos/exynos_drm_gsc.c static int gsc_set_prescaler(struct gsc_context *ctx, struct gsc_scaler *sc,
sc                762 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_get_ratio_shift(ctx, src_w, dst_w, &sc->pre_hratio);
sc                768 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	ret = gsc_get_ratio_shift(ctx, src_h, dst_h, &sc->pre_vratio);
sc                775 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			  sc->pre_hratio, sc->pre_vratio);
sc                777 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	sc->main_hratio = (src_w << 16) / dst_w;
sc                778 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	sc->main_vratio = (src_h << 16) / dst_h;
sc                781 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			  sc->main_hratio, sc->main_vratio);
sc                783 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_get_prescaler_shfactor(sc->pre_hratio, sc->pre_vratio,
sc                784 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		&sc->pre_shfactor);
sc                786 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	DRM_DEV_DEBUG_KMS(ctx->dev, "pre_shfactor[%d]\n", sc->pre_shfactor);
sc                788 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	cfg = (GSC_PRESC_SHFACTOR(sc->pre_shfactor) |
sc                789 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		GSC_PRESC_H_RATIO(sc->pre_hratio) |
sc                790 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		GSC_PRESC_V_RATIO(sc->pre_vratio));
sc                848 drivers/gpu/drm/exynos/exynos_drm_gsc.c static void gsc_set_scaler(struct gsc_context *ctx, struct gsc_scaler *sc)
sc                853 drivers/gpu/drm/exynos/exynos_drm_gsc.c 			  sc->main_hratio, sc->main_vratio);
sc                855 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_h_coef(ctx, sc->main_hratio);
sc                856 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	cfg = GSC_MAIN_H_RATIO_VALUE(sc->main_hratio);
sc                859 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_v_coef(ctx, sc->main_vratio);
sc                860 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	cfg = GSC_MAIN_V_RATIO_VALUE(sc->main_vratio);
sc                867 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
sc                895 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		if (sc->range)
sc                900 drivers/gpu/drm/exynos/exynos_drm_gsc.c 		if (sc->range)
sc               1066 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	struct gsc_scaler *sc = &ctx->sc;
sc               1077 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	memset(&ctx->sc, 0x0, sizeof(ctx->sc));
sc               1078 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	sc->range = true;
sc               1107 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_scaler(ctx, &ctx->sc);
sc               1137 drivers/gpu/drm/exynos/exynos_drm_gsc.c 	gsc_set_prescaler(ctx, &ctx->sc, &task->src.rect, &task->dst.rect);
sc                315 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_shrinker_count(struct shrinker *shrinker, struct shrink_control *sc)
sc                344 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c i915_gem_shrinker_scan(struct shrinker *shrinker, struct shrink_control *sc)
sc                351 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 	sc->nr_scanned = 0;
sc                357 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 				sc->nr_to_scan,
sc                358 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 				&sc->nr_scanned,
sc                362 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 	if (sc->nr_scanned < sc->nr_to_scan && current_is_kswapd()) {
sc                367 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 						 sc->nr_to_scan - sc->nr_scanned,
sc                368 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 						 &sc->nr_scanned,
sc                378 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 	return sc->nr_scanned ? freed : SHRINK_STOP;
sc               2705 drivers/gpu/drm/i915/i915_debugfs.c 			struct intel_scaler *sc =
sc               2709 drivers/gpu/drm/i915/i915_debugfs.c 				   i, yesno(sc->in_use), sc->mode);
sc                 41 drivers/gpu/drm/msm/msm_gem_shrinker.c msm_gem_shrinker_count(struct shrinker *shrinker, struct shrink_control *sc)
sc                 65 drivers/gpu/drm/msm/msm_gem_shrinker.c msm_gem_shrinker_scan(struct shrinker *shrinker, struct shrink_control *sc)
sc                 78 drivers/gpu/drm/msm/msm_gem_shrinker.c 		if (freed >= sc->nr_to_scan)
sc                142 drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c 	const struct nvkm_enum *en, *re, *cl, *sc;
sc                177 drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c 	if      (cl && cl->data) sc = nvkm_enum_find(cl->data, st3);
sc                178 drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c 	else if (en && en->data) sc = nvkm_enum_find(en->data, st3);
sc                179 drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c 	else                     sc = NULL;
sc                190 drivers/gpu/drm/nouveau/nvkm/subdev/fb/nv50.c 		   st2, cl ? cl->name : "", st3, sc ? sc->name : "",
sc                424 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c 	unsigned int itc, ec, q, sc;
sc                447 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c 	sc = (ptr[2] >> 0) & 0x3;
sc                462 drivers/gpu/drm/omapdrm/dss/hdmi5_core.c 		(itc << 7) | (ec << 4) | (q << 2) | (sc << 0));
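
In the hdmi5_core.c entries above, sc is the two-bit scan-information field of the AVI infoframe: it is pulled out of bits 1:0 of the incoming byte and repacked together with itc, ec and q into a single register value. A tiny stand-alone illustration of that unpack/repack step, with hypothetical field values:

#include <stdio.h>

int main(void)
{
	unsigned char ptr2 = 0x53;		/* hypothetical AVI infoframe byte */
	unsigned int itc = 1, ec = 0, q = 2;	/* hypothetical other fields */
	unsigned int sc, packed;

	sc = (ptr2 >> 0) & 0x3;			/* scan information lives in bits 1:0 */
	packed = (itc << 7) | (ec << 4) | (q << 2) | (sc << 0);

	printf("sc = %u, packed = 0x%02x\n", sc, packed);	/* sc = 3, packed = 0x8b */
	return 0;
}
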
sc                 19 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c panfrost_gem_shrinker_count(struct shrinker *shrinker, struct shrink_control *sc)
sc                 58 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c panfrost_gem_shrinker_scan(struct shrinker *shrinker, struct shrink_control *sc)
sc                 69 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c 		if (freed >= sc->nr_to_scan)
sc                 62 drivers/gpu/drm/qxl/qxl_release.c 	int count = 0, sc = 0;
sc                 71 drivers/gpu/drm/qxl/qxl_release.c 	sc++;
sc                 89 drivers/gpu/drm/qxl/qxl_release.c 	if (have_drawable_releases || sc < 4) {
sc                 90 drivers/gpu/drm/qxl/qxl_release.c 		if (sc > 2)
sc                 97 drivers/gpu/drm/qxl/qxl_release.c 		if (have_drawable_releases && sc > 300) {
sc                100 drivers/gpu/drm/qxl/qxl_release.c 				       fence->context & ~0xf0000000, sc);
sc                377 drivers/gpu/drm/ttm/ttm_page_alloc.c ttm_pool_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc                384 drivers/gpu/drm/ttm/ttm_page_alloc.c 	int shrink_pages = sc->nr_to_scan;
sc                405 drivers/gpu/drm/ttm/ttm_page_alloc.c 		if (freed >= sc->nr_to_scan)
sc                415 drivers/gpu/drm/ttm/ttm_page_alloc.c ttm_pool_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc               1093 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_pool_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc               1098 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c 	unsigned shrink_pages = sc->nr_to_scan;
sc               1135 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_pool_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc                583 drivers/hid/hid-sony.c static void sony_set_leds(struct sony_sc *sc);
sc                585 drivers/hid/hid-sony.c static inline void sony_schedule_work(struct sony_sc *sc,
sc                592 drivers/hid/hid-sony.c 		spin_lock_irqsave(&sc->lock, flags);
sc                593 drivers/hid/hid-sony.c 		if (!sc->defer_initialization && sc->state_worker_initialized)
sc                594 drivers/hid/hid-sony.c 			schedule_work(&sc->state_worker);
sc                595 drivers/hid/hid-sony.c 		spin_unlock_irqrestore(&sc->lock, flags);
sc                598 drivers/hid/hid-sony.c 		if (sc->hotplug_worker_initialized)
sc                599 drivers/hid/hid-sony.c 			schedule_work(&sc->hotplug_worker);
sc                609 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc                611 drivers/hid/hid-sony.c 	return snprintf(buf, PAGE_SIZE, "%i\n", sc->ds4_bt_poll_interval);
sc                619 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc                629 drivers/hid/hid-sony.c 	spin_lock_irqsave(&sc->lock, flags);
sc                630 drivers/hid/hid-sony.c 	sc->ds4_bt_poll_interval = interval;
sc                631 drivers/hid/hid-sony.c 	spin_unlock_irqrestore(&sc->lock, flags);
sc                633 drivers/hid/hid-sony.c 	sony_schedule_work(sc, SONY_WORKER_STATE);
sc                646 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc                648 drivers/hid/hid-sony.c 	return snprintf(buf, PAGE_SIZE, "0x%04x\n", sc->fw_version);
sc                658 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc                660 drivers/hid/hid-sony.c 	return snprintf(buf, PAGE_SIZE, "0x%04x\n", sc->hw_version);
sc                843 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc                845 drivers/hid/hid-sony.c 	if (sc->quirks & (SINO_LITE_CONTROLLER | FUTUREMAX_DANCE_MAT))
sc                852 drivers/hid/hid-sony.c 	if ((sc->quirks & VAIO_RDESC_CONSTANT) && *rsize >= 56 &&
sc                864 drivers/hid/hid-sony.c 	if (sc->quirks & MOTION_CONTROLLER)
sc                867 drivers/hid/hid-sony.c 	if (sc->quirks & PS3REMOTE)
sc                874 drivers/hid/hid-sony.c 	if ((sc->quirks & SIXAXIS_CONTROLLER_USB) && *rsize >= 45 &&
sc                890 drivers/hid/hid-sony.c static void sixaxis_parse_report(struct sony_sc *sc, u8 *rd, int size)
sc                903 drivers/hid/hid-sony.c 	offset = (sc->quirks & MOTION_CONTROLLER) ? 12 : 30;
sc                916 drivers/hid/hid-sony.c 	spin_lock_irqsave(&sc->lock, flags);
sc                917 drivers/hid/hid-sony.c 	sc->cable_state = cable_state;
sc                918 drivers/hid/hid-sony.c 	sc->battery_capacity = battery_capacity;
sc                919 drivers/hid/hid-sony.c 	sc->battery_charging = battery_charging;
sc                920 drivers/hid/hid-sony.c 	spin_unlock_irqrestore(&sc->lock, flags);
sc                922 drivers/hid/hid-sony.c 	if (sc->quirks & SIXAXIS_CONTROLLER) {
sc                927 drivers/hid/hid-sony.c 		input_report_abs(sc->sensor_dev, ABS_X, val);
sc                931 drivers/hid/hid-sony.c 		input_report_abs(sc->sensor_dev, ABS_Y, val);
sc                934 drivers/hid/hid-sony.c 		input_report_abs(sc->sensor_dev, ABS_Z, val);
sc                936 drivers/hid/hid-sony.c 		input_sync(sc->sensor_dev);
sc                940 drivers/hid/hid-sony.c static void dualshock4_parse_report(struct sony_sc *sc, u8 *rd, int size)
sc                942 drivers/hid/hid-sony.c 	struct hid_input *hidinput = list_entry(sc->hdev->inputs.next,
sc                951 drivers/hid/hid-sony.c 	int data_offset = (sc->quirks & DUALSHOCK4_CONTROLLER_BT) ? 2 : 0;
sc                955 drivers/hid/hid-sony.c 	input_report_key(sc->touchpad, BTN_LEFT, rd[offset+2] & 0x2);
sc               1011 drivers/hid/hid-sony.c 	if (!sc->timestamp_initialized) {
sc               1012 drivers/hid/hid-sony.c 		sc->timestamp_us = ((unsigned int)timestamp * 16) / 3;
sc               1013 drivers/hid/hid-sony.c 		sc->timestamp_initialized = true;
sc               1017 drivers/hid/hid-sony.c 		if (sc->prev_timestamp > timestamp)
sc               1018 drivers/hid/hid-sony.c 			delta = (U16_MAX - sc->prev_timestamp + timestamp + 1);
sc               1020 drivers/hid/hid-sony.c 			delta = timestamp - sc->prev_timestamp;
sc               1021 drivers/hid/hid-sony.c 		sc->timestamp_us += (delta * 16) / 3;
sc               1023 drivers/hid/hid-sony.c 	sc->prev_timestamp = timestamp;
sc               1024 drivers/hid/hid-sony.c 	input_event(sc->sensor_dev, EV_MSC, MSC_TIMESTAMP, sc->timestamp_us);
sc               1030 drivers/hid/hid-sony.c 		struct ds4_calibration_data *calib = &sc->ds4_calib_data[n];
sc               1041 drivers/hid/hid-sony.c 		input_report_abs(sc->sensor_dev, calib->abs_code, calib_data);
sc               1044 drivers/hid/hid-sony.c 	input_sync(sc->sensor_dev);
sc               1071 drivers/hid/hid-sony.c 	spin_lock_irqsave(&sc->lock, flags);
sc               1072 drivers/hid/hid-sony.c 	sc->cable_state = cable_state;
sc               1073 drivers/hid/hid-sony.c 	sc->battery_capacity = battery_capacity;
sc               1074 drivers/hid/hid-sony.c 	sc->battery_charging = battery_charging;
sc               1075 drivers/hid/hid-sony.c 	spin_unlock_irqrestore(&sc->lock, flags);
sc               1084 drivers/hid/hid-sony.c 	max_touch_data = (sc->quirks & DUALSHOCK4_CONTROLLER_BT) ? 4 : 3;
sc               1111 drivers/hid/hid-sony.c 			input_mt_slot(sc->touchpad, n);
sc               1112 drivers/hid/hid-sony.c 			input_mt_report_slot_state(sc->touchpad, MT_TOOL_FINGER, active);
sc               1115 drivers/hid/hid-sony.c 				input_report_abs(sc->touchpad, ABS_MT_POSITION_X, x);
sc               1116 drivers/hid/hid-sony.c 				input_report_abs(sc->touchpad, ABS_MT_POSITION_Y, y);
sc               1121 drivers/hid/hid-sony.c 		input_mt_sync_frame(sc->touchpad);
sc               1122 drivers/hid/hid-sony.c 		input_sync(sc->touchpad);
sc               1126 drivers/hid/hid-sony.c static void nsg_mrxu_parse_report(struct sony_sc *sc, u8 *rd, int size)
sc               1151 drivers/hid/hid-sony.c 	input_report_key(sc->touchpad, BTN_LEFT, rd[offset] & 0x0F);
sc               1165 drivers/hid/hid-sony.c 		input_mt_slot(sc->touchpad, n);
sc               1166 drivers/hid/hid-sony.c 		input_mt_report_slot_state(sc->touchpad, MT_TOOL_FINGER, active & 0x03);
sc               1171 drivers/hid/hid-sony.c 			input_report_abs(sc->touchpad, ABS_MT_TOUCH_MAJOR,
sc               1173 drivers/hid/hid-sony.c 			input_report_abs(sc->touchpad, ABS_MT_TOUCH_MINOR,
sc               1175 drivers/hid/hid-sony.c 			input_report_abs(sc->touchpad, ABS_MT_ORIENTATION,
sc               1177 drivers/hid/hid-sony.c 			input_report_abs(sc->touchpad, ABS_MT_POSITION_X, x);
sc               1178 drivers/hid/hid-sony.c 			input_report_abs(sc->touchpad, ABS_MT_POSITION_Y,
sc               1186 drivers/hid/hid-sony.c 				input_report_rel(sc->touchpad, REL_X, relx);
sc               1187 drivers/hid/hid-sony.c 				input_report_rel(sc->touchpad, REL_Y, rely);
sc               1195 drivers/hid/hid-sony.c 	input_mt_sync_frame(sc->touchpad);
sc               1197 drivers/hid/hid-sony.c 	input_sync(sc->touchpad);
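
The dualshock4_parse_report() and nsg_mrxu_parse_report() entries above drive the slotted multitouch protocol: select a contact with input_mt_slot(), report whether it is active with input_mt_report_slot_state(), report its coordinates with input_report_abs(), then close the frame with input_mt_sync_frame() and input_sync(). A simplified sketch of that sequence for a two-contact touchpad is shown below; the device is assumed to have been prepared elsewhere with input_mt_init_slots(), and the contact structure is hypothetical.

#include <linux/input/mt.h>

struct example_contact {
	bool active;
	int x, y;
};

/* Push one frame of (up to) two contacts through the slotted MT protocol. */
static void example_report_frame(struct input_dev *touchpad,
				 const struct example_contact *c)
{
	int slot;

	for (slot = 0; slot < 2; slot++) {
		input_mt_slot(touchpad, slot);
		input_mt_report_slot_state(touchpad, MT_TOOL_FINGER,
					   c[slot].active);
		if (c[slot].active) {
			input_report_abs(touchpad, ABS_MT_POSITION_X, c[slot].x);
			input_report_abs(touchpad, ABS_MT_POSITION_Y, c[slot].y);
		}
	}

	input_mt_sync_frame(touchpad);
	input_sync(touchpad);
}
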
sc               1203 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               1209 drivers/hid/hid-sony.c 	if ((sc->quirks & SIXAXIS_CONTROLLER) && rd[0] == 0x01 && size == 49) {
sc               1226 drivers/hid/hid-sony.c 		sixaxis_parse_report(sc, rd, size);
sc               1227 drivers/hid/hid-sony.c 	} else if ((sc->quirks & MOTION_CONTROLLER_BT) && rd[0] == 0x01 && size == 49) {
sc               1228 drivers/hid/hid-sony.c 		sixaxis_parse_report(sc, rd, size);
sc               1229 drivers/hid/hid-sony.c 	} else if ((sc->quirks & NAVIGATION_CONTROLLER) && rd[0] == 0x01 &&
sc               1231 drivers/hid/hid-sony.c 		sixaxis_parse_report(sc, rd, size);
sc               1232 drivers/hid/hid-sony.c 	} else if ((sc->quirks & DUALSHOCK4_CONTROLLER_USB) && rd[0] == 0x01 &&
sc               1234 drivers/hid/hid-sony.c 		dualshock4_parse_report(sc, rd, size);
sc               1235 drivers/hid/hid-sony.c 	} else if (((sc->quirks & DUALSHOCK4_CONTROLLER_BT) && rd[0] == 0x11 &&
sc               1246 drivers/hid/hid-sony.c 			hid_dbg(sc->hdev, "DualShock 4 input report's CRC check failed, received crc 0x%0x != 0x%0x\n",
sc               1251 drivers/hid/hid-sony.c 		dualshock4_parse_report(sc, rd, size);
sc               1252 drivers/hid/hid-sony.c 	} else if ((sc->quirks & DUALSHOCK4_DONGLE) && rd[0] == 0x01 &&
sc               1264 drivers/hid/hid-sony.c 		spin_lock_irqsave(&sc->lock, flags);
sc               1265 drivers/hid/hid-sony.c 		dongle_state = sc->ds4_dongle_state;
sc               1266 drivers/hid/hid-sony.c 		spin_unlock_irqrestore(&sc->lock, flags);
sc               1277 drivers/hid/hid-sony.c 			hid_info(sc->hdev, "DualShock 4 USB dongle: controller connected\n");
sc               1278 drivers/hid/hid-sony.c 			sony_set_leds(sc);
sc               1280 drivers/hid/hid-sony.c 			spin_lock_irqsave(&sc->lock, flags);
sc               1281 drivers/hid/hid-sony.c 			sc->ds4_dongle_state = DONGLE_CALIBRATING;
sc               1282 drivers/hid/hid-sony.c 			spin_unlock_irqrestore(&sc->lock, flags);
sc               1284 drivers/hid/hid-sony.c 			sony_schedule_work(sc, SONY_WORKER_HOTPLUG);
sc               1292 drivers/hid/hid-sony.c 			hid_info(sc->hdev, "DualShock 4 USB dongle: controller disconnected\n");
sc               1294 drivers/hid/hid-sony.c 			spin_lock_irqsave(&sc->lock, flags);
sc               1295 drivers/hid/hid-sony.c 			sc->ds4_dongle_state = DONGLE_DISCONNECTED;
sc               1296 drivers/hid/hid-sony.c 			spin_unlock_irqrestore(&sc->lock, flags);
sc               1307 drivers/hid/hid-sony.c 		dualshock4_parse_report(sc, rd, size);
sc               1309 drivers/hid/hid-sony.c 	} else if ((sc->quirks & NSG_MRXU_REMOTE) && rd[0] == 0x02) {
sc               1310 drivers/hid/hid-sony.c 		nsg_mrxu_parse_report(sc, rd, size);
sc               1314 drivers/hid/hid-sony.c 	if (sc->defer_initialization) {
sc               1315 drivers/hid/hid-sony.c 		sc->defer_initialization = 0;
sc               1316 drivers/hid/hid-sony.c 		sony_schedule_work(sc, SONY_WORKER_STATE);
sc               1326 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               1328 drivers/hid/hid-sony.c 	if (sc->quirks & BUZZ_CONTROLLER) {
sc               1351 drivers/hid/hid-sony.c 	if (sc->quirks & PS3REMOTE)
sc               1354 drivers/hid/hid-sony.c 	if (sc->quirks & NAVIGATION_CONTROLLER)
sc               1357 drivers/hid/hid-sony.c 	if (sc->quirks & SIXAXIS_CONTROLLER)
sc               1360 drivers/hid/hid-sony.c 	if (sc->quirks & DUALSHOCK4_CONTROLLER)
sc               1368 drivers/hid/hid-sony.c static int sony_register_touchpad(struct sony_sc *sc, int touch_count,
sc               1375 drivers/hid/hid-sony.c 	sc->touchpad = devm_input_allocate_device(&sc->hdev->dev);
sc               1376 drivers/hid/hid-sony.c 	if (!sc->touchpad)
sc               1379 drivers/hid/hid-sony.c 	input_set_drvdata(sc->touchpad, sc);
sc               1380 drivers/hid/hid-sony.c 	sc->touchpad->dev.parent = &sc->hdev->dev;
sc               1381 drivers/hid/hid-sony.c 	sc->touchpad->phys = sc->hdev->phys;
sc               1382 drivers/hid/hid-sony.c 	sc->touchpad->uniq = sc->hdev->uniq;
sc               1383 drivers/hid/hid-sony.c 	sc->touchpad->id.bustype = sc->hdev->bus;
sc               1384 drivers/hid/hid-sony.c 	sc->touchpad->id.vendor = sc->hdev->vendor;
sc               1385 drivers/hid/hid-sony.c 	sc->touchpad->id.product = sc->hdev->product;
sc               1386 drivers/hid/hid-sony.c 	sc->touchpad->id.version = sc->hdev->version;
sc               1391 drivers/hid/hid-sony.c 	name_sz = strlen(sc->hdev->name) + sizeof(DS4_TOUCHPAD_SUFFIX);
sc               1392 drivers/hid/hid-sony.c 	name = devm_kzalloc(&sc->hdev->dev, name_sz, GFP_KERNEL);
sc               1395 drivers/hid/hid-sony.c 	snprintf(name, name_sz, "%s" DS4_TOUCHPAD_SUFFIX, sc->hdev->name);
sc               1396 drivers/hid/hid-sony.c 	sc->touchpad->name = name;
sc               1399 drivers/hid/hid-sony.c 	__set_bit(EV_KEY, sc->touchpad->evbit);
sc               1400 drivers/hid/hid-sony.c 	__set_bit(BTN_LEFT, sc->touchpad->keybit);
sc               1401 drivers/hid/hid-sony.c 	__set_bit(INPUT_PROP_BUTTONPAD, sc->touchpad->propbit);
sc               1403 drivers/hid/hid-sony.c 	input_set_abs_params(sc->touchpad, ABS_MT_POSITION_X, 0, w, 0, 0);
sc               1404 drivers/hid/hid-sony.c 	input_set_abs_params(sc->touchpad, ABS_MT_POSITION_Y, 0, h, 0, 0);
sc               1407 drivers/hid/hid-sony.c 		input_set_abs_params(sc->touchpad, ABS_MT_TOUCH_MAJOR, 
sc               1410 drivers/hid/hid-sony.c 			input_set_abs_params(sc->touchpad, ABS_MT_TOUCH_MINOR, 
sc               1413 drivers/hid/hid-sony.c 			input_set_abs_params(sc->touchpad, ABS_MT_ORIENTATION, 
sc               1417 drivers/hid/hid-sony.c 	if (sc->quirks & NSG_MRXU_REMOTE) {
sc               1418 drivers/hid/hid-sony.c 		__set_bit(EV_REL, sc->touchpad->evbit);
sc               1421 drivers/hid/hid-sony.c 	ret = input_mt_init_slots(sc->touchpad, touch_count, INPUT_MT_POINTER);
sc               1425 drivers/hid/hid-sony.c 	ret = input_register_device(sc->touchpad);
sc               1432 drivers/hid/hid-sony.c static int sony_register_sensors(struct sony_sc *sc)
sc               1439 drivers/hid/hid-sony.c 	sc->sensor_dev = devm_input_allocate_device(&sc->hdev->dev);
sc               1440 drivers/hid/hid-sony.c 	if (!sc->sensor_dev)
sc               1443 drivers/hid/hid-sony.c 	input_set_drvdata(sc->sensor_dev, sc);
sc               1444 drivers/hid/hid-sony.c 	sc->sensor_dev->dev.parent = &sc->hdev->dev;
sc               1445 drivers/hid/hid-sony.c 	sc->sensor_dev->phys = sc->hdev->phys;
sc               1446 drivers/hid/hid-sony.c 	sc->sensor_dev->uniq = sc->hdev->uniq;
sc               1447 drivers/hid/hid-sony.c 	sc->sensor_dev->id.bustype = sc->hdev->bus;
sc               1448 drivers/hid/hid-sony.c 	sc->sensor_dev->id.vendor = sc->hdev->vendor;
sc               1449 drivers/hid/hid-sony.c 	sc->sensor_dev->id.product = sc->hdev->product;
sc               1450 drivers/hid/hid-sony.c 	sc->sensor_dev->id.version = sc->hdev->version;
sc               1455 drivers/hid/hid-sony.c 	name_sz = strlen(sc->hdev->name) + sizeof(SENSOR_SUFFIX);
sc               1456 drivers/hid/hid-sony.c 	name = devm_kzalloc(&sc->hdev->dev, name_sz, GFP_KERNEL);
sc               1459 drivers/hid/hid-sony.c 	snprintf(name, name_sz, "%s" SENSOR_SUFFIX, sc->hdev->name);
sc               1460 drivers/hid/hid-sony.c 	sc->sensor_dev->name = name;
sc               1462 drivers/hid/hid-sony.c 	if (sc->quirks & SIXAXIS_CONTROLLER) {
sc               1469 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_X, -512, 511, 4, 0);
sc               1470 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_Y, -512, 511, 4, 0);
sc               1471 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_Z, -512, 511, 4, 0);
sc               1472 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_X, SIXAXIS_ACC_RES_PER_G);
sc               1473 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_Y, SIXAXIS_ACC_RES_PER_G);
sc               1474 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_Z, SIXAXIS_ACC_RES_PER_G);
sc               1475 drivers/hid/hid-sony.c 	} else if (sc->quirks & DUALSHOCK4_CONTROLLER) {
sc               1477 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_X, -range, range, 16, 0);
sc               1478 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_Y, -range, range, 16, 0);
sc               1479 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_Z, -range, range, 16, 0);
sc               1480 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_X, DS4_ACC_RES_PER_G);
sc               1481 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_Y, DS4_ACC_RES_PER_G);
sc               1482 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_Z, DS4_ACC_RES_PER_G);
sc               1485 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_RX, -range, range, 16, 0);
sc               1486 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_RY, -range, range, 16, 0);
sc               1487 drivers/hid/hid-sony.c 		input_set_abs_params(sc->sensor_dev, ABS_RZ, -range, range, 16, 0);
sc               1488 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_RX, DS4_GYRO_RES_PER_DEG_S);
sc               1489 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_RY, DS4_GYRO_RES_PER_DEG_S);
sc               1490 drivers/hid/hid-sony.c 		input_abs_set_res(sc->sensor_dev, ABS_RZ, DS4_GYRO_RES_PER_DEG_S);
sc               1492 drivers/hid/hid-sony.c 		__set_bit(EV_MSC, sc->sensor_dev->evbit);
sc               1493 drivers/hid/hid-sony.c 		__set_bit(MSC_TIMESTAMP, sc->sensor_dev->mscbit);
sc               1496 drivers/hid/hid-sony.c 	__set_bit(INPUT_PROP_ACCELEROMETER, sc->sensor_dev->propbit);
sc               1498 drivers/hid/hid-sony.c 	ret = input_register_device(sc->sensor_dev);
sc               1512 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               1544 drivers/hid/hid-sony.c 	if (sc->quirks & SHANWAN_GAMEPAD)
sc               1581 drivers/hid/hid-sony.c static int dualshock4_get_calibration_data(struct sony_sc *sc)
sc               1599 drivers/hid/hid-sony.c 	if (sc->quirks & (DUALSHOCK4_CONTROLLER_USB | DUALSHOCK4_DONGLE)) {
sc               1604 drivers/hid/hid-sony.c 		ret = hid_hw_raw_request(sc->hdev, 0x02, buf,
sc               1621 drivers/hid/hid-sony.c 			ret = hid_hw_raw_request(sc->hdev, 0x05, buf,
sc               1633 drivers/hid/hid-sony.c 				hid_warn(sc->hdev, "DualShock 4 calibration report's CRC check failed, received crc 0x%0x != 0x%0x\n",
sc               1636 drivers/hid/hid-sony.c 					hid_warn(sc->hdev, "Retrying DualShock 4 get calibration report request\n");
sc               1651 drivers/hid/hid-sony.c 	if (sc->quirks & DUALSHOCK4_CONTROLLER_USB) {
sc               1680 drivers/hid/hid-sony.c 	sc->ds4_calib_data[0].abs_code = ABS_RX;
sc               1681 drivers/hid/hid-sony.c 	sc->ds4_calib_data[0].bias = gyro_pitch_bias;
sc               1682 drivers/hid/hid-sony.c 	sc->ds4_calib_data[0].sens_numer = speed_2x*DS4_GYRO_RES_PER_DEG_S;
sc               1683 drivers/hid/hid-sony.c 	sc->ds4_calib_data[0].sens_denom = gyro_pitch_plus - gyro_pitch_minus;
sc               1685 drivers/hid/hid-sony.c 	sc->ds4_calib_data[1].abs_code = ABS_RY;
sc               1686 drivers/hid/hid-sony.c 	sc->ds4_calib_data[1].bias = gyro_yaw_bias;
sc               1687 drivers/hid/hid-sony.c 	sc->ds4_calib_data[1].sens_numer = speed_2x*DS4_GYRO_RES_PER_DEG_S;
sc               1688 drivers/hid/hid-sony.c 	sc->ds4_calib_data[1].sens_denom = gyro_yaw_plus - gyro_yaw_minus;
sc               1690 drivers/hid/hid-sony.c 	sc->ds4_calib_data[2].abs_code = ABS_RZ;
sc               1691 drivers/hid/hid-sony.c 	sc->ds4_calib_data[2].bias = gyro_roll_bias;
sc               1692 drivers/hid/hid-sony.c 	sc->ds4_calib_data[2].sens_numer = speed_2x*DS4_GYRO_RES_PER_DEG_S;
sc               1693 drivers/hid/hid-sony.c 	sc->ds4_calib_data[2].sens_denom = gyro_roll_plus - gyro_roll_minus;
sc               1699 drivers/hid/hid-sony.c 	sc->ds4_calib_data[3].abs_code = ABS_X;
sc               1700 drivers/hid/hid-sony.c 	sc->ds4_calib_data[3].bias = acc_x_plus - range_2g / 2;
sc               1701 drivers/hid/hid-sony.c 	sc->ds4_calib_data[3].sens_numer = 2*DS4_ACC_RES_PER_G;
sc               1702 drivers/hid/hid-sony.c 	sc->ds4_calib_data[3].sens_denom = range_2g;
sc               1705 drivers/hid/hid-sony.c 	sc->ds4_calib_data[4].abs_code = ABS_Y;
sc               1706 drivers/hid/hid-sony.c 	sc->ds4_calib_data[4].bias = acc_y_plus - range_2g / 2;
sc               1707 drivers/hid/hid-sony.c 	sc->ds4_calib_data[4].sens_numer = 2*DS4_ACC_RES_PER_G;
sc               1708 drivers/hid/hid-sony.c 	sc->ds4_calib_data[4].sens_denom = range_2g;
sc               1711 drivers/hid/hid-sony.c 	sc->ds4_calib_data[5].abs_code = ABS_Z;
sc               1712 drivers/hid/hid-sony.c 	sc->ds4_calib_data[5].bias = acc_z_plus - range_2g / 2;
sc               1713 drivers/hid/hid-sony.c 	sc->ds4_calib_data[5].sens_numer = 2*DS4_ACC_RES_PER_G;
sc               1714 drivers/hid/hid-sony.c 	sc->ds4_calib_data[5].sens_denom = range_2g;
sc               1723 drivers/hid/hid-sony.c 	struct sony_sc *sc = container_of(work, struct sony_sc, hotplug_worker);
sc               1728 drivers/hid/hid-sony.c 	ret = dualshock4_get_calibration_data(sc);
sc               1736 drivers/hid/hid-sony.c 		hid_err(sc->hdev, "DualShock 4 USB dongle: calibration failed, disabling device\n");
sc               1739 drivers/hid/hid-sony.c 		hid_info(sc->hdev, "DualShock 4 USB dongle: calibration completed\n");
sc               1743 drivers/hid/hid-sony.c 	spin_lock_irqsave(&sc->lock, flags);
sc               1744 drivers/hid/hid-sony.c 	sc->ds4_dongle_state = dongle_state;
sc               1745 drivers/hid/hid-sony.c 	spin_unlock_irqrestore(&sc->lock, flags);
sc               1748 drivers/hid/hid-sony.c static int dualshock4_get_version_info(struct sony_sc *sc)
sc               1757 drivers/hid/hid-sony.c 	ret = hid_hw_raw_request(sc->hdev, 0xA3, buf,
sc               1766 drivers/hid/hid-sony.c 	sc->hw_version = get_unaligned_le16(&buf[35]);
sc               1767 drivers/hid/hid-sony.c 	sc->fw_version = get_unaligned_le16(&buf[41]);
sc               1773 drivers/hid/hid-sony.c static void sixaxis_set_leds_from_id(struct sony_sc *sc)
sc               1788 drivers/hid/hid-sony.c 	int id = sc->device_id;
sc               1796 drivers/hid/hid-sony.c 	memcpy(sc->led_state, sixaxis_leds[id], sizeof(sixaxis_leds[id]));
sc               1799 drivers/hid/hid-sony.c static void dualshock4_set_leds_from_id(struct sony_sc *sc)
sc               1812 drivers/hid/hid-sony.c 	int id = sc->device_id;
sc               1820 drivers/hid/hid-sony.c 	memcpy(sc->led_state, color_code[id], sizeof(color_code[id]));
sc               1823 drivers/hid/hid-sony.c static void buzz_set_leds(struct sony_sc *sc)
sc               1825 drivers/hid/hid-sony.c 	struct hid_device *hdev = sc->hdev;
sc               1835 drivers/hid/hid-sony.c 	value[1] = sc->led_state[0] ? 0xff : 0x00;
sc               1836 drivers/hid/hid-sony.c 	value[2] = sc->led_state[1] ? 0xff : 0x00;
sc               1837 drivers/hid/hid-sony.c 	value[3] = sc->led_state[2] ? 0xff : 0x00;
sc               1838 drivers/hid/hid-sony.c 	value[4] = sc->led_state[3] ? 0xff : 0x00;
sc               1844 drivers/hid/hid-sony.c static void sony_set_leds(struct sony_sc *sc)
sc               1846 drivers/hid/hid-sony.c 	if (!(sc->quirks & BUZZ_CONTROLLER))
sc               1847 drivers/hid/hid-sony.c 		sony_schedule_work(sc, SONY_WORKER_STATE);
sc               1849 drivers/hid/hid-sony.c 		buzz_set_leds(sc);
sc               1964 drivers/hid/hid-sony.c static int sony_leds_init(struct sony_sc *sc)
sc               1966 drivers/hid/hid-sony.c 	struct hid_device *hdev = sc->hdev;
sc               1979 drivers/hid/hid-sony.c 	BUG_ON(!(sc->quirks & SONY_LED_SUPPORT));
sc               1981 drivers/hid/hid-sony.c 	if (sc->quirks & BUZZ_CONTROLLER) {
sc               1982 drivers/hid/hid-sony.c 		sc->led_count = 4;
sc               1989 drivers/hid/hid-sony.c 	} else if (sc->quirks & DUALSHOCK4_CONTROLLER) {
sc               1990 drivers/hid/hid-sony.c 		dualshock4_set_leds_from_id(sc);
sc               1991 drivers/hid/hid-sony.c 		sc->led_state[3] = 1;
sc               1992 drivers/hid/hid-sony.c 		sc->led_count = 4;
sc               1998 drivers/hid/hid-sony.c 	} else if (sc->quirks & MOTION_CONTROLLER) {
sc               1999 drivers/hid/hid-sony.c 		sc->led_count = 3;
sc               2004 drivers/hid/hid-sony.c 	} else if (sc->quirks & NAVIGATION_CONTROLLER) {
sc               2007 drivers/hid/hid-sony.c 		memcpy(sc->led_state, navigation_leds, sizeof(navigation_leds));
sc               2008 drivers/hid/hid-sony.c 		sc->led_count = 1;
sc               2014 drivers/hid/hid-sony.c 		sixaxis_set_leds_from_id(sc);
sc               2015 drivers/hid/hid-sony.c 		sc->led_count = 4;
sc               2027 drivers/hid/hid-sony.c 	sony_set_leds(sc);
sc               2031 drivers/hid/hid-sony.c 	for (n = 0; n < sc->led_count; n++) {
sc               2049 drivers/hid/hid-sony.c 		led->brightness = sc->led_state[n];
sc               2058 drivers/hid/hid-sony.c 		sc->leds[n] = led;
sc               2070 drivers/hid/hid-sony.c static void sixaxis_send_output_report(struct sony_sc *sc)
sc               2085 drivers/hid/hid-sony.c 		(struct sixaxis_output_report *)sc->output_report_dmabuf;
sc               2092 drivers/hid/hid-sony.c 	report->rumble.right_motor_on = sc->right ? 1 : 0;
sc               2093 drivers/hid/hid-sony.c 	report->rumble.left_motor_force = sc->left;
sc               2096 drivers/hid/hid-sony.c 	report->leds_bitmap |= sc->led_state[0] << 1;
sc               2097 drivers/hid/hid-sony.c 	report->leds_bitmap |= sc->led_state[1] << 2;
sc               2098 drivers/hid/hid-sony.c 	report->leds_bitmap |= sc->led_state[2] << 3;
sc               2099 drivers/hid/hid-sony.c 	report->leds_bitmap |= sc->led_state[3] << 4;
sc               2115 drivers/hid/hid-sony.c 		if (sc->led_delay_on[n] || sc->led_delay_off[n]) {
sc               2116 drivers/hid/hid-sony.c 			report->led[3 - n].duty_off = sc->led_delay_off[n];
sc               2117 drivers/hid/hid-sony.c 			report->led[3 - n].duty_on = sc->led_delay_on[n];
sc               2122 drivers/hid/hid-sony.c 	if (sc->quirks & SHANWAN_GAMEPAD)
sc               2123 drivers/hid/hid-sony.c 		hid_hw_output_report(sc->hdev, (u8 *)report,
sc               2126 drivers/hid/hid-sony.c 		hid_hw_raw_request(sc->hdev, report->report_id, (u8 *)report,
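Note: sixaxis_send_output_report above ORs the four led_state flags into bits 1..4 of leds_bitmap. A self-contained sketch of that packing (illustrative only):

    #include <stdint.h>
    #include <stdio.h>

    /* Pack four on/off LED states into bits 1..4, leaving bit 0 clear. */
    static uint8_t pack_leds(const uint8_t state[4])
    {
            uint8_t bitmap = 0;
            int n;

            for (n = 0; n < 4; n++)
                    bitmap |= (state[n] ? 1 : 0) << (n + 1);
            return bitmap;
    }

    int main(void)
    {
            uint8_t state[4] = { 1, 0, 0, 1 };

            printf("0x%02x\n", pack_leds(state)); /* prints 0x12 */
            return 0;
    }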
sc               2131 drivers/hid/hid-sony.c static void dualshock4_send_output_report(struct sony_sc *sc)
sc               2133 drivers/hid/hid-sony.c 	struct hid_device *hdev = sc->hdev;
sc               2134 drivers/hid/hid-sony.c 	u8 *buf = sc->output_report_dmabuf;
sc               2146 drivers/hid/hid-sony.c 	if (sc->quirks & (DUALSHOCK4_CONTROLLER_USB | DUALSHOCK4_DONGLE)) {
sc               2154 drivers/hid/hid-sony.c 		buf[1] = 0xC0 /* HID + CRC */ | sc->ds4_bt_poll_interval;
sc               2160 drivers/hid/hid-sony.c 	buf[offset++] = sc->right;
sc               2161 drivers/hid/hid-sony.c 	buf[offset++] = sc->left;
sc               2167 drivers/hid/hid-sony.c 	if (sc->led_state[3]) {
sc               2168 drivers/hid/hid-sony.c 		buf[offset++] = sc->led_state[0];
sc               2169 drivers/hid/hid-sony.c 		buf[offset++] = sc->led_state[1];
sc               2170 drivers/hid/hid-sony.c 		buf[offset++] = sc->led_state[2];
sc               2176 drivers/hid/hid-sony.c 	buf[offset++] = sc->led_delay_on[3];
sc               2177 drivers/hid/hid-sony.c 	buf[offset++] = sc->led_delay_off[3];
sc               2179 drivers/hid/hid-sony.c 	if (sc->quirks & (DUALSHOCK4_CONTROLLER_USB | DUALSHOCK4_DONGLE))
sc               2193 drivers/hid/hid-sony.c static void motion_send_output_report(struct sony_sc *sc)
sc               2195 drivers/hid/hid-sony.c 	struct hid_device *hdev = sc->hdev;
sc               2197 drivers/hid/hid-sony.c 		(struct motion_output_report_02 *)sc->output_report_dmabuf;
sc               2202 drivers/hid/hid-sony.c 	report->r = sc->led_state[0];
sc               2203 drivers/hid/hid-sony.c 	report->g = sc->led_state[1];
sc               2204 drivers/hid/hid-sony.c 	report->b = sc->led_state[2];
sc               2207 drivers/hid/hid-sony.c 	report->rumble = max(sc->right, sc->left);
sc               2213 drivers/hid/hid-sony.c static inline void sony_send_output_report(struct sony_sc *sc)
sc               2215 drivers/hid/hid-sony.c 	if (sc->send_output_report)
sc               2216 drivers/hid/hid-sony.c 		sc->send_output_report(sc);
sc               2221 drivers/hid/hid-sony.c 	struct sony_sc *sc = container_of(work, struct sony_sc, state_worker);
sc               2223 drivers/hid/hid-sony.c 	sc->send_output_report(sc);
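Note: sony_send_output_report and sony_state_worker above dispatch through a per-controller send_output_report callback that is installed later by sony_init_output_report. A stripped-down userspace sketch of that dispatch pattern, with hypothetical names:

    #include <stdio.h>

    struct ctrl {
            int left, right;
            void (*send_output_report)(struct ctrl *sc);
    };

    /* One report routine per controller type; the core code only sees the pointer. */
    static void fake_sixaxis_report(struct ctrl *sc)
    {
            printf("sixaxis rumble l=%d r=%d\n", sc->left, sc->right);
    }

    static void state_worker(struct ctrl *sc)
    {
            if (sc->send_output_report)
                    sc->send_output_report(sc);
    }

    int main(void)
    {
            struct ctrl sc = { .left = 3, .right = 7,
                               .send_output_report = fake_sixaxis_report };

            state_worker(&sc);
            return 0;
    }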
sc               2226 drivers/hid/hid-sony.c static int sony_allocate_output_report(struct sony_sc *sc)
sc               2228 drivers/hid/hid-sony.c 	if ((sc->quirks & SIXAXIS_CONTROLLER) ||
sc               2229 drivers/hid/hid-sony.c 			(sc->quirks & NAVIGATION_CONTROLLER))
sc               2230 drivers/hid/hid-sony.c 		sc->output_report_dmabuf =
sc               2231 drivers/hid/hid-sony.c 			devm_kmalloc(&sc->hdev->dev,
sc               2234 drivers/hid/hid-sony.c 	else if (sc->quirks & DUALSHOCK4_CONTROLLER_BT)
sc               2235 drivers/hid/hid-sony.c 		sc->output_report_dmabuf = devm_kmalloc(&sc->hdev->dev,
sc               2238 drivers/hid/hid-sony.c 	else if (sc->quirks & (DUALSHOCK4_CONTROLLER_USB | DUALSHOCK4_DONGLE))
sc               2239 drivers/hid/hid-sony.c 		sc->output_report_dmabuf = devm_kmalloc(&sc->hdev->dev,
sc               2242 drivers/hid/hid-sony.c 	else if (sc->quirks & MOTION_CONTROLLER)
sc               2243 drivers/hid/hid-sony.c 		sc->output_report_dmabuf = devm_kmalloc(&sc->hdev->dev,
sc               2249 drivers/hid/hid-sony.c 	if (!sc->output_report_dmabuf)
sc               2260 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hid);
sc               2265 drivers/hid/hid-sony.c 	sc->left = effect->u.rumble.strong_magnitude / 256;
sc               2266 drivers/hid/hid-sony.c 	sc->right = effect->u.rumble.weak_magnitude / 256;
sc               2268 drivers/hid/hid-sony.c 	sony_schedule_work(sc, SONY_WORKER_STATE);
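Note: the play_effect path above downscales the 16-bit force-feedback magnitudes to the 8-bit motor values by dividing by 256. For illustration only:

    #include <stdint.h>
    #include <stdio.h>

    /* The FF core reports 0..65535; the output report carries one byte per motor. */
    static uint8_t ff_to_motor(uint16_t magnitude)
    {
            return (uint8_t)(magnitude / 256);
    }

    int main(void)
    {
            printf("%u %u\n", ff_to_motor(0xffff), ff_to_motor(0x8000)); /* 255 128 */
            return 0;
    }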
sc               2272 drivers/hid/hid-sony.c static int sony_init_ff(struct sony_sc *sc)
sc               2277 drivers/hid/hid-sony.c 	if (list_empty(&sc->hdev->inputs)) {
sc               2278 drivers/hid/hid-sony.c 		hid_err(sc->hdev, "no inputs found\n");
sc               2281 drivers/hid/hid-sony.c 	hidinput = list_entry(sc->hdev->inputs.next, struct hid_input, list);
sc               2289 drivers/hid/hid-sony.c static int sony_init_ff(struct sony_sc *sc)
sc               2300 drivers/hid/hid-sony.c 	struct sony_sc *sc = power_supply_get_drvdata(psy);
sc               2305 drivers/hid/hid-sony.c 	spin_lock_irqsave(&sc->lock, flags);
sc               2306 drivers/hid/hid-sony.c 	battery_charging = sc->battery_charging;
sc               2307 drivers/hid/hid-sony.c 	battery_capacity = sc->battery_capacity;
sc               2308 drivers/hid/hid-sony.c 	cable_state = sc->cable_state;
sc               2309 drivers/hid/hid-sony.c 	spin_unlock_irqrestore(&sc->lock, flags);
sc               2337 drivers/hid/hid-sony.c static int sony_battery_probe(struct sony_sc *sc, int append_dev_id)
sc               2342 drivers/hid/hid-sony.c 	struct power_supply_config psy_cfg = { .drv_data = sc, };
sc               2343 drivers/hid/hid-sony.c 	struct hid_device *hdev = sc->hdev;
sc               2350 drivers/hid/hid-sony.c 	sc->battery_capacity = 100;
sc               2352 drivers/hid/hid-sony.c 	sc->battery_desc.properties = sony_battery_props;
sc               2353 drivers/hid/hid-sony.c 	sc->battery_desc.num_properties = ARRAY_SIZE(sony_battery_props);
sc               2354 drivers/hid/hid-sony.c 	sc->battery_desc.get_property = sony_battery_get_property;
sc               2355 drivers/hid/hid-sony.c 	sc->battery_desc.type = POWER_SUPPLY_TYPE_BATTERY;
sc               2356 drivers/hid/hid-sony.c 	sc->battery_desc.use_for_apm = 0;
sc               2357 drivers/hid/hid-sony.c 	sc->battery_desc.name = devm_kasprintf(&hdev->dev, GFP_KERNEL,
sc               2358 drivers/hid/hid-sony.c 					  battery_str_fmt, sc->mac_address, sc->device_id);
sc               2359 drivers/hid/hid-sony.c 	if (!sc->battery_desc.name)
sc               2362 drivers/hid/hid-sony.c 	sc->battery = devm_power_supply_register(&hdev->dev, &sc->battery_desc,
sc               2364 drivers/hid/hid-sony.c 	if (IS_ERR(sc->battery)) {
sc               2365 drivers/hid/hid-sony.c 		ret = PTR_ERR(sc->battery);
sc               2370 drivers/hid/hid-sony.c 	power_supply_powers(sc->battery, &hdev->dev);
sc               2394 drivers/hid/hid-sony.c static int sony_check_add_dev_list(struct sony_sc *sc)
sc               2403 drivers/hid/hid-sony.c 		ret = memcmp(sc->mac_address, entry->mac_address,
sc               2404 drivers/hid/hid-sony.c 				sizeof(sc->mac_address));
sc               2406 drivers/hid/hid-sony.c 			if (sony_compare_connection_type(sc, entry)) {
sc               2410 drivers/hid/hid-sony.c 				hid_info(sc->hdev,
sc               2412 drivers/hid/hid-sony.c 				sc->mac_address);
sc               2419 drivers/hid/hid-sony.c 	list_add(&(sc->list_node), &sony_device_list);
sc               2426 drivers/hid/hid-sony.c static void sony_remove_dev_list(struct sony_sc *sc)
sc               2430 drivers/hid/hid-sony.c 	if (sc->list_node.next) {
sc               2432 drivers/hid/hid-sony.c 		list_del(&(sc->list_node));
sc               2437 drivers/hid/hid-sony.c static int sony_get_bt_devaddr(struct sony_sc *sc)
sc               2442 drivers/hid/hid-sony.c 	ret = strlen(sc->hdev->uniq);
sc               2446 drivers/hid/hid-sony.c 	ret = sscanf(sc->hdev->uniq,
sc               2448 drivers/hid/hid-sony.c 		&sc->mac_address[5], &sc->mac_address[4], &sc->mac_address[3],
sc               2449 drivers/hid/hid-sony.c 		&sc->mac_address[2], &sc->mac_address[1], &sc->mac_address[0]);
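Note: sony_get_bt_devaddr above parses the colon-separated address in hdev->uniq into mac_address with the first printed byte stored last. A standalone sketch of that parse, assuming the usual "aa:bb:cc:dd:ee:ff" layout (the driver's format string uses "%02hhx"; plain "%2hhx" is used here for portability):

    #include <stdio.h>

    /* Parse "aa:bb:cc:dd:ee:ff" into mac[], least-significant byte first. */
    static int parse_bdaddr(const char *uniq, unsigned char mac[6])
    {
            int n = sscanf(uniq, "%2hhx:%2hhx:%2hhx:%2hhx:%2hhx:%2hhx",
                           &mac[5], &mac[4], &mac[3], &mac[2], &mac[1], &mac[0]);

            return n == 6 ? 0 : -1;
    }

    int main(void)
    {
            unsigned char mac[6];

            if (!parse_bdaddr("00:1b:fb:12:34:56", mac))
                    printf("%02x..%02x\n", mac[0], mac[5]); /* prints 56..00 */
            return 0;
    }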
sc               2457 drivers/hid/hid-sony.c static int sony_check_add(struct sony_sc *sc)
sc               2462 drivers/hid/hid-sony.c 	if ((sc->quirks & DUALSHOCK4_CONTROLLER_BT) ||
sc               2463 drivers/hid/hid-sony.c 	    (sc->quirks & MOTION_CONTROLLER_BT) ||
sc               2464 drivers/hid/hid-sony.c 	    (sc->quirks & NAVIGATION_CONTROLLER_BT) ||
sc               2465 drivers/hid/hid-sony.c 	    (sc->quirks & SIXAXIS_CONTROLLER_BT)) {
sc               2472 drivers/hid/hid-sony.c 		if (sony_get_bt_devaddr(sc) < 0) {
sc               2473 drivers/hid/hid-sony.c 			hid_warn(sc->hdev, "UNIQ does not contain a MAC address; duplicate check skipped\n");
sc               2476 drivers/hid/hid-sony.c 	} else if (sc->quirks & (DUALSHOCK4_CONTROLLER_USB | DUALSHOCK4_DONGLE)) {
sc               2486 drivers/hid/hid-sony.c 		ret = hid_hw_raw_request(sc->hdev, 0x81, buf,
sc               2491 drivers/hid/hid-sony.c 			hid_err(sc->hdev, "failed to retrieve feature report 0x81 with the DualShock 4 MAC address\n");
sc               2496 drivers/hid/hid-sony.c 		memcpy(sc->mac_address, &buf[1], sizeof(sc->mac_address));
sc               2498 drivers/hid/hid-sony.c 		snprintf(sc->hdev->uniq, sizeof(sc->hdev->uniq),
sc               2499 drivers/hid/hid-sony.c 			 "%pMR", sc->mac_address);
sc               2500 drivers/hid/hid-sony.c 	} else if ((sc->quirks & SIXAXIS_CONTROLLER_USB) ||
sc               2501 drivers/hid/hid-sony.c 			(sc->quirks & NAVIGATION_CONTROLLER_USB)) {
sc               2511 drivers/hid/hid-sony.c 		ret = hid_hw_raw_request(sc->hdev, 0xf2, buf,
sc               2516 drivers/hid/hid-sony.c 			hid_err(sc->hdev, "failed to retrieve feature report 0xf2 with the Sixaxis MAC address\n");
sc               2526 drivers/hid/hid-sony.c 			sc->mac_address[5-n] = buf[4+n];
sc               2528 drivers/hid/hid-sony.c 		snprintf(sc->hdev->uniq, sizeof(sc->hdev->uniq),
sc               2529 drivers/hid/hid-sony.c 			 "%pMR", sc->mac_address);
sc               2534 drivers/hid/hid-sony.c 	ret = sony_check_add_dev_list(sc);
sc               2543 drivers/hid/hid-sony.c static int sony_set_device_id(struct sony_sc *sc)
sc               2551 drivers/hid/hid-sony.c 	if ((sc->quirks & SIXAXIS_CONTROLLER) ||
sc               2552 drivers/hid/hid-sony.c 	    (sc->quirks & DUALSHOCK4_CONTROLLER)) {
sc               2556 drivers/hid/hid-sony.c 			sc->device_id = -1;
sc               2559 drivers/hid/hid-sony.c 		sc->device_id = ret;
sc               2561 drivers/hid/hid-sony.c 		sc->device_id = -1;
sc               2567 drivers/hid/hid-sony.c static void sony_release_device_id(struct sony_sc *sc)
sc               2569 drivers/hid/hid-sony.c 	if (sc->device_id >= 0) {
sc               2570 drivers/hid/hid-sony.c 		ida_simple_remove(&sony_device_id_allocator, sc->device_id);
sc               2571 drivers/hid/hid-sony.c 		sc->device_id = -1;
sc               2575 drivers/hid/hid-sony.c static inline void sony_init_output_report(struct sony_sc *sc,
sc               2578 drivers/hid/hid-sony.c 	sc->send_output_report = send_output_report;
sc               2580 drivers/hid/hid-sony.c 	if (!sc->state_worker_initialized)
sc               2581 drivers/hid/hid-sony.c 		INIT_WORK(&sc->state_worker, sony_state_worker);
sc               2583 drivers/hid/hid-sony.c 	sc->state_worker_initialized = 1;
sc               2586 drivers/hid/hid-sony.c static inline void sony_cancel_work_sync(struct sony_sc *sc)
sc               2590 drivers/hid/hid-sony.c 	if (sc->hotplug_worker_initialized)
sc               2591 drivers/hid/hid-sony.c 		cancel_work_sync(&sc->hotplug_worker);
sc               2592 drivers/hid/hid-sony.c 	if (sc->state_worker_initialized) {
sc               2593 drivers/hid/hid-sony.c 		spin_lock_irqsave(&sc->lock, flags);
sc               2594 drivers/hid/hid-sony.c 		sc->state_worker_initialized = 0;
sc               2595 drivers/hid/hid-sony.c 		spin_unlock_irqrestore(&sc->lock, flags);
sc               2596 drivers/hid/hid-sony.c 		cancel_work_sync(&sc->state_worker);
sc               2603 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               2607 drivers/hid/hid-sony.c 	ret = sony_set_device_id(sc);
sc               2613 drivers/hid/hid-sony.c 	ret = append_dev_id = sony_check_add(sc);
sc               2617 drivers/hid/hid-sony.c 	ret = sony_allocate_output_report(sc);
sc               2623 drivers/hid/hid-sony.c 	if (sc->quirks & NAVIGATION_CONTROLLER_USB) {
sc               2642 drivers/hid/hid-sony.c 		sc->defer_initialization = 1;
sc               2650 drivers/hid/hid-sony.c 		sony_init_output_report(sc, sixaxis_send_output_report);
sc               2651 drivers/hid/hid-sony.c 	} else if (sc->quirks & NAVIGATION_CONTROLLER_BT) {
sc               2664 drivers/hid/hid-sony.c 		sony_init_output_report(sc, sixaxis_send_output_report);
sc               2665 drivers/hid/hid-sony.c 	} else if (sc->quirks & SIXAXIS_CONTROLLER_USB) {
sc               2674 drivers/hid/hid-sony.c 		sc->defer_initialization = 1;
sc               2682 drivers/hid/hid-sony.c 		ret = sony_register_sensors(sc);
sc               2684 drivers/hid/hid-sony.c 			hid_err(sc->hdev,
sc               2689 drivers/hid/hid-sony.c 		sony_init_output_report(sc, sixaxis_send_output_report);
sc               2690 drivers/hid/hid-sony.c 	} else if (sc->quirks & SIXAXIS_CONTROLLER_BT) {
sc               2703 drivers/hid/hid-sony.c 		ret = sony_register_sensors(sc);
sc               2705 drivers/hid/hid-sony.c 			hid_err(sc->hdev,
sc               2710 drivers/hid/hid-sony.c 		sony_init_output_report(sc, sixaxis_send_output_report);
sc               2711 drivers/hid/hid-sony.c 	} else if (sc->quirks & DUALSHOCK4_CONTROLLER) {
sc               2712 drivers/hid/hid-sony.c 		ret = dualshock4_get_calibration_data(sc);
sc               2718 drivers/hid/hid-sony.c 		ret = dualshock4_get_version_info(sc);
sc               2720 drivers/hid/hid-sony.c 			hid_err(sc->hdev, "Failed to get version data from Dualshock 4\n");
sc               2724 drivers/hid/hid-sony.c 		ret = device_create_file(&sc->hdev->dev, &dev_attr_firmware_version);
sc               2727 drivers/hid/hid-sony.c 			sc->fw_version = 0;
sc               2728 drivers/hid/hid-sony.c 			sc->hw_version = 0;
sc               2729 drivers/hid/hid-sony.c 			hid_err(sc->hdev, "can't create sysfs firmware_version attribute err: %d\n", ret);
sc               2733 drivers/hid/hid-sony.c 		ret = device_create_file(&sc->hdev->dev, &dev_attr_hardware_version);
sc               2735 drivers/hid/hid-sony.c 			sc->hw_version = 0;
sc               2736 drivers/hid/hid-sony.c 			hid_err(sc->hdev, "can't create sysfs hardware_version attribute err: %d\n", ret);
sc               2744 drivers/hid/hid-sony.c 		ret = sony_register_touchpad(sc, 2, 1920, 942, 0, 0, 0);
sc               2746 drivers/hid/hid-sony.c 			hid_err(sc->hdev,
sc               2752 drivers/hid/hid-sony.c 		ret = sony_register_sensors(sc);
sc               2754 drivers/hid/hid-sony.c 			hid_err(sc->hdev,
sc               2759 drivers/hid/hid-sony.c 		if (sc->quirks & DUALSHOCK4_CONTROLLER_BT) {
sc               2760 drivers/hid/hid-sony.c 			sc->ds4_bt_poll_interval = DS4_BT_DEFAULT_POLL_INTERVAL_MS;
sc               2761 drivers/hid/hid-sony.c 			ret = device_create_file(&sc->hdev->dev, &dev_attr_bt_poll_interval);
sc               2763 drivers/hid/hid-sony.c 				hid_warn(sc->hdev,
sc               2768 drivers/hid/hid-sony.c 		if (sc->quirks & DUALSHOCK4_DONGLE) {
sc               2769 drivers/hid/hid-sony.c 			INIT_WORK(&sc->hotplug_worker, dualshock4_calibration_work);
sc               2770 drivers/hid/hid-sony.c 			sc->hotplug_worker_initialized = 1;
sc               2771 drivers/hid/hid-sony.c 			sc->ds4_dongle_state = DONGLE_DISCONNECTED;
sc               2774 drivers/hid/hid-sony.c 		sony_init_output_report(sc, dualshock4_send_output_report);
sc               2775 drivers/hid/hid-sony.c 	} else if (sc->quirks & NSG_MRXU_REMOTE) {
sc               2780 drivers/hid/hid-sony.c 		ret = sony_register_touchpad(sc, 2,
sc               2783 drivers/hid/hid-sony.c 			hid_err(sc->hdev,
sc               2789 drivers/hid/hid-sony.c 	} else if (sc->quirks & MOTION_CONTROLLER) {
sc               2790 drivers/hid/hid-sony.c 		sony_init_output_report(sc, motion_send_output_report);
sc               2795 drivers/hid/hid-sony.c 	if (sc->quirks & SONY_LED_SUPPORT) {
sc               2796 drivers/hid/hid-sony.c 		ret = sony_leds_init(sc);
sc               2801 drivers/hid/hid-sony.c 	if (sc->quirks & SONY_BATTERY_SUPPORT) {
sc               2802 drivers/hid/hid-sony.c 		ret = sony_battery_probe(sc, append_dev_id);
sc               2814 drivers/hid/hid-sony.c 	if (sc->quirks & SONY_FF_SUPPORT) {
sc               2815 drivers/hid/hid-sony.c 		ret = sony_init_ff(sc);
sc               2828 drivers/hid/hid-sony.c 	if (sc->ds4_bt_poll_interval)
sc               2829 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_bt_poll_interval);
sc               2830 drivers/hid/hid-sony.c 	if (sc->fw_version)
sc               2831 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_firmware_version);
sc               2832 drivers/hid/hid-sony.c 	if (sc->hw_version)
sc               2833 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_hardware_version);
sc               2834 drivers/hid/hid-sony.c 	sony_cancel_work_sync(sc);
sc               2835 drivers/hid/hid-sony.c 	sony_remove_dev_list(sc);
sc               2836 drivers/hid/hid-sony.c 	sony_release_device_id(sc);
sc               2844 drivers/hid/hid-sony.c 	struct sony_sc *sc;
sc               2853 drivers/hid/hid-sony.c 	sc = devm_kzalloc(&hdev->dev, sizeof(*sc), GFP_KERNEL);
sc               2854 drivers/hid/hid-sony.c 	if (sc == NULL) {
sc               2859 drivers/hid/hid-sony.c 	spin_lock_init(&sc->lock);
sc               2861 drivers/hid/hid-sony.c 	sc->quirks = quirks;
sc               2862 drivers/hid/hid-sony.c 	hid_set_drvdata(hdev, sc);
sc               2863 drivers/hid/hid-sony.c 	sc->hdev = hdev;
sc               2871 drivers/hid/hid-sony.c 	if (sc->quirks & VAIO_RDESC_CONSTANT)
sc               2873 drivers/hid/hid-sony.c 	else if (sc->quirks & SIXAXIS_CONTROLLER)
sc               2882 drivers/hid/hid-sony.c 	if (sc->quirks & (SIXAXIS_CONTROLLER | DUALSHOCK4_CONTROLLER))
sc               2910 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               2914 drivers/hid/hid-sony.c 	if (sc->quirks & DUALSHOCK4_CONTROLLER_BT)
sc               2915 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_bt_poll_interval);
sc               2917 drivers/hid/hid-sony.c 	if (sc->fw_version)
sc               2918 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_firmware_version);
sc               2920 drivers/hid/hid-sony.c 	if (sc->hw_version)
sc               2921 drivers/hid/hid-sony.c 		device_remove_file(&sc->hdev->dev, &dev_attr_hardware_version);
sc               2923 drivers/hid/hid-sony.c 	sony_cancel_work_sync(sc);
sc               2925 drivers/hid/hid-sony.c 	sony_remove_dev_list(sc);
sc               2927 drivers/hid/hid-sony.c 	sony_release_device_id(sc);
sc               2940 drivers/hid/hid-sony.c 		struct sony_sc *sc = hid_get_drvdata(hdev);
sc               2942 drivers/hid/hid-sony.c 		sc->left = sc->right = 0;
sc               2943 drivers/hid/hid-sony.c 		sony_send_output_report(sc);
sc               2952 drivers/hid/hid-sony.c 	struct sony_sc *sc = hid_get_drvdata(hdev);
sc               2958 drivers/hid/hid-sony.c 	if ((sc->quirks & SIXAXIS_CONTROLLER_USB) ||
sc               2959 drivers/hid/hid-sony.c 		(sc->quirks & NAVIGATION_CONTROLLER_USB)) {
sc               2960 drivers/hid/hid-sony.c 		sixaxis_set_operational_usb(sc->hdev);
sc               2961 drivers/hid/hid-sony.c 		sc->defer_initialization = 1;
sc                248 drivers/hv/hv.c 	struct vmbus_channel *channel, *sc;
sc                265 drivers/hv/hv.c 		list_for_each_entry(sc, &channel->sc_list, sc_list) {
sc                266 drivers/hv/hv.c 			if (sc->target_cpu == cpu) {
sc               2179 drivers/hv/vmbus_drv.c 	struct vmbus_channel *channel, *sc;
sc               2236 drivers/hv/vmbus_drv.c 		list_for_each_entry(sc, &channel->sc_list, sc_list) {
sc                 33 drivers/hwtracing/stm/console.c 	struct stm_console *sc = container_of(con, struct stm_console, console);
sc                 35 drivers/hwtracing/stm/console.c 	stm_source_write(&sc->data, 0, buf, len);
sc                 40 drivers/hwtracing/stm/console.c 	struct stm_console *sc = container_of(data, struct stm_console, data);
sc                 42 drivers/hwtracing/stm/console.c 	strcpy(sc->console.name, "stm_console");
sc                 43 drivers/hwtracing/stm/console.c 	sc->console.write = stm_console_write;
sc                 44 drivers/hwtracing/stm/console.c 	sc->console.flags = CON_ENABLED | CON_PRINTBUFFER;
sc                 45 drivers/hwtracing/stm/console.c 	register_console(&sc->console);
sc                 52 drivers/hwtracing/stm/console.c 	struct stm_console *sc = container_of(data, struct stm_console, data);
sc                 54 drivers/hwtracing/stm/console.c 	unregister_console(&sc->console);
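Note: the stm/console.c entries above recover the enclosing stm_console from an embedded console or stm_source_data member with container_of. A userspace equivalent built on offsetof (illustrative, not the kernel macro verbatim):

    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct console { const char *name; };
    struct stm_console_like { int id; struct console console; };

    int main(void)
    {
            struct stm_console_like sc = { .id = 42, .console = { "stm_console" } };
            struct console *con = &sc.console;

            /* Walk back from the embedded member to the enclosing structure. */
            printf("%d\n", container_of(con, struct stm_console_like, console)->id);
            return 0;
    }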
sc                115 drivers/infiniband/hw/cxgb4/mem.c 	struct ulptx_idata *sc;
sc                133 drivers/infiniband/hw/cxgb4/mem.c 		wr_len = roundup(sizeof(*req) + sizeof(*sc) +
sc                163 drivers/infiniband/hw/cxgb4/mem.c 		sc = (struct ulptx_idata *)(req + 1);
sc                164 drivers/infiniband/hw/cxgb4/mem.c 		sc->cmd_more = cpu_to_be32(ULPTX_CMD_V(ULP_TX_SC_IMM));
sc                165 drivers/infiniband/hw/cxgb4/mem.c 		sc->len = cpu_to_be32(roundup(copy_len, T4_ULPTX_MIN_IO));
sc                167 drivers/infiniband/hw/cxgb4/mem.c 		to_dp = (u8 *)(sc + 1);
sc               5808 drivers/infiniband/hw/hfi1/chip.c 	struct send_context *sc;
sc               5817 drivers/infiniband/hw/hfi1/chip.c 	sc = sci->sc;
sc               5818 drivers/infiniband/hw/hfi1/chip.c 	if (!sc)
sc               5820 drivers/infiniband/hw/hfi1/chip.c 	if (dd->vld[15].sc == sc)
sc               5823 drivers/infiniband/hw/hfi1/chip.c 		if (dd->vld[i].sc == sc)
sc               5972 drivers/infiniband/hw/hfi1/chip.c 	struct send_context *sc;
sc               5988 drivers/infiniband/hw/hfi1/chip.c 	sc = sci->sc;
sc               5989 drivers/infiniband/hw/hfi1/chip.c 	if (!sc) {
sc               5997 drivers/infiniband/hw/hfi1/chip.c 	sc_stop(sc, SCF_HALTED);
sc               6012 drivers/infiniband/hw/hfi1/chip.c 	if (sc->type != SC_USER)
sc               6013 drivers/infiniband/hw/hfi1/chip.c 		queue_work(dd->pport->hfi1_wq, &sc->halt_work);
sc               6763 drivers/infiniband/hw/hfi1/chip.c 	struct send_context *sc;
sc               6780 drivers/infiniband/hw/hfi1/chip.c 		sc = dd->send_contexts[i].sc;
sc               6781 drivers/infiniband/hw/hfi1/chip.c 		if (sc && (sc->flags & SCF_ENABLED))
sc               6782 drivers/infiniband/hw/hfi1/chip.c 			sc_stop(sc, sc_flags);
sc               10105 drivers/infiniband/hw/hfi1/chip.c 		thres = min(sc_percent_to_threshold(dd->vld[i].sc, 50),
sc               10106 drivers/infiniband/hw/hfi1/chip.c 			    sc_mtu_to_threshold(dd->vld[i].sc,
sc               10114 drivers/infiniband/hw/hfi1/chip.c 	thres = min(sc_percent_to_threshold(dd->vld[15].sc, 50),
sc               10115 drivers/infiniband/hw/hfi1/chip.c 		    sc_mtu_to_threshold(dd->vld[15].sc,
sc               10118 drivers/infiniband/hw/hfi1/chip.c 	sc_set_cr_threshold(dd->vld[15].sc, thres);
sc               13009 drivers/infiniband/hw/hfi1/chip.c void hfi1_init_ctxt(struct send_context *sc)
sc               13011 drivers/infiniband/hw/hfi1/chip.c 	if (sc) {
sc               13012 drivers/infiniband/hw/hfi1/chip.c 		struct hfi1_devdata *dd = sc->dd;
sc               13014 drivers/infiniband/hw/hfi1/chip.c 		u8 set = (sc->type == SC_USER ?
sc               13017 drivers/infiniband/hw/hfi1/chip.c 		reg = read_kctxt_csr(dd, sc->hw_context,
sc               13023 drivers/infiniband/hw/hfi1/chip.c 		write_kctxt_csr(dd, sc->hw_context,
sc               14594 drivers/infiniband/hw/hfi1/chip.c 	if (!rcd || !rcd->sc)
sc               14597 drivers/infiniband/hw/hfi1/chip.c 	hw_ctxt = rcd->sc->hw_context;
sc               14628 drivers/infiniband/hw/hfi1/chip.c 	if (!rcd || !rcd->sc)
sc               14631 drivers/infiniband/hw/hfi1/chip.c 	hw_ctxt = rcd->sc->hw_context;
sc               14655 drivers/infiniband/hw/hfi1/chip.c 	if (!rcd || !rcd->sc)
sc               14658 drivers/infiniband/hw/hfi1/chip.c 	hw_ctxt = rcd->sc->hw_context;
sc               14675 drivers/infiniband/hw/hfi1/chip.c 	if (!ctxt || !ctxt->sc)
sc               14678 drivers/infiniband/hw/hfi1/chip.c 	hw_ctxt = ctxt->sc->hw_context;
sc               1420 drivers/infiniband/hw/hfi1/chip.h void hfi1_init_ctxt(struct send_context *sc);
sc                448 drivers/infiniband/hw/hfi1/debugfs.c 	if (sci && sci->type != SC_USER && sci->allocated && sci->sc)
sc                454 drivers/infiniband/hw/hfi1/driver.c 	u8 hdr_type, sc, svc_type, opcode;
sc                461 drivers/infiniband/hw/hfi1/driver.c 		sc = hfi1_16B_get_sc(pkt->hdr);
sc                471 drivers/infiniband/hw/hfi1/driver.c 		sc = hfi1_9B_get_sc5(pkt->hdr, pkt->rhf);
sc                524 drivers/infiniband/hw/hfi1/driver.c 					      dlid, rlid, sc, grh);
sc                528 drivers/infiniband/hw/hfi1/driver.c 		u8 sl = ibp->sc_to_sl[sc];
sc                975 drivers/infiniband/hw/hfi1/driver.c 	u8 sc = SC15_PACKET;
sc                980 drivers/infiniband/hw/hfi1/driver.c 		sc = hfi1_9B_get_sc5(hdr, packet->rhf);
sc                985 drivers/infiniband/hw/hfi1/driver.c 		sc = hfi1_16B_get_sc(hdr);
sc                987 drivers/infiniband/hw/hfi1/driver.c 	if (sc != SC15_PACKET) {
sc               1435 drivers/infiniband/hw/hfi1/driver.c 	if ((hfi1_is_16B_mcast(packet->dlid)) && (packet->sc == 0xF))
sc               1441 drivers/infiniband/hw/hfi1/driver.c 	    (packet->sc != 0xF))
sc               1484 drivers/infiniband/hw/hfi1/driver.c 	packet->sc = hfi1_9B_get_sc5(hdr, packet->rhf);
sc               1564 drivers/infiniband/hw/hfi1/driver.c 	packet->sc = hfi1_16B_get_sc(packet->hdr);
sc               1565 drivers/infiniband/hw/hfi1/driver.c 	packet->sl = ibp->sc_to_sl[packet->sc];
sc                252 drivers/infiniband/hw/hfi1/file_ops.c 			sc_return_credits(uctxt->sc);
sc                385 drivers/infiniband/hw/hfi1/file_ops.c 			   (uctxt->sc->hw_context * BIT(16))) +
sc                393 drivers/infiniband/hw/hfi1/file_ops.c 		memlen = PAGE_ALIGN(uctxt->sc->credits * PIO_BLOCK_SIZE);
sc                411 drivers/infiniband/hw/hfi1/file_ops.c 			(((u64)uctxt->sc->hw_free -
sc                703 drivers/infiniband/hw/hfi1/file_ops.c 	if (uctxt->sc) {
sc                704 drivers/infiniband/hw/hfi1/file_ops.c 		sc_disable(uctxt->sc);
sc                705 drivers/infiniband/hw/hfi1/file_ops.c 		set_pio_integrity(uctxt->sc);
sc                865 drivers/infiniband/hw/hfi1/file_ops.c 	if (uctxt->sc && (uctxt->sc->type == SC_KERNEL))
sc                985 drivers/infiniband/hw/hfi1/file_ops.c 	uctxt->sc = sc_alloc(dd, SC_USER, uctxt->rcvhdrqentsize, dd->node);
sc                986 drivers/infiniband/hw/hfi1/file_ops.c 	if (!uctxt->sc) {
sc                990 drivers/infiniband/hw/hfi1/file_ops.c 	hfi1_cdbg(PROC, "allocated send context %u(%u)\n", uctxt->sc->sw_index,
sc                991 drivers/infiniband/hw/hfi1/file_ops.c 		  uctxt->sc->hw_context);
sc                992 drivers/infiniband/hw/hfi1/file_ops.c 	ret = sc_enable(uctxt->sc);
sc               1163 drivers/infiniband/hw/hfi1/file_ops.c 	cinfo.credits = uctxt->sc->credits;
sc               1166 drivers/infiniband/hw/hfi1/file_ops.c 	cinfo.send_ctxt = uctxt->sc->hw_context;
sc               1203 drivers/infiniband/hw/hfi1/file_ops.c 	hfi1_init_ctxt(uctxt->sc);
sc               1277 drivers/infiniband/hw/hfi1/file_ops.c 	offset = ((u64)uctxt->sc->hw_free -
sc               1283 drivers/infiniband/hw/hfi1/file_ops.c 					    uctxt->sc->base_addr);
sc               1287 drivers/infiniband/hw/hfi1/file_ops.c 						uctxt->sc->base_addr);
sc               1628 drivers/infiniband/hw/hfi1/file_ops.c 	struct send_context *sc;
sc               1632 drivers/infiniband/hw/hfi1/file_ops.c 	if (!uctxt || !uctxt->dd || !uctxt->sc)
sc               1642 drivers/infiniband/hw/hfi1/file_ops.c 	sc = uctxt->sc;
sc               1649 drivers/infiniband/hw/hfi1/file_ops.c 		sc->halt_wait, (sc->flags & SCF_HALTED),
sc               1651 drivers/infiniband/hw/hfi1/file_ops.c 	if (!(sc->flags & SCF_HALTED))
sc               1658 drivers/infiniband/hw/hfi1/file_ops.c 	if (sc->flags & SCF_FROZEN) {
sc               1673 drivers/infiniband/hw/hfi1/file_ops.c 		sc_disable(sc);
sc               1674 drivers/infiniband/hw/hfi1/file_ops.c 		ret = sc_enable(sc);
sc               1677 drivers/infiniband/hw/hfi1/file_ops.c 		ret = sc_restart(sc);
sc               1680 drivers/infiniband/hw/hfi1/file_ops.c 		sc_return_credits(sc);
sc                219 drivers/infiniband/hw/hfi1/hfi.h 	struct send_context *sc;
sc                390 drivers/infiniband/hw/hfi1/hfi.h 	u8 sc;
sc                997 drivers/infiniband/hw/hfi1/hfi.h 	struct send_context *sc;
sc               2502 drivers/infiniband/hw/hfi1/hfi.h 				     u8 sc)
sc               2513 drivers/infiniband/hw/hfi1/hfi.h 	lrh1 = (lrh1 & ~OPA_16B_SC_MASK) | (sc << OPA_16B_SC_SHIFT);
sc                159 drivers/infiniband/hw/hfi1/init.c 	rcd->sc = sc_alloc(dd, SC_ACK, rcd->rcvhdrqentsize, dd->node);
sc                160 drivers/infiniband/hw/hfi1/init.c 	if (!rcd->sc) {
sc                164 drivers/infiniband/hw/hfi1/init.c 	hfi1_init_ctxt(rcd->sc);
sc                754 drivers/infiniband/hw/hfi1/init.c 		sc_disable(dd->send_contexts[i].sc);
sc                788 drivers/infiniband/hw/hfi1/init.c 		sc_enable(rcd->sc);
sc               1095 drivers/infiniband/hw/hfi1/init.c 			sc_flush(dd->send_contexts[i].sc);
sc               1109 drivers/infiniband/hw/hfi1/init.c 			sc_disable(dd->send_contexts[i].sc);
sc               1175 drivers/infiniband/hw/hfi1/init.c 	sc_free(rcd->sc);
sc               1176 drivers/infiniband/hw/hfi1/init.c 	rcd->sc = NULL;
sc               1585 drivers/infiniband/hw/hfi1/init.c 		sc_free(dd->send_contexts[ctxt].sc);
sc               1858 drivers/infiniband/hw/hfi1/mad.c 	u8 sc;
sc               1866 drivers/infiniband/hw/hfi1/mad.c 		sc = *p++;
sc               1867 drivers/infiniband/hw/hfi1/mad.c 		if (ibp->sl_to_sc[i] != sc) {
sc               1868 drivers/infiniband/hw/hfi1/mad.c 			ibp->sl_to_sc[i] = sc;
sc                 57 drivers/infiniband/hw/hfi1/pio.c static void sc_wait_for_packet_egress(struct send_context *sc, int pause);
sc                561 drivers/infiniband/hw/hfi1/pio.c static void cr_group_addresses(struct send_context *sc, dma_addr_t *dma)
sc                563 drivers/infiniband/hw/hfi1/pio.c 	u32 gc = group_context(sc->hw_context, sc->group);
sc                564 drivers/infiniband/hw/hfi1/pio.c 	u32 index = sc->hw_context & 0x7;
sc                566 drivers/infiniband/hw/hfi1/pio.c 	sc->hw_free = &sc->dd->cr_base[sc->node].va[gc].cr[index];
sc                568 drivers/infiniband/hw/hfi1/pio.c 	       &((struct credit_return *)sc->dd->cr_base[sc->node].dma)[gc];
sc                577 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc;
sc                579 drivers/infiniband/hw/hfi1/pio.c 	sc = container_of(work, struct send_context, halt_work);
sc                580 drivers/infiniband/hw/hfi1/pio.c 	sc_restart(sc);
sc                593 drivers/infiniband/hw/hfi1/pio.c u32 sc_mtu_to_threshold(struct send_context *sc, u32 mtu, u32 hdrqentsize)
sc                603 drivers/infiniband/hw/hfi1/pio.c 	if (sc->credits <= release_credits)
sc                606 drivers/infiniband/hw/hfi1/pio.c 		threshold = sc->credits - release_credits;
sc                618 drivers/infiniband/hw/hfi1/pio.c u32 sc_percent_to_threshold(struct send_context *sc, u32 percent)
sc                620 drivers/infiniband/hw/hfi1/pio.c 	return (sc->credits * percent) / 100;
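Note: sc_percent_to_threshold and sc_mtu_to_threshold above are combined with min() by their callers (see the chip.c and sc_alloc entries) so the credit-return threshold is the smaller of a percentage of the context's credits and what an MTU-sized packet leaves free. A rough standalone sketch; the bytes-to-credits conversion and block size are assumptions, not taken from the listing:

    #include <stdio.h>

    #define BLOCK_SIZE 64u  /* assumed PIO block size for this sketch */

    static unsigned int min_u(unsigned int a, unsigned int b)
    {
            return a < b ? a : b;
    }

    /* Percentage-of-credits threshold, as in sc_percent_to_threshold(). */
    static unsigned int percent_to_threshold(unsigned int credits, unsigned int percent)
    {
            return (credits * percent) / 100;
    }

    /* Leave room for one MTU-sized packet; release_credits stands in for
     * the exact conversion, which is not visible in the listing. */
    static unsigned int mtu_to_threshold(unsigned int credits, unsigned int mtu,
                                         unsigned int hdr_bytes)
    {
            unsigned int release_credits =
                    (mtu + hdr_bytes + BLOCK_SIZE - 1) / BLOCK_SIZE;

            return credits <= release_credits ? 1 : credits - release_credits;
    }

    int main(void)
    {
            unsigned int credits = 160, mtu = 4096, hdr = 128;
            unsigned int thres = min_u(percent_to_threshold(credits, 50),
                                       mtu_to_threshold(credits, mtu, hdr));

            printf("%u\n", thres); /* prints 80 */
            return 0;
    }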
sc                626 drivers/infiniband/hw/hfi1/pio.c void sc_set_cr_threshold(struct send_context *sc, u32 new_threshold)
sc                632 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->credit_ctrl_lock, flags);
sc                634 drivers/infiniband/hw/hfi1/pio.c 	old_threshold = (sc->credit_ctrl >>
sc                639 drivers/infiniband/hw/hfi1/pio.c 		sc->credit_ctrl =
sc                640 drivers/infiniband/hw/hfi1/pio.c 			(sc->credit_ctrl
sc                645 drivers/infiniband/hw/hfi1/pio.c 		write_kctxt_csr(sc->dd, sc->hw_context,
sc                646 drivers/infiniband/hw/hfi1/pio.c 				SC(CREDIT_CTRL), sc->credit_ctrl);
sc                652 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->credit_ctrl_lock, flags);
sc                655 drivers/infiniband/hw/hfi1/pio.c 		sc_return_credits(sc);
sc                663 drivers/infiniband/hw/hfi1/pio.c void set_pio_integrity(struct send_context *sc)
sc                665 drivers/infiniband/hw/hfi1/pio.c 	struct hfi1_devdata *dd = sc->dd;
sc                666 drivers/infiniband/hw/hfi1/pio.c 	u32 hw_context = sc->hw_context;
sc                667 drivers/infiniband/hw/hfi1/pio.c 	int type = sc->type;
sc                674 drivers/infiniband/hw/hfi1/pio.c static u32 get_buffers_allocated(struct send_context *sc)
sc                680 drivers/infiniband/hw/hfi1/pio.c 		ret += *per_cpu_ptr(sc->buffers_allocated, cpu);
sc                684 drivers/infiniband/hw/hfi1/pio.c static void reset_buffers_allocated(struct send_context *sc)
sc                689 drivers/infiniband/hw/hfi1/pio.c 		(*per_cpu_ptr(sc->buffers_allocated, cpu)) = 0;
sc                700 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc = NULL;
sc                714 drivers/infiniband/hw/hfi1/pio.c 	sc = kzalloc_node(sizeof(*sc), GFP_KERNEL, numa);
sc                715 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc                718 drivers/infiniband/hw/hfi1/pio.c 	sc->buffers_allocated = alloc_percpu(u32);
sc                719 drivers/infiniband/hw/hfi1/pio.c 	if (!sc->buffers_allocated) {
sc                720 drivers/infiniband/hw/hfi1/pio.c 		kfree(sc);
sc                731 drivers/infiniband/hw/hfi1/pio.c 		free_percpu(sc->buffers_allocated);
sc                732 drivers/infiniband/hw/hfi1/pio.c 		kfree(sc);
sc                737 drivers/infiniband/hw/hfi1/pio.c 	sci->sc = sc;
sc                739 drivers/infiniband/hw/hfi1/pio.c 	sc->dd = dd;
sc                740 drivers/infiniband/hw/hfi1/pio.c 	sc->node = numa;
sc                741 drivers/infiniband/hw/hfi1/pio.c 	sc->type = type;
sc                742 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_init(&sc->alloc_lock);
sc                743 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_init(&sc->release_lock);
sc                744 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_init(&sc->credit_ctrl_lock);
sc                745 drivers/infiniband/hw/hfi1/pio.c 	seqlock_init(&sc->waitlock);
sc                746 drivers/infiniband/hw/hfi1/pio.c 	INIT_LIST_HEAD(&sc->piowait);
sc                747 drivers/infiniband/hw/hfi1/pio.c 	INIT_WORK(&sc->halt_work, sc_halted);
sc                748 drivers/infiniband/hw/hfi1/pio.c 	init_waitqueue_head(&sc->halt_wait);
sc                751 drivers/infiniband/hw/hfi1/pio.c 	sc->group = 0;
sc                753 drivers/infiniband/hw/hfi1/pio.c 	sc->sw_index = sw_index;
sc                754 drivers/infiniband/hw/hfi1/pio.c 	sc->hw_context = hw_context;
sc                755 drivers/infiniband/hw/hfi1/pio.c 	cr_group_addresses(sc, &dma);
sc                756 drivers/infiniband/hw/hfi1/pio.c 	sc->credits = sci->credits;
sc                757 drivers/infiniband/hw/hfi1/pio.c 	sc->size = sc->credits * PIO_BLOCK_SIZE;
sc                762 drivers/infiniband/hw/hfi1/pio.c 	sc->base_addr = dd->piobase + ((hw_context & PIO_ADDR_CONTEXT_MASK)
sc                772 drivers/infiniband/hw/hfi1/pio.c 	set_pio_integrity(sc);
sc                813 drivers/infiniband/hw/hfi1/pio.c 		thresh = sc_percent_to_threshold(sc, 50);
sc                815 drivers/infiniband/hw/hfi1/pio.c 		thresh = sc_percent_to_threshold(sc,
sc                818 drivers/infiniband/hw/hfi1/pio.c 		thresh = min(sc_percent_to_threshold(sc, 50),
sc                819 drivers/infiniband/hw/hfi1/pio.c 			     sc_mtu_to_threshold(sc, hfi1_max_mtu,
sc                830 drivers/infiniband/hw/hfi1/pio.c 	sc->credit_ctrl = reg;
sc                854 drivers/infiniband/hw/hfi1/pio.c 		sc->sr_size = sci->credits + 1;
sc                855 drivers/infiniband/hw/hfi1/pio.c 		sc->sr = kcalloc_node(sc->sr_size,
sc                858 drivers/infiniband/hw/hfi1/pio.c 		if (!sc->sr) {
sc                859 drivers/infiniband/hw/hfi1/pio.c 			sc_free(sc);
sc                869 drivers/infiniband/hw/hfi1/pio.c 		  sc->group,
sc                870 drivers/infiniband/hw/hfi1/pio.c 		  sc->credits,
sc                871 drivers/infiniband/hw/hfi1/pio.c 		  sc->credit_ctrl,
sc                874 drivers/infiniband/hw/hfi1/pio.c 	return sc;
sc                878 drivers/infiniband/hw/hfi1/pio.c void sc_free(struct send_context *sc)
sc                885 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc                888 drivers/infiniband/hw/hfi1/pio.c 	sc->flags |= SCF_IN_FREE;	/* ensure no restarts */
sc                889 drivers/infiniband/hw/hfi1/pio.c 	dd = sc->dd;
sc                890 drivers/infiniband/hw/hfi1/pio.c 	if (!list_empty(&sc->piowait))
sc                892 drivers/infiniband/hw/hfi1/pio.c 	sw_index = sc->sw_index;
sc                893 drivers/infiniband/hw/hfi1/pio.c 	hw_context = sc->hw_context;
sc                894 drivers/infiniband/hw/hfi1/pio.c 	sc_disable(sc);	/* make sure the HW is disabled */
sc                895 drivers/infiniband/hw/hfi1/pio.c 	flush_work(&sc->halt_work);
sc                898 drivers/infiniband/hw/hfi1/pio.c 	dd->send_contexts[sw_index].sc = NULL;
sc                913 drivers/infiniband/hw/hfi1/pio.c 	kfree(sc->sr);
sc                914 drivers/infiniband/hw/hfi1/pio.c 	free_percpu(sc->buffers_allocated);
sc                915 drivers/infiniband/hw/hfi1/pio.c 	kfree(sc);
sc                919 drivers/infiniband/hw/hfi1/pio.c void sc_disable(struct send_context *sc)
sc                924 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc                928 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irq(&sc->alloc_lock);
sc                929 drivers/infiniband/hw/hfi1/pio.c 	reg = read_kctxt_csr(sc->dd, sc->hw_context, SC(CTRL));
sc                931 drivers/infiniband/hw/hfi1/pio.c 	sc->flags &= ~SCF_ENABLED;
sc                932 drivers/infiniband/hw/hfi1/pio.c 	sc_wait_for_packet_egress(sc, 1);
sc                933 drivers/infiniband/hw/hfi1/pio.c 	write_kctxt_csr(sc->dd, sc->hw_context, SC(CTRL), reg);
sc                943 drivers/infiniband/hw/hfi1/pio.c 	spin_lock(&sc->release_lock);
sc                944 drivers/infiniband/hw/hfi1/pio.c 	if (sc->sr) {	/* this context has a shadow ring */
sc                945 drivers/infiniband/hw/hfi1/pio.c 		while (sc->sr_tail != sc->sr_head) {
sc                946 drivers/infiniband/hw/hfi1/pio.c 			pbuf = &sc->sr[sc->sr_tail].pbuf;
sc                949 drivers/infiniband/hw/hfi1/pio.c 			sc->sr_tail++;
sc                950 drivers/infiniband/hw/hfi1/pio.c 			if (sc->sr_tail >= sc->sr_size)
sc                951 drivers/infiniband/hw/hfi1/pio.c 				sc->sr_tail = 0;
sc                954 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock(&sc->release_lock);
sc                956 drivers/infiniband/hw/hfi1/pio.c 	write_seqlock(&sc->waitlock);
sc                957 drivers/infiniband/hw/hfi1/pio.c 	while (!list_empty(&sc->piowait)) {
sc                962 drivers/infiniband/hw/hfi1/pio.c 		wait = list_first_entry(&sc->piowait, struct iowait, list);
sc                969 drivers/infiniband/hw/hfi1/pio.c 	write_sequnlock(&sc->waitlock);
sc                971 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irq(&sc->alloc_lock);
sc               1009 drivers/infiniband/hw/hfi1/pio.c static void sc_wait_for_packet_egress(struct send_context *sc, int pause)
sc               1011 drivers/infiniband/hw/hfi1/pio.c 	struct hfi1_devdata *dd = sc->dd;
sc               1018 drivers/infiniband/hw/hfi1/pio.c 		reg = read_csr(dd, sc->hw_context * 8 +
sc               1021 drivers/infiniband/hw/hfi1/pio.c 		if (sc->flags & SCF_HALTED ||
sc               1022 drivers/infiniband/hw/hfi1/pio.c 		    is_sc_halted(dd, sc->hw_context) || egress_halted(reg))
sc               1034 drivers/infiniband/hw/hfi1/pio.c 				   __func__, sc->sw_index,
sc               1035 drivers/infiniband/hw/hfi1/pio.c 				   sc->hw_context, (u32)reg);
sc               1054 drivers/infiniband/hw/hfi1/pio.c 		struct send_context *sc = dd->send_contexts[i].sc;
sc               1056 drivers/infiniband/hw/hfi1/pio.c 		if (!sc)
sc               1058 drivers/infiniband/hw/hfi1/pio.c 		sc_wait_for_packet_egress(sc, 0);
sc               1071 drivers/infiniband/hw/hfi1/pio.c int sc_restart(struct send_context *sc)
sc               1073 drivers/infiniband/hw/hfi1/pio.c 	struct hfi1_devdata *dd = sc->dd;
sc               1079 drivers/infiniband/hw/hfi1/pio.c 	if (!(sc->flags & SCF_HALTED) || (sc->flags & SCF_IN_FREE))
sc               1082 drivers/infiniband/hw/hfi1/pio.c 	dd_dev_info(dd, "restarting send context %u(%u)\n", sc->sw_index,
sc               1083 drivers/infiniband/hw/hfi1/pio.c 		    sc->hw_context);
sc               1093 drivers/infiniband/hw/hfi1/pio.c 		reg = read_kctxt_csr(dd, sc->hw_context, SC(STATUS));
sc               1098 drivers/infiniband/hw/hfi1/pio.c 				   __func__, sc->sw_index, sc->hw_context);
sc               1115 drivers/infiniband/hw/hfi1/pio.c 	if (sc->type != SC_USER) {
sc               1119 drivers/infiniband/hw/hfi1/pio.c 			count = get_buffers_allocated(sc);
sc               1125 drivers/infiniband/hw/hfi1/pio.c 					   __func__, sc->sw_index,
sc               1126 drivers/infiniband/hw/hfi1/pio.c 					   sc->hw_context, count);
sc               1142 drivers/infiniband/hw/hfi1/pio.c 	sc_disable(sc);
sc               1150 drivers/infiniband/hw/hfi1/pio.c 	return sc_enable(sc);
sc               1160 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc;
sc               1164 drivers/infiniband/hw/hfi1/pio.c 		sc = dd->send_contexts[i].sc;
sc               1170 drivers/infiniband/hw/hfi1/pio.c 		if (!sc || !(sc->flags & SCF_FROZEN) || sc->type == SC_USER)
sc               1174 drivers/infiniband/hw/hfi1/pio.c 		sc_disable(sc);
sc               1187 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc;
sc               1191 drivers/infiniband/hw/hfi1/pio.c 		sc = dd->send_contexts[i].sc;
sc               1192 drivers/infiniband/hw/hfi1/pio.c 		if (!sc || !(sc->flags & SCF_FROZEN) || sc->type == SC_USER)
sc               1194 drivers/infiniband/hw/hfi1/pio.c 		if (sc->flags & SCF_LINK_DOWN)
sc               1197 drivers/infiniband/hw/hfi1/pio.c 		sc_enable(sc);	/* will clear the sc frozen flag */
sc               1215 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc;
sc               1219 drivers/infiniband/hw/hfi1/pio.c 		sc = dd->send_contexts[i].sc;
sc               1220 drivers/infiniband/hw/hfi1/pio.c 		if (!sc || !(sc->flags & SCF_LINK_DOWN) || sc->type == SC_USER)
sc               1223 drivers/infiniband/hw/hfi1/pio.c 		sc_enable(sc);	/* will clear the sc link down flag */
sc               1283 drivers/infiniband/hw/hfi1/pio.c int sc_enable(struct send_context *sc)
sc               1290 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc               1292 drivers/infiniband/hw/hfi1/pio.c 	dd = sc->dd;
sc               1301 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->alloc_lock, flags);
sc               1302 drivers/infiniband/hw/hfi1/pio.c 	sc_ctrl = read_kctxt_csr(dd, sc->hw_context, SC(CTRL));
sc               1308 drivers/infiniband/hw/hfi1/pio.c 	*sc->hw_free = 0;
sc               1309 drivers/infiniband/hw/hfi1/pio.c 	sc->free = 0;
sc               1310 drivers/infiniband/hw/hfi1/pio.c 	sc->alloc_free = 0;
sc               1311 drivers/infiniband/hw/hfi1/pio.c 	sc->fill = 0;
sc               1312 drivers/infiniband/hw/hfi1/pio.c 	sc->fill_wrap = 0;
sc               1313 drivers/infiniband/hw/hfi1/pio.c 	sc->sr_head = 0;
sc               1314 drivers/infiniband/hw/hfi1/pio.c 	sc->sr_tail = 0;
sc               1315 drivers/infiniband/hw/hfi1/pio.c 	sc->flags = 0;
sc               1317 drivers/infiniband/hw/hfi1/pio.c 	reset_buffers_allocated(sc);
sc               1325 drivers/infiniband/hw/hfi1/pio.c 	reg = read_kctxt_csr(dd, sc->hw_context, SC(ERR_STATUS));
sc               1327 drivers/infiniband/hw/hfi1/pio.c 		write_kctxt_csr(dd, sc->hw_context, SC(ERR_CLEAR), reg);
sc               1341 drivers/infiniband/hw/hfi1/pio.c 	pio = ((sc->hw_context & SEND_PIO_INIT_CTXT_PIO_CTXT_NUM_MASK) <<
sc               1355 drivers/infiniband/hw/hfi1/pio.c 			   sc->sw_index, sc->hw_context, ret);
sc               1363 drivers/infiniband/hw/hfi1/pio.c 	write_kctxt_csr(dd, sc->hw_context, SC(CTRL), sc_ctrl);
sc               1368 drivers/infiniband/hw/hfi1/pio.c 	read_kctxt_csr(dd, sc->hw_context, SC(CTRL));
sc               1369 drivers/infiniband/hw/hfi1/pio.c 	sc->flags |= SCF_ENABLED;
sc               1372 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->alloc_lock, flags);
sc               1378 drivers/infiniband/hw/hfi1/pio.c void sc_return_credits(struct send_context *sc)
sc               1380 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc               1384 drivers/infiniband/hw/hfi1/pio.c 	write_kctxt_csr(sc->dd, sc->hw_context, SC(CREDIT_FORCE),
sc               1390 drivers/infiniband/hw/hfi1/pio.c 	read_kctxt_csr(sc->dd, sc->hw_context, SC(CREDIT_FORCE));
sc               1392 drivers/infiniband/hw/hfi1/pio.c 	write_kctxt_csr(sc->dd, sc->hw_context, SC(CREDIT_FORCE), 0);
sc               1396 drivers/infiniband/hw/hfi1/pio.c void sc_flush(struct send_context *sc)
sc               1398 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc               1401 drivers/infiniband/hw/hfi1/pio.c 	sc_wait_for_packet_egress(sc, 1);
sc               1405 drivers/infiniband/hw/hfi1/pio.c void sc_drop(struct send_context *sc)
sc               1407 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc               1410 drivers/infiniband/hw/hfi1/pio.c 	dd_dev_info(sc->dd, "%s: context %u(%u) - not implemented\n",
sc               1411 drivers/infiniband/hw/hfi1/pio.c 		    __func__, sc->sw_index, sc->hw_context);
sc               1422 drivers/infiniband/hw/hfi1/pio.c void sc_stop(struct send_context *sc, int flag)
sc               1427 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->alloc_lock, flags);
sc               1429 drivers/infiniband/hw/hfi1/pio.c 	sc->flags |= flag;
sc               1430 drivers/infiniband/hw/hfi1/pio.c 	sc->flags &= ~SCF_ENABLED;
sc               1431 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->alloc_lock, flags);
sc               1432 drivers/infiniband/hw/hfi1/pio.c 	wake_up(&sc->halt_wait);
sc               1449 drivers/infiniband/hw/hfi1/pio.c struct pio_buf *sc_buffer_alloc(struct send_context *sc, u32 dw_len,
sc               1460 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->alloc_lock, flags);
sc               1461 drivers/infiniband/hw/hfi1/pio.c 	if (!(sc->flags & SCF_ENABLED)) {
sc               1462 drivers/infiniband/hw/hfi1/pio.c 		spin_unlock_irqrestore(&sc->alloc_lock, flags);
sc               1467 drivers/infiniband/hw/hfi1/pio.c 	avail = (unsigned long)sc->credits - (sc->fill - sc->alloc_free);
sc               1471 drivers/infiniband/hw/hfi1/pio.c 			spin_unlock_irqrestore(&sc->alloc_lock, flags);
sc               1475 drivers/infiniband/hw/hfi1/pio.c 		sc->alloc_free = READ_ONCE(sc->free);
sc               1477 drivers/infiniband/hw/hfi1/pio.c 			(unsigned long)sc->credits -
sc               1478 drivers/infiniband/hw/hfi1/pio.c 			(sc->fill - sc->alloc_free);
sc               1481 drivers/infiniband/hw/hfi1/pio.c 			sc_release_update(sc);
sc               1482 drivers/infiniband/hw/hfi1/pio.c 			sc->alloc_free = READ_ONCE(sc->free);
sc               1491 drivers/infiniband/hw/hfi1/pio.c 	this_cpu_inc(*sc->buffers_allocated);
sc               1494 drivers/infiniband/hw/hfi1/pio.c 	head = sc->sr_head;
sc               1497 drivers/infiniband/hw/hfi1/pio.c 	sc->fill += blocks;
sc               1498 drivers/infiniband/hw/hfi1/pio.c 	fill_wrap = sc->fill_wrap;
sc               1499 drivers/infiniband/hw/hfi1/pio.c 	sc->fill_wrap += blocks;
sc               1500 drivers/infiniband/hw/hfi1/pio.c 	if (sc->fill_wrap >= sc->credits)
sc               1501 drivers/infiniband/hw/hfi1/pio.c 		sc->fill_wrap = sc->fill_wrap - sc->credits;
sc               1510 drivers/infiniband/hw/hfi1/pio.c 	pbuf = &sc->sr[head].pbuf;
sc               1511 drivers/infiniband/hw/hfi1/pio.c 	pbuf->sent_at = sc->fill;
sc               1514 drivers/infiniband/hw/hfi1/pio.c 	pbuf->sc = sc;	/* could be filled in at sc->sr init time */
sc               1519 drivers/infiniband/hw/hfi1/pio.c 	if (next >= sc->sr_size)
sc               1526 drivers/infiniband/hw/hfi1/pio.c 	sc->sr_head = next;
sc               1527 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->alloc_lock, flags);
sc               1530 drivers/infiniband/hw/hfi1/pio.c 	pbuf->start = sc->base_addr + fill_wrap * PIO_BLOCK_SIZE;
sc               1531 drivers/infiniband/hw/hfi1/pio.c 	pbuf->end = sc->base_addr + sc->size;
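Note: sc_buffer_alloc above tracks occupancy with free-running counters: available credits are credits - (fill - alloc_free), and fill_wrap is the fill position reduced modulo credits so it can index the PIO memory. A small standalone sketch of that bookkeeping, with invented names:

    #include <stdint.h>
    #include <stdio.h>

    struct pio_ctx {
            uint64_t fill;       /* free-running count of blocks handed out */
            uint64_t alloc_free; /* cached copy of the released-blocks counter */
            uint32_t fill_wrap;  /* fill reduced modulo credits, indexes the buffer */
            uint32_t credits;    /* total blocks in the send context */
    };

    /* Return the block offset for a new allocation, or -1 if it does not fit. */
    static long alloc_blocks(struct pio_ctx *sc, uint32_t blocks)
    {
            uint64_t avail = sc->credits - (sc->fill - sc->alloc_free);
            uint32_t start;

            if (blocks > avail)
                    return -1;

            start = sc->fill_wrap;
            sc->fill += blocks;
            sc->fill_wrap += blocks;
            if (sc->fill_wrap >= sc->credits)
                    sc->fill_wrap -= sc->credits;
            return start;
    }

    int main(void)
    {
            struct pio_ctx sc = { .credits = 8 };
            long a = alloc_blocks(&sc, 5);
            long b = alloc_blocks(&sc, 5);           /* fails: only 3 credits left */

            sc.alloc_free = 5;                       /* pretend hardware released 5 */
            printf("%ld %ld %ld\n", a, b, alloc_blocks(&sc, 5)); /* 0 -1 5 */
            return 0;
    }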
sc               1550 drivers/infiniband/hw/hfi1/pio.c void sc_add_credit_return_intr(struct send_context *sc)
sc               1555 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->credit_ctrl_lock, flags);
sc               1556 drivers/infiniband/hw/hfi1/pio.c 	if (sc->credit_intr_count == 0) {
sc               1557 drivers/infiniband/hw/hfi1/pio.c 		sc->credit_ctrl |= SC(CREDIT_CTRL_CREDIT_INTR_SMASK);
sc               1558 drivers/infiniband/hw/hfi1/pio.c 		write_kctxt_csr(sc->dd, sc->hw_context,
sc               1559 drivers/infiniband/hw/hfi1/pio.c 				SC(CREDIT_CTRL), sc->credit_ctrl);
sc               1561 drivers/infiniband/hw/hfi1/pio.c 	sc->credit_intr_count++;
sc               1562 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->credit_ctrl_lock, flags);
sc               1569 drivers/infiniband/hw/hfi1/pio.c void sc_del_credit_return_intr(struct send_context *sc)
sc               1573 drivers/infiniband/hw/hfi1/pio.c 	WARN_ON(sc->credit_intr_count == 0);
sc               1576 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->credit_ctrl_lock, flags);
sc               1577 drivers/infiniband/hw/hfi1/pio.c 	sc->credit_intr_count--;
sc               1578 drivers/infiniband/hw/hfi1/pio.c 	if (sc->credit_intr_count == 0) {
sc               1579 drivers/infiniband/hw/hfi1/pio.c 		sc->credit_ctrl &= ~SC(CREDIT_CTRL_CREDIT_INTR_SMASK);
sc               1580 drivers/infiniband/hw/hfi1/pio.c 		write_kctxt_csr(sc->dd, sc->hw_context,
sc               1581 drivers/infiniband/hw/hfi1/pio.c 				SC(CREDIT_CTRL), sc->credit_ctrl);
sc               1583 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->credit_ctrl_lock, flags);
sc               1590 drivers/infiniband/hw/hfi1/pio.c void hfi1_sc_wantpiobuf_intr(struct send_context *sc, u32 needint)
sc               1593 drivers/infiniband/hw/hfi1/pio.c 		sc_add_credit_return_intr(sc);
sc               1595 drivers/infiniband/hw/hfi1/pio.c 		sc_del_credit_return_intr(sc);
sc               1596 drivers/infiniband/hw/hfi1/pio.c 	trace_hfi1_wantpiointr(sc, needint, sc->credit_ctrl);
sc               1598 drivers/infiniband/hw/hfi1/pio.c 		sc_return_credits(sc);
sc               1609 drivers/infiniband/hw/hfi1/pio.c static void sc_piobufavail(struct send_context *sc)
sc               1611 drivers/infiniband/hw/hfi1/pio.c 	struct hfi1_devdata *dd = sc->dd;
sc               1619 drivers/infiniband/hw/hfi1/pio.c 	if (dd->send_contexts[sc->sw_index].type != SC_KERNEL &&
sc               1620 drivers/infiniband/hw/hfi1/pio.c 	    dd->send_contexts[sc->sw_index].type != SC_VL15)
sc               1622 drivers/infiniband/hw/hfi1/pio.c 	list = &sc->piowait;
sc               1629 drivers/infiniband/hw/hfi1/pio.c 	write_seqlock_irqsave(&sc->waitlock, flags);
sc               1656 drivers/infiniband/hw/hfi1/pio.c 		hfi1_sc_wantpiobuf_intr(sc, 0);
sc               1658 drivers/infiniband/hw/hfi1/pio.c 			hfi1_sc_wantpiobuf_intr(sc, 1);
sc               1660 drivers/infiniband/hw/hfi1/pio.c 	write_sequnlock_irqrestore(&sc->waitlock, flags);
sc               1696 drivers/infiniband/hw/hfi1/pio.c void sc_release_update(struct send_context *sc)
sc               1707 drivers/infiniband/hw/hfi1/pio.c 	if (!sc)
sc               1710 drivers/infiniband/hw/hfi1/pio.c 	spin_lock_irqsave(&sc->release_lock, flags);
sc               1712 drivers/infiniband/hw/hfi1/pio.c 	hw_free = le64_to_cpu(*sc->hw_free);		/* volatile read */
sc               1713 drivers/infiniband/hw/hfi1/pio.c 	old_free = sc->free;
sc               1718 drivers/infiniband/hw/hfi1/pio.c 	trace_hfi1_piofree(sc, extra);
sc               1722 drivers/infiniband/hw/hfi1/pio.c 	head = READ_ONCE(sc->sr_head);	/* snapshot the head */
sc               1723 drivers/infiniband/hw/hfi1/pio.c 	tail = sc->sr_tail;
sc               1725 drivers/infiniband/hw/hfi1/pio.c 		pbuf = &sc->sr[tail].pbuf;
sc               1738 drivers/infiniband/hw/hfi1/pio.c 		if (tail >= sc->sr_size)
sc               1741 drivers/infiniband/hw/hfi1/pio.c 	sc->sr_tail = tail;
sc               1744 drivers/infiniband/hw/hfi1/pio.c 	sc->free = free;
sc               1745 drivers/infiniband/hw/hfi1/pio.c 	spin_unlock_irqrestore(&sc->release_lock, flags);
sc               1746 drivers/infiniband/hw/hfi1/pio.c 	sc_piobufavail(sc);
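
sc_release_update() above snapshots the producer head once (READ_ONCE(sc->sr_head)) and then walks sr_tail up to it, wrapping at sr_size. A minimal sketch of that snapshot-then-walk release loop over a fixed-size ring; the struct and the release_buf() helper are made up, not the hfi1 types:

#include <stdio.h>

#define RING_SIZE 8

struct ring {
	unsigned int head;   /* producer index, advanced elsewhere */
	unsigned int tail;   /* consumer index, advanced here */
	int buf[RING_SIZE];
};

/* stand-in for completing/releasing one queued buffer */
static void release_buf(int v)
{
	printf("released %d\n", v);
}

/* walk tail up to a snapshot of head, wrapping at the ring size */
static void release_update(struct ring *r)
{
	unsigned int head = r->head;   /* snapshot the head once */
	unsigned int tail = r->tail;

	while (tail != head) {
		release_buf(r->buf[tail]);
		tail++;
		if (tail >= RING_SIZE)
			tail = 0;
	}
	r->tail = tail;
}

int main(void)
{
	struct ring r = { .head = 3, .tail = 0, .buf = { 10, 11, 12 } };

	release_update(&r);   /* releases 10, 11, 12 */
	return 0;
}
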
sc               1760 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc;
sc               1771 drivers/infiniband/hw/hfi1/pio.c 	sc = dd->send_contexts[sw_index].sc;
sc               1772 drivers/infiniband/hw/hfi1/pio.c 	if (unlikely(!sc))
sc               1775 drivers/infiniband/hw/hfi1/pio.c 	gc = group_context(hw_context, sc->group);
sc               1776 drivers/infiniband/hw/hfi1/pio.c 	gc_end = gc + group_size(sc->group);
sc               1785 drivers/infiniband/hw/hfi1/pio.c 		sc_release_update(dd->send_contexts[sw_index].sc);
sc               1821 drivers/infiniband/hw/hfi1/pio.c 		return dd->vld[0].sc;
sc               1828 drivers/infiniband/hw/hfi1/pio.c 	rval = !rval ? dd->vld[0].sc : rval;
sc               2022 drivers/infiniband/hw/hfi1/pio.c 	dd->vld[15].sc = sc_alloc(dd, SC_VL15,
sc               2024 drivers/infiniband/hw/hfi1/pio.c 	if (!dd->vld[15].sc)
sc               2027 drivers/infiniband/hw/hfi1/pio.c 	hfi1_init_ctxt(dd->vld[15].sc);
sc               2036 drivers/infiniband/hw/hfi1/pio.c 	dd->kernel_send_context[0] = dd->vld[15].sc;
sc               2046 drivers/infiniband/hw/hfi1/pio.c 		dd->vld[i].sc = sc_alloc(dd, SC_KERNEL,
sc               2048 drivers/infiniband/hw/hfi1/pio.c 		if (!dd->vld[i].sc)
sc               2050 drivers/infiniband/hw/hfi1/pio.c 		dd->kernel_send_context[i + 1] = dd->vld[i].sc;
sc               2051 drivers/infiniband/hw/hfi1/pio.c 		hfi1_init_ctxt(dd->vld[i].sc);
sc               2063 drivers/infiniband/hw/hfi1/pio.c 	sc_enable(dd->vld[15].sc);
sc               2064 drivers/infiniband/hw/hfi1/pio.c 	ctxt = dd->vld[15].sc->hw_context;
sc               2069 drivers/infiniband/hw/hfi1/pio.c 		    dd->vld[15].sc->sw_index, ctxt);
sc               2072 drivers/infiniband/hw/hfi1/pio.c 		sc_enable(dd->vld[i].sc);
sc               2073 drivers/infiniband/hw/hfi1/pio.c 		ctxt = dd->vld[i].sc->hw_context;
sc               2090 drivers/infiniband/hw/hfi1/pio.c 		sc_free(dd->vld[i].sc);
sc               2091 drivers/infiniband/hw/hfi1/pio.c 		dd->vld[i].sc = NULL;
sc               2101 drivers/infiniband/hw/hfi1/pio.c 	sc_free(dd->vld[15].sc);
sc               2164 drivers/infiniband/hw/hfi1/pio.c 	struct send_context *sc = sci->sc;
sc               2170 drivers/infiniband/hw/hfi1/pio.c 		   sc->flags,  sc->sw_index, sc->hw_context, sc->group);
sc               2172 drivers/infiniband/hw/hfi1/pio.c 		   sc->sr_size, sc->credits, sc->sr_head, sc->sr_tail);
sc               2174 drivers/infiniband/hw/hfi1/pio.c 		   sc->fill, sc->free, sc->fill_wrap, sc->alloc_free);
sc               2176 drivers/infiniband/hw/hfi1/pio.c 		   sc->credit_intr_count, sc->credit_ctrl);
sc               2177 drivers/infiniband/hw/hfi1/pio.c 	reg = read_kctxt_csr(sc->dd, sc->hw_context, SC(CREDIT_STATUS));
sc               2179 drivers/infiniband/hw/hfi1/pio.c 		   (le64_to_cpu(*sc->hw_free) & CR_COUNTER_SMASK) >>
sc                 81 drivers/infiniband/hw/hfi1/pio.h 	struct send_context *sc;/* back pointer to owning send context */
sc                147 drivers/infiniband/hw/hfi1/pio.h 	struct send_context *sc;	/* allocated working context */
sc                286 drivers/infiniband/hw/hfi1/pio.h void sc_free(struct send_context *sc);
sc                287 drivers/infiniband/hw/hfi1/pio.h int sc_enable(struct send_context *sc);
sc                288 drivers/infiniband/hw/hfi1/pio.h void sc_disable(struct send_context *sc);
sc                289 drivers/infiniband/hw/hfi1/pio.h int sc_restart(struct send_context *sc);
sc                290 drivers/infiniband/hw/hfi1/pio.h void sc_return_credits(struct send_context *sc);
sc                291 drivers/infiniband/hw/hfi1/pio.h void sc_flush(struct send_context *sc);
sc                292 drivers/infiniband/hw/hfi1/pio.h void sc_drop(struct send_context *sc);
sc                293 drivers/infiniband/hw/hfi1/pio.h void sc_stop(struct send_context *sc, int bit);
sc                294 drivers/infiniband/hw/hfi1/pio.h struct pio_buf *sc_buffer_alloc(struct send_context *sc, u32 dw_len,
sc                296 drivers/infiniband/hw/hfi1/pio.h void sc_release_update(struct send_context *sc);
sc                297 drivers/infiniband/hw/hfi1/pio.h void sc_return_credits(struct send_context *sc);
sc                299 drivers/infiniband/hw/hfi1/pio.h void sc_add_credit_return_intr(struct send_context *sc);
sc                300 drivers/infiniband/hw/hfi1/pio.h void sc_del_credit_return_intr(struct send_context *sc);
sc                301 drivers/infiniband/hw/hfi1/pio.h void sc_set_cr_threshold(struct send_context *sc, u32 new_threshold);
sc                302 drivers/infiniband/hw/hfi1/pio.h u32 sc_percent_to_threshold(struct send_context *sc, u32 percent);
sc                303 drivers/infiniband/hw/hfi1/pio.h u32 sc_mtu_to_threshold(struct send_context *sc, u32 mtu, u32 hdrqentsize);
sc                304 drivers/infiniband/hw/hfi1/pio.h void hfi1_sc_wantpiobuf_intr(struct send_context *sc, u32 needint);
sc                306 drivers/infiniband/hw/hfi1/pio.h void set_pio_integrity(struct send_context *sc);
sc                132 drivers/infiniband/hw/hfi1/pio_copy.c 			dest -= pbuf->sc->size;
sc                133 drivers/infiniband/hw/hfi1/pio_copy.c 			dend -= pbuf->sc->size;
sc                164 drivers/infiniband/hw/hfi1/pio_copy.c 	this_cpu_dec(*pbuf->sc->buffers_allocated);
sc                364 drivers/infiniband/hw/hfi1/pio_copy.c 			dest -= pbuf->sc->size;
sc                365 drivers/infiniband/hw/hfi1/pio_copy.c 			dend -= pbuf->sc->size;
sc                461 drivers/infiniband/hw/hfi1/pio_copy.c 		dest -= pbuf->sc->size;
sc                462 drivers/infiniband/hw/hfi1/pio_copy.c 		dend -= pbuf->sc->size;
sc                495 drivers/infiniband/hw/hfi1/pio_copy.c 			dest -= pbuf->sc->size;
sc                587 drivers/infiniband/hw/hfi1/pio_copy.c 		dest -= pbuf->sc->size;
sc                588 drivers/infiniband/hw/hfi1/pio_copy.c 		dend -= pbuf->sc->size;
sc                669 drivers/infiniband/hw/hfi1/pio_copy.c 				dest -= pbuf->sc->size;
sc                722 drivers/infiniband/hw/hfi1/pio_copy.c 		dest -= pbuf->sc->size;
sc                755 drivers/infiniband/hw/hfi1/pio_copy.c 	this_cpu_dec(*pbuf->sc->buffers_allocated);
sc                222 drivers/infiniband/hw/hfi1/qp.c 	u8 sc;
sc                225 drivers/infiniband/hw/hfi1/qp.c 		sc = ah_to_sc(ibqp->device, &attr->ah_attr);
sc                226 drivers/infiniband/hw/hfi1/qp.c 		if (sc == 0xf)
sc                229 drivers/infiniband/hw/hfi1/qp.c 		if (!qp_to_sdma_engine(qp, sc) &&
sc                233 drivers/infiniband/hw/hfi1/qp.c 		if (!qp_to_send_context(qp, sc))
sc                238 drivers/infiniband/hw/hfi1/qp.c 		sc = ah_to_sc(ibqp->device, &attr->alt_ah_attr);
sc                239 drivers/infiniband/hw/hfi1/qp.c 		if (sc == 0xf)
sc                242 drivers/infiniband/hw/hfi1/qp.c 		if (!qp_to_sdma_engine(qp, sc) &&
sc                246 drivers/infiniband/hw/hfi1/qp.c 		if (!qp_to_send_context(qp, sc))
sc                623 drivers/infiniband/hw/hfi1/qp.c 		return dd->vld[15].sc;
sc                861 drivers/infiniband/hw/hfi1/qp.c 	u8 sc, vl;
sc                864 drivers/infiniband/hw/hfi1/qp.c 	sc = ibp->sl_to_sc[rdma_ah_get_sl(&qp->remote_ah_attr)];
sc                865 drivers/infiniband/hw/hfi1/qp.c 	vl = sc_to_vlt(dd, sc);
sc               1428 drivers/infiniband/hw/hfi1/rc.c 	pbuf = sc_buffer_alloc(rcd->sc, plen, NULL, NULL);
sc                213 drivers/infiniband/hw/hfi1/sysfs.c 		.sc = N \
sc                218 drivers/infiniband/hw/hfi1/sysfs.c 	int sc;
sc                299 drivers/infiniband/hw/hfi1/sysfs.c 	return sprintf(buf, "%u\n", *((u8 *)dd->sc2vl + sattr->sc));
sc                191 drivers/infiniband/hw/hfi1/trace.c 			     u8 *lnh, u8 *lver, u8 *sl, u8 *sc,
sc                197 drivers/infiniband/hw/hfi1/trace.c 	*sc = ib_get_sc(hdr) | (sc5 << 4);
sc                205 drivers/infiniband/hw/hfi1/trace.c 			      u8 *l4, u8 *rc, u8 *sc,
sc                214 drivers/infiniband/hw/hfi1/trace.c 	*sc = hfi1_16B_get_sc(hdr);
sc                229 drivers/infiniband/hw/hfi1/trace.c 			       u8 rc, u8 sc, u8 sl, u16 entropy,
sc                234 drivers/infiniband/hw/hfi1/trace.c 	trace_seq_printf(p, LRH_PRN, len, sc, dlid, slid);
sc                238 drivers/infiniband/hw/hfi1/trace.c 				 age, becn, fecn, l4, rc, sc, pkey, entropy);
sc                 80 drivers/infiniband/hw/hfi1/trace_ctxts.h 			   __entry->credits = uctxt->sc->credits;
sc                 81 drivers/infiniband/hw/hfi1/trace_ctxts.h 			   __entry->hw_free = le64_to_cpu(*uctxt->sc->hw_free);
sc                 82 drivers/infiniband/hw/hfi1/trace_ctxts.h 			   __entry->piobase = uctxt->sc->base_addr;
sc                120 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 			     u8 *lnh, u8 *lver, u8 *sl, u8 *sc,
sc                128 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 			      u8 *l4, u8 *rc, u8 *sc,
sc                135 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 			       u8 rc, u8 sc, u8 sl, u16 entropy,
sc                175 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 			__field(u8, sc)
sc                206 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 							 &__entry->sc,
sc                234 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 							&__entry->sc,
sc                275 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 						 __entry->sc,
sc                334 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 			__field(u8, sc)
sc                366 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 							 &__entry->sc,
sc                399 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 							&__entry->sc,
sc                439 drivers/infiniband/hw/hfi1/trace_ibhdrs.h 						 __entry->sc,
sc                 65 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_PROTO(struct send_context *sc, int extra),
sc                 66 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_ARGS(sc, extra),
sc                 67 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_STRUCT__entry(DD_DEV_ENTRY(sc->dd)
sc                 72 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_fast_assign(DD_DEV_ASSIGN(sc->dd);
sc                 73 drivers/infiniband/hw/hfi1/trace_tx.h 	    __entry->sw_index = sc->sw_index;
sc                 74 drivers/infiniband/hw/hfi1/trace_tx.h 	    __entry->hw_context = sc->hw_context;
sc                 86 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_PROTO(struct send_context *sc, u32 needint, u64 credit_ctrl),
sc                 87 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_ARGS(sc, needint, credit_ctrl),
sc                 88 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_STRUCT__entry(DD_DEV_ENTRY(sc->dd)
sc                 94 drivers/infiniband/hw/hfi1/trace_tx.h 	    TP_fast_assign(DD_DEV_ASSIGN(sc->dd);
sc                 95 drivers/infiniband/hw/hfi1/trace_tx.h 			__entry->sw_index = sc->sw_index;
sc                 96 drivers/infiniband/hw/hfi1/trace_tx.h 			__entry->hw_context = sc->hw_context;
sc                866 drivers/infiniband/hw/hfi1/ud.c 	u8 sc5 = packet->sc;
sc                356 drivers/infiniband/hw/hfi1/user_sdma.c 	u8 opcode, sc, vl;
sc                481 drivers/infiniband/hw/hfi1/user_sdma.c 	sc = (((be16_to_cpu(req->hdr.lrh[0]) >> 12) & 0xF) |
sc                484 drivers/infiniband/hw/hfi1/user_sdma.c 	    vl != sc_to_vlt(dd, sc)) {
sc                485 drivers/infiniband/hw/hfi1/user_sdma.c 		SDMA_DBG(req, "Invalid SC(%u)/VL(%u)", sc, vl);
sc                493 drivers/infiniband/hw/hfi1/user_sdma.c 	if (egress_pkey_check(dd->pport, slid, pkey, sc, PKEY_CHECK_INVALID)) {
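
The user_sdma.c lines above rebuild a 5-bit service class from a 4-bit field in a byte-swapped LRH word plus one extra bit carried elsewhere, then sanity-check it against the VL. A small sketch of that style of bit-field extraction; the shift/mask names and the fifth-bit source here are invented, not the real header layout:

#include <stdint.h>
#include <stdio.h>

/* hypothetical layout: low nibble of the SC in bits 12..15 of a
 * 16-bit header word, fifth bit carried separately */
#define SC_LOW_SHIFT 12
#define SC_LOW_MASK  0xF

/* unconditional byte swap; stands in for be16_to_cpu() on a
 * little-endian host */
static uint16_t swap16(uint16_t v)
{
	return (uint16_t)((v >> 8) | (v << 8));
}

static uint8_t extract_sc(uint16_t lrh0_wire, uint8_t sc4_bit)
{
	uint8_t sc = (swap16(lrh0_wire) >> SC_LOW_SHIFT) & SC_LOW_MASK;

	return (uint8_t)(sc | ((sc4_bit & 1) << 4));   /* 5-bit result */
}

int main(void)
{
	uint16_t lrh0_wire = 0x0050;   /* swaps to 0x5000: low SC nibble = 5 */

	printf("sc = 0x%x\n", extract_sc(lrh0_wire, 1));   /* prints 0x15 */
	return 0;
}
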
sc                143 drivers/infiniband/hw/hfi1/verbs.c 		    struct send_context *sc,
sc                489 drivers/infiniband/hw/hfi1/verbs.c 	return ingress_pkey_check(ppd, pkey, packet->sc,
sc                923 drivers/infiniband/hw/hfi1/verbs.c 		    struct send_context *sc,
sc                928 drivers/infiniband/hw/hfi1/verbs.c 	struct hfi1_devdata *dd = sc->dd;
sc                940 drivers/infiniband/hw/hfi1/verbs.c 		write_seqlock(&sc->waitlock);
sc                950 drivers/infiniband/hw/hfi1/verbs.c 			was_empty = list_empty(&sc->piowait);
sc                953 drivers/infiniband/hw/hfi1/verbs.c 				     &sc->piowait);
sc                954 drivers/infiniband/hw/hfi1/verbs.c 			priv->s_iowait.lock = &sc->waitlock;
sc                959 drivers/infiniband/hw/hfi1/verbs.c 				hfi1_sc_wantpiobuf_intr(sc, 1);
sc                961 drivers/infiniband/hw/hfi1/verbs.c 		write_sequnlock(&sc->waitlock);
sc                991 drivers/infiniband/hw/hfi1/verbs.c 	struct send_context *sc;
sc               1022 drivers/infiniband/hw/hfi1/verbs.c 	sc = ps->s_txreq->psc;
sc               1042 drivers/infiniband/hw/hfi1/verbs.c 	pbuf = sc_buffer_alloc(sc, plen, cb, qp);
sc               1065 drivers/infiniband/hw/hfi1/verbs.c 			ret = pio_wait(qp, sc, ps, RVT_S_WAIT_PIO);
sc                190 drivers/infiniband/ulp/iser/iscsi_iser.c 	const bool mgmt_task = !task->sc && !in_interrupt();
sc                247 drivers/infiniband/ulp/iser/iscsi_iser.c 	if (!task->sc)
sc                252 drivers/infiniband/ulp/iser/iscsi_iser.c 	iser_task->sc = task->sc;
sc                329 drivers/infiniband/ulp/iser/iscsi_iser.c 	if (!task->sc)
sc                332 drivers/infiniband/ulp/iser/iscsi_iser.c 	if (task->sc->sc_data_direction == DMA_TO_DEVICE) {
sc                333 drivers/infiniband/ulp/iser/iscsi_iser.c 		BUG_ON(scsi_bufflen(task->sc) == 0);
sc                336 drivers/infiniband/ulp/iser/iscsi_iser.c 			   task->itt, scsi_bufflen(task->sc),
sc                385 drivers/infiniband/ulp/iser/iscsi_iser.c 	if (!task->sc)
sc                513 drivers/infiniband/ulp/iser/iscsi_iser.h 	struct scsi_cmnd	     *sc;
sc                 64 drivers/infiniband/ulp/iser/iser_initiator.c 	if (scsi_prot_sg_count(iser_task->sc)) {
sc                118 drivers/infiniband/ulp/iser/iser_initiator.c 	if (scsi_prot_sg_count(iser_task->sc)) {
sc                371 drivers/infiniband/ulp/iser/iser_initiator.c 	struct scsi_cmnd *sc  =  task->sc;
sc                390 drivers/infiniband/ulp/iser/iser_initiator.c 	if (scsi_sg_count(sc)) { /* using a scatter list */
sc                391 drivers/infiniband/ulp/iser/iser_initiator.c 		data_buf->sg = scsi_sglist(sc);
sc                392 drivers/infiniband/ulp/iser/iser_initiator.c 		data_buf->size = scsi_sg_count(sc);
sc                394 drivers/infiniband/ulp/iser/iser_initiator.c 	data_buf->data_len = scsi_bufflen(sc);
sc                396 drivers/infiniband/ulp/iser/iser_initiator.c 	if (scsi_prot_sg_count(sc)) {
sc                397 drivers/infiniband/ulp/iser/iser_initiator.c 		prot_buf->sg  = scsi_prot_sglist(sc);
sc                398 drivers/infiniband/ulp/iser/iser_initiator.c 		prot_buf->size = scsi_prot_sg_count(sc);
sc                400 drivers/infiniband/ulp/iser/iser_initiator.c 				     ilog2(sc->device->sector_size)) * 8;
sc                763 drivers/infiniband/ulp/iser/iser_initiator.c 	int prot_count = scsi_prot_sg_count(iser_task->sc);
sc                305 drivers/infiniband/ulp/iser/iser_memory.c iser_set_dif_domain(struct scsi_cmnd *sc, struct ib_sig_domain *domain)
sc                308 drivers/infiniband/ulp/iser/iser_memory.c 	domain->sig.dif.pi_interval = scsi_prot_interval(sc);
sc                309 drivers/infiniband/ulp/iser/iser_memory.c 	domain->sig.dif.ref_tag = t10_pi_ref_tag(sc->request);
sc                317 drivers/infiniband/ulp/iser/iser_memory.c 	if (sc->prot_flags & SCSI_PROT_REF_INCREMENT)
sc                322 drivers/infiniband/ulp/iser/iser_memory.c iser_set_sig_attrs(struct scsi_cmnd *sc, struct ib_sig_attrs *sig_attrs)
sc                324 drivers/infiniband/ulp/iser/iser_memory.c 	switch (scsi_get_prot_op(sc)) {
sc                328 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->wire);
sc                334 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->mem);
sc                335 drivers/infiniband/ulp/iser/iser_memory.c 		sig_attrs->mem.sig.dif.bg_type = sc->prot_flags & SCSI_PROT_IP_CHECKSUM ?
sc                340 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->wire);
sc                342 drivers/infiniband/ulp/iser/iser_memory.c 		iser_set_dif_domain(sc, &sig_attrs->mem);
sc                343 drivers/infiniband/ulp/iser/iser_memory.c 		sig_attrs->mem.sig.dif.bg_type = sc->prot_flags & SCSI_PROT_IP_CHECKSUM ?
sc                348 drivers/infiniband/ulp/iser/iser_memory.c 			 scsi_get_prot_op(sc));
sc                356 drivers/infiniband/ulp/iser/iser_memory.c iser_set_prot_checks(struct scsi_cmnd *sc, u8 *mask)
sc                359 drivers/infiniband/ulp/iser/iser_memory.c 	if (sc->prot_flags & SCSI_PROT_REF_CHECK)
sc                361 drivers/infiniband/ulp/iser/iser_memory.c 	if (sc->prot_flags & SCSI_PROT_GUARD_CHECK)
sc                394 drivers/infiniband/ulp/iser/iser_memory.c 	ret = iser_set_sig_attrs(iser_task->sc, sig_attrs);
sc                398 drivers/infiniband/ulp/iser/iser_memory.c 	iser_set_prot_checks(iser_task->sc, &sig_attrs->check_mask);
sc                513 drivers/infiniband/ulp/iser/iser_memory.c 		      scsi_get_prot_op(task->sc) == SCSI_PROT_NORMAL;
sc                520 drivers/infiniband/ulp/iser/iser_memory.c 	if (scsi_get_prot_op(task->sc) == SCSI_PROT_NORMAL) {
sc               1075 drivers/infiniband/ulp/iser/iser_verbs.c 	unsigned long sector_size = iser_task->sc->device->sector_size;
sc               1094 drivers/infiniband/ulp/iser/iser_verbs.c 			*sector = scsi_get_lba(iser_task->sc) + sector_off;
sc                 74 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 					u16 pkey, u16 entropy, u8 sc, u8 rc,
sc                 93 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	h[1] |= (sc << OPA_16B_SC_SHFT);
sc                368 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	u8 sc;
sc                374 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 			sc = info->vport.pcp_to_sc_mc[pcp];
sc                376 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 			sc = info->vport.pcp_to_sc_uc[pcp];
sc                379 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 			sc = info->vport.non_vlan_sc_mc;
sc                381 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 			sc = info->vport.non_vlan_sc_uc;
sc                384 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	return sc;
sc                487 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	u8 def_port, sc, rc, entropy, *hdr;
sc                497 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	sc = opa_vnic_get_sc(info, skb);
sc                511 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 			     info->vesw.pkey, entropy, sc, rc,
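
opa_vnic_get_sc() above chooses the service class from per-PCP tables for VLAN-tagged frames and from per-cast-type defaults for untagged ones. A tiny sketch of that table-versus-default selection, with invented field names:

#include <stdint.h>
#include <stdio.h>

struct vport_cfg {
	uint8_t pcp_to_sc_uc[8];   /* unicast, indexed by VLAN priority */
	uint8_t pcp_to_sc_mc[8];   /* multicast */
	uint8_t non_vlan_sc_uc;    /* defaults for untagged frames */
	uint8_t non_vlan_sc_mc;
};

static uint8_t get_sc(const struct vport_cfg *v, int has_vlan, int is_mcast,
		      uint8_t pcp)
{
	if (has_vlan)
		return is_mcast ? v->pcp_to_sc_mc[pcp & 7]
				: v->pcp_to_sc_uc[pcp & 7];
	return is_mcast ? v->non_vlan_sc_mc : v->non_vlan_sc_uc;
}

int main(void)
{
	struct vport_cfg v = {
		.pcp_to_sc_uc = { 0, 1, 2, 3, 4, 5, 6, 7 },
		.pcp_to_sc_mc = { 8, 8, 8, 8, 8, 8, 8, 8 },
		.non_vlan_sc_uc = 0,
		.non_vlan_sc_mc = 8,
	};

	printf("tagged uc pcp=5 -> sc %u\n", get_sc(&v, 1, 0, 5));
	printf("untagged mc      -> sc %u\n", get_sc(&v, 0, 1, 0));
	return 0;
}
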
sc                 36 drivers/input/tablet/hanwang.c #define HANWANG_TABLET_DEVICE(vend, cl, sc, pr) \
sc                 41 drivers/input/tablet/hanwang.c 	.bInterfaceSubClass = (sc), \
sc                204 drivers/isdn/capi/capiutil.c static unsigned command_2_index(u8 c, u8 sc)
sc                212 drivers/isdn/capi/capiutil.c 	return (sc & 3) * (0x9 + 0x9) + c;
sc                105 drivers/isdn/hardware/mISDN/mISDNinfineon.c 	struct inf_hw		*sc[3];	/* slave cards */
sc                874 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			if (card->sc[i])
sc                875 drivers/isdn/hardware/mISDN/mISDNinfineon.c 				release_card(card->sc[i]);
sc                876 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			card->sc[i] = NULL;
sc               1102 drivers/isdn/hardware/mISDN/mISDNinfineon.c 		struct inf_hw *sc;
sc               1105 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			sc = kzalloc(sizeof(struct inf_hw), GFP_KERNEL);
sc               1106 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			if (!sc) {
sc               1111 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			sc->irq = card->irq;
sc               1112 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			sc->pdev = card->pdev;
sc               1113 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			sc->ci = card->ci + i;
sc               1114 drivers/isdn/hardware/mISDN/mISDNinfineon.c 			err = setup_instance(sc);
sc               1117 drivers/isdn/hardware/mISDN/mISDNinfineon.c 				kfree(sc);
sc               1121 drivers/isdn/hardware/mISDN/mISDNinfineon.c 				card->sc[i - 1] = sc;
sc                698 drivers/md/bcache/btree.c 				  struct shrink_control *sc)
sc                702 drivers/md/bcache/btree.c 	unsigned long i, nr = sc->nr_to_scan;
sc                713 drivers/md/bcache/btree.c 	if (sc->gfp_mask & __GFP_IO)
sc                767 drivers/md/bcache/btree.c 				   struct shrink_control *sc)
sc               1919 drivers/md/bcache/super.c 			struct shrink_control sc;
sc               1921 drivers/md/bcache/super.c 			sc.gfp_mask = GFP_KERNEL;
sc               1922 drivers/md/bcache/super.c 			sc.nr_to_scan = c->btree_cache_used * c->btree_pages;
sc               1924 drivers/md/bcache/super.c 			c->shrink.scan_objects(&c->shrink, &sc);
sc               1926 drivers/md/bcache/super.c 			c->shrink.scan_objects(&c->shrink, &sc);
sc                830 drivers/md/bcache/sysfs.c 		struct shrink_control sc;
sc                832 drivers/md/bcache/sysfs.c 		sc.gfp_mask = GFP_KERNEL;
sc                833 drivers/md/bcache/sysfs.c 		sc.nr_to_scan = strtoul_or_return(buf);
sc                834 drivers/md/bcache/sysfs.c 		c->shrink.scan_objects(&c->shrink, &sc);
sc               1575 drivers/md/dm-bufio.c dm_bufio_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc               1581 drivers/md/dm-bufio.c 	if (sc->gfp_mask & __GFP_FS)
sc               1586 drivers/md/dm-bufio.c 	freed  = __scan(c, sc->nr_to_scan, sc->gfp_mask);
sc               1592 drivers/md/dm-bufio.c dm_bufio_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
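
The bcache and dm-bufio lines above use struct shrink_control the same way: a count callback reports how much could be reclaimed, and a scan callback frees up to sc->nr_to_scan objects while honouring the gfp mask. A toy userspace model of that count/scan split; the struct and callbacks below are stand-ins, not the kernel shrinker API:

#include <stdio.h>

/* stand-in for struct shrink_control */
struct shrink_ctl {
	unsigned long nr_to_scan;   /* how many objects the caller wants freed */
	unsigned int  may_block;    /* stands in for the gfp_mask checks */
};

struct cache {
	unsigned long nr_cached;
};

/* "count_objects": report what could be reclaimed */
static unsigned long cache_count(struct cache *c, struct shrink_ctl *sc)
{
	(void)sc;
	return c->nr_cached;
}

/* "scan_objects": free at most nr_to_scan, bail out if we must not block */
static unsigned long cache_scan(struct cache *c, struct shrink_ctl *sc)
{
	unsigned long freed = 0;

	if (!sc->may_block)
		return 0;

	while (freed < sc->nr_to_scan && c->nr_cached > 0) {
		c->nr_cached--;
		freed++;
	}
	return freed;
}

int main(void)
{
	struct cache c = { .nr_cached = 100 };
	struct shrink_ctl sc = { .nr_to_scan = 32, .may_block = 1 };

	printf("count=%lu\n", cache_count(&c, &sc));
	printf("freed=%lu left=%lu\n", cache_scan(&c, &sc), c.nr_cached);
	return 0;
}
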
sc                 53 drivers/md/dm-stripe.c 	struct stripe_c *sc = container_of(work, struct stripe_c,
sc                 55 drivers/md/dm-stripe.c 	dm_table_event(sc->ti->table);
sc                 74 drivers/md/dm-stripe.c static int get_stripe(struct dm_target *ti, struct stripe_c *sc,
sc                 85 drivers/md/dm-stripe.c 			    &sc->stripe[stripe].dev);
sc                 89 drivers/md/dm-stripe.c 	sc->stripe[stripe].physical_start = start;
sc                100 drivers/md/dm-stripe.c 	struct stripe_c *sc;
sc                145 drivers/md/dm-stripe.c 	sc = alloc_context(stripes);
sc                146 drivers/md/dm-stripe.c 	if (!sc) {
sc                152 drivers/md/dm-stripe.c 	INIT_WORK(&sc->trigger_event, trigger_event);
sc                155 drivers/md/dm-stripe.c 	sc->ti = ti;
sc                156 drivers/md/dm-stripe.c 	sc->stripes = stripes;
sc                157 drivers/md/dm-stripe.c 	sc->stripe_width = width;
sc                160 drivers/md/dm-stripe.c 		sc->stripes_shift = -1;
sc                162 drivers/md/dm-stripe.c 		sc->stripes_shift = __ffs(stripes);
sc                166 drivers/md/dm-stripe.c 		kfree(sc);
sc                176 drivers/md/dm-stripe.c 	sc->chunk_size = chunk_size;
sc                178 drivers/md/dm-stripe.c 		sc->chunk_size_shift = -1;
sc                180 drivers/md/dm-stripe.c 		sc->chunk_size_shift = __ffs(chunk_size);
sc                188 drivers/md/dm-stripe.c 		r = get_stripe(ti, sc, i, argv);
sc                192 drivers/md/dm-stripe.c 				dm_put_device(ti, sc->stripe[i].dev);
sc                193 drivers/md/dm-stripe.c 			kfree(sc);
sc                196 drivers/md/dm-stripe.c 		atomic_set(&(sc->stripe[i].error_count), 0);
sc                199 drivers/md/dm-stripe.c 	ti->private = sc;
sc                207 drivers/md/dm-stripe.c 	struct stripe_c *sc = (struct stripe_c *) ti->private;
sc                209 drivers/md/dm-stripe.c 	for (i = 0; i < sc->stripes; i++)
sc                210 drivers/md/dm-stripe.c 		dm_put_device(ti, sc->stripe[i].dev);
sc                212 drivers/md/dm-stripe.c 	flush_work(&sc->trigger_event);
sc                213 drivers/md/dm-stripe.c 	kfree(sc);
sc                216 drivers/md/dm-stripe.c static void stripe_map_sector(struct stripe_c *sc, sector_t sector,
sc                219 drivers/md/dm-stripe.c 	sector_t chunk = dm_target_offset(sc->ti, sector);
sc                222 drivers/md/dm-stripe.c 	if (sc->chunk_size_shift < 0)
sc                223 drivers/md/dm-stripe.c 		chunk_offset = sector_div(chunk, sc->chunk_size);
sc                225 drivers/md/dm-stripe.c 		chunk_offset = chunk & (sc->chunk_size - 1);
sc                226 drivers/md/dm-stripe.c 		chunk >>= sc->chunk_size_shift;
sc                229 drivers/md/dm-stripe.c 	if (sc->stripes_shift < 0)
sc                230 drivers/md/dm-stripe.c 		*stripe = sector_div(chunk, sc->stripes);
sc                232 drivers/md/dm-stripe.c 		*stripe = chunk & (sc->stripes - 1);
sc                233 drivers/md/dm-stripe.c 		chunk >>= sc->stripes_shift;
sc                236 drivers/md/dm-stripe.c 	if (sc->chunk_size_shift < 0)
sc                237 drivers/md/dm-stripe.c 		chunk *= sc->chunk_size;
sc                239 drivers/md/dm-stripe.c 		chunk <<= sc->chunk_size_shift;
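
stripe_map_sector() above splits a target-relative sector into a stripe index and a sector on that stripe, with shift/mask fast paths when the chunk size and stripe count are powers of two. A standalone sketch of the same arithmetic (divide path only, simplified types):

#include <stdio.h>
#include <stdint.h>

struct striped {
	uint32_t stripes;     /* number of backing devices */
	uint32_t chunk_size;  /* chunk size in sectors */
};

/* map a target-relative sector to (stripe, sector within that stripe) */
static void map_sector(const struct striped *s, uint64_t sector,
		       uint32_t *stripe, uint64_t *dev_sector)
{
	uint64_t chunk = sector / s->chunk_size;
	uint32_t chunk_offset = (uint32_t)(sector % s->chunk_size);

	*stripe = (uint32_t)(chunk % s->stripes);   /* which device */
	chunk /= s->stripes;                        /* chunk index on it */
	*dev_sector = chunk * s->chunk_size + chunk_offset;
}

int main(void)
{
	struct striped s = { .stripes = 4, .chunk_size = 8 };
	uint32_t stripe;
	uint64_t dev_sector;

	map_sector(&s, 100, &stripe, &dev_sector);
	/* 100 = chunk 12, offset 4 -> stripe 0, chunk 3 -> sector 28 */
	printf("stripe=%u dev_sector=%llu\n", stripe,
	       (unsigned long long)dev_sector);
	return 0;
}
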
sc                244 drivers/md/dm-stripe.c static void stripe_map_range_sector(struct stripe_c *sc, sector_t sector,
sc                249 drivers/md/dm-stripe.c 	stripe_map_sector(sc, sector, &stripe, result);
sc                255 drivers/md/dm-stripe.c 	if (sc->chunk_size_shift < 0)
sc                256 drivers/md/dm-stripe.c 		*result -= sector_div(sector, sc->chunk_size);
sc                258 drivers/md/dm-stripe.c 		*result = sector & ~(sector_t)(sc->chunk_size - 1);
sc                261 drivers/md/dm-stripe.c 		*result += sc->chunk_size;		/* next chunk */
sc                264 drivers/md/dm-stripe.c static int stripe_map_range(struct stripe_c *sc, struct bio *bio,
sc                269 drivers/md/dm-stripe.c 	stripe_map_range_sector(sc, bio->bi_iter.bi_sector,
sc                271 drivers/md/dm-stripe.c 	stripe_map_range_sector(sc, bio_end_sector(bio),
sc                274 drivers/md/dm-stripe.c 		bio_set_dev(bio, sc->stripe[target_stripe].dev->bdev);
sc                276 drivers/md/dm-stripe.c 			sc->stripe[target_stripe].physical_start;
sc                288 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                294 drivers/md/dm-stripe.c 		BUG_ON(target_bio_nr >= sc->stripes);
sc                295 drivers/md/dm-stripe.c 		bio_set_dev(bio, sc->stripe[target_bio_nr].dev->bdev);
sc                303 drivers/md/dm-stripe.c 		BUG_ON(target_bio_nr >= sc->stripes);
sc                304 drivers/md/dm-stripe.c 		return stripe_map_range(sc, bio, target_bio_nr);
sc                307 drivers/md/dm-stripe.c 	stripe_map_sector(sc, bio->bi_iter.bi_sector,
sc                310 drivers/md/dm-stripe.c 	bio->bi_iter.bi_sector += sc->stripe[stripe].physical_start;
sc                311 drivers/md/dm-stripe.c 	bio_set_dev(bio, sc->stripe[stripe].dev->bdev);
sc                321 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                327 drivers/md/dm-stripe.c 	stripe_map_sector(sc, sector, &stripe, &dev_sector);
sc                328 drivers/md/dm-stripe.c 	dev_sector += sc->stripe[stripe].physical_start;
sc                329 drivers/md/dm-stripe.c 	dax_dev = sc->stripe[stripe].dev->dax_dev;
sc                330 drivers/md/dm-stripe.c 	bdev = sc->stripe[stripe].dev->bdev;
sc                342 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                347 drivers/md/dm-stripe.c 	stripe_map_sector(sc, sector, &stripe, &dev_sector);
sc                348 drivers/md/dm-stripe.c 	dev_sector += sc->stripe[stripe].physical_start;
sc                349 drivers/md/dm-stripe.c 	dax_dev = sc->stripe[stripe].dev->dax_dev;
sc                350 drivers/md/dm-stripe.c 	bdev = sc->stripe[stripe].dev->bdev;
sc                361 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                366 drivers/md/dm-stripe.c 	stripe_map_sector(sc, sector, &stripe, &dev_sector);
sc                367 drivers/md/dm-stripe.c 	dev_sector += sc->stripe[stripe].physical_start;
sc                368 drivers/md/dm-stripe.c 	dax_dev = sc->stripe[stripe].dev->dax_dev;
sc                369 drivers/md/dm-stripe.c 	bdev = sc->stripe[stripe].dev->bdev;
sc                398 drivers/md/dm-stripe.c 	struct stripe_c *sc = (struct stripe_c *) ti->private;
sc                404 drivers/md/dm-stripe.c 		DMEMIT("%d ", sc->stripes);
sc                405 drivers/md/dm-stripe.c 		for (i = 0; i < sc->stripes; i++)  {
sc                406 drivers/md/dm-stripe.c 			DMEMIT("%s ", sc->stripe[i].dev->name);
sc                409 drivers/md/dm-stripe.c 		for (i = 0; i < sc->stripes; i++) {
sc                410 drivers/md/dm-stripe.c 			DMEMIT("%c", atomic_read(&(sc->stripe[i].error_count)) ?
sc                416 drivers/md/dm-stripe.c 		DMEMIT("%d %llu", sc->stripes,
sc                417 drivers/md/dm-stripe.c 			(unsigned long long)sc->chunk_size);
sc                418 drivers/md/dm-stripe.c 		for (i = 0; i < sc->stripes; i++)
sc                419 drivers/md/dm-stripe.c 			DMEMIT(" %s %llu", sc->stripe[i].dev->name,
sc                420 drivers/md/dm-stripe.c 			    (unsigned long long)sc->stripe[i].physical_start);
sc                430 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                450 drivers/md/dm-stripe.c 	for (i = 0; i < sc->stripes; i++)
sc                451 drivers/md/dm-stripe.c 		if (!strcmp(sc->stripe[i].dev->name, major_minor)) {
sc                452 drivers/md/dm-stripe.c 			atomic_inc(&(sc->stripe[i].error_count));
sc                453 drivers/md/dm-stripe.c 			if (atomic_read(&(sc->stripe[i].error_count)) <
sc                455 drivers/md/dm-stripe.c 				schedule_work(&sc->trigger_event);
sc                464 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                469 drivers/md/dm-stripe.c 		ret = fn(ti, sc->stripe[i].dev,
sc                470 drivers/md/dm-stripe.c 			 sc->stripe[i].physical_start,
sc                471 drivers/md/dm-stripe.c 			 sc->stripe_width, data);
sc                472 drivers/md/dm-stripe.c 	} while (!ret && ++i < sc->stripes);
sc                480 drivers/md/dm-stripe.c 	struct stripe_c *sc = ti->private;
sc                481 drivers/md/dm-stripe.c 	unsigned chunk_size = sc->chunk_size << SECTOR_SHIFT;
sc                484 drivers/md/dm-stripe.c 	blk_limits_io_opt(limits, chunk_size * sc->stripes);
sc                483 drivers/md/dm-zoned-metadata.c 					       struct shrink_control *sc)
sc                494 drivers/md/dm-zoned-metadata.c 					      struct shrink_control *sc)
sc                500 drivers/md/dm-zoned-metadata.c 	count = dmz_shrink_mblock_cache(zmd, sc->nr_to_scan);
sc               2124 drivers/md/raid5.c static void free_stripe(struct kmem_cache *sc, struct stripe_head *sh)
sc               2128 drivers/md/raid5.c 	kmem_cache_free(sc, sh);
sc               2131 drivers/md/raid5.c static struct stripe_head *alloc_stripe(struct kmem_cache *sc, gfp_t gfp,
sc               2137 drivers/md/raid5.c 	sh = kmem_cache_zalloc(sc, gfp);
sc               2158 drivers/md/raid5.c 				free_stripe(sc, sh);
sc               2190 drivers/md/raid5.c 	struct kmem_cache *sc;
sc               2203 drivers/md/raid5.c 	sc = kmem_cache_create(conf->cache_name[conf->active_name],
sc               2206 drivers/md/raid5.c 	if (!sc)
sc               2208 drivers/md/raid5.c 	conf->slab_cache = sc;
sc               2314 drivers/md/raid5.c 	struct kmem_cache *sc;
sc               2321 drivers/md/raid5.c 	sc = kmem_cache_create(conf->cache_name[1-conf->active_name],
sc               2324 drivers/md/raid5.c 	if (!sc)
sc               2331 drivers/md/raid5.c 		nsh = alloc_stripe(sc, GFP_KERNEL, newsize, conf);
sc               2342 drivers/md/raid5.c 			free_stripe(sc, nsh);
sc               2344 drivers/md/raid5.c 		kmem_cache_destroy(sc);
sc               2408 drivers/md/raid5.c 	conf->slab_cache = sc;
sc               6846 drivers/md/raid5.c 				      struct shrink_control *sc)
sc               6853 drivers/md/raid5.c 		while (ret < sc->nr_to_scan &&
sc               6867 drivers/md/raid5.c 				       struct shrink_control *sc)
sc               1116 drivers/media/i2c/cx25840/cx25840-core.c 	int hblank, hactive, burst, vblank, vactive, sc;
sc               1147 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 0x0a425f;
sc               1155 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 556453;
sc               1159 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 688739;
sc               1183 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 688739;
sc               1189 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 555452;
sc               1197 drivers/media/i2c/cx25840/cx25840-core.c 			sc = 556063;
sc               1227 drivers/media/i2c/cx25840/cx25840-core.c 			fsc = (((u64)sc) * pll) >> 24L;
sc               1236 drivers/media/i2c/cx25840/cx25840-core.c 				comb, sc);
sc               1267 drivers/media/i2c/cx25840/cx25840-core.c 	cx25840_write(client, 0x47c, sc);
sc               1268 drivers/media/i2c/cx25840/cx25840-core.c 	cx25840_write(client, 0x47d, (sc >> 8) & 0xff);
sc               1269 drivers/media/i2c/cx25840/cx25840-core.c 	cx25840_write(client, 0x47e, (sc >> 16) & 0xff);
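
The cx25840 lines above program the colour subcarrier step by writing it a byte at a time into three consecutive registers (0x47c..0x47e). A tiny sketch of splitting a 24-bit value across byte-wide register writes, with a hypothetical reg_write() in place of cx25840_write():

#include <stdint.h>
#include <stdio.h>

/* stand-in for an 8-bit register write on the device */
static void reg_write(uint16_t reg, uint8_t val)
{
	printf("reg 0x%03x <- 0x%02x\n", reg, val);
}

/* write a 24-bit value, low byte first, across three registers */
static void write_sc24(uint16_t base_reg, uint32_t sc)
{
	reg_write(base_reg,     (uint8_t)(sc & 0xff));
	reg_write(base_reg + 1, (uint8_t)((sc >> 8) & 0xff));
	reg_write(base_reg + 2, (uint8_t)((sc >> 16) & 0xff));
}

int main(void)
{
	write_sc24(0x47c, 556453);   /* one of the subcarrier values above */
	return 0;
}
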
sc                 35 drivers/media/pci/cobalt/cobalt-alsa-main.c static void snd_cobalt_card_private_free(struct snd_card *sc)
sc                 37 drivers/media/pci/cobalt/cobalt-alsa-main.c 	if (sc == NULL)
sc                 39 drivers/media/pci/cobalt/cobalt-alsa-main.c 	snd_cobalt_card_free(sc->private_data);
sc                 40 drivers/media/pci/cobalt/cobalt-alsa-main.c 	sc->private_data = NULL;
sc                 41 drivers/media/pci/cobalt/cobalt-alsa-main.c 	sc->private_free = NULL;
sc                 45 drivers/media/pci/cobalt/cobalt-alsa-main.c 				       struct snd_card *sc,
sc                 53 drivers/media/pci/cobalt/cobalt-alsa-main.c 	(*cobsc)->sc = sc;
sc                 55 drivers/media/pci/cobalt/cobalt-alsa-main.c 	sc->private_data = *cobsc;
sc                 56 drivers/media/pci/cobalt/cobalt-alsa-main.c 	sc->private_free = snd_cobalt_card_private_free;
sc                 65 drivers/media/pci/cobalt/cobalt-alsa-main.c 	struct snd_card *sc = cobsc->sc;
sc                 68 drivers/media/pci/cobalt/cobalt-alsa-main.c 	strscpy(sc->driver, "cobalt", sizeof(sc->driver));
sc                 71 drivers/media/pci/cobalt/cobalt-alsa-main.c 	snprintf(sc->shortname,  sizeof(sc->shortname), "cobalt-%d-%d",
sc                 75 drivers/media/pci/cobalt/cobalt-alsa-main.c 	snprintf(sc->longname, sizeof(sc->longname),
sc                 85 drivers/media/pci/cobalt/cobalt-alsa-main.c 	struct snd_card *sc = NULL;
sc                 96 drivers/media/pci/cobalt/cobalt-alsa-main.c 			   SNDRV_DEFAULT_STR1, THIS_MODULE, 0, &sc);
sc                103 drivers/media/pci/cobalt/cobalt-alsa-main.c 	ret = snd_cobalt_card_create(s, sc, &cobsc);
sc                126 drivers/media/pci/cobalt/cobalt-alsa-main.c 	ret = snd_card_register(sc);
sc                136 drivers/media/pci/cobalt/cobalt-alsa-main.c 	if (sc != NULL)
sc                137 drivers/media/pci/cobalt/cobalt-alsa-main.c 		snd_card_free(sc);
sc                148 drivers/media/pci/cobalt/cobalt-alsa-main.c 		snd_card_free(cobsc->sc);
sc                528 drivers/media/pci/cobalt/cobalt-alsa-pcm.c 	struct snd_card *sc = cobsc->sc;
sc                545 drivers/media/pci/cobalt/cobalt-alsa-pcm.c 		ret = snd_pcm_new(sc, "Cobalt PCM-In HDMI",
sc                569 drivers/media/pci/cobalt/cobalt-alsa-pcm.c 		ret = snd_pcm_new(sc, "Cobalt PCM-Out HDMI",
sc                 13 drivers/media/pci/cobalt/cobalt-alsa.h 	struct snd_card *sc;
sc                 74 drivers/media/pci/cx18/cx18-alsa-main.c static void snd_cx18_card_private_free(struct snd_card *sc)
sc                 76 drivers/media/pci/cx18/cx18-alsa-main.c 	if (sc == NULL)
sc                 78 drivers/media/pci/cx18/cx18-alsa-main.c 	snd_cx18_card_free(sc->private_data);
sc                 79 drivers/media/pci/cx18/cx18-alsa-main.c 	sc->private_data = NULL;
sc                 80 drivers/media/pci/cx18/cx18-alsa-main.c 	sc->private_free = NULL;
sc                 84 drivers/media/pci/cx18/cx18-alsa-main.c 				       struct snd_card *sc,
sc                 92 drivers/media/pci/cx18/cx18-alsa-main.c 	(*cxsc)->sc = sc;
sc                 94 drivers/media/pci/cx18/cx18-alsa-main.c 	sc->private_data = *cxsc;
sc                 95 drivers/media/pci/cx18/cx18-alsa-main.c 	sc->private_free = snd_cx18_card_private_free;
sc                103 drivers/media/pci/cx18/cx18-alsa-main.c 	struct snd_card *sc = cxsc->sc;
sc                106 drivers/media/pci/cx18/cx18-alsa-main.c 	strscpy(sc->driver, "CX23418", sizeof(sc->driver));
sc                109 drivers/media/pci/cx18/cx18-alsa-main.c 	snprintf(sc->shortname,  sizeof(sc->shortname), "CX18-%d",
sc                113 drivers/media/pci/cx18/cx18-alsa-main.c 	snprintf(sc->longname, sizeof(sc->longname),
sc                123 drivers/media/pci/cx18/cx18-alsa-main.c 	struct snd_card *sc = NULL;
sc                136 drivers/media/pci/cx18/cx18-alsa-main.c 			   THIS_MODULE, 0, &sc);
sc                144 drivers/media/pci/cx18/cx18-alsa-main.c 	ret = snd_cx18_card_create(v4l2_dev, sc, &cxsc);
sc                168 drivers/media/pci/cx18/cx18-alsa-main.c 	ret = snd_card_register(sc);
sc                179 drivers/media/pci/cx18/cx18-alsa-main.c 	if (sc != NULL)
sc                180 drivers/media/pci/cx18/cx18-alsa-main.c 		snd_card_free(sc);
sc                239 drivers/media/pci/cx18/cx18-alsa-main.c 	snd_card_free(cxsc->sc);
sc                317 drivers/media/pci/cx18/cx18-alsa-pcm.c 	struct snd_card *sc = cxsc->sc;
sc                322 drivers/media/pci/cx18/cx18-alsa-pcm.c 	ret = snd_pcm_new(sc, "CX23418 PCM",
sc                 12 drivers/media/pci/cx18/cx18-alsa.h 	struct snd_card *sc;
sc                285 drivers/media/pci/cx18/cx18-av-core.c 	int hblank, hactive, burst, vblank, vactive, sc;
sc                363 drivers/media/pci/cx18/cx18-av-core.c 			sc = 688700;
sc                368 drivers/media/pci/cx18/cx18-av-core.c 			sc = 556422;
sc                374 drivers/media/pci/cx18/cx18-av-core.c 			sc = 672314;
sc                424 drivers/media/pci/cx18/cx18-av-core.c 			sc = 688700;
sc                430 drivers/media/pci/cx18/cx18-av-core.c 			sc = 555421;
sc                435 drivers/media/pci/cx18/cx18-av-core.c 			sc = 556032;
sc                461 drivers/media/pci/cx18/cx18-av-core.c 		tmp = 28636360 * (u64) sc;
sc                472 drivers/media/pci/cx18/cx18-av-core.c 				    comb, sc);
sc                502 drivers/media/pci/cx18/cx18-av-core.c 	cx18_av_write(cx, 0x47c, sc);
sc                503 drivers/media/pci/cx18/cx18-av-core.c 	cx18_av_write(cx, 0x47d, (sc >> 8) & 0xff);
sc                504 drivers/media/pci/cx18/cx18-av-core.c 	cx18_av_write(cx, 0x47e, (sc >> 16) & 0xff);
sc                 71 drivers/media/pci/ivtv/ivtv-alsa-main.c static void snd_ivtv_card_private_free(struct snd_card *sc)
sc                 73 drivers/media/pci/ivtv/ivtv-alsa-main.c 	if (sc == NULL)
sc                 75 drivers/media/pci/ivtv/ivtv-alsa-main.c 	snd_ivtv_card_free(sc->private_data);
sc                 76 drivers/media/pci/ivtv/ivtv-alsa-main.c 	sc->private_data = NULL;
sc                 77 drivers/media/pci/ivtv/ivtv-alsa-main.c 	sc->private_free = NULL;
sc                 81 drivers/media/pci/ivtv/ivtv-alsa-main.c 				       struct snd_card *sc,
sc                 89 drivers/media/pci/ivtv/ivtv-alsa-main.c 	(*itvsc)->sc = sc;
sc                 91 drivers/media/pci/ivtv/ivtv-alsa-main.c 	sc->private_data = *itvsc;
sc                 92 drivers/media/pci/ivtv/ivtv-alsa-main.c 	sc->private_free = snd_ivtv_card_private_free;
sc                100 drivers/media/pci/ivtv/ivtv-alsa-main.c 	struct snd_card *sc = itvsc->sc;
sc                103 drivers/media/pci/ivtv/ivtv-alsa-main.c 	strscpy(sc->driver, "CX2341[56]", sizeof(sc->driver));
sc                106 drivers/media/pci/ivtv/ivtv-alsa-main.c 	snprintf(sc->shortname,  sizeof(sc->shortname), "IVTV-%d",
sc                110 drivers/media/pci/ivtv/ivtv-alsa-main.c 	snprintf(sc->longname, sizeof(sc->longname),
sc                120 drivers/media/pci/ivtv/ivtv-alsa-main.c 	struct snd_card *sc = NULL;
sc                135 drivers/media/pci/ivtv/ivtv-alsa-main.c 			   THIS_MODULE, 0, &sc);
sc                143 drivers/media/pci/ivtv/ivtv-alsa-main.c 	ret = snd_ivtv_card_create(v4l2_dev, sc, &itvsc);
sc                167 drivers/media/pci/ivtv/ivtv-alsa-main.c 	ret = snd_card_register(sc);
sc                176 drivers/media/pci/ivtv/ivtv-alsa-main.c 			 __func__, itv->instance, sc->number);
sc                181 drivers/media/pci/ivtv/ivtv-alsa-main.c 	if (sc != NULL)
sc                182 drivers/media/pci/ivtv/ivtv-alsa-main.c 		snd_card_free(sc);
sc                239 drivers/media/pci/ivtv/ivtv-alsa-main.c 	snd_card_free(itvsc->sc);
sc                322 drivers/media/pci/ivtv/ivtv-alsa-pcm.c 	struct snd_card *sc = itvsc->sc;
sc                327 drivers/media/pci/ivtv/ivtv-alsa-pcm.c 	ret = snd_pcm_new(sc, "CX2341[56] PCM",
sc                 13 drivers/media/pci/ivtv/ivtv-alsa.h 	struct snd_card *sc;
sc                443 drivers/media/pci/pt1/pt1.c 	int sc;
sc                463 drivers/media/pci/pt1/pt1.c 		sc = upacket >> 26 & 0x7;
sc                464 drivers/media/pci/pt1/pt1.c 		if (adap->st_count != -1 && sc != ((adap->st_count + 1) & 0x7))
sc                467 drivers/media/pci/pt1/pt1.c 		adap->st_count = sc;
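
The pt1.c lines above pull a 3-bit counter out of each packet word and require it to advance by exactly one (mod 8) from the previously seen value, treating -1 as "no packet seen yet". A minimal check of that kind, with an invented field position:

#include <stdint.h>
#include <stdio.h>

/* hypothetical: counter sits in bits 26..28 of the packet word */
static unsigned int pkt_counter(uint32_t word)
{
	return (word >> 26) & 0x7;
}

/* returns 1 if the counter advanced by exactly one (mod 8), or if this
 * is the first packet (last == -1) */
static int counter_ok(int last, unsigned int cur)
{
	return last == -1 || cur == ((unsigned int)(last + 1) & 0x7);
}

int main(void)
{
	int last = -1;
	uint32_t words[] = { 0u << 26, 1u << 26, 3u << 26 };

	for (unsigned int i = 0; i < 3; i++) {
		unsigned int cur = pkt_counter(words[i]);

		printf("counter %u: %s\n", cur,
		       counter_ok(last, cur) ? "in sequence" : "discontinuity");
		last = (int)cur;
	}
	return 0;
}
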
sc                636 drivers/media/platform/exynos-gsc/gsc-core.c 	struct gsc_scaler *sc = &ctx->scaler;
sc                667 drivers/media/platform/exynos-gsc/gsc-core.c 				      tx, &sc->pre_hratio);
sc                674 drivers/media/platform/exynos-gsc/gsc-core.c 				      ty, &sc->pre_vratio);
sc                680 drivers/media/platform/exynos-gsc/gsc-core.c 	gsc_check_src_scale_info(variant, s_frame, &sc->pre_hratio,
sc                681 drivers/media/platform/exynos-gsc/gsc-core.c 				 tx, ty, &sc->pre_vratio);
sc                683 drivers/media/platform/exynos-gsc/gsc-core.c 	gsc_get_prescaler_shfactor(sc->pre_hratio, sc->pre_vratio,
sc                684 drivers/media/platform/exynos-gsc/gsc-core.c 				   &sc->pre_shfactor);
sc                686 drivers/media/platform/exynos-gsc/gsc-core.c 	sc->main_hratio = (s_frame->crop.width << 16) / tx;
sc                687 drivers/media/platform/exynos-gsc/gsc-core.c 	sc->main_vratio = (s_frame->crop.height << 16) / ty;
sc                692 drivers/media/platform/exynos-gsc/gsc-core.c 			sc->pre_shfactor, sc->pre_hratio);
sc                694 drivers/media/platform/exynos-gsc/gsc-core.c 			sc->pre_vratio, sc->main_hratio, sc->main_vratio);
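
The exynos-gsc lines above express the scaling ratio as a Q16 fixed-point fraction, main_hratio = (crop width << 16) / target width (the fimc lines further down do the same with 14- or 8-bit shifts). A small sketch of computing and applying such a ratio; the helper names are made up:

#include <stdint.h>
#include <stdio.h>

/* Q16 ratio: src/dst scaled by 2^16, as in (src << 16) / dst above */
static uint32_t q16_ratio(uint32_t src, uint32_t dst)
{
	return (uint32_t)(((uint64_t)src << 16) / dst);
}

/* map a destination coordinate back to the source using the Q16 ratio */
static uint32_t q16_apply(uint32_t ratio, uint32_t dst_coord)
{
	return (uint32_t)(((uint64_t)dst_coord * ratio) >> 16);
}

int main(void)
{
	uint32_t hratio = q16_ratio(1920, 1280);   /* 3:2 downscale */

	printf("ratio=0x%x dst=640 -> src=%u\n", hratio, q16_apply(hratio, 640));
	return 0;
}
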
sc                345 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_scaler *sc = &ctx->scaler;
sc                348 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg = GSC_PRESC_SHFACTOR(sc->pre_shfactor);
sc                349 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg |= GSC_PRESC_H_RATIO(sc->pre_hratio);
sc                350 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg |= GSC_PRESC_V_RATIO(sc->pre_vratio);
sc                357 drivers/media/platform/exynos-gsc/gsc-regs.c 	struct gsc_scaler *sc = &ctx->scaler;
sc                360 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg = GSC_MAIN_H_RATIO_VALUE(sc->main_hratio);
sc                363 drivers/media/platform/exynos-gsc/gsc-regs.c 	cfg = GSC_MAIN_V_RATIO_VALUE(sc->main_vratio);
sc                229 drivers/media/platform/exynos4-is/fimc-core.c 	struct fimc_scaler *sc = &ctx->scaler;
sc                253 drivers/media/platform/exynos4-is/fimc-core.c 	sc->real_width = sx;
sc                254 drivers/media/platform/exynos4-is/fimc-core.c 	sc->real_height = sy;
sc                256 drivers/media/platform/exynos4-is/fimc-core.c 	ret = fimc_get_scaler_factor(sx, tx, &sc->pre_hratio, &sc->hfactor);
sc                260 drivers/media/platform/exynos4-is/fimc-core.c 	ret = fimc_get_scaler_factor(sy, ty,  &sc->pre_vratio, &sc->vfactor);
sc                264 drivers/media/platform/exynos4-is/fimc-core.c 	sc->pre_dst_width = sx / sc->pre_hratio;
sc                265 drivers/media/platform/exynos4-is/fimc-core.c 	sc->pre_dst_height = sy / sc->pre_vratio;
sc                268 drivers/media/platform/exynos4-is/fimc-core.c 		sc->main_hratio = (sx << 14) / (tx << sc->hfactor);
sc                269 drivers/media/platform/exynos4-is/fimc-core.c 		sc->main_vratio = (sy << 14) / (ty << sc->vfactor);
sc                271 drivers/media/platform/exynos4-is/fimc-core.c 		sc->main_hratio = (sx << 8) / (tx << sc->hfactor);
sc                272 drivers/media/platform/exynos4-is/fimc-core.c 		sc->main_vratio = (sy << 8) / (ty << sc->vfactor);
sc                276 drivers/media/platform/exynos4-is/fimc-core.c 	sc->scaleup_h = (tx >= sx) ? 1 : 0;
sc                277 drivers/media/platform/exynos4-is/fimc-core.c 	sc->scaleup_v = (ty >= sy) ? 1 : 0;
sc                283 drivers/media/platform/exynos4-is/fimc-core.c 		sc->copy_mode = 1;
sc                285 drivers/media/platform/exynos4-is/fimc-core.c 		sc->copy_mode = 0;
sc                234 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
sc                237 drivers/media/platform/exynos4-is/fimc-reg.c 	shfactor = 10 - (sc->hfactor + sc->vfactor);
sc                240 drivers/media/platform/exynos4-is/fimc-reg.c 	cfg |= (sc->pre_hratio << 16) | sc->pre_vratio;
sc                243 drivers/media/platform/exynos4-is/fimc-reg.c 	cfg = (sc->pre_dst_width << 16) | sc->pre_dst_height;
sc                250 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
sc                266 drivers/media/platform/exynos4-is/fimc-reg.c 	if (!sc->enabled)
sc                269 drivers/media/platform/exynos4-is/fimc-reg.c 	if (sc->scaleup_h)
sc                272 drivers/media/platform/exynos4-is/fimc-reg.c 	if (sc->scaleup_v)
sc                275 drivers/media/platform/exynos4-is/fimc-reg.c 	if (sc->copy_mode)
sc                315 drivers/media/platform/exynos4-is/fimc-reg.c 	struct fimc_scaler *sc = &ctx->scaler;
sc                319 drivers/media/platform/exynos4-is/fimc-reg.c 	    sc->main_hratio, sc->main_vratio);
sc                328 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CISCCTRL_MHRATIO_EXT(sc->main_hratio);
sc                329 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CISCCTRL_MVRATIO_EXT(sc->main_vratio);
sc                336 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CIEXTEN_MHRATIO_EXT(sc->main_hratio);
sc                337 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CIEXTEN_MVRATIO_EXT(sc->main_vratio);
sc                340 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CISCCTRL_MHRATIO(sc->main_hratio);
sc                341 drivers/media/platform/exynos4-is/fimc-reg.c 		cfg |= FIMC_REG_CISCCTRL_MVRATIO(sc->main_vratio);
sc                364 drivers/media/platform/s3c-camif/camif-regs.c 	struct camif_scaler *sc = &vp->scaler;
sc                369 drivers/media/platform/s3c-camif/camif-regs.c 	shfactor = 10 - (sc->h_shift + sc->v_shift);
sc                372 drivers/media/platform/s3c-camif/camif-regs.c 	cfg |= (sc->pre_h_ratio << 16) | sc->pre_v_ratio;
sc                375 drivers/media/platform/s3c-camif/camif-regs.c 	cfg = (sc->pre_dst_width << 16) | sc->pre_dst_height;
sc                 21 drivers/media/platform/ti-vpe/sc.c void sc_dump_regs(struct sc_data *sc)
sc                 23 drivers/media/platform/ti-vpe/sc.c 	struct device *dev = &sc->pdev->dev;
sc                 26 drivers/media/platform/ti-vpe/sc.c 	ioread32(sc->base + CFG_##r))
sc                 28 drivers/media/platform/ti-vpe/sc.c 	dev_dbg(dev, "SC Registers @ %pa:\n", &sc->res->start);
sc                 61 drivers/media/platform/ti-vpe/sc.c void sc_set_hs_coeffs(struct sc_data *sc, void *addr, unsigned int src_w,
sc                102 drivers/media/platform/ti-vpe/sc.c 	sc->load_coeff_h = true;
sc                110 drivers/media/platform/ti-vpe/sc.c void sc_set_vs_coeffs(struct sc_data *sc, void *addr, unsigned int src_h,
sc                143 drivers/media/platform/ti-vpe/sc.c 	sc->load_coeff_v = true;
sc                147 drivers/media/platform/ti-vpe/sc.c void sc_config_scaler(struct sc_data *sc, u32 *sc_reg0, u32 *sc_reg8,
sc                151 drivers/media/platform/ti-vpe/sc.c 	struct device *dev = &sc->pdev->dev;
sc                277 drivers/media/platform/ti-vpe/sc.c 	struct sc_data *sc;
sc                281 drivers/media/platform/ti-vpe/sc.c 	sc = devm_kzalloc(&pdev->dev, sizeof(*sc), GFP_KERNEL);
sc                282 drivers/media/platform/ti-vpe/sc.c 	if (!sc) {
sc                287 drivers/media/platform/ti-vpe/sc.c 	sc->pdev = pdev;
sc                289 drivers/media/platform/ti-vpe/sc.c 	sc->res = platform_get_resource_byname(pdev, IORESOURCE_MEM, res_name);
sc                290 drivers/media/platform/ti-vpe/sc.c 	if (!sc->res) {
sc                296 drivers/media/platform/ti-vpe/sc.c 	sc->base = devm_ioremap_resource(&pdev->dev, sc->res);
sc                297 drivers/media/platform/ti-vpe/sc.c 	if (IS_ERR(sc->base)) {
sc                299 drivers/media/platform/ti-vpe/sc.c 		return ERR_CAST(sc->base);
sc                302 drivers/media/platform/ti-vpe/sc.c 	return sc;
sc                198 drivers/media/platform/ti-vpe/sc.h void sc_dump_regs(struct sc_data *sc);
sc                199 drivers/media/platform/ti-vpe/sc.h void sc_set_hs_coeffs(struct sc_data *sc, void *addr, unsigned int src_w,
sc                201 drivers/media/platform/ti-vpe/sc.h void sc_set_vs_coeffs(struct sc_data *sc, void *addr, unsigned int src_h,
sc                203 drivers/media/platform/ti-vpe/sc.h void sc_config_scaler(struct sc_data *sc, u32 *sc_reg0, u32 *sc_reg8,
sc                380 drivers/media/platform/ti-vpe/vpe.c 	struct sc_data		*sc;		/* scaler data handle */
sc                523 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC0));
sc                525 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC8));
sc                527 drivers/media/platform/ti-vpe/vpe.c 		GET_OFFSET_TOP(ctx, ctx->dev->sc, CFG_SC17));
sc                898 drivers/media/platform/ti-vpe/vpe.c 	sc_set_hs_coeffs(ctx->dev->sc, ctx->sc_coeff_h.addr, src_w, dst_w);
sc                899 drivers/media/platform/ti-vpe/vpe.c 	sc_set_vs_coeffs(ctx->dev->sc, ctx->sc_coeff_v.addr, src_h, dst_h);
sc                901 drivers/media/platform/ti-vpe/vpe.c 	sc_config_scaler(ctx->dev->sc, &mmr_adb->sc_regs0[0],
sc               1006 drivers/media/platform/ti-vpe/vpe.c 	sc_dump_regs(dev->sc);
sc               1189 drivers/media/platform/ti-vpe/vpe.c 	struct sc_data *sc = ctx->dev->sc;
sc               1238 drivers/media/platform/ti-vpe/vpe.c 	if (sc->loaded_coeff_h != ctx->sc_coeff_h.dma_addr ||
sc               1239 drivers/media/platform/ti-vpe/vpe.c 			sc->load_coeff_h) {
sc               1244 drivers/media/platform/ti-vpe/vpe.c 		sc->loaded_coeff_h = ctx->sc_coeff_h.dma_addr;
sc               1245 drivers/media/platform/ti-vpe/vpe.c 		sc->load_coeff_h = false;
sc               1248 drivers/media/platform/ti-vpe/vpe.c 	if (sc->loaded_coeff_v != ctx->sc_coeff_v.dma_addr ||
sc               1249 drivers/media/platform/ti-vpe/vpe.c 			sc->load_coeff_v) {
sc               1254 drivers/media/platform/ti-vpe/vpe.c 		sc->loaded_coeff_v = ctx->sc_coeff_v.dma_addr;
sc               1255 drivers/media/platform/ti-vpe/vpe.c 		sc->load_coeff_v = false;
sc               2545 drivers/media/platform/ti-vpe/vpe.c 	dev->sc = sc_create(pdev, "sc");
sc               2546 drivers/media/platform/ti-vpe/vpe.c 	if (IS_ERR(dev->sc)) {
sc               2547 drivers/media/platform/ti-vpe/vpe.c 		ret = PTR_ERR(dev->sc);
sc                167 drivers/media/rc/rc-loopback.c 				  struct rc_scancode_filter *sc)
sc                175 drivers/media/rc/rc-loopback.c 	if (!sc->mask)
sc                183 drivers/media/rc/rc-loopback.c 	ret = ir_raw_encode_scancode(dev->wakeup_protocol, sc->data, raw, max);
sc                710 drivers/media/rc/rc-main.c 	struct lirc_scancode sc = {
sc                718 drivers/media/rc/rc-main.c 		ir_lirc_scancode_event(dev, &sc);
sc                752 drivers/media/rc/rc-main.c 	struct lirc_scancode sc = {
sc                759 drivers/media/rc/rc-main.c 		ir_lirc_scancode_event(dev, &sc);
sc                141 drivers/media/usb/gspca/jpeg.h 	int i, sc;
sc                144 drivers/media/usb/gspca/jpeg.h 		sc = 5000;
sc                146 drivers/media/usb/gspca/jpeg.h 		sc = 5000 / quality;
sc                148 drivers/media/usb/gspca/jpeg.h 		sc = 200 - quality * 2;
sc                151 drivers/media/usb/gspca/jpeg.h 			(jpeg_head[JPEG_QT0_OFFSET + i] * sc + 50) / 100;
sc                153 drivers/media/usb/gspca/jpeg.h 			(jpeg_head[JPEG_QT1_OFFSET + i] * sc + 50) / 100;
sc                959 drivers/media/usb/gspca/topro.c 	int i, sc;
sc                962 drivers/media/usb/gspca/topro.c 		sc = 5000;
sc                964 drivers/media/usb/gspca/topro.c 		sc = 5000 / quality;
sc                966 drivers/media/usb/gspca/topro.c 		sc = 200 - quality * 2;
sc                969 drivers/media/usb/gspca/topro.c 			(jpeg_head[JPEG_QT0_OFFSET + i] * sc + 50) / 100;
sc                971 drivers/media/usb/gspca/topro.c 			(jpeg_head[JPEG_QT1_OFFSET + i] * sc + 50) / 100;
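
The gspca jpeg.h and topro.c lines above scale the base JPEG quantization tables with the usual IJG-style factor: sc = 5000/quality below 50, 200 - 2*quality at or above 50, then q[i] = (base[i]*sc + 50)/100. A standalone version of that scaling, with a made-up base table and a clamp added for safety:

#include <stdio.h>

/* IJG-style quality -> scale factor, as in the lines above */
static int quality_to_scale(int quality)
{
	if (quality <= 0)
		return 5000;
	if (quality < 50)
		return 5000 / quality;
	return 200 - quality * 2;
}

static void scale_qtable(const unsigned char *base, unsigned char *out,
			 int n, int quality)
{
	int sc = quality_to_scale(quality);

	for (int i = 0; i < n; i++) {
		int q = (base[i] * sc + 50) / 100;

		if (q < 1)          /* clamp like typical encoders do */
			q = 1;
		else if (q > 255)
			q = 255;
		out[i] = (unsigned char)q;
	}
}

int main(void)
{
	const unsigned char base[4] = { 16, 11, 10, 16 };   /* made-up table */
	unsigned char out[4];

	scale_qtable(base, out, 4, 75);   /* sc = 50 -> halve the table */
	printf("%u %u %u %u\n", out[0], out[1], out[2], out[3]);
	return 0;
}
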
sc               3058 drivers/message/fusion/mptbase.c 	u32 s, sc;
sc               3062 drivers/message/fusion/mptbase.c 	sc = s & MPI_IOC_STATE_MASK;
sc               3065 drivers/message/fusion/mptbase.c 	ioc->last_state = sc;
sc               3067 drivers/message/fusion/mptbase.c 	return cooked ? sc : s;
sc               1927 drivers/message/fusion/mptsas.c static enum blk_eh_timer_return mptsas_eh_timed_out(struct scsi_cmnd *sc)
sc               1934 drivers/message/fusion/mptsas.c 	hd = shost_priv(sc->device->host);
sc               1937 drivers/message/fusion/mptsas.c 		    __func__, sc);
sc               1944 drivers/message/fusion/mptsas.c 		    __func__, sc);
sc               1954 drivers/message/fusion/mptsas.c 		    ioc->name, __func__, sc));
sc               1957 drivers/message/fusion/mptsas.c 	vdevice = sc->device->hostdata;
sc               1962 drivers/message/fusion/mptsas.c 		    ioc->name, __func__, sc));
sc               4840 drivers/message/fusion/mptsas.c 	struct scsi_cmnd	*sc;
sc               4868 drivers/message/fusion/mptsas.c 		sc = mptscsih_get_scsi_lookup(ioc, ii);
sc               4869 drivers/message/fusion/mptsas.c 		if (!sc)
sc               4875 drivers/message/fusion/mptsas.c 		vdevice = sc->device->hostdata;
sc                 89 drivers/message/fusion/mptscsih.c static void	mptscsih_report_queue_full(struct scsi_cmnd *sc, SCSIIOReply_t *pScsiReply, SCSIIORequest_t *pScsiReq);
sc                 95 drivers/message/fusion/mptscsih.c static void	mptscsih_copy_sense_data(struct scsi_cmnd *sc, MPT_SCSI_HOST *hd, MPT_FRAME_HDR *mf, SCSIIOReply_t *pScsiReply);
sc                436 drivers/message/fusion/mptscsih.c mptscsih_info_scsiio(MPT_ADAPTER *ioc, struct scsi_cmnd *sc, SCSIIOReply_t * pScsiReply)
sc                538 drivers/message/fusion/mptscsih.c 	scsi_print_command(sc);
sc                540 drivers/message/fusion/mptscsih.c 	    ioc->name, pScsiReply->Bus, pScsiReply->TargetID, sc->device->lun);
sc                542 drivers/message/fusion/mptscsih.c 	    "resid = %d\n", ioc->name, scsi_bufflen(sc), sc->underflow,
sc                543 drivers/message/fusion/mptscsih.c 	    scsi_get_resid(sc));
sc                546 drivers/message/fusion/mptscsih.c 	    le32_to_cpu(pScsiReply->TransferCount), sc->result);
sc                554 drivers/message/fusion/mptscsih.c 		skey = sc->sense_buffer[2] & 0x0F;
sc                555 drivers/message/fusion/mptscsih.c 		asc = sc->sense_buffer[12];
sc                556 drivers/message/fusion/mptscsih.c 		ascq = sc->sense_buffer[13];
sc                590 drivers/message/fusion/mptscsih.c 	struct scsi_cmnd	*sc;
sc                611 drivers/message/fusion/mptscsih.c 	sc = mptscsih_getclear_scsi_lookup(ioc, req_idx);
sc                612 drivers/message/fusion/mptscsih.c 	if (sc == NULL) {
sc                627 drivers/message/fusion/mptscsih.c 	if ((unsigned char *)mf != sc->host_scribble) {
sc                633 drivers/message/fusion/mptscsih.c 		VirtDevice *vdevice = sc->device->hostdata;
sc                637 drivers/message/fusion/mptscsih.c 			sc->result = DID_NO_CONNECT << 16;
sc                642 drivers/message/fusion/mptscsih.c 	sc->host_scribble = NULL;
sc                643 drivers/message/fusion/mptscsih.c 	sc->result = DID_OK << 16;		/* Set default reply as OK */
sc                650 drivers/message/fusion/mptscsih.c 			ioc->name, mf, mr, sc, req_idx, pScsiReply->TaskTag));
sc                654 drivers/message/fusion/mptscsih.c 			ioc->name, mf, mr, sc, req_idx));
sc                671 drivers/message/fusion/mptscsih.c 		scsi_set_resid(sc, scsi_bufflen(sc) - xfer_cnt);
sc                688 drivers/message/fusion/mptscsih.c 			mptscsih_copy_sense_data(sc, hd, mf, pScsiReply);
sc                697 drivers/message/fusion/mptscsih.c 			sc->device->host->host_no, sc->device->channel,
sc                698 drivers/message/fusion/mptscsih.c 			sc->device->id, sc->device->lun,
sc                710 drivers/message/fusion/mptscsih.c 			sc->result = SAM_STAT_BUSY;
sc                715 drivers/message/fusion/mptscsih.c 			sc->result = DID_BAD_TARGET << 16;
sc                721 drivers/message/fusion/mptscsih.c 				sc->result = DID_NO_CONNECT << 16;
sc                724 drivers/message/fusion/mptscsih.c 				sc->result = DID_REQUEUE << 16;
sc                729 drivers/message/fusion/mptscsih.c 			vdevice = sc->device->hostdata;
sc                750 drivers/message/fusion/mptscsih.c 						sc->device->hostdata;
sc                768 drivers/message/fusion/mptscsih.c 					    sc->result =
sc                782 drivers/message/fusion/mptscsih.c 				sc->result = DID_ERROR << 16;
sc                795 drivers/message/fusion/mptscsih.c 			sc->result = DID_RESET << 16;
sc                800 drivers/message/fusion/mptscsih.c 				sc->result = DID_ERROR << 16;
sc                802 drivers/message/fusion/mptscsih.c 				sc->result = DID_RESET << 16;
sc                806 drivers/message/fusion/mptscsih.c 			scsi_set_resid(sc, scsi_bufflen(sc) - xfer_cnt);
sc                807 drivers/message/fusion/mptscsih.c 			if((xfer_cnt==0)||(sc->underflow > xfer_cnt))
sc                808 drivers/message/fusion/mptscsih.c 				sc->result=DID_SOFT_ERROR << 16;
sc                810 drivers/message/fusion/mptscsih.c 				sc->result = (DID_OK << 16) | scsi_status;
sc                813 drivers/message/fusion/mptscsih.c 			    ioc->name, sc->result, sc->device->channel, sc->device->id));
sc                821 drivers/message/fusion/mptscsih.c 			sc->result = (DID_OK << 16) | scsi_status;
sc                839 drivers/message/fusion/mptscsih.c 						if (scsi_bufflen(sc) !=
sc                841 drivers/message/fusion/mptscsih.c 							sc->result =
sc                847 drivers/message/fusion/mptscsih.c 						    scsi_bufflen(sc),
sc                853 drivers/message/fusion/mptscsih.c 				if (xfer_cnt < sc->underflow) {
sc                855 drivers/message/fusion/mptscsih.c 						sc->result = SAM_STAT_BUSY;
sc                857 drivers/message/fusion/mptscsih.c 						sc->result = DID_SOFT_ERROR << 16;
sc                862 drivers/message/fusion/mptscsih.c 					sc->result = DID_SOFT_ERROR << 16;
sc                866 drivers/message/fusion/mptscsih.c 					sc->result = DID_RESET << 16;
sc                873 drivers/message/fusion/mptscsih.c 			    ioc->name, sc->underflow));
sc                880 drivers/message/fusion/mptscsih.c 				mptscsih_report_queue_full(sc, pScsiReply, pScsiReq);
sc                885 drivers/message/fusion/mptscsih.c 			scsi_set_resid(sc, 0);
sc                889 drivers/message/fusion/mptscsih.c 			sc->result = (DID_OK << 16) | scsi_status;
sc                904 drivers/message/fusion/mptscsih.c 					(sc->sense_buffer[2] & 0x20)) {
sc                907 drivers/message/fusion/mptscsih.c 					sc->sense_buffer[3] << 24 |
sc                908 drivers/message/fusion/mptscsih.c 					sc->sense_buffer[4] << 16 |
sc                909 drivers/message/fusion/mptscsih.c 					sc->sense_buffer[5] << 8 |
sc                910 drivers/message/fusion/mptscsih.c 					sc->sense_buffer[6];
sc                911 drivers/message/fusion/mptscsih.c 					if (((sc->sense_buffer[3] & 0x80) ==
sc                912 drivers/message/fusion/mptscsih.c 						0x80) && (scsi_bufflen(sc)
sc                914 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[2] =
sc                916 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[12] = 0xff;
sc                917 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[13] = 0xff;
sc                922 drivers/message/fusion/mptscsih.c 						scsi_bufflen(sc),
sc                925 drivers/message/fusion/mptscsih.c 					if (((sc->sense_buffer[3] & 0x80)
sc                927 drivers/message/fusion/mptscsih.c 						(scsi_bufflen(sc) !=
sc                929 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[2] =
sc                931 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[12] = 0xff;
sc                932 drivers/message/fusion/mptscsih.c 						sc->sense_buffer[13] = 0xff;
sc                938 drivers/message/fusion/mptscsih.c 						scsi_bufflen(sc),
sc                951 drivers/message/fusion/mptscsih.c 					mptscsih_report_queue_full(sc, pScsiReply, pScsiReq);
sc                960 drivers/message/fusion/mptscsih.c 				sc->result = DID_SOFT_ERROR << 16;
sc                964 drivers/message/fusion/mptscsih.c 				sc->result = DID_RESET << 16;
sc                974 drivers/message/fusion/mptscsih.c 			if (sc->result == MPI_SCSI_STATUS_TASK_SET_FULL)
sc                975 drivers/message/fusion/mptscsih.c 				mptscsih_report_queue_full(sc, pScsiReply, pScsiReq);
sc                984 drivers/message/fusion/mptscsih.c 			sc->result = DID_SOFT_ERROR << 16;
sc                999 drivers/message/fusion/mptscsih.c 			sc->result = DID_SOFT_ERROR << 16;
sc               1005 drivers/message/fusion/mptscsih.c 		if (sc->result && (ioc->debug_level & MPT_DEBUG_REPLY))
sc               1006 drivers/message/fusion/mptscsih.c 			mptscsih_info_scsiio(ioc, sc, pScsiReply);
sc               1012 drivers/message/fusion/mptscsih.c 	scsi_dma_unmap(sc);
sc               1014 drivers/message/fusion/mptscsih.c 	sc->scsi_done(sc);		/* Issue the command callback */
sc               1035 drivers/message/fusion/mptscsih.c 	struct scsi_cmnd *sc;
sc               1041 drivers/message/fusion/mptscsih.c 		sc = mptscsih_getclear_scsi_lookup(ioc, ii);
sc               1042 drivers/message/fusion/mptscsih.c 		if (!sc)
sc               1051 drivers/message/fusion/mptscsih.c 		if ((unsigned char *)mf != sc->host_scribble)
sc               1053 drivers/message/fusion/mptscsih.c 		scsi_dma_unmap(sc);
sc               1054 drivers/message/fusion/mptscsih.c 		sc->result = DID_RESET << 16;
sc               1055 drivers/message/fusion/mptscsih.c 		sc->host_scribble = NULL;
sc               1056 drivers/message/fusion/mptscsih.c 		dtmprintk(ioc, sdev_printk(KERN_INFO, sc->device, MYIOC_s_FMT
sc               1058 drivers/message/fusion/mptscsih.c 		    "idx=%x\n", ioc->name, channel, id, sc, mf, ii));
sc               1059 drivers/message/fusion/mptscsih.c 		sc->scsi_done(sc);
sc               1083 drivers/message/fusion/mptscsih.c 	struct scsi_cmnd *sc;
sc               1090 drivers/message/fusion/mptscsih.c 		if ((sc = ioc->ScsiLookup[ii]) != NULL) {
sc               1109 drivers/message/fusion/mptscsih.c 			if ((unsigned char *)mf != sc->host_scribble)
sc               1115 drivers/message/fusion/mptscsih.c 			scsi_dma_unmap(sc);
sc               1116 drivers/message/fusion/mptscsih.c 			sc->host_scribble = NULL;
sc               1117 drivers/message/fusion/mptscsih.c 			sc->result = DID_NO_CONNECT << 16;
sc               1118 drivers/message/fusion/mptscsih.c 			dtmprintk(ioc, sdev_printk(KERN_INFO, sc->device,
sc               1122 drivers/message/fusion/mptscsih.c 			   sc, mf, ii));
sc               1123 drivers/message/fusion/mptscsih.c 			sc->scsi_done(sc);
sc               1146 drivers/message/fusion/mptscsih.c mptscsih_report_queue_full(struct scsi_cmnd *sc, SCSIIOReply_t *pScsiReply, SCSIIORequest_t *pScsiReq)
sc               1152 drivers/message/fusion/mptscsih.c 	if (sc->device == NULL)
sc               1154 drivers/message/fusion/mptscsih.c 	if (sc->device->host == NULL)
sc               1156 drivers/message/fusion/mptscsih.c 	if ((hd = shost_priv(sc->device->host)) == NULL)
sc               1161 drivers/message/fusion/mptscsih.c 				ioc->name, 0, sc->device->id, sc->device->lun));
sc               2406 drivers/message/fusion/mptscsih.c mptscsih_copy_sense_data(struct scsi_cmnd *sc, MPT_SCSI_HOST *hd, MPT_FRAME_HDR *mf, SCSIIOReply_t *pScsiReply)
sc               2416 drivers/message/fusion/mptscsih.c 	vdevice = sc->device->hostdata;
sc               2425 drivers/message/fusion/mptscsih.c 		memcpy(sc->sense_buffer, sense_data, SNS_LEN(sc));
sc               2439 drivers/message/fusion/mptscsih.c 					(sc->device->channel << 8) | sc->device->id;
sc               2526 drivers/message/fusion/mptscsih.c SCPNT_TO_LOOKUP_IDX(MPT_ADAPTER *ioc, struct scsi_cmnd *sc)
sc               2533 drivers/message/fusion/mptscsih.c 		if (ioc->ScsiLookup[i] == sc) {
sc                 39 drivers/misc/fastrpc.c #define REMOTE_SCALARS_INBUFS(sc)	(((sc) >> 16) & 0x0ff)
sc                 42 drivers/misc/fastrpc.c #define REMOTE_SCALARS_OUTBUFS(sc)	(((sc) >> 8) & 0x0ff)
sc                 45 drivers/misc/fastrpc.c #define REMOTE_SCALARS_INHANDLES(sc)	(((sc) >> 4) & 0x0f)
sc                 48 drivers/misc/fastrpc.c #define REMOTE_SCALARS_OUTHANDLES(sc)	((sc) & 0x0f)
sc                 50 drivers/misc/fastrpc.c #define REMOTE_SCALARS_LENGTH(sc)	(REMOTE_SCALARS_INBUFS(sc) +   \
sc                 51 drivers/misc/fastrpc.c 					 REMOTE_SCALARS_OUTBUFS(sc) +  \
sc                 52 drivers/misc/fastrpc.c 					 REMOTE_SCALARS_INHANDLES(sc)+ \
sc                 53 drivers/misc/fastrpc.c 					 REMOTE_SCALARS_OUTHANDLES(sc))
sc                 97 drivers/misc/fastrpc.c 	u32 sc;		/* scalars structure describing the data */
sc                154 drivers/misc/fastrpc.c 	u32 sc;
sc                410 drivers/misc/fastrpc.c 			struct fastrpc_user *user, u32 kernel, u32 sc,
sc                424 drivers/misc/fastrpc.c 	ctx->nscalars = REMOTE_SCALARS_LENGTH(sc);
sc                425 drivers/misc/fastrpc.c 	ctx->nbufs = REMOTE_SCALARS_INBUFS(sc) +
sc                426 drivers/misc/fastrpc.c 		     REMOTE_SCALARS_OUTBUFS(sc);
sc                449 drivers/misc/fastrpc.c 	ctx->sc = sc;
sc                749 drivers/misc/fastrpc.c 	inbufs = REMOTE_SCALARS_INBUFS(ctx->sc);
sc                864 drivers/misc/fastrpc.c 	inbufs = REMOTE_SCALARS_INBUFS(ctx->sc);
sc                899 drivers/misc/fastrpc.c 	msg->sc = ctx->sc;
sc                908 drivers/misc/fastrpc.c 				   u32 handle, u32 sc,
sc                920 drivers/misc/fastrpc.c 	ctx = fastrpc_context_alloc(fl, kernel, sc, args);
sc                987 drivers/misc/fastrpc.c 	u32 sc;
sc               1052 drivers/misc/fastrpc.c 	sc = FASTRPC_SCALARS(FASTRPC_RMID_INIT_CREATE, 4, 0);
sc               1054 drivers/misc/fastrpc.c 		sc = FASTRPC_SCALARS(FASTRPC_RMID_INIT_CREATE_ATTR, 6, 0);
sc               1057 drivers/misc/fastrpc.c 				      sc, args);
sc               1115 drivers/misc/fastrpc.c 	u32 sc;
sc               1122 drivers/misc/fastrpc.c 	sc = FASTRPC_SCALARS(FASTRPC_RMID_INIT_RELEASE, 1, 0);
sc               1125 drivers/misc/fastrpc.c 				       sc, &args[0]);
sc               1245 drivers/misc/fastrpc.c 	u32 sc;
sc               1251 drivers/misc/fastrpc.c 	sc = FASTRPC_SCALARS(FASTRPC_RMID_INIT_ATTACH, 1, 0);
sc               1255 drivers/misc/fastrpc.c 				       sc, &args[0]);
sc               1269 drivers/misc/fastrpc.c 	nscalars = REMOTE_SCALARS_LENGTH(inv.sc);
sc               1282 drivers/misc/fastrpc.c 	err = fastrpc_internal_invoke(fl, false, inv.handle, inv.sc, args);
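The fastrpc entries above show the u32 sc word being treated as a packed scalar descriptor: the REMOTE_SCALARS_* macros take input buffers from bits 23:16, output buffers from bits 15:8, input handles from bits 7:4 and output handles from bits 3:0, and REMOTE_SCALARS_LENGTH sums the four counts (which is how fastrpc_context_alloc sizes ctx->nscalars and ctx->nbufs). A minimal userspace sketch of that layout; the pack_scalars() helper here is illustrative only and is not the driver's FASTRPC_SCALARS() macro, which also encodes the remote method id:

	#include <stdint.h>
	#include <stdio.h>

	/* Same layout as the REMOTE_SCALARS_* macros quoted above:
	 * bits 23:16 = input buffers, 15:8 = output buffers,
	 * bits  7:4 = input handles,  3:0 = output handles. */
	#define SC_INBUFS(sc)		(((sc) >> 16) & 0x0ff)
	#define SC_OUTBUFS(sc)		(((sc) >> 8) & 0x0ff)
	#define SC_INHANDLES(sc)	(((sc) >> 4) & 0x0f)
	#define SC_OUTHANDLES(sc)	((sc) & 0x0f)

	/* Illustrative packer, not the driver's FASTRPC_SCALARS() macro. */
	static uint32_t pack_scalars(uint32_t inbufs, uint32_t outbufs,
				     uint32_t inhandles, uint32_t outhandles)
	{
		return (inbufs << 16) | (outbufs << 8) |
		       (inhandles << 4) | outhandles;
	}

	int main(void)
	{
		uint32_t sc = pack_scalars(4, 1, 0, 0);
		unsigned int in  = SC_INBUFS(sc);
		unsigned int out = SC_OUTBUFS(sc);
		unsigned int ih  = SC_INHANDLES(sc);
		unsigned int oh  = SC_OUTHANDLES(sc);

		printf("inbufs=%u outbufs=%u inhandles=%u outhandles=%u total=%u\n",
		       in, out, ih, oh, in + out + ih + oh);
		return 0;
	}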
sc               1526 drivers/misc/vmw_balloon.c 					     struct shrink_control *sc)
sc               1542 drivers/misc/vmw_balloon.c 	deflated_frames = vmballoon_deflate(b, sc->nr_to_scan, true);
sc               1568 drivers/misc/vmw_balloon.c 					      struct shrink_control *sc)
sc                483 drivers/mmc/host/sdhci-s3c.c 	struct sdhci_s3c *sc;
sc                501 drivers/mmc/host/sdhci-s3c.c 	sc = sdhci_priv(host);
sc                515 drivers/mmc/host/sdhci-s3c.c 		sc->ext_cd_gpio = -1; /* invalid gpio number */
sc                520 drivers/mmc/host/sdhci-s3c.c 	sc->host = host;
sc                521 drivers/mmc/host/sdhci-s3c.c 	sc->pdev = pdev;
sc                522 drivers/mmc/host/sdhci-s3c.c 	sc->pdata = pdata;
sc                523 drivers/mmc/host/sdhci-s3c.c 	sc->cur_clk = -1;
sc                527 drivers/mmc/host/sdhci-s3c.c 	sc->clk_io = devm_clk_get(dev, "hsmmc");
sc                528 drivers/mmc/host/sdhci-s3c.c 	if (IS_ERR(sc->clk_io)) {
sc                530 drivers/mmc/host/sdhci-s3c.c 		ret = PTR_ERR(sc->clk_io);
sc                535 drivers/mmc/host/sdhci-s3c.c 	clk_prepare_enable(sc->clk_io);
sc                541 drivers/mmc/host/sdhci-s3c.c 		sc->clk_bus[ptr] = devm_clk_get(dev, name);
sc                542 drivers/mmc/host/sdhci-s3c.c 		if (IS_ERR(sc->clk_bus[ptr]))
sc                546 drivers/mmc/host/sdhci-s3c.c 		sc->clk_rates[ptr] = clk_get_rate(sc->clk_bus[ptr]);
sc                549 drivers/mmc/host/sdhci-s3c.c 				ptr, name, sc->clk_rates[ptr]);
sc                580 drivers/mmc/host/sdhci-s3c.c 		sc->no_divider = drv_data->no_divider;
sc                631 drivers/mmc/host/sdhci-s3c.c 	if (sc->no_divider) {
sc                659 drivers/mmc/host/sdhci-s3c.c 		clk_disable_unprepare(sc->clk_io);
sc                667 drivers/mmc/host/sdhci-s3c.c 	clk_disable_unprepare(sc->clk_io);
sc                678 drivers/mmc/host/sdhci-s3c.c 	struct sdhci_s3c *sc = sdhci_priv(host);
sc                680 drivers/mmc/host/sdhci-s3c.c 	if (sc->ext_cd_irq)
sc                681 drivers/mmc/host/sdhci-s3c.c 		free_irq(sc->ext_cd_irq, sc);
sc                684 drivers/mmc/host/sdhci-s3c.c 	if (sc->pdata->cd_type != S3C_SDHCI_CD_INTERNAL)
sc                685 drivers/mmc/host/sdhci-s3c.c 		clk_prepare_enable(sc->clk_io);
sc                692 drivers/mmc/host/sdhci-s3c.c 	clk_disable_unprepare(sc->clk_io);
sc                265 drivers/net/eql.c static int eql_g_slave_cfg(struct net_device *dev, slave_config_t __user *sc);
sc                266 drivers/net/eql.c static int eql_s_slave_cfg(struct net_device *dev, slave_config_t __user *sc);
sc                482 drivers/net/eql.c 	slave_config_t sc;
sc                485 drivers/net/eql.c 	if (copy_from_user(&sc, scp, sizeof (slave_config_t)))
sc                488 drivers/net/eql.c 	slave_dev = __dev_get_by_name(&init_net, sc.slave_name);
sc                498 drivers/net/eql.c 			sc.priority = slave->priority;
sc                504 drivers/net/eql.c 	if (!ret && copy_to_user(scp, &sc, sizeof (slave_config_t)))
sc                515 drivers/net/eql.c 	slave_config_t sc;
sc                518 drivers/net/eql.c 	if (copy_from_user(&sc, scp, sizeof (slave_config_t)))
sc                521 drivers/net/eql.c 	slave_dev = __dev_get_by_name(&init_net, sc.slave_name);
sc                532 drivers/net/eql.c 			slave->priority = sc.priority;
sc                533 drivers/net/eql.c 			slave->priority_bps = sc.priority;
sc                534 drivers/net/eql.c 			slave->priority_Bps = sc.priority / 8;
sc               5177 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		req.sc = cpu_to_le16(bp->grp_info[grp_idx].fw_stats_ctx);
sc               5333 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h 	__le16	sc;
sc                272 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_rcvbuffer(struct sbmac_softc *sc, struct sbmacdma *d,
sc                276 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_fillring(struct sbmac_softc *sc, struct sbmacdma *d);
sc                277 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_rx_process(struct sbmac_softc *sc, struct sbmacdma *d,
sc                279 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_tx_process(struct sbmac_softc *sc, struct sbmacdma *d,
sc                286 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_promiscuous_mode(struct sbmac_softc *sc, int onoff);
sc                290 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_setmulti(struct sbmac_softc *sc);
sc                423 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = (struct sbmac_softc *)bus->priv;
sc                424 drivers/net/ethernet/broadcom/sb1250-mac.c 	void __iomem *sbm_mdio = sc->sbm_mdio;
sc                517 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = (struct sbmac_softc *)bus->priv;
sc                518 drivers/net/ethernet/broadcom/sb1250-mac.c 	void __iomem *sbm_mdio = sc->sbm_mdio;
sc                771 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_add_rcvbuffer(struct sbmac_softc *sc, struct sbmacdma *d,
sc                774 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct net_device *dev = sc->sbm_dev;
sc               1005 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_fillring(struct sbmac_softc *sc, struct sbmacdma *d)
sc               1010 drivers/net/ethernet/broadcom/sb1250-mac.c 		if (sbdma_add_rcvbuffer(sc, d, NULL) != 0)
sc               1018 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(netdev);
sc               1019 drivers/net/ethernet/broadcom/sb1250-mac.c 	int irq = sc->sbm_dev->irq;
sc               1021 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, sc->sbm_imr);
sc               1028 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_imr);
sc               1031 drivers/net/ethernet/broadcom/sb1250-mac.c 	(M_MAC_INT_CHANNEL << S_MAC_RX_CH0), sc->sbm_imr);
sc               1052 drivers/net/ethernet/broadcom/sb1250-mac.c static int sbdma_rx_process(struct sbmac_softc *sc, struct sbmacdma *d,
sc               1055 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct net_device *dev = sc->sbm_dev;
sc               1069 drivers/net/ethernet/broadcom/sb1250-mac.c 	    += __raw_readq(sc->sbm_rxdma.sbdma_oodpktlost) & 0xffff;
sc               1070 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, sc->sbm_rxdma.sbdma_oodpktlost);
sc               1126 drivers/net/ethernet/broadcom/sb1250-mac.c 			if (unlikely(sbdma_add_rcvbuffer(sc, d, NULL) ==
sc               1130 drivers/net/ethernet/broadcom/sb1250-mac.c 				sbdma_add_rcvbuffer(sc, d, sb);
sc               1148 drivers/net/ethernet/broadcom/sb1250-mac.c 				if (sc->rx_hw_checksum == ENABLE) {
sc               1180 drivers/net/ethernet/broadcom/sb1250-mac.c 			sbdma_add_rcvbuffer(sc, d, sb);
sc               1217 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbdma_tx_process(struct sbmac_softc *sc, struct sbmacdma *d,
sc               1220 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct net_device *dev = sc->sbm_dev;
sc               1228 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_irqsave(&(sc->sbm_lock), flags);
sc               1300 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_unlock_irqrestore(&(sc->sbm_lock), flags);
sc               1365 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_uninitctx(struct sbmac_softc *sc)
sc               1367 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbdma_uninitctx(&(sc->sbm_txdma));
sc               1368 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbdma_uninitctx(&(sc->sbm_rxdma));
sc               1640 drivers/net/ethernet/broadcom/sb1250-mac.c static enum sbmac_state sbmac_set_channel_state(struct sbmac_softc *sc,
sc               1643 drivers/net/ethernet/broadcom/sb1250-mac.c 	enum sbmac_state oldstate = sc->sbm_state;
sc               1658 drivers/net/ethernet/broadcom/sb1250-mac.c 		sbmac_channel_start(sc);
sc               1661 drivers/net/ethernet/broadcom/sb1250-mac.c 		sbmac_channel_stop(sc);
sc               1685 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_promiscuous_mode(struct sbmac_softc *sc,int onoff)
sc               1689 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (sc->sbm_state != sbmac_state_on)
sc               1693 drivers/net/ethernet/broadcom/sb1250-mac.c 		reg = __raw_readq(sc->sbm_rxfilter);
sc               1695 drivers/net/ethernet/broadcom/sb1250-mac.c 		__raw_writeq(reg, sc->sbm_rxfilter);
sc               1698 drivers/net/ethernet/broadcom/sb1250-mac.c 		reg = __raw_readq(sc->sbm_rxfilter);
sc               1700 drivers/net/ethernet/broadcom/sb1250-mac.c 		__raw_writeq(reg, sc->sbm_rxfilter);
sc               1716 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_set_iphdr_offset(struct sbmac_softc *sc)
sc               1721 drivers/net/ethernet/broadcom/sb1250-mac.c 	reg = __raw_readq(sc->sbm_rxfilter);
sc               1723 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(reg, sc->sbm_rxfilter);
sc               1728 drivers/net/ethernet/broadcom/sb1250-mac.c 		sc->rx_hw_checksum = DISABLE;
sc               1730 drivers/net/ethernet/broadcom/sb1250-mac.c 		sc->rx_hw_checksum = ENABLE;
sc               1968 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               1977 drivers/net/ethernet/broadcom/sb1250-mac.c 	isr = __raw_readq(sc->sbm_isr) & ~M_MAC_COUNTER_ADDR;
sc               1988 drivers/net/ethernet/broadcom/sb1250-mac.c 		sbdma_tx_process(sc,&(sc->sbm_txdma), 0);
sc               1991 drivers/net/ethernet/broadcom/sb1250-mac.c 		if (napi_schedule_prep(&sc->napi)) {
sc               1992 drivers/net/ethernet/broadcom/sb1250-mac.c 			__raw_writeq(0, sc->sbm_imr);
sc               1993 drivers/net/ethernet/broadcom/sb1250-mac.c 			__napi_schedule(&sc->napi);
sc               1998 drivers/net/ethernet/broadcom/sb1250-mac.c 			sbdma_rx_process(sc,&(sc->sbm_rxdma),
sc               2020 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2024 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_irqsave(&sc->sbm_lock, flags);
sc               2031 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (sbdma_add_txbuffer(&(sc->sbm_txdma),skb)) {
sc               2034 drivers/net/ethernet/broadcom/sb1250-mac.c 		spin_unlock_irqrestore(&sc->sbm_lock, flags);
sc               2039 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_unlock_irqrestore(&sc->sbm_lock, flags);
sc               2058 drivers/net/ethernet/broadcom/sb1250-mac.c static void sbmac_setmulti(struct sbmac_softc *sc)
sc               2064 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct net_device *dev = sc->sbm_dev;
sc               2073 drivers/net/ethernet/broadcom/sb1250-mac.c 		port = sc->sbm_base + R_MAC_ADDR_BASE+(idx*sizeof(uint64_t));
sc               2078 drivers/net/ethernet/broadcom/sb1250-mac.c 		port = sc->sbm_base + R_MAC_HASH_BASE+(idx*sizeof(uint64_t));
sc               2086 drivers/net/ethernet/broadcom/sb1250-mac.c 	reg = __raw_readq(sc->sbm_rxfilter);
sc               2088 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(reg, sc->sbm_rxfilter);
sc               2095 drivers/net/ethernet/broadcom/sb1250-mac.c 		reg = __raw_readq(sc->sbm_rxfilter);
sc               2097 drivers/net/ethernet/broadcom/sb1250-mac.c 		__raw_writeq(reg, sc->sbm_rxfilter);
sc               2116 drivers/net/ethernet/broadcom/sb1250-mac.c 		port = sc->sbm_base + R_MAC_ADDR_BASE+(idx * sizeof(uint64_t));
sc               2127 drivers/net/ethernet/broadcom/sb1250-mac.c 		reg = __raw_readq(sc->sbm_rxfilter);
sc               2129 drivers/net/ethernet/broadcom/sb1250-mac.c 		__raw_writeq(reg, sc->sbm_rxfilter);
sc               2163 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2169 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_dev = dev;
sc               2170 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbe_idx = idx;
sc               2172 drivers/net/ethernet/broadcom/sb1250-mac.c 	eaddr = sc->sbm_hwaddr;
sc               2179 drivers/net/ethernet/broadcom/sb1250-mac.c 	ea_reg = __raw_readq(sc->sbm_base + R_MAC_ETHERNET_ADDR);
sc               2180 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_writeq(0, sc->sbm_base + R_MAC_ETHERNET_ADDR);
sc               2195 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_initctx(sc);
sc               2201 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_init(&(sc->sbm_lock));
sc               2208 drivers/net/ethernet/broadcom/sb1250-mac.c 	netif_napi_add(dev, &sc->napi, sbmac_poll, 16);
sc               2213 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_set_iphdr_offset(sc);
sc               2215 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus = mdiobus_alloc();
sc               2216 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (sc->mii_bus == NULL) {
sc               2221 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus->name = sbmac_mdio_string;
sc               2222 drivers/net/ethernet/broadcom/sb1250-mac.c 	snprintf(sc->mii_bus->id, MII_BUS_ID_SIZE, "%s-%x",
sc               2224 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus->priv = sc;
sc               2225 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus->read = sbmac_mii_read;
sc               2226 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus->write = sbmac_mii_write;
sc               2228 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->mii_bus->parent = &pldev->dev;
sc               2232 drivers/net/ethernet/broadcom/sb1250-mac.c 	err = mdiobus_register(sc->mii_bus);
sc               2238 drivers/net/ethernet/broadcom/sb1250-mac.c 	platform_set_drvdata(pldev, sc->mii_bus);
sc               2249 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (sc->rx_hw_checksum == ENABLE)
sc               2262 drivers/net/ethernet/broadcom/sb1250-mac.c 	mdiobus_unregister(sc->mii_bus);
sc               2264 drivers/net/ethernet/broadcom/sb1250-mac.c 	mdiobus_free(sc->mii_bus);
sc               2266 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_uninitctx(sc);
sc               2273 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2285 drivers/net/ethernet/broadcom/sb1250-mac.c 	__raw_readq(sc->sbm_isr);
sc               2293 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_speed = sbmac_speed_none;
sc               2294 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_duplex = sbmac_duplex_none;
sc               2295 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_fc = sbmac_fc_none;
sc               2296 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_pause = -1;
sc               2297 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_link = 0;
sc               2310 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_set_channel_state(sc,sbmac_state_on);
sc               2316 drivers/net/ethernet/broadcom/sb1250-mac.c 	phy_start(sc->phy_dev);
sc               2318 drivers/net/ethernet/broadcom/sb1250-mac.c 	napi_enable(&sc->napi);
sc               2330 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2333 drivers/net/ethernet/broadcom/sb1250-mac.c 	phy_dev = phy_find_first(sc->mii_bus);
sc               2352 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->phy_dev = phy_dev;
sc               2360 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2361 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct phy_device *phy_dev = sc->phy_dev;
sc               2366 drivers/net/ethernet/broadcom/sb1250-mac.c 	link_chg = (sc->sbm_link != phy_dev->link);
sc               2367 drivers/net/ethernet/broadcom/sb1250-mac.c 	speed_chg = (sc->sbm_speed != phy_dev->speed);
sc               2368 drivers/net/ethernet/broadcom/sb1250-mac.c 	duplex_chg = (sc->sbm_duplex != phy_dev->duplex);
sc               2369 drivers/net/ethernet/broadcom/sb1250-mac.c 	pause_chg = (sc->sbm_pause != phy_dev->pause);
sc               2376 drivers/net/ethernet/broadcom/sb1250-mac.c 			sc->sbm_link = phy_dev->link;
sc               2377 drivers/net/ethernet/broadcom/sb1250-mac.c 			sc->sbm_speed = sbmac_speed_none;
sc               2378 drivers/net/ethernet/broadcom/sb1250-mac.c 			sc->sbm_duplex = sbmac_duplex_none;
sc               2379 drivers/net/ethernet/broadcom/sb1250-mac.c 			sc->sbm_fc = sbmac_fc_disabled;
sc               2380 drivers/net/ethernet/broadcom/sb1250-mac.c 			sc->sbm_pause = -1;
sc               2393 drivers/net/ethernet/broadcom/sb1250-mac.c 	fc_chg = (sc->sbm_fc != fc);
sc               2398 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_irqsave(&sc->sbm_lock, flags);
sc               2400 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_speed = phy_dev->speed;
sc               2401 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_duplex = phy_dev->duplex;
sc               2402 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_fc = fc;
sc               2403 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_pause = phy_dev->pause;
sc               2404 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_link = phy_dev->link;
sc               2407 drivers/net/ethernet/broadcom/sb1250-mac.c 	    sc->sbm_state != sbmac_state_off) {
sc               2414 drivers/net/ethernet/broadcom/sb1250-mac.c 		sbmac_channel_stop(sc);
sc               2415 drivers/net/ethernet/broadcom/sb1250-mac.c 		sbmac_channel_start(sc);
sc               2418 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_unlock_irqrestore(&sc->sbm_lock, flags);
sc               2424 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2427 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_irqsave(&sc->sbm_lock, flags);
sc               2433 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_unlock_irqrestore(&sc->sbm_lock, flags);
sc               2444 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2446 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_lock_irqsave(&sc->sbm_lock, flags);
sc               2447 drivers/net/ethernet/broadcom/sb1250-mac.c 	if ((dev->flags ^ sc->sbm_devflags) & IFF_PROMISC) {
sc               2453 drivers/net/ethernet/broadcom/sb1250-mac.c 			sbmac_promiscuous_mode(sc,1);
sc               2456 drivers/net/ethernet/broadcom/sb1250-mac.c 			sbmac_promiscuous_mode(sc,0);
sc               2459 drivers/net/ethernet/broadcom/sb1250-mac.c 	spin_unlock_irqrestore(&sc->sbm_lock, flags);
sc               2465 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_setmulti(sc);
sc               2471 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2473 drivers/net/ethernet/broadcom/sb1250-mac.c 	if (!netif_running(dev) || !sc->phy_dev)
sc               2476 drivers/net/ethernet/broadcom/sb1250-mac.c 	return phy_mii_ioctl(sc->phy_dev, rq, cmd);
sc               2481 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2483 drivers/net/ethernet/broadcom/sb1250-mac.c 	napi_disable(&sc->napi);
sc               2485 drivers/net/ethernet/broadcom/sb1250-mac.c 	phy_stop(sc->phy_dev);
sc               2487 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_set_channel_state(sc, sbmac_state_off);
sc               2494 drivers/net/ethernet/broadcom/sb1250-mac.c 	phy_disconnect(sc->phy_dev);
sc               2495 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->phy_dev = NULL;
sc               2498 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbdma_emptyring(&(sc->sbm_txdma));
sc               2499 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbdma_emptyring(&(sc->sbm_rxdma));
sc               2506 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = container_of(napi, struct sbmac_softc, napi);
sc               2509 drivers/net/ethernet/broadcom/sb1250-mac.c 	work_done = sbdma_rx_process(sc, &(sc->sbm_rxdma), budget, 1);
sc               2510 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbdma_tx_process(sc, &(sc->sbm_txdma), 1);
sc               2518 drivers/net/ethernet/broadcom/sb1250-mac.c 			     sc->sbm_imr);
sc               2521 drivers/net/ethernet/broadcom/sb1250-mac.c 			     (M_MAC_INT_CHANNEL << S_MAC_RX_CH0), sc->sbm_imr);
sc               2532 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc;
sc               2573 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc = netdev_priv(dev);
sc               2574 drivers/net/ethernet/broadcom/sb1250-mac.c 	sc->sbm_base = sbm_base;
sc               2596 drivers/net/ethernet/broadcom/sb1250-mac.c 	struct sbmac_softc *sc = netdev_priv(dev);
sc               2599 drivers/net/ethernet/broadcom/sb1250-mac.c 	sbmac_uninitctx(sc);
sc               2600 drivers/net/ethernet/broadcom/sb1250-mac.c 	mdiobus_unregister(sc->mii_bus);
sc               2601 drivers/net/ethernet/broadcom/sb1250-mac.c 	mdiobus_free(sc->mii_bus);
sc               2602 drivers/net/ethernet/broadcom/sb1250-mac.c 	iounmap(sc->sbm_base);
sc                196 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc                207 drivers/net/ethernet/cavium/liquidio/lio_core.c 		sc = buf;
sc                208 drivers/net/ethernet/cavium/liquidio/lio_core.c 		skb = sc->callback_arg;
sc                223 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc                235 drivers/net/ethernet/cavium/liquidio/lio_core.c 		sc = buf;
sc                236 drivers/net/ethernet/cavium/liquidio/lio_core.c 		skb = sc->callback_arg;
sc               1208 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1212 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = (struct octeon_soft_command *)
sc               1214 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc) {
sc               1220 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ncmd = (union octnet_cmd *)sc->virtdptr;
sc               1222 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1223 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1231 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1233 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1236 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ret = octeon_send_soft_command(oct, sc);
sc               1239 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct, sc);
sc               1245 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ret = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1249 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (sc->sc_status) {
sc               1250 drivers/net/ethernet/cavium/liquidio/lio_core.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1257 drivers/net/ethernet/cavium/liquidio/lio_core.c 	WRITE_ONCE(sc->caller_is_done, true);
sc               1288 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc = (struct octeon_soft_command *)ptr;
sc               1290 drivers/net/ethernet/cavium/liquidio/lio_core.c 	    (struct oct_nic_stats_resp *)sc->virtrptr;
sc               1386 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1392 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = (struct octeon_soft_command *)
sc               1398 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc) {
sc               1404 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = (struct oct_nic_vf_stats_resp *)sc->virtrptr;
sc               1407 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1408 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1410 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1412 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct_dev, sc, OPCODE_NIC,
sc               1415 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct_dev, sc);
sc               1417 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct_dev, sc);
sc               1422 drivers/net/ethernet/cavium/liquidio/lio_core.c 		wait_for_sc_completion_timeout(oct_dev, sc,
sc               1430 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (sc->sc_status != OCTEON_REQUEST_TIMEOUT && !resp->status) {
sc               1440 drivers/net/ethernet/cavium/liquidio/lio_core.c 	WRITE_ONCE(sc->caller_is_done, 1);
sc               1451 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1468 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = (struct octeon_soft_command *)
sc               1474 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc) {
sc               1479 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = (struct oct_nic_stats_resp *)sc->virtrptr;
sc               1482 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1483 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1485 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1487 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct_dev, sc, OPCODE_NIC,
sc               1490 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct_dev, sc);
sc               1492 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct_dev, sc);
sc               1496 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = wait_for_sc_completion_timeout(oct_dev, sc,
sc               1503 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octnet_nic_stats_callback(oct_dev, sc->sc_status, sc);
sc               1504 drivers/net/ethernet/cavium/liquidio/lio_core.c 	WRITE_ONCE(sc->caller_is_done, true);
sc               1518 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1532 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = octeon_alloc_soft_command(oct, OCTNET_CMD_SIZE,
sc               1535 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc)
sc               1538 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ncmd = sc->virtdptr;
sc               1539 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = sc->virtrptr;
sc               1542 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1543 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1551 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1553 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1556 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct, sc);
sc               1559 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct, sc);
sc               1563 drivers/net/ethernet/cavium/liquidio/lio_core.c 		retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1572 drivers/net/ethernet/cavium/liquidio/lio_core.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               1585 drivers/net/ethernet/cavium/liquidio/lio_core.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1595 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1599 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = octeon_alloc_soft_command(oct, OCTNET_CMD_SIZE,
sc               1602 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc)
sc               1605 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ncmd = sc->virtdptr;
sc               1606 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = sc->virtrptr;
sc               1609 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1610 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1617 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1619 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1622 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct, sc);
sc               1625 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct, sc);
sc               1628 drivers/net/ethernet/cavium/liquidio/lio_core.c 		retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1658 drivers/net/ethernet/cavium/liquidio/lio_core.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1667 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1690 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = octeon_alloc_soft_command(oct, OCTNET_CMD_SIZE,
sc               1692 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc) {
sc               1698 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ncmd = sc->virtdptr;
sc               1699 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = sc->virtrptr;
sc               1702 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1703 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1712 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1714 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1717 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct, sc);
sc               1720 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct, sc);
sc               1724 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1741 drivers/net/ethernet/cavium/liquidio/lio_core.c 	WRITE_ONCE(sc->caller_is_done, true);
sc               1756 drivers/net/ethernet/cavium/liquidio/lio_core.c 	struct octeon_soft_command *sc;
sc               1764 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc = octeon_alloc_soft_command(oct, OCTNET_CMD_SIZE,
sc               1766 drivers/net/ethernet/cavium/liquidio/lio_core.c 	if (!sc)
sc               1769 drivers/net/ethernet/cavium/liquidio/lio_core.c 	ncmd = sc->virtdptr;
sc               1770 drivers/net/ethernet/cavium/liquidio/lio_core.c 	resp = sc->virtrptr;
sc               1773 drivers/net/ethernet/cavium/liquidio/lio_core.c 	init_completion(&sc->complete);
sc               1774 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1781 drivers/net/ethernet/cavium/liquidio/lio_core.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1783 drivers/net/ethernet/cavium/liquidio/lio_core.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1786 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = octeon_send_soft_command(oct, sc);
sc               1790 drivers/net/ethernet/cavium/liquidio/lio_core.c 		octeon_free_soft_command(oct, sc);
sc               1794 drivers/net/ethernet/cavium/liquidio/lio_core.c 	retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1805 drivers/net/ethernet/cavium/liquidio/lio_core.c 	WRITE_ONCE(sc->caller_is_done, true);
sc                764 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	struct octeon_soft_command *sc;
sc                769 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc = (struct octeon_soft_command *)
sc                774 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	if (!sc)
sc                777 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	mdio_cmd_rsp = (struct oct_mdio_cmd_resp *)sc->virtrptr;
sc                778 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	mdio_cmd = (struct oct_mdio_cmd *)sc->virtdptr;
sc                786 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc                788 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	octeon_prepare_soft_command(oct_dev, sc, OPCODE_NIC, OPCODE_NIC_MDIO45,
sc                791 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	init_completion(&sc->complete);
sc                792 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc                794 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = octeon_send_soft_command(oct_dev, sc);
sc                799 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		octeon_free_soft_command(oct_dev, sc);
sc                805 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		retval = wait_for_sc_completion_timeout(oct_dev, sc, 0);
sc                813 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 			WRITE_ONCE(sc->caller_is_done, true);
sc                823 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		WRITE_ONCE(sc->caller_is_done, true);
sc                990 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	struct octeon_soft_command *sc;
sc                999 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc = (struct octeon_soft_command *)
sc               1002 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	if (!sc) {
sc               1008 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	resp = (struct liquidio_if_cfg_resp *)sc->virtrptr;
sc               1009 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	vdata = (struct lio_version *)sc->virtdptr;
sc               1023 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->iq_no = 0;
sc               1024 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1028 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	init_completion(&sc->complete);
sc               1029 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1031 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = octeon_send_soft_command(oct, sc);
sc               1036 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		octeon_free_soft_command(oct, sc);
sc               1040 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1048 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1077 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	WRITE_ONCE(sc->caller_is_done, true);
sc               1998 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	struct octeon_soft_command *sc;
sc               2004 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc = (struct octeon_soft_command *)
sc               2009 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	if (!sc)
sc               2012 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	resp = (struct oct_intrmod_resp *)sc->virtrptr;
sc               2015 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               2017 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	octeon_prepare_soft_command(oct_dev, sc, OPCODE_NIC,
sc               2020 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	init_completion(&sc->complete);
sc               2021 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               2023 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = octeon_send_soft_command(oct_dev, sc);
sc               2025 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		octeon_free_soft_command(oct_dev, sc);
sc               2032 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = wait_for_sc_completion_timeout(oct_dev, sc, 0);
sc               2039 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               2046 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	WRITE_ONCE(sc->caller_is_done, true);
sc               2055 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	struct octeon_soft_command *sc;
sc               2061 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc = (struct octeon_soft_command *)
sc               2066 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	if (!sc)
sc               2069 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	cfg = (struct oct_intrmod_cfg *)sc->virtdptr;
sc               2074 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               2076 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	octeon_prepare_soft_command(oct_dev, sc, OPCODE_NIC,
sc               2079 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	init_completion(&sc->complete);
sc               2080 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               2082 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = octeon_send_soft_command(oct_dev, sc);
sc               2084 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		octeon_free_soft_command(oct_dev, sc);
sc               2091 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = wait_for_sc_completion_timeout(oct_dev, sc, 0);
sc               2095 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	retval = sc->sc_status;
sc               2101 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               2107 drivers/net/ethernet/cavium/liquidio/lio_ethtool.c 	WRITE_ONCE(sc->caller_is_done, true);
sc                646 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc                651 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = octeon_alloc_soft_command(oct, sizeof(struct lio_time), 16, 0);
sc                652 drivers/net/ethernet/cavium/liquidio/lio_main.c 	if (!sc) {
sc                658 drivers/net/ethernet/cavium/liquidio/lio_main.c 	lt = (struct lio_time *)sc->virtdptr;
sc                666 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc                667 drivers/net/ethernet/cavium/liquidio/lio_main.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc                670 drivers/net/ethernet/cavium/liquidio/lio_main.c 	init_completion(&sc->complete);
sc                671 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc                673 drivers/net/ethernet/cavium/liquidio/lio_main.c 	ret = octeon_send_soft_command(oct, sc);
sc                677 drivers/net/ethernet/cavium/liquidio/lio_main.c 		octeon_free_soft_command(oct, sc);
sc                679 drivers/net/ethernet/cavium/liquidio/lio_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1184 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               1192 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = (struct octeon_soft_command *)
sc               1195 drivers/net/ethernet/cavium/liquidio/lio_main.c 	if (!sc) {
sc               1201 drivers/net/ethernet/cavium/liquidio/lio_main.c 	ncmd = (union octnet_cmd *)sc->virtdptr;
sc               1209 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               1211 drivers/net/ethernet/cavium/liquidio/lio_main.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               1214 drivers/net/ethernet/cavium/liquidio/lio_main.c 	init_completion(&sc->complete);
sc               1215 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1217 drivers/net/ethernet/cavium/liquidio/lio_main.c 	retval = octeon_send_soft_command(oct, sc);
sc               1220 drivers/net/ethernet/cavium/liquidio/lio_main.c 		octeon_free_soft_command(oct, sc);
sc               1226 drivers/net/ethernet/cavium/liquidio/lio_main.c 		retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               1231 drivers/net/ethernet/cavium/liquidio/lio_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               1517 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               1524 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = (struct octeon_soft_command *)buf;
sc               1525 drivers/net/ethernet/cavium/liquidio/lio_main.c 	skb = (struct sk_buff *)sc->callback_arg;
sc               2221 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               2228 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = finfo->sc;
sc               2230 drivers/net/ethernet/cavium/liquidio/lio_main.c 	resp = (struct oct_timestamp_resp *)sc->virtrptr;
sc               2251 drivers/net/ethernet/cavium/liquidio/lio_main.c 	octeon_free_soft_command(oct, sc);
sc               2266 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               2273 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = octeon_alloc_soft_command_resp(oct, &ndata->cmd,
sc               2275 drivers/net/ethernet/cavium/liquidio/lio_main.c 	finfo->sc = sc;
sc               2277 drivers/net/ethernet/cavium/liquidio/lio_main.c 	if (!sc) {
sc               2287 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->callback = handle_timestamp;
sc               2288 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->callback_arg = finfo->skb;
sc               2289 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->iq_no = ndata->q_no;
sc               2293 drivers/net/ethernet/cavium/liquidio/lio_main.c 			    (&sc->cmd.cmd3.ih3))->dlengsz;
sc               2296 drivers/net/ethernet/cavium/liquidio/lio_main.c 			    (&sc->cmd.cmd2.ih2))->dlengsz;
sc               2300 drivers/net/ethernet/cavium/liquidio/lio_main.c 	retval = octeon_send_command(oct, sc->iq_no, ring_doorbell, &sc->cmd,
sc               2301 drivers/net/ethernet/cavium/liquidio/lio_main.c 				     sc, len, ndata->reqtype);
sc               2306 drivers/net/ethernet/cavium/liquidio/lio_main.c 		octeon_free_soft_command(oct, sc);
sc               2363 drivers/net/ethernet/cavium/liquidio/lio_main.c 	finfo->sc = NULL;
sc               3017 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               3020 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc = octeon_alloc_soft_command(oct, 0, 16, 0);
sc               3021 drivers/net/ethernet/cavium/liquidio/lio_main.c 	if (!sc)
sc               3024 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc               3027 drivers/net/ethernet/cavium/liquidio/lio_main.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc               3031 drivers/net/ethernet/cavium/liquidio/lio_main.c 	init_completion(&sc->complete);
sc               3032 drivers/net/ethernet/cavium/liquidio/lio_main.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc               3034 drivers/net/ethernet/cavium/liquidio/lio_main.c 	retval = octeon_send_soft_command(oct, sc);
sc               3036 drivers/net/ethernet/cavium/liquidio/lio_main.c 		octeon_free_soft_command(oct, sc);
sc               3040 drivers/net/ethernet/cavium/liquidio/lio_main.c 		retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc               3044 drivers/net/ethernet/cavium/liquidio/lio_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc               3354 drivers/net/ethernet/cavium/liquidio/lio_main.c 	struct octeon_soft_command *sc;
sc               3388 drivers/net/ethernet/cavium/liquidio/lio_main.c 		sc = (struct octeon_soft_command *)
sc               3391 drivers/net/ethernet/cavium/liquidio/lio_main.c 		resp = (struct liquidio_if_cfg_resp *)sc->virtrptr;
sc               3392 drivers/net/ethernet/cavium/liquidio/lio_main.c 		vdata = (struct lio_version *)sc->virtdptr;
sc               3428 drivers/net/ethernet/cavium/liquidio/lio_main.c 		sc->iq_no = 0;
sc               3430 drivers/net/ethernet/cavium/liquidio/lio_main.c 		octeon_prepare_soft_command(octeon_dev, sc, OPCODE_NIC,
sc               3434 drivers/net/ethernet/cavium/liquidio/lio_main.c 		init_completion(&sc->complete);
sc               3435 drivers/net/ethernet/cavium/liquidio/lio_main.c 		sc->sc_status = OCTEON_REQUEST_PENDING;
sc               3437 drivers/net/ethernet/cavium/liquidio/lio_main.c 		retval = octeon_send_soft_command(octeon_dev, sc);
sc               3443 drivers/net/ethernet/cavium/liquidio/lio_main.c 			octeon_free_soft_command(octeon_dev, sc);
sc               3450 drivers/net/ethernet/cavium/liquidio/lio_main.c 		retval = wait_for_sc_completion_timeout(octeon_dev, sc, 0);
sc               3457 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3472 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3500 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3520 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3535 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3543 drivers/net/ethernet/cavium/liquidio/lio_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               3571 drivers/net/ethernet/cavium/liquidio/lio_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc                604 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	struct octeon_soft_command *sc;
sc                611 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc = (struct octeon_soft_command *)
sc                615 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	ncmd = (union octnet_cmd *)sc->virtdptr;
sc                623 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc->iq_no = lio->linfo.txpciq[0].s.q_no;
sc                625 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc                628 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	init_completion(&sc->complete);
sc                629 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc                631 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	retval = octeon_send_soft_command(oct, sc);
sc                634 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		octeon_free_soft_command(oct, sc);
sc                639 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc                644 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc                864 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	struct octeon_soft_command *sc;
sc                870 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc = (struct octeon_soft_command *)buf;
sc                871 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	skb = (struct sk_buff *)sc->callback_arg;
sc               1312 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	struct octeon_soft_command *sc;
sc               1317 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc = finfo->sc;
sc               1319 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	resp = (struct oct_timestamp_resp *)sc->virtrptr;
sc               1340 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	octeon_free_soft_command(oct, sc);
sc               1354 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	struct octeon_soft_command *sc;
sc               1362 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc = octeon_alloc_soft_command_resp(oct, &ndata->cmd,
sc               1364 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	finfo->sc = sc;
sc               1366 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	if (!sc) {
sc               1376 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc->callback = handle_timestamp;
sc               1377 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc->callback_arg = finfo->skb;
sc               1378 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	sc->iq_no = ndata->q_no;
sc               1380 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	len = (u32)((struct octeon_instr_ih3 *)(&sc->cmd.cmd3.ih3))->dlengsz;
sc               1384 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	retval = octeon_send_command(oct, sc->iq_no, ring_doorbell, &sc->cmd,
sc               1385 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 				     sc, len, ndata->reqtype);
sc               1390 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		octeon_free_soft_command(oct, sc);
sc               1446 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	finfo->sc = NULL;
sc               1929 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 	struct octeon_soft_command *sc;
sc               1959 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		sc = (struct octeon_soft_command *)
sc               1962 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		resp = (struct liquidio_if_cfg_resp *)sc->virtrptr;
sc               1963 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		vdata = (struct lio_version *)sc->virtdptr;
sc               1976 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		sc->iq_no = 0;
sc               1978 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		octeon_prepare_soft_command(octeon_dev, sc, OPCODE_NIC,
sc               1982 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		init_completion(&sc->complete);
sc               1983 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		sc->sc_status = OCTEON_REQUEST_PENDING;
sc               1985 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		retval = octeon_send_soft_command(octeon_dev, sc);
sc               1990 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			octeon_free_soft_command(octeon_dev, sc);
sc               1997 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		retval = wait_for_sc_completion_timeout(octeon_dev, sc, 0);
sc               2005 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               2023 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               2035 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			WRITE_ONCE(sc->caller_is_done, true);
sc               2114 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		WRITE_ONCE(sc->caller_is_done, true);
sc                 60 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	struct octeon_soft_command *sc = NULL;
sc                 65 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc = (struct octeon_soft_command *)
sc                 68 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	if (!sc)
sc                 71 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	init_completion(&sc->complete);
sc                 72 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc                 74 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc_req = (struct lio_vf_rep_req *)sc->virtdptr;
sc                 77 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	rep_resp = (struct lio_vf_rep_resp *)sc->virtrptr;
sc                 81 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->iq_no = 0;
sc                 82 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC,
sc                 85 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	err = octeon_send_soft_command(oct, sc);
sc                 89 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	err = wait_for_sc_completion_timeout(oct, sc, 0);
sc                 99 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	WRITE_ONCE(sc->caller_is_done, true);
sc                103 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	octeon_free_soft_command(oct, sc);
sc                351 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	struct octeon_soft_command *sc = (struct octeon_soft_command *)buf;
sc                352 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	struct sk_buff *skb = sc->ctxptr;
sc                356 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	dma_unmap_single(&oct->pci_dev->dev, sc->dmadptr,
sc                357 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 			 sc->datasize, DMA_TO_DEVICE);
sc                359 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	iq_no = sc->iq_no;
sc                360 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	octeon_free_soft_command(oct, sc);
sc                376 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	struct octeon_soft_command *sc;
sc                392 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc = (struct octeon_soft_command *)
sc                394 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	if (!sc) {
sc                402 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 		octeon_free_soft_command(oct, sc);
sc                406 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->dmadptr = dma_map_single(&oct->pci_dev->dev,
sc                408 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	if (dma_mapping_error(&oct->pci_dev->dev, sc->dmadptr)) {
sc                410 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 		octeon_free_soft_command(oct, sc);
sc                414 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->virtdptr = skb->data;
sc                415 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->datasize = skb->len;
sc                416 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->ctxptr = skb;
sc                417 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->iq_no = parent_lio->txq;
sc                419 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC, OPCODE_NIC_VF_REP_PKT,
sc                421 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	pki_ih3 = (struct octeon_instr_pki_ih3 *)&sc->cmd.cmd3.pki_ih3;
sc                424 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->callback = lio_vf_rep_packet_sent_callback;
sc                425 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	sc->callback_arg = sc;
sc                427 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 	status = octeon_send_soft_command(oct, sc);
sc                429 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 		dma_unmap_single(&oct->pci_dev->dev, sc->dmadptr,
sc                430 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 				 sc->datasize, DMA_TO_DEVICE);
sc                431 drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c 		octeon_free_soft_command(oct, sc);
sc                336 drivers/net/ethernet/cavium/liquidio/octeon_iq.h 			      struct octeon_soft_command *sc);
sc                382 drivers/net/ethernet/cavium/liquidio/octeon_iq.h 			      struct octeon_soft_command *sc);
sc                385 drivers/net/ethernet/cavium/liquidio/octeon_iq.h 				 struct octeon_soft_command *sc,
sc                391 drivers/net/ethernet/cavium/liquidio/octeon_iq.h 			     struct octeon_soft_command *sc);
sc                 62 drivers/net/ethernet/cavium/liquidio/octeon_main.h 	struct octeon_soft_command *sc;
sc                185 drivers/net/ethernet/cavium/liquidio/octeon_main.h 			       struct octeon_soft_command *sc,
sc                197 drivers/net/ethernet/cavium/liquidio/octeon_main.h 		wait_for_completion_interruptible_timeout(&sc->complete,
sc                202 drivers/net/ethernet/cavium/liquidio/octeon_main.h 		WRITE_ONCE(sc->caller_is_done, true);
sc                207 drivers/net/ethernet/cavium/liquidio/octeon_main.h 		WRITE_ONCE(sc->caller_is_done, true);
sc                209 drivers/net/ethernet/cavium/liquidio/octeon_main.h 	} else  if (sc->sc_status == OCTEON_REQUEST_TIMEOUT) {
sc                212 drivers/net/ethernet/cavium/liquidio/octeon_main.h 		WRITE_ONCE(sc->caller_is_done, true);
sc                 34 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	struct octeon_soft_command *sc;
sc                 40 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc = (struct octeon_soft_command *)
sc                 43 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	if (!sc)
sc                 47 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	memcpy(&sc->cmd, cmd, sizeof(union octeon_instr_64B));
sc                 53 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		ih3      = (struct octeon_instr_ih3 *)&sc->cmd.cmd3.ih3;
sc                 54 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		rdp     = (struct octeon_instr_rdp *)&sc->cmd.cmd3.rdp;
sc                 55 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		irh     = (struct octeon_instr_irh *)&sc->cmd.cmd3.irh;
sc                 59 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		ih2      = (struct octeon_instr_ih2 *)&sc->cmd.cmd2.ih2;
sc                 60 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		rdp     = (struct octeon_instr_rdp *)&sc->cmd.cmd2.rdp;
sc                 61 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		irh     = (struct octeon_instr_irh *)&sc->cmd.cmd2.irh;
sc                 71 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	*sc->status_word = COMPLETION_WORD_INIT;
sc                 74 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		sc->cmd.cmd3.rptr =  sc->dmarptr;
sc                 76 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		sc->cmd.cmd2.rptr =  sc->dmarptr;
sc                 78 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc->expiry_time = jiffies + msecs_to_jiffies(LIO_SC_MAX_TMO_MS);
sc                 80 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	return sc;
sc                 98 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	struct octeon_soft_command *sc = NULL;
sc                108 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc = (struct octeon_soft_command *)
sc                111 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	if (!sc)
sc                114 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	data = (u8 *)sc->virtdptr;
sc                125 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc->iq_no = (u32)nctrl->iq_no;
sc                127 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	octeon_prepare_soft_command(oct, sc, OPCODE_NIC, OPCODE_NIC_CMD,
sc                130 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	init_completion(&sc->complete);
sc                131 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc->sc_status = OCTEON_REQUEST_PENDING;
sc                133 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	return sc;
sc                141 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	struct octeon_soft_command *sc = NULL;
sc                156 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	sc = octnic_alloc_ctrl_pkt_sc(oct, nctrl);
sc                157 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	if (!sc) {
sc                164 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	retval = octeon_send_soft_command(oct, sc);
sc                166 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 		octeon_free_soft_command(oct, sc);
sc                181 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 			WRITE_ONCE(sc->caller_is_done, true);
sc                186 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	retval = wait_for_sc_completion_timeout(oct, sc, 0);
sc                190 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	nctrl->sc_status = sc->sc_status;
sc                195 drivers/net/ethernet/cavium/liquidio/octeon_nic.c 	WRITE_ONCE(sc->caller_is_done, true);
sc                381 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc;
sc                402 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc = buf;
sc                413 drivers/net/ethernet/cavium/liquidio/request_manager.c 			list_add_tail(&sc->node, &oct->response_list
sc                585 drivers/net/ethernet/cavium/liquidio/request_manager.c 			    struct octeon_soft_command *sc,
sc                605 drivers/net/ethernet/cavium/liquidio/request_manager.c 		ih3 = (struct octeon_instr_ih3 *)&sc->cmd.cmd3.ih3;
sc                607 drivers/net/ethernet/cavium/liquidio/request_manager.c 		ih3->pkind = oct->instr_queue[sc->iq_no]->txpciq.s.pkind;
sc                609 drivers/net/ethernet/cavium/liquidio/request_manager.c 		pki_ih3 = (struct octeon_instr_pki_ih3 *)&sc->cmd.cmd3.pki_ih3;
sc                615 drivers/net/ethernet/cavium/liquidio/request_manager.c 			oct->instr_queue[sc->iq_no]->txpciq.s.use_qpg;
sc                620 drivers/net/ethernet/cavium/liquidio/request_manager.c 			oct->instr_queue[sc->iq_no]->txpciq.s.ctrl_qpg;
sc                625 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (sc->datasize)
sc                626 drivers/net/ethernet/cavium/liquidio/request_manager.c 			ih3->dlengsz = sc->datasize;
sc                628 drivers/net/ethernet/cavium/liquidio/request_manager.c 		irh            = (struct octeon_instr_irh *)&sc->cmd.cmd3.irh;
sc                634 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->cmd.cmd3.ossp[0] = ossp0;
sc                635 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->cmd.cmd3.ossp[1] = ossp1;
sc                637 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (sc->rdatasize) {
sc                638 drivers/net/ethernet/cavium/liquidio/request_manager.c 			rdp = (struct octeon_instr_rdp *)&sc->cmd.cmd3.rdp;
sc                640 drivers/net/ethernet/cavium/liquidio/request_manager.c 			rdp->rlen      = sc->rdatasize;
sc                654 drivers/net/ethernet/cavium/liquidio/request_manager.c 		ih2          = (struct octeon_instr_ih2 *)&sc->cmd.cmd2.ih2;
sc                660 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (sc->datasize) {
sc                661 drivers/net/ethernet/cavium/liquidio/request_manager.c 			ih2->dlengsz = sc->datasize;
sc                665 drivers/net/ethernet/cavium/liquidio/request_manager.c 		irh            = (struct octeon_instr_irh *)&sc->cmd.cmd2.irh;
sc                671 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->cmd.cmd2.ossp[0] = ossp0;
sc                672 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->cmd.cmd2.ossp[1] = ossp1;
sc                674 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (sc->rdatasize) {
sc                675 drivers/net/ethernet/cavium/liquidio/request_manager.c 			rdp = (struct octeon_instr_rdp *)&sc->cmd.cmd2.rdp;
sc                677 drivers/net/ethernet/cavium/liquidio/request_manager.c 			rdp->rlen      = sc->rdatasize;
sc                691 drivers/net/ethernet/cavium/liquidio/request_manager.c 			     struct octeon_soft_command *sc)
sc                699 drivers/net/ethernet/cavium/liquidio/request_manager.c 	iq = oct->instr_queue[sc->iq_no];
sc                702 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc->iq_no);
sc                703 drivers/net/ethernet/cavium/liquidio/request_manager.c 		INCR_INSTRQUEUE_PKT_COUNT(oct, sc->iq_no, instr_dropped, 1);
sc                708 drivers/net/ethernet/cavium/liquidio/request_manager.c 		ih3 =  (struct octeon_instr_ih3 *)&sc->cmd.cmd3.ih3;
sc                710 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->dmadptr);
sc                711 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc->cmd.cmd3.dptr = sc->dmadptr;
sc                713 drivers/net/ethernet/cavium/liquidio/request_manager.c 		irh = (struct octeon_instr_irh *)&sc->cmd.cmd3.irh;
sc                715 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->dmarptr);
sc                716 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->status_word);
sc                717 drivers/net/ethernet/cavium/liquidio/request_manager.c 			*sc->status_word = COMPLETION_WORD_INIT;
sc                718 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc->cmd.cmd3.rptr = sc->dmarptr;
sc                722 drivers/net/ethernet/cavium/liquidio/request_manager.c 		ih2 = (struct octeon_instr_ih2 *)&sc->cmd.cmd2.ih2;
sc                724 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->dmadptr);
sc                725 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc->cmd.cmd2.dptr = sc->dmadptr;
sc                727 drivers/net/ethernet/cavium/liquidio/request_manager.c 		irh = (struct octeon_instr_irh *)&sc->cmd.cmd2.irh;
sc                729 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->dmarptr);
sc                730 drivers/net/ethernet/cavium/liquidio/request_manager.c 			WARN_ON(!sc->status_word);
sc                731 drivers/net/ethernet/cavium/liquidio/request_manager.c 			*sc->status_word = COMPLETION_WORD_INIT;
sc                732 drivers/net/ethernet/cavium/liquidio/request_manager.c 			sc->cmd.cmd2.rptr = sc->dmarptr;
sc                737 drivers/net/ethernet/cavium/liquidio/request_manager.c 	sc->expiry_time = jiffies + msecs_to_jiffies(LIO_SC_MAX_TMO_MS);
sc                739 drivers/net/ethernet/cavium/liquidio/request_manager.c 	return (octeon_send_command(oct, sc->iq_no, 1, &sc->cmd, sc,
sc                747 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc;
sc                754 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc = (struct octeon_soft_command *)
sc                758 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (!sc) {
sc                763 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->dma_addr = dma_addr;
sc                764 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->size = SOFT_COMMAND_BUFFER_SIZE;
sc                766 drivers/net/ethernet/cavium/liquidio/request_manager.c 		list_add_tail(&sc->node, &oct->sc_buf_pool.head);
sc                775 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc;
sc                790 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc = list_entry(tmp, struct octeon_soft_command, node);
sc                792 drivers/net/ethernet/cavium/liquidio/request_manager.c 		if (READ_ONCE(sc->caller_is_done)) {
sc                793 drivers/net/ethernet/cavium/liquidio/request_manager.c 			list_del(&sc->node);
sc                796 drivers/net/ethernet/cavium/liquidio/request_manager.c 			if (*sc->status_word == COMPLETION_WORD_INIT) {
sc                798 drivers/net/ethernet/cavium/liquidio/request_manager.c 				list_add_tail(&sc->node, &zombie_sc_list->head);
sc                801 drivers/net/ethernet/cavium/liquidio/request_manager.c 				octeon_free_soft_command(oct, sc);
sc                814 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc;
sc                826 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc = list_entry(tmp, struct octeon_soft_command, node);
sc                827 drivers/net/ethernet/cavium/liquidio/request_manager.c 		octeon_free_soft_command(oct, sc);
sc                838 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc;
sc                847 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc = (struct octeon_soft_command *)tmp;
sc                849 drivers/net/ethernet/cavium/liquidio/request_manager.c 		lio_dma_free(oct, sc->size, sc, sc->dma_addr);
sc                867 drivers/net/ethernet/cavium/liquidio/request_manager.c 	struct octeon_soft_command *sc = NULL;
sc                892 drivers/net/ethernet/cavium/liquidio/request_manager.c 	sc = (struct octeon_soft_command *)tmp;
sc                894 drivers/net/ethernet/cavium/liquidio/request_manager.c 	dma_addr = sc->dma_addr;
sc                895 drivers/net/ethernet/cavium/liquidio/request_manager.c 	size = sc->size;
sc                897 drivers/net/ethernet/cavium/liquidio/request_manager.c 	memset(sc, 0, sc->size);
sc                899 drivers/net/ethernet/cavium/liquidio/request_manager.c 	sc->dma_addr = dma_addr;
sc                900 drivers/net/ethernet/cavium/liquidio/request_manager.c 	sc->size = size;
sc                903 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->ctxptr = (u8 *)sc + offset;
sc                904 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->ctxsize = ctxsize;
sc                911 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->virtdptr = (u8 *)sc + offset;
sc                912 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->dmadptr = dma_addr + offset;
sc                913 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->datasize = datasize;
sc                921 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->virtrptr = (u8 *)sc + offset;
sc                922 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->dmarptr = dma_addr + offset;
sc                923 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->rdatasize = rdatasize;
sc                924 drivers/net/ethernet/cavium/liquidio/request_manager.c 		sc->status_word = (u64 *)((u8 *)(sc->virtrptr) + rdatasize - 8);
sc                927 drivers/net/ethernet/cavium/liquidio/request_manager.c 	return sc;
sc                931 drivers/net/ethernet/cavium/liquidio/request_manager.c 			      struct octeon_soft_command *sc)
sc                935 drivers/net/ethernet/cavium/liquidio/request_manager.c 	list_add_tail(&sc->node, &oct->sc_buf_pool.head);
sc                 66 drivers/net/ethernet/cavium/liquidio/response_manager.c 	struct octeon_soft_command *sc;
sc                 84 drivers/net/ethernet/cavium/liquidio/response_manager.c 		sc = list_first_entry(&ordered_sc_list->head,
sc                 92 drivers/net/ethernet/cavium/liquidio/response_manager.c 		status64 = *sc->status_word;
sc                116 drivers/net/ethernet/cavium/liquidio/response_manager.c 		} else if (unlikely(force_quit) || (sc->expiry_time &&
sc                117 drivers/net/ethernet/cavium/liquidio/response_manager.c 			time_after(jiffies, (unsigned long)sc->expiry_time))) {
sc                119 drivers/net/ethernet/cavium/liquidio/response_manager.c 				(struct octeon_instr_irh *)&sc->cmd.cmd3.irh;
sc                125 drivers/net/ethernet/cavium/liquidio/response_manager.c 				sc->cmd.cmd3.ossp[0], sc->cmd.cmd3.ossp[1]);
sc                128 drivers/net/ethernet/cavium/liquidio/response_manager.c 				(long)jiffies, (long)sc->expiry_time);
sc                133 drivers/net/ethernet/cavium/liquidio/response_manager.c 			sc->sc_status = status;
sc                137 drivers/net/ethernet/cavium/liquidio/response_manager.c 			list_del(&sc->node);
sc                142 drivers/net/ethernet/cavium/liquidio/response_manager.c 			if (!sc->callback) {
sc                146 drivers/net/ethernet/cavium/liquidio/response_manager.c 				list_add_tail(&sc->node,
sc                150 drivers/net/ethernet/cavium/liquidio/response_manager.c 				if (unlikely(READ_ONCE(sc->caller_is_done))) {
sc                159 drivers/net/ethernet/cavium/liquidio/response_manager.c 						    &sc->cmd.cmd3.irh;
sc                168 drivers/net/ethernet/cavium/liquidio/response_manager.c 						    sc->cmd.cmd3.ossp[0]);
sc                172 drivers/net/ethernet/cavium/liquidio/response_manager.c 						    sc->cmd.cmd3.ossp[1],
sc                176 drivers/net/ethernet/cavium/liquidio/response_manager.c 					complete(&sc->complete);
sc                186 drivers/net/ethernet/cavium/liquidio/response_manager.c 					list_add_tail(&sc->node,
sc                194 drivers/net/ethernet/cavium/liquidio/response_manager.c 				sc->callback(octeon_dev, status,
sc                195 drivers/net/ethernet/cavium/liquidio/response_manager.c 					     sc->callback_arg);
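The liquidio entries above (lio_main.c, lio_vf_main.c, lio_vf_rep.c, octeon_nic.c, request_manager.c, response_manager.c) repeatedly trace one soft-command lifecycle: allocate, prepare, init_completion(&sc->complete), send, wait with a timeout, then WRITE_ONCE(sc->caller_is_done, true) so the reclaim path may free the buffer. Below is a minimal user-space sketch of that wait-then-hand-back pattern using POSIX threads rather than the driver's own API; struct soft_cmd, sc_wait() and firmware_side() are hypothetical stand-ins, not liquidio functions.

/*
 * Minimal sketch, assuming nothing about the real driver internals: a caller
 * submits a request, waits on a completion with a timeout, and always marks
 * caller_is_done before returning so the reclaim side may free the buffer.
 */
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

struct soft_cmd {			/* hypothetical stand-in for the driver's sc */
	pthread_mutex_t lock;
	pthread_cond_t  complete;	/* analogue of sc->complete */
	bool            done;
	bool            caller_is_done;	/* analogue of sc->caller_is_done */
	int             status;		/* analogue of sc->sc_status */
};

static void *firmware_side(void *arg)
{
	struct soft_cmd *sc = arg;

	/* Pretend the device processed the request and posted a response. */
	pthread_mutex_lock(&sc->lock);
	sc->status = 0;
	sc->done = true;
	pthread_cond_signal(&sc->complete);
	pthread_mutex_unlock(&sc->lock);
	return NULL;
}

/* Wait for the response, bounded by a timeout in seconds. */
static int sc_wait(struct soft_cmd *sc, int timeout_s)
{
	struct timespec ts;
	int err = 0;

	clock_gettime(CLOCK_REALTIME, &ts);
	ts.tv_sec += timeout_s;

	pthread_mutex_lock(&sc->lock);
	while (!sc->done && !err)
		err = pthread_cond_timedwait(&sc->complete, &sc->lock, &ts);
	/* Hand the buffer back whether we completed or timed out. */
	sc->caller_is_done = true;
	pthread_mutex_unlock(&sc->lock);

	return err ? -1 : sc->status;
}

int main(void)
{
	struct soft_cmd sc = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.complete = PTHREAD_COND_INITIALIZER,
	};
	pthread_t t;

	pthread_create(&t, NULL, firmware_side, &sc);
	printf("sc_wait returned %d\n", sc_wait(&sc, 2));
	pthread_join(&t, NULL);
	return 0;
}

The detail worth mirroring is that caller_is_done is set on both the success and the timeout path, so ownership of the buffer always ends up with the reclaim side, as in the WRITE_ONCE(sc->caller_is_done, true) call sites listed above.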
sc                 93 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct ulptx_idata *sc = (struct ulptx_idata *)(txpkt + 1);
sc                 97 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_IMM));
sc                 98 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->len = htonl(sizeof(*abort_req) - sizeof(struct work_request_hdr));
sc                108 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct ulptx_idata *sc = (struct ulptx_idata *)(txpkt + 1);
sc                112 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_IMM));
sc                113 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->len = htonl(sizeof(*abort_rpl) - sizeof(struct work_request_hdr));
sc                126 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	struct ulptx_idata *sc = (struct ulptx_idata *)(txpkt + 1);
sc                130 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_IMM));
sc                131 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->len = htonl(sizeof(*req) - sizeof(struct work_request_hdr));
sc                138 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc = (struct ulptx_idata *)(req + 1);
sc                139 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->cmd_more = htonl(ULPTX_CMD_V(ULP_TX_SC_NOOP));
sc                140 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c 	sc->len = htonl(0);
sc                139 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.init.sc = FW_PTP_SC_RXTIME_STAMP;
sc                160 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.init.sc = FW_PTP_SC_TX_TYPE;
sc                185 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.init.sc = FW_PTP_SC_RDRX_TYPE;
sc                216 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.ts.sc = FW_PTP_SC_ADJ_FREQ;
sc                248 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.ts.sc = FW_PTP_SC_ADJ_FTIME;
sc                287 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 		c.u.ts.sc = FW_PTP_SC_ADJ_TIME;
sc                350 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.ts.sc = FW_PTP_SC_SET_TIME;
sc                374 drivers/net/ethernet/chelsio/cxgb4/cxgb4_ptp.c 	c.u.scmd.sc = FW_PTP_SC_INIT_TIMER;
sc               10363 drivers/net/ethernet/chelsio/cxgb4/t4_hw.c 	cmd.u.params.sc = FW_SCHED_SC_PARAMS;
sc               3298 drivers/net/ethernet/chelsio/cxgb4/t4fw_api.h 			__u8   sc;
sc               3302 drivers/net/ethernet/chelsio/cxgb4/t4fw_api.h 			__u8   sc;
sc               3309 drivers/net/ethernet/chelsio/cxgb4/t4fw_api.h 			__u8   sc;
sc               3506 drivers/net/ethernet/chelsio/cxgb4/t4fw_api.h 			__u8   sc;
sc               3514 drivers/net/ethernet/chelsio/cxgb4/t4fw_api.h 			__u8   sc;
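The cxgb4_filter.c entries above build ULP_TX work requests by filling an immediate-data sub-command (cmd_more and len, both in network byte order) directly behind the work-request header, while cxgb4_ptp.c and t4_hw.c select firmware sub-commands through a small sc opcode field. The sketch below only illustrates the byte-order handling of the first pattern; struct idata and the opcode/shift constants are invented for illustration and are not the real ulptx_idata layout or T4 values.

/*
 * Illustrative only: struct idata and the constants below are invented; they
 * are not the real ulptx_idata definition or firmware opcode values.
 */
#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

struct idata {
	uint32_t cmd_more;	/* sub-command opcode, stored big endian */
	uint32_t len;		/* immediate data length, stored big endian */
};

#define SC_IMM_OPCODE	0x81u	/* made-up opcode value */
#define SC_CMD_SHIFT	24	/* made-up field position */

static void fill_idata(struct idata *sc, uint32_t payload_len)
{
	/* Both fields go out on the wire big endian, hence htonl(). */
	sc->cmd_more = htonl(SC_IMM_OPCODE << SC_CMD_SHIFT);
	sc->len = htonl(payload_len);
}

int main(void)
{
	struct idata sc;

	fill_idata(&sc, 40);
	printf("cmd_more=0x%08x len=%u\n",
	       (unsigned)ntohl(sc.cmd_more), (unsigned)ntohl(sc.len));
	return 0;
}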
sc                 94 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	u16 pkt_len, sc;
sc                106 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	while (((sc = CBDR_SC(bdp)) & BD_ENET_TX_READY) == 0 && tx_left) {
sc                117 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if (sc & (BD_ENET_TX_HB | BD_ENET_TX_LC |
sc                120 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_TX_HB)	/* No heartbeat */
sc                122 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_TX_LC)	/* Late collision */
sc                124 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_TX_RL)	/* Retrans limit */
sc                126 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_TX_UN)	/* Underrun */
sc                128 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_TX_CSL)	/* Carrier lost */
sc                131 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & (BD_ENET_TX_LC | BD_ENET_TX_RL | BD_ENET_TX_UN)) {
sc                138 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if (sc & BD_ENET_TX_READY) {
sc                147 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if (sc & BD_ENET_TX_DEF)
sc                169 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if ((sc & BD_ENET_TX_WRAP) == 0)
sc                199 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	while (((sc = CBDR_SC(bdp)) & BD_ENET_RX_EMPTY) == 0 &&
sc                207 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if ((sc & BD_ENET_RX_LAST) == 0)
sc                213 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if (sc & (BD_ENET_RX_LG | BD_ENET_RX_SH | BD_ENET_RX_CL |
sc                217 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & (BD_ENET_RX_LG | BD_ENET_RX_SH))
sc                220 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & (BD_ENET_RX_NO | BD_ENET_RX_CL))
sc                223 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_RX_CR)
sc                226 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 			if (sc & BD_ENET_RX_OV)
sc                287 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		CBDW_SC(bdp, (sc & ~BD_ENET_RX_STATS) | BD_ENET_RX_EMPTY);
sc                292 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		if ((sc & BD_ENET_RX_WRAP) == 0)
sc                490 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	u16 sc;
sc                584 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	sc = BD_ENET_TX_READY | BD_ENET_TX_INTR |
sc                591 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 		sc |= BD_ENET_TX_PAD;
sc                593 drivers/net/ethernet/freescale/fs_enet/fs_enet-main.c 	CBDS_SC(bdp, sc);
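The fs_enet-main.c entries above read the buffer-descriptor status/control word into sc and test individual BD_ENET_TX_*/BD_ENET_RX_* bits to update error counters. A self-contained sketch of that decode-and-count step follows; the bit positions and counter names are illustrative only, not the real BD_ENET_* values.

/*
 * Illustrative only: the TX_* bit positions and counter names are invented;
 * they are not the real BD_ENET_* definitions.
 */
#include <stdint.h>
#include <stdio.h>

#define TX_HB	0x0100u		/* no heartbeat */
#define TX_LC	0x0080u		/* late collision */
#define TX_RL	0x0040u		/* retransmission limit hit */
#define TX_UN	0x0002u		/* FIFO underrun */
#define TX_CSL	0x0001u		/* carrier lost */

struct tx_stats {
	unsigned heartbeat, window, aborted, fifo, carrier, errors;
};

/* Decode one descriptor status word and bump the matching counters. */
static void account_tx_status(uint16_t sc, struct tx_stats *st)
{
	if (!(sc & (TX_HB | TX_LC | TX_RL | TX_UN | TX_CSL)))
		return;		/* clean transmit, nothing to count */

	st->errors++;
	if (sc & TX_HB)
		st->heartbeat++;
	if (sc & TX_LC)
		st->window++;
	if (sc & TX_RL)
		st->aborted++;
	if (sc & TX_UN)
		st->fifo++;
	if (sc & TX_CSL)
		st->carrier++;
}

int main(void)
{
	struct tx_stats st = { 0 };

	account_tx_status(TX_LC | TX_UN, &st);
	printf("errors=%u window=%u fifo=%u\n", st.errors, st.window, st.fifo);
	return 0;
}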
sc                320 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	   enum shf_sc sc, u8 shift,
sc                342 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	if (sc == SHF_SC_L_SHF && shift)
sc                347 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		FIELD_PREP(OP_SHF_SC, sc) |
sc                364 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	 swreg lreg, enum shf_op op, swreg rreg, enum shf_sc sc, u8 shift)
sc                375 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	__emit_shf(nfp_prog, reg.dst, reg.dst_ab, sc, shift,
sc                382 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	       swreg lreg, enum shf_op op, swreg rreg, enum shf_sc sc)
sc                384 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	if (sc == SHF_SC_R_ROT) {
sc                390 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_shf(nfp_prog, dst, lreg, op, rreg, sc, 0);
sc                487 drivers/net/ethernet/netronome/nfp/bpf/jit.c __emit_ld_field(struct nfp_prog *nfp_prog, enum shf_sc sc,
sc                496 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		FIELD_PREP(OP_LDF_SC, sc) |
sc                512 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		  enum shf_sc sc, u8 shift, bool zero)
sc                524 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	__emit_ld_field(nfp_prog, sc, reg.areg, bmask, reg.breg, shift,
sc                531 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	      enum shf_sc sc, u8 shift)
sc                533 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_ld_field_any(nfp_prog, dst, bmask, src, sc, shift, false);
sc                696 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	enum shf_sc sc = offset ? SHF_SC_R_SHF : SHF_SC_NONE;
sc                699 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_ld_field_any(nfp_prog, dst, mask, src, sc, offset * 8, true);
sc                709 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	enum shf_sc sc = offset ? SHF_SC_L_SHF : SHF_SC_NONE;
sc                712 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_ld_field(nfp_prog, dst, mask, src, sc, 32 - offset * 8);
sc               1026 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	enum shf_sc sc;
sc               1056 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_NONE;
sc               1059 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_L_SHF;
sc               1061 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_R_SHF;
sc               1080 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_ld_field_any(nfp_prog, reg_both(dst), mask, reg, sc, shf, new_gpr);
sc               1095 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	enum shf_sc sc;
sc               1126 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_NONE;
sc               1129 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_L_SHF;
sc               1131 drivers/net/ethernet/netronome/nfp/bpf/jit.c 		sc = SHF_SC_R_SHF;
sc               1148 drivers/net/ethernet/netronome/nfp/bpf/jit.c 	emit_ld_field(nfp_prog, reg, mask, reg_b(src), sc, shf);
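In the nfp bpf/jit.c entries above, sc is an enum shf_sc shift class that gets packed into the emitted instruction word via FIELD_PREP(OP_SHF_SC, sc) alongside the shift amount. The sketch below shows the same pack-a-field-then-or-it-in idea with made-up field positions; OP_SHF_SC_SHIFT and the masks here are not the real NFP encoding.

/*
 * Illustrative only: the field positions and masks are invented and do not
 * match the real NFP instruction encoding.
 */
#include <stdint.h>
#include <stdio.h>

enum shf_sc {			/* shift classes, mirroring the enum named above */
	SHF_SC_NONE,
	SHF_SC_L_SHF,
	SHF_SC_R_SHF,
	SHF_SC_R_ROT,
};

#define OP_SHF_SC_SHIFT		8	/* made-up bit position of the sc field */
#define OP_SHF_SC_MASK		(0x3u << OP_SHF_SC_SHIFT)
#define OP_SHF_SHIFT_MASK	0x1fu	/* made-up 5-bit shift amount field */

/* Pack a shift class and shift amount into one instruction word. */
static uint32_t emit_shf_word(enum shf_sc sc, uint8_t shift)
{
	uint32_t insn = 0;

	insn |= ((uint32_t)sc << OP_SHF_SC_SHIFT) & OP_SHF_SC_MASK;
	insn |= shift & OP_SHF_SHIFT_MASK;
	return insn;
}

int main(void)
{
	printf("0x%08x\n", (unsigned)emit_shf_word(SHF_SC_L_SHF, 3));
	return 0;
}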
sc                533 drivers/net/fddi/skfp/pmf.c 	struct smt_p_setcount	*sc ;
sc                537 drivers/net/fddi/skfp/pmf.c 		sc = (struct smt_p_setcount *) pa ;
sc                538 drivers/net/fddi/skfp/pmf.c 		if ((smc->mib.fddiSMTSetCount.count != sc->count) ||
sc                540 drivers/net/fddi/skfp/pmf.c 			(char *)sc->timestamp,8))
sc                 64 drivers/net/macsec.c #define for_each_rxsc(secy, sc)				\
sc                 65 drivers/net/macsec.c 	for (sc = rcu_dereference_bh(secy->rx_sc);	\
sc                 66 drivers/net/macsec.c 	     sc;					\
sc                 67 drivers/net/macsec.c 	     sc = rcu_dereference_bh(sc->next))
sc                 68 drivers/net/macsec.c #define for_each_rxsc_rtnl(secy, sc)			\
sc                 69 drivers/net/macsec.c 	for (sc = rtnl_dereference(secy->rx_sc);	\
sc                 70 drivers/net/macsec.c 	     sc;					\
sc                 71 drivers/net/macsec.c 	     sc = rtnl_dereference(sc->next))
sc                150 drivers/net/macsec.c 	struct macsec_rx_sc *sc;
sc                328 drivers/net/macsec.c static struct macsec_rx_sc *macsec_rxsc_get(struct macsec_rx_sc *sc)
sc                330 drivers/net/macsec.c 	return refcount_inc_not_zero(&sc->refcnt) ? sc : NULL;
sc                333 drivers/net/macsec.c static void macsec_rxsc_put(struct macsec_rx_sc *sc)
sc                335 drivers/net/macsec.c 	if (refcount_dec_and_test(&sc->refcnt))
sc                336 drivers/net/macsec.c 		call_rcu(&sc->rcu_head, free_rx_sc_rcu);
sc                787 drivers/net/macsec.c 	struct pcpu_rx_sc_stats *rxsc_stats = this_cpu_ptr(rx_sa->sc->stats);
sc                890 drivers/net/macsec.c 	struct macsec_rx_sc *rx_sc = rx_sa->sc;
sc               1140 drivers/net/macsec.c 		struct macsec_rx_sc *sc = find_rx_sc(&macsec->secy, sci);
sc               1142 drivers/net/macsec.c 		sc = sc ? macsec_rxsc_get(sc) : NULL;
sc               1144 drivers/net/macsec.c 		if (sc) {
sc               1146 drivers/net/macsec.c 			rx_sc = sc;
sc               1723 drivers/net/macsec.c 	rx_sa->sc = rx_sc;
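The macsec.c entries above pair for_each_rxsc()/for_each_rxsc_rtnl() iteration with macsec_rxsc_get()/macsec_rxsc_put(), which only take a reference while the count is still non-zero and free the SC when the last reference drops. The C11 sketch below mirrors that get-if-not-zero/put-and-free discipline with plain atomics; it is an analogue, not the kernel refcount_t or RCU API.

/*
 * C11 analogue, not the kernel refcount_t API: take a reference only while
 * the count is still non-zero, free the object when the last reference drops.
 */
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct rx_sc {
	atomic_int refcnt;
};

static struct rx_sc *rx_sc_get(struct rx_sc *sc)
{
	int old = atomic_load(&sc->refcnt);

	/* Only pin the object if somebody else still holds it. */
	while (old != 0) {
		if (atomic_compare_exchange_weak(&sc->refcnt, &old, old + 1))
			return sc;
	}
	return NULL;
}

static void rx_sc_put(struct rx_sc *sc)
{
	if (atomic_fetch_sub(&sc->refcnt, 1) == 1)
		free(sc);	/* last reference gone: release the object */
}

int main(void)
{
	struct rx_sc *sc = malloc(sizeof(*sc));

	if (!sc)
		return 1;
	atomic_init(&sc->refcnt, 1);	/* creator's reference */
	if (rx_sc_get(sc))		/* lookup path pins it ... */
		rx_sc_put(sc);		/* ... and drops its reference */
	rx_sc_put(sc);			/* creator's reference: frees the SC */
	printf("done\n");
	return 0;
}

The compare-and-swap loop is what keeps a dying SC from being resurrected once its count has reached zero, which is the guarantee refcount_inc_not_zero() gives the lookup path in the entries above.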
sc                307 drivers/net/usb/cx82310_eth.c #define USB_DEVICE_CLASS(vend, prod, cl, sc, pr) \
sc                313 drivers/net/usb/cx82310_eth.c 	.bDeviceSubClass = (sc), \
sc                 11 drivers/net/wan/lmc/lmc.h unsigned lmc_mii_readreg(lmc_softc_t * const sc, unsigned
sc                 13 drivers/net/wan/lmc/lmc.h void lmc_mii_writereg(lmc_softc_t * const sc, unsigned devaddr,
sc                 19 drivers/net/wan/lmc/lmc.h void lmc_gpio_mkinput(lmc_softc_t * const sc, u32 bits);
sc                 20 drivers/net/wan/lmc/lmc.h void lmc_gpio_mkoutput(lmc_softc_t * const sc, u32 bits);
sc                 95 drivers/net/wan/lmc/lmc_main.c static void lmc_initcsrs(lmc_softc_t * const sc, lmc_csrptr_t csr_base, size_t csr_size);
sc                100 drivers/net/wan/lmc/lmc_main.c static void lmc_reset(lmc_softc_t * const sc);
sc                101 drivers/net/wan/lmc/lmc_main.c static void lmc_dec_reset(lmc_softc_t * const sc);
sc                110 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc                129 drivers/net/wan/lmc/lmc_main.c 	if (copy_to_user(ifr->ifr_data, &sc->ictl, sizeof(lmc_ctl_t)))
sc                151 drivers/net/wan/lmc/lmc_main.c 	spin_lock_irqsave(&sc->lmc_lock, flags);
sc                152 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media->set_status (sc, &ctl);
sc                154 drivers/net/wan/lmc/lmc_main.c         if(ctl.crc_length != sc->ictl.crc_length) {
sc                155 drivers/net/wan/lmc/lmc_main.c             sc->lmc_media->set_crc_length(sc, ctl.crc_length);
sc                156 drivers/net/wan/lmc/lmc_main.c 	    if (sc->ictl.crc_length == LMC_CTL_CRC_LENGTH_16)
sc                157 drivers/net/wan/lmc/lmc_main.c 		sc->TxDescriptControlInit |=  LMC_TDES_ADD_CRC_DISABLE;
sc                159 drivers/net/wan/lmc/lmc_main.c 		sc->TxDescriptControlInit &= ~LMC_TDES_ADD_CRC_DISABLE;
sc                161 drivers/net/wan/lmc/lmc_main.c 	spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                168 drivers/net/wan/lmc/lmc_main.c 	    u16 old_type = sc->if_type;
sc                188 drivers/net/wan/lmc/lmc_main.c 	    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                189 drivers/net/wan/lmc/lmc_main.c             lmc_proto_close(sc);
sc                191 drivers/net/wan/lmc/lmc_main.c             sc->if_type = new_type;
sc                192 drivers/net/wan/lmc/lmc_main.c             lmc_proto_attach(sc);
sc                193 drivers/net/wan/lmc/lmc_main.c 	    ret = lmc_proto_open(sc);
sc                194 drivers/net/wan/lmc/lmc_main.c 	    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                199 drivers/net/wan/lmc/lmc_main.c 	spin_lock_irqsave(&sc->lmc_lock, flags);
sc                200 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.Magic0 = 0xBEEFCAFE;
sc                202 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.PciCardType = sc->lmc_cardtype;
sc                203 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.PciSlotNumber = 0;
sc                204 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.DriverMajorVersion = DRIVER_MAJOR_VERSION;
sc                205 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.DriverMinorVersion = DRIVER_MINOR_VERSION;
sc                206 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.DriverSubVersion = DRIVER_SUB_VERSION;
sc                207 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.XilinxRevisionNumber =
sc                208 drivers/net/wan/lmc/lmc_main.c             lmc_mii_readreg (sc, 0, 3) & 0xf;
sc                209 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.MaxFrameSize = LMC_PKT_BUF_SZ;
sc                210 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.link_status = sc->lmc_media->get_link_status (sc);
sc                211 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.mii_reg16 = lmc_mii_readreg (sc, 0, 16);
sc                212 drivers/net/wan/lmc/lmc_main.c 	spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                214 drivers/net/wan/lmc/lmc_main.c         sc->lmc_xinfo.Magic1 = 0xDEADBEEF;
sc                216 drivers/net/wan/lmc/lmc_main.c         if (copy_to_user(ifr->ifr_data, &sc->lmc_xinfo,
sc                225 drivers/net/wan/lmc/lmc_main.c 	    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                226 drivers/net/wan/lmc/lmc_main.c 	    if (sc->lmc_cardtype == LMC_CARDTYPE_T1) {
sc                227 drivers/net/wan/lmc/lmc_main.c 		    lmc_mii_writereg(sc, 0, 17, T1FRAMER_FERR_LSB);
sc                228 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.framingBitErrorCount +=
sc                229 drivers/net/wan/lmc/lmc_main.c 			    lmc_mii_readreg(sc, 0, 18) & 0xff;
sc                230 drivers/net/wan/lmc/lmc_main.c 		    lmc_mii_writereg(sc, 0, 17, T1FRAMER_FERR_MSB);
sc                231 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.framingBitErrorCount +=
sc                232 drivers/net/wan/lmc/lmc_main.c 			    (lmc_mii_readreg(sc, 0, 18) & 0xff) << 8;
sc                233 drivers/net/wan/lmc/lmc_main.c 		    lmc_mii_writereg(sc, 0, 17, T1FRAMER_LCV_LSB);
sc                234 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.lineCodeViolationCount +=
sc                235 drivers/net/wan/lmc/lmc_main.c 			    lmc_mii_readreg(sc, 0, 18) & 0xff;
sc                236 drivers/net/wan/lmc/lmc_main.c 		    lmc_mii_writereg(sc, 0, 17, T1FRAMER_LCV_MSB);
sc                237 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.lineCodeViolationCount +=
sc                238 drivers/net/wan/lmc/lmc_main.c 			    (lmc_mii_readreg(sc, 0, 18) & 0xff) << 8;
sc                239 drivers/net/wan/lmc/lmc_main.c 		    lmc_mii_writereg(sc, 0, 17, T1FRAMER_AERR);
sc                240 drivers/net/wan/lmc/lmc_main.c 		    regVal = lmc_mii_readreg(sc, 0, 18) & 0xff;
sc                242 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.lossOfFrameCount +=
sc                244 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.changeOfFrameAlignmentCount +=
sc                246 drivers/net/wan/lmc/lmc_main.c 		    sc->extra_stats.severelyErroredFrameCount +=
sc                249 drivers/net/wan/lmc/lmc_main.c 	    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                250 drivers/net/wan/lmc/lmc_main.c 	    if (copy_to_user(ifr->ifr_data, &sc->lmc_device->stats,
sc                251 drivers/net/wan/lmc/lmc_main.c 			     sizeof(sc->lmc_device->stats)) ||
sc                252 drivers/net/wan/lmc/lmc_main.c 		copy_to_user(ifr->ifr_data + sizeof(sc->lmc_device->stats),
sc                253 drivers/net/wan/lmc/lmc_main.c 			     &sc->extra_stats, sizeof(sc->extra_stats)))
sc                265 drivers/net/wan/lmc/lmc_main.c 	    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                266 drivers/net/wan/lmc/lmc_main.c 	    memset(&sc->lmc_device->stats, 0, sizeof(sc->lmc_device->stats));
sc                267 drivers/net/wan/lmc/lmc_main.c 	    memset(&sc->extra_stats, 0, sizeof(sc->extra_stats));
sc                268 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.check = STATCHECK;
sc                269 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.version_size = (DRIVER_VERSION << 16) +
sc                270 drivers/net/wan/lmc/lmc_main.c 		    sizeof(sc->lmc_device->stats) + sizeof(sc->extra_stats);
sc                271 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.lmc_cardtype = sc->lmc_cardtype;
sc                272 drivers/net/wan/lmc/lmc_main.c 	    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                291 drivers/net/wan/lmc/lmc_main.c 	spin_lock_irqsave(&sc->lmc_lock, flags);
sc                292 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media->set_circuit_type(sc, ctl.circuit_type);
sc                293 drivers/net/wan/lmc/lmc_main.c         sc->ictl.circuit_type = ctl.circuit_type;
sc                294 drivers/net/wan/lmc/lmc_main.c 	spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                305 drivers/net/wan/lmc/lmc_main.c 	spin_lock_irqsave(&sc->lmc_lock, flags);
sc                307 drivers/net/wan/lmc/lmc_main.c         printk (" REG16 before reset +%04x\n", lmc_mii_readreg (sc, 0, 16));
sc                309 drivers/net/wan/lmc/lmc_main.c         printk (" REG16 after reset +%04x\n", lmc_mii_readreg (sc, 0, 16));
sc                311 drivers/net/wan/lmc/lmc_main.c         LMC_EVENT_LOG(LMC_EVENT_FORCEDRESET, LMC_CSR_READ (sc, csr_status), lmc_mii_readreg (sc, 0, 16));
sc                312 drivers/net/wan/lmc/lmc_main.c 	spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                332 drivers/net/wan/lmc/lmc_main.c         if (sc->lmc_cardtype != LMC_CARDTYPE_T1){
sc                359 drivers/net/wan/lmc/lmc_main.c 		    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                360 drivers/net/wan/lmc/lmc_main.c                     mii = lmc_mii_readreg (sc, 0, 16);
sc                365 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                370 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkoutput(sc, LMC_GEP_RESET);
sc                378 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio &= ~LMC_GEP_RESET;
sc                379 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                387 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |= LMC_GEP_RESET;
sc                388 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                394 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                397 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_media->set_link_status (sc, 1);
sc                398 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_media->set_status (sc, NULL);
sc                404 drivers/net/wan/lmc/lmc_main.c                             lmc_led_on(sc, LMC_DS3_LED0);
sc                406 drivers/net/wan/lmc/lmc_main.c                             lmc_led_off(sc, LMC_DS3_LED0);
sc                407 drivers/net/wan/lmc/lmc_main.c                             lmc_led_on(sc, LMC_DS3_LED1);
sc                409 drivers/net/wan/lmc/lmc_main.c                             lmc_led_off(sc, LMC_DS3_LED1);
sc                410 drivers/net/wan/lmc/lmc_main.c                             lmc_led_on(sc, LMC_DS3_LED3);
sc                412 drivers/net/wan/lmc/lmc_main.c                             lmc_led_off(sc, LMC_DS3_LED3);
sc                413 drivers/net/wan/lmc/lmc_main.c                             lmc_led_on(sc, LMC_DS3_LED2);
sc                415 drivers/net/wan/lmc/lmc_main.c                             lmc_led_off(sc, LMC_DS3_LED2);
sc                418 drivers/net/wan/lmc/lmc_main.c 		    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                431 drivers/net/wan/lmc/lmc_main.c 		    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                432 drivers/net/wan/lmc/lmc_main.c                     mii = lmc_mii_readreg (sc, 0, 16);
sc                437 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                442 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkoutput(sc,  LMC_GEP_DP | LMC_GEP_RESET);
sc                450 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio &= ~(LMC_GEP_RESET | LMC_GEP_DP);
sc                451 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                459 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |= LMC_GEP_DP | LMC_GEP_RESET;
sc                460 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                465 drivers/net/wan/lmc/lmc_main.c                     while( (LMC_CSR_READ(sc, csr_gp) & LMC_GEP_INIT) == 0 &&
sc                473 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                474 drivers/net/wan/lmc/lmc_main.c 		    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                502 drivers/net/wan/lmc/lmc_main.c 		    spin_lock_irqsave(&sc->lmc_lock, flags);
sc                503 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                516 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio = 0x00;
sc                517 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio &= ~LMC_GEP_DP;
sc                518 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio &= ~LMC_GEP_RESET;
sc                519 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |=  LMC_GEP_MODE;
sc                520 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                522 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkoutput(sc, LMC_GEP_MODE | LMC_GEP_DP | LMC_GEP_RESET);
sc                537 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, LMC_GEP_DP | LMC_GEP_RESET);
sc                542 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio = 0x00;
sc                543 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |= LMC_GEP_MODE;
sc                544 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |= LMC_GEP_DATA;
sc                545 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_gpio |= LMC_GEP_CLK;
sc                546 drivers/net/wan/lmc/lmc_main.c                     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                548 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkoutput(sc, LMC_GEP_DATA | LMC_GEP_CLK | LMC_GEP_MODE );
sc                553 drivers/net/wan/lmc/lmc_main.c                     while( (LMC_CSR_READ(sc, csr_gp) & LMC_GEP_INIT) == 0 &&
sc                562 drivers/net/wan/lmc/lmc_main.c                             sc->lmc_gpio &= ~LMC_GEP_DATA; /* Data is 0 */
sc                565 drivers/net/wan/lmc/lmc_main.c                             sc->lmc_gpio |= LMC_GEP_DATA; /* Data is 1 */
sc                569 drivers/net/wan/lmc/lmc_main.c                             sc->lmc_gpio |= LMC_GEP_DATA; /* Assume it's 1 */
sc                571 drivers/net/wan/lmc/lmc_main.c                         sc->lmc_gpio &= ~LMC_GEP_CLK; /* Clock to zero */
sc                572 drivers/net/wan/lmc/lmc_main.c                         sc->lmc_gpio |= LMC_GEP_MODE;
sc                573 drivers/net/wan/lmc/lmc_main.c                         LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                576 drivers/net/wan/lmc/lmc_main.c                         sc->lmc_gpio |= LMC_GEP_CLK; /* Put the clack back to one */
sc                577 drivers/net/wan/lmc/lmc_main.c                         sc->lmc_gpio |= LMC_GEP_MODE;
sc                578 drivers/net/wan/lmc/lmc_main.c                         LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc                581 drivers/net/wan/lmc/lmc_main.c                     if((LMC_CSR_READ(sc, csr_gp) & LMC_GEP_INIT) == 0){
sc                584 drivers/net/wan/lmc/lmc_main.c                     else if((LMC_CSR_READ(sc, csr_gp) & LMC_GEP_DP) == 0){
sc                591 drivers/net/wan/lmc/lmc_main.c                     lmc_gpio_mkinput(sc, 0xff);
sc                593 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_miireg16 |= LMC_MII16_FIFO_RESET;
sc                594 drivers/net/wan/lmc/lmc_main.c                     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc                596 drivers/net/wan/lmc/lmc_main.c                     sc->lmc_miireg16 &= ~LMC_MII16_FIFO_RESET;
sc                597 drivers/net/wan/lmc/lmc_main.c                     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc                598 drivers/net/wan/lmc/lmc_main.c 		    spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                612 drivers/net/wan/lmc/lmc_main.c             sc->lmc_txfull = 0;
sc                618 drivers/net/wan/lmc/lmc_main.c         ret = lmc_proto_ioctl (sc, ifr, cmd);
sc                631 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = from_timer(sc, t, timer);
sc                632 drivers/net/wan/lmc/lmc_main.c     struct net_device *dev = sc->lmc_device;
sc                639 drivers/net/wan/lmc/lmc_main.c     spin_lock_irqsave(&sc->lmc_lock, flags);
sc                641 drivers/net/wan/lmc/lmc_main.c     if(sc->check != 0xBEAFCAFE){
sc                643 drivers/net/wan/lmc/lmc_main.c 	spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                652 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_15, 0x00000011);
sc                653 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode |= TULIP_CMD_TXRUN | TULIP_CMD_RXRUN;
sc                654 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, sc->lmc_cmdmode);
sc                656 drivers/net/wan/lmc/lmc_main.c     if (sc->lmc_ok == 0)
sc                659 drivers/net/wan/lmc/lmc_main.c     LMC_EVENT_LOG(LMC_EVENT_WATCHDOG, LMC_CSR_READ (sc, csr_status), lmc_mii_readreg (sc, 0, 16));
sc                664 drivers/net/wan/lmc/lmc_main.c     if (sc->lmc_taint_tx == sc->lastlmc_taint_tx &&
sc                665 drivers/net/wan/lmc/lmc_main.c 	sc->lmc_device->stats.tx_packets > sc->lasttx_packets &&
sc                666 drivers/net/wan/lmc/lmc_main.c 	sc->tx_TimeoutInd == 0)
sc                670 drivers/net/wan/lmc/lmc_main.c         sc->tx_TimeoutInd = 1;
sc                672 drivers/net/wan/lmc/lmc_main.c     else if (sc->lmc_taint_tx == sc->lastlmc_taint_tx &&
sc                673 drivers/net/wan/lmc/lmc_main.c 	     sc->lmc_device->stats.tx_packets > sc->lasttx_packets &&
sc                674 drivers/net/wan/lmc/lmc_main.c 	     sc->tx_TimeoutInd)
sc                677 drivers/net/wan/lmc/lmc_main.c         LMC_EVENT_LOG(LMC_EVENT_XMTINTTMO, LMC_CSR_READ (sc, csr_status), 0);
sc                679 drivers/net/wan/lmc/lmc_main.c         sc->tx_TimeoutDisplay = 1;
sc                680 drivers/net/wan/lmc/lmc_main.c 	sc->extra_stats.tx_TimeoutCnt++;
sc                687 drivers/net/wan/lmc/lmc_main.c         LMC_EVENT_LOG(LMC_EVENT_RESET1, LMC_CSR_READ (sc, csr_status), 0);
sc                695 drivers/net/wan/lmc/lmc_main.c         LMC_EVENT_LOG(LMC_EVENT_RESET2, lmc_mii_readreg (sc, 0, 16), lmc_mii_readreg (sc, 0, 17));
sc                698 drivers/net/wan/lmc/lmc_main.c         sc->tx_TimeoutInd = 0;
sc                699 drivers/net/wan/lmc/lmc_main.c         sc->lastlmc_taint_tx = sc->lmc_taint_tx;
sc                700 drivers/net/wan/lmc/lmc_main.c 	sc->lasttx_packets = sc->lmc_device->stats.tx_packets;
sc                702 drivers/net/wan/lmc/lmc_main.c         sc->tx_TimeoutInd = 0;
sc                703 drivers/net/wan/lmc/lmc_main.c         sc->lastlmc_taint_tx = sc->lmc_taint_tx;
sc                704 drivers/net/wan/lmc/lmc_main.c 	sc->lasttx_packets = sc->lmc_device->stats.tx_packets;
sc                710 drivers/net/wan/lmc/lmc_main.c     link_status = sc->lmc_media->get_link_status (sc);
sc                716 drivers/net/wan/lmc/lmc_main.c     if ((link_status == 0) && (sc->last_link_status != 0)) {
sc                718 drivers/net/wan/lmc/lmc_main.c         sc->last_link_status = 0;
sc                729 drivers/net/wan/lmc/lmc_main.c      if (link_status != 0 && sc->last_link_status == 0) {
sc                731 drivers/net/wan/lmc/lmc_main.c          sc->last_link_status = 1;
sc                738 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->watchdog(sc);
sc                744 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_rxpoll, 0);
sc                750 drivers/net/wan/lmc/lmc_main.c     if(sc->failed_ring == 1){
sc                755 drivers/net/wan/lmc/lmc_main.c         sc->failed_ring = 0;
sc                756 drivers/net/wan/lmc/lmc_main.c         lmc_softreset(sc);
sc                758 drivers/net/wan/lmc/lmc_main.c     if(sc->failed_recv_alloc == 1){
sc                764 drivers/net/wan/lmc/lmc_main.c         sc->failed_recv_alloc = 0;
sc                765 drivers/net/wan/lmc/lmc_main.c         lmc_softreset(sc);
sc                774 drivers/net/wan/lmc/lmc_main.c     ticks = LMC_CSR_READ (sc, csr_gp_timer);
sc                775 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_gp_timer, 0xffffffffUL);
sc                776 drivers/net/wan/lmc/lmc_main.c     sc->ictl.ticks = 0x0000ffff - (ticks & 0x0000ffff);
sc                781 drivers/net/wan/lmc/lmc_main.c     sc->timer.expires = jiffies + (HZ);
sc                782 drivers/net/wan/lmc/lmc_main.c     add_timer (&sc->timer);
sc                784 drivers/net/wan/lmc/lmc_main.c     spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                809 drivers/net/wan/lmc/lmc_main.c 	lmc_softc_t *sc;
sc                833 drivers/net/wan/lmc/lmc_main.c 	sc = devm_kzalloc(&pdev->dev, sizeof(lmc_softc_t), GFP_KERNEL);
sc                834 drivers/net/wan/lmc/lmc_main.c 	if (!sc)
sc                837 drivers/net/wan/lmc/lmc_main.c 	dev = alloc_hdlcdev(sc);
sc                850 drivers/net/wan/lmc/lmc_main.c 	sc->lmc_device = dev;
sc                851 drivers/net/wan/lmc/lmc_main.c 	sc->name = dev->name;
sc                852 drivers/net/wan/lmc/lmc_main.c 	sc->if_type = LMC_PPP;
sc                853 drivers/net/wan/lmc/lmc_main.c 	sc->check = 0xBEAFCAFE;
sc                863 drivers/net/wan/lmc/lmc_main.c 	lmc_proto_attach(sc);
sc                867 drivers/net/wan/lmc/lmc_main.c 	spin_lock_init(&sc->lmc_lock);
sc                880 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cardtype = LMC_CARDTYPE_UNKNOWN;
sc                881 drivers/net/wan/lmc/lmc_main.c     sc->lmc_timing = LMC_CTL_CLOCK_SOURCE_EXT;
sc                895 drivers/net/wan/lmc/lmc_main.c         sc->lmc_cardtype = LMC_CARDTYPE_HSSI;
sc                896 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media = &lmc_hssi_media;
sc                900 drivers/net/wan/lmc/lmc_main.c         sc->lmc_cardtype = LMC_CARDTYPE_DS3;
sc                901 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media = &lmc_ds3_media;
sc                905 drivers/net/wan/lmc/lmc_main.c         sc->lmc_cardtype = LMC_CARDTYPE_SSI;
sc                906 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media = &lmc_ssi_media;
sc                910 drivers/net/wan/lmc/lmc_main.c         sc->lmc_cardtype = LMC_CARDTYPE_T1;
sc                911 drivers/net/wan/lmc/lmc_main.c         sc->lmc_media = &lmc_t1_media;
sc                918 drivers/net/wan/lmc/lmc_main.c     lmc_initcsrs (sc, dev->base_addr, 8);
sc                920 drivers/net/wan/lmc/lmc_main.c     lmc_gpio_mkinput (sc, 0xff);
sc                921 drivers/net/wan/lmc/lmc_main.c     sc->lmc_gpio = 0;		/* drive no signals yet */
sc                923 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->defaults (sc);
sc                925 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_link_status (sc, LMC_LINK_UP);
sc                930 drivers/net/wan/lmc/lmc_main.c     AdapModelNum = (lmc_mii_readreg (sc, 0, 3) & 0x3f0) >> 4;
sc                947 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_gp_timer, 0xFFFFFFFFUL);
sc                949 drivers/net/wan/lmc/lmc_main.c     sc->board_idx = cards_found++;
sc                950 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.check = STATCHECK;
sc                951 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.version_size = (DRIVER_VERSION << 16) +
sc                952 drivers/net/wan/lmc/lmc_main.c 	    sizeof(sc->lmc_device->stats) + sizeof(sc->extra_stats);
sc                953 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.lmc_cardtype = sc->lmc_cardtype;
sc                955 drivers/net/wan/lmc/lmc_main.c     sc->lmc_ok = 0;
sc                956 drivers/net/wan/lmc/lmc_main.c     sc->last_link_status = 0;
sc                981 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc                986 drivers/net/wan/lmc/lmc_main.c     lmc_led_on(sc, LMC_DS3_LED0);
sc                988 drivers/net/wan/lmc/lmc_main.c     lmc_dec_reset(sc);
sc                989 drivers/net/wan/lmc/lmc_main.c     lmc_reset(sc);
sc                991 drivers/net/wan/lmc/lmc_main.c     LMC_EVENT_LOG(LMC_EVENT_RESET1, LMC_CSR_READ(sc, csr_status), 0);
sc                992 drivers/net/wan/lmc/lmc_main.c     LMC_EVENT_LOG(LMC_EVENT_RESET2, lmc_mii_readreg(sc, 0, 16),
sc                993 drivers/net/wan/lmc/lmc_main.c 		  lmc_mii_readreg(sc, 0, 17));
sc                995 drivers/net/wan/lmc/lmc_main.c     if (sc->lmc_ok){
sc               1000 drivers/net/wan/lmc/lmc_main.c     lmc_softreset (sc);
sc               1008 drivers/net/wan/lmc/lmc_main.c     sc->got_irq = 1;
sc               1011 drivers/net/wan/lmc/lmc_main.c     sc->lmc_miireg16 |= LMC_MII16_LED_ALL;
sc               1012 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_link_status (sc, LMC_LINK_UP);
sc               1017 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_status (sc, NULL);
sc               1021 drivers/net/wan/lmc/lmc_main.c     sc->TxDescriptControlInit = (
sc               1029 drivers/net/wan/lmc/lmc_main.c     if (sc->ictl.crc_length == LMC_CTL_CRC_LENGTH_16) {
sc               1031 drivers/net/wan/lmc/lmc_main.c         sc->TxDescriptControlInit |= LMC_TDES_ADD_CRC_DISABLE;
sc               1033 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_crc_length(sc, sc->ictl.crc_length);
sc               1038 drivers/net/wan/lmc/lmc_main.c     if ((err = lmc_proto_open(sc)) != 0)
sc               1042 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_tbusy0++;
sc               1047 drivers/net/wan/lmc/lmc_main.c     sc->lmc_intrmask = 0;
sc               1049 drivers/net/wan/lmc/lmc_main.c     sc->lmc_intrmask |= (TULIP_STS_NORMALINTR
sc               1059 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_intr, sc->lmc_intrmask);
sc               1061 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode |= TULIP_CMD_TXRUN;
sc               1062 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode |= TULIP_CMD_RXRUN;
sc               1063 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, sc->lmc_cmdmode);
sc               1065 drivers/net/wan/lmc/lmc_main.c     sc->lmc_ok = 1; /* Run watchdog */
sc               1071 drivers/net/wan/lmc/lmc_main.c     sc->last_link_status = 1;
sc               1077 drivers/net/wan/lmc/lmc_main.c     timer_setup(&sc->timer, lmc_watchdog, 0);
sc               1078 drivers/net/wan/lmc/lmc_main.c     sc->timer.expires = jiffies + HZ;
sc               1079 drivers/net/wan/lmc/lmc_main.c     add_timer (&sc->timer);
sc               1092 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1098 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_intr, 0x00000000);
sc               1100 drivers/net/wan/lmc/lmc_main.c     lmc_dec_reset (sc);
sc               1101 drivers/net/wan/lmc/lmc_main.c     lmc_reset (sc);
sc               1102 drivers/net/wan/lmc/lmc_main.c     lmc_softreset (sc);
sc               1104 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_link_status (sc, 1);
sc               1105 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_status (sc, NULL);
sc               1109 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txfull = 0;
sc               1110 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_tbusy0++;
sc               1112 drivers/net/wan/lmc/lmc_main.c     sc->lmc_intrmask = TULIP_DEFAULT_INTR_MASK;
sc               1113 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_intr, sc->lmc_intrmask);
sc               1115 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode |= (TULIP_CMD_TXRUN | TULIP_CMD_RXRUN);
sc               1116 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, sc->lmc_cmdmode);
sc               1129 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1133 drivers/net/wan/lmc/lmc_main.c     sc->lmc_ok = 0;
sc               1134 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->set_link_status (sc, 0);
sc               1135 drivers/net/wan/lmc/lmc_main.c     del_timer (&sc->timer);
sc               1136 drivers/net/wan/lmc/lmc_main.c     lmc_proto_close(sc);
sc               1148 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1157 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_tbusy1++;
sc               1161 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_intr, 0x00000000);
sc               1164 drivers/net/wan/lmc/lmc_main.c     csr6 = LMC_CSR_READ (sc, csr_command);
sc               1167 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, csr6);
sc               1169 drivers/net/wan/lmc/lmc_main.c     sc->lmc_device->stats.rx_missed_errors +=
sc               1170 drivers/net/wan/lmc/lmc_main.c 	    LMC_CSR_READ(sc, csr_missed_frames) & 0xffff;
sc               1173 drivers/net/wan/lmc/lmc_main.c     if(sc->got_irq == 1){
sc               1175 drivers/net/wan/lmc/lmc_main.c         sc->got_irq = 0;
sc               1181 drivers/net/wan/lmc/lmc_main.c         struct sk_buff *skb = sc->lmc_rxq[i];
sc               1182 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxq[i] = NULL;
sc               1183 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].status = 0;
sc               1184 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].length = 0;
sc               1185 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].buffer1 = 0xDEADBEEF;
sc               1188 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxq[i] = NULL;
sc               1193 drivers/net/wan/lmc/lmc_main.c         if (sc->lmc_txq[i] != NULL)
sc               1194 drivers/net/wan/lmc/lmc_main.c             dev_kfree_skb(sc->lmc_txq[i]);
sc               1195 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txq[i] = NULL;
sc               1198 drivers/net/wan/lmc/lmc_main.c     lmc_led_off (sc, LMC_MII16_LED_ALL);
sc               1201 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_tbusy0++;
sc               1214 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1225 drivers/net/wan/lmc/lmc_main.c     spin_lock(&sc->lmc_lock);
sc               1230 drivers/net/wan/lmc/lmc_main.c     csr = LMC_CSR_READ (sc, csr_status);
sc               1235 drivers/net/wan/lmc/lmc_main.c     if ( ! (csr & sc->lmc_intrmask)) {
sc               1242 drivers/net/wan/lmc/lmc_main.c     while (csr & sc->lmc_intrmask) {
sc               1248 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_status, csr);
sc               1277 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.tx_NoCompleteCnt = 0;
sc               1279 drivers/net/wan/lmc/lmc_main.c             badtx = sc->lmc_taint_tx;
sc               1282 drivers/net/wan/lmc/lmc_main.c             while ((badtx < sc->lmc_next_tx)) {
sc               1283 drivers/net/wan/lmc/lmc_main.c                 stat = sc->lmc_txring[i].status;
sc               1286 drivers/net/wan/lmc/lmc_main.c 						 sc->lmc_txring[i].length);
sc               1298 drivers/net/wan/lmc/lmc_main.c                 if (sc->lmc_txq[i] == NULL)
sc               1305 drivers/net/wan/lmc/lmc_main.c 			sc->lmc_device->stats.tx_errors++;
sc               1307 drivers/net/wan/lmc/lmc_main.c 				sc->lmc_device->stats.tx_aborted_errors++;
sc               1309 drivers/net/wan/lmc/lmc_main.c 				sc->lmc_device->stats.tx_carrier_errors++;
sc               1311 drivers/net/wan/lmc/lmc_main.c 				sc->lmc_device->stats.tx_window_errors++;
sc               1313 drivers/net/wan/lmc/lmc_main.c 				sc->lmc_device->stats.tx_fifo_errors++;
sc               1315 drivers/net/wan/lmc/lmc_main.c 			sc->lmc_device->stats.tx_bytes += sc->lmc_txring[i].length & 0x7ff;
sc               1317 drivers/net/wan/lmc/lmc_main.c 			sc->lmc_device->stats.tx_packets++;
sc               1320 drivers/net/wan/lmc/lmc_main.c 		dev_consume_skb_irq(sc->lmc_txq[i]);
sc               1321 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_txq[i] = NULL;
sc               1327 drivers/net/wan/lmc/lmc_main.c             if (sc->lmc_next_tx - badtx > LMC_TXDESCS)
sc               1333 drivers/net/wan/lmc/lmc_main.c             sc->lmc_txfull = 0;
sc               1335 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.tx_tbusy0++;
sc               1339 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.dirtyTx = badtx;
sc               1340 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.lmc_next_tx = sc->lmc_next_tx;
sc               1341 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.lmc_txfull = sc->lmc_txfull;
sc               1343 drivers/net/wan/lmc/lmc_main.c             sc->lmc_taint_tx = badtx;
sc               1367 drivers/net/wan/lmc/lmc_main.c             lmc_dec_reset (sc);
sc               1368 drivers/net/wan/lmc/lmc_main.c             lmc_reset (sc);
sc               1369 drivers/net/wan/lmc/lmc_main.c             LMC_EVENT_LOG(LMC_EVENT_RESET1, LMC_CSR_READ (sc, csr_status), 0);
sc               1371 drivers/net/wan/lmc/lmc_main.c                           lmc_mii_readreg (sc, 0, 16),
sc               1372 drivers/net/wan/lmc/lmc_main.c                           lmc_mii_readreg (sc, 0, 17));
sc               1384 drivers/net/wan/lmc/lmc_main.c         csr = LMC_CSR_READ (sc, csr_status);
sc               1390 drivers/net/wan/lmc/lmc_main.c     spin_unlock(&sc->lmc_lock);
sc               1399 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1406 drivers/net/wan/lmc/lmc_main.c     spin_lock_irqsave(&sc->lmc_lock, flags);
sc               1410 drivers/net/wan/lmc/lmc_main.c     entry = sc->lmc_next_tx % LMC_TXDESCS;
sc               1412 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txq[entry] = skb;
sc               1413 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txring[entry].buffer1 = virt_to_bus (skb->data);
sc               1419 drivers/net/wan/lmc/lmc_main.c     if (sc->lmc_next_tx - sc->lmc_taint_tx < LMC_TXDESCS / 2)
sc               1425 drivers/net/wan/lmc/lmc_main.c     else if (sc->lmc_next_tx - sc->lmc_taint_tx == LMC_TXDESCS / 2)
sc               1431 drivers/net/wan/lmc/lmc_main.c     else if (sc->lmc_next_tx - sc->lmc_taint_tx < LMC_TXDESCS - 1)
sc               1441 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txfull = 1;
sc               1447 drivers/net/wan/lmc/lmc_main.c     if (sc->lmc_next_tx - sc->lmc_taint_tx >= LMC_TXDESCS - 1)
sc               1449 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txfull = 1;
sc               1451 drivers/net/wan/lmc/lmc_main.c 	sc->extra_stats.tx_tbusy1++;
sc               1461 drivers/net/wan/lmc/lmc_main.c     flag = sc->lmc_txring[entry].length = (skb->len) | flag |
sc               1462 drivers/net/wan/lmc/lmc_main.c 						sc->TxDescriptControlInit;
sc               1468 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_NoCompleteCnt++;
sc               1469 drivers/net/wan/lmc/lmc_main.c     sc->lmc_next_tx++;
sc               1473 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txring[entry].status = 0x80000000;
sc               1476 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_txpoll, 0);
sc               1478 drivers/net/wan/lmc/lmc_main.c     spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc               1487 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1498 drivers/net/wan/lmc/lmc_main.c     lmc_led_on(sc, LMC_DS3_LED3);
sc               1502 drivers/net/wan/lmc/lmc_main.c     i = sc->lmc_next_rx % LMC_RXDESCS;
sc               1504 drivers/net/wan/lmc/lmc_main.c     while (((stat = sc->lmc_rxring[i].status) & LMC_RDES_OWN_BIT) != DESC_OWNED_BY_DC21X4)
sc               1511 drivers/net/wan/lmc/lmc_main.c 			sc->lmc_device->stats.rx_length_errors++;
sc               1517 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_errors++;
sc               1518 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_frame_errors++;
sc               1524 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_errors++;
sc               1525 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_crc_errors++;
sc               1530 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_length_errors++;
sc               1535 drivers/net/wan/lmc/lmc_main.c 	if (len < sc->lmc_crcSize + 2) {
sc               1536 drivers/net/wan/lmc/lmc_main.c 		sc->lmc_device->stats.rx_length_errors++;
sc               1537 drivers/net/wan/lmc/lmc_main.c 		sc->extra_stats.rx_SmallPktCnt++;
sc               1546 drivers/net/wan/lmc/lmc_main.c         len -= sc->lmc_crcSize;
sc               1548 drivers/net/wan/lmc/lmc_main.c         skb = sc->lmc_rxq[i];
sc               1558 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_rxq[i] = nsb;
sc               1560 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_rxring[i].buffer1 = virt_to_bus(skb_tail_pointer(nsb));
sc               1562 drivers/net/wan/lmc/lmc_main.c             sc->failed_recv_alloc = 1;
sc               1566 drivers/net/wan/lmc/lmc_main.c 	sc->lmc_device->stats.rx_packets++;
sc               1567 drivers/net/wan/lmc/lmc_main.c 	sc->lmc_device->stats.rx_bytes += len;
sc               1584 drivers/net/wan/lmc/lmc_main.c             sc->lmc_rxq[i] = NULL;
sc               1585 drivers/net/wan/lmc/lmc_main.c             sc->lmc_rxring[i].buffer1 = 0x0;
sc               1588 drivers/net/wan/lmc/lmc_main.c             skb->protocol = lmc_proto_type(sc, skb);
sc               1592 drivers/net/wan/lmc/lmc_main.c             lmc_proto_netif(sc, skb);
sc               1599 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_rxq[i] = nsb;
sc               1601 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_rxring[i].buffer1 = virt_to_bus(skb_tail_pointer(nsb));
sc               1613 drivers/net/wan/lmc/lmc_main.c 		sc->extra_stats.rx_BuffAllocErr++;
sc               1615 drivers/net/wan/lmc/lmc_main.c                 sc->failed_recv_alloc = 1;
sc               1626 drivers/net/wan/lmc/lmc_main.c             nsb->protocol = lmc_proto_type(sc, nsb);
sc               1630 drivers/net/wan/lmc/lmc_main.c             lmc_proto_netif(sc, nsb);
sc               1635 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].status = DESC_OWNED_BY_DC21X4;
sc               1637 drivers/net/wan/lmc/lmc_main.c         sc->lmc_next_rx++;
sc               1638 drivers/net/wan/lmc/lmc_main.c         i = sc->lmc_next_rx % LMC_RXDESCS;
sc               1654 drivers/net/wan/lmc/lmc_main.c     if (rxIntLoopCnt > sc->extra_stats.rxIntLoopCnt)
sc               1655 drivers/net/wan/lmc/lmc_main.c 	    sc->extra_stats.rxIntLoopCnt = rxIntLoopCnt; /* debug -baz */
sc               1662 drivers/net/wan/lmc/lmc_main.c             if ((sc->lmc_rxring[i].status & LMC_RDES_OWN_BIT)
sc               1673 drivers/net/wan/lmc/lmc_main.c     lmc_led_off(sc, LMC_DS3_LED3);
sc               1684 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               1689 drivers/net/wan/lmc/lmc_main.c     spin_lock_irqsave(&sc->lmc_lock, flags);
sc               1691 drivers/net/wan/lmc/lmc_main.c     sc->lmc_device->stats.rx_missed_errors += LMC_CSR_READ(sc, csr_missed_frames) & 0xffff;
sc               1693 drivers/net/wan/lmc/lmc_main.c     spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc               1697 drivers/net/wan/lmc/lmc_main.c     return &sc->lmc_device->stats;
sc               1709 drivers/net/wan/lmc/lmc_main.c unsigned lmc_mii_readreg (lmc_softc_t * const sc, unsigned devaddr, unsigned regno) /*fold00*/
sc               1715 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_readreg in");
sc               1717 drivers/net/wan/lmc/lmc_main.c     LMC_MII_SYNC (sc);
sc               1719 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_readreg: done sync");
sc               1725 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, dataval);
sc               1728 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, dataval | 0x10000);
sc               1733 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_readreg: done1");
sc               1737 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, 0x40000);
sc               1740 drivers/net/wan/lmc/lmc_main.c         retval = (retval << 1) | ((LMC_CSR_READ (sc, csr_9) & 0x80000) ? 1 : 0);
sc               1741 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, 0x40000 | 0x10000);
sc               1746 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_readreg out");
sc               1751 drivers/net/wan/lmc/lmc_main.c void lmc_mii_writereg (lmc_softc_t * const sc, unsigned devaddr, unsigned regno, unsigned data) /*fold00*/
sc               1756 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_writereg in");
sc               1758 drivers/net/wan/lmc/lmc_main.c     LMC_MII_SYNC (sc);
sc               1770 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, datav);
sc               1773 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, (datav | 0x10000));
sc               1782 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, 0x40000);
sc               1785 drivers/net/wan/lmc/lmc_main.c         LMC_CSR_WRITE (sc, csr_9, 0x50000);
sc               1791 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_mii_writereg out");
sc               1794 drivers/net/wan/lmc/lmc_main.c static void lmc_softreset (lmc_softc_t * const sc) /*fold00*/
sc               1798 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_softreset in");
sc               1801 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txfull = 0;
sc               1802 drivers/net/wan/lmc/lmc_main.c     sc->lmc_next_rx = 0;
sc               1803 drivers/net/wan/lmc/lmc_main.c     sc->lmc_next_tx = 0;
sc               1804 drivers/net/wan/lmc/lmc_main.c     sc->lmc_taint_rx = 0;
sc               1805 drivers/net/wan/lmc/lmc_main.c     sc->lmc_taint_tx = 0;
sc               1817 drivers/net/wan/lmc/lmc_main.c         if (sc->lmc_rxq[i] == NULL)
sc               1821 drivers/net/wan/lmc/lmc_main.c                 printk(KERN_WARNING "%s: Failed to allocate receiver ring, will try again\n", sc->name);
sc               1822 drivers/net/wan/lmc/lmc_main.c                 sc->failed_ring = 1;
sc               1826 drivers/net/wan/lmc/lmc_main.c                 sc->lmc_rxq[i] = skb;
sc               1831 drivers/net/wan/lmc/lmc_main.c             skb = sc->lmc_rxq[i];
sc               1834 drivers/net/wan/lmc/lmc_main.c         skb->dev = sc->lmc_device;
sc               1837 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].status = 0x80000000;
sc               1840 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].length = skb_tailroom(skb);
sc               1845 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].buffer1 = virt_to_bus (skb->data);
sc               1848 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i].buffer2 = virt_to_bus (&sc->lmc_rxring[i + 1]);
sc               1856 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i - 1].length |= 0x02000000; /* Set end of buffers flag */
sc               1857 drivers/net/wan/lmc/lmc_main.c         sc->lmc_rxring[i - 1].buffer2 = virt_to_bus(&sc->lmc_rxring[0]); /* Point back to the start */
sc               1859 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_rxlist, virt_to_bus (sc->lmc_rxring)); /* write base address */
sc               1864 drivers/net/wan/lmc/lmc_main.c         if (sc->lmc_txq[i] != NULL){		/* have buffer */
sc               1865 drivers/net/wan/lmc/lmc_main.c             dev_kfree_skb(sc->lmc_txq[i]);	/* free it */
sc               1866 drivers/net/wan/lmc/lmc_main.c 	    sc->lmc_device->stats.tx_dropped++;	/* We just dropped a packet */
sc               1868 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txq[i] = NULL;
sc               1869 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txring[i].status = 0x00000000;
sc               1870 drivers/net/wan/lmc/lmc_main.c         sc->lmc_txring[i].buffer2 = virt_to_bus (&sc->lmc_txring[i + 1]);
sc               1872 drivers/net/wan/lmc/lmc_main.c     sc->lmc_txring[i - 1].buffer2 = virt_to_bus (&sc->lmc_txring[0]);
sc               1873 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_txlist, virt_to_bus (sc->lmc_txring));
sc               1875 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_softreset out");
sc               1878 drivers/net/wan/lmc/lmc_main.c void lmc_gpio_mkinput(lmc_softc_t * const sc, u32 bits) /*fold00*/
sc               1880 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_gpio_mkinput in");
sc               1881 drivers/net/wan/lmc/lmc_main.c     sc->lmc_gpio_io &= ~bits;
sc               1882 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_gp, TULIP_GP_PINSET | (sc->lmc_gpio_io));
sc               1883 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_gpio_mkinput out");
sc               1886 drivers/net/wan/lmc/lmc_main.c void lmc_gpio_mkoutput(lmc_softc_t * const sc, u32 bits) /*fold00*/
sc               1888 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_gpio_mkoutput in");
sc               1889 drivers/net/wan/lmc/lmc_main.c     sc->lmc_gpio_io |= bits;
sc               1890 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_gp, TULIP_GP_PINSET | (sc->lmc_gpio_io));
sc               1891 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_gpio_mkoutput out");
sc               1894 drivers/net/wan/lmc/lmc_main.c void lmc_led_on(lmc_softc_t * const sc, u32 led) /*fold00*/
sc               1896 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_led_on in");
sc               1897 drivers/net/wan/lmc/lmc_main.c     if((~sc->lmc_miireg16) & led){ /* Already on! */
sc               1898 drivers/net/wan/lmc/lmc_main.c         lmc_trace(sc->lmc_device, "lmc_led_on aon out");
sc               1902 drivers/net/wan/lmc/lmc_main.c     sc->lmc_miireg16 &= ~led;
sc               1903 drivers/net/wan/lmc/lmc_main.c     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc               1904 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_led_on out");
sc               1907 drivers/net/wan/lmc/lmc_main.c void lmc_led_off(lmc_softc_t * const sc, u32 led) /*fold00*/
sc               1909 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_led_off in");
sc               1910 drivers/net/wan/lmc/lmc_main.c     if(sc->lmc_miireg16 & led){ /* Already set don't do anything */
sc               1911 drivers/net/wan/lmc/lmc_main.c         lmc_trace(sc->lmc_device, "lmc_led_off aoff out");
sc               1915 drivers/net/wan/lmc/lmc_main.c     sc->lmc_miireg16 |= led;
sc               1916 drivers/net/wan/lmc/lmc_main.c     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc               1917 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_led_off out");
sc               1920 drivers/net/wan/lmc/lmc_main.c static void lmc_reset(lmc_softc_t * const sc) /*fold00*/
sc               1922 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_reset in");
sc               1923 drivers/net/wan/lmc/lmc_main.c     sc->lmc_miireg16 |= LMC_MII16_FIFO_RESET;
sc               1924 drivers/net/wan/lmc/lmc_main.c     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc               1926 drivers/net/wan/lmc/lmc_main.c     sc->lmc_miireg16 &= ~LMC_MII16_FIFO_RESET;
sc               1927 drivers/net/wan/lmc/lmc_main.c     lmc_mii_writereg(sc, 0, 16, sc->lmc_miireg16);
sc               1932 drivers/net/wan/lmc/lmc_main.c     lmc_gpio_mkoutput(sc, LMC_GEP_RESET);
sc               1939 drivers/net/wan/lmc/lmc_main.c     sc->lmc_gpio &= ~(LMC_GEP_RESET);
sc               1940 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_gp, sc->lmc_gpio);
sc               1950 drivers/net/wan/lmc/lmc_main.c     lmc_gpio_mkinput(sc, LMC_GEP_RESET);
sc               1955 drivers/net/wan/lmc/lmc_main.c     sc->lmc_media->init(sc);
sc               1957 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.resetCount++;
sc               1958 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_reset out");
sc               1961 drivers/net/wan/lmc/lmc_main.c static void lmc_dec_reset(lmc_softc_t * const sc) /*fold00*/
sc               1964 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_dec_reset in");
sc               1969 drivers/net/wan/lmc/lmc_main.c     sc->lmc_intrmask = 0;
sc               1970 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_intr, sc->lmc_intrmask);
sc               1978 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_busmode, TULIP_BUSMODE_SWRESET);
sc               1981 drivers/net/wan/lmc/lmc_main.c     sc->lmc_busmode = LMC_CSR_READ(sc, csr_busmode);
sc               1982 drivers/net/wan/lmc/lmc_main.c     sc->lmc_busmode = 0x00100000;
sc               1983 drivers/net/wan/lmc/lmc_main.c     sc->lmc_busmode &= ~TULIP_BUSMODE_SWRESET;
sc               1984 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_busmode, sc->lmc_busmode);
sc               1986 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode = LMC_CSR_READ(sc, csr_command);
sc               1998 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode |= ( TULIP_CMD_PROMISCUOUS
sc               2006 drivers/net/wan/lmc/lmc_main.c     sc->lmc_cmdmode &= ~( TULIP_CMD_OPERMODE
sc               2012 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_command, sc->lmc_cmdmode);
sc               2017 drivers/net/wan/lmc/lmc_main.c     val = LMC_CSR_READ(sc, csr_sia_general);
sc               2019 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE(sc, csr_sia_general, val);
sc               2021 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_dec_reset out");
sc               2024 drivers/net/wan/lmc/lmc_main.c static void lmc_initcsrs(lmc_softc_t * const sc, lmc_csrptr_t csr_base, /*fold00*/
sc               2027 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_initcsrs in");
sc               2028 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_busmode	        = csr_base +  0 * csr_size;
sc               2029 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_txpoll		= csr_base +  1 * csr_size;
sc               2030 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_rxpoll		= csr_base +  2 * csr_size;
sc               2031 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_rxlist		= csr_base +  3 * csr_size;
sc               2032 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_txlist		= csr_base +  4 * csr_size;
sc               2033 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_status		= csr_base +  5 * csr_size;
sc               2034 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_command	        = csr_base +  6 * csr_size;
sc               2035 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_intr		= csr_base +  7 * csr_size;
sc               2036 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_missed_frames	= csr_base +  8 * csr_size;
sc               2037 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_9		        = csr_base +  9 * csr_size;
sc               2038 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_10		        = csr_base + 10 * csr_size;
sc               2039 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_11		        = csr_base + 11 * csr_size;
sc               2040 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_12		        = csr_base + 12 * csr_size;
sc               2041 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_13		        = csr_base + 13 * csr_size;
sc               2042 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_14		        = csr_base + 14 * csr_size;
sc               2043 drivers/net/wan/lmc/lmc_main.c     sc->lmc_csrs.csr_15		        = csr_base + 15 * csr_size;
sc               2044 drivers/net/wan/lmc/lmc_main.c     lmc_trace(sc->lmc_device, "lmc_initcsrs out");
sc               2049 drivers/net/wan/lmc/lmc_main.c     lmc_softc_t *sc = dev_to_sc(dev);
sc               2055 drivers/net/wan/lmc/lmc_main.c     spin_lock_irqsave(&sc->lmc_lock, flags);
sc               2059 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_tbusy_calls++;
sc               2071 drivers/net/wan/lmc/lmc_main.c                   LMC_CSR_READ (sc, csr_status),
sc               2072 drivers/net/wan/lmc/lmc_main.c 		  sc->extra_stats.tx_ProcTimeout);
sc               2076 drivers/net/wan/lmc/lmc_main.c     LMC_EVENT_LOG(LMC_EVENT_RESET1, LMC_CSR_READ (sc, csr_status), 0);
sc               2078 drivers/net/wan/lmc/lmc_main.c                   lmc_mii_readreg (sc, 0, 16),
sc               2079 drivers/net/wan/lmc/lmc_main.c                   lmc_mii_readreg (sc, 0, 17));
sc               2082 drivers/net/wan/lmc/lmc_main.c     csr6 = LMC_CSR_READ (sc, csr_command);
sc               2083 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, csr6 | 0x0002);
sc               2084 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_command, csr6 | 0x2002);
sc               2087 drivers/net/wan/lmc/lmc_main.c     LMC_CSR_WRITE (sc, csr_txpoll, 0);
sc               2089 drivers/net/wan/lmc/lmc_main.c     sc->lmc_device->stats.tx_errors++;
sc               2090 drivers/net/wan/lmc/lmc_main.c     sc->extra_stats.tx_ProcTimeout++; /* -baz */
sc               2096 drivers/net/wan/lmc/lmc_main.c     spin_unlock_irqrestore(&sc->lmc_lock, flags);
sc                156 drivers/net/wan/lmc/lmc_media.c lmc_dummy_set_1 (lmc_softc_t * const sc, int a)
sc                161 drivers/net/wan/lmc/lmc_media.c lmc_dummy_set2_1 (lmc_softc_t * const sc, lmc_ctl_t * a)
sc                170 drivers/net/wan/lmc/lmc_media.c lmc_hssi_init (lmc_softc_t * const sc)
sc                172 drivers/net/wan/lmc/lmc_media.c   sc->ictl.cardtype = LMC_CTL_CARDTYPE_LMC5200;
sc                174 drivers/net/wan/lmc/lmc_media.c   lmc_gpio_mkoutput (sc, LMC_GEP_HSSI_CLOCK);
sc                178 drivers/net/wan/lmc/lmc_media.c lmc_hssi_default (lmc_softc_t * const sc)
sc                180 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = LMC_MII16_LED_ALL;
sc                182 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_link_status (sc, LMC_LINK_DOWN);
sc                183 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_EXT);
sc                184 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_crc_length (sc, LMC_CTL_CRC_LENGTH_16);
sc                192 drivers/net/wan/lmc/lmc_media.c lmc_hssi_set_status (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc                196 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, sc->ictl.clock_source);
sc                197 drivers/net/wan/lmc/lmc_media.c       lmc_set_protocol (sc, NULL);
sc                205 drivers/net/wan/lmc/lmc_media.c   if (ctl->clock_source && !sc->ictl.clock_source)
sc                207 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_INT);
sc                208 drivers/net/wan/lmc/lmc_media.c       sc->lmc_timing = LMC_CTL_CLOCK_SOURCE_INT;
sc                210 drivers/net/wan/lmc/lmc_media.c   else if (!ctl->clock_source && sc->ictl.clock_source)
sc                212 drivers/net/wan/lmc/lmc_media.c       sc->lmc_timing = LMC_CTL_CLOCK_SOURCE_EXT;
sc                213 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_EXT);
sc                216 drivers/net/wan/lmc/lmc_media.c   lmc_set_protocol (sc, ctl);
sc                223 drivers/net/wan/lmc/lmc_media.c lmc_hssi_set_clock (lmc_softc_t * const sc, int ie)
sc                226 drivers/net/wan/lmc/lmc_media.c   old = sc->ictl.clock_source;
sc                229 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio |= LMC_GEP_HSSI_CLOCK;
sc                230 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                231 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_EXT;
sc                237 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio &= ~(LMC_GEP_HSSI_CLOCK);
sc                238 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                239 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_INT;
sc                250 drivers/net/wan/lmc/lmc_media.c lmc_hssi_get_link_status (lmc_softc_t * const sc)
sc                256 drivers/net/wan/lmc/lmc_media.c     return lmc_ssi_get_link_status(sc);
sc                260 drivers/net/wan/lmc/lmc_media.c lmc_hssi_set_link_status (lmc_softc_t * const sc, int state)
sc                263 drivers/net/wan/lmc/lmc_media.c     sc->lmc_miireg16 |= LMC_MII16_HSSI_TA;
sc                265 drivers/net/wan/lmc/lmc_media.c     sc->lmc_miireg16 &= ~LMC_MII16_HSSI_TA;
sc                267 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                274 drivers/net/wan/lmc/lmc_media.c lmc_hssi_set_crc_length (lmc_softc_t * const sc, int state)
sc                279 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_HSSI_CRC;
sc                280 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_32;
sc                285 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_HSSI_CRC;
sc                286 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_16;
sc                289 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                293 drivers/net/wan/lmc/lmc_media.c lmc_hssi_watchdog (lmc_softc_t * const sc)
sc                306 drivers/net/wan/lmc/lmc_media.c lmc_ds3_set_100ft (lmc_softc_t * const sc, int ie)
sc                310 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_DS3_ZERO;
sc                311 drivers/net/wan/lmc/lmc_media.c       sc->ictl.cable_length = LMC_CTL_CABLE_LENGTH_GT_100FT;
sc                315 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_DS3_ZERO;
sc                316 drivers/net/wan/lmc/lmc_media.c       sc->ictl.cable_length = LMC_CTL_CABLE_LENGTH_LT_100FT;
sc                318 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                322 drivers/net/wan/lmc/lmc_media.c lmc_ds3_default (lmc_softc_t * const sc)
sc                324 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = LMC_MII16_LED_ALL;
sc                326 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_link_status (sc, LMC_LINK_DOWN);
sc                327 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_cable_length (sc, LMC_CTL_CABLE_LENGTH_LT_100FT);
sc                328 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_scrambler (sc, LMC_CTL_OFF);
sc                329 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_crc_length (sc, LMC_CTL_CRC_LENGTH_16);
sc                337 drivers/net/wan/lmc/lmc_media.c lmc_ds3_set_status (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc                341 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_cable_length (sc, sc->ictl.cable_length);
sc                342 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_scrambler (sc, sc->ictl.scrambler_onoff);
sc                343 drivers/net/wan/lmc/lmc_media.c       lmc_set_protocol (sc, NULL);
sc                351 drivers/net/wan/lmc/lmc_media.c   if (ctl->cable_length && !sc->ictl.cable_length)
sc                352 drivers/net/wan/lmc/lmc_media.c     lmc_ds3_set_100ft (sc, LMC_CTL_CABLE_LENGTH_GT_100FT);
sc                353 drivers/net/wan/lmc/lmc_media.c   else if (!ctl->cable_length && sc->ictl.cable_length)
sc                354 drivers/net/wan/lmc/lmc_media.c     lmc_ds3_set_100ft (sc, LMC_CTL_CABLE_LENGTH_LT_100FT);
sc                359 drivers/net/wan/lmc/lmc_media.c   if (ctl->scrambler_onoff && !sc->ictl.scrambler_onoff)
sc                360 drivers/net/wan/lmc/lmc_media.c     lmc_ds3_set_scram (sc, LMC_CTL_ON);
sc                361 drivers/net/wan/lmc/lmc_media.c   else if (!ctl->scrambler_onoff && sc->ictl.scrambler_onoff)
sc                362 drivers/net/wan/lmc/lmc_media.c     lmc_ds3_set_scram (sc, LMC_CTL_OFF);
sc                364 drivers/net/wan/lmc/lmc_media.c   lmc_set_protocol (sc, ctl);
sc                368 drivers/net/wan/lmc/lmc_media.c lmc_ds3_init (lmc_softc_t * const sc)
sc                372 drivers/net/wan/lmc/lmc_media.c   sc->ictl.cardtype = LMC_CTL_CARDTYPE_LMC5245;
sc                377 drivers/net/wan/lmc/lmc_media.c       lmc_mii_writereg (sc, 0, 17, i);
sc                378 drivers/net/wan/lmc/lmc_media.c       lmc_mii_writereg (sc, 0, 18, 0);
sc                382 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 17, 1);
sc                383 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 18, 0x25);	/* ser, xtx */
sc                385 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 17, 5);
sc                386 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 18, 0x80);	/* emode */
sc                388 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 17, 14);
sc                389 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 18, 0x30);	/* rcgen, tcgen */
sc                394 drivers/net/wan/lmc/lmc_media.c       lmc_mii_writereg (sc, 0, 17, i);
sc                395 drivers/net/wan/lmc/lmc_media.c       lmc_mii_readreg (sc, 0, 18);
sc                403 drivers/net/wan/lmc/lmc_media.c lmc_ds3_set_scram (lmc_softc_t * const sc, int ie)
sc                407 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_DS3_SCRAM;
sc                408 drivers/net/wan/lmc/lmc_media.c       sc->ictl.scrambler_onoff = LMC_CTL_ON;
sc                412 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_DS3_SCRAM;
sc                413 drivers/net/wan/lmc/lmc_media.c       sc->ictl.scrambler_onoff = LMC_CTL_OFF;
sc                415 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                423 drivers/net/wan/lmc/lmc_media.c lmc_ds3_get_link_status (lmc_softc_t * const sc)
sc                428 drivers/net/wan/lmc/lmc_media.c     lmc_mii_writereg (sc, 0, 17, 7);
sc                429 drivers/net/wan/lmc/lmc_media.c     link_status = lmc_mii_readreg (sc, 0, 18);
sc                441 drivers/net/wan/lmc/lmc_media.c     lmc_led_on(sc, LMC_DS3_LED2);
sc                446 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[3] != 1){
sc                448 drivers/net/wan/lmc/lmc_media.c             lmc_mii_writereg (sc, 0, 17, 01); /* Turn on Xbit error as our cisco does */
sc                449 drivers/net/wan/lmc/lmc_media.c             r1 = lmc_mii_readreg (sc, 0, 18);
sc                451 drivers/net/wan/lmc/lmc_media.c             lmc_mii_writereg(sc, 0, 18, r1);
sc                452 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Red Alarm - Loss of Signal or Loss of Framing\n", sc->name);
sc                454 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED3);	/* turn on red LED */
sc                455 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[3] = 1;
sc                458 drivers/net/wan/lmc/lmc_media.c         lmc_led_off(sc, LMC_DS3_LED3);	/* turn off red LED */
sc                459 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[3] == 1){
sc                461 drivers/net/wan/lmc/lmc_media.c             lmc_mii_writereg (sc, 0, 17, 01); /* Turn off Xbit error */
sc                462 drivers/net/wan/lmc/lmc_media.c             r1 = lmc_mii_readreg (sc, 0, 18);
sc                464 drivers/net/wan/lmc/lmc_media.c             lmc_mii_writereg(sc, 0, 18, r1);
sc                466 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[3] = 0;
sc                469 drivers/net/wan/lmc/lmc_media.c     lmc_mii_writereg(sc, 0, 17, 0x10);
sc                470 drivers/net/wan/lmc/lmc_media.c     link_status_11 = lmc_mii_readreg(sc, 0, 18);
sc                474 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[0] != 1){
sc                475 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: AIS Alarm or XBit Error\n", sc->name);
sc                476 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Remote end has loss of signal or framing\n", sc->name);
sc                478 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED0);
sc                479 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[0] = 1;
sc                482 drivers/net/wan/lmc/lmc_media.c         lmc_led_off(sc, LMC_DS3_LED0);
sc                483 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[0] = 0;
sc                486 drivers/net/wan/lmc/lmc_media.c     lmc_mii_writereg (sc, 0, 17, 9);
sc                487 drivers/net/wan/lmc/lmc_media.c     link_status = lmc_mii_readreg (sc, 0, 18);
sc                491 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[1] != 1){
sc                492 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Blue Alarm - Receiving all 1's\n", sc->name);
sc                494 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED1);
sc                495 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[1] = 1;
sc                498 drivers/net/wan/lmc/lmc_media.c         lmc_led_off(sc, LMC_DS3_LED1);
sc                499 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[1] = 0;
sc                509 drivers/net/wan/lmc/lmc_media.c lmc_ds3_set_crc_length (lmc_softc_t * const sc, int state)
sc                514 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_DS3_CRC;
sc                515 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_32;
sc                520 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_DS3_CRC;
sc                521 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_16;
sc                524 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                528 drivers/net/wan/lmc/lmc_media.c lmc_ds3_watchdog (lmc_softc_t * const sc)
sc                538 drivers/net/wan/lmc/lmc_media.c static void lmc_ssi_init(lmc_softc_t * const sc)
sc                543 drivers/net/wan/lmc/lmc_media.c 	sc->ictl.cardtype = LMC_CTL_CARDTYPE_LMC1000;
sc                545 drivers/net/wan/lmc/lmc_media.c 	mii17 = lmc_mii_readreg(sc, 0, 17);
sc                548 drivers/net/wan/lmc/lmc_media.c 	sc->ictl.cable_type = cable;
sc                550 drivers/net/wan/lmc/lmc_media.c 	lmc_gpio_mkoutput(sc, LMC_GEP_SSI_TXCLOCK);
sc                554 drivers/net/wan/lmc/lmc_media.c lmc_ssi_default (lmc_softc_t * const sc)
sc                556 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = LMC_MII16_LED_ALL;
sc                561 drivers/net/wan/lmc/lmc_media.c   lmc_gpio_mkoutput (sc, LMC_GEP_SSI_TXCLOCK);
sc                563 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_link_status (sc, LMC_LINK_DOWN);
sc                564 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_EXT);
sc                565 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_speed (sc, NULL);
sc                566 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_crc_length (sc, LMC_CTL_CRC_LENGTH_16);
sc                574 drivers/net/wan/lmc/lmc_media.c lmc_ssi_set_status (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc                578 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, sc->ictl.clock_source);
sc                579 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_speed (sc, &sc->ictl);
sc                580 drivers/net/wan/lmc/lmc_media.c       lmc_set_protocol (sc, NULL);
sc                589 drivers/net/wan/lmc/lmc_media.c       && sc->ictl.clock_source == LMC_CTL_CLOCK_SOURCE_EXT)
sc                591 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_INT);
sc                592 drivers/net/wan/lmc/lmc_media.c       sc->lmc_timing = LMC_CTL_CLOCK_SOURCE_INT;
sc                595 drivers/net/wan/lmc/lmc_media.c 	   && sc->ictl.clock_source == LMC_CTL_CLOCK_SOURCE_INT)
sc                597 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_clock_source (sc, LMC_CTL_CLOCK_SOURCE_EXT);
sc                598 drivers/net/wan/lmc/lmc_media.c       sc->lmc_timing = LMC_CTL_CLOCK_SOURCE_EXT;
sc                601 drivers/net/wan/lmc/lmc_media.c   if (ctl->clock_rate != sc->ictl.clock_rate)
sc                602 drivers/net/wan/lmc/lmc_media.c     sc->lmc_media->set_speed (sc, ctl);
sc                604 drivers/net/wan/lmc/lmc_media.c   lmc_set_protocol (sc, ctl);
sc                611 drivers/net/wan/lmc/lmc_media.c lmc_ssi_set_clock (lmc_softc_t * const sc, int ie)
sc                617 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio &= ~(LMC_GEP_SSI_TXCLOCK);
sc                618 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                619 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_EXT;
sc                625 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio |= LMC_GEP_SSI_TXCLOCK;
sc                626 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                627 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_INT;
sc                634 drivers/net/wan/lmc/lmc_media.c lmc_ssi_set_speed (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc                636 drivers/net/wan/lmc/lmc_media.c   lmc_ctl_t *ictl = &sc->ictl;
sc                656 drivers/net/wan/lmc/lmc_media.c       write_av9110 (sc, av->n, av->m, av->v, av->x, av->r);
sc                668 drivers/net/wan/lmc/lmc_media.c   write_av9110 (sc, av->n, av->m, av->v, av->x, av->r);
sc                676 drivers/net/wan/lmc/lmc_media.c lmc_ssi_get_link_status (lmc_softc_t * const sc)
sc                703 drivers/net/wan/lmc/lmc_media.c   link_status = lmc_mii_readreg (sc, 0, 16);
sc                706 drivers/net/wan/lmc/lmc_media.c   ticks = LMC_CSR_READ (sc, csr_gp_timer);
sc                709 drivers/net/wan/lmc/lmc_media.c   lmc_led_on (sc, LMC_MII16_LED0);
sc                712 drivers/net/wan/lmc/lmc_media.c   if (sc->lmc_timing == LMC_CTL_CLOCK_SOURCE_INT) {
sc                713 drivers/net/wan/lmc/lmc_media.c       lmc_led_off(sc, LMC_MII16_LED3);
sc                717 drivers/net/wan/lmc/lmc_media.c       if (sc->last_led_err[3] != 1) {
sc                718 drivers/net/wan/lmc/lmc_media.c 	      sc->extra_stats.tx_lossOfClockCnt++;
sc                719 drivers/net/wan/lmc/lmc_media.c 	      printk(KERN_WARNING "%s: Lost Clock, Link Down\n", sc->name);
sc                721 drivers/net/wan/lmc/lmc_media.c       sc->last_led_err[3] = 1;
sc                722 drivers/net/wan/lmc/lmc_media.c       lmc_led_on (sc, LMC_MII16_LED3);	/* turn ON red LED */
sc                725 drivers/net/wan/lmc/lmc_media.c       if(sc->last_led_err[3] == 1)
sc                726 drivers/net/wan/lmc/lmc_media.c           printk(KERN_WARNING "%s: Clock Returned\n", sc->name);
sc                727 drivers/net/wan/lmc/lmc_media.c       sc->last_led_err[3] = 0;
sc                728 drivers/net/wan/lmc/lmc_media.c       lmc_led_off (sc, LMC_MII16_LED3);		/* turn OFF red LED */
sc                744 drivers/net/wan/lmc/lmc_media.c       if(sc->last_led_err[1] != 1)
sc                745 drivers/net/wan/lmc/lmc_media.c           printk(KERN_WARNING "%s: DSR not asserted\n", sc->name);
sc                746 drivers/net/wan/lmc/lmc_media.c       sc->last_led_err[1] = 1;
sc                747 drivers/net/wan/lmc/lmc_media.c       lmc_led_off(sc, LMC_MII16_LED1);
sc                750 drivers/net/wan/lmc/lmc_media.c       if(sc->last_led_err[1] != 0)
sc                751 drivers/net/wan/lmc/lmc_media.c           printk(KERN_WARNING "%s: DSR now asserted\n", sc->name);
sc                752 drivers/net/wan/lmc/lmc_media.c       sc->last_led_err[1] = 0;
sc                753 drivers/net/wan/lmc/lmc_media.c       lmc_led_on(sc, LMC_MII16_LED1);
sc                757 drivers/net/wan/lmc/lmc_media.c       lmc_led_on(sc, LMC_MII16_LED2); /* Over all good status? */
sc                764 drivers/net/wan/lmc/lmc_media.c lmc_ssi_set_link_status (lmc_softc_t * const sc, int state)
sc                768 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= (LMC_MII16_SSI_DTR | LMC_MII16_SSI_RTS);
sc                773 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~(LMC_MII16_SSI_DTR | LMC_MII16_SSI_RTS);
sc                777 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                785 drivers/net/wan/lmc/lmc_media.c lmc_ssi_set_crc_length (lmc_softc_t * const sc, int state)
sc                790 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_SSI_CRC;
sc                791 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_32;
sc                792 drivers/net/wan/lmc/lmc_media.c       sc->lmc_crcSize = LMC_CTL_CRC_BYTESIZE_4;
sc                798 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_SSI_CRC;
sc                799 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_16;
sc                800 drivers/net/wan/lmc/lmc_media.c       sc->lmc_crcSize = LMC_CTL_CRC_BYTESIZE_2;
sc                803 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc                810 drivers/net/wan/lmc/lmc_media.c write_av9110_bit (lmc_softc_t * sc, int c)
sc                815 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio &= ~(LMC_GEP_CLK);
sc                817 drivers/net/wan/lmc/lmc_media.c     sc->lmc_gpio |= LMC_GEP_DATA;
sc                819 drivers/net/wan/lmc/lmc_media.c     sc->lmc_gpio &= ~(LMC_GEP_DATA);
sc                820 drivers/net/wan/lmc/lmc_media.c   LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                825 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio |= LMC_GEP_CLK;
sc                826 drivers/net/wan/lmc/lmc_media.c   LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                831 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio &= ~(LMC_GEP_CLK);
sc                832 drivers/net/wan/lmc/lmc_media.c   LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                835 drivers/net/wan/lmc/lmc_media.c static void write_av9110(lmc_softc_t *sc, u32 n, u32 m, u32 v, u32 x, u32 r)
sc                841 drivers/net/wan/lmc/lmc_media.c 	  LMC_PRINTF_ARGS, sc->ictl.clock_rate, n, m, v, x, r);
sc                844 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio |= LMC_GEP_SSI_GENERATOR;
sc                845 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio &= ~(LMC_GEP_DATA | LMC_GEP_CLK);
sc                846 drivers/net/wan/lmc/lmc_media.c   LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                852 drivers/net/wan/lmc/lmc_media.c   lmc_gpio_mkoutput (sc, (LMC_GEP_DATA | LMC_GEP_CLK
sc                855 drivers/net/wan/lmc/lmc_media.c   sc->lmc_gpio &= ~(LMC_GEP_SSI_GENERATOR);
sc                856 drivers/net/wan/lmc/lmc_media.c   LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc                862 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, n >> i);
sc                864 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, m >> i);
sc                866 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, v >> i);
sc                868 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, x >> i);
sc                870 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, r >> i);
sc                872 drivers/net/wan/lmc/lmc_media.c     write_av9110_bit (sc, 0x17 >> i);
sc                877 drivers/net/wan/lmc/lmc_media.c   lmc_gpio_mkinput (sc,
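The write_av9110_bit()/write_av9110() lines above bit-bang a serial configuration word into the SSI clock generator over the GPIO CSR: the data bit is set up while the clock is low, then the clock is pulsed high and back low around each LMC_CSR_WRITE(). A minimal user-space sketch of the same shift-out pattern, assuming hypothetical gpio_write(), bit masks, and a 5-bit field width (none of these are the driver's real API):

    #include <stdint.h>
    #include <stdio.h>

    #define GEP_CLK  0x1              /* hypothetical clock bit */
    #define GEP_DATA 0x2              /* hypothetical data bit  */

    static unsigned gpio_shadow;      /* plays the role of sc->lmc_gpio */

    /* Stand-in for LMC_CSR_WRITE(sc, csr_gp, v). */
    static void gpio_write(unsigned v)
    {
            printf("gpio <= clk=%u data=%u\n", !!(v & GEP_CLK), !!(v & GEP_DATA));
    }

    /* Same shape as write_av9110_bit(): data valid while the clock is low,
     * then one clock pulse. Only bit 0 of 'c' is meaningful. */
    static void shift_bit(int c)
    {
            gpio_shadow &= ~GEP_CLK;
            if (c & 0x01)
                    gpio_shadow |= GEP_DATA;
            else
                    gpio_shadow &= ~GEP_DATA;
            gpio_write(gpio_shadow);

            gpio_shadow |= GEP_CLK;
            gpio_write(gpio_shadow);

            gpio_shadow &= ~GEP_CLK;
            gpio_write(gpio_shadow);
    }

    /* MSB-first shift of a 'bits'-wide field, mirroring the n/m/v/x/r loops. */
    static void shift_field(uint32_t val, int bits)
    {
            for (int i = bits - 1; i >= 0; i--)
                    shift_bit(val >> i);
    }

    int main(void)
    {
            shift_field(0x17, 5);     /* e.g. the trailing 0x17 control pattern */
            return 0;
    }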
sc                882 drivers/net/wan/lmc/lmc_media.c static void lmc_ssi_watchdog(lmc_softc_t * const sc)
sc                884 drivers/net/wan/lmc/lmc_media.c 	u16 mii17 = lmc_mii_readreg(sc, 0, 17);
sc                886 drivers/net/wan/lmc/lmc_media.c 		lmc_led_off(sc, LMC_MII16_LED2);
sc                888 drivers/net/wan/lmc/lmc_media.c 		lmc_led_on(sc, LMC_MII16_LED2);
sc                899 drivers/net/wan/lmc/lmc_media.c lmc_t1_write (lmc_softc_t * const sc, int a, int d)
sc                901 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 17, a);
sc                902 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 18, d);
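lmc_t1_write() above reaches the T1/E1 framer indirectly: the framer register address is written to MII register 17 and the value to MII register 18, which is why the register setup in lmc_t1_init() below is just a sequence of lmc_t1_write() calls. A small sketch of that address/data indirection, with mii_write() as a hypothetical stand-in for lmc_mii_writereg(sc, 0, reg, val):

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical MII write; the driver uses lmc_mii_writereg(sc, 0, reg, val). */
    static void mii_write(int phy, int reg, uint16_t val)
    {
            printf("mii phy %d reg %d <= 0x%04x\n", phy, reg, val);
    }

    /* Indirect framer access: register 17 carries the address, 18 the data. */
    static void t1_write(int addr, int data)
    {
            mii_write(0, 17, addr);
            mii_write(0, 18, data);
    }

    int main(void)
    {
            t1_write(0x01, 0x1B);     /* CR0 - primary control, as in lmc_t1_init() */
            return 0;
    }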
sc                916 drivers/net/wan/lmc/lmc_media.c lmc_t1_init (lmc_softc_t * const sc)
sc                921 drivers/net/wan/lmc/lmc_media.c   sc->ictl.cardtype = LMC_CTL_CARDTYPE_LMC1200;
sc                922 drivers/net/wan/lmc/lmc_media.c   mii16 = lmc_mii_readreg (sc, 0, 16);
sc                926 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, mii16 | LMC_MII16_T1_RST);
sc                927 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, mii16);
sc                930 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = mii16;
sc                931 drivers/net/wan/lmc/lmc_media.c   lmc_t1_set_circuit_type(sc, LMC_CTL_CIRCUIT_TYPE_T1);
sc                932 drivers/net/wan/lmc/lmc_media.c   mii16 = sc->lmc_miireg16;
sc                934 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x01, 0x1B);	/* CR0     - primary control             */
sc                935 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x02, 0x42);	/* JAT_CR  - jitter atten config         */
sc                936 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x14, 0x00);	/* LOOP    - loopback config             */
sc                937 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x15, 0x00);	/* DL3_TS  - external data link timeslot */
sc                938 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x18, 0xFF);	/* PIO     - programmable I/O            */
sc                939 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x19, 0x30);	/* POE     - programmable OE             */
sc                940 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x1A, 0x0F);	/* CMUX    - clock input mux             */
sc                941 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x20, 0x41);	/* LIU_CR  - RX LIU config               */
sc                942 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x22, 0x76);	/* RLIU_CR - RX LIU config               */
sc                943 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x40, 0x03);	/* RCR0    - RX config                   */
sc                944 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x45, 0x00);	/* RALM    - RX alarm config             */
sc                945 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x46, 0x05);	/* LATCH   - RX alarm/err/cntr latch     */
sc                946 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x68, 0x40);	/* TLIU_CR - TX LIU config               */
sc                947 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x70, 0x0D);	/* TCR0    - TX framer config            */
sc                948 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x71, 0x05);	/* TCR1    - TX config                   */
sc                949 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x72, 0x0B);	/* TFRM    - TX frame format             */
sc                950 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x73, 0x00);	/* TERROR  - TX error insert             */
sc                951 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x74, 0x00);	/* TMAN    - TX manual Sa/FEBE config    */
sc                952 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x75, 0x00);	/* TALM    - TX alarm signal config      */
sc                953 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x76, 0x00);	/* TPATT   - TX test pattern config      */
sc                954 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x77, 0x00);	/* TLB     - TX inband loopback config   */
sc                955 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x90, 0x05);	/* CLAD_CR - clock rate adapter config   */
sc                956 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0x91, 0x05);	/* CSEL    - clad freq sel               */
sc                957 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0xA6, 0x00);	/* DL1_CTL - DL1 control                 */
sc                958 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0xB1, 0x00);	/* DL2_CTL - DL2 control                 */
sc                959 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0xD0, 0x47);	/* SBI_CR  - sys bus iface config        */
sc                960 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0xD1, 0x70);	/* RSB_CR  - RX sys bus config           */
sc                961 drivers/net/wan/lmc/lmc_media.c   lmc_t1_write (sc, 0xD4, 0x30);	/* TSB_CR  - TX sys bus config           */
sc                964 drivers/net/wan/lmc/lmc_media.c       lmc_t1_write (sc, 0x0E0 + i, 0x00);	/* SBCn - sys bus per-channel ctl    */
sc                965 drivers/net/wan/lmc/lmc_media.c       lmc_t1_write (sc, 0x100 + i, 0x00);	/* TPCn - TX per-channel ctl         */
sc                966 drivers/net/wan/lmc/lmc_media.c       lmc_t1_write (sc, 0x180 + i, 0x00);	/* RPCn - RX per-channel ctl         */
sc                970 drivers/net/wan/lmc/lmc_media.c       lmc_t1_write (sc, 0x0E0 + i, 0x0D);	/* SBCn - sys bus per-channel ctl    */
sc                974 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, mii16);
sc                975 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = mii16;
sc                979 drivers/net/wan/lmc/lmc_media.c lmc_t1_default (lmc_softc_t * const sc)
sc                981 drivers/net/wan/lmc/lmc_media.c   sc->lmc_miireg16 = LMC_MII16_LED_ALL;
sc                982 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_link_status (sc, LMC_LINK_DOWN);
sc                983 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_circuit_type (sc, LMC_CTL_CIRCUIT_TYPE_T1);
sc                984 drivers/net/wan/lmc/lmc_media.c   sc->lmc_media->set_crc_length (sc, LMC_CTL_CRC_LENGTH_16);
sc                986 drivers/net/wan/lmc/lmc_media.c   sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_INT;
sc                991 drivers/net/wan/lmc/lmc_media.c lmc_t1_set_status (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc                995 drivers/net/wan/lmc/lmc_media.c       sc->lmc_media->set_circuit_type (sc, sc->ictl.circuit_type);
sc                996 drivers/net/wan/lmc/lmc_media.c       lmc_set_protocol (sc, NULL);
sc               1003 drivers/net/wan/lmc/lmc_media.c       && sc->ictl.circuit_type ==
sc               1004 drivers/net/wan/lmc/lmc_media.c       LMC_CTL_CIRCUIT_TYPE_E1) sc->lmc_media->set_circuit_type (sc,
sc               1007 drivers/net/wan/lmc/lmc_media.c 	   && sc->ictl.circuit_type == LMC_CTL_CIRCUIT_TYPE_T1)
sc               1008 drivers/net/wan/lmc/lmc_media.c     sc->lmc_media->set_circuit_type (sc, LMC_CTL_CIRCUIT_TYPE_T1);
sc               1009 drivers/net/wan/lmc/lmc_media.c   lmc_set_protocol (sc, ctl);
sc               1015 drivers/net/wan/lmc/lmc_media.c lmc_t1_get_link_status (lmc_softc_t * const sc)
sc               1029 drivers/net/wan/lmc/lmc_media.c     lmc_trace(sc->lmc_device, "lmc_t1_get_link_status in");
sc               1030 drivers/net/wan/lmc/lmc_media.c     lmc_led_on(sc, LMC_DS3_LED2);
sc               1032 drivers/net/wan/lmc/lmc_media.c     lmc_mii_writereg (sc, 0, 17, T1FRAMER_ALARM1_STATUS);
sc               1033 drivers/net/wan/lmc/lmc_media.c     link_status = lmc_mii_readreg (sc, 0, 18);
sc               1038 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[1] != 1){
sc               1039 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Receive AIS/Blue Alarm. Far end in RED alarm\n", sc->name);
sc               1041 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED1);
sc               1042 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[1] = 1;
sc               1045 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[1] != 0){
sc               1046 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: End AIS/Blue Alarm\n", sc->name);
sc               1048 drivers/net/wan/lmc/lmc_media.c         lmc_led_off (sc, LMC_DS3_LED1);
sc               1049 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[1] = 0;
sc               1064 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[0] != 1){
sc               1065 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Receive Yellow AIS Alarm\n", sc->name);
sc               1067 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED0);
sc               1068 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[0] = 1;
sc               1071 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[0] != 0){
sc               1072 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: End of Yellow AIS Alarm\n", sc->name);
sc               1074 drivers/net/wan/lmc/lmc_media.c         lmc_led_off(sc, LMC_DS3_LED0);
sc               1075 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[0] = 0;
sc               1084 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[3] != 1){
sc               1085 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Local Red Alarm: Loss of Framing\n", sc->name);
sc               1087 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED3);
sc               1088 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[3] = 1;
sc               1092 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[3] != 0){
sc               1093 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: End Red Alarm (LOF)\n", sc->name);
sc               1096 drivers/net/wan/lmc/lmc_media.c             lmc_led_off(sc, LMC_DS3_LED3);
sc               1097 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[3] = 0;
sc               1102 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[2] != 1){
sc               1103 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: Local Red Alarm: Loss of Signal\n", sc->name);
sc               1105 drivers/net/wan/lmc/lmc_media.c         lmc_led_on(sc, LMC_DS3_LED3);
sc               1106 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[2] = 1;
sc               1110 drivers/net/wan/lmc/lmc_media.c         if(sc->last_led_err[2] != 0){
sc               1111 drivers/net/wan/lmc/lmc_media.c             printk(KERN_WARNING "%s: End Red Alarm (LOS)\n", sc->name);
sc               1114 drivers/net/wan/lmc/lmc_media.c             lmc_led_off(sc, LMC_DS3_LED3);
sc               1115 drivers/net/wan/lmc/lmc_media.c         sc->last_led_err[2] = 0;
sc               1118 drivers/net/wan/lmc/lmc_media.c     sc->lmc_xinfo.t1_alarm1_status = link_status;
sc               1120 drivers/net/wan/lmc/lmc_media.c     lmc_mii_writereg (sc, 0, 17, T1FRAMER_ALARM2_STATUS);
sc               1121 drivers/net/wan/lmc/lmc_media.c     sc->lmc_xinfo.t1_alarm2_status = lmc_mii_readreg (sc, 0, 18);
sc               1124 drivers/net/wan/lmc/lmc_media.c     lmc_trace(sc->lmc_device, "lmc_t1_get_link_status out");
sc               1133 drivers/net/wan/lmc/lmc_media.c lmc_t1_set_circuit_type (lmc_softc_t * const sc, int ie)
sc               1136 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_T1_Z;
sc               1137 drivers/net/wan/lmc/lmc_media.c       sc->ictl.circuit_type = LMC_CTL_CIRCUIT_TYPE_T1;
sc               1138 drivers/net/wan/lmc/lmc_media.c       printk(KERN_INFO "%s: In T1 Mode\n", sc->name);
sc               1141 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 &= ~LMC_MII16_T1_Z;
sc               1142 drivers/net/wan/lmc/lmc_media.c       sc->ictl.circuit_type = LMC_CTL_CIRCUIT_TYPE_E1;
sc               1143 drivers/net/wan/lmc/lmc_media.c       printk(KERN_INFO "%s: In E1 Mode\n", sc->name);
sc               1146 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc               1153 drivers/net/wan/lmc/lmc_media.c lmc_t1_set_crc_length (lmc_softc_t * const sc, int state)
sc               1158 drivers/net/wan/lmc/lmc_media.c       sc->lmc_miireg16 |= LMC_MII16_T1_CRC;
sc               1159 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_32;
sc               1160 drivers/net/wan/lmc/lmc_media.c       sc->lmc_crcSize = LMC_CTL_CRC_BYTESIZE_4;
sc               1165 drivers/net/wan/lmc/lmc_media.c       /* 16 bit */ sc->lmc_miireg16 &= ~LMC_MII16_T1_CRC;
sc               1166 drivers/net/wan/lmc/lmc_media.c       sc->ictl.crc_length = LMC_CTL_CRC_LENGTH_16;
sc               1167 drivers/net/wan/lmc/lmc_media.c       sc->lmc_crcSize = LMC_CTL_CRC_BYTESIZE_2;
sc               1171 drivers/net/wan/lmc/lmc_media.c   lmc_mii_writereg (sc, 0, 16, sc->lmc_miireg16);
sc               1178 drivers/net/wan/lmc/lmc_media.c lmc_t1_set_clock (lmc_softc_t * const sc, int ie)
sc               1184 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio &= ~(LMC_GEP_SSI_TXCLOCK);
sc               1185 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc               1186 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_EXT;
sc               1192 drivers/net/wan/lmc/lmc_media.c       sc->lmc_gpio |= LMC_GEP_SSI_TXCLOCK;
sc               1193 drivers/net/wan/lmc/lmc_media.c       LMC_CSR_WRITE (sc, csr_gp, sc->lmc_gpio);
sc               1194 drivers/net/wan/lmc/lmc_media.c       sc->ictl.clock_source = LMC_CTL_CLOCK_SOURCE_INT;
sc               1201 drivers/net/wan/lmc/lmc_media.c lmc_t1_watchdog (lmc_softc_t * const sc)
sc               1206 drivers/net/wan/lmc/lmc_media.c lmc_set_protocol (lmc_softc_t * const sc, lmc_ctl_t * ctl)
sc               1209 drivers/net/wan/lmc/lmc_media.c 		sc->ictl.keepalive_onoff = LMC_CTL_ON;
sc                 48 drivers/net/wan/lmc/lmc_proto.c void lmc_proto_attach(lmc_softc_t *sc) /*FOLD00*/
sc                 50 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_attach in");
sc                 51 drivers/net/wan/lmc/lmc_proto.c     if (sc->if_type == LMC_NET) {
sc                 52 drivers/net/wan/lmc/lmc_proto.c             struct net_device *dev = sc->lmc_device;
sc                 60 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_attach out");
sc                 63 drivers/net/wan/lmc/lmc_proto.c int lmc_proto_ioctl(lmc_softc_t *sc, struct ifreq *ifr, int cmd)
sc                 65 drivers/net/wan/lmc/lmc_proto.c 	lmc_trace(sc->lmc_device, "lmc_proto_ioctl");
sc                 66 drivers/net/wan/lmc/lmc_proto.c 	if (sc->if_type == LMC_PPP)
sc                 67 drivers/net/wan/lmc/lmc_proto.c 		return hdlc_ioctl(sc->lmc_device, ifr, cmd);
sc                 71 drivers/net/wan/lmc/lmc_proto.c int lmc_proto_open(lmc_softc_t *sc)
sc                 75 drivers/net/wan/lmc/lmc_proto.c 	lmc_trace(sc->lmc_device, "lmc_proto_open in");
sc                 77 drivers/net/wan/lmc/lmc_proto.c 	if (sc->if_type == LMC_PPP) {
sc                 78 drivers/net/wan/lmc/lmc_proto.c 		ret = hdlc_open(sc->lmc_device);
sc                 81 drivers/net/wan/lmc/lmc_proto.c 			       sc->name, ret);
sc                 84 drivers/net/wan/lmc/lmc_proto.c 	lmc_trace(sc->lmc_device, "lmc_proto_open out");
sc                 88 drivers/net/wan/lmc/lmc_proto.c void lmc_proto_close(lmc_softc_t *sc)
sc                 90 drivers/net/wan/lmc/lmc_proto.c 	lmc_trace(sc->lmc_device, "lmc_proto_close in");
sc                 92 drivers/net/wan/lmc/lmc_proto.c 	if (sc->if_type == LMC_PPP)
sc                 93 drivers/net/wan/lmc/lmc_proto.c 		hdlc_close(sc->lmc_device);
sc                 95 drivers/net/wan/lmc/lmc_proto.c 	lmc_trace(sc->lmc_device, "lmc_proto_close out");
sc                 98 drivers/net/wan/lmc/lmc_proto.c __be16 lmc_proto_type(lmc_softc_t *sc, struct sk_buff *skb) /*FOLD00*/
sc                100 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_type in");
sc                101 drivers/net/wan/lmc/lmc_proto.c     switch(sc->if_type){
sc                103 drivers/net/wan/lmc/lmc_proto.c 	    return hdlc_type_trans(skb, sc->lmc_device);
sc                112 drivers/net/wan/lmc/lmc_proto.c         printk(KERN_WARNING "%s: No protocol set for this interface, assuming 802.2 (which is wrong!!)\n", sc->name);
sc                116 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_type out");
sc                120 drivers/net/wan/lmc/lmc_proto.c void lmc_proto_netif(lmc_softc_t *sc, struct sk_buff *skb) /*FOLD00*/
sc                122 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_netif in");
sc                123 drivers/net/wan/lmc/lmc_proto.c     switch(sc->if_type){
sc                132 drivers/net/wan/lmc/lmc_proto.c     lmc_trace(sc->lmc_device, "lmc_proto_netif out");
sc                  7 drivers/net/wan/lmc/lmc_proto.h void lmc_proto_attach(lmc_softc_t *sc);
sc                  8 drivers/net/wan/lmc/lmc_proto.h int lmc_proto_ioctl(lmc_softc_t *sc, struct ifreq *ifr, int cmd);
sc                  9 drivers/net/wan/lmc/lmc_proto.h int lmc_proto_open(lmc_softc_t *sc);
sc                 10 drivers/net/wan/lmc/lmc_proto.h void lmc_proto_close(lmc_softc_t *sc);
sc                 11 drivers/net/wan/lmc/lmc_proto.h __be16 lmc_proto_type(lmc_softc_t *sc, struct sk_buff *skb);
sc                 12 drivers/net/wan/lmc/lmc_proto.h void lmc_proto_netif(lmc_softc_t *sc, struct sk_buff *skb);
sc                 31 drivers/net/wan/lmc/lmc_var.h #define LMC_PRINTF_ARGS	(sc->lmc_device->name)
sc                 42 drivers/net/wan/lmc/lmc_var.h #define LMC_CSR_READ(sc, csr) \
sc                 43 drivers/net/wan/lmc/lmc_var.h 	inl((sc)->lmc_csrs.csr)
sc                 44 drivers/net/wan/lmc/lmc_var.h #define LMC_CSR_WRITE(sc, reg, val) \
sc                 45 drivers/net/wan/lmc/lmc_var.h 	outl((val), (sc)->lmc_csrs.reg)
sc                 55 drivers/net/wan/lmc/lmc_var.h #define lmc_delay() inl(sc->lmc_csrs.csr_9)
sc                 58 drivers/net/wan/lmc/lmc_var.h #define LMC_MII_SYNC(sc) do {int n=32; while( n >= 0 ) { \
sc                 59 drivers/net/wan/lmc/lmc_var.h                 LMC_CSR_WRITE((sc), csr_9, 0x20000); \
sc                 61 drivers/net/wan/lmc/lmc_var.h 		LMC_CSR_WRITE((sc), csr_9, 0x30000); \
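The lmc_var.h macros above wrap Tulip CSR access in port I/O (inl()/outl()), and LMC_MII_SYNC generates the MDIO preamble by toggling the serial data/clock bits in csr_9 (0x20000 with the clock low, 0x30000 with it high) at least 32 times. A hedged sketch of that preamble loop, with csr_write() standing in for LMC_CSR_WRITE():

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for LMC_CSR_WRITE(sc, csr_9, val); the real macro is an outl(). */
    static void csr_write(int csr, uint32_t val)
    {
            printf("csr%d <= 0x%05x\n", csr, val);
    }

    /* MDIO sync preamble, same shape as LMC_MII_SYNC: keep the MDIO data line
     * high and clock it 32+ times before starting a management frame. */
    static void mii_sync(void)
    {
            for (int n = 32; n >= 0; n--) {
                    csr_write(9, 0x20000);    /* data high, clock low  */
                    csr_write(9, 0x30000);    /* data high, clock high */
            }
    }

    int main(void)
    {
            mii_sync();
            return 0;
    }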
sc               2450 drivers/net/wireless/ath/ath10k/htt_rx.c 	u16 peer_id, sc, hdr_space;
sc               2499 drivers/net/wireless/ath/ath10k/htt_rx.c 	sc = __le16_to_cpu(hdr->seq_ctrl);
sc               2500 drivers/net/wireless/ath/ath10k/htt_rx.c 	seq = (sc & IEEE80211_SCTL_SEQ) >> 4;
sc               2501 drivers/net/wireless/ath/ath10k/htt_rx.c 	frag = sc & IEEE80211_SCTL_FRAG;
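In the ath10k fragment path above, the 16-bit Sequence Control field is converted to host order and then split into a 12-bit sequence number and a 4-bit fragment number. A standalone sketch using the same mask values (0xFFF0 and 0x000F, which is what IEEE80211_SCTL_SEQ and IEEE80211_SCTL_FRAG expand to):

    #include <stdint.h>
    #include <stdio.h>

    #define SCTL_FRAG 0x000F          /* low 4 bits: fragment number   */
    #define SCTL_SEQ  0xFFF0          /* high 12 bits: sequence number */

    int main(void)
    {
            uint16_t sc = 0x1234;                  /* example seq_ctrl, host order */
            uint16_t seq = (sc & SCTL_SEQ) >> 4;   /* 0x123 */
            uint16_t frag = sc & SCTL_FRAG;        /* 0x4   */

            printf("seq=%u frag=%u\n", seq, frag);
            return 0;
    }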
sc               1967 drivers/net/wireless/ath/ath6kl/wmi.c 	struct wmi_start_scan_cmd *sc;
sc               1986 drivers/net/wireless/ath/ath6kl/wmi.c 	sc = (struct wmi_start_scan_cmd *) skb->data;
sc               1987 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->scan_type = scan_type;
sc               1988 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->force_fg_scan = cpu_to_le32(force_fgscan);
sc               1989 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->is_legacy = cpu_to_le32(is_legacy);
sc               1990 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->home_dwell_time = cpu_to_le32(home_dwell_time);
sc               1991 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->force_scan_intvl = cpu_to_le32(force_scan_interval);
sc               1992 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->num_ch = num_chan;
sc               1995 drivers/net/wireless/ath/ath6kl/wmi.c 		sc->ch_list[i] = cpu_to_le16(ch_list[i]);
sc               2016 drivers/net/wireless/ath/ath6kl/wmi.c 	struct wmi_begin_scan_cmd *sc;
sc               2047 drivers/net/wireless/ath/ath6kl/wmi.c 	sc = (struct wmi_begin_scan_cmd *) skb->data;
sc               2048 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->scan_type = scan_type;
sc               2049 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->force_fg_scan = cpu_to_le32(force_fgscan);
sc               2050 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->is_legacy = cpu_to_le32(is_legacy);
sc               2051 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->home_dwell_time = cpu_to_le32(home_dwell_time);
sc               2052 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->force_scan_intvl = cpu_to_le32(force_scan_interval);
sc               2053 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->no_cck = cpu_to_le32(no_cck);
sc               2054 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->num_ch = num_chan;
sc               2066 drivers/net/wireless/ath/ath6kl/wmi.c 		supp_rates = sc->supp_rates[band].rates;
sc               2075 drivers/net/wireless/ath/ath6kl/wmi.c 		sc->supp_rates[band].nrates = num_rates;
sc               2079 drivers/net/wireless/ath/ath6kl/wmi.c 		sc->ch_list[i] = cpu_to_le16(ch_list[i]);
sc               2090 drivers/net/wireless/ath/ath6kl/wmi.c 	struct wmi_enable_sched_scan_cmd *sc;
sc               2093 drivers/net/wireless/ath/ath6kl/wmi.c 	skb = ath6kl_wmi_get_new_buf(sizeof(*sc));
sc               2099 drivers/net/wireless/ath/ath6kl/wmi.c 	sc = (struct wmi_enable_sched_scan_cmd *) skb->data;
sc               2100 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->enable = enable ? 1 : 0;
sc               2117 drivers/net/wireless/ath/ath6kl/wmi.c 	struct wmi_scan_params_cmd *sc;
sc               2120 drivers/net/wireless/ath/ath6kl/wmi.c 	skb = ath6kl_wmi_get_new_buf(sizeof(*sc));
sc               2124 drivers/net/wireless/ath/ath6kl/wmi.c 	sc = (struct wmi_scan_params_cmd *) skb->data;
sc               2125 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->fg_start_period = cpu_to_le16(fg_start_sec);
sc               2126 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->fg_end_period = cpu_to_le16(fg_end_sec);
sc               2127 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->bg_period = cpu_to_le16(bg_sec);
sc               2128 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->minact_chdwell_time = cpu_to_le16(minact_chdw_msec);
sc               2129 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->maxact_chdwell_time = cpu_to_le16(maxact_chdw_msec);
sc               2130 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->pas_chdwell_time = cpu_to_le16(pas_chdw_msec);
sc               2131 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->short_scan_ratio = short_scan_ratio;
sc               2132 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->scan_ctrl_flags = scan_ctrl_flag;
sc               2133 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->max_dfsch_act_time = cpu_to_le32(max_dfsch_act_time);
sc               2134 drivers/net/wireless/ath/ath6kl/wmi.c 	sc->maxact_scan_per_ssid = cpu_to_le16(maxact_scan_per_ssid);
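The ath6kl WMI paths above all share one pattern: size an skb for the command structure, point the command struct at skb->data, and store every multi-byte field through cpu_to_le16()/cpu_to_le32() so the firmware always receives little-endian values regardless of host byte order. A user-space sketch of that fill pattern; the scan_cmd layout, its field names, and the put_le helpers are invented for illustration and are not the real wmi_start_scan_cmd:

    #include <stdint.h>
    #include <stdio.h>

    /* Made-up, simplified command layout (the real structs carry many more fields). */
    struct scan_cmd {
            uint8_t scan_type;
            uint8_t num_ch;
            uint8_t force_fg_le[4];   /* little-endian u32 */
            uint8_t ch_le[2];         /* little-endian u16, one channel only here */
    };

    /* Portable stand-ins for cpu_to_le32()/cpu_to_le16(). */
    static void put_le32(uint8_t *p, uint32_t v)
    {
            p[0] = v; p[1] = v >> 8; p[2] = v >> 16; p[3] = v >> 24;
    }

    static void put_le16(uint8_t *p, uint16_t v)
    {
            p[0] = v; p[1] = v >> 8;
    }

    int main(void)
    {
            uint8_t buf[sizeof(struct scan_cmd)] = { 0 };   /* plays the role of skb->data */
            struct scan_cmd *sc = (struct scan_cmd *)buf;

            sc->scan_type = 0;                /* e.g. a "long scan" type value */
            sc->num_ch = 1;
            put_le32(sc->force_fg_le, 1);     /* force foreground scan */
            put_le16(sc->ch_le, 2412);        /* channel 1 centre frequency in MHz */

            printf("%zu-byte command built\n", sizeof(buf));
            return 0;
    }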
sc                 75 drivers/net/wireless/ath/ath9k/ahb.c 	struct ath_softc *sc;
sc                119 drivers/net/wireless/ath/ath9k/ahb.c 	sc = hw->priv;
sc                120 drivers/net/wireless/ath/ath9k/ahb.c 	sc->hw = hw;
sc                121 drivers/net/wireless/ath/ath9k/ahb.c 	sc->dev = &pdev->dev;
sc                122 drivers/net/wireless/ath/ath9k/ahb.c 	sc->mem = mem;
sc                123 drivers/net/wireless/ath/ath9k/ahb.c 	sc->irq = irq;
sc                125 drivers/net/wireless/ath/ath9k/ahb.c 	ret = request_irq(irq, ath_isr, IRQF_SHARED, "ath9k", sc);
sc                131 drivers/net/wireless/ath/ath9k/ahb.c 	ret = ath9k_init_device(id->driver_data, sc, &ath_ahb_bus_ops);
sc                137 drivers/net/wireless/ath/ath9k/ahb.c 	ah = sc->sc_ah;
sc                145 drivers/net/wireless/ath/ath9k/ahb.c 	free_irq(irq, sc);
sc                156 drivers/net/wireless/ath/ath9k/ahb.c 		struct ath_softc *sc = hw->priv;
sc                158 drivers/net/wireless/ath/ath9k/ahb.c 		ath9k_deinit_device(sc);
sc                159 drivers/net/wireless/ath/ath9k/ahb.c 		free_irq(sc->irq, sc);
sc                160 drivers/net/wireless/ath/ath9k/ahb.c 		ieee80211_free_hw(sc->hw);
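The ath9k/ahb.c probe and remove lines above follow the usual acquire-in-order, release-in-reverse discipline: the IRQ is requested before ath9k_init_device(), freed again on that call's failure path, and freed (after ath9k_deinit_device()) on remove. A toy sketch of the same unwinding shape, with every kernel call replaced by a hypothetical stub:

    #include <stdio.h>

    /* Hypothetical stand-ins for the probe-time resources in ahb.c. */
    static int  claim_irq(int irq)   { printf("irq %d claimed\n", irq); return 0; }
    static void release_irq(int irq) { printf("irq %d released\n", irq); }
    static int  init_device(void)    { return -1; /* force the error path */ }

    /* Acquire in order; on failure, undo only what was already acquired. */
    static int probe(int irq)
    {
            int ret = claim_irq(irq);
            if (ret)
                    return ret;

            ret = init_device();
            if (ret) {
                    release_irq(irq);   /* mirrors free_irq() after ath9k_init_device() fails */
                    return ret;
            }
            return 0;
    }

    int main(void)
    {
            if (probe(42))
                    printf("probe failed, resources unwound\n");
            return 0;
    }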
sc                721 drivers/net/wireless/ath/ath9k/antenna.c void ath_ant_comb_scan(struct ath_softc *sc, struct ath_rx_status *rs)
sc                724 drivers/net/wireless/ath/ath9k/antenna.c 	struct ath_ant_comb *antcomb = &sc->ant_comb;
sc                758 drivers/net/wireless/ath/ath9k/antenna.c 		ANT_STAT_INC(sc, ANT_MAIN, recv_cnt);
sc                759 drivers/net/wireless/ath/ath9k/antenna.c 		ANT_LNA_INC(sc, ANT_MAIN, rx_ant_conf);
sc                761 drivers/net/wireless/ath/ath9k/antenna.c 		ANT_STAT_INC(sc, ANT_ALT, recv_cnt);
sc                762 drivers/net/wireless/ath/ath9k/antenna.c 		ANT_LNA_INC(sc, ANT_ALT, rx_ant_conf);
sc                781 drivers/net/wireless/ath/ath9k/antenna.c 	ath9k_hw_antdiv_comb_conf_get(sc->sc_ah, &div_ant_conf);
sc                840 drivers/net/wireless/ath/ath9k/antenna.c 	ath9k_hw_antdiv_comb_conf_set(sc->sc_ah, &div_ant_conf);
sc                841 drivers/net/wireless/ath/ath9k/antenna.c 	ath9k_debug_stat_ant(sc, &div_ant_conf, main_rssi_avg, alt_rssi_avg);
sc                 75 drivers/net/wireless/ath/ath9k/ath9k.h int ath_descdma_setup(struct ath_softc *sc, struct ath_descdma *dd,
sc                 83 drivers/net/wireless/ath/ath9k/ath9k.h #define	ATH_TXQ_SETUP(sc, i) ((sc)->tx.txqsetup & (1<<i))
sc                249 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_queue_tid(struct ath_softc *sc, struct ath_atx_tid *tid);
sc                252 drivers/net/wireless/ath/ath9k/ath9k.h 	struct ath_softc *sc;
sc                445 drivers/net/wireless/ath/ath9k/ath9k.h 	for (ctx = &sc->chanctx[0];                                 \
sc                446 drivers/net/wireless/ath/ath9k/ath9k.h 	     ctx <= &sc->chanctx[ARRAY_SIZE(sc->chanctx) - 1];      \
sc                449 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_init(struct ath_softc *sc);
sc                450 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_set_channel(struct ath_softc *sc, struct ath_chanctx *ctx,
sc                464 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_init_channel_context(struct ath_softc *sc);
sc                465 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_offchannel_init(struct ath_softc *sc);
sc                466 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_deinit_channel_context(struct ath_softc *sc);
sc                467 drivers/net/wireless/ath/ath9k/ath9k.h int ath9k_init_p2p(struct ath_softc *sc);
sc                468 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_deinit_p2p(struct ath_softc *sc);
sc                469 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_p2p_remove_vif(struct ath_softc *sc,
sc                471 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_p2p_beacon_sync(struct ath_softc *sc);
sc                472 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_p2p_bss_info_changed(struct ath_softc *sc,
sc                474 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_beacon_add_noa(struct ath_softc *sc, struct ath_vif *avp,
sc                477 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_chanctx_wake_queues(struct ath_softc *sc, struct ath_chanctx *ctx);
sc                478 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_chanctx_stop_queues(struct ath_softc *sc, struct ath_chanctx *ctx);
sc                479 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_check_active(struct ath_softc *sc, struct ath_chanctx *ctx);
sc                481 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_beacon_recv_ev(struct ath_softc *sc,
sc                483 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_beacon_sent_ev(struct ath_softc *sc,
sc                485 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_event(struct ath_softc *sc, struct ieee80211_vif *vif,
sc                487 drivers/net/wireless/ath/ath9k/ath9k.h void ath_chanctx_set_next(struct ath_softc *sc, bool force);
sc                488 drivers/net/wireless/ath/ath9k/ath9k.h void ath_offchannel_next(struct ath_softc *sc);
sc                489 drivers/net/wireless/ath/ath9k/ath9k.h void ath_scan_complete(struct ath_softc *sc, bool abort);
sc                490 drivers/net/wireless/ath/ath9k/ath9k.h void ath_roc_complete(struct ath_softc *sc,
sc                492 drivers/net/wireless/ath/ath9k/ath9k.h struct ath_chanctx* ath_is_go_chanctx_present(struct ath_softc *sc);
sc                503 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_init_channel_context(struct ath_softc *sc)
sc                506 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_offchannel_init(struct ath_softc *sc)
sc                509 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_deinit_channel_context(struct ath_softc *sc)
sc                512 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_chanctx_beacon_recv_ev(struct ath_softc *sc,
sc                516 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_chanctx_beacon_sent_ev(struct ath_softc *sc,
sc                520 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_chanctx_event(struct ath_softc *sc,
sc                525 drivers/net/wireless/ath/ath9k/ath9k.h static inline int ath9k_init_p2p(struct ath_softc *sc)
sc                529 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_deinit_p2p(struct ath_softc *sc)
sc                532 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_p2p_remove_vif(struct ath_softc *sc,
sc                536 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_p2p_beacon_sync(struct ath_softc *sc)
sc                539 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_p2p_bss_info_changed(struct ath_softc *sc,
sc                543 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_beacon_add_noa(struct ath_softc *sc, struct ath_vif *avp,
sc                547 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_p2p_ps_timer(struct ath_softc *sc)
sc                550 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_chanctx_wake_queues(struct ath_softc *sc,
sc                554 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_chanctx_stop_queues(struct ath_softc *sc,
sc                558 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_chanctx_check_active(struct ath_softc *sc,
sc                565 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_txq_lock(struct ath_softc *sc, struct ath_txq *txq)
sc                569 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_txq_unlock(struct ath_softc *sc, struct ath_txq *txq)
sc                574 drivers/net/wireless/ath/ath9k/ath9k.h void ath_startrecv(struct ath_softc *sc);
sc                575 drivers/net/wireless/ath/ath9k/ath9k.h bool ath_stoprecv(struct ath_softc *sc);
sc                576 drivers/net/wireless/ath/ath9k/ath9k.h u32 ath_calcrxfilter(struct ath_softc *sc);
sc                577 drivers/net/wireless/ath/ath9k/ath9k.h int ath_rx_init(struct ath_softc *sc, int nbufs);
sc                578 drivers/net/wireless/ath/ath9k/ath9k.h void ath_rx_cleanup(struct ath_softc *sc);
sc                579 drivers/net/wireless/ath/ath9k/ath9k.h int ath_rx_tasklet(struct ath_softc *sc, int flush, bool hp);
sc                580 drivers/net/wireless/ath/ath9k/ath9k.h struct ath_txq *ath_txq_setup(struct ath_softc *sc, int qtype, int subtype);
sc                581 drivers/net/wireless/ath/ath9k/ath9k.h void ath_txq_unlock_complete(struct ath_softc *sc, struct ath_txq *txq);
sc                582 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_cleanupq(struct ath_softc *sc, struct ath_txq *txq);
sc                583 drivers/net/wireless/ath/ath9k/ath9k.h bool ath_drain_all_txq(struct ath_softc *sc);
sc                584 drivers/net/wireless/ath/ath9k/ath9k.h void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq);
sc                585 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_node_init(struct ath_softc *sc, struct ath_node *an);
sc                586 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_node_cleanup(struct ath_softc *sc, struct ath_node *an);
sc                587 drivers/net/wireless/ath/ath9k/ath9k.h void ath_txq_schedule(struct ath_softc *sc, struct ath_txq *txq);
sc                588 drivers/net/wireless/ath/ath9k/ath9k.h void ath_txq_schedule_all(struct ath_softc *sc);
sc                589 drivers/net/wireless/ath/ath9k/ath9k.h int ath_tx_init(struct ath_softc *sc, int nbufs);
sc                590 drivers/net/wireless/ath/ath9k/ath9k.h int ath_txq_update(struct ath_softc *sc, int qnum,
sc                592 drivers/net/wireless/ath/ath9k/ath9k.h u32 ath_pkt_duration(struct ath_softc *sc, u8 rix, int pktlen,
sc                594 drivers/net/wireless/ath/ath9k/ath9k.h void ath_update_max_aggr_framelen(struct ath_softc *sc, int queue, int txop);
sc                600 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_tasklet(struct ath_softc *sc);
sc                601 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_edma_tasklet(struct ath_softc *sc);
sc                602 drivers/net/wireless/ath/ath9k/ath9k.h int ath_tx_aggr_start(struct ath_softc *sc, struct ieee80211_sta *sta,
sc                604 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid);
sc                606 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_aggr_wakeup(struct ath_softc *sc, struct ath_node *an);
sc                607 drivers/net/wireless/ath/ath9k/ath9k.h void ath_tx_aggr_sleep(struct ieee80211_sta *sta, struct ath_softc *sc,
sc                672 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_calculate_iter_data(struct ath_softc *sc,
sc                675 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_calculate_summary_state(struct ath_softc *sc,
sc                677 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_set_txpower(struct ath_softc *sc, struct ieee80211_vif *vif);
sc                717 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_beacon_config(struct ath_softc *sc, struct ieee80211_vif *main_vif,
sc                719 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_beacon_assign_slot(struct ath_softc *sc, struct ieee80211_vif *vif);
sc                720 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_beacon_remove_slot(struct ath_softc *sc, struct ieee80211_vif *vif);
sc                721 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_beacon_ensure_primary_slot(struct ath_softc *sc);
sc                722 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_set_beacon(struct ath_softc *sc);
sc                723 drivers/net/wireless/ath/ath9k/ath9k.h bool ath9k_csa_is_finished(struct ath_softc *sc, struct ieee80211_vif *vif);
sc                724 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_csa_update(struct ath_softc *sc);
sc                743 drivers/net/wireless/ath/ath9k/ath9k.h bool ath_hw_check(struct ath_softc *sc);
sc                747 drivers/net/wireless/ath/ath9k/ath9k.h void ath_start_ani(struct ath_softc *sc);
sc                748 drivers/net/wireless/ath/ath9k/ath9k.h void ath_stop_ani(struct ath_softc *sc);
sc                749 drivers/net/wireless/ath/ath9k/ath9k.h void ath_check_ani(struct ath_softc *sc);
sc                750 drivers/net/wireless/ath/ath9k/ath9k.h int ath_update_survey_stats(struct ath_softc *sc);
sc                751 drivers/net/wireless/ath/ath9k/ath9k.h void ath_update_survey_nf(struct ath_softc *sc, int channel);
sc                752 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_queue_reset(struct ath_softc *sc, enum ath_reset_type type);
sc                791 drivers/net/wireless/ath/ath9k/ath9k.h int ath9k_init_btcoex(struct ath_softc *sc);
sc                792 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_deinit_btcoex(struct ath_softc *sc);
sc                793 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_start_btcoex(struct ath_softc *sc);
sc                794 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_stop_btcoex(struct ath_softc *sc);
sc                795 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_btcoex_timer_resume(struct ath_softc *sc);
sc                796 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_btcoex_timer_pause(struct ath_softc *sc);
sc                797 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_btcoex_handle_interrupt(struct ath_softc *sc, u32 status);
sc                798 drivers/net/wireless/ath/ath9k/ath9k.h u16 ath9k_btcoex_aggr_limit(struct ath_softc *sc, u32 max_4ms_framelen);
sc                799 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_btcoex_stop_gen_timer(struct ath_softc *sc);
sc                800 drivers/net/wireless/ath/ath9k/ath9k.h int ath9k_dump_btcoex(struct ath_softc *sc, u8 *buf, u32 size);
sc                802 drivers/net/wireless/ath/ath9k/ath9k.h static inline int ath9k_init_btcoex(struct ath_softc *sc)
sc                806 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_deinit_btcoex(struct ath_softc *sc)
sc                809 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_start_btcoex(struct ath_softc *sc)
sc                812 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_stop_btcoex(struct ath_softc *sc)
sc                815 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_btcoex_handle_interrupt(struct ath_softc *sc,
sc                819 drivers/net/wireless/ath/ath9k/ath9k.h static inline u16 ath9k_btcoex_aggr_limit(struct ath_softc *sc,
sc                824 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_btcoex_stop_gen_timer(struct ath_softc *sc)
sc                827 drivers/net/wireless/ath/ath9k/ath9k.h static inline int ath9k_dump_btcoex(struct ath_softc *sc, u8 *buf, u32 size)
sc                844 drivers/net/wireless/ath/ath9k/ath9k.h void ath_init_leds(struct ath_softc *sc);
sc                845 drivers/net/wireless/ath/ath9k/ath9k.h void ath_deinit_leds(struct ath_softc *sc);
sc                847 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_init_leds(struct ath_softc *sc)
sc                851 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath_deinit_leds(struct ath_softc *sc)
sc                944 drivers/net/wireless/ath/ath9k/ath9k.h void ath_ant_comb_scan(struct ath_softc *sc, struct ath_rx_status *rs);
sc               1084 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_tx99_init_debug(struct ath_softc *sc);
sc               1085 drivers/net/wireless/ath/ath9k/ath9k.h int ath9k_tx99_send(struct ath_softc *sc, struct sk_buff *skb,
sc               1088 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_tx99_init_debug(struct ath_softc *sc)
sc               1091 drivers/net/wireless/ath/ath9k/ath9k.h static inline int ath9k_tx99_send(struct ath_softc *sc,
sc               1103 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_rng_start(struct ath_softc *sc);
sc               1104 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_rng_stop(struct ath_softc *sc);
sc               1106 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_rng_start(struct ath_softc *sc)
sc               1110 drivers/net/wireless/ath/ath9k/ath9k.h static inline void ath9k_rng_stop(struct ath_softc *sc)
sc               1124 drivers/net/wireless/ath/ath9k/ath9k.h int ath_reset(struct ath_softc *sc, struct ath9k_channel *hchan);
sc               1125 drivers/net/wireless/ath/ath9k/ath9k.h void ath_cancel_work(struct ath_softc *sc);
sc               1126 drivers/net/wireless/ath/ath9k/ath9k.h void ath_restart_work(struct ath_softc *sc);
sc               1127 drivers/net/wireless/ath/ath9k/ath9k.h int ath9k_init_device(u16 devid, struct ath_softc *sc,
sc               1129 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_deinit_device(struct ath_softc *sc);
sc               1130 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_reload_chainmask_settings(struct ath_softc *sc);
sc               1131 drivers/net/wireless/ath/ath9k/ath9k.h u8 ath_txchainmask_reduction(struct ath_softc *sc, u8 chainmask, u32 rate);
sc               1132 drivers/net/wireless/ath/ath9k/ath9k.h void ath_start_rfkill_poll(struct ath_softc *sc);
sc               1134 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_ps_wakeup(struct ath_softc *sc);
sc               1135 drivers/net/wireless/ath/ath9k/ath9k.h void ath9k_ps_restore(struct ath_softc *sc);
sc                 22 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_reset_beacon_status(struct ath_softc *sc)
sc                 24 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.tx_processed = false;
sc                 25 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.tx_last = false;
sc                 33 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beaconq_config(struct ath_softc *sc)
sc                 35 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                 40 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_hw_get_txq_props(ah, sc->beacon.beaconq, &qi);
sc                 42 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->sc_ah->opmode == NL80211_IFTYPE_AP ||
sc                 43 drivers/net/wireless/ath/ath9k/beacon.c 	    sc->sc_ah->opmode == NL80211_IFTYPE_MESH_POINT) {
sc                 50 drivers/net/wireless/ath/ath9k/beacon.c 		txq = sc->tx.txq_map[IEEE80211_AC_BE];
sc                 60 drivers/net/wireless/ath/ath9k/beacon.c 	if (!ath9k_hw_set_txq_props(ah, sc->beacon.beaconq, &qi)) {
sc                 63 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_hw_resettxqueue(ah, sc->beacon.beaconq);
sc                 72 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beacon_setup(struct ath_softc *sc, struct ieee80211_vif *vif,
sc                 76 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                 83 drivers/net/wireless/ath/ath9k/beacon.c 	sband = &common->sbands[sc->cur_chandef.chan->band];
sc                103 drivers/net/wireless/ath/ath9k/beacon.c 	info.qcu = sc->beacon.beaconq;
sc                107 drivers/net/wireless/ath/ath9k/beacon.c 	info.rates[0].ChSel = ath_txchainmask_reduction(sc, chainmask, rate);
sc                115 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_softc *sc = hw->priv;
sc                116 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                120 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_txq *cabq = sc->beacon.cabq;
sc                131 drivers/net/wireless/ath/ath9k/beacon.c 		dma_unmap_single(sc->dev, bf->bf_buf_addr,
sc                153 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_add_noa(sc, avp, skb);
sc                155 drivers/net/wireless/ath/ath9k/beacon.c 	bf->bf_buf_addr = dma_map_single(sc->dev, skb->data,
sc                157 drivers/net/wireless/ath/ath9k/beacon.c 	if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) {
sc                180 drivers/net/wireless/ath/ath9k/beacon.c 		if (sc->cur_chan->nvifs > 1) {
sc                183 drivers/net/wireless/ath/ath9k/beacon.c 			ath_draintxq(sc, cabq);
sc                187 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_beacon_setup(sc, vif, bf, info->control.rates[0].idx);
sc                195 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_beacon_assign_slot(struct ath_softc *sc, struct ieee80211_vif *vif)
sc                197 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                201 drivers/net/wireless/ath/ath9k/beacon.c 	avp->av_bcbuf = list_first_entry(&sc->beacon.bbuf, struct ath_buf, list);
sc                205 drivers/net/wireless/ath/ath9k/beacon.c 		if (sc->beacon.bslot[slot] == NULL) {
sc                211 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.bslot[avp->av_bslot] = vif;
sc                217 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_beacon_remove_slot(struct ath_softc *sc, struct ieee80211_vif *vif)
sc                219 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                226 drivers/net/wireless/ath/ath9k/beacon.c 	tasklet_disable(&sc->bcon_tasklet);
sc                230 drivers/net/wireless/ath/ath9k/beacon.c 		dma_unmap_single(sc->dev, bf->bf_buf_addr,
sc                238 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.bslot[avp->av_bslot] = NULL;
sc                239 drivers/net/wireless/ath/ath9k/beacon.c 	list_add_tail(&bf->list, &sc->beacon.bbuf);
sc                241 drivers/net/wireless/ath/ath9k/beacon.c 	tasklet_enable(&sc->bcon_tasklet);
sc                244 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_beacon_ensure_primary_slot(struct ath_softc *sc)
sc                246 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                254 drivers/net/wireless/ath/ath9k/beacon.c 	tasklet_disable(&sc->bcon_tasklet);
sc                258 drivers/net/wireless/ath/ath9k/beacon.c 		if (sc->beacon.bslot[slot]) {
sc                269 drivers/net/wireless/ath/ath9k/beacon.c 			vif = sc->beacon.bslot[slot + first_slot];
sc                270 drivers/net/wireless/ath/ath9k/beacon.c 			sc->beacon.bslot[slot] = vif;
sc                277 drivers/net/wireless/ath/ath9k/beacon.c 			sc->beacon.bslot[slot] = NULL;
sc                281 drivers/net/wireless/ath/ath9k/beacon.c 	vif = sc->beacon.bslot[0];
sc                295 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->cur_chan == avp->chanctx) {
sc                297 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_hw_settsf64(sc->sc_ah, avp->chanctx->tsf_val + offset);
sc                303 drivers/net/wireless/ath/ath9k/beacon.c 	tasklet_enable(&sc->bcon_tasklet);
sc                306 drivers/net/wireless/ath/ath9k/beacon.c static int ath9k_beacon_choose_slot(struct ath_softc *sc)
sc                308 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                309 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon;
sc                315 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->sc_ah->opmode != NL80211_IFTYPE_AP &&
sc                316 drivers/net/wireless/ath/ath9k/beacon.c 	    sc->sc_ah->opmode != NL80211_IFTYPE_MESH_POINT) {
sc                318 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_hw_gettsf64(sc->sc_ah));
sc                323 drivers/net/wireless/ath/ath9k/beacon.c 	tsf = ath9k_hw_gettsf64(sc->sc_ah);
sc                324 drivers/net/wireless/ath/ath9k/beacon.c 	tsf += TU_TO_USEC(sc->sc_ah->config.sw_beacon_response_time);
sc                334 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_set_tsfadjust(struct ath_softc *sc,
sc                337 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                344 drivers/net/wireless/ath/ath9k/beacon.c 		if (!sc->beacon.bslot[slot])
sc                347 drivers/net/wireless/ath/ath9k/beacon.c 		avp = (void *)sc->beacon.bslot[slot]->drv_priv;
sc                363 drivers/net/wireless/ath/ath9k/beacon.c bool ath9k_csa_is_finished(struct ath_softc *sc, struct ieee80211_vif *vif)
sc                377 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_softc *sc = data;
sc                378 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_csa_is_finished(sc, vif);
sc                381 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_csa_update(struct ath_softc *sc)
sc                383 drivers/net/wireless/ath/ath9k/beacon.c 	ieee80211_iterate_active_interfaces_atomic(sc->hw,
sc                385 drivers/net/wireless/ath/ath9k/beacon.c 						   ath9k_csa_update_vif, sc);
sc                390 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_softc *sc = (struct ath_softc *)data;
sc                391 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                411 drivers/net/wireless/ath/ath9k/beacon.c 	if (ath9k_hw_numtxpending(ah, sc->beacon.beaconq) != 0) {
sc                412 drivers/net/wireless/ath/ath9k/beacon.c 		sc->beacon.bmisscnt++;
sc                422 drivers/net/wireless/ath/ath9k/beacon.c 		if (!ath_hw_check(sc))
sc                425 drivers/net/wireless/ath/ath9k/beacon.c 		if (sc->beacon.bmisscnt < BSTUCK_THRESH * sc->nbcnvifs) {
sc                428 drivers/net/wireless/ath/ath9k/beacon.c 				sc->beacon.bmisscnt);
sc                429 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_hw_stop_dma_queue(ah, sc->beacon.beaconq);
sc                430 drivers/net/wireless/ath/ath9k/beacon.c 			if (sc->beacon.bmisscnt > 3)
sc                432 drivers/net/wireless/ath/ath9k/beacon.c 		} else if (sc->beacon.bmisscnt >= BSTUCK_THRESH) {
sc                434 drivers/net/wireless/ath/ath9k/beacon.c 			sc->beacon.bmisscnt = 0;
sc                435 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_queue_reset(sc, RESET_TYPE_BEACON_STUCK);
sc                441 drivers/net/wireless/ath/ath9k/beacon.c 	slot = ath9k_beacon_choose_slot(sc);
sc                442 drivers/net/wireless/ath/ath9k/beacon.c 	vif = sc->beacon.bslot[slot];
sc                447 drivers/net/wireless/ath/ath9k/beacon.c 			ath_chanctx_beacon_sent_ev(sc,
sc                451 drivers/net/wireless/ath/ath9k/beacon.c 		if (ath9k_csa_is_finished(sc, vif))
sc                459 drivers/net/wireless/ath/ath9k/beacon.c 		ath_chanctx_event(sc, vif, ATH_CHANCTX_EVENT_BEACON_PREPARE);
sc                462 drivers/net/wireless/ath/ath9k/beacon.c 	bf = ath9k_beacon_generate(sc->hw, vif);
sc                464 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->beacon.bmisscnt != 0) {
sc                466 drivers/net/wireless/ath/ath9k/beacon.c 			sc->beacon.bmisscnt);
sc                467 drivers/net/wireless/ath/ath9k/beacon.c 		sc->beacon.bmisscnt = 0;
sc                486 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->beacon.updateslot == UPDATE) {
sc                487 drivers/net/wireless/ath/ath9k/beacon.c 		sc->beacon.updateslot = COMMIT;
sc                488 drivers/net/wireless/ath/ath9k/beacon.c 		sc->beacon.slotupdate = slot;
sc                489 drivers/net/wireless/ath/ath9k/beacon.c 	} else if (sc->beacon.updateslot == COMMIT &&
sc                490 drivers/net/wireless/ath/ath9k/beacon.c 		   sc->beacon.slotupdate == slot) {
sc                491 drivers/net/wireless/ath/ath9k/beacon.c 		ah->slottime = sc->beacon.slottime;
sc                493 drivers/net/wireless/ath/ath9k/beacon.c 		sc->beacon.updateslot = OK;
sc                497 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_reset_beacon_status(sc);
sc                503 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_hw_puttxbuf(ah, sc->beacon.beaconq, bf->bf_daddr);
sc                506 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_hw_txstart(ah, sc->beacon.beaconq);
sc                513 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beacon_init(struct ath_softc *sc, u32 nexttbtt,
sc                516 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                519 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_beaconq_config(sc);
sc                522 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.bmisscnt = 0;
sc                527 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beacon_stop(struct ath_softc *sc)
sc                529 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_hw_disable_interrupts(sc->sc_ah);
sc                530 drivers/net/wireless/ath/ath9k/beacon.c 	sc->sc_ah->imask &= ~(ATH9K_INT_SWBA | ATH9K_INT_BMISS);
sc                531 drivers/net/wireless/ath/ath9k/beacon.c 	sc->beacon.bmisscnt = 0;
sc                532 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_hw_set_interrupts(sc->sc_ah);
sc                533 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_hw_enable_interrupts(sc->sc_ah);
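ath9k_beacon_stop() above edits the interrupt mask in a guarded sequence: disable interrupts, clear the beacon bits (SWBA, BMISS) in the driver's shadow copy of the mask, program the new mask, then re-enable. A small sketch of that ordering; the bit values and helper names here are invented, only the sequence mirrors the driver:

    #include <stdint.h>
    #include <stdio.h>

    #define INT_SWBA  (1u << 0)       /* hypothetical bit positions */
    #define INT_BMISS (1u << 1)
    #define INT_RX    (1u << 2)

    static uint32_t imask = INT_SWBA | INT_BMISS | INT_RX;   /* shadow of the hw mask */

    static void irq_disable(void)   { printf("interrupts off\n"); }
    static void irq_set(uint32_t m) { printf("mask <= 0x%x\n", m); }
    static void irq_enable(void)    { printf("interrupts on\n"); }

    /* Same ordering as ath9k_beacon_stop(): quiesce, edit shadow, reprogram, resume. */
    static void beacon_stop(void)
    {
            irq_disable();
            imask &= ~(INT_SWBA | INT_BMISS);
            irq_set(imask);
            irq_enable();
    }

    int main(void)
    {
            beacon_stop();
            return 0;
    }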
sc                541 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beacon_config_ap(struct ath_softc *sc,
sc                544 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                547 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_beacon_init(sc, conf->nexttbtt, conf->intval);
sc                566 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_beacon_config_adhoc(struct ath_softc *sc,
sc                569 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                572 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_reset_beacon_status(sc);
sc                576 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_beacon_init(sc, conf->nexttbtt, conf->intval);
sc                586 drivers/net/wireless/ath/ath9k/beacon.c static void ath9k_cache_beacon_config(struct ath_softc *sc,
sc                590 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                621 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_set_tsfadjust(sc, cur_conf);
sc                624 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_beacon_config(struct ath_softc *sc, struct ieee80211_vif *main_vif,
sc                627 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_hw *ah = sc->sc_ah;
sc                638 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_stop(sc);
sc                651 drivers/net/wireless/ath/ath9k/beacon.c 	if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION) {
sc                652 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_cache_beacon_config(sc, ctx, &main_vif->bss_conf);
sc                654 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_set_beacon(sc);
sc                660 drivers/net/wireless/ath/ath9k/beacon.c 	ath9k_cache_beacon_config(sc, ctx, &main_vif->bss_conf);
sc                672 drivers/net/wireless/ath/ath9k/beacon.c 		if (sc->sc_ah->opmode == NL80211_IFTYPE_ADHOC &&
sc                674 drivers/net/wireless/ath/ath9k/beacon.c 			spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                675 drivers/net/wireless/ath/ath9k/beacon.c 			sc->ps_flags |= PS_BEACON_SYNC | PS_WAIT_FOR_BEACON;
sc                676 drivers/net/wireless/ath/ath9k/beacon.c 			spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                686 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_set_beacon(sc);
sc                689 drivers/net/wireless/ath/ath9k/beacon.c 			ath9k_beacon_stop(sc);
sc                693 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_stop(sc);
sc                697 drivers/net/wireless/ath/ath9k/beacon.c void ath9k_set_beacon(struct ath_softc *sc)
sc                699 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                700 drivers/net/wireless/ath/ath9k/beacon.c 	struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon;
sc                702 drivers/net/wireless/ath/ath9k/beacon.c 	switch (sc->sc_ah->opmode) {
sc                705 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_config_ap(sc, cur_conf);
sc                708 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_config_adhoc(sc, cur_conf);
sc                711 drivers/net/wireless/ath/ath9k/beacon.c 		ath9k_beacon_config_sta(sc->sc_ah, cur_conf);
sc                 23 drivers/net/wireless/ath/ath9k/channel.c static int ath_set_channel(struct ath_softc *sc)
sc                 25 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc                 27 drivers/net/wireless/ath/ath9k/channel.c 	struct ieee80211_hw *hw = sc->hw;
sc                 29 drivers/net/wireless/ath/ath9k/channel.c 	struct cfg80211_chan_def *chandef = &sc->cur_chan->chandef;
sc                 47 drivers/net/wireless/ath/ath9k/channel.c 	ath_update_survey_stats(sc);
sc                 57 drivers/net/wireless/ath/ath9k/channel.c 	if (!sc->cur_chan->offchannel && sc->cur_survey != &sc->survey[pos]) {
sc                 58 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_survey)
sc                 59 drivers/net/wireless/ath/ath9k/channel.c 			sc->cur_survey->filled &= ~SURVEY_INFO_IN_USE;
sc                 61 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_survey = &sc->survey[pos];
sc                 63 drivers/net/wireless/ath/ath9k/channel.c 		memset(sc->cur_survey, 0, sizeof(struct survey_info));
sc                 64 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_survey->filled |= SURVEY_INFO_IN_USE;
sc                 65 drivers/net/wireless/ath/ath9k/channel.c 	} else if (!(sc->survey[pos].filled & SURVEY_INFO_IN_USE)) {
sc                 66 drivers/net/wireless/ath/ath9k/channel.c 		memset(&sc->survey[pos], 0, sizeof(struct survey_info));
sc                 69 drivers/net/wireless/ath/ath9k/channel.c 	hchan = &sc->sc_ah->channels[pos];
sc                 70 drivers/net/wireless/ath/ath9k/channel.c 	r = ath_reset(sc, hchan);
sc                 79 drivers/net/wireless/ath/ath9k/channel.c 		ath_update_survey_nf(sc, old_pos);
sc                 96 drivers/net/wireless/ath/ath9k/channel.c 			sc->spec_priv.spectral_mode == SPECTRAL_CHANSCAN)
sc                 97 drivers/net/wireless/ath/ath9k/channel.c 			ath9k_cmn_spectral_scan_trigger(common, &sc->spec_priv);
sc                103 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_init(struct ath_softc *sc)
sc                106 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                117 drivers/net/wireless/ath/ath9k/channel.c 		ctx = &sc->chanctx[i];
sc                130 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_set_channel(struct ath_softc *sc, struct ath_chanctx *ctx,
sc                133 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                136 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                139 drivers/net/wireless/ath/ath9k/channel.c 	cur_chan = sc->cur_chan == ctx;
sc                140 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                148 drivers/net/wireless/ath/ath9k/channel.c 	ath_set_channel(sc);
sc                157 drivers/net/wireless/ath/ath9k/channel.c struct ath_chanctx* ath_is_go_chanctx_present(struct ath_softc *sc)
sc                163 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                165 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
sc                173 drivers/net/wireless/ath/ath9k/channel.c 				spin_unlock_bh(&sc->chan_lock);
sc                179 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                233 drivers/net/wireless/ath/ath9k/channel.c static u32 chanctx_event_delta(struct ath_softc *sc)
sc                239 drivers/net/wireless/ath/ath9k/channel.c 	old = &sc->last_event_time;
sc                242 drivers/net/wireless/ath/ath9k/channel.c 	sc->last_event_time = ts;
sc                247 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_check_active(struct ath_softc *sc, struct ath_chanctx *ctx)
sc                249 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                258 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
sc                259 drivers/net/wireless/ath/ath9k/channel.c 		spin_lock_bh(&sc->chan_lock);
sc                261 drivers/net/wireless/ath/ath9k/channel.c 		if (likely(sc->sched.channel_switch_time))
sc                263 drivers/net/wireless/ath/ath9k/channel.c 				usecs_to_jiffies(sc->sched.channel_switch_time);
sc                268 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc                296 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
sc                302 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                307 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc                311 drivers/net/wireless/ath/ath9k/channel.c 	ictx->flush_timeout = usecs_to_jiffies(sc->sched.channel_switch_time);
sc                314 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc                318 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                321 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_event(sc, NULL,
sc                327 drivers/net/wireless/ath/ath9k/channel.c ath_chanctx_get_next(struct ath_softc *sc, struct ath_chanctx *ctx)
sc                329 drivers/net/wireless/ath/ath9k/channel.c 	int idx = ctx - &sc->chanctx[0];
sc                331 drivers/net/wireless/ath/ath9k/channel.c 	return &sc->chanctx[!idx];
sc                334 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_adjust_tbtt_delta(struct ath_softc *sc)
sc                341 drivers/net/wireless/ath/ath9k/channel.c 	beacon_int = TU_TO_USEC(sc->cur_chan->beacon.beacon_interval);
sc                343 drivers/net/wireless/ath/ath9k/channel.c 	cur = sc->cur_chan;
sc                344 drivers/net/wireless/ath/ath9k/channel.c 	prev = ath_chanctx_get_next(sc, cur);
sc                373 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_setup_timer(struct ath_softc *sc, u32 tsf_time)
sc                375 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                376 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc                379 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_hw_gen_timer_start(ah, sc->p2p_ps_timer, tsf_time, 1000000);
sc                382 drivers/net/wireless/ath/ath9k/channel.c 	mod_timer(&sc->sched.timer, jiffies + timeout);
sc                389 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_handle_bmiss(struct ath_softc *sc,
sc                399 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx->active && sc->sched.extend_absence) {
sc                401 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.extend_absence = false;
sc                408 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx->active && sc->sched.beacon_miss >= 2) {
sc                410 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.extend_absence = true;
sc                414 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_offchannel_noa(struct ath_softc *sc,
sc                419 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                423 drivers/net/wireless/ath/ath9k/channel.c 	avp->offchannel_duration = sc->sched.offchannel_duration;
sc                440 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_set_periodic_noa(struct ath_softc *sc,
sc                446 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                451 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->sched.extend_absence)
sc                453 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.channel_switch_time;
sc                457 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.channel_switch_time;
sc                460 drivers/net/wireless/ath/ath9k/channel.c 	    sc->sched.extend_absence)
sc                473 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_set_oneshot_noa(struct ath_softc *sc,
sc                478 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                484 drivers/net/wireless/ath/ath9k/channel.c 	avp->noa_duration = duration + sc->sched.channel_switch_time;
sc                494 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_event(struct ath_softc *sc, struct ieee80211_vif *vif,
sc                497 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc                508 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                511 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_chan->chandef.center_freq1,
sc                513 drivers/net/wireless/ath/ath9k/channel.c 		chanctx_state_string(sc->sched.state),
sc                514 drivers/net/wireless/ath/ath9k/channel.c 		chanctx_event_delta(sc));
sc                529 drivers/net/wireless/ath/ath9k/channel.c 		if (avp->chanctx != sc->cur_chan) {
sc                535 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.offchannel_pending && !sc->sched.wait_switch) {
sc                536 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.offchannel_pending = false;
sc                537 drivers/net/wireless/ath/ath9k/channel.c 			sc->next_chan = &sc->offchannel.chan;
sc                538 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_BEACON;
sc                543 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_next(sc, sc->cur_chan);
sc                544 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active && sc->sched.state == ATH_CHANCTX_STATE_IDLE) {
sc                545 drivers/net/wireless/ath/ath9k/channel.c 			sc->next_chan = ctx;
sc                546 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_BEACON;
sc                552 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state == ATH_CHANCTX_STATE_WAIT_FOR_TIMER) {
sc                553 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_BEACON;
sc                558 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.mgd_prepare_tx)
sc                559 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_BEACON;
sc                568 drivers/net/wireless/ath/ath9k/channel.c 		    sc->sched.state != ATH_CHANCTX_STATE_WAIT_FOR_BEACON) {
sc                576 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state != ATH_CHANCTX_STATE_WAIT_FOR_BEACON)
sc                581 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_pending = true;
sc                582 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.next_tbtt = REG_READ(ah, AR_NEXT_TBTT_TIMER);
sc                584 drivers/net/wireless/ath/ath9k/channel.c 		cur_conf = &sc->cur_chan->beacon;
sc                588 drivers/net/wireless/ath/ath9k/channel.c 		tsf_time = sc->sched.next_tbtt + beacon_int / 4;
sc                589 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.switch_start_time = tsf_time;
sc                590 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_chan->last_beacon = sc->sched.next_tbtt;
sc                597 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->next_chan == &sc->offchannel.chan) {
sc                598 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_offchannel_noa(sc, ctx, avp, tsf_time);
sc                602 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_handle_bmiss(sc, ctx, avp);
sc                610 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.mgd_prepare_tx) {
sc                611 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_set_oneshot_noa(sc, avp, tsf_time,
sc                626 drivers/net/wireless/ath/ath9k/channel.c 		    (!avp->noa_duration || sc->sched.force_noa_update))
sc                627 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_set_periodic_noa(sc, avp, cur_conf,
sc                630 drivers/net/wireless/ath/ath9k/channel.c 		if (ctx->active && sc->sched.force_noa_update)
sc                631 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.force_noa_update = false;
sc                635 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->sched.beacon_pending) {
sc                641 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_pending = false;
sc                643 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.mgd_prepare_tx) {
sc                644 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.mgd_prepare_tx = false;
sc                645 drivers/net/wireless/ath/ath9k/channel.c 			complete(&sc->go_beacon);
sc                651 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state != ATH_CHANCTX_STATE_WAIT_FOR_BEACON)
sc                657 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_TIMER;
sc                658 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_setup_timer(sc, sc->sched.switch_start_time);
sc                661 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state != ATH_CHANCTX_STATE_WAIT_FOR_TIMER)
sc                664 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->cur_chan->switch_after_beacon &&
sc                665 drivers/net/wireless/ath/ath9k/channel.c 		    sc->sched.beacon_pending)
sc                666 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.beacon_miss++;
sc                671 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_SWITCH;
sc                672 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_queue_work(sc->hw, &sc->chanctx_work);
sc                676 drivers/net/wireless/ath/ath9k/channel.c 		    sc->cur_chan == &sc->offchannel.chan)
sc                679 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_pending = false;
sc                680 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_miss = 0;
sc                682 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state == ATH_CHANCTX_STATE_FORCE_ACTIVE ||
sc                683 drivers/net/wireless/ath/ath9k/channel.c 		    !sc->sched.beacon_adjust ||
sc                684 drivers/net/wireless/ath/ath9k/channel.c 		    !sc->cur_chan->tsf_val)
sc                687 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_adjust_tbtt_delta(sc);
sc                692 drivers/net/wireless/ath/ath9k/channel.c 		tsf_time = sc->sched.switch_start_time;
sc                693 drivers/net/wireless/ath/ath9k/channel.c 		tsf_time -= (u32) sc->cur_chan->tsf_val +
sc                694 drivers/net/wireless/ath/ath9k/channel.c 			ath9k_hw_get_tsf_offset(&sc->cur_chan->tsf_ts, NULL);
sc                697 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_adjust = false;
sc                698 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_setup_timer(sc, tsf_time);
sc                701 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.state != ATH_CHANCTX_STATE_FORCE_ACTIVE ||
sc                702 drivers/net/wireless/ath/ath9k/channel.c 		    avp->chanctx != sc->cur_chan)
sc                708 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_IDLE;
sc                712 drivers/net/wireless/ath/ath9k/channel.c 		    sc->sched.state == ATH_CHANCTX_STATE_FORCE_ACTIVE ||
sc                713 drivers/net/wireless/ath/ath9k/channel.c 		    sc->cur_chan->switch_after_beacon ||
sc                714 drivers/net/wireless/ath/ath9k/channel.c 		    sc->cur_chan == &sc->offchannel.chan)
sc                720 drivers/net/wireless/ath/ath9k/channel.c 		sc->next_chan = ath_chanctx_get_next(sc, sc->cur_chan);
sc                721 drivers/net/wireless/ath/ath9k/channel.c 		cur_conf = &sc->cur_chan->beacon;
sc                726 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_TIMER;
sc                727 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.wait_switch = false;
sc                731 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->sched.extend_absence) {
sc                732 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.beacon_miss = 0;
sc                736 drivers/net/wireless/ath/ath9k/channel.c 		tsf_time -= sc->sched.channel_switch_time;
sc                737 drivers/net/wireless/ath/ath9k/channel.c 		tsf_time += ath9k_hw_gettsf32(sc->sc_ah);
sc                738 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.switch_start_time = tsf_time;
sc                740 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_setup_timer(sc, tsf_time);
sc                741 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_pending = true;
sc                742 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.beacon_adjust = true;
sc                745 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_chan == &sc->offchannel.chan ||
sc                746 drivers/net/wireless/ath/ath9k/channel.c 		    sc->cur_chan->switch_after_beacon)
sc                749 drivers/net/wireless/ath/ath9k/channel.c 		sc->next_chan = ath_chanctx_get_next(sc, sc->cur_chan);
sc                750 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_queue_work(sc->hw, &sc->chanctx_work);
sc                753 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_chan->assigned) {
sc                754 drivers/net/wireless/ath/ath9k/channel.c 			if (sc->next_chan && !sc->next_chan->assigned &&
sc                755 drivers/net/wireless/ath/ath9k/channel.c 			    sc->next_chan != &sc->offchannel.chan)
sc                756 drivers/net/wireless/ath/ath9k/channel.c 				sc->sched.state = ATH_CHANCTX_STATE_IDLE;
sc                760 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_next(sc, sc->cur_chan);
sc                761 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_IDLE;
sc                765 drivers/net/wireless/ath/ath9k/channel.c 		sc->next_chan = ctx;
sc                766 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_queue_work(sc->hw, &sc->chanctx_work);
sc                774 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                777 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_beacon_sent_ev(struct ath_softc *sc,
sc                780 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->sched.beacon_pending)
sc                781 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_event(sc, NULL, ev);
sc                784 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_beacon_recv_ev(struct ath_softc *sc,
sc                787 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_event(sc, NULL, ev);
sc                790 drivers/net/wireless/ath/ath9k/channel.c static int ath_scan_channel_duration(struct ath_softc *sc,
sc                793 drivers/net/wireless/ath/ath9k/channel.c 	struct cfg80211_scan_request *req = sc->offchannel.scan_req;
sc                801 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_switch(struct ath_softc *sc, struct ath_chanctx *ctx,
sc                804 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                806 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                809 drivers/net/wireless/ath/ath9k/channel.c 	    (sc->cur_chan != ctx) && (ctx == &sc->offchannel.chan)) {
sc                813 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.offchannel_pending = true;
sc                814 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.wait_switch = true;
sc                815 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.offchannel_duration =
sc                816 drivers/net/wireless/ath/ath9k/channel.c 			jiffies_to_usecs(sc->offchannel.duration) +
sc                817 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.channel_switch_time;
sc                819 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc                825 drivers/net/wireless/ath/ath9k/channel.c 	sc->next_chan = ctx;
sc                832 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->next_chan == &sc->offchannel.chan) {
sc                833 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.offchannel_duration =
sc                834 drivers/net/wireless/ath/ath9k/channel.c 			jiffies_to_usecs(sc->offchannel.duration) +
sc                835 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.channel_switch_time;
sc                841 drivers/net/wireless/ath/ath9k/channel.c 				sc->sched.offchannel_duration);
sc                844 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                845 drivers/net/wireless/ath/ath9k/channel.c 	ieee80211_queue_work(sc->hw, &sc->chanctx_work);
sc                848 drivers/net/wireless/ath/ath9k/channel.c static void ath_chanctx_offchan_switch(struct ath_softc *sc,
sc                851 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                858 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_switch(sc, &sc->offchannel.chan, &chandef);
sc                861 drivers/net/wireless/ath/ath9k/channel.c static struct ath_chanctx *ath_chanctx_get_oper_chan(struct ath_softc *sc,
sc                866 drivers/net/wireless/ath/ath9k/channel.c 	ath_for_each_chanctx(sc, ctx) {
sc                876 drivers/net/wireless/ath/ath9k/channel.c 	return &sc->chanctx[0];
sc                880 drivers/net/wireless/ath/ath9k/channel.c ath_scan_next_channel(struct ath_softc *sc)
sc                882 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                883 drivers/net/wireless/ath/ath9k/channel.c 	struct cfg80211_scan_request *req = sc->offchannel.scan_req;
sc                886 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->offchannel.scan_idx >= req->n_channels) {
sc                890 drivers/net/wireless/ath/ath9k/channel.c 			sc->offchannel.scan_idx,
sc                893 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.state = ATH_OFFCHANNEL_IDLE;
sc                894 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_switch(sc, ath_chanctx_get_oper_chan(sc, false),
sc                901 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.scan_idx);
sc                903 drivers/net/wireless/ath/ath9k/channel.c 	chan = req->channels[sc->offchannel.scan_idx++];
sc                904 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.duration = ath_scan_channel_duration(sc, chan);
sc                905 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.state = ATH_OFFCHANNEL_PROBE_SEND;
sc                907 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_offchan_switch(sc, chan);
sc                910 drivers/net/wireless/ath/ath9k/channel.c void ath_offchannel_next(struct ath_softc *sc)
sc                914 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->offchannel.scan_req) {
sc                915 drivers/net/wireless/ath/ath9k/channel.c 		vif = sc->offchannel.scan_vif;
sc                916 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.chan.txpower = vif->bss_conf.txpower;
sc                917 drivers/net/wireless/ath/ath9k/channel.c 		ath_scan_next_channel(sc);
sc                918 drivers/net/wireless/ath/ath9k/channel.c 	} else if (sc->offchannel.roc_vif) {
sc                919 drivers/net/wireless/ath/ath9k/channel.c 		vif = sc->offchannel.roc_vif;
sc                920 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.chan.txpower = vif->bss_conf.txpower;
sc                921 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.duration =
sc                922 drivers/net/wireless/ath/ath9k/channel.c 			msecs_to_jiffies(sc->offchannel.roc_duration);
sc                923 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.state = ATH_OFFCHANNEL_ROC_START;
sc                924 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_offchan_switch(sc, sc->offchannel.roc_chan);
sc                926 drivers/net/wireless/ath/ath9k/channel.c 		spin_lock_bh(&sc->chan_lock);
sc                927 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.offchannel_pending = false;
sc                928 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.wait_switch = false;
sc                929 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc                931 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_switch(sc, ath_chanctx_get_oper_chan(sc, false),
sc                933 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.state = ATH_OFFCHANNEL_IDLE;
sc                934 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->ps_idle)
sc                935 drivers/net/wireless/ath/ath9k/channel.c 			ath_cancel_work(sc);
sc                939 drivers/net/wireless/ath/ath9k/channel.c void ath_roc_complete(struct ath_softc *sc, enum ath_roc_complete_reason reason)
sc                941 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                943 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.roc_vif = NULL;
sc                944 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.roc_chan = NULL;
sc                949 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_remain_on_channel_expired(sc->hw);
sc                953 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_remain_on_channel_expired(sc->hw);
sc                960 drivers/net/wireless/ath/ath9k/channel.c 	ath_offchannel_next(sc);
sc                961 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_ps_restore(sc);
sc                964 drivers/net/wireless/ath/ath9k/channel.c void ath_scan_complete(struct ath_softc *sc, bool abort)
sc                966 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                976 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.scan_req = NULL;
sc                977 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.scan_vif = NULL;
sc                978 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.state = ATH_OFFCHANNEL_IDLE;
sc                979 drivers/net/wireless/ath/ath9k/channel.c 	ieee80211_scan_completed(sc->hw, &info);
sc                981 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc                983 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.force_noa_update = true;
sc                984 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc                985 drivers/net/wireless/ath/ath9k/channel.c 	ath_offchannel_next(sc);
sc                986 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_ps_restore(sc);
sc                989 drivers/net/wireless/ath/ath9k/channel.c static void ath_scan_send_probe(struct ath_softc *sc,
sc                992 drivers/net/wireless/ath/ath9k/channel.c 	struct cfg80211_scan_request *req = sc->offchannel.scan_req;
sc                993 drivers/net/wireless/ath/ath9k/channel.c 	struct ieee80211_vif *vif = sc->offchannel.scan_vif;
sc                997 drivers/net/wireless/ath/ath9k/channel.c 	int band = sc->offchannel.chan.chandef.chan->band;
sc                999 drivers/net/wireless/ath/ath9k/channel.c 	skb = ieee80211_probereq_get(sc->hw, vif->addr,
sc               1013 drivers/net/wireless/ath/ath9k/channel.c 	if (!ieee80211_tx_prepare_skb(sc->hw, vif, skb, band, NULL))
sc               1016 drivers/net/wireless/ath/ath9k/channel.c 	txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO];
sc               1017 drivers/net/wireless/ath/ath9k/channel.c 	if (ath_tx_start(sc->hw, skb, &txctl))
sc               1023 drivers/net/wireless/ath/ath9k/channel.c 	ieee80211_free_txskb(sc->hw, skb);
sc               1026 drivers/net/wireless/ath/ath9k/channel.c static void ath_scan_channel_start(struct ath_softc *sc)
sc               1028 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1029 drivers/net/wireless/ath/ath9k/channel.c 	struct cfg80211_scan_request *req = sc->offchannel.scan_req;
sc               1032 drivers/net/wireless/ath/ath9k/channel.c 	if (!(sc->cur_chan->chandef.chan->flags & IEEE80211_CHAN_NO_IR) &&
sc               1035 drivers/net/wireless/ath/ath9k/channel.c 			ath_scan_send_probe(sc, &req->ssids[i]);
sc               1042 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.state = ATH_OFFCHANNEL_PROBE_WAIT;
sc               1043 drivers/net/wireless/ath/ath9k/channel.c 	mod_timer(&sc->offchannel.timer, jiffies + sc->offchannel.duration);
sc               1048 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_softc *sc = from_timer(sc, t, sched.timer);
sc               1049 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1054 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_event(sc, NULL, ATH_CHANCTX_EVENT_TSF_TIMER);
sc               1059 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_softc *sc = from_timer(sc, t, offchannel.timer);
sc               1061 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1064 drivers/net/wireless/ath/ath9k/channel.c 		__func__, offchannel_state_string(sc->offchannel.state));
sc               1066 drivers/net/wireless/ath/ath9k/channel.c 	switch (sc->offchannel.state) {
sc               1068 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->offchannel.scan_req)
sc               1072 drivers/net/wireless/ath/ath9k/channel.c 		ctx = ath_chanctx_get_oper_chan(sc, true);
sc               1078 drivers/net/wireless/ath/ath9k/channel.c 			sc->offchannel.state = ATH_OFFCHANNEL_SUSPEND;
sc               1079 drivers/net/wireless/ath/ath9k/channel.c 			ath_chanctx_switch(sc, ctx, NULL);
sc               1080 drivers/net/wireless/ath/ath9k/channel.c 			mod_timer(&sc->offchannel.timer, jiffies + HZ / 10);
sc               1085 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->offchannel.scan_req)
sc               1088 drivers/net/wireless/ath/ath9k/channel.c 		ath_scan_next_channel(sc);
sc               1092 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.state = ATH_OFFCHANNEL_ROC_DONE;
sc               1093 drivers/net/wireless/ath/ath9k/channel.c 		ath_roc_complete(sc, ATH_ROC_COMPLETE_EXPIRE);
sc               1101 drivers/net/wireless/ath/ath9k/channel.c ath_chanctx_send_vif_ps_frame(struct ath_softc *sc, struct ath_vif *avp,
sc               1109 drivers/net/wireless/ath/ath9k/channel.c 	int band = sc->cur_chan->chandef.chan->band;
sc               1116 drivers/net/wireless/ath/ath9k/channel.c 		skb = ieee80211_nullfunc_get(sc->hw, vif, false);
sc               1127 drivers/net/wireless/ath/ath9k/channel.c 		if (!ieee80211_tx_prepare_skb(sc->hw, vif, skb, band, &sta)) {
sc               1137 drivers/net/wireless/ath/ath9k/channel.c 	txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO];
sc               1139 drivers/net/wireless/ath/ath9k/channel.c 	if (ath_tx_start(sc->hw, skb, &txctl)) {
sc               1140 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_free_txskb(sc->hw, skb);
sc               1148 drivers/net/wireless/ath/ath9k/channel.c ath_chanctx_send_ps_frame(struct ath_softc *sc, bool powersave)
sc               1154 drivers/net/wireless/ath/ath9k/channel.c 	list_for_each_entry(avp, &sc->cur_chan->vifs, list) {
sc               1155 drivers/net/wireless/ath/ath9k/channel.c 		if (ath_chanctx_send_vif_ps_frame(sc, avp, powersave))
sc               1163 drivers/net/wireless/ath/ath9k/channel.c static bool ath_chanctx_defer_switch(struct ath_softc *sc)
sc               1165 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1167 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->cur_chan == &sc->offchannel.chan)
sc               1170 drivers/net/wireless/ath/ath9k/channel.c 	switch (sc->sched.state) {
sc               1174 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->cur_chan->switch_after_beacon)
sc               1180 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_WAIT_FOR_BEACON;
sc               1189 drivers/net/wireless/ath/ath9k/channel.c static void ath_offchannel_channel_change(struct ath_softc *sc)
sc               1191 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1194 drivers/net/wireless/ath/ath9k/channel.c 		__func__, offchannel_state_string(sc->offchannel.state));
sc               1196 drivers/net/wireless/ath/ath9k/channel.c 	switch (sc->offchannel.state) {
sc               1198 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->offchannel.scan_req)
sc               1201 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_chan->chandef.chan !=
sc               1202 drivers/net/wireless/ath/ath9k/channel.c 		    sc->offchannel.chan.chandef.chan)
sc               1205 drivers/net/wireless/ath/ath9k/channel.c 		ath_scan_channel_start(sc);
sc               1208 drivers/net/wireless/ath/ath9k/channel.c 		if (!sc->offchannel.scan_req)
sc               1211 drivers/net/wireless/ath/ath9k/channel.c 		ath_scan_complete(sc, false);
sc               1214 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_chan != &sc->offchannel.chan)
sc               1217 drivers/net/wireless/ath/ath9k/channel.c 		sc->offchannel.state = ATH_OFFCHANNEL_ROC_WAIT;
sc               1218 drivers/net/wireless/ath/ath9k/channel.c 		mod_timer(&sc->offchannel.timer,
sc               1219 drivers/net/wireless/ath/ath9k/channel.c 			  jiffies + sc->offchannel.duration);
sc               1220 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_ready_on_channel(sc->hw);
sc               1229 drivers/net/wireless/ath/ath9k/channel.c void ath_chanctx_set_next(struct ath_softc *sc, bool force)
sc               1231 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1238 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->chan_lock);
sc               1239 drivers/net/wireless/ath/ath9k/channel.c 	if (!sc->next_chan) {
sc               1240 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc               1244 drivers/net/wireless/ath/ath9k/channel.c 	if (!force && ath_chanctx_defer_switch(sc)) {
sc               1245 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc               1252 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_chan->chandef.center_freq1,
sc               1253 drivers/net/wireless/ath/ath9k/channel.c 		sc->next_chan->chandef.center_freq1);
sc               1255 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->cur_chan != sc->next_chan) {
sc               1258 drivers/net/wireless/ath/ath9k/channel.c 			sc->cur_chan->chandef.center_freq1);
sc               1259 drivers/net/wireless/ath/ath9k/channel.c 		sc->cur_chan->stopped = true;
sc               1260 drivers/net/wireless/ath/ath9k/channel.c 		spin_unlock_bh(&sc->chan_lock);
sc               1262 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->next_chan == &sc->offchannel.chan) {
sc               1267 drivers/net/wireless/ath/ath9k/channel.c 		ath9k_chanctx_stop_queues(sc, sc->cur_chan);
sc               1270 drivers/net/wireless/ath/ath9k/channel.c 		__ath9k_flush(sc->hw, ~0, true, false, false);
sc               1272 drivers/net/wireless/ath/ath9k/channel.c 		if (ath_chanctx_send_ps_frame(sc, true))
sc               1273 drivers/net/wireless/ath/ath9k/channel.c 			__ath9k_flush(sc->hw, BIT(IEEE80211_AC_VO),
sc               1277 drivers/net/wireless/ath/ath9k/channel.c 		spin_lock_bh(&sc->chan_lock);
sc               1279 drivers/net/wireless/ath/ath9k/channel.c 		if (sc->cur_chan != &sc->offchannel.chan) {
sc               1280 drivers/net/wireless/ath/ath9k/channel.c 			ktime_get_raw_ts64(&sc->cur_chan->tsf_ts);
sc               1281 drivers/net/wireless/ath/ath9k/channel.c 			sc->cur_chan->tsf_val = ath9k_hw_gettsf64(sc->sc_ah);
sc               1284 drivers/net/wireless/ath/ath9k/channel.c 	old_ctx = sc->cur_chan;
sc               1285 drivers/net/wireless/ath/ath9k/channel.c 	sc->cur_chan = sc->next_chan;
sc               1286 drivers/net/wireless/ath/ath9k/channel.c 	sc->cur_chan->stopped = false;
sc               1287 drivers/net/wireless/ath/ath9k/channel.c 	sc->next_chan = NULL;
sc               1289 drivers/net/wireless/ath/ath9k/channel.c 	if (!sc->sched.offchannel_pending)
sc               1290 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.offchannel_duration = 0;
sc               1292 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->sched.state != ATH_CHANCTX_STATE_FORCE_ACTIVE)
sc               1293 drivers/net/wireless/ath/ath9k/channel.c 		sc->sched.state = ATH_CHANCTX_STATE_IDLE;
sc               1295 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->chan_lock);
sc               1297 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->sc_ah->chip_fullsleep ||
sc               1298 drivers/net/wireless/ath/ath9k/channel.c 	    memcmp(&sc->cur_chandef, &sc->cur_chan->chandef,
sc               1299 drivers/net/wireless/ath/ath9k/channel.c 		   sizeof(sc->cur_chandef))) {
sc               1302 drivers/net/wireless/ath/ath9k/channel.c 			__func__, sc->cur_chan->chandef.center_freq1);
sc               1303 drivers/net/wireless/ath/ath9k/channel.c 		ath_set_channel(sc);
sc               1305 drivers/net/wireless/ath/ath9k/channel.c 			sc->sched.channel_switch_time =
sc               1315 drivers/net/wireless/ath/ath9k/channel.c 		ath9k_chanctx_wake_queues(sc, old_ctx);
sc               1318 drivers/net/wireless/ath/ath9k/channel.c 		ath_chanctx_send_ps_frame(sc, false);
sc               1320 drivers/net/wireless/ath/ath9k/channel.c 	ath_offchannel_channel_change(sc);
sc               1321 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_event(sc, NULL, ATH_CHANCTX_EVENT_SWITCH);
sc               1326 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_softc *sc = container_of(work, struct ath_softc,
sc               1328 drivers/net/wireless/ath/ath9k/channel.c 	mutex_lock(&sc->mutex);
sc               1329 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_set_next(sc, false);
sc               1330 drivers/net/wireless/ath/ath9k/channel.c 	mutex_unlock(&sc->mutex);
sc               1333 drivers/net/wireless/ath/ath9k/channel.c void ath9k_offchannel_init(struct ath_softc *sc)
sc               1336 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1347 drivers/net/wireless/ath/ath9k/channel.c 	ctx = &sc->offchannel.chan;
sc               1358 drivers/net/wireless/ath/ath9k/channel.c 	sc->offchannel.chan.offchannel = true;
sc               1361 drivers/net/wireless/ath/ath9k/channel.c void ath9k_init_channel_context(struct ath_softc *sc)
sc               1363 drivers/net/wireless/ath/ath9k/channel.c 	INIT_WORK(&sc->chanctx_work, ath_chanctx_work);
sc               1365 drivers/net/wireless/ath/ath9k/channel.c 	timer_setup(&sc->offchannel.timer, ath_offchannel_timer, 0);
sc               1366 drivers/net/wireless/ath/ath9k/channel.c 	timer_setup(&sc->sched.timer, ath_chanctx_timer, 0);
sc               1368 drivers/net/wireless/ath/ath9k/channel.c 	init_completion(&sc->go_beacon);
sc               1371 drivers/net/wireless/ath/ath9k/channel.c void ath9k_deinit_channel_context(struct ath_softc *sc)
sc               1373 drivers/net/wireless/ath/ath9k/channel.c 	cancel_work_sync(&sc->chanctx_work);
sc               1385 drivers/net/wireless/ath/ath9k/channel.c void ath9k_chanctx_stop_queues(struct ath_softc *sc, struct ath_chanctx *ctx)
sc               1387 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc               1390 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
sc               1391 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_stop_queue(sc->hw,
sc               1392 drivers/net/wireless/ath/ath9k/channel.c 				     sc->hw->offchannel_tx_hw_queue);
sc               1395 drivers/net/wireless/ath/ath9k/channel.c 			ieee80211_stop_queue(sc->hw,
sc               1400 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_stop_queue(sc->hw, sc->hw->queues - 2);
sc               1404 drivers/net/wireless/ath/ath9k/channel.c void ath9k_chanctx_wake_queues(struct ath_softc *sc, struct ath_chanctx *ctx)
sc               1406 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc               1409 drivers/net/wireless/ath/ath9k/channel.c 	if (ctx == &sc->offchannel.chan) {
sc               1410 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_wake_queue(sc->hw,
sc               1411 drivers/net/wireless/ath/ath9k/channel.c 				     sc->hw->offchannel_tx_hw_queue);
sc               1414 drivers/net/wireless/ath/ath9k/channel.c 			ieee80211_wake_queue(sc->hw,
sc               1419 drivers/net/wireless/ath/ath9k/channel.c 		ieee80211_wake_queue(sc->hw, sc->hw->queues - 2);
sc               1426 drivers/net/wireless/ath/ath9k/channel.c static void ath9k_update_p2p_ps_timer(struct ath_softc *sc, struct ath_vif *avp)
sc               1428 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1429 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_hw *ah = sc->sc_ah;
sc               1435 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_hw_gen_timer_stop(ah, sc->p2p_ps_timer);
sc               1437 drivers/net/wireless/ath/ath9k/channel.c 	tsf = ath9k_hw_gettsf32(sc->sc_ah);
sc               1452 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_hw_gen_timer_start(ah, sc->p2p_ps_timer, target_tsf, 1000000);
sc               1455 drivers/net/wireless/ath/ath9k/channel.c static void ath9k_update_p2p_ps(struct ath_softc *sc, struct ieee80211_vif *vif)
sc               1460 drivers/net/wireless/ath/ath9k/channel.c 	if (!sc->p2p_ps_timer)
sc               1466 drivers/net/wireless/ath/ath9k/channel.c 	sc->p2p_ps_vif = avp;
sc               1468 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->ps_flags & PS_BEACON_SYNC)
sc               1471 drivers/net/wireless/ath/ath9k/channel.c 	tsf = ath9k_hw_gettsf32(sc->sc_ah);
sc               1473 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_update_p2p_ps_timer(sc, avp);
sc               1476 drivers/net/wireless/ath/ath9k/channel.c static u8 ath9k_get_ctwin(struct ath_softc *sc, struct ath_vif *avp)
sc               1478 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon;
sc               1500 drivers/net/wireless/ath/ath9k/channel.c void ath9k_beacon_add_noa(struct ath_softc *sc, struct ath_vif *avp,
sc               1530 drivers/net/wireless/ath/ath9k/channel.c 	noa->oppps_ctwindow = ath9k_get_ctwin(sc, avp);
sc               1536 drivers/net/wireless/ath/ath9k/channel.c 			u32 interval = TU_TO_USEC(sc->cur_chan->beacon.beacon_interval);
sc               1557 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_softc *sc = priv;
sc               1558 drivers/net/wireless/ath/ath9k/channel.c 	struct ath_vif *avp = sc->p2p_ps_vif;
sc               1564 drivers/net/wireless/ath/ath9k/channel.c 	del_timer_sync(&sc->sched.timer);
sc               1565 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_hw_gen_timer_stop(sc->sc_ah, sc->p2p_ps_timer);
sc               1566 drivers/net/wireless/ath/ath9k/channel.c 	ath_chanctx_event(sc, NULL, ATH_CHANCTX_EVENT_TSF_TIMER);
sc               1568 drivers/net/wireless/ath/ath9k/channel.c 	if (!avp || avp->chanctx != sc->cur_chan)
sc               1571 drivers/net/wireless/ath/ath9k/channel.c 	tsf = ath9k_hw_gettsf32(sc->sc_ah);
sc               1581 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_update_p2p_ps_timer(sc, avp);
sc               1596 drivers/net/wireless/ath/ath9k/channel.c 		ath_tx_aggr_sleep(sta, sc, an);
sc               1598 drivers/net/wireless/ath/ath9k/channel.c 		ath_tx_aggr_wakeup(sc, an);
sc               1604 drivers/net/wireless/ath/ath9k/channel.c void ath9k_p2p_bss_info_changed(struct ath_softc *sc,
sc               1609 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc               1610 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc               1611 drivers/net/wireless/ath/ath9k/channel.c 	ath9k_update_p2p_ps(sc, vif);
sc               1612 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc               1613 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc               1616 drivers/net/wireless/ath/ath9k/channel.c void ath9k_p2p_beacon_sync(struct ath_softc *sc)
sc               1618 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->p2p_ps_vif)
sc               1619 drivers/net/wireless/ath/ath9k/channel.c 		ath9k_update_p2p_ps(sc, sc->p2p_ps_vif->vif);
sc               1622 drivers/net/wireless/ath/ath9k/channel.c void ath9k_p2p_remove_vif(struct ath_softc *sc,
sc               1627 drivers/net/wireless/ath/ath9k/channel.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc               1628 drivers/net/wireless/ath/ath9k/channel.c 	if (avp == sc->p2p_ps_vif) {
sc               1629 drivers/net/wireless/ath/ath9k/channel.c 		sc->p2p_ps_vif = NULL;
sc               1630 drivers/net/wireless/ath/ath9k/channel.c 		ath9k_update_p2p_ps_timer(sc, NULL);
sc               1632 drivers/net/wireless/ath/ath9k/channel.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc               1635 drivers/net/wireless/ath/ath9k/channel.c int ath9k_init_p2p(struct ath_softc *sc)
sc               1637 drivers/net/wireless/ath/ath9k/channel.c 	sc->p2p_ps_timer = ath_gen_timer_alloc(sc->sc_ah, ath9k_p2p_ps_timer,
sc               1638 drivers/net/wireless/ath/ath9k/channel.c 					       NULL, sc, AR_FIRST_NDP_TIMER);
sc               1639 drivers/net/wireless/ath/ath9k/channel.c 	if (!sc->p2p_ps_timer)
sc               1645 drivers/net/wireless/ath/ath9k/channel.c void ath9k_deinit_p2p(struct ath_softc *sc)
sc               1647 drivers/net/wireless/ath/ath9k/channel.c 	if (sc->p2p_ps_timer)
sc               1648 drivers/net/wireless/ath/ath9k/channel.c 		ath_gen_timer_free(sc->sc_ah, sc->p2p_ps_timer);
sc                474 drivers/net/wireless/ath/ath9k/common-spectral.c 	struct ath_softc *sc = (struct ath_softc *)common->priv;
sc                627 drivers/net/wireless/ath/ath9k/common-spectral.c 					RX_STAT_INC(sc, rx_spectral_sample_good);
sc                629 drivers/net/wireless/ath/ath9k/common-spectral.c 					RX_STAT_INC(sc, rx_spectral_sample_err);
sc                645 drivers/net/wireless/ath/ath9k/common-spectral.c 					RX_STAT_INC(sc, rx_spectral_sample_good);
sc                647 drivers/net/wireless/ath/ath9k/common-spectral.c 					RX_STAT_INC(sc, rx_spectral_sample_err);
sc                 29 drivers/net/wireless/ath/ath9k/debug.c void ath9k_debug_sync_cause(struct ath_softc *sc, u32 sync_cause)
sc                 32 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.sync_cause_all++;
sc                 34 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.sync_rtc_irq++;
sc                 36 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.sync_mac_irq++;
sc                 38 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.eeprom_illegal_access++;
sc                 40 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.apb_timeout++;
sc                 42 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.pci_mode_conflict++;
sc                 44 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.host1_fatal++;
sc                 46 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.host1_perr++;
sc                 48 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.trcv_fifo_perr++;
sc                 50 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.radm_cpl_ep++;
sc                 52 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.radm_cpl_dllp_abort++;
sc                 54 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.radm_cpl_tlp_abort++;
sc                 56 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.radm_cpl_ecrc_err++;
sc                 58 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.radm_cpl_timeout++;
sc                 60 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.local_timeout++;
sc                 62 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.pm_access++;
sc                 64 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.mac_awake++;
sc                 66 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.mac_asleep++;
sc                 68 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.mac_sleep_access++;
sc                 89 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                 90 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                101 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                102 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                135 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                136 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                137 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                191 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                192 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                212 drivers/net/wireless/ath/ath9k/debug.c 		ath_stop_ani(sc);
sc                214 drivers/net/wireless/ath/ath9k/debug.c 		ath_check_ani(sc);
sc                234 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                235 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                247 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                248 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                249 drivers/net/wireless/ath/ath9k/debug.c 	struct ath9k_hw_capabilities *pCap = &sc->sc_ah->caps;
sc                266 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                267 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_hw_set_bt_ant_diversity(sc->sc_ah, common->bt_ant_diversity);
sc                270 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                285 drivers/net/wireless/ath/ath9k/debug.c void ath9k_debug_stat_ant(struct ath_softc *sc,
sc                289 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_antenna_stats *as_main = &sc->debug.stats.ant_stats[ANT_MAIN];
sc                290 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_antenna_stats *as_alt = &sc->debug.stats.ant_stats[ANT_ALT];
sc                303 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                304 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                306 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_antenna_stats *as_main = &sc->debug.stats.ant_stats[ANT_MAIN];
sc                307 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_antenna_stats *as_alt = &sc->debug.stats.ant_stats[ANT_ALT];
sc                327 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                337 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                407 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                408 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                413 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                469 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                474 drivers/net/wireless/ath/ath9k/debug.c void ath_debug_stat_interrupt(struct ath_softc *sc, enum ath9k_int status)
sc                477 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.total++;
sc                478 drivers/net/wireless/ath/ath9k/debug.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
sc                480 drivers/net/wireless/ath/ath9k/debug.c 			sc->debug.stats.istats.rxlp++;
sc                482 drivers/net/wireless/ath/ath9k/debug.c 			sc->debug.stats.istats.rxhp++;
sc                484 drivers/net/wireless/ath/ath9k/debug.c 			sc->debug.stats.istats.bb_watchdog++;
sc                487 drivers/net/wireless/ath/ath9k/debug.c 			sc->debug.stats.istats.rxok++;
sc                490 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.rxeol++;
sc                492 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.rxorn++;
sc                494 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.txok++;
sc                496 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.txurn++;
sc                498 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.rxphyerr++;
sc                500 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.rx_keycache_miss++;
sc                502 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.swba++;
sc                504 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.bmiss++;
sc                506 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.bnr++;
sc                508 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.cst++;
sc                510 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.gtt++;
sc                512 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.tim++;
sc                514 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.cabend++;
sc                516 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.dtimsync++;
sc                518 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.dtim++;
sc                520 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.tsfoor++;
sc                522 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.mci++;
sc                524 drivers/net/wireless/ath/ath9k/debug.c 		sc->debug.stats.istats.gen_timer++;
sc                530 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                535 drivers/net/wireless/ath/ath9k/debug.c 			   sc->debug.stats.istats.s);		\
sc                538 drivers/net/wireless/ath/ath9k/debug.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
sc                594 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                623 drivers/net/wireless/ath/ath9k/debug.c static void print_queue(struct ath_softc *sc, struct ath_txq *txq,
sc                626 drivers/net/wireless/ath/ath9k/debug.c 	ath_txq_lock(sc, txq);
sc                633 drivers/net/wireless/ath/ath9k/debug.c 	ath_txq_unlock(sc, txq);
sc                639 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                647 drivers/net/wireless/ath/ath9k/debug.c 		txq = sc->tx.txq_map[i];
sc                649 drivers/net/wireless/ath/ath9k/debug.c 		print_queue(sc, txq, file);
sc                653 drivers/net/wireless/ath/ath9k/debug.c 	print_queue(sc, sc->beacon.cabq, file);
sc                661 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                662 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                671 drivers/net/wireless/ath/ath9k/debug.c 		   ath_opmode_to_string(sc->sc_ah->opmode));
sc                673 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                674 drivers/net/wireless/ath/ath9k/debug.c 	rxfilter = ath9k_hw_getrxfilter(sc->sc_ah);
sc                675 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                710 drivers/net/wireless/ath/ath9k/debug.c 	reg = sc->sc_ah->imask;
sc                732 drivers/net/wireless/ath/ath9k/debug.c 	ath_for_each_chanctx(sc, ctx) {
sc                735 drivers/net/wireless/ath/ath9k/debug.c 		ath9k_calculate_iter_data(sc, ctx, &iter_data);
sc                743 drivers/net/wireless/ath/ath9k/debug.c 			   iter_data.nadhocs, iter_data.nocbs, sc->cur_chan->nvifs,
sc                744 drivers/net/wireless/ath/ath9k/debug.c 			   sc->nbcnvifs);
sc                753 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                776 drivers/net/wireless/ath/ath9k/debug.c 			   sc->debug.stats.reset[i]);
sc                782 drivers/net/wireless/ath/ath9k/debug.c void ath_debug_stat_tx(struct ath_softc *sc, struct ath_buf *bf,
sc                788 drivers/net/wireless/ath/ath9k/debug.c 	TX_STAT_INC(sc, qnum, tx_pkts_all);
sc                789 drivers/net/wireless/ath/ath9k/debug.c 	sc->debug.stats.txstats[qnum].tx_bytes_all += bf->bf_mpdu->len;
sc                793 drivers/net/wireless/ath/ath9k/debug.c 			TX_STAT_INC(sc, qnum, a_xretries);
sc                795 drivers/net/wireless/ath/ath9k/debug.c 			TX_STAT_INC(sc, qnum, a_completed);
sc                798 drivers/net/wireless/ath/ath9k/debug.c 			TX_STAT_INC(sc, qnum, xretries);
sc                800 drivers/net/wireless/ath/ath9k/debug.c 			TX_STAT_INC(sc, qnum, completed);
sc                804 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, txerr_filtered);
sc                806 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, fifo_underrun);
sc                808 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, xtxop);
sc                810 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, timer_exp);
sc                812 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, desc_cfg_err);
sc                814 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, data_underrun);
sc                816 drivers/net/wireless/ath/ath9k/debug.c 		TX_STAT_INC(sc, qnum, delim_underrun);
sc                819 drivers/net/wireless/ath/ath9k/debug.c void ath_debug_stat_rx(struct ath_softc *sc, struct ath_rx_status *rs)
sc                821 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_debug_stat_rx(&sc->debug.stats.rxstats, rs);
sc                827 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                831 drivers/net/wireless/ath/ath9k/debug.c 	len = sprintf(buf, "0x%08x\n", sc->debug.regidx);
sc                838 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                851 drivers/net/wireless/ath/ath9k/debug.c 	sc->debug.regidx = regidx;
sc                866 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                867 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                872 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                873 drivers/net/wireless/ath/ath9k/debug.c 	regval = REG_READ_D(ah, sc->debug.regidx);
sc                874 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                882 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc                883 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                896 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                897 drivers/net/wireless/ath/ath9k/debug.c 	REG_WRITE_D(ah, sc->debug.regidx, regval);
sc                898 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
sc                914 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = inode->i_private;
sc                931 drivers/net/wireless/ath/ath9k/debug.c 	max_reg_offset = AR_SREV_9300_20_OR_LATER(sc->sc_ah) ? 0x8800 : 0xb500;
sc                938 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_wakeup(sc);
sc                947 drivers/net/wireless/ath/ath9k/debug.c 			"0x%06x 0x%08x\n", i << 2, REG_READ(sc->sc_ah, i << 2));
sc                949 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_ps_restore(sc);
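Note: several of the debug.c entries above (the rxfilter dump, the regidx/regval read and write, the regdump generation) bracket their hardware access with ath9k_ps_wakeup(sc) and ath9k_ps_restore(sc) so the chip is held awake only for the duration of the register access. A minimal, hypothetical sketch of a reference-counted wake/restore bracket of that shape is given below; my_softc, my_ps_wakeup and my_ps_restore are illustrative names, not ath9k APIs, and the actual power transition is reduced to a field assignment.

	#include <linux/spinlock.h>

	enum my_pm_mode { MY_PM_AWAKE, MY_PM_SLEEP };

	struct my_softc {
		spinlock_t pm_lock;		/* plays the role sc->sc_pm_lock plays above */
		int ps_usecount;
		enum my_pm_mode power_mode;
	};

	/* take a wake reference; the first user forces the chip awake */
	static void my_ps_wakeup(struct my_softc *sc)
	{
		unsigned long flags;

		spin_lock_irqsave(&sc->pm_lock, flags);
		if (++sc->ps_usecount == 1)
			sc->power_mode = MY_PM_AWAKE;	/* stand-in for the real power call */
		spin_unlock_irqrestore(&sc->pm_lock, flags);
	}

	/* drop the wake reference; the last user lets the chip sleep again */
	static void my_ps_restore(struct my_softc *sc)
	{
		unsigned long flags;

		spin_lock_irqsave(&sc->pm_lock, flags);
		if (--sc->ps_usecount == 0)
			sc->power_mode = MY_PM_SLEEP;
		spin_unlock_irqrestore(&sc->pm_lock, flags);
	}

Register accesses are then wrapped as my_ps_wakeup(sc); ...access registers...; my_ps_restore(sc);, which is the shape visible in the entries above.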
sc                967 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc                968 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc                969 drivers/net/wireless/ath/ath9k/debug.c 	struct ath9k_nfcal_hist *h = sc->cur_chan->caldata.nfCalHist;
sc                997 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1006 drivers/net/wireless/ath/ath9k/debug.c 	if (!sc->sc_ah->common.btcoex_enabled) {
sc               1012 drivers/net/wireless/ath/ath9k/debug.c 	len = ath9k_dump_btcoex(sc, buf, size);
sc               1032 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1033 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc               1056 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1066 drivers/net/wireless/ath/ath9k/debug.c 			 sc->force_wow ? "ENABLED" : "DISABLED");
sc               1080 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1096 drivers/net/wireless/ath/ath9k/debug.c 	if (!sc->force_wow) {
sc               1097 drivers/net/wireless/ath/ath9k/debug.c 		sc->force_wow = true;
sc               1098 drivers/net/wireless/ath/ath9k/debug.c 		ath9k_init_wow(sc->hw);
sc               1117 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1118 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc               1142 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1143 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc               1165 drivers/net/wireless/ath/ath9k/debug.c 		mutex_lock(&sc->mutex);
sc               1166 drivers/net/wireless/ath/ath9k/debug.c 		ath9k_set_txpower(sc, NULL);
sc               1167 drivers/net/wireless/ath/ath9k/debug.c 		mutex_unlock(&sc->mutex);
sc               1185 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1186 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc               1202 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = file->private_data;
sc               1203 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_hw *ah = sc->sc_ah;
sc               1298 drivers/net/wireless/ath/ath9k/debug.c 		data[i++] = sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BE)].elem; \
sc               1299 drivers/net/wireless/ath/ath9k/debug.c 		data[i++] = sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BK)].elem; \
sc               1300 drivers/net/wireless/ath/ath9k/debug.c 		data[i++] = sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VI)].elem; \
sc               1301 drivers/net/wireless/ath/ath9k/debug.c 		data[i++] = sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VO)].elem; \
sc               1306 drivers/net/wireless/ath/ath9k/debug.c 		data[i++] = sc->debug.stats.rxstats.elem;	\
sc               1313 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = hw->priv;
sc               1316 drivers/net/wireless/ath/ath9k/debug.c 	data[i++] = (sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BE)].tx_pkts_all +
sc               1317 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BK)].tx_pkts_all +
sc               1318 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VI)].tx_pkts_all +
sc               1319 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VO)].tx_pkts_all);
sc               1320 drivers/net/wireless/ath/ath9k/debug.c 	data[i++] = (sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BE)].tx_bytes_all +
sc               1321 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_BK)].tx_bytes_all +
sc               1322 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VI)].tx_bytes_all +
sc               1323 drivers/net/wireless/ath/ath9k/debug.c 		     sc->debug.stats.txstats[PR_QNUM(IEEE80211_AC_VO)].tx_bytes_all);
sc               1359 drivers/net/wireless/ath/ath9k/debug.c void ath9k_deinit_debug(struct ath_softc *sc)
sc               1361 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_spectral_deinit_debug(&sc->spec_priv);
sc               1367 drivers/net/wireless/ath/ath9k/debug.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc               1369 drivers/net/wireless/ath/ath9k/debug.c 	sc->debug.debugfs_phy = debugfs_create_dir("ath9k",
sc               1370 drivers/net/wireless/ath/ath9k/debug.c 						   sc->hw->wiphy->debugfsdir);
sc               1371 drivers/net/wireless/ath/ath9k/debug.c 	if (!sc->debug.debugfs_phy)
sc               1375 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("debug", 0600, sc->debug.debugfs_phy,
sc               1376 drivers/net/wireless/ath/ath9k/debug.c 			    sc, &fops_debug);
sc               1379 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_dfs_init_debug(sc);
sc               1380 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_tx99_init_debug(sc);
sc               1381 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_spectral_init_debug(&sc->spec_priv, sc->debug.debugfs_phy);
sc               1383 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "dma", sc->debug.debugfs_phy,
sc               1385 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "interrupt", sc->debug.debugfs_phy,
sc               1387 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "xmit", sc->debug.debugfs_phy,
sc               1389 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "queues", sc->debug.debugfs_phy,
sc               1391 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "misc", sc->debug.debugfs_phy,
sc               1393 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "reset", sc->debug.debugfs_phy,
sc               1396 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_debug_recv(sc->debug.debugfs_phy, &sc->debug.stats.rxstats);
sc               1397 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_debug_phy_err(sc->debug.debugfs_phy, &sc->debug.stats.rxstats);
sc               1399 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_u8("rx_chainmask", 0400, sc->debug.debugfs_phy,
sc               1401 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_u8("tx_chainmask", 0400, sc->debug.debugfs_phy,
sc               1404 drivers/net/wireless/ath/ath9k/debug.c 			    sc->debug.debugfs_phy, sc, &fops_ani);
sc               1405 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_bool("paprd", 0600, sc->debug.debugfs_phy,
sc               1406 drivers/net/wireless/ath/ath9k/debug.c 			    &sc->sc_ah->config.enable_paprd);
sc               1407 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("regidx", 0600, sc->debug.debugfs_phy,
sc               1408 drivers/net/wireless/ath/ath9k/debug.c 			    sc, &fops_regidx);
sc               1409 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("regval", 0600, sc->debug.debugfs_phy,
sc               1410 drivers/net/wireless/ath/ath9k/debug.c 			    sc, &fops_regval);
sc               1412 drivers/net/wireless/ath/ath9k/debug.c 			    sc->debug.debugfs_phy,
sc               1414 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("regdump", 0400, sc->debug.debugfs_phy, sc,
sc               1416 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_devm_seqfile(sc->dev, "dump_nfcal",
sc               1417 drivers/net/wireless/ath/ath9k/debug.c 				    sc->debug.debugfs_phy,
sc               1420 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_debug_base_eeprom(sc->debug.debugfs_phy, sc->sc_ah);
sc               1421 drivers/net/wireless/ath/ath9k/debug.c 	ath9k_cmn_debug_modal_eeprom(sc->debug.debugfs_phy, sc->sc_ah);
sc               1424 drivers/net/wireless/ath/ath9k/debug.c 			   sc->debug.debugfs_phy, &sc->sc_ah->gpio_mask);
sc               1426 drivers/net/wireless/ath/ath9k/debug.c 			   sc->debug.debugfs_phy, &sc->sc_ah->gpio_val);
sc               1428 drivers/net/wireless/ath/ath9k/debug.c 			    sc->debug.debugfs_phy, sc, &fops_antenna_diversity);
sc               1431 drivers/net/wireless/ath/ath9k/debug.c 			    sc->debug.debugfs_phy, sc, &fops_bt_ant_diversity);
sc               1432 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("btcoex", 0400, sc->debug.debugfs_phy, sc,
sc               1437 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("wow", 0600, sc->debug.debugfs_phy, sc, &fops_wow);
sc               1441 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("ack_to", 0400, sc->debug.debugfs_phy,
sc               1442 drivers/net/wireless/ath/ath9k/debug.c 			    sc, &fops_ackto);
sc               1444 drivers/net/wireless/ath/ath9k/debug.c 	debugfs_create_file("tpc", 0600, sc->debug.debugfs_phy, sc, &fops_tpc);
sc               1447 drivers/net/wireless/ath/ath9k/debug.c 			    sc->debug.debugfs_phy, sc, &fops_nf_override);
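Note: the debug.c entries above (starting at the debugfs_create_dir("ath9k", ...) call) create a per-phy debugfs directory and populate it with files such as "debug", "regidx", "regval" and the devm seq files, each passing sc as the file's private data. A minimal, hypothetical sketch of that registration pattern follows; my_softc, my_debug_init and the "stats" node are illustrative names only, not ath9k code.

	#include <linux/debugfs.h>
	#include <linux/fs.h>
	#include <linux/kernel.h>
	#include <linux/module.h>
	#include <linux/types.h>

	struct my_softc {
		struct dentry *debugfs_phy;	/* plays the role of sc->debug.debugfs_phy */
		u32 rx_pkts;
	};

	static ssize_t stats_read(struct file *file, char __user *ubuf,
				  size_t count, loff_t *ppos)
	{
		struct my_softc *sc = file->private_data;	/* filled in by simple_open() */
		char buf[32];
		int len = scnprintf(buf, sizeof(buf), "rx_pkts: %u\n", sc->rx_pkts);

		return simple_read_from_buffer(ubuf, count, ppos, buf, len);
	}

	static const struct file_operations fops_stats = {
		.read	= stats_read,
		.open	= simple_open,		/* copies inode->i_private into file->private_data */
		.owner	= THIS_MODULE,
		.llseek	= default_llseek,
	};

	static void my_debug_init(struct my_softc *sc, struct dentry *parent)
	{
		sc->debugfs_phy = debugfs_create_dir("my_phy", parent);
		debugfs_create_file("stats", 0400, sc->debugfs_phy, sc, &fops_stats);
	}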
sc                 28 drivers/net/wireless/ath/ath9k/debug.h #define TX_STAT_INC(sc, q, c)	 do { (sc)->debug.stats.txstats[q].c++; } while (0)
sc                 29 drivers/net/wireless/ath/ath9k/debug.h #define RX_STAT_INC(sc, c)	 do { (sc)->debug.stats.rxstats.c++; } while (0)
sc                 30 drivers/net/wireless/ath/ath9k/debug.h #define RESET_STAT_INC(sc, type) do { (sc)->debug.stats.reset[type]++; } while (0)
sc                 31 drivers/net/wireless/ath/ath9k/debug.h #define ANT_STAT_INC(sc, i, c)	 do { (sc)->debug.stats.ant_stats[i].c++; } while (0)
sc                 32 drivers/net/wireless/ath/ath9k/debug.h #define ANT_LNA_INC(sc, i, c)	 do { (sc)->debug.stats.ant_stats[i].lna_recv_cnt[c]++; } while (0)
sc                 34 drivers/net/wireless/ath/ath9k/debug.h #define TX_STAT_INC(sc, q, c)	 do { (void)(sc); } while (0)
sc                 35 drivers/net/wireless/ath/ath9k/debug.h #define RX_STAT_INC(sc, c)	 do { (void)(sc); } while (0)
sc                 36 drivers/net/wireless/ath/ath9k/debug.h #define RESET_STAT_INC(sc, type) do { (void)(sc); } while (0)
sc                 37 drivers/net/wireless/ath/ath9k/debug.h #define ANT_STAT_INC(sc, i, c)	 do { (void)(sc); } while (0)
sc                 38 drivers/net/wireless/ath/ath9k/debug.h #define ANT_LNA_INC(sc, i, c)	 do { (void)(sc); } while (0)
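Note: the debug.h entries above show the two definitions of the statistics macros: with debugfs support they increment counters inside sc->debug.stats, and without it they collapse to (void)(sc) so the many call sites compile away cleanly. A standalone illustration of the same compile-out idiom follows; it is ordinary user-space C with hypothetical names, not ath9k code, and builds with or without -DMY_DEBUG.

	#include <stdio.h>

	struct softc {
		struct { unsigned long rx_pkts; } stats;
	};

	#ifdef MY_DEBUG
	#define RX_STAT_INC(sc, c)	do { (sc)->stats.c++; } while (0)
	#else
	/* keep the argument referenced so disabling stats cannot break the build */
	#define RX_STAT_INC(sc, c)	do { (void)(sc); } while (0)
	#endif

	int main(void)
	{
		struct softc sc = { { 0 } };

		RX_STAT_INC(&sc, rx_pkts);
		/* prints 1 when built with -DMY_DEBUG, 0 otherwise */
		printf("rx_pkts = %lu\n", sc.stats.rx_pkts);
		return 0;
	}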
sc                195 drivers/net/wireless/ath/ath9k/debug.h #define PR_QNUM(_n) sc->tx.txq_map[_n]->axq_qnum
sc                196 drivers/net/wireless/ath/ath9k/debug.h #define TXSTATS sc->debug.stats.txstats
sc                255 drivers/net/wireless/ath/ath9k/debug.h void ath9k_deinit_debug(struct ath_softc *sc);
sc                257 drivers/net/wireless/ath/ath9k/debug.h void ath_debug_stat_interrupt(struct ath_softc *sc, enum ath9k_int status);
sc                258 drivers/net/wireless/ath/ath9k/debug.h void ath_debug_stat_tx(struct ath_softc *sc, struct ath_buf *bf,
sc                261 drivers/net/wireless/ath/ath9k/debug.h void ath_debug_stat_rx(struct ath_softc *sc, struct ath_rx_status *rs);
sc                274 drivers/net/wireless/ath/ath9k/debug.h void ath9k_debug_stat_ant(struct ath_softc *sc,
sc                277 drivers/net/wireless/ath/ath9k/debug.h void ath9k_debug_sync_cause(struct ath_softc *sc, u32 sync_cause);
sc                286 drivers/net/wireless/ath/ath9k/debug.h static inline void ath9k_deinit_debug(struct ath_softc *sc)
sc                289 drivers/net/wireless/ath/ath9k/debug.h static inline void ath_debug_stat_interrupt(struct ath_softc *sc,
sc                293 drivers/net/wireless/ath/ath9k/debug.h static inline void ath_debug_stat_tx(struct ath_softc *sc,
sc                300 drivers/net/wireless/ath/ath9k/debug.h static inline void ath_debug_stat_rx(struct ath_softc *sc,
sc                304 drivers/net/wireless/ath/ath9k/debug.h static inline void ath9k_debug_stat_ant(struct ath_softc *sc,
sc                312 drivers/net/wireless/ath/ath9k/debug.h ath9k_debug_sync_cause(struct ath_softc *sc, u32 sync_cause)
sc                319 drivers/net/wireless/ath/ath9k/debug.h void ath_debug_rate_stats(struct ath_softc *sc,
sc                323 drivers/net/wireless/ath/ath9k/debug.h static inline void ath_debug_rate_stats(struct ath_softc *sc,
sc                 27 drivers/net/wireless/ath/ath9k/debug_sta.c 	struct ath_softc *sc = an->sc;
sc                 58 drivers/net/wireless/ath/ath9k/debug_sta.c 		ath_txq_lock(sc, txq);
sc                 71 drivers/net/wireless/ath/ath9k/debug_sta.c 		ath_txq_unlock(sc, txq);
sc                 91 drivers/net/wireless/ath/ath9k/debug_sta.c void ath_debug_rate_stats(struct ath_softc *sc,
sc                 96 drivers/net/wireless/ath/ath9k/debug_sta.c 	struct ath_hw *ah = sc->sc_ah;
sc                107 drivers/net/wireless/ath/ath9k/debug_sta.c 	sta = ieee80211_find_sta_by_ifaddr(sc->hw, hdr->addr2, NULL);
sc                172 drivers/net/wireless/ath/ath9k/debug_sta.c 	struct ath_softc *sc = an->sc;
sc                173 drivers/net/wireless/ath/ath9k/debug_sta.c 	struct ath_hw *ah = sc->sc_ah;
sc                100 drivers/net/wireless/ath/ath9k/dfs.c static bool ath9k_check_chirping(struct ath_softc *sc, u8 *data,
sc                105 drivers/net/wireless/ath/ath9k/dfs.c 	struct ath_hw *ah = sc->sc_ah;
sc                200 drivers/net/wireless/ath/ath9k/dfs.c ath9k_postprocess_radar_event(struct ath_softc *sc,
sc                217 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, pri_phy_errors);
sc                227 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, ext_phy_errors);
sc                246 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, dc_phy_errors);
sc                256 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, bwinfo_discards);
sc                261 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, rssi_discards);
sc                266 drivers/net/wireless/ath/ath9k/dfs.c 	pe->width = dur_to_usecs(sc->sc_ah, dur);
sc                269 drivers/net/wireless/ath/ath9k/dfs.c 	DFS_STAT_INC(sc, pulses_detected);
sc                274 drivers/net/wireless/ath/ath9k/dfs.c ath9k_dfs_process_radar_pulse(struct ath_softc *sc, struct pulse_event *pe)
sc                276 drivers/net/wireless/ath/ath9k/dfs.c 	struct dfs_pattern_detector *pd = sc->dfs_detector;
sc                277 drivers/net/wireless/ath/ath9k/dfs.c 	DFS_STAT_INC(sc, pulses_processed);
sc                282 drivers/net/wireless/ath/ath9k/dfs.c 	DFS_STAT_INC(sc, radar_detected);
sc                283 drivers/net/wireless/ath/ath9k/dfs.c 	ieee80211_radar_detected(sc->hw);
sc                289 drivers/net/wireless/ath/ath9k/dfs.c void ath9k_dfs_process_phyerr(struct ath_softc *sc, void *data,
sc                296 drivers/net/wireless/ath/ath9k/dfs.c 	struct ath_hw *ah = sc->sc_ah;
sc                299 drivers/net/wireless/ath/ath9k/dfs.c 	DFS_STAT_INC(sc, pulses_total);
sc                305 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, pulses_no_dfs);
sc                311 drivers/net/wireless/ath/ath9k/dfs.c 		DFS_STAT_INC(sc, datalen_discards);
sc                333 drivers/net/wireless/ath/ath9k/dfs.c 	if (!ath9k_postprocess_radar_event(sc, &ard, &pe))
sc                341 drivers/net/wireless/ath/ath9k/dfs.c 		pe.chirp = ath9k_check_chirping(sc, data, clen, is_ctl, is_ext);
sc                350 drivers/net/wireless/ath/ath9k/dfs.c 		pe.ts - sc->dfs_prev_pulse_ts);
sc                351 drivers/net/wireless/ath/ath9k/dfs.c 	sc->dfs_prev_pulse_ts = pe.ts;
sc                353 drivers/net/wireless/ath/ath9k/dfs.c 		ath9k_dfs_process_radar_pulse(sc, &pe);
sc                357 drivers/net/wireless/ath/ath9k/dfs.c 		ath9k_dfs_process_radar_pulse(sc, &pe);
sc                 37 drivers/net/wireless/ath/ath9k/dfs.h void ath9k_dfs_process_phyerr(struct ath_softc *sc, void *data,
sc                 41 drivers/net/wireless/ath/ath9k/dfs.h ath9k_dfs_process_phyerr(struct ath_softc *sc, void *data,
sc                 29 drivers/net/wireless/ath/ath9k/dfs_debug.c 			 sc->debug.stats.dfs_stats.p);
sc                 37 drivers/net/wireless/ath/ath9k/dfs_debug.c 	struct ath_softc *sc = file->private_data;
sc                 38 drivers/net/wireless/ath/ath9k/dfs_debug.c 	struct ath9k_hw_version *hw_ver = &sc->sc_ah->hw_version;
sc                 50 drivers/net/wireless/ath/ath9k/dfs_debug.c 			 (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_DFS) ?
sc                 53 drivers/net/wireless/ath/ath9k/dfs_debug.c 	if (!sc->dfs_detector) {
sc                 59 drivers/net/wireless/ath/ath9k/dfs_debug.c 	dfs_pool_stats = sc->dfs_detector->get_stats(sc->dfs_detector);
sc                 73 drivers/net/wireless/ath/ath9k/dfs_debug.c 			 sc->dfs_detector->region);
sc                100 drivers/net/wireless/ath/ath9k/dfs_debug.c 	struct ath_softc *sc = file->private_data;
sc                114 drivers/net/wireless/ath/ath9k/dfs_debug.c 		memset(&sc->debug.stats.dfs_stats, 0,
sc                115 drivers/net/wireless/ath/ath9k/dfs_debug.c 		       sizeof(sc->debug.stats.dfs_stats));
sc                123 drivers/net/wireless/ath/ath9k/dfs_debug.c 	struct ath_softc *sc = file->private_data;
sc                125 drivers/net/wireless/ath/ath9k/dfs_debug.c 	ieee80211_radar_detected(sc->hw);
sc                145 drivers/net/wireless/ath/ath9k/dfs_debug.c void ath9k_dfs_init_debug(struct ath_softc *sc)
sc                148 drivers/net/wireless/ath/ath9k/dfs_debug.c 			    sc->debug.debugfs_phy, sc, &fops_dfs_stats);
sc                150 drivers/net/wireless/ath/ath9k/dfs_debug.c 			    sc->debug.debugfs_phy, sc, &fops_simulate_radar);
sc                 58 drivers/net/wireless/ath/ath9k/dfs_debug.h #define DFS_STAT_INC(sc, c) (sc->debug.stats.dfs_stats.c++)
sc                 59 drivers/net/wireless/ath/ath9k/dfs_debug.h void ath9k_dfs_init_debug(struct ath_softc *sc);
sc                 65 drivers/net/wireless/ath/ath9k/dfs_debug.h #define DFS_STAT_INC(sc, c) do { } while (0)
sc                 66 drivers/net/wireless/ath/ath9k/dfs_debug.h static inline void ath9k_dfs_init_debug(struct ath_softc *sc) { }
sc                 25 drivers/net/wireless/ath/ath9k/gpio.c static void ath_fill_led_pin(struct ath_softc *sc)
sc                 27 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                 54 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_softc *sc = container_of(led_cdev, struct ath_softc, led_cdev);
sc                 57 drivers/net/wireless/ath/ath9k/gpio.c 	if (sc->sc_ah->config.led_active_high)
sc                 60 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_hw_set_gpio(sc->sc_ah, sc->sc_ah->led_pin, val);
sc                 63 drivers/net/wireless/ath/ath9k/gpio.c void ath_deinit_leds(struct ath_softc *sc)
sc                 65 drivers/net/wireless/ath/ath9k/gpio.c 	if (!sc->led_registered)
sc                 68 drivers/net/wireless/ath/ath9k/gpio.c 	ath_led_brightness(&sc->led_cdev, LED_OFF);
sc                 69 drivers/net/wireless/ath/ath9k/gpio.c 	led_classdev_unregister(&sc->led_cdev);
sc                 71 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_hw_gpio_free(sc->sc_ah, sc->sc_ah->led_pin);
sc                 74 drivers/net/wireless/ath/ath9k/gpio.c void ath_init_leds(struct ath_softc *sc)
sc                 78 drivers/net/wireless/ath/ath9k/gpio.c 	if (AR_SREV_9100(sc->sc_ah))
sc                 81 drivers/net/wireless/ath/ath9k/gpio.c 	ath_fill_led_pin(sc);
sc                 84 drivers/net/wireless/ath/ath9k/gpio.c 		sc->led_cdev.default_trigger =
sc                 85 drivers/net/wireless/ath/ath9k/gpio.c 			ieee80211_get_radio_led_name(sc->hw);
sc                 87 drivers/net/wireless/ath/ath9k/gpio.c 	snprintf(sc->led_name, sizeof(sc->led_name),
sc                 88 drivers/net/wireless/ath/ath9k/gpio.c 		"ath9k-%s", wiphy_name(sc->hw->wiphy));
sc                 89 drivers/net/wireless/ath/ath9k/gpio.c 	sc->led_cdev.name = sc->led_name;
sc                 90 drivers/net/wireless/ath/ath9k/gpio.c 	sc->led_cdev.brightness_set = ath_led_brightness;
sc                 92 drivers/net/wireless/ath/ath9k/gpio.c 	ret = led_classdev_register(wiphy_dev(sc->hw->wiphy), &sc->led_cdev);
sc                 96 drivers/net/wireless/ath/ath9k/gpio.c 	sc->led_registered = true;
sc                104 drivers/net/wireless/ath/ath9k/gpio.c static bool ath_is_rfkill_set(struct ath_softc *sc)
sc                106 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                109 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_wakeup(sc);
sc                112 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_restore(sc);
sc                119 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_softc *sc = hw->priv;
sc                120 drivers/net/wireless/ath/ath9k/gpio.c 	bool blocked = !!ath_is_rfkill_set(sc);
sc                125 drivers/net/wireless/ath/ath9k/gpio.c void ath_start_rfkill_poll(struct ath_softc *sc)
sc                127 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                130 drivers/net/wireless/ath/ath9k/gpio.c 		wiphy_rfkill_start_polling(sc->hw->wiphy);
sc                142 drivers/net/wireless/ath/ath9k/gpio.c static void ath_detect_bt_priority(struct ath_softc *sc)
sc                144 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                145 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                147 drivers/net/wireless/ath/ath9k/gpio.c 	if (ath9k_hw_gpio_get(sc->sc_ah, ah->btcoex_hw.btpriority_gpio))
sc                156 drivers/net/wireless/ath/ath9k/gpio.c 			ath_dbg(ath9k_hw_common(sc->sc_ah), BTCOEX,
sc                161 drivers/net/wireless/ath/ath9k/gpio.c 			ath_dbg(ath9k_hw_common(sc->sc_ah), BTCOEX,
sc                171 drivers/net/wireless/ath/ath9k/gpio.c static void ath_mci_ftp_adjust(struct ath_softc *sc)
sc                173 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                175 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                181 drivers/net/wireless/ath/ath9k/gpio.c 				(sc->rx.num_pkts < ATH_BTCOEX_STOMP_FTP_THRESH);
sc                185 drivers/net/wireless/ath/ath9k/gpio.c 		sc->rx.num_pkts = 0;
sc                196 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_softc *sc = from_timer(sc, t, btcoex.period_timer);
sc                197 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                198 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                203 drivers/net/wireless/ath/ath9k/gpio.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                204 drivers/net/wireless/ath/ath9k/gpio.c 	if (sc->sc_ah->power_mode == ATH9K_PM_NETWORK_SLEEP) {
sc                206 drivers/net/wireless/ath/ath9k/gpio.c 		spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                209 drivers/net/wireless/ath/ath9k/gpio.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                211 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_wakeup(sc);
sc                215 drivers/net/wireless/ath/ath9k/gpio.c 		ath9k_mci_update_rssi(sc);
sc                216 drivers/net/wireless/ath/ath9k/gpio.c 		ath_mci_ftp_adjust(sc);
sc                220 drivers/net/wireless/ath/ath9k/gpio.c 		ath_detect_bt_priority(sc);
sc                244 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_restore(sc);
sc                257 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_softc *sc = from_timer(sc, t, btcoex.no_stomp_timer);
sc                258 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                259 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                261 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_wakeup(sc);
sc                273 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_ps_restore(sc);
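Note: the two btcoex timer callbacks above recover their ath_softc from the firing struct timer_list with from_timer(), the container_of-based helper that pairs with timer_setup() (the same pairing appears later for sc->sleep_timer in init.c). A minimal, hypothetical kernel-style sketch of that pairing follows; my_softc, my_poll and my_start_poll are illustrative names.

	#include <linux/jiffies.h>
	#include <linux/timer.h>

	struct my_softc {
		struct timer_list poll_timer;
		unsigned long polls;
	};

	static void my_poll(struct timer_list *t)
	{
		/* map the timer_list back to the softc that embeds it */
		struct my_softc *sc = from_timer(sc, t, poll_timer);

		sc->polls++;
		mod_timer(&sc->poll_timer, jiffies + msecs_to_jiffies(100));	/* re-arm */
	}

	static void my_start_poll(struct my_softc *sc)
	{
		timer_setup(&sc->poll_timer, my_poll, 0);
		mod_timer(&sc->poll_timer, jiffies + msecs_to_jiffies(100));
	}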
sc                276 drivers/net/wireless/ath/ath9k/gpio.c static void ath_init_btcoex_timer(struct ath_softc *sc)
sc                278 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                296 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_btcoex_timer_resume(struct ath_softc *sc)
sc                298 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                299 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                321 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_btcoex_timer_pause(struct ath_softc *sc)
sc                323 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                324 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                336 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_btcoex_stop_gen_timer(struct ath_softc *sc)
sc                338 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                343 drivers/net/wireless/ath/ath9k/gpio.c u16 ath9k_btcoex_aggr_limit(struct ath_softc *sc, u32 max_4ms_framelen)
sc                345 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                346 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_mci_profile *mci = &sc->btcoex.mci;
sc                349 drivers/net/wireless/ath/ath9k/gpio.c 	if ((sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_MCI) && mci->aggr_limit)
sc                358 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_btcoex_handle_interrupt(struct ath_softc *sc, u32 status)
sc                361 drivers/net/wireless/ath/ath9k/gpio.c 		ath_mci_intr(sc);
sc                364 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_start_btcoex(struct ath_softc *sc)
sc                366 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                379 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_btcoex_timer_resume(sc);
sc                382 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_stop_btcoex(struct ath_softc *sc)
sc                384 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                390 drivers/net/wireless/ath/ath9k/gpio.c 	ath9k_btcoex_timer_pause(sc);
sc                394 drivers/net/wireless/ath/ath9k/gpio.c 		ath_mci_flush_profile(&sc->btcoex.mci);
sc                397 drivers/net/wireless/ath/ath9k/gpio.c void ath9k_deinit_btcoex(struct ath_softc *sc)
sc                399 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                402 drivers/net/wireless/ath/ath9k/gpio.c 		ath_mci_cleanup(sc);
sc                408 drivers/net/wireless/ath/ath9k/gpio.c 			ath9k_hw_btcoex_deinit(sc->sc_ah);
sc                412 drivers/net/wireless/ath/ath9k/gpio.c int ath9k_init_btcoex(struct ath_softc *sc)
sc                415 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                420 drivers/net/wireless/ath/ath9k/gpio.c 	switch (ath9k_hw_get_btcoex_scheme(sc->sc_ah)) {
sc                424 drivers/net/wireless/ath/ath9k/gpio.c 		ath9k_hw_btcoex_init_2wire(sc->sc_ah);
sc                427 drivers/net/wireless/ath/ath9k/gpio.c 		ath9k_hw_btcoex_init_3wire(sc->sc_ah);
sc                428 drivers/net/wireless/ath/ath9k/gpio.c 		ath_init_btcoex_timer(sc);
sc                429 drivers/net/wireless/ath/ath9k/gpio.c 		txq = sc->tx.txq_map[IEEE80211_AC_BE];
sc                430 drivers/net/wireless/ath/ath9k/gpio.c 		ath9k_hw_init_btcoex_hw(sc->sc_ah, txq->axq_qnum);
sc                433 drivers/net/wireless/ath/ath9k/gpio.c 		ath_init_btcoex_timer(sc);
sc                435 drivers/net/wireless/ath/ath9k/gpio.c 		sc->btcoex.duty_cycle = ATH_BTCOEX_DEF_DUTY_CYCLE;
sc                436 drivers/net/wireless/ath/ath9k/gpio.c 		INIT_LIST_HEAD(&sc->btcoex.mci.info);
sc                439 drivers/net/wireless/ath/ath9k/gpio.c 		r = ath_mci_setup(sc);
sc                452 drivers/net/wireless/ath/ath9k/gpio.c static int ath9k_dump_mci_btcoex(struct ath_softc *sc, u8 *buf, u32 size)
sc                454 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                456 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_hw *ah = sc->sc_ah;
sc                497 drivers/net/wireless/ath/ath9k/gpio.c static int ath9k_dump_legacy_btcoex(struct ath_softc *sc, u8 *buf, u32 size)
sc                500 drivers/net/wireless/ath/ath9k/gpio.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                511 drivers/net/wireless/ath/ath9k/gpio.c int ath9k_dump_btcoex(struct ath_softc *sc, u8 *buf, u32 size)
sc                513 drivers/net/wireless/ath/ath9k/gpio.c 	if (ath9k_hw_mci_is_enabled(sc->sc_ah))
sc                514 drivers/net/wireless/ath/ath9k/gpio.c 		return ath9k_dump_mci_btcoex(sc, buf, size);
sc                516 drivers/net/wireless/ath/ath9k/gpio.c 		return ath9k_dump_legacy_btcoex(sc, buf, size);
sc                150 drivers/net/wireless/ath/ath9k/init.c static void ath9k_deinit_softc(struct ath_softc *sc);
sc                177 drivers/net/wireless/ath/ath9k/init.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc                181 drivers/net/wireless/ath/ath9k/init.c 		spin_lock_irqsave(&sc->sc_serial_rw, flags);
sc                182 drivers/net/wireless/ath/ath9k/init.c 		iowrite32(val, sc->mem + reg_offset);
sc                183 drivers/net/wireless/ath/ath9k/init.c 		spin_unlock_irqrestore(&sc->sc_serial_rw, flags);
sc                185 drivers/net/wireless/ath/ath9k/init.c 		iowrite32(val, sc->mem + reg_offset);
sc                192 drivers/net/wireless/ath/ath9k/init.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc                197 drivers/net/wireless/ath/ath9k/init.c 		spin_lock_irqsave(&sc->sc_serial_rw, flags);
sc                198 drivers/net/wireless/ath/ath9k/init.c 		val = ioread32(sc->mem + reg_offset);
sc                199 drivers/net/wireless/ath/ath9k/init.c 		spin_unlock_irqrestore(&sc->sc_serial_rw, flags);
sc                201 drivers/net/wireless/ath/ath9k/init.c 		val = ioread32(sc->mem + reg_offset);
sc                215 drivers/net/wireless/ath/ath9k/init.c static unsigned int __ath9k_reg_rmw(struct ath_softc *sc, u32 reg_offset,
sc                220 drivers/net/wireless/ath/ath9k/init.c 	val = ioread32(sc->mem + reg_offset);
sc                223 drivers/net/wireless/ath/ath9k/init.c 	iowrite32(val, sc->mem + reg_offset);
sc                232 drivers/net/wireless/ath/ath9k/init.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc                237 drivers/net/wireless/ath/ath9k/init.c 		spin_lock_irqsave(&sc->sc_serial_rw, flags);
sc                238 drivers/net/wireless/ath/ath9k/init.c 		val = __ath9k_reg_rmw(sc, reg_offset, set, clr);
sc                239 drivers/net/wireless/ath/ath9k/init.c 		spin_unlock_irqrestore(&sc->sc_serial_rw, flags);
sc                241 drivers/net/wireless/ath/ath9k/init.c 		val = __ath9k_reg_rmw(sc, reg_offset, set, clr);
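Note: the init.c entries above are the driver's register accessors; when serialized register access is required (the condition itself is not visible in these entries), the write, read and read-modify-write helpers take sc->sc_serial_rw with spin_lock_irqsave() around ioread32()/iowrite32() so the read-modify-write cannot be interleaved. A condensed, hypothetical sketch of that pattern follows; my_softc, my_rmw and the serialize_regs flag are illustrative names.

	#include <linux/io.h>
	#include <linux/spinlock.h>
	#include <linux/types.h>

	struct my_softc {
		void __iomem *mem;		/* mapped register space, like sc->mem */
		spinlock_t serial_rw;		/* analogous role to sc->sc_serial_rw */
		bool serialize_regs;		/* illustrative "must serialize" flag */
	};

	static u32 my_rmw(struct my_softc *sc, u32 reg_offset, u32 set, u32 clr)
	{
		unsigned long flags = 0;
		u32 val;

		if (sc->serialize_regs)
			spin_lock_irqsave(&sc->serial_rw, flags);

		val = ioread32(sc->mem + reg_offset);	/* read */
		val &= ~clr;				/* clear the requested bits */
		val |= set;				/* set the requested bits */
		iowrite32(val, sc->mem + reg_offset);	/* write back */

		if (sc->serialize_regs)
			spin_unlock_irqrestore(&sc->serial_rw, flags);

		return val;
	}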
sc                254 drivers/net/wireless/ath/ath9k/init.c 	struct ath_softc *sc = hw->priv;
sc                255 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                261 drivers/net/wireless/ath/ath9k/init.c 	if (sc->dfs_detector != NULL)
sc                262 drivers/net/wireless/ath/ath9k/init.c 		sc->dfs_detector->set_dfs_domain(sc->dfs_detector,
sc                269 drivers/net/wireless/ath/ath9k/init.c 	sc->cur_chan->txpower = 2 * ah->curchan->chan->max_power;
sc                270 drivers/net/wireless/ath/ath9k/init.c 	ath9k_ps_wakeup(sc);
sc                271 drivers/net/wireless/ath/ath9k/init.c 	ath9k_hw_set_txpowerlimit(ah, sc->cur_chan->txpower, false);
sc                272 drivers/net/wireless/ath/ath9k/init.c 	ath9k_cmn_update_txpow(ah, sc->cur_chan->cur_txpower,
sc                273 drivers/net/wireless/ath/ath9k/init.c 			       sc->cur_chan->txpower,
sc                274 drivers/net/wireless/ath/ath9k/init.c 			       &sc->cur_chan->cur_txpower);
sc                275 drivers/net/wireless/ath/ath9k/init.c 	ath9k_ps_restore(sc);
sc                283 drivers/net/wireless/ath/ath9k/init.c int ath_descdma_setup(struct ath_softc *sc, struct ath_descdma *dd,
sc                287 drivers/net/wireless/ath/ath9k/init.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                297 drivers/net/wireless/ath/ath9k/init.c 		desc_len = sc->sc_ah->caps.tx_desc_len;
sc                315 drivers/net/wireless/ath/ath9k/init.c 	if (!(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_4KB_SPLITTRANS)) {
sc                329 drivers/net/wireless/ath/ath9k/init.c 	dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len,
sc                344 drivers/net/wireless/ath/ath9k/init.c 		bf = devm_kzalloc(sc->dev, bsize, GFP_KERNEL);
sc                352 drivers/net/wireless/ath/ath9k/init.c 			if (!(sc->sc_ah->caps.hw_caps &
sc                375 drivers/net/wireless/ath/ath9k/init.c 		bf = devm_kzalloc(sc->dev, bsize, GFP_KERNEL);
sc                383 drivers/net/wireless/ath/ath9k/init.c 			if (!(sc->sc_ah->caps.hw_caps &
sc                406 drivers/net/wireless/ath/ath9k/init.c static int ath9k_init_queues(struct ath_softc *sc)
sc                410 drivers/net/wireless/ath/ath9k/init.c 	sc->beacon.beaconq = ath9k_hw_beaconq_setup(sc->sc_ah);
sc                411 drivers/net/wireless/ath/ath9k/init.c 	sc->beacon.cabq = ath_txq_setup(sc, ATH9K_TX_QUEUE_CAB, 0);
sc                412 drivers/net/wireless/ath/ath9k/init.c 	ath_cabq_update(sc);
sc                414 drivers/net/wireless/ath/ath9k/init.c 	sc->tx.uapsdq = ath_txq_setup(sc, ATH9K_TX_QUEUE_UAPSD, 0);
sc                417 drivers/net/wireless/ath/ath9k/init.c 		sc->tx.txq_map[i] = ath_txq_setup(sc, ATH9K_TX_QUEUE_DATA, i);
sc                418 drivers/net/wireless/ath/ath9k/init.c 		sc->tx.txq_map[i]->mac80211_qnum = i;
sc                423 drivers/net/wireless/ath/ath9k/init.c static void ath9k_init_misc(struct ath_softc *sc)
sc                425 drivers/net/wireless/ath/ath9k/init.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                432 drivers/net/wireless/ath/ath9k/init.c 	sc->beacon.slottime = 9;
sc                434 drivers/net/wireless/ath/ath9k/init.c 	for (i = 0; i < ARRAY_SIZE(sc->beacon.bslot); i++)
sc                435 drivers/net/wireless/ath/ath9k/init.c 		sc->beacon.bslot[i] = NULL;
sc                437 drivers/net/wireless/ath/ath9k/init.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_ANT_DIV_COMB)
sc                438 drivers/net/wireless/ath/ath9k/init.c 		sc->ant_comb.count = ATH_ANT_DIV_COMB_INIT_COUNT;
sc                440 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.ah = sc->sc_ah;
sc                441 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.enabled = 0;
sc                442 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.short_repeat = true;
sc                443 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.count = 8;
sc                444 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.endless = false;
sc                445 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.period = 0xFF;
sc                446 drivers/net/wireless/ath/ath9k/init.c 	sc->spec_priv.spec_config.fft_period = 0xF;
sc                449 drivers/net/wireless/ath/ath9k/init.c static void ath9k_init_pcoem_platform(struct ath_softc *sc)
sc                451 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                461 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & (ATH9K_PCI_CUS198 |
sc                467 drivers/net/wireless/ath/ath9k/init.c 		sc->ant_comb.low_rssi_thresh = 20;
sc                468 drivers/net/wireless/ath/ath9k/init.c 		sc->ant_comb.fast_div_bias = 3;
sc                471 drivers/net/wireless/ath/ath9k/init.c 			 (sc->driver_data & ATH9K_PCI_CUS198) ?
sc                475 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_CUS217)
sc                478 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_CUS252)
sc                481 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_AR9565_1ANT)
sc                484 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_AR9565_2ANT)
sc                487 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_KILLER)
sc                496 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & (ATH9K_PCI_AR9565_1ANT | ATH9K_PCI_AR9565_2ANT)) {
sc                497 drivers/net/wireless/ath/ath9k/init.c 		if (!(sc->driver_data & ATH9K_PCI_BT_ANT_DIV))
sc                501 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_BT_ANT_DIV) {
sc                506 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_D3_L1_WAR) {
sc                518 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_NO_PLL_PWRSAVE) {
sc                523 drivers/net/wireless/ath/ath9k/init.c 	if (sc->driver_data & ATH9K_PCI_LED_ACT_HI)
sc                538 drivers/net/wireless/ath/ath9k/init.c static int ath9k_eeprom_request(struct ath_softc *sc, const char *name)
sc                541 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                546 drivers/net/wireless/ath/ath9k/init.c 	ec.ah = sc->sc_ah;
sc                548 drivers/net/wireless/ath/ath9k/init.c 	err = request_firmware_nowait(THIS_MODULE, 1, name, sc->dev, GFP_KERNEL,
sc                567 drivers/net/wireless/ath/ath9k/init.c static void ath9k_eeprom_release(struct ath_softc *sc)
sc                569 drivers/net/wireless/ath/ath9k/init.c 	release_firmware(sc->sc_ah->eeprom_blob);
sc                572 drivers/net/wireless/ath/ath9k/init.c static int ath9k_init_platform(struct ath_softc *sc)
sc                574 drivers/net/wireless/ath/ath9k/init.c 	struct ath9k_platform_data *pdata = sc->dev->platform_data;
sc                575 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                598 drivers/net/wireless/ath/ath9k/init.c 		ret = ath9k_eeprom_request(sc, pdata->eeprom_name);
sc                615 drivers/net/wireless/ath/ath9k/init.c static int ath9k_of_init(struct ath_softc *sc)
sc                617 drivers/net/wireless/ath/ath9k/init.c 	struct device_node *np = sc->dev->of_node;
sc                618 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                636 drivers/net/wireless/ath/ath9k/init.c 		ret = ath9k_eeprom_request(sc, eeprom_name);
sc                651 drivers/net/wireless/ath/ath9k/init.c static int ath9k_init_softc(u16 devid, struct ath_softc *sc,
sc                660 drivers/net/wireless/ath/ath9k/init.c 	ah = devm_kzalloc(sc->dev, sizeof(struct ath_hw), GFP_KERNEL);
sc                664 drivers/net/wireless/ath/ath9k/init.c 	ah->dev = sc->dev;
sc                665 drivers/net/wireless/ath/ath9k/init.c 	ah->hw = sc->hw;
sc                680 drivers/net/wireless/ath/ath9k/init.c 	sc->sc_ah = ah;
sc                681 drivers/net/wireless/ath/ath9k/init.c 	sc->dfs_detector = dfs_pattern_detector_init(common, NL80211_DFS_UNSET);
sc                682 drivers/net/wireless/ath/ath9k/init.c 	sc->tx99_power = MAX_RATE_POWER + 1;
sc                683 drivers/net/wireless/ath/ath9k/init.c 	init_waitqueue_head(&sc->tx_wait);
sc                684 drivers/net/wireless/ath/ath9k/init.c 	sc->cur_chan = &sc->chanctx[0];
sc                686 drivers/net/wireless/ath/ath9k/init.c 		sc->cur_chan->hw_queue_base = 0;
sc                692 drivers/net/wireless/ath/ath9k/init.c 	common->hw = sc->hw;
sc                693 drivers/net/wireless/ath/ath9k/init.c 	common->priv = sc;
sc                701 drivers/net/wireless/ath/ath9k/init.c 	ath9k_init_pcoem_platform(sc);
sc                703 drivers/net/wireless/ath/ath9k/init.c 	ret = ath9k_init_platform(sc);
sc                707 drivers/net/wireless/ath/ath9k/init.c 	ret = ath9k_of_init(sc);
sc                726 drivers/net/wireless/ath/ath9k/init.c 	spin_lock_init(&sc->intr_lock);
sc                727 drivers/net/wireless/ath/ath9k/init.c 	spin_lock_init(&sc->sc_serial_rw);
sc                728 drivers/net/wireless/ath/ath9k/init.c 	spin_lock_init(&sc->sc_pm_lock);
sc                729 drivers/net/wireless/ath/ath9k/init.c 	spin_lock_init(&sc->chan_lock);
sc                730 drivers/net/wireless/ath/ath9k/init.c 	mutex_init(&sc->mutex);
sc                731 drivers/net/wireless/ath/ath9k/init.c 	tasklet_init(&sc->intr_tq, ath9k_tasklet, (unsigned long)sc);
sc                732 drivers/net/wireless/ath/ath9k/init.c 	tasklet_init(&sc->bcon_tasklet, ath9k_beacon_tasklet,
sc                733 drivers/net/wireless/ath/ath9k/init.c 		     (unsigned long)sc);
sc                735 drivers/net/wireless/ath/ath9k/init.c 	timer_setup(&sc->sleep_timer, ath_ps_full_sleep, 0);
sc                736 drivers/net/wireless/ath/ath9k/init.c 	INIT_WORK(&sc->hw_reset_work, ath_reset_work);
sc                737 drivers/net/wireless/ath/ath9k/init.c 	INIT_WORK(&sc->paprd_work, ath_paprd_calibrate);
sc                738 drivers/net/wireless/ath/ath9k/init.c 	INIT_DELAYED_WORK(&sc->hw_pll_work, ath_hw_pll_work);
sc                739 drivers/net/wireless/ath/ath9k/init.c 	INIT_DELAYED_WORK(&sc->hw_check_work, ath_hw_check_work);
sc                741 drivers/net/wireless/ath/ath9k/init.c 	ath9k_init_channel_context(sc);
sc                755 drivers/net/wireless/ath/ath9k/init.c 	ret = ath9k_init_queues(sc);
sc                759 drivers/net/wireless/ath/ath9k/init.c 	ret =  ath9k_init_btcoex(sc);
sc                767 drivers/net/wireless/ath/ath9k/init.c 	ret = ath9k_init_p2p(sc);
sc                771 drivers/net/wireless/ath/ath9k/init.c 	ath9k_cmn_init_crypto(sc->sc_ah);
sc                772 drivers/net/wireless/ath/ath9k/init.c 	ath9k_init_misc(sc);
sc                773 drivers/net/wireless/ath/ath9k/init.c 	ath_chanctx_init(sc);
sc                774 drivers/net/wireless/ath/ath9k/init.c 	ath9k_offchannel_init(sc);
sc                783 drivers/net/wireless/ath/ath9k/init.c 		if (ATH_TXQ_SETUP(sc, i))
sc                784 drivers/net/wireless/ath/ath9k/init.c 			ath_tx_cleanupq(sc, &sc->tx.txq[i]);
sc                788 drivers/net/wireless/ath/ath9k/init.c 	ath9k_eeprom_release(sc);
sc                789 drivers/net/wireless/ath/ath9k/init.c 	dev_kfree_skb_any(sc->tx99_skb);
sc                793 drivers/net/wireless/ath/ath9k/init.c static void ath9k_init_band_txpower(struct ath_softc *sc, int band)
sc                797 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                807 drivers/net/wireless/ath/ath9k/init.c 		ath9k_cmn_get_channel(sc->hw, ah, &chandef);
sc                812 drivers/net/wireless/ath/ath9k/init.c static void ath9k_init_txpower_limits(struct ath_softc *sc)
sc                814 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                818 drivers/net/wireless/ath/ath9k/init.c 		ath9k_init_band_txpower(sc, NL80211_BAND_2GHZ);
sc                820 drivers/net/wireless/ath/ath9k/init.c 		ath9k_init_band_txpower(sc, NL80211_BAND_5GHZ);
sc                890 drivers/net/wireless/ath/ath9k/init.c static void ath9k_set_mcc_capab(struct ath_softc *sc, struct ieee80211_hw *hw)
sc                892 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                915 drivers/net/wireless/ath/ath9k/init.c static void ath9k_set_hw_capab(struct ath_softc *sc, struct ieee80211_hw *hw)
sc                917 drivers/net/wireless/ath/ath9k/init.c 	struct ath_hw *ah = sc->sc_ah;
sc                934 drivers/net/wireless/ath/ath9k/init.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_HT) {
sc                942 drivers/net/wireless/ath/ath9k/init.c 	if (AR_SREV_9160_10_OR_LATER(sc->sc_ah) || ath9k_modparam_nohwcrypt)
sc                995 drivers/net/wireless/ath/ath9k/init.c 	sc->ant_rx = hw->wiphy->available_antennas_rx;
sc                996 drivers/net/wireless/ath/ath9k/init.c 	sc->ant_tx = hw->wiphy->available_antennas_tx;
sc                998 drivers/net/wireless/ath/ath9k/init.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_2GHZ)
sc               1001 drivers/net/wireless/ath/ath9k/init.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_5GHZ)
sc               1006 drivers/net/wireless/ath/ath9k/init.c 	ath9k_set_mcc_capab(sc, hw);
sc               1017 drivers/net/wireless/ath/ath9k/init.c int ath9k_init_device(u16 devid, struct ath_softc *sc,
sc               1020 drivers/net/wireless/ath/ath9k/init.c 	struct ieee80211_hw *hw = sc->hw;
sc               1027 drivers/net/wireless/ath/ath9k/init.c 	error = ath9k_init_softc(devid, sc, bus_ops);
sc               1031 drivers/net/wireless/ath/ath9k/init.c 	ah = sc->sc_ah;
sc               1033 drivers/net/wireless/ath/ath9k/init.c 	ath9k_set_hw_capab(sc, hw);
sc               1036 drivers/net/wireless/ath/ath9k/init.c 	error = ath_regd_init(&common->regulatory, sc->hw->wiphy,
sc               1044 drivers/net/wireless/ath/ath9k/init.c 	error = ath_tx_init(sc, ATH_TXBUF);
sc               1049 drivers/net/wireless/ath/ath9k/init.c 	error = ath_rx_init(sc, ATH_RXBUF);
sc               1053 drivers/net/wireless/ath/ath9k/init.c 	ath9k_init_txpower_limits(sc);
sc               1057 drivers/net/wireless/ath/ath9k/init.c 	sc->led_cdev.default_trigger = ieee80211_create_tpt_led_trigger(sc->hw,
sc               1080 drivers/net/wireless/ath/ath9k/init.c 	ath_init_leds(sc);
sc               1081 drivers/net/wireless/ath/ath9k/init.c 	ath_start_rfkill_poll(sc);
sc               1086 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_debug(sc);
sc               1090 drivers/net/wireless/ath/ath9k/init.c 	ath_rx_cleanup(sc);
sc               1092 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_softc(sc);
sc               1100 drivers/net/wireless/ath/ath9k/init.c static void ath9k_deinit_softc(struct ath_softc *sc)
sc               1104 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_p2p(sc);
sc               1105 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_btcoex(sc);
sc               1108 drivers/net/wireless/ath/ath9k/init.c 		if (ATH_TXQ_SETUP(sc, i))
sc               1109 drivers/net/wireless/ath/ath9k/init.c 			ath_tx_cleanupq(sc, &sc->tx.txq[i]);
sc               1111 drivers/net/wireless/ath/ath9k/init.c 	del_timer_sync(&sc->sleep_timer);
sc               1112 drivers/net/wireless/ath/ath9k/init.c 	ath9k_hw_deinit(sc->sc_ah);
sc               1113 drivers/net/wireless/ath/ath9k/init.c 	if (sc->dfs_detector != NULL)
sc               1114 drivers/net/wireless/ath/ath9k/init.c 		sc->dfs_detector->exit(sc->dfs_detector);
sc               1116 drivers/net/wireless/ath/ath9k/init.c 	ath9k_eeprom_release(sc);
sc               1119 drivers/net/wireless/ath/ath9k/init.c void ath9k_deinit_device(struct ath_softc *sc)
sc               1121 drivers/net/wireless/ath/ath9k/init.c 	struct ieee80211_hw *hw = sc->hw;
sc               1123 drivers/net/wireless/ath/ath9k/init.c 	ath9k_ps_wakeup(sc);
sc               1125 drivers/net/wireless/ath/ath9k/init.c 	wiphy_rfkill_stop_polling(sc->hw->wiphy);
sc               1126 drivers/net/wireless/ath/ath9k/init.c 	ath_deinit_leds(sc);
sc               1128 drivers/net/wireless/ath/ath9k/init.c 	ath9k_ps_restore(sc);
sc               1130 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_debug(sc);
sc               1133 drivers/net/wireless/ath/ath9k/init.c 	ath_rx_cleanup(sc);
sc               1134 drivers/net/wireless/ath/ath9k/init.c 	ath9k_deinit_softc(sc);
sc                 23 drivers/net/wireless/ath/ath9k/link.c static bool ath_tx_complete_check(struct ath_softc *sc)
sc                 28 drivers/net/wireless/ath/ath9k/link.c 	if (sc->tx99_state)
sc                 32 drivers/net/wireless/ath/ath9k/link.c 		txq = sc->tx.txq_map[i];
sc                 34 drivers/net/wireless/ath/ath9k/link.c 		ath_txq_lock(sc, txq);
sc                 37 drivers/net/wireless/ath/ath9k/link.c 				ath_txq_unlock(sc, txq);
sc                 43 drivers/net/wireless/ath/ath9k/link.c 		ath_txq_unlock(sc, txq);
sc                 49 drivers/net/wireless/ath/ath9k/link.c 	ath_dbg(ath9k_hw_common(sc->sc_ah), RESET,
sc                 51 drivers/net/wireless/ath/ath9k/link.c 	ath9k_queue_reset(sc, RESET_TYPE_TX_HANG);
sc                 58 drivers/net/wireless/ath/ath9k/link.c 	struct ath_softc *sc = container_of(work, struct ath_softc,
sc                 61 drivers/net/wireless/ath/ath9k/link.c 	if (!ath_hw_check(sc) ||
sc                 62 drivers/net/wireless/ath/ath9k/link.c 	    !ath_tx_complete_check(sc))
sc                 65 drivers/net/wireless/ath/ath9k/link.c 	ieee80211_queue_delayed_work(sc->hw, &sc->hw_check_work,
sc                 72 drivers/net/wireless/ath/ath9k/link.c bool ath_hw_check(struct ath_softc *sc)
sc                 74 drivers/net/wireless/ath/ath9k/link.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                 78 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_wakeup(sc);
sc                 80 drivers/net/wireless/ath/ath9k/link.c 	is_alive = ath9k_hw_check_alive(sc->sc_ah);
sc                 86 drivers/net/wireless/ath/ath9k/link.c 		ath9k_queue_reset(sc, type);
sc                 89 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_restore(sc);
sc                 97 drivers/net/wireless/ath/ath9k/link.c static bool ath_hw_pll_rx_hang_check(struct ath_softc *sc, u32 pll_sqsum)
sc                100 drivers/net/wireless/ath/ath9k/link.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                106 drivers/net/wireless/ath/ath9k/link.c 			ath9k_queue_reset(sc, RESET_TYPE_PLL_HANG);
sc                120 drivers/net/wireless/ath/ath9k/link.c 	struct ath_softc *sc = container_of(work, struct ath_softc,
sc                122 drivers/net/wireless/ath/ath9k/link.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                132 drivers/net/wireless/ath/ath9k/link.c 	if (sc->tx99_state)
sc                135 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_wakeup(sc);
sc                136 drivers/net/wireless/ath/ath9k/link.c 	pll_sqsum = ar9003_get_pll_sqsum_dvc(sc->sc_ah);
sc                137 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_restore(sc);
sc                138 drivers/net/wireless/ath/ath9k/link.c 	if (ath_hw_pll_rx_hang_check(sc, pll_sqsum))
sc                141 drivers/net/wireless/ath/ath9k/link.c 	ieee80211_queue_delayed_work(sc->hw, &sc->hw_pll_work,
sc                148 drivers/net/wireless/ath/ath9k/link.c static void ath_paprd_activate(struct ath_softc *sc)
sc                150 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                172 drivers/net/wireless/ath/ath9k/link.c static bool ath_paprd_send_frame(struct ath_softc *sc, struct sk_buff *skb, int chain)
sc                174 drivers/net/wireless/ath/ath9k/link.c 	struct ieee80211_hw *hw = sc->hw;
sc                176 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                182 drivers/net/wireless/ath/ath9k/link.c 	txctl.txq = sc->tx.txq_map[IEEE80211_AC_BE];
sc                185 drivers/net/wireless/ath/ath9k/link.c 	tx_info->band = sc->cur_chandef.chan->band;
sc                192 drivers/net/wireless/ath/ath9k/link.c 	init_completion(&sc->paprd_complete);
sc                201 drivers/net/wireless/ath/ath9k/link.c 	time_left = wait_for_completion_timeout(&sc->paprd_complete,
sc                214 drivers/net/wireless/ath/ath9k/link.c 	struct ath_softc *sc = container_of(work, struct ath_softc, paprd_work);
sc                215 drivers/net/wireless/ath/ath9k/link.c 	struct ieee80211_hw *hw = sc->hw;
sc                216 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                234 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_wakeup(sc);
sc                262 drivers/net/wireless/ath/ath9k/link.c 		if (!ath_paprd_send_frame(sc, skb, chain))
sc                290 drivers/net/wireless/ath/ath9k/link.c 		ath_paprd_activate(sc);
sc                294 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_restore(sc);
sc                307 drivers/net/wireless/ath/ath9k/link.c 	struct ath_softc *sc = (struct ath_softc *)common->priv;
sc                308 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                325 drivers/net/wireless/ath/ath9k/link.c 	if (sc->sc_ah->power_mode != ATH9K_PM_AWAKE) {
sc                327 drivers/net/wireless/ath/ath9k/link.c 			spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                328 drivers/net/wireless/ath/ath9k/link.c 			sc->ps_flags |= PS_WAIT_FOR_ANI;
sc                329 drivers/net/wireless/ath/ath9k/link.c 			spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                334 drivers/net/wireless/ath/ath9k/link.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                335 drivers/net/wireless/ath/ath9k/link.c 	sc->ps_flags &= ~PS_WAIT_FOR_ANI;
sc                336 drivers/net/wireless/ath/ath9k/link.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                338 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_wakeup(sc);
sc                372 drivers/net/wireless/ath/ath9k/link.c 		ath_update_survey_stats(sc);
sc                382 drivers/net/wireless/ath/ath9k/link.c 			ath9k_queue_reset(sc, RESET_TYPE_CALIBRATION);
sc                395 drivers/net/wireless/ath/ath9k/link.c 	ath9k_ps_restore(sc);
sc                412 drivers/net/wireless/ath/ath9k/link.c 			ieee80211_queue_work(sc->hw, &sc->paprd_work);
sc                414 drivers/net/wireless/ath/ath9k/link.c 			ath9k_ps_wakeup(sc);
sc                415 drivers/net/wireless/ath/ath9k/link.c 			ath_paprd_activate(sc);
sc                416 drivers/net/wireless/ath/ath9k/link.c 			ath9k_ps_restore(sc);
sc                421 drivers/net/wireless/ath/ath9k/link.c void ath_start_ani(struct ath_softc *sc)
sc                423 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                429 drivers/net/wireless/ath/ath9k/link.c 	    sc->cur_chan->offchannel)
sc                441 drivers/net/wireless/ath/ath9k/link.c void ath_stop_ani(struct ath_softc *sc)
sc                443 drivers/net/wireless/ath/ath9k/link.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                449 drivers/net/wireless/ath/ath9k/link.c void ath_check_ani(struct ath_softc *sc)
sc                451 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                452 drivers/net/wireless/ath/ath9k/link.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                453 drivers/net/wireless/ath/ath9k/link.c 	struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon;
sc                478 drivers/net/wireless/ath/ath9k/link.c 		ath_start_ani(sc);
sc                485 drivers/net/wireless/ath/ath9k/link.c 	ath_stop_ani(sc);
sc                488 drivers/net/wireless/ath/ath9k/link.c void ath_update_survey_nf(struct ath_softc *sc, int channel)
sc                490 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                492 drivers/net/wireless/ath/ath9k/link.c 	struct survey_info *survey = &sc->survey[channel];
sc                506 drivers/net/wireless/ath/ath9k/link.c int ath_update_survey_stats(struct ath_softc *sc)
sc                508 drivers/net/wireless/ath/ath9k/link.c 	struct ath_hw *ah = sc->sc_ah;
sc                511 drivers/net/wireless/ath/ath9k/link.c 	struct survey_info *survey = &sc->survey[pos];
sc                541 drivers/net/wireless/ath/ath9k/link.c 	ath_update_survey_nf(sc, pos);
sc                 57 drivers/net/wireless/ath/ath9k/main.c static bool ath9k_has_pending_frames(struct ath_softc *sc, struct ath_txq *txq,
sc                 75 drivers/net/wireless/ath/ath9k/main.c 		acq = &sc->cur_chan->acq[txq->mac80211_qnum];
sc                 84 drivers/net/wireless/ath/ath9k/main.c static bool ath9k_setpower(struct ath_softc *sc, enum ath9k_power_mode mode)
sc                 89 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                 90 drivers/net/wireless/ath/ath9k/main.c 	ret = ath9k_hw_setpower(sc->sc_ah, mode);
sc                 91 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                 98 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = from_timer(sc, t, sleep_timer);
sc                 99 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                107 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_setrxabort(sc->sc_ah, 1);
sc                108 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_stopdmarecv(sc->sc_ah, &reset);
sc                110 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_setpower(sc->sc_ah, ATH9K_PM_FULL_SLEEP);
sc                113 drivers/net/wireless/ath/ath9k/main.c void ath9k_ps_wakeup(struct ath_softc *sc)
sc                115 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                119 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                120 drivers/net/wireless/ath/ath9k/main.c 	if (++sc->ps_usecount != 1)
sc                123 drivers/net/wireless/ath/ath9k/main.c 	del_timer_sync(&sc->sleep_timer);
sc                124 drivers/net/wireless/ath/ath9k/main.c 	power_mode = sc->sc_ah->power_mode;
sc                125 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_setpower(sc->sc_ah, ATH9K_PM_AWAKE);
sc                141 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                144 drivers/net/wireless/ath/ath9k/main.c void ath9k_ps_restore(struct ath_softc *sc)
sc                146 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                150 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                151 drivers/net/wireless/ath/ath9k/main.c 	if (--sc->ps_usecount != 0)
sc                154 drivers/net/wireless/ath/ath9k/main.c 	if (sc->ps_idle) {
sc                155 drivers/net/wireless/ath/ath9k/main.c 		mod_timer(&sc->sleep_timer, jiffies + HZ / 10);
sc                159 drivers/net/wireless/ath/ath9k/main.c 	if (sc->ps_enabled &&
sc                160 drivers/net/wireless/ath/ath9k/main.c 		   !(sc->ps_flags & (PS_WAIT_FOR_BEACON |
sc                166 drivers/net/wireless/ath/ath9k/main.c 		if (ath9k_hw_btcoex_is_enabled(sc->sc_ah))
sc                167 drivers/net/wireless/ath/ath9k/main.c 			ath9k_btcoex_stop_gen_timer(sc);
sc                176 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_setpower(sc->sc_ah, mode);
sc                179 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                182 drivers/net/wireless/ath/ath9k/main.c static void __ath_cancel_work(struct ath_softc *sc)
sc                184 drivers/net/wireless/ath/ath9k/main.c 	cancel_work_sync(&sc->paprd_work);
sc                185 drivers/net/wireless/ath/ath9k/main.c 	cancel_delayed_work_sync(&sc->hw_check_work);
sc                186 drivers/net/wireless/ath/ath9k/main.c 	cancel_delayed_work_sync(&sc->hw_pll_work);
sc                189 drivers/net/wireless/ath/ath9k/main.c 	if (ath9k_hw_mci_is_enabled(sc->sc_ah))
sc                190 drivers/net/wireless/ath/ath9k/main.c 		cancel_work_sync(&sc->mci_work);
sc                194 drivers/net/wireless/ath/ath9k/main.c void ath_cancel_work(struct ath_softc *sc)
sc                196 drivers/net/wireless/ath/ath9k/main.c 	__ath_cancel_work(sc);
sc                197 drivers/net/wireless/ath/ath9k/main.c 	cancel_work_sync(&sc->hw_reset_work);
sc                200 drivers/net/wireless/ath/ath9k/main.c void ath_restart_work(struct ath_softc *sc)
sc                202 drivers/net/wireless/ath/ath9k/main.c 	ieee80211_queue_delayed_work(sc->hw, &sc->hw_check_work,
sc                205 drivers/net/wireless/ath/ath9k/main.c 	if (AR_SREV_9340(sc->sc_ah) || AR_SREV_9330(sc->sc_ah))
sc                206 drivers/net/wireless/ath/ath9k/main.c 		ieee80211_queue_delayed_work(sc->hw, &sc->hw_pll_work,
sc                209 drivers/net/wireless/ath/ath9k/main.c 	ath_start_ani(sc);
sc                212 drivers/net/wireless/ath/ath9k/main.c static bool ath_prepare_reset(struct ath_softc *sc)
sc                214 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                217 drivers/net/wireless/ath/ath9k/main.c 	ieee80211_stop_queues(sc->hw);
sc                218 drivers/net/wireless/ath/ath9k/main.c 	ath_stop_ani(sc);
sc                222 drivers/net/wireless/ath/ath9k/main.c 		ret &= ath_stoprecv(sc);
sc                223 drivers/net/wireless/ath/ath9k/main.c 		ret &= ath_drain_all_txq(sc);
sc                225 drivers/net/wireless/ath/ath9k/main.c 		ret &= ath_drain_all_txq(sc);
sc                226 drivers/net/wireless/ath/ath9k/main.c 		ret &= ath_stoprecv(sc);
sc                232 drivers/net/wireless/ath/ath9k/main.c static bool ath_complete_reset(struct ath_softc *sc, bool start)
sc                234 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                238 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, sc->cur_chan);
sc                239 drivers/net/wireless/ath/ath9k/main.c 	ath_startrecv(sc);
sc                240 drivers/net/wireless/ath/ath9k/main.c 	ath9k_cmn_update_txpow(ah, sc->cur_chan->cur_txpower,
sc                241 drivers/net/wireless/ath/ath9k/main.c 			       sc->cur_chan->txpower,
sc                242 drivers/net/wireless/ath/ath9k/main.c 			       &sc->cur_chan->cur_txpower);
sc                245 drivers/net/wireless/ath/ath9k/main.c 	if (!sc->cur_chan->offchannel && start) {
sc                247 drivers/net/wireless/ath/ath9k/main.c 		if (sc->cur_chan->tsf_val) {
sc                250 drivers/net/wireless/ath/ath9k/main.c 			offset = ath9k_hw_get_tsf_offset(&sc->cur_chan->tsf_ts,
sc                252 drivers/net/wireless/ath/ath9k/main.c 			ath9k_hw_settsf64(ah, sc->cur_chan->tsf_val + offset);
sc                261 drivers/net/wireless/ath/ath9k/main.c 			spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                262 drivers/net/wireless/ath/ath9k/main.c 			sc->ps_flags |= PS_BEACON_SYNC | PS_WAIT_FOR_BEACON;
sc                263 drivers/net/wireless/ath/ath9k/main.c 			spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                265 drivers/net/wireless/ath/ath9k/main.c 			ath9k_set_beacon(sc);
sc                268 drivers/net/wireless/ath/ath9k/main.c 		ath_restart_work(sc);
sc                269 drivers/net/wireless/ath/ath9k/main.c 		ath_txq_schedule_all(sc);
sc                272 drivers/net/wireless/ath/ath9k/main.c 	sc->gtt_cnt = 0;
sc                276 drivers/net/wireless/ath/ath9k/main.c 	ieee80211_wake_queues(sc->hw);
sc                277 drivers/net/wireless/ath/ath9k/main.c 	ath9k_p2p_ps_timer(sc);
sc                282 drivers/net/wireless/ath/ath9k/main.c static int ath_reset_internal(struct ath_softc *sc, struct ath9k_channel *hchan)
sc                284 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                290 drivers/net/wireless/ath/ath9k/main.c 	__ath_cancel_work(sc);
sc                292 drivers/net/wireless/ath/ath9k/main.c 	disable_irq(sc->irq);
sc                293 drivers/net/wireless/ath/ath9k/main.c 	tasklet_disable(&sc->intr_tq);
sc                294 drivers/net/wireless/ath/ath9k/main.c 	tasklet_disable(&sc->bcon_tasklet);
sc                295 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                297 drivers/net/wireless/ath/ath9k/main.c 	if (!sc->cur_chan->offchannel) {
sc                299 drivers/net/wireless/ath/ath9k/main.c 		caldata = &sc->cur_chan->caldata;
sc                307 drivers/net/wireless/ath/ath9k/main.c 	if (!ath_prepare_reset(sc))
sc                313 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->chan_lock);
sc                314 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chandef = sc->cur_chan->chandef;
sc                315 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->chan_lock);
sc                326 drivers/net/wireless/ath/ath9k/main.c 		ath9k_queue_reset(sc, RESET_TYPE_BB_HANG);
sc                331 drivers/net/wireless/ath/ath9k/main.c 	if (ath9k_hw_mci_is_enabled(sc->sc_ah) &&
sc                332 drivers/net/wireless/ath/ath9k/main.c 	    sc->cur_chan->offchannel)
sc                333 drivers/net/wireless/ath/ath9k/main.c 		ath9k_mci_set_txpower(sc, true, false);
sc                335 drivers/net/wireless/ath/ath9k/main.c 	if (!ath_complete_reset(sc, true))
sc                339 drivers/net/wireless/ath/ath9k/main.c 	enable_irq(sc->irq);
sc                340 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                341 drivers/net/wireless/ath/ath9k/main.c 	tasklet_enable(&sc->bcon_tasklet);
sc                342 drivers/net/wireless/ath/ath9k/main.c 	tasklet_enable(&sc->intr_tq);
sc                347 drivers/net/wireless/ath/ath9k/main.c static void ath_node_attach(struct ath_softc *sc, struct ieee80211_sta *sta,
sc                353 drivers/net/wireless/ath/ath9k/main.c 	an->sc = sc;
sc                358 drivers/net/wireless/ath/ath9k/main.c 	ath_tx_node_init(sc, an);
sc                360 drivers/net/wireless/ath/ath9k/main.c 	ath_dynack_node_init(sc->sc_ah, an);
sc                363 drivers/net/wireless/ath/ath9k/main.c static void ath_node_detach(struct ath_softc *sc, struct ieee80211_sta *sta)
sc                366 drivers/net/wireless/ath/ath9k/main.c 	ath_tx_node_cleanup(sc, an);
sc                368 drivers/net/wireless/ath/ath9k/main.c 	ath_dynack_node_deinit(sc->sc_ah, an);
sc                373 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = (struct ath_softc *)data;
sc                374 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                381 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_irqsave(&sc->intr_lock, flags);
sc                382 drivers/net/wireless/ath/ath9k/main.c 	status = sc->intrstatus;
sc                383 drivers/net/wireless/ath/ath9k/main.c 	sc->intrstatus = 0;
sc                384 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_irqrestore(&sc->intr_lock, flags);
sc                386 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc                387 drivers/net/wireless/ath/ath9k/main.c 	spin_lock(&sc->sc_pcu_lock);
sc                391 drivers/net/wireless/ath/ath9k/main.c 		ath9k_queue_reset(sc, type);
sc                405 drivers/net/wireless/ath/ath9k/main.c 			ath9k_queue_reset(sc, type);
sc                414 drivers/net/wireless/ath/ath9k/main.c 		sc->gtt_cnt++;
sc                416 drivers/net/wireless/ath/ath9k/main.c 		if ((sc->gtt_cnt >= MAX_GTT_CNT) && !ath9k_hw_check_alive(ah)) {
sc                418 drivers/net/wireless/ath/ath9k/main.c 			ath9k_queue_reset(sc, type);
sc                425 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                426 drivers/net/wireless/ath/ath9k/main.c 	if ((status & ATH9K_INT_TSFOOR) && sc->ps_enabled) {
sc                432 drivers/net/wireless/ath/ath9k/main.c 		sc->ps_flags |= PS_WAIT_FOR_BEACON | PS_BEACON_SYNC;
sc                434 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                446 drivers/net/wireless/ath/ath9k/main.c 			ath_rx_tasklet(sc, 0, true);
sc                448 drivers/net/wireless/ath/ath9k/main.c 		ath_rx_tasklet(sc, 0, false);
sc                459 drivers/net/wireless/ath/ath9k/main.c 			sc->gtt_cnt = 0;
sc                461 drivers/net/wireless/ath/ath9k/main.c 			ath_tx_edma_tasklet(sc);
sc                463 drivers/net/wireless/ath/ath9k/main.c 			ath_tx_tasklet(sc);
sc                466 drivers/net/wireless/ath/ath9k/main.c 		wake_up(&sc->tx_wait);
sc                470 drivers/net/wireless/ath/ath9k/main.c 		ath_gen_timer_isr(sc->sc_ah);
sc                472 drivers/net/wireless/ath/ath9k/main.c 	ath9k_btcoex_handle_interrupt(sc, status);
sc                477 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock(&sc->sc_pcu_lock);
sc                478 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc                499 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = dev;
sc                500 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                525 drivers/net/wireless/ath/ath9k/main.c 	ath9k_debug_sync_cause(sc, sync_cause);
sc                539 drivers/net/wireless/ath/ath9k/main.c 	spin_lock(&sc->intr_lock);
sc                540 drivers/net/wireless/ath/ath9k/main.c 	sc->intrstatus |= status;
sc                541 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock(&sc->intr_lock);
sc                558 drivers/net/wireless/ath/ath9k/main.c 		tasklet_schedule(&sc->bcon_tasklet);
sc                570 drivers/net/wireless/ath/ath9k/main.c 			if (ATH_DBG_WARN_ON_ONCE(sc->ps_idle))
sc                574 drivers/net/wireless/ath/ath9k/main.c 			ath9k_setpower(sc, ATH9K_PM_AWAKE);
sc                575 drivers/net/wireless/ath/ath9k/main.c 			spin_lock(&sc->sc_pm_lock);
sc                576 drivers/net/wireless/ath/ath9k/main.c 			ath9k_hw_setrxabort(sc->sc_ah, 0);
sc                577 drivers/net/wireless/ath/ath9k/main.c 			sc->ps_flags |= PS_WAIT_FOR_BEACON;
sc                578 drivers/net/wireless/ath/ath9k/main.c 			spin_unlock(&sc->sc_pm_lock);
sc                583 drivers/net/wireless/ath/ath9k/main.c 	ath_debug_stat_interrupt(sc, status);
sc                588 drivers/net/wireless/ath/ath9k/main.c 		tasklet_schedule(&sc->intr_tq);
sc                600 drivers/net/wireless/ath/ath9k/main.c int ath_reset(struct ath_softc *sc, struct ath9k_channel *hchan)
sc                602 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                605 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_kill_interrupts(sc->sc_ah);
sc                608 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc                609 drivers/net/wireless/ath/ath9k/main.c 	r = ath_reset_internal(sc, hchan);
sc                610 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc                620 drivers/net/wireless/ath/ath9k/main.c void ath9k_queue_reset(struct ath_softc *sc, enum ath_reset_type type)
sc                622 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                624 drivers/net/wireless/ath/ath9k/main.c 	RESET_STAT_INC(sc, type);
sc                626 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_kill_interrupts(sc->sc_ah);
sc                628 drivers/net/wireless/ath/ath9k/main.c 	ieee80211_queue_work(sc->hw, &sc->hw_reset_work);
sc                633 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = container_of(work, struct ath_softc, hw_reset_work);
sc                635 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc                636 drivers/net/wireless/ath/ath9k/main.c 	ath_reset_internal(sc, NULL);
sc                637 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc                646 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc                647 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                649 drivers/net/wireless/ath/ath9k/main.c 	struct ieee80211_channel *curchan = sc->cur_chan->chandef.chan;
sc                650 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = sc->cur_chan;
sc                658 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc                659 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc                662 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chandef = hw->conf.chandef;
sc                674 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                710 drivers/net/wireless/ath/ath9k/main.c 	ath_mci_enable(sc);
sc                713 drivers/net/wireless/ath/ath9k/main.c 	sc->sc_ah->is_monitoring = false;
sc                715 drivers/net/wireless/ath/ath9k/main.c 	if (!ath_complete_reset(sc, false))
sc                729 drivers/net/wireless/ath/ath9k/main.c 	ath9k_cmn_init_crypto(sc->sc_ah);
sc                733 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                735 drivers/net/wireless/ath/ath9k/main.c 	ath9k_rng_start(sc);
sc                737 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc                739 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc                748 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc                749 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                754 drivers/net/wireless/ath/ath9k/main.c 	if (sc->ps_enabled) {
sc                768 drivers/net/wireless/ath/ath9k/main.c 	if (unlikely(sc->sc_ah->power_mode == ATH9K_PM_NETWORK_SLEEP)) {
sc                774 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_wakeup(sc);
sc                775 drivers/net/wireless/ath/ath9k/main.c 		spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc                776 drivers/net/wireless/ath/ath9k/main.c 		if (!(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
sc                777 drivers/net/wireless/ath/ath9k/main.c 			ath9k_hw_setrxabort(sc->sc_ah, 0);
sc                781 drivers/net/wireless/ath/ath9k/main.c 			sc->ps_flags |= PS_WAIT_FOR_PSPOLL_DATA;
sc                784 drivers/net/wireless/ath/ath9k/main.c 			sc->ps_flags |= PS_WAIT_FOR_TX_ACK;
sc                791 drivers/net/wireless/ath/ath9k/main.c 		spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc                792 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_restore(sc);
sc                799 drivers/net/wireless/ath/ath9k/main.c 	if (unlikely(sc->sc_ah->power_mode == ATH9K_PM_FULL_SLEEP)) {
sc                805 drivers/net/wireless/ath/ath9k/main.c 	txctl.txq = sc->tx.txq_map[skb_get_queue_mapping(skb)];
sc                812 drivers/net/wireless/ath/ath9k/main.c 		TX_STAT_INC(sc, txctl.txq->axq_qnum, txfailed);
sc                823 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc                824 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc                828 drivers/net/wireless/ath/ath9k/main.c 	ath9k_deinit_channel_context(sc);
sc                830 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc                832 drivers/net/wireless/ath/ath9k/main.c 	ath9k_rng_stop(sc);
sc                834 drivers/net/wireless/ath/ath9k/main.c 	ath_cancel_work(sc);
sc                838 drivers/net/wireless/ath/ath9k/main.c 		mutex_unlock(&sc->mutex);
sc                843 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc                845 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                854 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                858 drivers/net/wireless/ath/ath9k/main.c 	synchronize_irq(sc->irq);
sc                859 drivers/net/wireless/ath/ath9k/main.c 	tasklet_kill(&sc->intr_tq);
sc                860 drivers/net/wireless/ath/ath9k/main.c 	tasklet_kill(&sc->bcon_tasklet);
sc                862 drivers/net/wireless/ath/ath9k/main.c 	prev_idle = sc->ps_idle;
sc                863 drivers/net/wireless/ath/ath9k/main.c 	sc->ps_idle = true;
sc                865 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                873 drivers/net/wireless/ath/ath9k/main.c 	ath_prepare_reset(sc);
sc                875 drivers/net/wireless/ath/ath9k/main.c 	if (sc->rx.frag) {
sc                876 drivers/net/wireless/ath/ath9k/main.c 		dev_kfree_skb_any(sc->rx.frag);
sc                877 drivers/net/wireless/ath/ath9k/main.c 		sc->rx.frag = NULL;
sc                882 drivers/net/wireless/ath/ath9k/main.c 						    &sc->cur_chan->chandef);
sc                892 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                894 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc                896 drivers/net/wireless/ath/ath9k/main.c 	sc->ps_idle = prev_idle;
sc                898 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc                981 drivers/net/wireless/ath/ath9k/main.c static void ath9k_update_bssid_mask(struct ath_softc *sc,
sc                985 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1005 drivers/net/wireless/ath/ath9k/main.c 				  sc->hw->wiphy->perm_addr[i]);
sc               1010 drivers/net/wireless/ath/ath9k/main.c void ath9k_calculate_iter_data(struct ath_softc *sc,
sc               1027 drivers/net/wireless/ath/ath9k/main.c 	ath9k_update_bssid_mask(sc, ctx, iter_data);
sc               1030 drivers/net/wireless/ath/ath9k/main.c static void ath9k_set_assoc_state(struct ath_softc *sc,
sc               1033 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1041 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_write_associd(sc->sc_ah);
sc               1045 drivers/net/wireless/ath/ath9k/main.c 		sc->sc_ah->stats.avgbrssi = ATH_RSSI_DUMMY_MARKER;
sc               1047 drivers/net/wireless/ath/ath9k/main.c 		spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc               1048 drivers/net/wireless/ath/ath9k/main.c 		sc->ps_flags |= PS_BEACON_SYNC | PS_WAIT_FOR_BEACON;
sc               1049 drivers/net/wireless/ath/ath9k/main.c 		spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc               1052 drivers/net/wireless/ath/ath9k/main.c 	if (ath9k_hw_mci_is_enabled(sc->sc_ah))
sc               1053 drivers/net/wireless/ath/ath9k/main.c 		ath9k_mci_update_wlan_channels(sc, false);
sc               1061 drivers/net/wireless/ath/ath9k/main.c static void ath9k_set_offchannel_state(struct ath_softc *sc)
sc               1063 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1067 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1069 drivers/net/wireless/ath/ath9k/main.c 	if (sc->offchannel.state < ATH_OFFCHANNEL_ROC_START)
sc               1070 drivers/net/wireless/ath/ath9k/main.c 		vif = sc->offchannel.scan_vif;
sc               1072 drivers/net/wireless/ath/ath9k/main.c 		vif = sc->offchannel.roc_vif;
sc               1088 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_write_associd(sc->sc_ah);
sc               1093 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1098 drivers/net/wireless/ath/ath9k/main.c void ath9k_calculate_summary_state(struct ath_softc *sc,
sc               1101 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1105 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_check_active(sc, ctx);
sc               1107 drivers/net/wireless/ath/ath9k/main.c 	if (ctx != sc->cur_chan)
sc               1111 drivers/net/wireless/ath/ath9k/main.c 	if (ctx == &sc->offchannel.chan)
sc               1112 drivers/net/wireless/ath/ath9k/main.c 		return ath9k_set_offchannel_state(sc);
sc               1115 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1116 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_iter_data(sc, ctx, &iter_data);
sc               1130 drivers/net/wireless/ath/ath9k/main.c 			ath9k_beacon_ensure_primary_slot(sc);
sc               1161 drivers/net/wireless/ath/ath9k/main.c 			ath9k_set_assoc_state(sc, iter_data.primary_sta,
sc               1168 drivers/net/wireless/ath/ath9k/main.c 			ath9k_hw_write_associd(sc->sc_ah);
sc               1169 drivers/net/wireless/ath/ath9k/main.c 			if (ath9k_hw_mci_is_enabled(sc->sc_ah))
sc               1170 drivers/net/wireless/ath/ath9k/main.c 				ath9k_mci_update_wlan_channels(sc, true);
sc               1173 drivers/net/wireless/ath/ath9k/main.c 	sc->nbcnvifs = iter_data.nbcnvifs;
sc               1174 drivers/net/wireless/ath/ath9k/main.c 	ath9k_beacon_config(sc, iter_data.primary_beacon_vif,
sc               1192 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1204 drivers/net/wireless/ath/ath9k/main.c void ath9k_set_txpower(struct ath_softc *sc, struct ieee80211_vif *vif)
sc               1207 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1210 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1214 drivers/net/wireless/ath/ath9k/main.c 				sc->hw, IEEE80211_IFACE_ITER_RESUME_ALL,
sc               1217 drivers/net/wireless/ath/ath9k/main.c 			power = sc->hw->conf.power_level;
sc               1219 drivers/net/wireless/ath/ath9k/main.c 		power = sc->hw->conf.power_level;
sc               1221 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chan->txpower = 2 * power;
sc               1222 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_set_txpowerlimit(ah, sc->cur_chan->txpower, false);
sc               1223 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chan->cur_txpower = reg->max_power_level;
sc               1224 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1248 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1249 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1254 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1256 drivers/net/wireless/ath/ath9k/main.c 		if (sc->cur_chan->nvifs >= 1) {
sc               1257 drivers/net/wireless/ath/ath9k/main.c 			mutex_unlock(&sc->mutex);
sc               1260 drivers/net/wireless/ath/ath9k/main.c 		sc->tx99_vif = vif;
sc               1264 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chan->nvifs++;
sc               1270 drivers/net/wireless/ath/ath9k/main.c 		ath9k_beacon_assign_slot(sc, vif);
sc               1274 drivers/net/wireless/ath/ath9k/main.c 		avp->chanctx = sc->cur_chan;
sc               1278 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, avp->chanctx);
sc               1282 drivers/net/wireless/ath/ath9k/main.c 	ath9k_set_txpower(sc, vif);
sc               1284 drivers/net/wireless/ath/ath9k/main.c 	an->sc = sc;
sc               1288 drivers/net/wireless/ath/ath9k/main.c 	ath_tx_node_init(sc, an);
sc               1290 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1299 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1300 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1303 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1306 drivers/net/wireless/ath/ath9k/main.c 		mutex_unlock(&sc->mutex);
sc               1313 drivers/net/wireless/ath/ath9k/main.c 		ath9k_beacon_remove_slot(sc, vif);
sc               1319 drivers/net/wireless/ath/ath9k/main.c 		ath9k_beacon_assign_slot(sc, vif);
sc               1322 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, avp->chanctx);
sc               1324 drivers/net/wireless/ath/ath9k/main.c 	ath9k_set_txpower(sc, vif);
sc               1326 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1333 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1334 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1339 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1341 drivers/net/wireless/ath/ath9k/main.c 	ath9k_p2p_remove_vif(sc, vif);
sc               1343 drivers/net/wireless/ath/ath9k/main.c 	sc->cur_chan->nvifs--;
sc               1344 drivers/net/wireless/ath/ath9k/main.c 	sc->tx99_vif = NULL;
sc               1349 drivers/net/wireless/ath/ath9k/main.c 		ath9k_beacon_remove_slot(sc, vif);
sc               1351 drivers/net/wireless/ath/ath9k/main.c 	ath_tx_node_cleanup(sc, &avp->mcast_node);
sc               1353 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, avp->chanctx);
sc               1355 drivers/net/wireless/ath/ath9k/main.c 	ath9k_set_txpower(sc, NULL);
sc               1357 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1360 drivers/net/wireless/ath/ath9k/main.c static void ath9k_enable_ps(struct ath_softc *sc)
sc               1362 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1368 drivers/net/wireless/ath/ath9k/main.c 	sc->ps_enabled = true;
sc               1379 drivers/net/wireless/ath/ath9k/main.c static void ath9k_disable_ps(struct ath_softc *sc)
sc               1381 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1387 drivers/net/wireless/ath/ath9k/main.c 	sc->ps_enabled = false;
sc               1391 drivers/net/wireless/ath/ath9k/main.c 		sc->ps_flags &= ~(PS_WAIT_FOR_BEACON |
sc               1405 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1406 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1409 drivers/net/wireless/ath/ath9k/main.c 	struct ath_chanctx *ctx = sc->cur_chan;
sc               1411 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1412 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1415 drivers/net/wireless/ath/ath9k/main.c 		sc->ps_idle = !!(conf->flags & IEEE80211_CONF_IDLE);
sc               1416 drivers/net/wireless/ath/ath9k/main.c 		if (sc->ps_idle) {
sc               1417 drivers/net/wireless/ath/ath9k/main.c 			ath_cancel_work(sc);
sc               1418 drivers/net/wireless/ath/ath9k/main.c 			ath9k_stop_btcoex(sc);
sc               1420 drivers/net/wireless/ath/ath9k/main.c 			ath9k_start_btcoex(sc);
sc               1425 drivers/net/wireless/ath/ath9k/main.c 			ath_chanctx_set_channel(sc, ctx, &ctx->chandef);
sc               1437 drivers/net/wireless/ath/ath9k/main.c 		spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc               1439 drivers/net/wireless/ath/ath9k/main.c 			ath9k_enable_ps(sc);
sc               1441 drivers/net/wireless/ath/ath9k/main.c 			ath9k_disable_ps(sc);
sc               1442 drivers/net/wireless/ath/ath9k/main.c 		spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc               1448 drivers/net/wireless/ath/ath9k/main.c 			sc->sc_ah->is_monitoring = true;
sc               1451 drivers/net/wireless/ath/ath9k/main.c 			sc->sc_ah->is_monitoring = false;
sc               1457 drivers/net/wireless/ath/ath9k/main.c 		ath_chanctx_set_channel(sc, ctx, &hw->conf.chandef);
sc               1461 drivers/net/wireless/ath/ath9k/main.c 		ath9k_set_txpower(sc, NULL);
sc               1463 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1464 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1484 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1491 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->chan_lock);
sc               1492 drivers/net/wireless/ath/ath9k/main.c 	ath_for_each_chanctx(sc, ctx)
sc               1495 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.chan.rxfilter = *total_flags;
sc               1497 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->chan_lock);
sc               1499 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1500 drivers/net/wireless/ath/ath9k/main.c 	rfilt = ath_calcrxfilter(sc);
sc               1501 drivers/net/wireless/ath/ath9k/main.c 	ath9k_hw_setrxfilter(sc->sc_ah, rfilt);
sc               1502 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1504 drivers/net/wireless/ath/ath9k/main.c 	ath_dbg(ath9k_hw_common(sc->sc_ah), CONFIG, "Set HW RX filter: 0x%x\n",
sc               1512 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1513 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1518 drivers/net/wireless/ath/ath9k/main.c 	ath_node_attach(sc, sta, vif);
sc               1533 drivers/net/wireless/ath/ath9k/main.c static void ath9k_del_ps_key(struct ath_softc *sc,
sc               1537 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1553 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1555 drivers/net/wireless/ath/ath9k/main.c 	ath9k_del_ps_key(sc, vif, sta);
sc               1556 drivers/net/wireless/ath/ath9k/main.c 	ath_node_detach(sc, sta);
sc               1567 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1568 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1587 drivers/net/wireless/ath/ath9k/main.c 				ath_chanctx_event(sc, vif,
sc               1613 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1619 drivers/net/wireless/ath/ath9k/main.c 		ath_tx_aggr_sleep(sta, sc, an);
sc               1620 drivers/net/wireless/ath/ath9k/main.c 		ath9k_sta_set_tx_filter(sc->sc_ah, an, true);
sc               1623 drivers/net/wireless/ath/ath9k/main.c 		ath9k_sta_set_tx_filter(sc->sc_ah, an, false);
sc               1625 drivers/net/wireless/ath/ath9k/main.c 		ath_tx_aggr_wakeup(sc, an);
sc               1634 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1635 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1643 drivers/net/wireless/ath/ath9k/main.c 	txq = sc->tx.txq_map[queue];
sc               1645 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1646 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1660 drivers/net/wireless/ath/ath9k/main.c 	ath_update_max_aggr_framelen(sc, queue, qi.tqi_burstTime);
sc               1661 drivers/net/wireless/ath/ath9k/main.c 	ret = ath_txq_update(sc, txq->axq_qnum, &qi);
sc               1665 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1666 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1677 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1678 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1700 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1701 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1709 drivers/net/wireless/ath/ath9k/main.c 			ath9k_del_ps_key(sc, vif, sta);
sc               1719 drivers/net/wireless/ath/ath9k/main.c 			if (sc->sc_ah->sw_mgmt_crypto_tx &&
sc               1750 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1751 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1766 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1767 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               1772 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1773 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1783 drivers/net/wireless/ath/ath9k/main.c 		ath9k_calculate_summary_state(sc, avp->chanctx);
sc               1790 drivers/net/wireless/ath/ath9k/main.c 		ath9k_hw_write_associd(sc->sc_ah);
sc               1796 drivers/net/wireless/ath/ath9k/main.c 		ath9k_calculate_summary_state(sc, avp->chanctx);
sc               1799 drivers/net/wireless/ath/ath9k/main.c 	if ((avp->chanctx == sc->cur_chan) &&
sc               1812 drivers/net/wireless/ath/ath9k/main.c 			sc->beacon.slottime = slottime;
sc               1813 drivers/net/wireless/ath/ath9k/main.c 			sc->beacon.updateslot = UPDATE;
sc               1821 drivers/net/wireless/ath/ath9k/main.c 		ath9k_p2p_bss_info_changed(sc, vif);
sc               1824 drivers/net/wireless/ath/ath9k/main.c 		ath_check_ani(sc);
sc               1829 drivers/net/wireless/ath/ath9k/main.c 		ath9k_set_txpower(sc, vif);
sc               1832 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1833 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1840 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1844 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1845 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1847 drivers/net/wireless/ath/ath9k/main.c 	if (sc->cur_chan == avp->chanctx) {
sc               1848 drivers/net/wireless/ath/ath9k/main.c 		tsf = ath9k_hw_gettsf64(sc->sc_ah);
sc               1850 drivers/net/wireless/ath/ath9k/main.c 		tsf = sc->cur_chan->tsf_val +
sc               1851 drivers/net/wireless/ath/ath9k/main.c 		      ath9k_hw_get_tsf_offset(&sc->cur_chan->tsf_ts, NULL);
sc               1854 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1855 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1864 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1867 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1868 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1871 drivers/net/wireless/ath/ath9k/main.c 	if (sc->cur_chan == avp->chanctx)
sc               1872 drivers/net/wireless/ath/ath9k/main.c 		ath9k_hw_settsf64(sc->sc_ah, tsf);
sc               1874 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1875 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1880 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1883 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1885 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               1887 drivers/net/wireless/ath/ath9k/main.c 	if (sc->cur_chan == avp->chanctx)
sc               1888 drivers/net/wireless/ath/ath9k/main.c 		ath9k_hw_reset_tsf(sc->sc_ah);
sc               1890 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_restore(sc);
sc               1892 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1899 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1900 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1910 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               1924 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_wakeup(sc);
sc               1925 drivers/net/wireless/ath/ath9k/main.c 		ret = ath_tx_aggr_start(sc, sta, tid, ssn);
sc               1928 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_restore(sc);
sc               1935 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_wakeup(sc);
sc               1936 drivers/net/wireless/ath/ath9k/main.c 		ath_tx_aggr_stop(sc, sta, tid);
sc               1939 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_restore(sc);
sc               1947 drivers/net/wireless/ath/ath9k/main.c 		ath_err(ath9k_hw_common(sc->sc_ah), "Unknown AMPDU action\n");
sc               1950 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               1958 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               1959 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1970 drivers/net/wireless/ath/ath9k/main.c 		ath_update_survey_stats(sc);
sc               1988 drivers/net/wireless/ath/ath9k/main.c 	memcpy(survey, &sc->survey[pos], sizeof(*survey));
sc               1995 drivers/net/wireless/ath/ath9k/main.c static void ath9k_enable_dynack(struct ath_softc *sc)
sc               1999 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2004 drivers/net/wireless/ath/ath9k/main.c 	rfilt = ath_calcrxfilter(sc);
sc               2012 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2013 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2018 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2026 drivers/net/wireless/ath/ath9k/main.c 			rfilt = ath_calcrxfilter(sc);
sc               2029 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_wakeup(sc);
sc               2031 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_restore(sc);
sc               2033 drivers/net/wireless/ath/ath9k/main.c 		ath9k_enable_dynack(sc);
sc               2036 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2039 drivers/net/wireless/ath/ath9k/main.c static bool ath9k_has_tx_pending(struct ath_softc *sc,
sc               2045 drivers/net/wireless/ath/ath9k/main.c 		if (!ATH_TXQ_SETUP(sc, i))
sc               2048 drivers/net/wireless/ath/ath9k/main.c 		npend = ath9k_has_pending_frames(sc, &sc->tx.txq[i],
sc               2060 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2061 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2081 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2083 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2089 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2090 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2095 drivers/net/wireless/ath/ath9k/main.c 	cancel_delayed_work_sync(&sc->hw_check_work);
sc               2107 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->chan_lock);
sc               2111 drivers/net/wireless/ath/ath9k/main.c 		timeout = sc->cur_chan->flush_timeout;
sc               2112 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->chan_lock);
sc               2117 drivers/net/wireless/ath/ath9k/main.c 	if (wait_event_timeout(sc->tx_wait, !ath9k_has_tx_pending(sc, sw_pending),
sc               2122 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_wakeup(sc);
sc               2123 drivers/net/wireless/ath/ath9k/main.c 		spin_lock_bh(&sc->sc_pcu_lock);
sc               2124 drivers/net/wireless/ath/ath9k/main.c 		drain_txq = ath_drain_all_txq(sc);
sc               2125 drivers/net/wireless/ath/ath9k/main.c 		spin_unlock_bh(&sc->sc_pcu_lock);
sc               2128 drivers/net/wireless/ath/ath9k/main.c 			ath_reset(sc, NULL);
sc               2130 drivers/net/wireless/ath/ath9k/main.c 		ath9k_ps_restore(sc);
sc               2133 drivers/net/wireless/ath/ath9k/main.c 	ieee80211_queue_delayed_work(hw, &sc->hw_check_work,
sc               2139 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2141 drivers/net/wireless/ath/ath9k/main.c 	return ath9k_has_tx_pending(sc, true);
sc               2146 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2147 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2155 drivers/net/wireless/ath/ath9k/main.c 	vif = sc->beacon.bslot[0];
sc               2164 drivers/net/wireless/ath/ath9k/main.c 	if (!sc->beacon.tx_processed && !edma) {
sc               2165 drivers/net/wireless/ath/ath9k/main.c 		tasklet_disable(&sc->bcon_tasklet);
sc               2175 drivers/net/wireless/ath/ath9k/main.c 		sc->beacon.tx_processed = true;
sc               2176 drivers/net/wireless/ath/ath9k/main.c 		sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK);
sc               2179 drivers/net/wireless/ath/ath9k/main.c 		tasklet_enable(&sc->bcon_tasklet);
sc               2182 drivers/net/wireless/ath/ath9k/main.c 	return sc->beacon.tx_last;
sc               2188 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2189 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2236 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2237 drivers/net/wireless/ath/ath9k/main.c 	struct ath_hw *ah = sc->sc_ah;
sc               2245 drivers/net/wireless/ath/ath9k/main.c 	sc->ant_rx = rx_ant;
sc               2246 drivers/net/wireless/ath/ath9k/main.c 	sc->ant_tx = tx_ant;
sc               2265 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2267 drivers/net/wireless/ath/ath9k/main.c 	*tx_ant = sc->ant_tx;
sc               2268 drivers/net/wireless/ath/ath9k/main.c 	*rx_ant = sc->ant_rx;
sc               2276 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2277 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2284 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2285 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2291 drivers/net/wireless/ath/ath9k/main.c static void ath9k_cancel_pending_offchannel(struct ath_softc *sc)
sc               2293 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2295 drivers/net/wireless/ath/ath9k/main.c 	if (sc->offchannel.roc_vif) {
sc               2299 drivers/net/wireless/ath/ath9k/main.c 		del_timer_sync(&sc->offchannel.timer);
sc               2300 drivers/net/wireless/ath/ath9k/main.c 		if (sc->offchannel.state >= ATH_OFFCHANNEL_ROC_START)
sc               2301 drivers/net/wireless/ath/ath9k/main.c 			ath_roc_complete(sc, ATH_ROC_COMPLETE_ABORT);
sc               2308 drivers/net/wireless/ath/ath9k/main.c 		del_timer_sync(&sc->offchannel.timer);
sc               2309 drivers/net/wireless/ath/ath9k/main.c 		ath_scan_complete(sc, true);
sc               2317 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2318 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2321 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2323 drivers/net/wireless/ath/ath9k/main.c 	if (WARN_ON(sc->offchannel.scan_req)) {
sc               2328 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               2330 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.scan_vif = vif;
sc               2331 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.scan_req = req;
sc               2332 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.scan_idx = 0;
sc               2337 drivers/net/wireless/ath/ath9k/main.c 	if (sc->offchannel.state == ATH_OFFCHANNEL_IDLE) {
sc               2339 drivers/net/wireless/ath/ath9k/main.c 		ath_offchannel_next(sc);
sc               2343 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2351 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2352 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2356 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2357 drivers/net/wireless/ath/ath9k/main.c 	del_timer_sync(&sc->offchannel.timer);
sc               2358 drivers/net/wireless/ath/ath9k/main.c 	ath_scan_complete(sc, true);
sc               2359 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2367 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2368 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2371 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2373 drivers/net/wireless/ath/ath9k/main.c 	if (WARN_ON(sc->offchannel.roc_vif)) {
sc               2378 drivers/net/wireless/ath/ath9k/main.c 	ath9k_ps_wakeup(sc);
sc               2379 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.roc_vif = vif;
sc               2380 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.roc_chan = chan;
sc               2381 drivers/net/wireless/ath/ath9k/main.c 	sc->offchannel.roc_duration = duration;
sc               2387 drivers/net/wireless/ath/ath9k/main.c 	if (sc->offchannel.state == ATH_OFFCHANNEL_IDLE) {
sc               2389 drivers/net/wireless/ath/ath9k/main.c 		ath_offchannel_next(sc);
sc               2393 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2401 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2402 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2404 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2407 drivers/net/wireless/ath/ath9k/main.c 	del_timer_sync(&sc->offchannel.timer);
sc               2409 drivers/net/wireless/ath/ath9k/main.c 	if (sc->offchannel.roc_vif) {
sc               2410 drivers/net/wireless/ath/ath9k/main.c 		if (sc->offchannel.state >= ATH_OFFCHANNEL_ROC_START)
sc               2411 drivers/net/wireless/ath/ath9k/main.c 			ath_roc_complete(sc, ATH_ROC_COMPLETE_CANCEL);
sc               2414 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2422 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2423 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2427 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2429 drivers/net/wireless/ath/ath9k/main.c 	ath_for_each_chanctx(sc, ctx) {
sc               2436 drivers/net/wireless/ath/ath9k/main.c 		pos = ctx - &sc->chanctx[0];
sc               2443 drivers/net/wireless/ath/ath9k/main.c 		ath_chanctx_set_channel(sc, ctx, &conf->def);
sc               2445 drivers/net/wireless/ath/ath9k/main.c 		mutex_unlock(&sc->mutex);
sc               2449 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2457 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2458 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2461 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2469 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_event(sc, NULL, ATH_CHANCTX_EVENT_UNASSIGN);
sc               2471 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2478 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2479 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2482 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2486 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_set_channel(sc, ctx, &conf->def);
sc               2487 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2494 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2495 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2500 drivers/net/wireless/ath/ath9k/main.c 	ath9k_cancel_pending_offchannel(sc);
sc               2502 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2512 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, ctx);
sc               2516 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2525 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2526 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2531 drivers/net/wireless/ath/ath9k/main.c 	ath9k_cancel_pending_offchannel(sc);
sc               2533 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2543 drivers/net/wireless/ath/ath9k/main.c 	ath9k_calculate_summary_state(sc, ctx);
sc               2547 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2554 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2555 drivers/net/wireless/ath/ath9k/main.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2569 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2571 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->chan_lock);
sc               2572 drivers/net/wireless/ath/ath9k/main.c 	if (sc->next_chan || (sc->cur_chan != avp->chanctx))
sc               2574 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->chan_lock);
sc               2579 drivers/net/wireless/ath/ath9k/main.c 	ath9k_cancel_pending_offchannel(sc);
sc               2581 drivers/net/wireless/ath/ath9k/main.c 	go_ctx = ath_is_go_chanctx_present(sc);
sc               2588 drivers/net/wireless/ath/ath9k/main.c 		spin_lock_bh(&sc->chan_lock);
sc               2589 drivers/net/wireless/ath/ath9k/main.c 		sc->sched.mgd_prepare_tx = true;
sc               2592 drivers/net/wireless/ath/ath9k/main.c 		spin_unlock_bh(&sc->chan_lock);
sc               2595 drivers/net/wireless/ath/ath9k/main.c 		init_completion(&sc->go_beacon);
sc               2597 drivers/net/wireless/ath/ath9k/main.c 		mutex_unlock(&sc->mutex);
sc               2599 drivers/net/wireless/ath/ath9k/main.c 		if (wait_for_completion_timeout(&sc->go_beacon,
sc               2604 drivers/net/wireless/ath/ath9k/main.c 			spin_lock_bh(&sc->chan_lock);
sc               2605 drivers/net/wireless/ath/ath9k/main.c 			sc->sched.mgd_prepare_tx = false;
sc               2606 drivers/net/wireless/ath/ath9k/main.c 			spin_unlock_bh(&sc->chan_lock);
sc               2609 drivers/net/wireless/ath/ath9k/main.c 		mutex_lock(&sc->mutex);
sc               2616 drivers/net/wireless/ath/ath9k/main.c 	spin_lock_bh(&sc->chan_lock);
sc               2617 drivers/net/wireless/ath/ath9k/main.c 	sc->next_chan = avp->chanctx;
sc               2618 drivers/net/wireless/ath/ath9k/main.c 	sc->sched.state = ATH_CHANCTX_STATE_FORCE_ACTIVE;
sc               2619 drivers/net/wireless/ath/ath9k/main.c 	spin_unlock_bh(&sc->chan_lock);
sc               2621 drivers/net/wireless/ath/ath9k/main.c 	ath_chanctx_set_next(sc, true);
sc               2623 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
sc               2648 drivers/net/wireless/ath/ath9k/main.c 	struct ath_softc *sc = hw->priv;
sc               2651 drivers/net/wireless/ath/ath9k/main.c 	mutex_lock(&sc->mutex);
sc               2655 drivers/net/wireless/ath/ath9k/main.c 		*dbm = sc->cur_chan->cur_txpower;
sc               2656 drivers/net/wireless/ath/ath9k/main.c 	mutex_unlock(&sc->mutex);
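The main.c entries above all follow the same shape: every mac80211 channel-context callback takes sc->mutex for its whole body, and the mgd_prepare_tx path additionally drops that mutex while it waits on the sc->go_beacon completion with a timeout, clearing sc->sched.mgd_prepare_tx itself when the wait times out. Below is a minimal userspace sketch of that drop-the-lock-and-wait-with-timeout pattern using pthreads; all names (ctx_mutex, state_lock, beacon_cv, prepare_tx_wait) are invented for the example and none of this is the driver's code.

    /*
     * Illustrative sketch only.  ctx_mutex plays the role of sc->mutex,
     * state_lock the role of sc->chan_lock, beacon_cv the role of the
     * sc->go_beacon completion.  The caller holds ctx_mutex on entry and
     * on return, as the driver holds sc->mutex.
     */
    #include <pthread.h>
    #include <stdbool.h>
    #include <time.h>

    static pthread_mutex_t ctx_mutex  = PTHREAD_MUTEX_INITIALIZER;
    static pthread_mutex_t state_lock = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t  beacon_cv  = PTHREAD_COND_INITIALIZER;
    static bool beacon_seen;
    static bool mgd_prepare_tx;

    /* Called by the "beacon sent" side to signal completion. */
    void signal_beacon_sent(void)
    {
        pthread_mutex_lock(&state_lock);
        beacon_seen = true;
        pthread_cond_signal(&beacon_cv);
        pthread_mutex_unlock(&state_lock);
    }

    /* Returns true if the beacon was seen before the timeout expired. */
    bool prepare_tx_wait(unsigned int timeout_ms)
    {
        struct timespec deadline;
        int ret = 0;

        pthread_mutex_lock(&state_lock);
        mgd_prepare_tx = true;
        beacon_seen = false;
        pthread_mutex_unlock(&state_lock);

        /* Drop the big mutex while sleeping, as the driver does. */
        pthread_mutex_unlock(&ctx_mutex);

        clock_gettime(CLOCK_REALTIME, &deadline);
        deadline.tv_sec  += timeout_ms / 1000;
        deadline.tv_nsec += (long)(timeout_ms % 1000) * 1000000L;
        if (deadline.tv_nsec >= 1000000000L) {
            deadline.tv_sec++;
            deadline.tv_nsec -= 1000000000L;
        }

        pthread_mutex_lock(&state_lock);
        while (!beacon_seen && ret == 0)
            ret = pthread_cond_timedwait(&beacon_cv, &state_lock, &deadline);
        if (!beacon_seen)
            mgd_prepare_tx = false;   /* timed out: clear the flag ourselves */
        pthread_mutex_unlock(&state_lock);

        pthread_mutex_lock(&ctx_mutex);
        return beacon_seen;
    }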
sc                119 drivers/net/wireless/ath/ath9k/mci.c static void ath_mci_update_scheme(struct ath_softc *sc)
sc                121 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                122 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                124 drivers/net/wireless/ath/ath9k/mci.c 	struct ath9k_hw_mci *mci_hw = &sc->sc_ah->btcoex_hw.mci;
sc                160 drivers/net/wireless/ath/ath9k/mci.c 			btcoex->duty_cycle = AR_SREV_9565(sc->sc_ah) ? 40 : 35;
sc                186 drivers/net/wireless/ath/ath9k/mci.c 	if (IS_CHAN_2GHZ(sc->sc_ah->curchan)) {
sc                187 drivers/net/wireless/ath/ath9k/mci.c 		if (IS_CHAN_HT(sc->sc_ah->curchan))
sc                193 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_btcoex_timer_pause(sc);
sc                194 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_hw_btcoex_disable(sc->sc_ah);
sc                196 drivers/net/wireless/ath/ath9k/mci.c 	if (IS_CHAN_5GHZ(sc->sc_ah->curchan))
sc                206 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_hw_btcoex_enable(sc->sc_ah);
sc                207 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_btcoex_timer_resume(sc);
sc                210 drivers/net/wireless/ath/ath9k/mci.c static void ath_mci_cal_msg(struct ath_softc *sc, u8 opcode, u8 *rx_payload)
sc                212 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                221 drivers/net/wireless/ath/ath9k/mci.c 			ath9k_queue_reset(sc, RESET_TYPE_MCI);
sc                227 drivers/net/wireless/ath/ath9k/mci.c 		ar9003_mci_send_message(sc->sc_ah, MCI_GPM, 0, payload,
sc                238 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_softc *sc = container_of(work, struct ath_softc, mci_work);
sc                240 drivers/net/wireless/ath/ath9k/mci.c 	ath_mci_update_scheme(sc);
sc                256 drivers/net/wireless/ath/ath9k/mci.c static void ath_mci_set_concur_txprio(struct ath_softc *sc)
sc                258 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                292 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_hw_btcoex_set_concur_txprio(sc->sc_ah, stomp_txprio);
sc                295 drivers/net/wireless/ath/ath9k/mci.c static u8 ath_mci_process_profile(struct ath_softc *sc,
sc                298 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                299 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                326 drivers/net/wireless/ath/ath9k/mci.c 	ath_mci_set_concur_txprio(sc);
sc                330 drivers/net/wireless/ath/ath9k/mci.c static u8 ath_mci_process_status(struct ath_softc *sc,
sc                333 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                360 drivers/net/wireless/ath/ath9k/mci.c 	ath_mci_set_concur_txprio(sc);
sc                367 drivers/net/wireless/ath/ath9k/mci.c static void ath_mci_msg(struct ath_softc *sc, u8 opcode, u8 *rx_payload)
sc                369 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                372 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                379 drivers/net/wireless/ath/ath9k/mci.c 		ath_mci_flush_profile(&sc->btcoex.mci);
sc                408 drivers/net/wireless/ath/ath9k/mci.c 		update_scheme += ath_mci_process_profile(sc, &profile_info);
sc                424 drivers/net/wireless/ath/ath9k/mci.c 		update_scheme += ath_mci_process_status(sc, &profile_status);
sc                431 drivers/net/wireless/ath/ath9k/mci.c 		ieee80211_queue_work(sc->hw, &sc->mci_work);
sc                434 drivers/net/wireless/ath/ath9k/mci.c int ath_mci_setup(struct ath_softc *sc)
sc                436 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                437 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_mci_coex *mci = &sc->mci_coex;
sc                441 drivers/net/wireless/ath/ath9k/mci.c 	buf->bf_addr = dmam_alloc_coherent(sc->dev,
sc                459 drivers/net/wireless/ath/ath9k/mci.c 	ret = ar9003_mci_setup(sc->sc_ah, mci->gpm_buf.bf_paddr,
sc                467 drivers/net/wireless/ath/ath9k/mci.c 	INIT_WORK(&sc->mci_work, ath9k_mci_work);
sc                473 drivers/net/wireless/ath/ath9k/mci.c void ath_mci_cleanup(struct ath_softc *sc)
sc                475 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                476 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                483 drivers/net/wireless/ath/ath9k/mci.c void ath_mci_intr(struct ath_softc *sc)
sc                485 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_mci_coex *mci = &sc->mci_coex;
sc                486 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                495 drivers/net/wireless/ath/ath9k/mci.c 	ar9003_mci_get_interrupt(sc->sc_ah, &mci_int, &mci_int_rxmsg);
sc                580 drivers/net/wireless/ath/ath9k/mci.c 				ath_mci_cal_msg(sc, subtype, (u8 *)pgpm);
sc                584 drivers/net/wireless/ath/ath9k/mci.c 					ath_mci_msg(sc, opcode, (u8 *)pgpm);
sc                627 drivers/net/wireless/ath/ath9k/mci.c 		ath_mci_msg(sc, MCI_GPM_COEX_NOOP, NULL);
sc                631 drivers/net/wireless/ath/ath9k/mci.c void ath_mci_enable(struct ath_softc *sc)
sc                633 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                638 drivers/net/wireless/ath/ath9k/mci.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_MCI)
sc                639 drivers/net/wireless/ath/ath9k/mci.c 		sc->sc_ah->imask |= ATH9K_INT_MCI;
sc                642 drivers/net/wireless/ath/ath9k/mci.c void ath9k_mci_update_wlan_channels(struct ath_softc *sc, bool allow_all)
sc                644 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                692 drivers/net/wireless/ath/ath9k/mci.c void ath9k_mci_set_txpower(struct ath_softc *sc, bool setchannel,
sc                695 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                696 drivers/net/wireless/ath/ath9k/mci.c 	struct ath9k_hw_mci *mci_hw = &sc->sc_ah->btcoex_hw.mci;
sc                708 drivers/net/wireless/ath/ath9k/mci.c 		struct ath9k_hw_cal_data *caldata = &sc->cur_chan->caldata;
sc                722 drivers/net/wireless/ath/ath9k/mci.c 		ath9k_hw_set_txpowerlimit(ah, sc->cur_chan->txpower, false);
sc                725 drivers/net/wireless/ath/ath9k/mci.c static void ath9k_mci_stomp_audio(struct ath_softc *sc)
sc                727 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                728 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                741 drivers/net/wireless/ath/ath9k/mci.c void ath9k_mci_update_rssi(struct ath_softc *sc)
sc                743 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_hw *ah = sc->sc_ah;
sc                744 drivers/net/wireless/ath/ath9k/mci.c 	struct ath_btcoex *btcoex = &sc->btcoex;
sc                745 drivers/net/wireless/ath/ath9k/mci.c 	struct ath9k_hw_mci *mci_hw = &sc->sc_ah->btcoex_hw.mci;
sc                747 drivers/net/wireless/ath/ath9k/mci.c 	ath9k_mci_stomp_audio(sc);
sc                757 drivers/net/wireless/ath/ath9k/mci.c 			ath9k_mci_set_txpower(sc, false, true);
sc                764 drivers/net/wireless/ath/ath9k/mci.c 			ath9k_mci_set_txpower(sc, false, false);
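The mci.c entries show the usual split between a cheap interrupt path and deferred work: ath_mci_intr()/ath_mci_msg() only parse GPM messages and bump an update counter, and when something changed they queue sc->mci_work so ath_mci_update_scheme() can pause the coex timers, reprogram the hardware and resume them in process context. The following is a small illustrative sketch of that defer-the-heavy-part idea under stated assumptions; fake_coex, coex_rx_message and coex_worker are invented names, and the real deferral goes through ieee80211_queue_work().

    #include <stdatomic.h>
    #include <stdbool.h>

    struct fake_coex {
        atomic_bool work_pending;     /* stands in for sc->mci_work being queued */
        atomic_int  profile_changes;  /* stands in for the update_scheme counter */
    };

    /* Fast path (interrupt context in the driver): parse, count, defer. */
    void coex_rx_message(struct fake_coex *cx, bool profile_changed)
    {
        if (profile_changed)
            atomic_fetch_add(&cx->profile_changes, 1);

        if (atomic_load(&cx->profile_changes) > 0)
            atomic_store(&cx->work_pending, true);  /* queue_work() analogue */
    }

    /* Slow path (process context): run the expensive re-scheme once. */
    void coex_worker(struct fake_coex *cx)
    {
        if (!atomic_exchange(&cx->work_pending, false))
            return;

        atomic_store(&cx->profile_changes, 0);
        /* ...recompute duty cycle and stomp type, then pause, reprogram
         *    and resume the btcoex timers, as ath_mci_update_scheme() does... */
    }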
sc                154 drivers/net/wireless/ath/ath9k/mci.h int ath_mci_setup(struct ath_softc *sc);
sc                155 drivers/net/wireless/ath/ath9k/mci.h void ath_mci_cleanup(struct ath_softc *sc);
sc                156 drivers/net/wireless/ath/ath9k/mci.h void ath_mci_intr(struct ath_softc *sc);
sc                157 drivers/net/wireless/ath/ath9k/mci.h void ath9k_mci_update_rssi(struct ath_softc *sc);
sc                160 drivers/net/wireless/ath/ath9k/mci.h void ath_mci_enable(struct ath_softc *sc);
sc                161 drivers/net/wireless/ath/ath9k/mci.h void ath9k_mci_update_wlan_channels(struct ath_softc *sc, bool allow_all);
sc                162 drivers/net/wireless/ath/ath9k/mci.h void ath9k_mci_set_txpower(struct ath_softc *sc, bool setchannel,
sc                165 drivers/net/wireless/ath/ath9k/mci.h static inline void ath_mci_enable(struct ath_softc *sc)
sc                168 drivers/net/wireless/ath/ath9k/mci.h static inline void ath9k_mci_update_wlan_channels(struct ath_softc *sc,
sc                172 drivers/net/wireless/ath/ath9k/mci.h static inline void ath9k_mci_set_txpower(struct ath_softc *sc, bool setchannel,
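The mci.h lines pair real prototypes with empty static inline stubs, so code that calls ath_mci_enable() and friends builds unchanged whether MCI/btcoex support is compiled in or not. A minimal sketch of that header pattern; CONFIG_EXAMPLE_FEATURE and the example_* names are invented for illustration.

    struct example_dev;

    #ifdef CONFIG_EXAMPLE_FEATURE
    int  example_setup(struct example_dev *dev);
    void example_enable(struct example_dev *dev);
    #else
    /* Feature compiled out: callers still link, the calls become no-ops. */
    static inline int example_setup(struct example_dev *dev)
    {
        return 0;
    }
    static inline void example_enable(struct example_dev *dev)
    {
    }
    #endif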
sc                784 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc                787 drivers/net/wireless/ath/ath9k/pci.c 	pci_read_config_byte(to_pci_dev(sc->dev), PCI_CACHE_LINE_SIZE, &u8tmp);
sc                823 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc = (struct ath_softc *) common->priv;
sc                824 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_hw *ah = sc->sc_ah;
sc                825 drivers/net/wireless/ath/ath9k/pci.c 	struct pci_dev *pdev = to_pci_dev(sc->dev);
sc                887 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc;
sc                959 drivers/net/wireless/ath/ath9k/pci.c 	sc = hw->priv;
sc                960 drivers/net/wireless/ath/ath9k/pci.c 	sc->hw = hw;
sc                961 drivers/net/wireless/ath/ath9k/pci.c 	sc->dev = &pdev->dev;
sc                962 drivers/net/wireless/ath/ath9k/pci.c 	sc->mem = pcim_iomap_table(pdev)[0];
sc                963 drivers/net/wireless/ath/ath9k/pci.c 	sc->driver_data = id->driver_data;
sc                975 drivers/net/wireless/ath/ath9k/pci.c 		ret = request_irq(pdev->irq, ath_isr, IRQF_SHARED, "ath9k", sc);
sc                977 drivers/net/wireless/ath/ath9k/pci.c 		ret = request_irq(pdev->irq, ath_isr, 0, "ath9k", sc);
sc                984 drivers/net/wireless/ath/ath9k/pci.c 	sc->irq = pdev->irq;
sc                986 drivers/net/wireless/ath/ath9k/pci.c 	ret = ath9k_init_device(id->device, sc, &ath_pci_bus_ops);
sc                992 drivers/net/wireless/ath/ath9k/pci.c 	sc->sc_ah->msi_enabled = msi_enabled;
sc                993 drivers/net/wireless/ath/ath9k/pci.c 	sc->sc_ah->msi_reg = 0;
sc                995 drivers/net/wireless/ath/ath9k/pci.c 	ath9k_hw_name(sc->sc_ah, hw_name, sizeof(hw_name));
sc                997 drivers/net/wireless/ath/ath9k/pci.c 		   hw_name, (unsigned long)sc->mem, pdev->irq);
sc               1002 drivers/net/wireless/ath/ath9k/pci.c 	free_irq(sc->irq, sc);
sc               1011 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc = hw->priv;
sc               1014 drivers/net/wireless/ath/ath9k/pci.c 		sc->sc_ah->ah_flags |= AH_UNPLUGGED;
sc               1015 drivers/net/wireless/ath/ath9k/pci.c 	ath9k_deinit_device(sc);
sc               1016 drivers/net/wireless/ath/ath9k/pci.c 	free_irq(sc->irq, sc);
sc               1017 drivers/net/wireless/ath/ath9k/pci.c 	ieee80211_free_hw(sc->hw);
sc               1026 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc = hw->priv;
sc               1027 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1038 drivers/net/wireless/ath/ath9k/pci.c 	ath9k_stop_btcoex(sc);
sc               1039 drivers/net/wireless/ath/ath9k/pci.c 	ath9k_hw_disable(sc->sc_ah);
sc               1040 drivers/net/wireless/ath/ath9k/pci.c 	del_timer_sync(&sc->sleep_timer);
sc               1041 drivers/net/wireless/ath/ath9k/pci.c 	ath9k_hw_setpower(sc->sc_ah, ATH9K_PM_FULL_SLEEP);
sc               1050 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_softc *sc = hw->priv;
sc               1051 drivers/net/wireless/ath/ath9k/pci.c 	struct ath_hw *ah = sc->sc_ah;
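The pci.c probe above acquires resources in order (iomapped registers, an IRQ requested with IRQF_SHARED unless MSI is in use, then device init) and unwinds them in reverse on failure, while remove and suspend release the IRQ and put the hardware to sleep. Below is a toy sketch of that acquire/unwind shape; the fake_* helpers are invented and this is not the driver's probe.

    #include <stdlib.h>

    struct fake_sc {
        void *mem;   /* mapped register space in the real probe */
        int   irq;   /* pdev->irq */
    };

    static int  fake_request_irq(int irq)            { (void)irq; return 0; }
    static void fake_free_irq(int irq)               { (void)irq; }
    static int  fake_init_device(struct fake_sc *sc) { (void)sc;  return 0; }

    int fake_probe(struct fake_sc *sc, int irq)
    {
        sc->mem = malloc(4096);
        if (!sc->mem)
            return -1;

        if (fake_request_irq(irq) != 0)
            goto err_mem;
        sc->irq = irq;

        if (fake_init_device(sc) != 0)
            goto err_irq;               /* undo in reverse order */

        return 0;

    err_irq:
        fake_free_irq(sc->irq);
    err_mem:
        free(sc->mem);
        return -1;
    }

    void fake_remove(struct fake_sc *sc)
    {
        fake_free_irq(sc->irq);
        free(sc->mem);
    }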
sc                 23 drivers/net/wireless/ath/ath9k/recv.c static inline bool ath9k_check_auto_sleep(struct ath_softc *sc)
sc                 25 drivers/net/wireless/ath/ath9k/recv.c 	return sc->ps_enabled &&
sc                 26 drivers/net/wireless/ath/ath9k/recv.c 	       (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_AUTOSLEEP);
sc                 37 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_buf_link(struct ath_softc *sc, struct ath_rxbuf *bf,
sc                 40 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                 63 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->rx.rxlink)
sc                 64 drivers/net/wireless/ath/ath9k/recv.c 		*sc->rx.rxlink = bf->bf_daddr;
sc                 68 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.rxlink = &ds->ds_link;
sc                 71 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_buf_relink(struct ath_softc *sc, struct ath_rxbuf *bf,
sc                 74 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->rx.buf_hold)
sc                 75 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_buf_link(sc, sc->rx.buf_hold, flush);
sc                 77 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.buf_hold = bf;
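ath_rx_buf_link() above appends a receive buffer to the DMA chain by remembering the address of the previous descriptor's link word (sc->rx.rxlink) and patching it when the next buffer arrives; ath_rx_buf_relink() holds back the most recent buffer so the hardware never chases a descriptor that is still being completed. A userspace sketch of the link-word idiom follows, with the struct address standing in for the DMA address; rx_ring and rx_buf_link are invented names.

    #include <stdint.h>

    struct rx_desc {
        uintptr_t ds_link;   /* "DMA address" of the next descriptor, 0 = end */
        /* ...status words would follow in real hardware... */
    };

    struct rx_ring {
        uintptr_t *rxlink;   /* points at the previous descriptor's ds_link */
    };

    void rx_buf_link(struct rx_ring *ring, struct rx_desc *ds)
    {
        ds->ds_link = 0;                       /* new tail terminates the list */
        if (ring->rxlink)
            *ring->rxlink = (uintptr_t)ds;     /* patch previous tail to point here */
        ring->rxlink = &ds->ds_link;           /* remember where to patch next time */
    }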
sc                 80 drivers/net/wireless/ath/ath9k/recv.c static void ath_setdefantenna(struct ath_softc *sc, u32 antenna)
sc                 83 drivers/net/wireless/ath/ath9k/recv.c 	ath9k_hw_setantenna(sc->sc_ah, antenna);
sc                 84 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.defant = antenna;
sc                 85 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.rxotherant = 0;
sc                 88 drivers/net/wireless/ath/ath9k/recv.c static void ath_opmode_init(struct ath_softc *sc)
sc                 90 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                 96 drivers/net/wireless/ath/ath9k/recv.c 	rfilt = ath_calcrxfilter(sc);
sc                110 drivers/net/wireless/ath/ath9k/recv.c static bool ath_rx_edma_buf_link(struct ath_softc *sc,
sc                113 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                118 drivers/net/wireless/ath/ath9k/recv.c 	rx_edma = &sc->rx.rx_edma[qtype];
sc                122 drivers/net/wireless/ath/ath9k/recv.c 	bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list);
sc                128 drivers/net/wireless/ath/ath9k/recv.c 	dma_sync_single_for_device(sc->dev, bf->bf_buf_addr,
sc                138 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_addbuffer_edma(struct ath_softc *sc,
sc                141 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                144 drivers/net/wireless/ath/ath9k/recv.c 	if (list_empty(&sc->rx.rxbuf)) {
sc                149 drivers/net/wireless/ath/ath9k/recv.c 	list_for_each_entry_safe(bf, tbf, &sc->rx.rxbuf, list)
sc                150 drivers/net/wireless/ath/ath9k/recv.c 		if (!ath_rx_edma_buf_link(sc, qtype))
sc                155 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_remove_buffer(struct ath_softc *sc,
sc                162 drivers/net/wireless/ath/ath9k/recv.c 	rx_edma = &sc->rx.rx_edma[qtype];
sc                167 drivers/net/wireless/ath/ath9k/recv.c 		list_add_tail(&bf->list, &sc->rx.rxbuf);
sc                171 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_edma_cleanup(struct ath_softc *sc)
sc                173 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                177 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_remove_buffer(sc, ATH9K_RX_QUEUE_LP);
sc                178 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_remove_buffer(sc, ATH9K_RX_QUEUE_HP);
sc                180 drivers/net/wireless/ath/ath9k/recv.c 	list_for_each_entry(bf, &sc->rx.rxbuf, list) {
sc                182 drivers/net/wireless/ath/ath9k/recv.c 			dma_unmap_single(sc->dev, bf->bf_buf_addr,
sc                198 drivers/net/wireless/ath/ath9k/recv.c static int ath_rx_edma_init(struct ath_softc *sc, int nbufs)
sc                200 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                201 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                210 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_edma_init_queue(&sc->rx.rx_edma[ATH9K_RX_QUEUE_LP],
sc                212 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_edma_init_queue(&sc->rx.rx_edma[ATH9K_RX_QUEUE_HP],
sc                216 drivers/net/wireless/ath/ath9k/recv.c 	bf = devm_kzalloc(sc->dev, size, GFP_KERNEL);
sc                220 drivers/net/wireless/ath/ath9k/recv.c 	INIT_LIST_HEAD(&sc->rx.rxbuf);
sc                232 drivers/net/wireless/ath/ath9k/recv.c 		bf->bf_buf_addr = dma_map_single(sc->dev, skb->data,
sc                235 drivers/net/wireless/ath/ath9k/recv.c 		if (unlikely(dma_mapping_error(sc->dev,
sc                246 drivers/net/wireless/ath/ath9k/recv.c 		list_add_tail(&bf->list, &sc->rx.rxbuf);
sc                252 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_edma_cleanup(sc);
sc                256 drivers/net/wireless/ath/ath9k/recv.c static void ath_edma_start_recv(struct ath_softc *sc)
sc                258 drivers/net/wireless/ath/ath9k/recv.c 	ath9k_hw_rxena(sc->sc_ah);
sc                259 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_addbuffer_edma(sc, ATH9K_RX_QUEUE_HP);
sc                260 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_addbuffer_edma(sc, ATH9K_RX_QUEUE_LP);
sc                261 drivers/net/wireless/ath/ath9k/recv.c 	ath_opmode_init(sc);
sc                262 drivers/net/wireless/ath/ath9k/recv.c 	ath9k_hw_startpcureceive(sc->sc_ah, sc->cur_chan->offchannel);
sc                265 drivers/net/wireless/ath/ath9k/recv.c static void ath_edma_stop_recv(struct ath_softc *sc)
sc                267 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_remove_buffer(sc, ATH9K_RX_QUEUE_HP);
sc                268 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_remove_buffer(sc, ATH9K_RX_QUEUE_LP);
sc                271 drivers/net/wireless/ath/ath9k/recv.c int ath_rx_init(struct ath_softc *sc, int nbufs)
sc                273 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                278 drivers/net/wireless/ath/ath9k/recv.c 	spin_lock_init(&sc->sc_pcu_lock);
sc                281 drivers/net/wireless/ath/ath9k/recv.c 			     sc->sc_ah->caps.rx_status_len;
sc                283 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)
sc                284 drivers/net/wireless/ath/ath9k/recv.c 		return ath_rx_edma_init(sc, nbufs);
sc                291 drivers/net/wireless/ath/ath9k/recv.c 	error = ath_descdma_setup(sc, &sc->rx.rxdma, &sc->rx.rxbuf,
sc                300 drivers/net/wireless/ath/ath9k/recv.c 	list_for_each_entry(bf, &sc->rx.rxbuf, list) {
sc                309 drivers/net/wireless/ath/ath9k/recv.c 		bf->bf_buf_addr = dma_map_single(sc->dev, skb->data,
sc                312 drivers/net/wireless/ath/ath9k/recv.c 		if (unlikely(dma_mapping_error(sc->dev,
sc                323 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.rxlink = NULL;
sc                326 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_cleanup(sc);
sc                331 drivers/net/wireless/ath/ath9k/recv.c void ath_rx_cleanup(struct ath_softc *sc)
sc                333 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                338 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
sc                339 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_edma_cleanup(sc);
sc                343 drivers/net/wireless/ath/ath9k/recv.c 	list_for_each_entry(bf, &sc->rx.rxbuf, list) {
sc                346 drivers/net/wireless/ath/ath9k/recv.c 			dma_unmap_single(sc->dev, bf->bf_buf_addr,
sc                375 drivers/net/wireless/ath/ath9k/recv.c u32 ath_calcrxfilter(struct ath_softc *sc)
sc                377 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                387 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->hw->conf.radar_enabled)
sc                390 drivers/net/wireless/ath/ath9k/recv.c 	spin_lock_bh(&sc->chan_lock);
sc                392 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->cur_chan->rxfilter & FIF_PROBE_REQ)
sc                395 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->sc_ah->is_monitoring)
sc                398 drivers/net/wireless/ath/ath9k/recv.c 	if ((sc->cur_chan->rxfilter & FIF_CONTROL) ||
sc                399 drivers/net/wireless/ath/ath9k/recv.c 	    sc->sc_ah->dynack.enabled)
sc                402 drivers/net/wireless/ath/ath9k/recv.c 	if ((sc->sc_ah->opmode == NL80211_IFTYPE_STATION) &&
sc                403 drivers/net/wireless/ath/ath9k/recv.c 	    (sc->cur_chan->nvifs <= 1) &&
sc                404 drivers/net/wireless/ath/ath9k/recv.c 	    !(sc->cur_chan->rxfilter & FIF_BCN_PRBRESP_PROMISC))
sc                406 drivers/net/wireless/ath/ath9k/recv.c 	else if (sc->sc_ah->opmode != NL80211_IFTYPE_OCB)
sc                409 drivers/net/wireless/ath/ath9k/recv.c 	if ((sc->sc_ah->opmode == NL80211_IFTYPE_AP) ||
sc                410 drivers/net/wireless/ath/ath9k/recv.c 	    (sc->cur_chan->rxfilter & FIF_PSPOLL))
sc                413 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->cur_chandef.width != NL80211_CHAN_WIDTH_20_NOHT)
sc                416 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->cur_chan->nvifs > 1 || (sc->cur_chan->rxfilter & FIF_OTHER_BSS)) {
sc                418 drivers/net/wireless/ath/ath9k/recv.c 		if (sc->sc_ah->hw_version.macVersion <= AR_SREV_VERSION_9160)
sc                423 drivers/net/wireless/ath/ath9k/recv.c 	if (AR_SREV_9550(sc->sc_ah) || AR_SREV_9531(sc->sc_ah) ||
sc                424 drivers/net/wireless/ath/ath9k/recv.c 	    AR_SREV_9561(sc->sc_ah))
sc                427 drivers/net/wireless/ath/ath9k/recv.c 	if (AR_SREV_9462(sc->sc_ah) || AR_SREV_9565(sc->sc_ah))
sc                434 drivers/net/wireless/ath/ath9k/recv.c 	spin_unlock_bh(&sc->chan_lock);
sc                440 drivers/net/wireless/ath/ath9k/recv.c void ath_startrecv(struct ath_softc *sc)
sc                442 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                446 drivers/net/wireless/ath/ath9k/recv.c 		ath_edma_start_recv(sc);
sc                450 drivers/net/wireless/ath/ath9k/recv.c 	if (list_empty(&sc->rx.rxbuf))
sc                453 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.buf_hold = NULL;
sc                454 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.rxlink = NULL;
sc                455 drivers/net/wireless/ath/ath9k/recv.c 	list_for_each_entry_safe(bf, tbf, &sc->rx.rxbuf, list) {
sc                456 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_buf_link(sc, bf, false);
sc                460 drivers/net/wireless/ath/ath9k/recv.c 	if (list_empty(&sc->rx.rxbuf))
sc                463 drivers/net/wireless/ath/ath9k/recv.c 	bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list);
sc                468 drivers/net/wireless/ath/ath9k/recv.c 	ath_opmode_init(sc);
sc                469 drivers/net/wireless/ath/ath9k/recv.c 	ath9k_hw_startpcureceive(ah, sc->cur_chan->offchannel);
sc                472 drivers/net/wireless/ath/ath9k/recv.c static void ath_flushrecv(struct ath_softc *sc)
sc                474 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)
sc                475 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_tasklet(sc, 1, true);
sc                476 drivers/net/wireless/ath/ath9k/recv.c 	ath_rx_tasklet(sc, 1, false);
sc                479 drivers/net/wireless/ath/ath9k/recv.c bool ath_stoprecv(struct ath_softc *sc)
sc                481 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                488 drivers/net/wireless/ath/ath9k/recv.c 	ath_flushrecv(sc);
sc                490 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)
sc                491 drivers/net/wireless/ath/ath9k/recv.c 		ath_edma_stop_recv(sc);
sc                493 drivers/net/wireless/ath/ath9k/recv.c 		sc->rx.rxlink = NULL;
sc                497 drivers/net/wireless/ath/ath9k/recv.c 		ath_dbg(ath9k_hw_common(sc->sc_ah), RESET,
sc                499 drivers/net/wireless/ath/ath9k/recv.c 		RESET_STAT_INC(sc, RESET_RX_DMA_ERROR);
sc                536 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_ps_beacon(struct ath_softc *sc, struct sk_buff *skb)
sc                538 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                544 drivers/net/wireless/ath/ath9k/recv.c 	sc->ps_flags &= ~PS_WAIT_FOR_BEACON;
sc                546 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->ps_flags & PS_BEACON_SYNC) {
sc                547 drivers/net/wireless/ath/ath9k/recv.c 		sc->ps_flags &= ~PS_BEACON_SYNC;
sc                553 drivers/net/wireless/ath/ath9k/recv.c 			if (sc->cur_chan == &sc->offchannel.chan)
sc                559 drivers/net/wireless/ath/ath9k/recv.c 		    !(WARN_ON_ONCE(sc->cur_chan->beacon.beacon_interval == 0)))
sc                560 drivers/net/wireless/ath/ath9k/recv.c 			ath9k_set_beacon(sc);
sc                562 drivers/net/wireless/ath/ath9k/recv.c 		ath9k_p2p_beacon_sync(sc);
sc                575 drivers/net/wireless/ath/ath9k/recv.c 		sc->ps_flags |= PS_WAIT_FOR_CAB | PS_WAIT_FOR_BEACON;
sc                579 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->ps_flags & PS_WAIT_FOR_CAB) {
sc                585 drivers/net/wireless/ath/ath9k/recv.c 		sc->ps_flags &= ~PS_WAIT_FOR_CAB;
sc                590 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_ps(struct ath_softc *sc, struct sk_buff *skb, bool mybeacon)
sc                593 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                598 drivers/net/wireless/ath/ath9k/recv.c 	if (((sc->ps_flags & PS_WAIT_FOR_BEACON) || ath9k_check_auto_sleep(sc))
sc                600 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_ps_beacon(sc, skb);
sc                601 drivers/net/wireless/ath/ath9k/recv.c 	} else if ((sc->ps_flags & PS_WAIT_FOR_CAB) &&
sc                610 drivers/net/wireless/ath/ath9k/recv.c 		sc->ps_flags &= ~(PS_WAIT_FOR_CAB | PS_WAIT_FOR_BEACON);
sc                613 drivers/net/wireless/ath/ath9k/recv.c 	} else if ((sc->ps_flags & PS_WAIT_FOR_PSPOLL_DATA) &&
sc                616 drivers/net/wireless/ath/ath9k/recv.c 		sc->ps_flags &= ~PS_WAIT_FOR_PSPOLL_DATA;
sc                619 drivers/net/wireless/ath/ath9k/recv.c 			sc->ps_flags & (PS_WAIT_FOR_BEACON |
sc                626 drivers/net/wireless/ath/ath9k/recv.c static bool ath_edma_get_buffers(struct ath_softc *sc,
sc                631 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_rx_edma *rx_edma = &sc->rx.rx_edma[qtype];
sc                632 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                645 drivers/net/wireless/ath/ath9k/recv.c 	dma_sync_single_for_cpu(sc->dev, bf->bf_buf_addr,
sc                651 drivers/net/wireless/ath/ath9k/recv.c 		dma_sync_single_for_device(sc->dev, bf->bf_buf_addr,
sc                659 drivers/net/wireless/ath/ath9k/recv.c 		list_add_tail(&bf->list, &sc->rx.rxbuf);
sc                660 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_edma_buf_link(sc, qtype);
sc                668 drivers/net/wireless/ath/ath9k/recv.c 			list_add_tail(&bf->list, &sc->rx.rxbuf);
sc                669 drivers/net/wireless/ath/ath9k/recv.c 			ath_rx_edma_buf_link(sc, qtype);
sc                679 drivers/net/wireless/ath/ath9k/recv.c static struct ath_rxbuf *ath_edma_get_next_rx_buf(struct ath_softc *sc,
sc                685 drivers/net/wireless/ath/ath9k/recv.c 	while (ath_edma_get_buffers(sc, qtype, rs, &bf)) {
sc                694 drivers/net/wireless/ath/ath9k/recv.c static struct ath_rxbuf *ath_get_next_rx_buf(struct ath_softc *sc,
sc                697 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                703 drivers/net/wireless/ath/ath9k/recv.c 	if (list_empty(&sc->rx.rxbuf)) {
sc                704 drivers/net/wireless/ath/ath9k/recv.c 		sc->rx.rxlink = NULL;
sc                708 drivers/net/wireless/ath/ath9k/recv.c 	bf = list_first_entry(&sc->rx.rxbuf, struct ath_rxbuf, list);
sc                709 drivers/net/wireless/ath/ath9k/recv.c 	if (bf == sc->rx.buf_hold)
sc                732 drivers/net/wireless/ath/ath9k/recv.c 		if (list_is_last(&bf->list, &sc->rx.rxbuf)) {
sc                733 drivers/net/wireless/ath/ath9k/recv.c 			sc->rx.rxlink = NULL;
sc                779 drivers/net/wireless/ath/ath9k/recv.c 	dma_sync_single_for_cpu(sc->dev, bf->bf_buf_addr,
sc                807 drivers/net/wireless/ath/ath9k/recv.c static int ath9k_rx_skb_preprocess(struct ath_softc *sc,
sc                813 drivers/net/wireless/ath/ath9k/recv.c 	struct ieee80211_hw *hw = sc->hw;
sc                814 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                817 drivers/net/wireless/ath/ath9k/recv.c 	bool discard_current = sc->rx.discard_next;
sc                827 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.discard_next = false;
sc                836 drivers/net/wireless/ath/ath9k/recv.c 		RX_STAT_INC(sc, rx_len_err);
sc                846 drivers/net/wireless/ath/ath9k/recv.c 		RX_STAT_INC(sc, rx_len_err);
sc                867 drivers/net/wireless/ath/ath9k/recv.c 	ath_debug_stat_rx(sc, rx_stats);
sc                882 drivers/net/wireless/ath/ath9k/recv.c 			ath9k_dfs_process_phyerr(sc, hdr, rx_stats,
sc                884 drivers/net/wireless/ath/ath9k/recv.c 		} else if (sc->spec_priv.spectral_mode != SPECTRAL_DISABLED &&
sc                885 drivers/net/wireless/ath/ath9k/recv.c 			   ath_cmn_process_fft(&sc->spec_priv, hdr, rx_stats,
sc                887 drivers/net/wireless/ath/ath9k/recv.c 			RX_STAT_INC(sc, rx_spectral);
sc                896 drivers/net/wireless/ath/ath9k/recv.c 	spin_lock_bh(&sc->chan_lock);
sc                898 drivers/net/wireless/ath/ath9k/recv.c 				 sc->cur_chan->rxfilter)) {
sc                899 drivers/net/wireless/ath/ath9k/recv.c 		spin_unlock_bh(&sc->chan_lock);
sc                902 drivers/net/wireless/ath/ath9k/recv.c 	spin_unlock_bh(&sc->chan_lock);
sc                905 drivers/net/wireless/ath/ath9k/recv.c 		RX_STAT_INC(sc, rx_beacons);
sc                922 drivers/net/wireless/ath/ath9k/recv.c 		RX_STAT_INC(sc, rx_rate_err);
sc                928 drivers/net/wireless/ath/ath9k/recv.c 			ath_chanctx_beacon_recv_ev(sc,
sc                942 drivers/net/wireless/ath/ath9k/recv.c 		sc->rx.num_pkts++;
sc                948 drivers/net/wireless/ath/ath9k/recv.c 	sc->rx.discard_next = rx_stats->rs_more;
sc                962 drivers/net/wireless/ath/ath9k/recv.c static void ath9k_antenna_check(struct ath_softc *sc,
sc                965 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc                976 drivers/net/wireless/ath/ath9k/recv.c 	if (sc->rx.defant != rs->rs_antenna) {
sc                977 drivers/net/wireless/ath/ath9k/recv.c 		if (++sc->rx.rxotherant >= 3)
sc                978 drivers/net/wireless/ath/ath9k/recv.c 			ath_setdefantenna(sc, rs->rs_antenna);
sc                980 drivers/net/wireless/ath/ath9k/recv.c 		sc->rx.rxotherant = 0;
sc                985 drivers/net/wireless/ath/ath9k/recv.c 			ath_ant_comb_scan(sc, rs);
sc                987 drivers/net/wireless/ath/ath9k/recv.c 		ath_ant_comb_scan(sc, rs);
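ath9k_antenna_check() above applies simple hysteresis: the default antenna is switched only after three consecutive frames arrive on the other antenna, and the counter resets otherwise. A compact sketch of that counter, with invented names:

    struct ant_state {
        unsigned int defant;       /* current default antenna */
        unsigned int rxotherant;   /* consecutive frames on the other antenna */
    };

    void antenna_check(struct ant_state *st, unsigned int rx_antenna)
    {
        if (st->defant != rx_antenna) {
            if (++st->rxotherant >= 3) {
                st->defant = rx_antenna;   /* ath_setdefantenna() analogue */
                st->rxotherant = 0;
            }
        } else {
            st->rxotherant = 0;
        }
    }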
sc                991 drivers/net/wireless/ath/ath9k/recv.c static void ath9k_apply_ampdu_details(struct ath_softc *sc,
sc                997 drivers/net/wireless/ath/ath9k/recv.c 		rxs->ampdu_reference = sc->rx.ampdu_ref;
sc               1001 drivers/net/wireless/ath/ath9k/recv.c 			sc->rx.ampdu_ref++;
sc               1009 drivers/net/wireless/ath/ath9k/recv.c static void ath_rx_count_airtime(struct ath_softc *sc,
sc               1014 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc               1030 drivers/net/wireless/ath/ath9k/recv.c 	sta = ieee80211_find_sta_by_ifaddr(sc->hw, hdr->addr2, NULL);
sc               1044 drivers/net/wireless/ath/ath9k/recv.c 		airtime += ath_pkt_duration(sc, rxs->rate_idx, len,
sc               1059 drivers/net/wireless/ath/ath9k/recv.c int ath_rx_tasklet(struct ath_softc *sc, int flush, bool hp)
sc               1064 drivers/net/wireless/ath/ath9k/recv.c 	struct ath_hw *ah = sc->sc_ah;
sc               1066 drivers/net/wireless/ath/ath9k/recv.c 	struct ieee80211_hw *hw = sc->hw;
sc               1092 drivers/net/wireless/ath/ath9k/recv.c 			bf = ath_edma_get_next_rx_buf(sc, &rs, qtype);
sc               1094 drivers/net/wireless/ath/ath9k/recv.c 			bf = ath_get_next_rx_buf(sc, &rs);
sc               1107 drivers/net/wireless/ath/ath9k/recv.c 		if (sc->rx.frag)
sc               1108 drivers/net/wireless/ath/ath9k/recv.c 			hdr_skb = sc->rx.frag;
sc               1115 drivers/net/wireless/ath/ath9k/recv.c 		retval = ath9k_rx_skb_preprocess(sc, hdr_skb, &rs, rxs,
sc               1129 drivers/net/wireless/ath/ath9k/recv.c 			RX_STAT_INC(sc, rx_oom_err);
sc               1134 drivers/net/wireless/ath/ath9k/recv.c 		new_buf_addr = dma_map_single(sc->dev, requeue_skb->data,
sc               1136 drivers/net/wireless/ath/ath9k/recv.c 		if (unlikely(dma_mapping_error(sc->dev, new_buf_addr))) {
sc               1142 drivers/net/wireless/ath/ath9k/recv.c 		dma_unmap_single(sc->dev, bf->bf_buf_addr,
sc               1157 drivers/net/wireless/ath/ath9k/recv.c 			RX_STAT_INC(sc, rx_frags);
sc               1163 drivers/net/wireless/ath/ath9k/recv.c 			if (sc->rx.frag) {
sc               1165 drivers/net/wireless/ath/ath9k/recv.c 				dev_kfree_skb_any(sc->rx.frag);
sc               1167 drivers/net/wireless/ath/ath9k/recv.c 				RX_STAT_INC(sc, rx_too_many_frags_err);
sc               1170 drivers/net/wireless/ath/ath9k/recv.c 			sc->rx.frag = skb;
sc               1174 drivers/net/wireless/ath/ath9k/recv.c 		if (sc->rx.frag) {
sc               1179 drivers/net/wireless/ath/ath9k/recv.c 				RX_STAT_INC(sc, rx_oom_err);
sc               1183 drivers/net/wireless/ath/ath9k/recv.c 			sc->rx.frag = NULL;
sc               1194 drivers/net/wireless/ath/ath9k/recv.c 		spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc               1195 drivers/net/wireless/ath/ath9k/recv.c 		if ((sc->ps_flags & (PS_WAIT_FOR_BEACON |
sc               1198 drivers/net/wireless/ath/ath9k/recv.c 		    ath9k_check_auto_sleep(sc))
sc               1199 drivers/net/wireless/ath/ath9k/recv.c 			ath_rx_ps(sc, skb, rs.is_mybeacon);
sc               1200 drivers/net/wireless/ath/ath9k/recv.c 		spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc               1202 drivers/net/wireless/ath/ath9k/recv.c 		ath9k_antenna_check(sc, &rs);
sc               1203 drivers/net/wireless/ath/ath9k/recv.c 		ath9k_apply_ampdu_details(sc, &rs, rxs);
sc               1204 drivers/net/wireless/ath/ath9k/recv.c 		ath_debug_rate_stats(sc, &rs, skb);
sc               1205 drivers/net/wireless/ath/ath9k/recv.c 		ath_rx_count_airtime(sc, &rs, skb);
sc               1209 drivers/net/wireless/ath/ath9k/recv.c 			ath_dynack_sample_ack_ts(sc->sc_ah, skb, rs.rs_tstamp);
sc               1214 drivers/net/wireless/ath/ath9k/recv.c 		if (sc->rx.frag) {
sc               1215 drivers/net/wireless/ath/ath9k/recv.c 			dev_kfree_skb_any(sc->rx.frag);
sc               1216 drivers/net/wireless/ath/ath9k/recv.c 			sc->rx.frag = NULL;
sc               1219 drivers/net/wireless/ath/ath9k/recv.c 		list_add_tail(&bf->list, &sc->rx.rxbuf);
sc               1222 drivers/net/wireless/ath/ath9k/recv.c 			ath_rx_buf_relink(sc, bf, flush);
sc               1226 drivers/net/wireless/ath/ath9k/recv.c 			ath_rx_edma_buf_link(sc, qtype);
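The tail of the rx tasklet handles frames that span more than one descriptor: the first part is stashed in sc->rx.frag, the continuation is appended, and every error path frees the stash (the driver also counts rx_too_many_frags_err when a stale fragment has to be dropped). The following is an illustrative userspace sketch of that bookkeeping with malloc/memcpy standing in for skb handling; rx_reasm and rx_handle_part are invented names, and the real code only ever joins two parts.

    #include <stdlib.h>
    #include <string.h>

    struct rx_reasm {
        unsigned char *frag;   /* pending first part, NULL when idle */
        size_t frag_len;
    };

    /* Returns a complete frame (caller frees) or NULL while more data is pending
     * or on allocation failure; *out_len is only valid on a non-NULL return. */
    unsigned char *rx_handle_part(struct rx_reasm *r, const unsigned char *data,
                                  size_t len, int more, size_t *out_len)
    {
        if (more) {
            free(r->frag);              /* a stale stash is simply dropped here */
            r->frag = malloc(len);
            if (!r->frag)
                return NULL;
            memcpy(r->frag, data, len);
            r->frag_len = len;
            return NULL;
        }

        if (r->frag) {
            unsigned char *full = realloc(r->frag, r->frag_len + len);
            if (!full) {
                free(r->frag);
                r->frag = NULL;
                return NULL;
            }
            memcpy(full + r->frag_len, data, len);
            *out_len = r->frag_len + len;
            r->frag = NULL;
            r->frag_len = 0;
            return full;
        }

        /* single-buffer frame */
        unsigned char *full = malloc(len);
        if (!full)
            return NULL;
        memcpy(full, data, len);
        *out_len = len;
        return full;
    }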
sc                 29 drivers/net/wireless/ath/ath9k/rng.c static int ath9k_rng_data_read(struct ath_softc *sc, u32 *buf, u32 buf_size)
sc                 32 drivers/net/wireless/ath/ath9k/rng.c 	u32  v1, v2, rng_last = sc->rng_last;
sc                 33 drivers/net/wireless/ath/ath9k/rng.c 	struct ath_hw *ah = sc->sc_ah;
sc                 35 drivers/net/wireless/ath/ath9k/rng.c 	ath9k_ps_wakeup(sc);
sc                 53 drivers/net/wireless/ath/ath9k/rng.c 	ath9k_ps_restore(sc);
sc                 55 drivers/net/wireless/ath/ath9k/rng.c 	sc->rng_last = rng_last;
sc                 77 drivers/net/wireless/ath/ath9k/rng.c 	struct ath_softc *sc = data;
sc                 86 drivers/net/wireless/ath/ath9k/rng.c 		bytes_read = ath9k_rng_data_read(sc, rng_buf,
sc                105 drivers/net/wireless/ath/ath9k/rng.c 	sc->rng_task = NULL;
sc                110 drivers/net/wireless/ath/ath9k/rng.c void ath9k_rng_start(struct ath_softc *sc)
sc                112 drivers/net/wireless/ath/ath9k/rng.c 	struct ath_hw *ah = sc->sc_ah;
sc                114 drivers/net/wireless/ath/ath9k/rng.c 	if (sc->rng_task)
sc                120 drivers/net/wireless/ath/ath9k/rng.c 	sc->rng_task = kthread_run(ath9k_rng_kthread, sc, "ath9k-hwrng");
sc                121 drivers/net/wireless/ath/ath9k/rng.c 	if (IS_ERR(sc->rng_task))
sc                122 drivers/net/wireless/ath/ath9k/rng.c 		sc->rng_task = NULL;
sc                125 drivers/net/wireless/ath/ath9k/rng.c void ath9k_rng_stop(struct ath_softc *sc)
sc                127 drivers/net/wireless/ath/ath9k/rng.c 	if (sc->rng_task) {
sc                128 drivers/net/wireless/ath/ath9k/rng.c 		kthread_stop(sc->rng_task);
sc                129 drivers/net/wireless/ath/ath9k/rng.c 		sc->rng_task = NULL;
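The rng.c entries show a kthread that wakes the chip, samples the ADC noise registers, skips values that repeat the previous read (sc->rng_last), hands the result to the hwrng core and sleeps in between. A rough userspace analogue of that polling loop, under assumptions: a pthread and rand() stand in for the kthread and the registers, and every name is invented.

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdlib.h>
    #include <time.h>

    static atomic_bool rng_stop;
    static unsigned int rng_last;

    static unsigned int read_noise_source(void)
    {
        return (unsigned int)rand();   /* placeholder for the ADC register reads */
    }

    void *rng_thread(void *arg)
    {
        struct timespec pause = { .tv_sec = 0, .tv_nsec = 1000 * 1000 };  /* 1 ms */

        (void)arg;
        while (!atomic_load(&rng_stop)) {
            unsigned int v = read_noise_source();

            if (v != rng_last) {       /* identical to last read: no fresh entropy */
                rng_last = v;
                /* ...the driver copies fresh words into rng_buf and credits
                 *    them to the kernel entropy pool here... */
            }
            nanosleep(&pause, NULL);   /* the kthread also sleeps between reads */
        }
        return NULL;
    }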
sc                 19 drivers/net/wireless/ath/ath9k/tx99.c static void ath9k_tx99_stop(struct ath_softc *sc)
sc                 21 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_hw *ah = sc->sc_ah;
sc                 24 drivers/net/wireless/ath/ath9k/tx99.c 	ath_drain_all_txq(sc);
sc                 25 drivers/net/wireless/ath/ath9k/tx99.c 	ath_startrecv(sc);
sc                 30 drivers/net/wireless/ath/ath9k/tx99.c 	ieee80211_wake_queues(sc->hw);
sc                 32 drivers/net/wireless/ath/ath9k/tx99.c 	kfree_skb(sc->tx99_skb);
sc                 33 drivers/net/wireless/ath/ath9k/tx99.c 	sc->tx99_skb = NULL;
sc                 34 drivers/net/wireless/ath/ath9k/tx99.c 	sc->tx99_state = false;
sc                 36 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_hw_tx99_stop(sc->sc_ah);
sc                 40 drivers/net/wireless/ath/ath9k/tx99.c static struct sk_buff *ath9k_build_tx99_skb(struct ath_softc *sc)
sc                 52 drivers/net/wireless/ath/ath9k/tx99.c 	struct ieee80211_hw *hw = sc->hw;
sc                 53 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_hw *ah = sc->sc_ah;
sc                 75 drivers/net/wireless/ath/ath9k/tx99.c 	if (sc->tx99_vif) {
sc                 76 drivers/net/wireless/ath/ath9k/tx99.c 		avp = (struct ath_vif *) sc->tx99_vif->drv_priv;
sc                 83 drivers/net/wireless/ath/ath9k/tx99.c 	tx_info->band = sc->cur_chan->chandef.chan->band;
sc                 85 drivers/net/wireless/ath/ath9k/tx99.c 	tx_info->control.vif = sc->tx99_vif;
sc                 98 drivers/net/wireless/ath/ath9k/tx99.c static void ath9k_tx99_deinit(struct ath_softc *sc)
sc                100 drivers/net/wireless/ath/ath9k/tx99.c 	ath_reset(sc, NULL);
sc                102 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_ps_wakeup(sc);
sc                103 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_tx99_stop(sc);
sc                104 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_ps_restore(sc);
sc                107 drivers/net/wireless/ath/ath9k/tx99.c static int ath9k_tx99_init(struct ath_softc *sc)
sc                109 drivers/net/wireless/ath/ath9k/tx99.c 	struct ieee80211_hw *hw = sc->hw;
sc                110 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_hw *ah = sc->sc_ah;
sc                121 drivers/net/wireless/ath/ath9k/tx99.c 	sc->tx99_skb = ath9k_build_tx99_skb(sc);
sc                122 drivers/net/wireless/ath/ath9k/tx99.c 	if (!sc->tx99_skb)
sc                126 drivers/net/wireless/ath/ath9k/tx99.c 	txctl.txq = sc->tx.txq_map[IEEE80211_AC_VO];
sc                128 drivers/net/wireless/ath/ath9k/tx99.c 	ath_reset(sc, NULL);
sc                130 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_ps_wakeup(sc);
sc                133 drivers/net/wireless/ath/ath9k/tx99.c 	ath_drain_all_txq(sc);
sc                134 drivers/net/wireless/ath/ath9k/tx99.c 	ath_stoprecv(sc);
sc                136 drivers/net/wireless/ath/ath9k/tx99.c 	sc->tx99_state = true;
sc                140 drivers/net/wireless/ath/ath9k/tx99.c 	if (sc->tx99_power == MAX_RATE_POWER + 1)
sc                141 drivers/net/wireless/ath/ath9k/tx99.c 		sc->tx99_power = MAX_RATE_POWER;
sc                143 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_hw_tx99_set_txpower(ah, sc->tx99_power);
sc                144 drivers/net/wireless/ath/ath9k/tx99.c 	r = ath9k_tx99_send(sc, sc->tx99_skb, &txctl);
sc                151 drivers/net/wireless/ath/ath9k/tx99.c 		sc->tx99_power,
sc                152 drivers/net/wireless/ath/ath9k/tx99.c 		sc->tx99_power / 2);
sc                162 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_softc *sc = file->private_data;
sc                166 drivers/net/wireless/ath/ath9k/tx99.c 	len = sprintf(buf, "%d\n", sc->tx99_state);
sc                173 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_softc *sc = file->private_data;
sc                174 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                183 drivers/net/wireless/ath/ath9k/tx99.c 	if (sc->cur_chan->nvifs > 1)
sc                195 drivers/net/wireless/ath/ath9k/tx99.c 	mutex_lock(&sc->mutex);
sc                197 drivers/net/wireless/ath/ath9k/tx99.c 	if (start == sc->tx99_state) {
sc                201 drivers/net/wireless/ath/ath9k/tx99.c 		ath9k_tx99_deinit(sc);
sc                205 drivers/net/wireless/ath/ath9k/tx99.c 		ath9k_tx99_deinit(sc);
sc                209 drivers/net/wireless/ath/ath9k/tx99.c 	r = ath9k_tx99_init(sc);
sc                211 drivers/net/wireless/ath/ath9k/tx99.c 		mutex_unlock(&sc->mutex);
sc                215 drivers/net/wireless/ath/ath9k/tx99.c 	mutex_unlock(&sc->mutex);
sc                231 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_softc *sc = file->private_data;
sc                236 drivers/net/wireless/ath/ath9k/tx99.c 		      sc->tx99_power,
sc                237 drivers/net/wireless/ath/ath9k/tx99.c 		      sc->tx99_power / 2);
sc                246 drivers/net/wireless/ath/ath9k/tx99.c 	struct ath_softc *sc = file->private_data;
sc                257 drivers/net/wireless/ath/ath9k/tx99.c 	sc->tx99_power = tx_power;
sc                259 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_ps_wakeup(sc);
sc                260 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_hw_tx99_set_txpower(sc->sc_ah, sc->tx99_power);
sc                261 drivers/net/wireless/ath/ath9k/tx99.c 	ath9k_ps_restore(sc);
sc                274 drivers/net/wireless/ath/ath9k/tx99.c void ath9k_tx99_init_debug(struct ath_softc *sc)
sc                276 drivers/net/wireless/ath/ath9k/tx99.c 	if (!AR_SREV_9280_20_OR_LATER(sc->sc_ah))
sc                280 drivers/net/wireless/ath/ath9k/tx99.c 			    sc->debug.debugfs_phy, sc,
sc                283 drivers/net/wireless/ath/ath9k/tx99.c 			    sc->debug.debugfs_phy, sc,
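The tx99 debugfs files toggle continuous-transmit mode under sc->mutex and manage sc->tx99_power, which is kept in half-dB steps and reported both raw and as dBm (value / 2); ath9k_tx99_init() also pins the one-past-maximum value back down to MAX_RATE_POWER. A tiny sketch of that unit handling, using a general clamp rather than the driver's exact check; MAX_POWER_HALF_DB and set_tx99_power() are invented names.

    #include <stdio.h>

    #define MAX_POWER_HALF_DB 63   /* stand-in for the driver's MAX_RATE_POWER */

    static unsigned int tx99_power;

    void set_tx99_power(unsigned int half_db)
    {
        if (half_db > MAX_POWER_HALF_DB)
            half_db = MAX_POWER_HALF_DB;   /* clamp to the allowed maximum */
        tx99_power = half_db;
        printf("tx power: %u (%u dBm)\n", tx99_power, tx99_power / 2);
    }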
sc                 33 drivers/net/wireless/ath/ath9k/wow.c static u8 ath9k_wow_map_triggers(struct ath_softc *sc,
sc                 50 drivers/net/wireless/ath/ath9k/wow.c static int ath9k_wow_add_disassoc_deauth_pattern(struct ath_softc *sc)
sc                 52 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_hw *ah = sc->sc_ah;
sc                138 drivers/net/wireless/ath/ath9k/wow.c static int ath9k_wow_add_pattern(struct ath_softc *sc,
sc                141 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_hw *ah = sc->sc_ah;
sc                170 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_softc *sc = hw->priv;
sc                171 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_hw *ah = sc->sc_ah;
sc                176 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_deinit_channel_context(sc);
sc                178 drivers/net/wireless/ath/ath9k/wow.c 	mutex_lock(&sc->mutex);
sc                192 drivers/net/wireless/ath/ath9k/wow.c 	if (sc->cur_chan->nvifs > 1) {
sc                213 drivers/net/wireless/ath/ath9k/wow.c 	triggers = ath9k_wow_map_triggers(sc, wowlan);
sc                220 drivers/net/wireless/ath/ath9k/wow.c 	ath_cancel_work(sc);
sc                221 drivers/net/wireless/ath/ath9k/wow.c 	ath_stop_ani(sc);
sc                223 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_ps_wakeup(sc);
sc                225 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_stop_btcoex(sc);
sc                231 drivers/net/wireless/ath/ath9k/wow.c 	ret = ath9k_wow_add_disassoc_deauth_pattern(sc);
sc                239 drivers/net/wireless/ath/ath9k/wow.c 		ret = ath9k_wow_add_pattern(sc, wowlan);
sc                247 drivers/net/wireless/ath/ath9k/wow.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                253 drivers/net/wireless/ath/ath9k/wow.c 	sc->wow_intr_before_sleep = ah->imask;
sc                260 drivers/net/wireless/ath/ath9k/wow.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                266 drivers/net/wireless/ath/ath9k/wow.c 	synchronize_irq(sc->irq);
sc                267 drivers/net/wireless/ath/ath9k/wow.c 	tasklet_kill(&sc->intr_tq);
sc                271 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_ps_restore(sc);
sc                276 drivers/net/wireless/ath/ath9k/wow.c 	mutex_unlock(&sc->mutex);
sc                282 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_softc *sc = hw->priv;
sc                283 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_hw *ah = sc->sc_ah;
sc                287 drivers/net/wireless/ath/ath9k/wow.c 	mutex_lock(&sc->mutex);
sc                289 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_ps_wakeup(sc);
sc                291 drivers/net/wireless/ath/ath9k/wow.c 	spin_lock_bh(&sc->sc_pcu_lock);
sc                294 drivers/net/wireless/ath/ath9k/wow.c 	ah->imask = sc->wow_intr_before_sleep;
sc                298 drivers/net/wireless/ath/ath9k/wow.c 	spin_unlock_bh(&sc->sc_pcu_lock);
sc                303 drivers/net/wireless/ath/ath9k/wow.c 	ath_restart_work(sc);
sc                304 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_start_btcoex(sc);
sc                308 drivers/net/wireless/ath/ath9k/wow.c 	ath9k_ps_restore(sc);
sc                309 drivers/net/wireless/ath/ath9k/wow.c 	mutex_unlock(&sc->mutex);
sc                316 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_softc *sc = hw->priv;
sc                317 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                319 drivers/net/wireless/ath/ath9k/wow.c 	mutex_lock(&sc->mutex);
sc                320 drivers/net/wireless/ath/ath9k/wow.c 	device_set_wakeup_enable(sc->dev, enabled);
sc                321 drivers/net/wireless/ath/ath9k/wow.c 	mutex_unlock(&sc->mutex);
sc                329 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_softc *sc = hw->priv;
sc                330 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_hw *ah = sc->sc_ah;
sc                332 drivers/net/wireless/ath/ath9k/wow.c 	if ((sc->driver_data & ATH9K_PCI_WOW) || sc->force_wow) {
sc                338 drivers/net/wireless/ath/ath9k/wow.c 		device_init_wakeup(sc->dev, 1);
sc                344 drivers/net/wireless/ath/ath9k/wow.c 	struct ath_softc *sc = hw->priv;
sc                346 drivers/net/wireless/ath/ath9k/wow.c 	if ((sc->driver_data & ATH9K_PCI_WOW) || sc->force_wow)
sc                347 drivers/net/wireless/ath/ath9k/wow.c 		device_init_wakeup(sc->dev, 0);
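The wow.c suspend path saves the live interrupt mask in sc->wow_intr_before_sleep, arms the wake patterns, narrows the mask for sleep and quiesces the IRQ and tasklet; resume restores the saved mask and restarts normal work. A minimal sketch of that save/program/restore sequence; fake_hw and the IRQ_* bits are invented stand-ins for ah->imask.

    #include <stdint.h>

    struct fake_hw {
        uint32_t imask;             /* currently programmed interrupt mask */
        uint32_t wow_imask_saved;   /* stands in for sc->wow_intr_before_sleep */
    };

    #define IRQ_RX   0x1u
    #define IRQ_TX   0x2u
    #define IRQ_WOW  0x4u

    void wow_suspend(struct fake_hw *hw)
    {
        hw->wow_imask_saved = hw->imask;   /* save what was enabled */
        hw->imask = IRQ_WOW;               /* only the wake event may fire */
    }

    void wow_resume(struct fake_hw *hw)
    {
        hw->imask = hw->wow_imask_saved;   /* back to the pre-sleep mask */
    }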
sc                 50 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
sc                 52 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb,
sc                 55 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
sc                 59 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
sc                 61 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf,
sc                 64 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
sc                 66 drivers/net/wireless/ath/ath9k/xmit.c static struct ath_buf *ath_tx_setup_buffer(struct ath_softc *sc,
sc                101 drivers/net/wireless/ath/ath9k/xmit.c void ath_txq_unlock_complete(struct ath_softc *sc, struct ath_txq *txq)
sc                104 drivers/net/wireless/ath/ath9k/xmit.c 	struct ieee80211_hw *hw = sc->hw;
sc                116 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_queue_tid(struct ath_softc *sc, struct ath_atx_tid *tid)
sc                121 drivers/net/wireless/ath/ath9k/xmit.c 	ieee80211_schedule_txq(sc->hw, queue);
sc                126 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = hw->priv;
sc                127 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc                135 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc                136 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_schedule(sc, txq);
sc                137 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock(sc, txq);
sc                164 drivers/net/wireless/ath/ath9k/xmit.c static void ath_txq_skb_done(struct ath_softc *sc, struct ath_txq *txq,
sc                173 drivers/net/wireless/ath/ath9k/xmit.c 	txq = sc->tx.txq_map[q];
sc                180 drivers/net/wireless/ath/ath9k/xmit.c ath_get_skb_tid(struct ath_softc *sc, struct ath_node *an, struct sk_buff *skb)
sc                190 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = tid->an->sc;
sc                191 drivers/net/wireless/ath/ath9k/xmit.c 	struct ieee80211_hw *hw = sc->hw;
sc                211 drivers/net/wireless/ath/ath9k/xmit.c 	if (tid->txq == sc->tx.txq_map[q]) {
sc                232 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid)
sc                250 drivers/net/wireless/ath/ath9k/xmit.c 			ath_txq_skb_done(sc, txq, skb);
sc                251 drivers/net/wireless/ath/ath9k/xmit.c 			ieee80211_free_txskb(sc->hw, skb);
sc                256 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_update_baw(sc, tid, bf);
sc                261 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0);
sc                265 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock(sc, txq);
sc                267 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, txq);
sc                271 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
sc                294 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_addto_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
sc                316 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tid_drain(struct ath_softc *sc, struct ath_txq *txq,
sc                335 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_complete(sc, skb, ATH_TX_ERROR, txq, NULL);
sc                340 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0);
sc                344 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_set_retry(struct ath_softc *sc, struct ath_txq *txq,
sc                352 drivers/net/wireless/ath/ath9k/xmit.c 	TX_STAT_INC(sc, txq->axq_qnum, a_retries);
sc                360 drivers/net/wireless/ath/ath9k/xmit.c 	dma_sync_single_for_device(sc->dev, bf->bf_buf_addr,
sc                364 drivers/net/wireless/ath/ath9k/xmit.c static struct ath_buf *ath_tx_get_buffer(struct ath_softc *sc)
sc                368 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_bh(&sc->tx.txbuflock);
sc                370 drivers/net/wireless/ath/ath9k/xmit.c 	if (unlikely(list_empty(&sc->tx.txbuf))) {
sc                371 drivers/net/wireless/ath/ath9k/xmit.c 		spin_unlock_bh(&sc->tx.txbuflock);
sc                375 drivers/net/wireless/ath/ath9k/xmit.c 	bf = list_first_entry(&sc->tx.txbuf, struct ath_buf, list);
sc                378 drivers/net/wireless/ath/ath9k/xmit.c 	spin_unlock_bh(&sc->tx.txbuflock);
sc                383 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_return_buffer(struct ath_softc *sc, struct ath_buf *bf)
sc                385 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_bh(&sc->tx.txbuflock);
sc                386 drivers/net/wireless/ath/ath9k/xmit.c 	list_add_tail(&bf->list, &sc->tx.txbuf);
sc                387 drivers/net/wireless/ath/ath9k/xmit.c 	spin_unlock_bh(&sc->tx.txbuflock);
sc                390 drivers/net/wireless/ath/ath9k/xmit.c static struct ath_buf* ath_clone_txbuf(struct ath_softc *sc, struct ath_buf *bf)
sc                394 drivers/net/wireless/ath/ath9k/xmit.c 	tbf = ath_tx_get_buffer(sc);
sc                402 drivers/net/wireless/ath/ath9k/xmit.c 	memcpy(tbf->bf_desc, bf->bf_desc, sc->sc_ah->caps.tx_desc_len);
sc                409 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_count_frames(struct ath_softc *sc, struct ath_buf *bf,
sc                439 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq,
sc                479 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, ts, 0);
sc                516 drivers/net/wireless/ath/ath9k/xmit.c 			if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION)
sc                523 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_count_frames(sc, bf, ts, txok, &nframes, &nbad);
sc                552 drivers/net/wireless/ath/ath9k/xmit.c 				ath_tx_set_retry(sc, txq, bf->bf_mpdu,
sc                576 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_update_baw(sc, tid, bf);
sc                580 drivers/net/wireless/ath/ath9k/xmit.c 				ath_tx_rc_status(sc, bf, ts, nframes, nbad, txok);
sc                583 drivers/net/wireless/ath/ath9k/xmit.c 					ath_dynack_sample_tx_ts(sc->sc_ah,
sc                588 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_complete_buf(sc, bf, txq, &bf_head, sta, ts,
sc                599 drivers/net/wireless/ath/ath9k/xmit.c 				tbf = ath_clone_txbuf(sc, bf_last);
sc                606 drivers/net/wireless/ath/ath9k/xmit.c 					ath_tx_update_baw(sc, tid, bf);
sc                608 drivers/net/wireless/ath/ath9k/xmit.c 					ath_tx_complete_buf(sc, bf, txq,
sc                636 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_queue_tid(sc, tid);
sc                648 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock(sc, txq);
sc                650 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, txq);
sc                654 drivers/net/wireless/ath/ath9k/xmit.c 		ath9k_queue_reset(sc, RESET_TYPE_TX_ERROR);
sc                663 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_count_airtime(struct ath_softc *sc,
sc                674 drivers/net/wireless/ath/ath9k/xmit.c 		int rate_dur = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, i);
sc                681 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_process_buffer(struct ath_softc *sc, struct ath_txq *txq,
sc                685 drivers/net/wireless/ath/ath9k/xmit.c 	struct ieee80211_hw *hw = sc->hw;
sc                700 drivers/net/wireless/ath/ath9k/xmit.c 	ts->duration = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc,
sc                707 drivers/net/wireless/ath/ath9k/xmit.c 		tid = ath_get_skb_tid(sc, an, bf->bf_mpdu);
sc                708 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_count_airtime(sc, sta, bf, ts, tid->tidno);
sc                718 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_rc_status(sc, bf, ts, 1, txok ? 0 : 1, txok);
sc                719 drivers/net/wireless/ath/ath9k/xmit.c 			ath_dynack_sample_tx_ts(sc->sc_ah, bf->bf_mpdu, ts,
sc                722 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_complete_buf(sc, bf, txq, bf_head, sta, ts, txok);
sc                724 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok);
sc                727 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_schedule(sc, txq);
sc                752 drivers/net/wireless/ath/ath9k/xmit.c static u32 ath_lookup_rate(struct ath_softc *sc, struct ath_buf *bf,
sc                792 drivers/net/wireless/ath/ath9k/xmit.c 		frmlen = sc->tx.max_aggr_framelen[q][modeidx][rates[i].idx];
sc                809 drivers/net/wireless/ath/ath9k/xmit.c 	bt_aggr_limit = ath9k_btcoex_aggr_limit(sc, max_4ms_framelen);
sc                823 drivers/net/wireless/ath/ath9k/xmit.c static int ath_compute_num_delims(struct ath_softc *sc, struct ath_atx_tid *tid,
sc                844 drivers/net/wireless/ath/ath9k/xmit.c 	    !(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA))
sc                851 drivers/net/wireless/ath/ath9k/xmit.c 	if (first_subfrm && !AR_SREV_9580_10_OR_LATER(sc->sc_ah) &&
sc                852 drivers/net/wireless/ath/ath9k/xmit.c 	    (sc->sc_ah->ent_mode & AR_ENT_OTP_MIN_PKT_SIZE_DISABLE))
sc                894 drivers/net/wireless/ath/ath9k/xmit.c ath_tx_get_tid_subframe(struct ath_softc *sc, struct ath_txq *txq,
sc                912 drivers/net/wireless/ath/ath9k/xmit.c 			bf = ath_tx_setup_buffer(sc, txq, tid, skb);
sc                917 drivers/net/wireless/ath/ath9k/xmit.c 			ath_txq_skb_done(sc, txq, skb);
sc                918 drivers/net/wireless/ath/ath9k/xmit.c 			ieee80211_free_txskb(sc->hw, skb);
sc                967 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_update_baw(sc, tid, bf);
sc                968 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0);
sc                973 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_addto_baw(sc, tid, bf);
sc                983 drivers/net/wireless/ath/ath9k/xmit.c ath_tx_form_aggr(struct ath_softc *sc, struct ath_txq *txq,
sc                998 drivers/net/wireless/ath/ath9k/xmit.c 	aggr_limit = ath_lookup_rate(sc, bf, tid);
sc               1025 drivers/net/wireless/ath/ath9k/xmit.c 		ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen,
sc               1041 drivers/net/wireless/ath/ath9k/xmit.c 		ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf);
sc               1056 drivers/net/wireless/ath/ath9k/xmit.c 		TX_STAT_INC(sc, txq->axq_qnum, a_aggr);
sc               1069 drivers/net/wireless/ath/ath9k/xmit.c u32 ath_pkt_duration(struct ath_softc *sc, u8 rix, int pktlen,
sc               1109 drivers/net/wireless/ath/ath9k/xmit.c void ath_update_max_aggr_framelen(struct ath_softc *sc, int queue, int txop)
sc               1118 drivers/net/wireless/ath/ath9k/xmit.c 	cur_ht20 = sc->tx.max_aggr_framelen[queue][MCS_HT20];
sc               1119 drivers/net/wireless/ath/ath9k/xmit.c 	cur_ht20_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT20_SGI];
sc               1120 drivers/net/wireless/ath/ath9k/xmit.c 	cur_ht40 = sc->tx.max_aggr_framelen[queue][MCS_HT40];
sc               1121 drivers/net/wireless/ath/ath9k/xmit.c 	cur_ht40_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT40_SGI];
sc               1130 drivers/net/wireless/ath/ath9k/xmit.c static u8 ath_get_rate_txpower(struct ath_softc *sc, struct ath_buf *bf,
sc               1137 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1139 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->tx99_state || !ah->tpc_enabled)
sc               1204 drivers/net/wireless/ath/ath9k/xmit.c static void ath_buf_set_rate(struct ath_softc *sc, struct ath_buf *bf,
sc               1207 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1215 drivers/net/wireless/ath/ath9k/xmit.c 	u32 rts_thresh = sc->hw->wiphy->rts_threshold;
sc               1268 drivers/net/wireless/ath/ath9k/xmit.c 			info->rates[i].ChSel = ath_txchainmask_reduction(sc,
sc               1270 drivers/net/wireless/ath/ath9k/xmit.c 			info->rates[i].PktDuration = ath_pkt_duration(sc, rix, len,
sc               1275 drivers/net/wireless/ath/ath9k/xmit.c 			info->txpower[i] = ath_get_rate_txpower(sc, bf, rix,
sc               1299 drivers/net/wireless/ath/ath9k/xmit.c 			info->rates[i].ChSel = ath_txchainmask_reduction(sc,
sc               1302 drivers/net/wireless/ath/ath9k/xmit.c 		info->rates[i].PktDuration = ath9k_hw_computetxtime(sc->sc_ah,
sc               1306 drivers/net/wireless/ath/ath9k/xmit.c 		info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, false,
sc               1311 drivers/net/wireless/ath/ath9k/xmit.c 	if (bf_isaggr(bf) && (len > sc->sc_ah->caps.rts_aggr_limit))
sc               1342 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_fill_desc(struct ath_softc *sc, struct ath_buf *bf,
sc               1345 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1348 drivers/net/wireless/ath/ath9k/xmit.c 	u32 rts_thresh = sc->hw->wiphy->rts_threshold;
sc               1366 drivers/net/wireless/ath/ath9k/xmit.c 			info.link = (sc->tx99_state) ? bf->bf_daddr : 0;
sc               1371 drivers/net/wireless/ath/ath9k/xmit.c 			if (!sc->tx99_state)
sc               1374 drivers/net/wireless/ath/ath9k/xmit.c 			    txq == sc->tx.uapsdq)
sc               1405 drivers/net/wireless/ath/ath9k/xmit.c 			ath_buf_set_rate(sc, bf, &info, len, rts);
sc               1435 drivers/net/wireless/ath/ath9k/xmit.c ath_tx_form_burst(struct ath_softc *sc, struct ath_txq *txq,
sc               1454 drivers/net/wireless/ath/ath9k/xmit.c 		ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf);
sc               1468 drivers/net/wireless/ath/ath9k/xmit.c static int ath_tx_sched_aggr(struct ath_softc *sc, struct ath_txq *txq,
sc               1479 drivers/net/wireless/ath/ath9k/xmit.c 	ret = ath_tx_get_tid_subframe(sc, txq, tid, &bf);
sc               1493 drivers/net/wireless/ath/ath9k/xmit.c 		aggr_len = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf);
sc               1495 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_form_burst(sc, txq, tid, &bf_q, bf);
sc               1505 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_fill_desc(sc, bf, txq, aggr_len);
sc               1506 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_txqaddbuf(sc, txq, &bf_q, false);
sc               1510 drivers/net/wireless/ath/ath9k/xmit.c int ath_tx_aggr_start(struct ath_softc *sc, struct ieee80211_sta *sta,
sc               1513 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1525 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               1545 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock_complete(sc, txq);
sc               1550 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid)
sc               1552 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1559 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               1561 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_flush_tid(sc, txtid);
sc               1562 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock_complete(sc, txq);
sc               1565 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_aggr_sleep(struct ieee80211_sta *sta, struct ath_softc *sc,
sc               1568 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1583 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_aggr_wakeup(struct ath_softc *sc, struct ath_node *an)
sc               1585 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1596 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, txq);
sc               1599 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_queue_tid(sc, tid);
sc               1600 drivers/net/wireless/ath/ath9k/xmit.c 			ath_txq_schedule(sc, txq);
sc               1602 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock_complete(sc, txq);
sc               1609 drivers/net/wireless/ath/ath9k/xmit.c ath9k_set_moredata(struct ath_softc *sc, struct ath_buf *bf, bool val)
sc               1618 drivers/net/wireless/ath/ath9k/xmit.c 		dma_sync_single_for_device(sc->dev, bf->bf_buf_addr,
sc               1629 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = hw->priv;
sc               1631 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_txq *txq = sc->tx.uapsdq;
sc               1647 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, tid->txq);
sc               1649 drivers/net/wireless/ath/ath9k/xmit.c 			ret = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq,
sc               1654 drivers/net/wireless/ath/ath9k/xmit.c 			ath9k_set_moredata(sc, bf, true);
sc               1665 drivers/net/wireless/ath/ath9k/xmit.c 			TX_STAT_INC(sc, txq->axq_qnum, a_queued_hw);
sc               1670 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock_complete(sc, tid->txq);
sc               1677 drivers/net/wireless/ath/ath9k/xmit.c 		ath9k_set_moredata(sc, bf_tail, false);
sc               1683 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               1684 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_fill_desc(sc, bf, txq, 0);
sc               1685 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_txqaddbuf(sc, txq, &bf_q, false);
sc               1686 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock(sc, txq);
sc               1693 drivers/net/wireless/ath/ath9k/xmit.c struct ath_txq *ath_txq_setup(struct ath_softc *sc, int qtype, int subtype)
sc               1695 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1744 drivers/net/wireless/ath/ath9k/xmit.c 	if (!ATH_TXQ_SETUP(sc, axq_qnum)) {
sc               1745 drivers/net/wireless/ath/ath9k/xmit.c 		struct ath_txq *txq = &sc->tx.txq[axq_qnum];
sc               1756 drivers/net/wireless/ath/ath9k/xmit.c 		sc->tx.txqsetup |= 1<<axq_qnum;
sc               1762 drivers/net/wireless/ath/ath9k/xmit.c 	return &sc->tx.txq[axq_qnum];
sc               1765 drivers/net/wireless/ath/ath9k/xmit.c int ath_txq_update(struct ath_softc *sc, int qnum,
sc               1768 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1772 drivers/net/wireless/ath/ath9k/xmit.c 	BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum);
sc               1782 drivers/net/wireless/ath/ath9k/xmit.c 		ath_err(ath9k_hw_common(sc->sc_ah),
sc               1792 drivers/net/wireless/ath/ath9k/xmit.c int ath_cabq_update(struct ath_softc *sc)
sc               1795 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon;
sc               1796 drivers/net/wireless/ath/ath9k/xmit.c 	int qnum = sc->beacon.cabq->axq_qnum;
sc               1798 drivers/net/wireless/ath/ath9k/xmit.c 	ath9k_hw_get_txq_props(sc->sc_ah, qnum, &qi);
sc               1802 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_update(sc, qnum, &qi);
sc               1807 drivers/net/wireless/ath/ath9k/xmit.c static void ath_drain_txq_list(struct ath_softc *sc, struct ath_txq *txq,
sc               1824 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_return_buffer(sc, bf);
sc               1830 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head);
sc               1840 drivers/net/wireless/ath/ath9k/xmit.c void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq)
sc               1843 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               1845 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) {
sc               1849 drivers/net/wireless/ath/ath9k/xmit.c 			ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]);
sc               1858 drivers/net/wireless/ath/ath9k/xmit.c 	ath_drain_txq_list(sc, txq, &txq->axq_q);
sc               1860 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock_complete(sc, txq);
sc               1864 drivers/net/wireless/ath/ath9k/xmit.c bool ath_drain_all_txq(struct ath_softc *sc)
sc               1866 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               1867 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1879 drivers/net/wireless/ath/ath9k/xmit.c 		if (!ATH_TXQ_SETUP(sc, i))
sc               1882 drivers/net/wireless/ath/ath9k/xmit.c 		if (!sc->tx.txq[i].axq_depth)
sc               1885 drivers/net/wireless/ath/ath9k/xmit.c 		if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum))
sc               1890 drivers/net/wireless/ath/ath9k/xmit.c 		RESET_STAT_INC(sc, RESET_TX_DMA_ERROR);
sc               1896 drivers/net/wireless/ath/ath9k/xmit.c 		if (!ATH_TXQ_SETUP(sc, i))
sc               1899 drivers/net/wireless/ath/ath9k/xmit.c 		txq = &sc->tx.txq[i];
sc               1900 drivers/net/wireless/ath/ath9k/xmit.c 		ath_draintxq(sc, txq);
sc               1906 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_cleanupq(struct ath_softc *sc, struct ath_txq *txq)
sc               1908 drivers/net/wireless/ath/ath9k/xmit.c 	ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum);
sc               1909 drivers/net/wireless/ath/ath9k/xmit.c 	sc->tx.txqsetup &= ~(1<<txq->axq_qnum);
sc               1915 drivers/net/wireless/ath/ath9k/xmit.c void ath_txq_schedule(struct ath_softc *sc, struct ath_txq *txq)
sc               1917 drivers/net/wireless/ath/ath9k/xmit.c 	struct ieee80211_hw *hw = sc->hw;
sc               1918 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               1930 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_bh(&sc->chan_lock);
sc               1933 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->cur_chan->stopped)
sc               1941 drivers/net/wireless/ath/ath9k/xmit.c 		ret = ath_tx_sched_aggr(sc, txq, tid);
sc               1950 drivers/net/wireless/ath/ath9k/xmit.c 	spin_unlock_bh(&sc->chan_lock);
sc               1954 drivers/net/wireless/ath/ath9k/xmit.c void ath_txq_schedule_all(struct ath_softc *sc)
sc               1960 drivers/net/wireless/ath/ath9k/xmit.c 		txq = sc->tx.txq_map[i];
sc               1963 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_schedule(sc, txq);
sc               1976 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
sc               1979 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2019 drivers/net/wireless/ath/ath9k/xmit.c 		TX_STAT_INC(sc, txq->axq_qnum, puttxbuf);
sc               2025 drivers/net/wireless/ath/ath9k/xmit.c 	if (!edma || sc->tx99_state) {
sc               2026 drivers/net/wireless/ath/ath9k/xmit.c 		TX_STAT_INC(sc, txq->axq_qnum, txstart);
sc               2043 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
sc               2056 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_addto_baw(sc, tid, bf);
sc               2061 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_fill_desc(sc, bf, txq, fi->framelen);
sc               2062 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_txqaddbuf(sc, txq, &bf_head, false);
sc               2063 drivers/net/wireless/ath/ath9k/xmit.c 	TX_STAT_INC(sc, txq->axq_qnum, queued);
sc               2101 drivers/net/wireless/ath/ath9k/xmit.c 		struct ath_softc *sc = hw->priv;
sc               2103 drivers/net/wireless/ath/ath9k/xmit.c 		txpower = sc->cur_chan->cur_txpower;
sc               2125 drivers/net/wireless/ath/ath9k/xmit.c u8 ath_txchainmask_reduction(struct ath_softc *sc, u8 chainmask, u32 rate)
sc               2127 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2144 drivers/net/wireless/ath/ath9k/xmit.c static struct ath_buf *ath_tx_setup_buffer(struct ath_softc *sc,
sc               2149 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2156 drivers/net/wireless/ath/ath9k/xmit.c 	bf = ath_tx_get_buffer(sc);
sc               2180 drivers/net/wireless/ath/ath9k/xmit.c 	bf->bf_buf_addr = dma_map_single(sc->dev, skb->data,
sc               2182 drivers/net/wireless/ath/ath9k/xmit.c 	if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) {
sc               2185 drivers/net/wireless/ath/ath9k/xmit.c 		ath_err(ath9k_hw_common(sc->sc_ah),
sc               2187 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_return_buffer(sc, bf);
sc               2226 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = hw->priv;
sc               2241 drivers/net/wireless/ath/ath9k/xmit.c 	ath_assign_seq(ath9k_hw_common(sc->sc_ah), skb);
sc               2272 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = hw->priv;
sc               2294 drivers/net/wireless/ath/ath9k/xmit.c 		txq = sc->tx.uapsdq;
sc               2298 drivers/net/wireless/ath/ath9k/xmit.c 		tid = ath_get_skb_tid(sc, an, skb);
sc               2301 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               2302 drivers/net/wireless/ath/ath9k/xmit.c 	if (txq == sc->tx.txq_map[q]) {
sc               2307 drivers/net/wireless/ath/ath9k/xmit.c 	bf = ath_tx_setup_buffer(sc, txq, tid, skb);
sc               2309 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_skb_done(sc, txq, skb);
sc               2313 drivers/net/wireless/ath/ath9k/xmit.c 			ieee80211_free_txskb(sc->hw, skb);
sc               2323 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_send_normal(sc, txq, tid, skb);
sc               2326 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock(sc, txq);
sc               2334 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_softc *sc = hw->priv;
sc               2336 drivers/net/wireless/ath/ath9k/xmit.c 		.txq = sc->beacon.cabq
sc               2346 drivers/net/wireless/ath/ath9k/xmit.c 		sc->cur_chan->beacon.beacon_interval * 1000 *
sc               2347 drivers/net/wireless/ath/ath9k/xmit.c 		sc->cur_chan->beacon.dtim_period / ATH_BCBUF;
sc               2355 drivers/net/wireless/ath/ath9k/xmit.c 		bf = ath_tx_setup_buffer(sc, txctl.txq, NULL, skb);
sc               2361 drivers/net/wireless/ath/ath9k/xmit.c 		ath_buf_set_rate(sc, bf, &info, fi->framelen, false);
sc               2383 drivers/net/wireless/ath/ath9k/xmit.c 	ath9k_set_moredata(sc, bf, false);
sc               2386 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txctl.txq);
sc               2387 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_fill_desc(sc, bf, txctl.txq, 0);
sc               2388 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_txqaddbuf(sc, txctl.txq, &bf_q, false);
sc               2389 drivers/net/wireless/ath/ath9k/xmit.c 	TX_STAT_INC(sc, txctl.txq->axq_qnum, queued);
sc               2390 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock(sc, txctl.txq);
sc               2397 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb,
sc               2402 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2409 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->sc_ah->caldata)
sc               2410 drivers/net/wireless/ath/ath9k/xmit.c 		set_bit(PAPRD_PACKET_SENT, &sc->sc_ah->caldata->cal_flags);
sc               2432 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
sc               2433 drivers/net/wireless/ath/ath9k/xmit.c 	if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) {
sc               2434 drivers/net/wireless/ath/ath9k/xmit.c 		sc->ps_flags &= ~PS_WAIT_FOR_TX_ACK;
sc               2437 drivers/net/wireless/ath/ath9k/xmit.c 			sc->ps_flags & (PS_WAIT_FOR_BEACON |
sc               2442 drivers/net/wireless/ath/ath9k/xmit.c 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
sc               2444 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_skb_done(sc, txq, skb);
sc               2449 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
sc               2465 drivers/net/wireless/ath/ath9k/xmit.c 	dma_unmap_single(sc->dev, bf->bf_buf_addr, skb->len, DMA_TO_DEVICE);
sc               2467 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->tx99_state)
sc               2476 drivers/net/wireless/ath/ath9k/xmit.c 			complete(&sc->paprd_complete);
sc               2478 drivers/net/wireless/ath/ath9k/xmit.c 		ath_debug_stat_tx(sc, bf, ts, txq, tx_flags);
sc               2479 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_complete(sc, skb, tx_flags, txq, sta);
sc               2490 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_irqsave(&sc->tx.txbuflock, flags);
sc               2491 drivers/net/wireless/ath/ath9k/xmit.c 	list_splice_tail_init(bf_q, &sc->tx.txbuf);
sc               2492 drivers/net/wireless/ath/ath9k/xmit.c 	spin_unlock_irqrestore(&sc->tx.txbuflock, flags);
sc               2495 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf,
sc               2502 drivers/net/wireless/ath/ath9k/xmit.c 	struct ieee80211_hw *hw = sc->hw;
sc               2503 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2537 drivers/net/wireless/ath/ath9k/xmit.c 		    ah->tx_trig_level >= sc->sc_ah->config.max_txtrig_level)
sc               2553 drivers/net/wireless/ath/ath9k/xmit.c static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq)
sc               2555 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2564 drivers/net/wireless/ath/ath9k/xmit.c 		txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum),
sc               2567 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_lock(sc, txq);
sc               2574 drivers/net/wireless/ath/ath9k/xmit.c 			ath_txq_schedule(sc, txq);
sc               2605 drivers/net/wireless/ath/ath9k/xmit.c 		TX_STAT_INC(sc, txq->axq_qnum, txprocdesc);
sc               2620 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_return_buffer(sc, bf_held);
sc               2623 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head);
sc               2625 drivers/net/wireless/ath/ath9k/xmit.c 	ath_txq_unlock_complete(sc, txq);
sc               2628 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_tasklet(struct ath_softc *sc)
sc               2630 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2636 drivers/net/wireless/ath/ath9k/xmit.c 		if (ATH_TXQ_SETUP(sc, i) && (qcumask & (1 << i)))
sc               2637 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_processq(sc, &sc->tx.txq[i]);
sc               2642 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_edma_tasklet(struct ath_softc *sc)
sc               2645 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2646 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_hw *ah = sc->sc_ah;
sc               2667 drivers/net/wireless/ath/ath9k/xmit.c 		if (ts.qid == sc->beacon.beaconq) {
sc               2668 drivers/net/wireless/ath/ath9k/xmit.c 			sc->beacon.tx_processed = true;
sc               2669 drivers/net/wireless/ath/ath9k/xmit.c 			sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK);
sc               2672 drivers/net/wireless/ath/ath9k/xmit.c 				ath_chanctx_event(sc, NULL,
sc               2676 drivers/net/wireless/ath/ath9k/xmit.c 			ath9k_csa_update(sc);
sc               2680 drivers/net/wireless/ath/ath9k/xmit.c 		txq = &sc->tx.txq[ts.qid];
sc               2682 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, txq);
sc               2684 drivers/net/wireless/ath/ath9k/xmit.c 		TX_STAT_INC(sc, txq->axq_qnum, txprocdesc);
sc               2688 drivers/net/wireless/ath/ath9k/xmit.c 			ath_txq_unlock(sc, txq);
sc               2695 drivers/net/wireless/ath/ath9k/xmit.c 			ath_tx_return_buffer(sc, bf);
sc               2712 drivers/net/wireless/ath/ath9k/xmit.c 				ath_tx_txqaddbuf(sc, txq, &bf_q, true);
sc               2721 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head);
sc               2722 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock_complete(sc, txq);
sc               2731 drivers/net/wireless/ath/ath9k/xmit.c static int ath_txstatus_setup(struct ath_softc *sc, int size)
sc               2733 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_descdma *dd = &sc->txsdma;
sc               2734 drivers/net/wireless/ath/ath9k/xmit.c 	u8 txs_len = sc->sc_ah->caps.txs_len;
sc               2737 drivers/net/wireless/ath/ath9k/xmit.c 	dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len,
sc               2745 drivers/net/wireless/ath/ath9k/xmit.c static int ath_tx_edma_init(struct ath_softc *sc)
sc               2749 drivers/net/wireless/ath/ath9k/xmit.c 	err = ath_txstatus_setup(sc, ATH_TXSTATUS_RING_SIZE);
sc               2751 drivers/net/wireless/ath/ath9k/xmit.c 		ath9k_hw_setup_statusring(sc->sc_ah, sc->txsdma.dd_desc,
sc               2752 drivers/net/wireless/ath/ath9k/xmit.c 					  sc->txsdma.dd_desc_paddr,
sc               2758 drivers/net/wireless/ath/ath9k/xmit.c int ath_tx_init(struct ath_softc *sc, int nbufs)
sc               2760 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2763 drivers/net/wireless/ath/ath9k/xmit.c 	spin_lock_init(&sc->tx.txbuflock);
sc               2765 drivers/net/wireless/ath/ath9k/xmit.c 	error = ath_descdma_setup(sc, &sc->tx.txdma, &sc->tx.txbuf,
sc               2773 drivers/net/wireless/ath/ath9k/xmit.c 	error = ath_descdma_setup(sc, &sc->beacon.bdma, &sc->beacon.bbuf,
sc               2781 drivers/net/wireless/ath/ath9k/xmit.c 	if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)
sc               2782 drivers/net/wireless/ath/ath9k/xmit.c 		error = ath_tx_edma_init(sc);
sc               2787 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_node_init(struct ath_softc *sc, struct ath_node *an)
sc               2804 drivers/net/wireless/ath/ath9k/xmit.c 		tid->txq = sc->tx.txq_map[acno];
sc               2811 drivers/net/wireless/ath/ath9k/xmit.c void ath_tx_node_cleanup(struct ath_softc *sc, struct ath_node *an)
sc               2823 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_lock(sc, txq);
sc               2828 drivers/net/wireless/ath/ath9k/xmit.c 		ath_tid_drain(sc, txq, tid);
sc               2831 drivers/net/wireless/ath/ath9k/xmit.c 		ath_txq_unlock(sc, txq);
sc               2842 drivers/net/wireless/ath/ath9k/xmit.c int ath9k_tx99_send(struct ath_softc *sc, struct sk_buff *skb,
sc               2847 drivers/net/wireless/ath/ath9k/xmit.c 	struct ath_common *common = ath9k_hw_common(sc->sc_ah);
sc               2869 drivers/net/wireless/ath/ath9k/xmit.c 	bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb);
sc               2875 drivers/net/wireless/ath/ath9k/xmit.c 	ath_set_rates(sc->tx99_vif, NULL, bf);
sc               2877 drivers/net/wireless/ath/ath9k/xmit.c 	ath9k_hw_set_desc_link(sc->sc_ah, bf->bf_desc, bf->bf_daddr);
sc               2878 drivers/net/wireless/ath/ath9k/xmit.c 	ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum);
sc               2880 drivers/net/wireless/ath/ath9k/xmit.c 	ath_tx_send_normal(sc, txctl->txq, NULL, skb);
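The ath9k xmit.c hits above (around lines 364–387) show the driver's TX buffer pool: ath_tx_get_buffer() pops a free ath_buf off sc->tx.txbuf under sc->tx.txbuflock, and ath_tx_return_buffer() puts it back when the frame completes. Below is a minimal userspace sketch of that get/return pattern, assuming a plain singly linked free list and a pthread mutex (compile with -pthread) in place of the driver's kernel list and spin_lock_bh(); struct buf, get_buffer() and return_buffer() are illustrative names, not the driver's.

/* Illustrative sketch of the ath_tx_get_buffer()/ath_tx_return_buffer()
 * pattern: TX buffers live on a lock-protected free list; callers pop
 * one to transmit and push it back on completion. */
#include <pthread.h>
#include <stdio.h>

struct buf { struct buf *next; };

static struct buf pool[4];
static struct buf *free_list;
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

static struct buf *get_buffer(void)
{
	struct buf *bf;

	pthread_mutex_lock(&lock);
	bf = free_list;			/* NULL when the pool is exhausted */
	if (bf)
		free_list = bf->next;
	pthread_mutex_unlock(&lock);
	return bf;
}

static void return_buffer(struct buf *bf)
{
	pthread_mutex_lock(&lock);
	bf->next = free_list;		/* push back onto the free list */
	free_list = bf;
	pthread_mutex_unlock(&lock);
}

int main(void)
{
	for (int i = 0; i < 4; i++)
		return_buffer(&pool[i]);

	struct buf *bf = get_buffer();
	printf("got %p\n", (void *)bf);
	return_buffer(bf);
	return 0;
}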
sc               8128 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	u16 sc = le16_to_cpu(header->seq_ctl);
sc               8129 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	u16 seq = WLAN_GET_SEQ_SEQ(sc);
sc               8130 drivers/net/wireless/intel/ipw2x00/ipw2200.c 	u16 frag = WLAN_GET_SEQ_FRAG(sc);
sc                 89 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	u16 sc;
sc                 93 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc                 94 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc                 95 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	seq = WLAN_GET_SEQ_SEQ(sc);
sc                139 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	u16 sc;
sc                143 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc                144 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	seq = WLAN_GET_SEQ_SEQ(sc);
sc                334 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	u16 fc, type, stype, sc;
sc                361 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc                362 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc                482 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 	if (sc == ieee->prev_seq_ctl)
sc                485 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 		ieee->prev_seq_ctl = sc;
sc                588 drivers/net/wireless/intel/ipw2x00/libipw_rx.c 					WLAN_GET_SEQ_SEQ(sc), frag);
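The libipw_rx.c hits above all split the 802.11 sequence-control word with WLAN_GET_SEQ_SEQ()/WLAN_GET_SEQ_FRAG(): the low 4 bits carry the fragment number and the upper 12 bits the sequence number (the hostap lines later in this listing do the same split by hand with the IEEE80211_SCTL_* masks and a shift by 4). A minimal standalone sketch of that split, with local SCTL_* defines standing in for the kernel's IEEE80211_SCTL_FRAG/IEEE80211_SCTL_SEQ:

/* Illustrative sketch: decode fragment and sequence numbers from a
 * host-order 802.11 sequence-control value. */
#include <stdint.h>
#include <stdio.h>

#define SCTL_FRAG 0x000Fu	/* low 4 bits: fragment number */
#define SCTL_SEQ  0xFFF0u	/* high 12 bits: sequence number */

static uint16_t seq_of(uint16_t sc)  { return (sc & SCTL_SEQ) >> 4; }
static uint16_t frag_of(uint16_t sc) { return sc & SCTL_FRAG; }

int main(void)
{
	uint16_t sc = 0x1234;	/* example already-le16_to_cpu()'d seq_ctl */

	printf("seq=%u frag=%u\n", seq_of(sc), frag_of(sc));
	return 0;
}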
sc               2694 drivers/net/wireless/intel/iwlegacy/4965-mac.c 			u16 sc;
sc               2713 drivers/net/wireless/intel/iwlegacy/4965-mac.c 			sc = le16_to_cpu(hdr->seq_ctrl);
sc               2714 drivers/net/wireless/intel/iwlegacy/4965-mac.c 			if (idx != (IEEE80211_SEQ_TO_SN(sc) & 0xff)) {
sc               2717 drivers/net/wireless/intel/iwlegacy/4965-mac.c 				       IEEE80211_SEQ_TO_SN(sc), hdr->seq_ctrl);
sc               2722 drivers/net/wireless/intel/iwlegacy/4965-mac.c 				   IEEE80211_SEQ_TO_SN(sc));
sc               1254 drivers/net/wireless/intel/iwlwifi/mvm/d3.c static void iwl_mvm_aes_sc_to_seq(struct aes_sc *sc,
sc               1259 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	pn = le64_to_cpu(sc->pn);
sc               1268 drivers/net/wireless/intel/iwlwifi/mvm/d3.c static void iwl_mvm_tkip_sc_to_seq(struct tkip_sc *sc,
sc               1271 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	seq->tkip.iv32 = le32_to_cpu(sc->iv32);
sc               1272 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 	seq->tkip.iv16 = le16_to_cpu(sc->iv16);
sc               1390 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 		union iwl_all_tsc_rsc *sc =
sc               1398 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 			iwl_mvm_set_aes_rx_seq(data->mvm, sc->aes.unicast_rsc,
sc               1400 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 			atomic64_set(&key->tx_pn, le64_to_cpu(sc->aes.tsc.pn));
sc               1403 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 			iwl_mvm_tkip_sc_to_seq(&sc->tkip.tsc, &seq);
sc               1404 drivers/net/wireless/intel/iwlwifi/mvm/d3.c 			iwl_mvm_set_tkip_rx_seq(sc->tkip.unicast_rsc, key);
sc                254 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	u16 sc;
sc                258 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	sc = le16_to_cpu(hdr->seq_ctrl);
sc                259 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	frag = sc & IEEE80211_SCTL_FRAG;
sc                260 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	seq = (sc & IEEE80211_SCTL_SEQ) >> 4;
sc                305 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	u16 sc;
sc                309 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	sc = le16_to_cpu(hdr->seq_ctrl);
sc                310 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	seq = (sc & IEEE80211_SCTL_SEQ) >> 4;
sc                723 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	u16 fc, type, stype, sc;
sc                755 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	sc = le16_to_cpu(hdr->seq_ctrl);
sc                756 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 	frag = sc & IEEE80211_SCTL_FRAG;
sc                927 drivers/net/wireless/intersil/hostap/hostap_80211_rx.c 			       (sc & IEEE80211_SCTL_SEQ) >> 4, frag);
sc               1169 drivers/net/wireless/marvell/mwifiex/sta_ioctl.c 	u8 sc;
sc               1175 drivers/net/wireless/marvell/mwifiex/sta_ioctl.c 		sc = mwifiex_chan_type_to_sec_chan_offset(NL80211_CHAN_NO_HT);
sc               1176 drivers/net/wireless/marvell/mwifiex/sta_ioctl.c 		roc_cfg.band_cfg |= (sc << 2);
sc               2049 drivers/net/wireless/quantenna/qtnfmac/commands.c 				     const struct ieee80211_channel *sc)
sc               2059 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->center_freq = cpu_to_le16(sc->center_freq);
sc               2060 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->hw_value = cpu_to_le16(sc->hw_value);
sc               2061 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->band = qlink_utils_band_cfg2q(sc->band);
sc               2062 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->max_power = sc->max_power;
sc               2063 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->max_reg_power = sc->max_reg_power;
sc               2064 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->max_antenna_gain = sc->max_antenna_gain;
sc               2065 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->beacon_found = sc->beacon_found;
sc               2066 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->dfs_state = qlink_utils_dfs_state_cfg2q(sc->dfs_state);
sc               2067 drivers/net/wireless/quantenna/qtnfmac/commands.c 	qch->flags = cpu_to_le32(qlink_utils_chflags_cfg2q(sc->flags));
sc               2118 drivers/net/wireless/quantenna/qtnfmac/commands.c 	struct ieee80211_channel *sc;
sc               2150 drivers/net/wireless/quantenna/qtnfmac/commands.c 			sc = scan_req->channels[count];
sc               2151 drivers/net/wireless/quantenna/qtnfmac/commands.c 			if (sc->flags & IEEE80211_CHAN_DISABLED) {
sc               2157 drivers/net/wireless/quantenna/qtnfmac/commands.c 				 mac->macid, sc->hw_value, sc->center_freq,
sc               2158 drivers/net/wireless/quantenna/qtnfmac/commands.c 				 sc->flags);
sc               2160 drivers/net/wireless/quantenna/qtnfmac/commands.c 			qtnf_cmd_channel_tlv_add(cmd_skb, sc);
sc                 88 drivers/nfc/pn533/pn533.c 		__be16 sc;
sc                117 drivers/nfc/pn533/pn533.c 				.sc = PN533_FELICA_SENSF_SC_ALL,
sc                130 drivers/nfc/pn533/pn533.c 				.sc = PN533_FELICA_SENSF_SC_ALL,
sc               1445 drivers/nfc/port100.c 	hdr->nfcf_param[16] = (params->sc >> 8) & 0xFF;
sc               1446 drivers/nfc/port100.c 	hdr->nfcf_param[17] = params->sc & 0xFF;
sc                613 drivers/s390/scsi/zfcp_dbf.c 			  struct scsi_cmnd *sc, struct zfcp_fsf_req *fsf)
sc                628 drivers/s390/scsi/zfcp_dbf.c 	if (sc) {
sc                629 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_result = sc->result;
sc                630 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_retries = sc->retries;
sc                631 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_allowed = sc->allowed;
sc                632 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_id = sc->device->id;
sc                633 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_lun = (u32)sc->device->lun;
sc                634 drivers/s390/scsi/zfcp_dbf.c 		rec->scsi_lun_64_hi = (u32)(sc->device->lun >> 32);
sc                635 drivers/s390/scsi/zfcp_dbf.c 		rec->host_scribble = (unsigned long)sc->host_scribble;
sc                637 drivers/s390/scsi/zfcp_dbf.c 		memcpy(rec->scsi_opcode, sc->cmnd,
sc                638 drivers/s390/scsi/zfcp_dbf.c 		       min_t(int, sc->cmd_len, ZFCP_DBF_SCSI_OPCODE));
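The zfcp_dbf.c lines above copy selected scsi_cmnd fields into a fixed-size trace record, bounding the CDB copy with min_t(int, sc->cmd_len, ZFCP_DBF_SCSI_OPCODE) so an oversized command never overruns the record. A small sketch of that bounded-copy idea, using placeholder structs and an OPCODE_LEN stand-in rather than the real zfcp record layout:

/* Illustrative sketch: copy at most OPCODE_LEN bytes of the CDB into a
 * fixed-size trace record. */
#include <string.h>
#include <stdio.h>

#define OPCODE_LEN 16			/* stand-in for ZFCP_DBF_SCSI_OPCODE */

struct fake_cmnd { unsigned char cmnd[32]; int cmd_len; };
struct fake_rec  { unsigned char scsi_opcode[OPCODE_LEN]; };

static void record_opcode(struct fake_rec *rec, const struct fake_cmnd *sc)
{
	int n = sc->cmd_len < OPCODE_LEN ? sc->cmd_len : OPCODE_LEN;

	memcpy(rec->scsi_opcode, sc->cmnd, n);	/* never copy past the record */
}

int main(void)
{
	struct fake_cmnd sc = { .cmnd = { 0x28, 0, 0, 0, 0, 8, 0, 0, 1, 0 },
				.cmd_len = 10 };
	struct fake_rec rec = { 0 };

	record_opcode(&rec, &sc);
	printf("opcode=0x%02x\n", rec.scsi_opcode[0]);
	return 0;
}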
sc                 54 drivers/s390/scsi/zfcp_ext.h 				 struct scsi_cmnd *sc,
sc               8638 drivers/scsi/advansys.c 	unsigned char sc;
sc               8641 drivers/scsi/advansys.c 	sc = inp(iop_base + IOP_REG_SC);
sc               8643 drivers/scsi/advansys.c 	return sc;
sc               1414 drivers/scsi/aha152x.c 			struct aha152x_scdata *sc = SCDATA(cmd);
sc               1416 drivers/scsi/aha152x.c 			scsi_eh_restore_cmnd(cmd, &sc->ses);
sc               1429 drivers/scsi/aha152x.c 				struct aha152x_scdata *sc;
sc               1433 drivers/scsi/aha152x.c 				sc = SCDATA(ptr);
sc               1435 drivers/scsi/aha152x.c 				BUG_ON(!sc);
sc               1436 drivers/scsi/aha152x.c 				scsi_eh_prep_cmnd(ptr, &sc->ses, NULL, 0, ~0);
sc                126 drivers/scsi/aha1740.c 		        sc:1,	/* Specification Check */
sc                146 drivers/scsi/aha1740.c 		if ( (status[1]&0x18) || status_word.sc ) {
sc                272 drivers/scsi/aic7xxx/aic7770.c 	struct	seeprom_config *sc;
sc                287 drivers/scsi/aic7xxx/aic7770.c 	sc = ahc->seep_config;
sc                291 drivers/scsi/aic7xxx/aic7770.c 	have_seeprom = ahc_read_seeprom(&sd, (uint16_t *)sc,
sc                292 drivers/scsi/aic7xxx/aic7770.c 					/*start_addr*/0, sizeof(*sc)/2);
sc                296 drivers/scsi/aic7xxx/aic7770.c 		if (ahc_verify_cksum(sc) == 0) {
sc                323 drivers/scsi/aic7xxx/aic7770.c 			target_settings = (sc->device_flags[i] & CFXFER) << 4;
sc                324 drivers/scsi/aic7xxx/aic7770.c 			if (sc->device_flags[i] & CFSYNCH)
sc                326 drivers/scsi/aic7xxx/aic7770.c 			if (sc->device_flags[i] & CFWIDEB)
sc                328 drivers/scsi/aic7xxx/aic7770.c 			if (sc->device_flags[i] & CFDISC)
sc                335 drivers/scsi/aic7xxx/aic7770.c 		ahc->our_id = sc->brtime_id & CFSCSIID;
sc                338 drivers/scsi/aic7xxx/aic7770.c 		if (sc->adapter_control & CFSPARITY)
sc                340 drivers/scsi/aic7xxx/aic7770.c 		if (sc->adapter_control & CFRESETB)
sc                343 drivers/scsi/aic7xxx/aic7770.c 		if (sc->bios_control & CF284XEXTEND)		
sc                348 drivers/scsi/aic7xxx/aic7770.c 		if (sc->adapter_control & CF284XSTERM)
sc                964 drivers/scsi/aic7xxx/aic79xx.h int		ahd_verify_cksum(struct seeprom_config *sc);
sc               1355 drivers/scsi/aic7xxx/aic79xx.h 					   struct seeprom_config *sc);
sc               7658 drivers/scsi/aic7xxx/aic79xx_core.c ahd_parse_cfgdata(struct ahd_softc *ahd, struct seeprom_config *sc)
sc               7663 drivers/scsi/aic7xxx/aic79xx_core.c 	max_targ = sc->max_targets & CFMAXTARG;
sc               7664 drivers/scsi/aic7xxx/aic79xx_core.c 	ahd->our_id = sc->brtime_id & CFSCSIID;
sc               7698 drivers/scsi/aic7xxx/aic79xx_core.c 		if (sc->device_flags[targ] & CFDISC) {
sc               7706 drivers/scsi/aic7xxx/aic79xx_core.c 			sc->device_flags[targ] &= ~CFPACKETIZED;
sc               7710 drivers/scsi/aic7xxx/aic79xx_core.c 		user_tinfo->period = (sc->device_flags[targ] & CFXFER);
sc               7724 drivers/scsi/aic7xxx/aic79xx_core.c 		if ((sc->device_flags[targ] & CFPACKETIZED) != 0) {
sc               7733 drivers/scsi/aic7xxx/aic79xx_core.c 		if ((sc->device_flags[targ] & CFQAS) != 0)
sc               7736 drivers/scsi/aic7xxx/aic79xx_core.c 		if ((sc->device_flags[targ] & CFWIDEB) != 0)
sc               7766 drivers/scsi/aic7xxx/aic79xx_core.c 	if (sc->bios_control & CFSPARITY)
sc               7770 drivers/scsi/aic7xxx/aic79xx_core.c 	if (sc->bios_control & CFRESETB)
sc               7774 drivers/scsi/aic7xxx/aic79xx_core.c 	if (sc->bios_control & CFEXTEND)
sc               7778 drivers/scsi/aic7xxx/aic79xx_core.c 	if ((sc->bios_control & CFBIOSSTATE) == CFBS_ENABLED)
sc               7782 drivers/scsi/aic7xxx/aic79xx_core.c 	if ((sc->adapter_control & CFSTPWLEVEL) != 0)
sc               8977 drivers/scsi/aic7xxx/aic79xx_core.c 		struct scsi_sense *sc;
sc               9004 drivers/scsi/aic7xxx/aic79xx_core.c 		sc = (struct scsi_sense *)hscb->shared_data.idata.cdb;
sc               9019 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->opcode = REQUEST_SENSE;
sc               9020 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->byte2 = 0;
sc               9023 drivers/scsi/aic7xxx/aic79xx_core.c 			sc->byte2 = SCB_GET_LUN(scb) << 5;
sc               9024 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->unused[0] = 0;
sc               9025 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->unused[1] = 0;
sc               9026 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->length = ahd_get_sense_bufsize(ahd, scb);
sc               9027 drivers/scsi/aic7xxx/aic79xx_core.c 		sc->control = 0;
sc               9057 drivers/scsi/aic7xxx/aic79xx_core.c 		hscb->cdb_len = sizeof(*sc);
sc               10114 drivers/scsi/aic7xxx/aic79xx_core.c ahd_verify_cksum(struct seeprom_config *sc)
sc               10121 drivers/scsi/aic7xxx/aic79xx_core.c 	maxaddr = (sizeof(*sc)/2) - 1;
sc               10123 drivers/scsi/aic7xxx/aic79xx_core.c 	scarray = (uint16_t *)sc;
sc               10128 drivers/scsi/aic7xxx/aic79xx_core.c 	 || (checksum & 0xFFFF) != sc->checksum) {
sc                612 drivers/scsi/aic7xxx/aic79xx_osm.c 	struct seeprom_config *sc = ahd->seep_config;
sc                626 drivers/scsi/aic7xxx/aic79xx_osm.c 	if (sc) {
sc                627 drivers/scsi/aic7xxx/aic79xx_osm.c 		int flags = sc->device_flags[starget->id];
sc                509 drivers/scsi/aic7xxx/aic79xx_pci.c 	struct	seeprom_config *sc;
sc                514 drivers/scsi/aic7xxx/aic79xx_pci.c 	sc = ahd->seep_config;
sc                527 drivers/scsi/aic7xxx/aic79xx_pci.c 		start_addr = ((2 * sizeof(*sc))
sc                544 drivers/scsi/aic7xxx/aic79xx_pci.c 		start_addr = (sizeof(*sc) / 2) * (ahd->channel - 'A');
sc                546 drivers/scsi/aic7xxx/aic79xx_pci.c 		error = ahd_read_seeprom(ahd, (uint16_t *)sc,
sc                547 drivers/scsi/aic7xxx/aic79xx_pci.c 					 start_addr, sizeof(*sc)/2,
sc                554 drivers/scsi/aic7xxx/aic79xx_pci.c 			have_seeprom = ahd_verify_cksum(sc);
sc                595 drivers/scsi/aic7xxx/aic79xx_pci.c 			sc_data = (uint16_t *)sc;
sc                598 drivers/scsi/aic7xxx/aic79xx_pci.c 			have_seeprom = ahd_verify_cksum(sc);
sc                611 drivers/scsi/aic7xxx/aic79xx_pci.c 		sc_data = (uint16_t *)sc;
sc                612 drivers/scsi/aic7xxx/aic79xx_pci.c 		for (i = 0; i < (sizeof(*sc)); i += 2)
sc                627 drivers/scsi/aic7xxx/aic79xx_pci.c 		error = ahd_parse_cfgdata(ahd, sc);
sc                628 drivers/scsi/aic7xxx/aic79xx_pci.c 		adapter_control = sc->adapter_control;
sc                299 drivers/scsi/aic7xxx/aic7xxx_93cx6.c ahc_verify_cksum(struct seeprom_config *sc)
sc                306 drivers/scsi/aic7xxx/aic7xxx_93cx6.c 	maxaddr = (sizeof(*sc)/2) - 1;
sc                308 drivers/scsi/aic7xxx/aic7xxx_93cx6.c 	scarray = (uint16_t *)sc;
sc                313 drivers/scsi/aic7xxx/aic7xxx_93cx6.c 	 || (checksum & 0xFFFF) != sc->checksum) {
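ahc_verify_cksum()/ahd_verify_cksum() above validate the SEEPROM image by treating the configuration as an array of 16-bit words, summing every word except the trailing checksum, rejecting a zero sum, and comparing the low 16 bits of the sum against the stored sc->checksum. A standalone sketch of that check, with a placeholder layout in place of the real seeprom_config:

/* Illustrative sketch of the aic7xxx/aic79xx SEEPROM checksum check. */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct fake_seeprom {
	uint16_t words[31];	/* configuration words (placeholder layout) */
	uint16_t checksum;	/* last word: checksum of the rest */
};

static int verify_cksum(const struct fake_seeprom *sc)
{
	const uint16_t *scarray = (const uint16_t *)sc;
	size_t maxaddr = sizeof(*sc) / 2 - 1;	/* exclude the checksum word */
	uint32_t checksum = 0;

	for (size_t i = 0; i < maxaddr; i++)
		checksum += scarray[i];

	if (checksum == 0)			/* an all-zero image is invalid */
		return 0;
	return (checksum & 0xFFFF) == sc->checksum;
}

int main(void)
{
	struct fake_seeprom sc = { .words = { 1, 2, 3 } };

	sc.checksum = 6;	/* 1 + 2 + 3 */
	printf("valid=%d\n", verify_cksum(&sc));
	return 0;
}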
sc                100 drivers/scsi/aic7xxx/aic7xxx_93cx6.h int ahc_verify_cksum(struct seeprom_config *sc);
sc               1052 drivers/scsi/aic7xxx/aic7xxx_core.c 			struct scsi_sense *sc;
sc               1074 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc = (struct scsi_sense *)(&hscb->shared_data.cdb); 
sc               1093 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->opcode = REQUEST_SENSE;
sc               1094 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->byte2 = 0;
sc               1097 drivers/scsi/aic7xxx/aic7xxx_core.c 				sc->byte2 = SCB_GET_LUN(scb) << 5;
sc               1098 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->unused[0] = 0;
sc               1099 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->unused[1] = 0;
sc               1100 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->length = sg->len;
sc               1101 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->control = 0;
sc               1131 drivers/scsi/aic7xxx/aic7xxx_core.c 			hscb->cdb_len = sizeof(*sc);
sc                562 drivers/scsi/aic7xxx/aic7xxx_osm.c 	struct seeprom_config *sc = ahc->seep_config;
sc                586 drivers/scsi/aic7xxx/aic7xxx_osm.c 	if (sc) {
sc                589 drivers/scsi/aic7xxx/aic7xxx_osm.c 		int flags = sc->device_flags[target_offset];
sc                605 drivers/scsi/aic7xxx/aic7xxx_pci.c 				 struct seeprom_config *sc);
sc               1256 drivers/scsi/aic7xxx/aic7xxx_pci.c 	struct	seeprom_config *sc;
sc               1264 drivers/scsi/aic7xxx/aic7xxx_pci.c 	sc = ahc->seep_config;
sc               1295 drivers/scsi/aic7xxx/aic7xxx_pci.c 			have_seeprom = ahc_read_seeprom(&sd, (uint16_t *)sc,
sc               1297 drivers/scsi/aic7xxx/aic7xxx_pci.c 							sizeof(*sc)/2);
sc               1300 drivers/scsi/aic7xxx/aic7xxx_pci.c 				have_seeprom = ahc_verify_cksum(sc);
sc               1336 drivers/scsi/aic7xxx/aic7xxx_pci.c 			sc_data = (uint16_t *)sc;
sc               1344 drivers/scsi/aic7xxx/aic7xxx_pci.c 			have_seeprom = ahc_verify_cksum(sc);
sc               1362 drivers/scsi/aic7xxx/aic7xxx_pci.c 		sc = NULL;
sc               1364 drivers/scsi/aic7xxx/aic7xxx_pci.c 		ahc_parse_pci_eeprom(ahc, sc);
sc               1388 drivers/scsi/aic7xxx/aic7xxx_pci.c 		configure_termination(ahc, &sd, sc->adapter_control, sxfrctl1);
sc               1392 drivers/scsi/aic7xxx/aic7xxx_pci.c 		if ((sc->adapter_control & CFSTERM) != 0)
sc               1402 drivers/scsi/aic7xxx/aic7xxx_pci.c ahc_parse_pci_eeprom(struct ahc_softc *ahc, struct seeprom_config *sc)
sc               1409 drivers/scsi/aic7xxx/aic7xxx_pci.c 	int	 max_targ = sc->max_targets & CFMAXTARG;
sc               1416 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if ((sc->adapter_control & CFULTRAEN) != 0) {
sc               1422 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if ((sc->device_flags[i] & CFSYNCHISULTRA) != 0) {
sc               1434 drivers/scsi/aic7xxx/aic7xxx_pci.c 		if (sc->device_flags[i] & CFDISC)
sc               1437 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if ((sc->device_flags[i] & CFSYNCHISULTRA) != 0)
sc               1439 drivers/scsi/aic7xxx/aic7xxx_pci.c 		} else if ((sc->adapter_control & CFULTRAEN) != 0) {
sc               1442 drivers/scsi/aic7xxx/aic7xxx_pci.c 		if ((sc->device_flags[i] & CFXFER) == 0x04
sc               1445 drivers/scsi/aic7xxx/aic7xxx_pci.c 			sc->device_flags[i] &= ~CFXFER;
sc               1451 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if (sc->device_flags[i] & CFSYNCH)
sc               1462 drivers/scsi/aic7xxx/aic7xxx_pci.c 			scsirate = (sc->device_flags[i] & CFXFER)
sc               1464 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if (sc->device_flags[i] & CFWIDEB)
sc               1467 drivers/scsi/aic7xxx/aic7xxx_pci.c 			scsirate = (sc->device_flags[i] & CFXFER) << 4;
sc               1468 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if (sc->device_flags[i] & CFSYNCH)
sc               1470 drivers/scsi/aic7xxx/aic7xxx_pci.c 			if (sc->device_flags[i] & CFWIDEB)
sc               1475 drivers/scsi/aic7xxx/aic7xxx_pci.c 	ahc->our_id = sc->brtime_id & CFSCSIID;
sc               1478 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if (sc->adapter_control & CFSPARITY)
sc               1480 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if (sc->adapter_control & CFRESETB)
sc               1483 drivers/scsi/aic7xxx/aic7xxx_pci.c 	ahc->flags |= (sc->adapter_control & CFBOOTCHAN) >> CFBOOTCHANSHIFT;
sc               1485 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if (sc->bios_control & CFEXTEND)
sc               1488 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if (sc->bios_control & CFBIOSEN)
sc               1493 drivers/scsi/aic7xxx/aic7xxx_pci.c 		if (!(sc->adapter_control & CFULTRAEN))
sc               1498 drivers/scsi/aic7xxx/aic7xxx_pci.c 	if (sc->signature == CFSIGNATURE
sc               1499 drivers/scsi/aic7xxx/aic7xxx_pci.c 	 || sc->signature == CFSIGNATURE2) {
sc               1506 drivers/scsi/aic7xxx/aic7xxx_pci.c 		if ((sc->bios_control & CFSTPWLEVEL) != 0)
sc                 42 drivers/scsi/aic94xx/aic94xx_task.c 	struct scatterlist *sc;
sc                 79 drivers/scsi/aic94xx/aic94xx_task.c 		for_each_sg(task->scatter, sc, num_sg, i) {
sc                 82 drivers/scsi/aic94xx/aic94xx_task.c 			sg->bus_addr = cpu_to_le64((u64)sg_dma_address(sc));
sc                 83 drivers/scsi/aic94xx/aic94xx_task.c 			sg->size = cpu_to_le32((u32)sg_dma_len(sc));
sc                 88 drivers/scsi/aic94xx/aic94xx_task.c 		for_each_sg(task->scatter, sc, 2, i) {
sc                 90 drivers/scsi/aic94xx/aic94xx_task.c 				cpu_to_le64((u64)sg_dma_address(sc));
sc                 91 drivers/scsi/aic94xx/aic94xx_task.c 			sg_arr[i].size = cpu_to_le32((u32)sg_dma_len(sc));
sc                100 drivers/scsi/aic94xx/aic94xx_task.c 		for_each_sg(task->scatter, sc, num_sg, i) {
sc                102 drivers/scsi/aic94xx/aic94xx_task.c 				cpu_to_le64((u64)sg_dma_address(sc));
sc                103 drivers/scsi/aic94xx/aic94xx_task.c 			sg_arr[i].size = cpu_to_le32((u32)sg_dma_len(sc));
sc                217 drivers/scsi/be2iscsi/be_main.c static int beiscsi_eh_abort(struct scsi_cmnd *sc)
sc                219 drivers/scsi/be2iscsi/be_main.c 	struct iscsi_task *abrt_task = (struct iscsi_task *)sc->SCp.ptr;
sc                229 drivers/scsi/be2iscsi/be_main.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc                234 drivers/scsi/be2iscsi/be_main.c 	if (!abrt_task || !abrt_task->sc) {
sc                261 drivers/scsi/be2iscsi/be_main.c 			    sc, rc);
sc                265 drivers/scsi/be2iscsi/be_main.c 	return iscsi_eh_abort(sc);
sc                268 drivers/scsi/be2iscsi/be_main.c static int beiscsi_eh_device_reset(struct scsi_cmnd *sc)
sc                284 drivers/scsi/be2iscsi/be_main.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc                309 drivers/scsi/be2iscsi/be_main.c 		if (!task->sc)
sc                312 drivers/scsi/be2iscsi/be_main.c 		if (sc->device->lun != task->sc->device->lun)
sc                367 drivers/scsi/be2iscsi/be_main.c 		rc = iscsi_eh_device_reset(sc);
sc               1120 drivers/scsi/be2iscsi/be_main.c 	if (!task->sc) {
sc               1128 drivers/scsi/be2iscsi/be_main.c 	task->sc->result = (DID_OK << 16) | status;
sc               1130 drivers/scsi/be2iscsi/be_main.c 		task->sc->result = DID_ERROR << 16;
sc               1137 drivers/scsi/be2iscsi/be_main.c 			task->sc->result = DID_ERROR << 16;
sc               1140 drivers/scsi/be2iscsi/be_main.c 			scsi_set_resid(task->sc, resid);
sc               1141 drivers/scsi/be2iscsi/be_main.c 			if (!status && (scsi_bufflen(task->sc) - resid <
sc               1142 drivers/scsi/be2iscsi/be_main.c 			    task->sc->underflow))
sc               1143 drivers/scsi/be2iscsi/be_main.c 				task->sc->result = DID_ERROR << 16;
sc               1153 drivers/scsi/be2iscsi/be_main.c 		memcpy(task->sc->sense_buffer, sense,
sc               4275 drivers/scsi/be2iscsi/be_main.c 	if (task->sc) {
sc               4399 drivers/scsi/be2iscsi/be_main.c 	if (task->sc) {
sc               4761 drivers/scsi/be2iscsi/be_main.c 	struct scsi_cmnd *sc = task->sc;
sc               4779 drivers/scsi/be2iscsi/be_main.c 	if (!sc)
sc               4782 drivers/scsi/be2iscsi/be_main.c 	io_task->scsi_cmnd = sc;
sc               4784 drivers/scsi/be2iscsi/be_main.c 	num_sg = scsi_dma_map(sc);
sc               4791 drivers/scsi/be2iscsi/be_main.c 			    io_task->libiscsi_itt, scsi_bufflen(sc));
sc               4800 drivers/scsi/be2iscsi/be_main.c 	xferlen = scsi_bufflen(sc);
sc               4801 drivers/scsi/be2iscsi/be_main.c 	sg = scsi_sglist(sc);
sc               4802 drivers/scsi/be2iscsi/be_main.c 	if (sc->sc_data_direction == DMA_TO_DEVICE)
sc               1648 drivers/scsi/bnx2fc/bnx2fc_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc               1659 drivers/scsi/bnx2fc/bnx2fc_io.c 	WARN_ON(scsi_sg_count(sc) > BNX2FC_MAX_BDS_PER_CMD);
sc               1664 drivers/scsi/bnx2fc/bnx2fc_io.c 	sg_count = dma_map_sg(&hba->pcidev->dev, scsi_sglist(sc),
sc               1665 drivers/scsi/bnx2fc/bnx2fc_io.c 			      scsi_sg_count(sc), sc->sc_data_direction);
sc               1666 drivers/scsi/bnx2fc/bnx2fc_io.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sc               1683 drivers/scsi/bnx2fc/bnx2fc_io.c 	if (byte_count != scsi_bufflen(sc))
sc               1685 drivers/scsi/bnx2fc/bnx2fc_io.c 			"task_id = 0x%x\n", byte_count, scsi_bufflen(sc),
sc               1692 drivers/scsi/bnx2fc/bnx2fc_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc               1696 drivers/scsi/bnx2fc/bnx2fc_io.c 	if (scsi_sg_count(sc)) {
sc               1722 drivers/scsi/bnx2fc/bnx2fc_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc               1730 drivers/scsi/bnx2fc/bnx2fc_io.c 	if (io_req->bd_tbl->bd_valid && sc && scsi_sg_count(sc)) {
sc               1731 drivers/scsi/bnx2fc/bnx2fc_io.c 		dma_unmap_sg(&hba->pcidev->dev, scsi_sglist(sc),
sc               1732 drivers/scsi/bnx2fc/bnx2fc_io.c 		    scsi_sg_count(sc), sc->sc_data_direction);
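The bnx2fc_io.c lines above (like the aic94xx and fnic hits elsewhere in this listing) walk the DMA-mapped scatterlist with scsi_for_each_sg()/for_each_sg(), copy each segment's address and length into the hardware buffer-descriptor table, and sanity-check the byte total against scsi_bufflen(). A generic userspace sketch of that descriptor fill, with placeholder segment and descriptor layouts standing in for the kernel SG API and the per-driver BD formats:

/* Illustrative sketch: copy mapped scatter-gather segments into a
 * hardware descriptor table and accumulate the total length. */
#include <stdint.h>
#include <stdio.h>

struct segment { uint64_t dma_addr; uint32_t len; };	/* a mapped SG entry */
struct hw_desc { uint64_t addr; uint32_t len; uint32_t flags; };

static uint32_t fill_descs(struct hw_desc *desc, const struct segment *seg,
			   unsigned int nseg)
{
	uint32_t total = 0;

	for (unsigned int i = 0; i < nseg; i++) {
		desc[i].addr  = seg[i].dma_addr;
		desc[i].len   = seg[i].len;
		desc[i].flags = 0;
		total += seg[i].len;	/* drivers compare this to scsi_bufflen() */
	}
	return total;
}

int main(void)
{
	struct segment seg[2] = { { 0x1000, 512 }, { 0x2000, 1024 } };
	struct hw_desc desc[2];

	printf("mapped %u bytes\n", fill_descs(desc, seg, 2));
	return 0;
}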
sc                408 drivers/scsi/bnx2i/bnx2i_hwi.c 		if (!ctask || !ctask->sc)
sc                419 drivers/scsi/bnx2i/bnx2i_hwi.c 		ref_sc = ctask->sc;
sc               1904 drivers/scsi/bnx2i/bnx2i_hwi.c 	struct scsi_cmnd *sc;
sc               1910 drivers/scsi/bnx2i/bnx2i_hwi.c 	if (!task || !task->sc) {
sc               1914 drivers/scsi/bnx2i/bnx2i_hwi.c 	sc = task->sc;
sc               1918 drivers/scsi/bnx2i/bnx2i_hwi.c 	p = &per_cpu(bnx2i_percpu, blk_mq_rq_cpu(sc->request));
sc                145 drivers/scsi/bnx2i/bnx2i_iscsi.c 	struct scsi_cmnd *sc = cmd->scsi_cmd;
sc                155 drivers/scsi/bnx2i/bnx2i_iscsi.c 	BUG_ON(scsi_sg_count(sc) > ISCSI_MAX_BDS_PER_CMD);
sc                157 drivers/scsi/bnx2i/bnx2i_iscsi.c 	sg_count = scsi_dma_map(sc);
sc                159 drivers/scsi/bnx2i/bnx2i_iscsi.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sc                176 drivers/scsi/bnx2i/bnx2i_iscsi.c 	BUG_ON(byte_count != scsi_bufflen(sc));
sc                209 drivers/scsi/bnx2i/bnx2i_iscsi.c 	struct scsi_cmnd *sc = cmd->scsi_cmd;
sc                211 drivers/scsi/bnx2i/bnx2i_iscsi.c 	if (cmd->io_tbl.bd_valid && sc) {
sc                212 drivers/scsi/bnx2i/bnx2i_iscsi.c 		scsi_dma_unmap(sc);
sc               1131 drivers/scsi/bnx2i/bnx2i_iscsi.c static void bnx2i_cpy_scsi_cdb(struct scsi_cmnd *sc, struct bnx2i_cmd *cmd)
sc               1139 drivers/scsi/bnx2i/bnx2i_iscsi.c 	int_to_scsilun(sc->device->lun, (struct scsi_lun *) scsi_lun);
sc               1144 drivers/scsi/bnx2i/bnx2i_iscsi.c 	srcp = (u8 *) sc->cmnd;
sc               1152 drivers/scsi/bnx2i/bnx2i_iscsi.c 	if (sc->cmd_len & 0x3) {
sc               1167 drivers/scsi/bnx2i/bnx2i_iscsi.c 	if (!task->sc || task->state == ISCSI_TASK_PENDING)
sc               1230 drivers/scsi/bnx2i/bnx2i_iscsi.c 	struct scsi_cmnd *sc = task->sc;
sc               1241 drivers/scsi/bnx2i/bnx2i_iscsi.c 	if (!sc)
sc               1247 drivers/scsi/bnx2i/bnx2i_iscsi.c 	cmd->scsi_cmd = sc;
sc               1248 drivers/scsi/bnx2i/bnx2i_iscsi.c 	cmd->req.total_data_transfer_length = scsi_bufflen(sc);
sc               1252 drivers/scsi/bnx2i/bnx2i_iscsi.c 	bnx2i_cpy_scsi_cdb(sc, cmd);
sc               1255 drivers/scsi/bnx2i/bnx2i_iscsi.c 	if (sc->sc_data_direction == DMA_TO_DEVICE) {
sc               1261 drivers/scsi/bnx2i/bnx2i_iscsi.c 		if (scsi_bufflen(sc))
sc               1216 drivers/scsi/cxgbi/libcxgbi.c scmd_get_params(struct scsi_cmnd *sc, struct scatterlist **sgl,
sc               1220 drivers/scsi/cxgbi/libcxgbi.c 	struct scsi_data_buffer *sdb = prot ? scsi_prot(sc) : &sc->sdb;
sc               1429 drivers/scsi/cxgbi/libcxgbi.c 	struct scsi_cmnd *sc = task->sc;
sc               1439 drivers/scsi/cxgbi/libcxgbi.c 	if (sc && sc->sc_data_direction == DMA_FROM_DEVICE &&
sc               1460 drivers/scsi/cxgbi/libcxgbi.c 	struct scsi_cmnd *sc = task->sc;
sc               1471 drivers/scsi/cxgbi/libcxgbi.c 	if (sc && sc->sc_data_direction == DMA_FROM_DEVICE) {
sc               1475 drivers/scsi/cxgbi/libcxgbi.c 		scmd_get_params(sc, &ttinfo->sgl, &ttinfo->nents,
sc               1619 drivers/scsi/cxgbi/libcxgbi.c 		if (task && task->sc) {
sc               1894 drivers/scsi/cxgbi/libcxgbi.c 	struct scsi_cmnd *sc = task->sc;
sc               1905 drivers/scsi/cxgbi/libcxgbi.c 	      sc->sc_data_direction == DMA_TO_DEVICE)))
sc               1919 drivers/scsi/cxgbi/libcxgbi.c 	if (task->sc) {
sc               1971 drivers/scsi/cxgbi/libcxgbi.c 		task, task->sc, skb, (*skb->data) & ISCSI_OPCODE_MASK,
sc               1979 drivers/scsi/cxgbi/libcxgbi.c 	if (task->sc) {
sc               1980 drivers/scsi/cxgbi/libcxgbi.c 		struct scsi_data_buffer *sdb = &task->sc->sdb;
sc               2101 drivers/scsi/cxgbi/libcxgbi.c 	if (!task->sc)
sc               2110 drivers/scsi/cxgbi/libcxgbi.c 			task, task->sc, skb, skb->len, skb->data_len, err);
sc               2150 drivers/scsi/cxgbi/libcxgbi.c 			task, task->sc, tcp_task,
sc               2161 drivers/scsi/cxgbi/libcxgbi.c 	if (!task->sc)
sc                175 drivers/scsi/cxlflash/common.h static inline struct afu_cmd *sc_to_afuc(struct scsi_cmnd *sc)
sc                177 drivers/scsi/cxlflash/common.h 	return PTR_ALIGN(scsi_cmd_priv(sc), __alignof__(struct afu_cmd));
sc                180 drivers/scsi/cxlflash/common.h static inline struct afu_cmd *sc_to_afuci(struct scsi_cmnd *sc)
sc                182 drivers/scsi/cxlflash/common.h 	struct afu_cmd *afuc = sc_to_afuc(sc);
sc                188 drivers/scsi/cxlflash/common.h static inline struct afu_cmd *sc_to_afucz(struct scsi_cmnd *sc)
sc                190 drivers/scsi/cxlflash/common.h 	struct afu_cmd *afuc = sc_to_afuc(sc);
sc                193 drivers/scsi/cxlflash/common.h 	return sc_to_afuci(sc);
sc               2101 drivers/scsi/dc395x.c 		unsigned int sc, fc;
sc               2139 drivers/scsi/dc395x.c 		sc = DC395x_read32(acb, TRM_S1040_SCSI_COUNTER);
sc               2141 drivers/scsi/dc395x.c 		d_left_counter = sc + ((fc & 0x1f)
sc               2150 drivers/scsi/dc395x.c 			sc,
sc                107 drivers/scsi/fnic/fnic_scsi.c 					    struct scsi_cmnd *sc)
sc                109 drivers/scsi/fnic/fnic_scsi.c 	u32 hash = sc->request->tag & (FNIC_IO_LOCKS - 1);
sc                126 drivers/scsi/fnic/fnic_scsi.c 				   struct scsi_cmnd *sc)
sc                132 drivers/scsi/fnic/fnic_scsi.c 	scsi_dma_unmap(sc);
sc                319 drivers/scsi/fnic/fnic_scsi.c 					  struct scsi_cmnd *sc,
sc                323 drivers/scsi/fnic/fnic_scsi.c 	struct fc_rport *rport = starget_to_rport(scsi_target(sc->device));
sc                336 drivers/scsi/fnic/fnic_scsi.c 		for_each_sg(scsi_sglist(sc), sg, sg_count, i) {
sc                354 drivers/scsi/fnic/fnic_scsi.c 					      sc->sense_buffer,
sc                365 drivers/scsi/fnic/fnic_scsi.c 	int_to_scsilun(sc->device->lun, &fc_lun);
sc                382 drivers/scsi/fnic/fnic_scsi.c 	if (sc->sc_data_direction == DMA_FROM_DEVICE)
sc                384 drivers/scsi/fnic/fnic_scsi.c 	else if (sc->sc_data_direction == DMA_TO_DEVICE)
sc                392 drivers/scsi/fnic/fnic_scsi.c 	fnic_queue_wq_copy_desc_icmnd_16(wq, sc->request->tag,
sc                401 drivers/scsi/fnic/fnic_scsi.c 					 sc->cmnd, sc->cmd_len,
sc                402 drivers/scsi/fnic/fnic_scsi.c 					 scsi_bufflen(sc),
sc                422 drivers/scsi/fnic/fnic_scsi.c static int fnic_queuecommand_lck(struct scsi_cmnd *sc, void (*done)(struct scsi_cmnd *))
sc                424 drivers/scsi/fnic/fnic_scsi.c 	struct fc_lport *lp = shost_priv(sc->device->host);
sc                445 drivers/scsi/fnic/fnic_scsi.c 	rport = starget_to_rport(scsi_target(sc->device));
sc                449 drivers/scsi/fnic/fnic_scsi.c 		sc->result = DID_NO_CONNECT << 16;
sc                450 drivers/scsi/fnic/fnic_scsi.c 		done(sc);
sc                459 drivers/scsi/fnic/fnic_scsi.c 		sc->result = ret;
sc                460 drivers/scsi/fnic/fnic_scsi.c 		done(sc);
sc                471 drivers/scsi/fnic/fnic_scsi.c 		sc->result = DID_NO_CONNECT<<16;
sc                472 drivers/scsi/fnic/fnic_scsi.c 		done(sc);
sc                481 drivers/scsi/fnic/fnic_scsi.c 		sc->result = DID_IMM_RETRY << 16;
sc                482 drivers/scsi/fnic/fnic_scsi.c 		done(sc);
sc                497 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_NOT_INITED;
sc                498 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) = FNIC_NO_FLAGS;
sc                510 drivers/scsi/fnic/fnic_scsi.c 	sg_count = scsi_dma_map(sc);
sc                512 drivers/scsi/fnic/fnic_scsi.c 		FNIC_TRACE(fnic_queuecommand, sc->device->host->host_no,
sc                513 drivers/scsi/fnic/fnic_scsi.c 			  sc->request->tag, sc, 0, sc->cmnd[0],
sc                514 drivers/scsi/fnic/fnic_scsi.c 			  sg_count, CMD_STATE(sc));
sc                532 drivers/scsi/fnic/fnic_scsi.c 			scsi_dma_unmap(sc);
sc                552 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc                559 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_CMD_PENDING;
sc                560 drivers/scsi/fnic/fnic_scsi.c 	CMD_SP(sc) = (char *)io_req;
sc                561 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) |= FNIC_IO_INITIALIZED;
sc                562 drivers/scsi/fnic/fnic_scsi.c 	sc->scsi_done = done;
sc                566 drivers/scsi/fnic/fnic_scsi.c 	ret = fnic_queue_wq_copy_desc(fnic, wq, io_req, sc, sg_count);
sc                572 drivers/scsi/fnic/fnic_scsi.c 		FNIC_TRACE(fnic_queuecommand, sc->device->host->host_no,
sc                573 drivers/scsi/fnic/fnic_scsi.c 			  sc->request->tag, sc, 0, 0, 0,
sc                574 drivers/scsi/fnic/fnic_scsi.c 			  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc                575 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc                576 drivers/scsi/fnic/fnic_scsi.c 		CMD_SP(sc) = NULL;
sc                577 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_CMD_COMPLETE;
sc                580 drivers/scsi/fnic/fnic_scsi.c 			fnic_release_ioreq_buf(fnic, io_req, sc);
sc                596 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ISSUED;
sc                599 drivers/scsi/fnic/fnic_scsi.c 	cmd_trace = ((u64)sc->cmnd[0] << 56 | (u64)sc->cmnd[7] << 40 |
sc                600 drivers/scsi/fnic/fnic_scsi.c 			(u64)sc->cmnd[8] << 32 | (u64)sc->cmnd[2] << 24 |
sc                601 drivers/scsi/fnic/fnic_scsi.c 			(u64)sc->cmnd[3] << 16 | (u64)sc->cmnd[4] << 8 |
sc                602 drivers/scsi/fnic/fnic_scsi.c 			sc->cmnd[5]);
sc                604 drivers/scsi/fnic/fnic_scsi.c 	FNIC_TRACE(fnic_queuecommand, sc->device->host->host_no,
sc                605 drivers/scsi/fnic/fnic_scsi.c 		  sc->request->tag, sc, io_req,
sc                607 drivers/scsi/fnic/fnic_scsi.c 		  (((u64)CMD_FLAGS(sc) >> 32) | CMD_STATE(sc)));
sc                829 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc                849 drivers/scsi/fnic/fnic_scsi.c 	sc = scsi_host_find_tag(fnic->lport->host, id);
sc                850 drivers/scsi/fnic/fnic_scsi.c 	WARN_ON_ONCE(!sc);
sc                851 drivers/scsi/fnic/fnic_scsi.c 	if (!sc) {
sc                868 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc                870 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc                874 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_REQ_NULL;
sc                879 drivers/scsi/fnic/fnic_scsi.c 			  fnic_fcpio_status_to_str(hdr_status), id, sc);
sc                891 drivers/scsi/fnic/fnic_scsi.c 	if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc                897 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_DONE;
sc                898 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABTS_PENDING;
sc                901 drivers/scsi/fnic/fnic_scsi.c 			CMD_FLAGS(sc) |= FNIC_IO_ABORTED;
sc                908 drivers/scsi/fnic/fnic_scsi.c 			  id, sc,
sc                915 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_CMD_COMPLETE;
sc                921 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_OK << 16) | icmnd_cmpl->scsi_status;
sc                922 drivers/scsi/fnic/fnic_scsi.c 		xfer_len = scsi_bufflen(sc);
sc                923 drivers/scsi/fnic/fnic_scsi.c 		scsi_set_resid(sc, icmnd_cmpl->residual);
sc                937 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_TIME_OUT << 16) | icmnd_cmpl->scsi_status;
sc                942 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                947 drivers/scsi/fnic/fnic_scsi.c 		scsi_set_resid(sc, icmnd_cmpl->residual);
sc                948 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                953 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_REQUEUE << 16) | icmnd_cmpl->scsi_status;
sc                958 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                963 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                968 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                973 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                980 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ERROR << 16) | icmnd_cmpl->scsi_status;
sc                985 drivers/scsi/fnic/fnic_scsi.c 	CMD_SP(sc) = NULL;
sc                986 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) |= FNIC_IO_DONE;
sc                996 drivers/scsi/fnic/fnic_scsi.c 	fnic_release_ioreq_buf(fnic, io_req, sc);
sc               1002 drivers/scsi/fnic/fnic_scsi.c 		  (u64)icmnd_cmpl->flags << 40 | (u64)sc->cmnd[0] << 32 |
sc               1003 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[2] << 24 | (u64)sc->cmnd[3] << 16 |
sc               1004 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[4] << 8 | sc->cmnd[5];
sc               1007 drivers/scsi/fnic/fnic_scsi.c 		  sc->device->host->host_no, id, sc,
sc               1012 drivers/scsi/fnic/fnic_scsi.c 		  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               1014 drivers/scsi/fnic/fnic_scsi.c 	if (sc->sc_data_direction == DMA_FROM_DEVICE) {
sc               1017 drivers/scsi/fnic/fnic_scsi.c 	} else if (sc->sc_data_direction == DMA_TO_DEVICE) {
sc               1052 drivers/scsi/fnic/fnic_scsi.c 	if (sc->scsi_done)
sc               1053 drivers/scsi/fnic/fnic_scsi.c 		sc->scsi_done(sc);
sc               1066 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               1086 drivers/scsi/fnic/fnic_scsi.c 	sc = scsi_host_find_tag(fnic->lport->host, id & FNIC_TAG_MASK);
sc               1087 drivers/scsi/fnic/fnic_scsi.c 	WARN_ON_ONCE(!sc);
sc               1088 drivers/scsi/fnic/fnic_scsi.c 	if (!sc) {
sc               1095 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc               1097 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1102 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_REQ_NULL;
sc               1106 drivers/scsi/fnic/fnic_scsi.c 			  fnic_fcpio_status_to_str(hdr_status), id, sc);
sc               1117 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_ABTS_COMPLETE;
sc               1118 drivers/scsi/fnic/fnic_scsi.c 		CMD_ABTS_STATUS(sc) = hdr_status;
sc               1119 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_DEV_RST_DONE;
sc               1129 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_IO_ABTS_ISSUED)
sc               1141 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_IO_ABTS_ISSUED)
sc               1148 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_IO_ABTS_ISSUED)
sc               1155 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) != FNIC_IOREQ_ABTS_PENDING) {
sc               1161 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_DONE;
sc               1162 drivers/scsi/fnic/fnic_scsi.c 		CMD_ABTS_STATUS(sc) = hdr_status;
sc               1166 drivers/scsi/fnic/fnic_scsi.c 			CMD_ABTS_STATUS(sc) = FCPIO_SUCCESS;
sc               1168 drivers/scsi/fnic/fnic_scsi.c 		if (!(CMD_FLAGS(sc) & (FNIC_IO_ABORTED | FNIC_IO_DONE)))
sc               1187 drivers/scsi/fnic/fnic_scsi.c 			CMD_SP(sc) = NULL;
sc               1188 drivers/scsi/fnic/fnic_scsi.c 			sc->result = (DID_ERROR << 16);
sc               1192 drivers/scsi/fnic/fnic_scsi.c 			fnic_release_ioreq_buf(fnic, io_req, sc);
sc               1194 drivers/scsi/fnic/fnic_scsi.c 			if (sc->scsi_done) {
sc               1196 drivers/scsi/fnic/fnic_scsi.c 					sc->device->host->host_no, id,
sc               1197 drivers/scsi/fnic/fnic_scsi.c 					sc,
sc               1201 drivers/scsi/fnic/fnic_scsi.c 					(u64)sc->cmnd[0] << 32 |
sc               1202 drivers/scsi/fnic/fnic_scsi.c 					(u64)sc->cmnd[2] << 24 |
sc               1203 drivers/scsi/fnic/fnic_scsi.c 					(u64)sc->cmnd[3] << 16 |
sc               1204 drivers/scsi/fnic/fnic_scsi.c 					(u64)sc->cmnd[4] << 8 | sc->cmnd[5]),
sc               1205 drivers/scsi/fnic/fnic_scsi.c 					(((u64)CMD_FLAGS(sc) << 32) |
sc               1206 drivers/scsi/fnic/fnic_scsi.c 					CMD_STATE(sc)));
sc               1207 drivers/scsi/fnic/fnic_scsi.c 				sc->scsi_done(sc);
sc               1218 drivers/scsi/fnic/fnic_scsi.c 		CMD_LR_STATUS(sc) = hdr_status;
sc               1219 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc               1221 drivers/scsi/fnic/fnic_scsi.c 			CMD_FLAGS(sc) |= FNIC_DEV_RST_ABTS_PENDING;
sc               1223 drivers/scsi/fnic/fnic_scsi.c 				  sc->device->host->host_no, id, sc,
sc               1226 drivers/scsi/fnic/fnic_scsi.c 				  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               1234 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_FLAGS(sc) & FNIC_DEV_RST_TIMED_OUT) {
sc               1238 drivers/scsi/fnic/fnic_scsi.c 				  sc->device->host->host_no, id, sc,
sc               1241 drivers/scsi/fnic/fnic_scsi.c 				  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               1249 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_CMD_COMPLETE;
sc               1250 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_DEV_RST_DONE;
sc               1262 drivers/scsi/fnic/fnic_scsi.c 			     fnic_ioreq_state_to_str(CMD_STATE(sc)), id);
sc               1366 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               1377 drivers/scsi/fnic/fnic_scsi.c 		sc = scsi_host_find_tag(fnic->lport->host, i);
sc               1378 drivers/scsi/fnic/fnic_scsi.c 		if (!sc) {
sc               1383 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1384 drivers/scsi/fnic/fnic_scsi.c 		if ((CMD_FLAGS(sc) & FNIC_DEVICE_RESET) &&
sc               1385 drivers/scsi/fnic/fnic_scsi.c 			!(CMD_FLAGS(sc) & FNIC_DEV_RST_DONE)) {
sc               1390 drivers/scsi/fnic/fnic_scsi.c 			CMD_FLAGS(sc) |= FNIC_DEV_RST_DONE;
sc               1397 drivers/scsi/fnic/fnic_scsi.c 		} else if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET) {
sc               1406 drivers/scsi/fnic/fnic_scsi.c 		CMD_SP(sc) = NULL;
sc               1415 drivers/scsi/fnic/fnic_scsi.c 		fnic_release_ioreq_buf(fnic, io_req, sc);
sc               1419 drivers/scsi/fnic/fnic_scsi.c 		sc->result = DID_TRANSPORT_DISRUPTED << 16;
sc               1422 drivers/scsi/fnic/fnic_scsi.c 			      __func__, sc->request->tag, sc,
sc               1431 drivers/scsi/fnic/fnic_scsi.c 		if (sc->scsi_done) {
sc               1432 drivers/scsi/fnic/fnic_scsi.c 			if (!(CMD_FLAGS(sc) & FNIC_IO_ISSUED))
sc               1435 drivers/scsi/fnic/fnic_scsi.c 				 sc->request->tag, sc);
sc               1438 drivers/scsi/fnic/fnic_scsi.c 				  sc->device->host->host_no, i, sc,
sc               1440 drivers/scsi/fnic/fnic_scsi.c 				  0, ((u64)sc->cmnd[0] << 32 |
sc               1441 drivers/scsi/fnic/fnic_scsi.c 				  (u64)sc->cmnd[2] << 24 |
sc               1442 drivers/scsi/fnic/fnic_scsi.c 				  (u64)sc->cmnd[3] << 16 |
sc               1443 drivers/scsi/fnic/fnic_scsi.c 				  (u64)sc->cmnd[4] << 8 | sc->cmnd[5]),
sc               1444 drivers/scsi/fnic/fnic_scsi.c 				  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               1446 drivers/scsi/fnic/fnic_scsi.c 			sc->scsi_done(sc);
sc               1457 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               1469 drivers/scsi/fnic/fnic_scsi.c 	sc = scsi_host_find_tag(fnic->lport->host, id);
sc               1470 drivers/scsi/fnic/fnic_scsi.c 	if (!sc)
sc               1473 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc               1477 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1486 drivers/scsi/fnic/fnic_scsi.c 	CMD_SP(sc) = NULL;
sc               1491 drivers/scsi/fnic/fnic_scsi.c 	fnic_release_ioreq_buf(fnic, io_req, sc);
sc               1495 drivers/scsi/fnic/fnic_scsi.c 	sc->result = DID_NO_CONNECT << 16;
sc               1499 drivers/scsi/fnic/fnic_scsi.c 	if (sc->scsi_done) {
sc               1501 drivers/scsi/fnic/fnic_scsi.c 			  sc->device->host->host_no, id, sc,
sc               1503 drivers/scsi/fnic/fnic_scsi.c 			  0, ((u64)sc->cmnd[0] << 32 |
sc               1504 drivers/scsi/fnic/fnic_scsi.c 			  (u64)sc->cmnd[2] << 24 | (u64)sc->cmnd[3] << 16 |
sc               1505 drivers/scsi/fnic/fnic_scsi.c 			  (u64)sc->cmnd[4] << 8 | sc->cmnd[5]),
sc               1506 drivers/scsi/fnic/fnic_scsi.c 			  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               1508 drivers/scsi/fnic/fnic_scsi.c 		sc->scsi_done(sc);
sc               1567 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               1585 drivers/scsi/fnic/fnic_scsi.c 		sc = scsi_host_find_tag(fnic->lport->host, tag);
sc               1586 drivers/scsi/fnic/fnic_scsi.c 		if (!sc) {
sc               1591 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1598 drivers/scsi/fnic/fnic_scsi.c 		if ((CMD_FLAGS(sc) & FNIC_DEVICE_RESET) &&
sc               1599 drivers/scsi/fnic/fnic_scsi.c 			(!(CMD_FLAGS(sc) & FNIC_DEV_RST_ISSUED))) {
sc               1602 drivers/scsi/fnic/fnic_scsi.c 			sc);
sc               1611 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc               1619 drivers/scsi/fnic/fnic_scsi.c 			fnic_ioreq_state_to_str(CMD_STATE(sc)));
sc               1622 drivers/scsi/fnic/fnic_scsi.c 		if (!(CMD_FLAGS(sc) & FNIC_IO_ISSUED)) {
sc               1627 drivers/scsi/fnic/fnic_scsi.c 				  sc, tag, CMD_FLAGS(sc), CMD_STATE(sc));
sc               1629 drivers/scsi/fnic/fnic_scsi.c 		old_ioreq_state = CMD_STATE(sc);
sc               1630 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_ABTS_PENDING;
sc               1631 drivers/scsi/fnic/fnic_scsi.c 		CMD_ABTS_STATUS(sc) = FCPIO_INVALID_CODE;
sc               1632 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET) {
sc               1637 drivers/scsi/fnic/fnic_scsi.c 			sc);
sc               1648 drivers/scsi/fnic/fnic_scsi.c 		int_to_scsilun(sc->device->lun, &fc_lun);
sc               1660 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING)
sc               1661 drivers/scsi/fnic/fnic_scsi.c 				CMD_STATE(sc) = old_ioreq_state;
sc               1665 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET)
sc               1666 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_DEV_RST_TERM_ISSUED;
sc               1668 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_IO_INTERNAL_TERM_ISSUED;
sc               1687 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               1730 drivers/scsi/fnic/fnic_scsi.c 		sc = scsi_host_find_tag(fnic->lport->host, tag);
sc               1731 drivers/scsi/fnic/fnic_scsi.c 		if (!sc) {
sc               1736 drivers/scsi/fnic/fnic_scsi.c 		cmd_rport = starget_to_rport(scsi_target(sc->device));
sc               1742 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1749 drivers/scsi/fnic/fnic_scsi.c 		if ((CMD_FLAGS(sc) & FNIC_DEVICE_RESET) &&
sc               1750 drivers/scsi/fnic/fnic_scsi.c 			(!(CMD_FLAGS(sc) & FNIC_DEV_RST_ISSUED))) {
sc               1753 drivers/scsi/fnic/fnic_scsi.c 			sc);
sc               1761 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc               1769 drivers/scsi/fnic/fnic_scsi.c 			fnic_ioreq_state_to_str(CMD_STATE(sc)));
sc               1771 drivers/scsi/fnic/fnic_scsi.c 		if (!(CMD_FLAGS(sc) & FNIC_IO_ISSUED)) {
sc               1776 drivers/scsi/fnic/fnic_scsi.c 				  sc, tag, CMD_FLAGS(sc), CMD_STATE(sc));
sc               1778 drivers/scsi/fnic/fnic_scsi.c 		old_ioreq_state = CMD_STATE(sc);
sc               1779 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_ABTS_PENDING;
sc               1780 drivers/scsi/fnic/fnic_scsi.c 		CMD_ABTS_STATUS(sc) = FCPIO_INVALID_CODE;
sc               1781 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET) {
sc               1785 drivers/scsi/fnic/fnic_scsi.c 			"fnic_terminate_rport_io dev rst sc 0x%p\n", sc);
sc               1797 drivers/scsi/fnic/fnic_scsi.c 		int_to_scsilun(sc->device->lun, &fc_lun);
sc               1809 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING)
sc               1810 drivers/scsi/fnic/fnic_scsi.c 				CMD_STATE(sc) = old_ioreq_state;
sc               1814 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET)
sc               1815 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_DEV_RST_TERM_ISSUED;
sc               1817 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_IO_INTERNAL_TERM_ISSUED;
sc               1833 drivers/scsi/fnic/fnic_scsi.c int fnic_abort_cmd(struct scsi_cmnd *sc)
sc               1854 drivers/scsi/fnic/fnic_scsi.c 	fc_block_scsi_eh(sc);
sc               1857 drivers/scsi/fnic/fnic_scsi.c 	lp = shost_priv(sc->device->host);
sc               1864 drivers/scsi/fnic/fnic_scsi.c 	rport = starget_to_rport(scsi_target(sc->device));
sc               1865 drivers/scsi/fnic/fnic_scsi.c 	tag = sc->request->tag;
sc               1869 drivers/scsi/fnic/fnic_scsi.c 		rport->port_id, sc->device->lun, tag, CMD_FLAGS(sc));
sc               1871 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) = FNIC_NO_FLAGS;
sc               1890 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc               1892 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1900 drivers/scsi/fnic/fnic_scsi.c 	if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc               1922 drivers/scsi/fnic/fnic_scsi.c 		"CBD Opcode: %02x Abort issued time: %lu msec\n", sc->cmnd[0], abt_issued_time);
sc               1929 drivers/scsi/fnic/fnic_scsi.c 	old_ioreq_state = CMD_STATE(sc);
sc               1930 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_ABTS_PENDING;
sc               1931 drivers/scsi/fnic/fnic_scsi.c 	CMD_ABTS_STATUS(sc) = FCPIO_INVALID_CODE;
sc               1948 drivers/scsi/fnic/fnic_scsi.c 	int_to_scsilun(sc->device->lun, &fc_lun);
sc               1950 drivers/scsi/fnic/fnic_scsi.c 	if (fnic_queue_abort_io_req(fnic, sc->request->tag, task_req,
sc               1953 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING)
sc               1954 drivers/scsi/fnic/fnic_scsi.c 			CMD_STATE(sc) = old_ioreq_state;
sc               1955 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1963 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABTS_ISSUED;
sc               1966 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_TERM_ISSUED;
sc               1984 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               1988 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_REQ_NULL;
sc               1995 drivers/scsi/fnic/fnic_scsi.c 	if (CMD_ABTS_STATUS(sc) == FCPIO_INVALID_CODE) {
sc               2002 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_TIMED_OUT;
sc               2009 drivers/scsi/fnic/fnic_scsi.c 	if (!(CMD_FLAGS(sc) & (FNIC_IO_ABORTED | FNIC_IO_DONE))) {
sc               2018 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_ABTS_COMPLETE;
sc               2026 drivers/scsi/fnic/fnic_scsi.c 	if (CMD_ABTS_STATUS(sc) == FCPIO_SUCCESS)
sc               2027 drivers/scsi/fnic/fnic_scsi.c 		CMD_SP(sc) = NULL;
sc               2036 drivers/scsi/fnic/fnic_scsi.c 	fnic_release_ioreq_buf(fnic, io_req, sc);
sc               2039 drivers/scsi/fnic/fnic_scsi.c 	if (sc->scsi_done) {
sc               2041 drivers/scsi/fnic/fnic_scsi.c 		sc->result = (DID_ABORT << 16);
sc               2042 drivers/scsi/fnic/fnic_scsi.c 		sc->scsi_done(sc);
sc               2051 drivers/scsi/fnic/fnic_scsi.c 	FNIC_TRACE(fnic_abort_cmd, sc->device->host->host_no,
sc               2052 drivers/scsi/fnic/fnic_scsi.c 		  sc->request->tag, sc,
sc               2054 drivers/scsi/fnic/fnic_scsi.c 		  0, ((u64)sc->cmnd[0] << 32 |
sc               2055 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[2] << 24 | (u64)sc->cmnd[3] << 16 |
sc               2056 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[4] << 8 | sc->cmnd[5]),
sc               2057 drivers/scsi/fnic/fnic_scsi.c 		  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               2067 drivers/scsi/fnic/fnic_scsi.c 				       struct scsi_cmnd *sc,
sc               2100 drivers/scsi/fnic/fnic_scsi.c 	int_to_scsilun(sc->device->lun, &fc_lun);
sc               2102 drivers/scsi/fnic/fnic_scsi.c 	fnic_queue_wq_copy_desc_itmf(wq, sc->request->tag | FNIC_TAG_DEV_RST,
sc               2136 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               2145 drivers/scsi/fnic/fnic_scsi.c 		sc = scsi_host_find_tag(fnic->lport->host, tag);
sc               2150 drivers/scsi/fnic/fnic_scsi.c 		if (!sc || ((sc == lr_sc) && new_sc) || sc->device != lun_dev) {
sc               2155 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2157 drivers/scsi/fnic/fnic_scsi.c 		if (!io_req || sc->device != lun_dev) {
sc               2168 drivers/scsi/fnic/fnic_scsi.c 			      fnic_ioreq_state_to_str(CMD_STATE(sc)));
sc               2170 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING) {
sc               2174 drivers/scsi/fnic/fnic_scsi.c 		if ((CMD_FLAGS(sc) & FNIC_DEVICE_RESET) &&
sc               2175 drivers/scsi/fnic/fnic_scsi.c 			(!(CMD_FLAGS(sc) & FNIC_DEV_RST_ISSUED))) {
sc               2178 drivers/scsi/fnic/fnic_scsi.c 				sc);
sc               2186 drivers/scsi/fnic/fnic_scsi.c 			  __func__, fnic_ioreq_state_to_str(CMD_STATE(sc)));
sc               2187 drivers/scsi/fnic/fnic_scsi.c 		old_ioreq_state = CMD_STATE(sc);
sc               2195 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_ABTS_PENDING;
sc               2200 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET) {
sc               2203 drivers/scsi/fnic/fnic_scsi.c 				  "%s: dev rst sc 0x%p\n", __func__, sc);
sc               2206 drivers/scsi/fnic/fnic_scsi.c 		CMD_ABTS_STATUS(sc) = FCPIO_INVALID_CODE;
sc               2211 drivers/scsi/fnic/fnic_scsi.c 		int_to_scsilun(sc->device->lun, &fc_lun);
sc               2217 drivers/scsi/fnic/fnic_scsi.c 			io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2220 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING)
sc               2221 drivers/scsi/fnic/fnic_scsi.c 				CMD_STATE(sc) = old_ioreq_state;
sc               2227 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_DEVICE_RESET)
sc               2228 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_DEV_RST_TERM_ISSUED;
sc               2231 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_IO_INTERNAL_TERM_ISSUED;
sc               2239 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2242 drivers/scsi/fnic/fnic_scsi.c 			CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_REQ_NULL;
sc               2249 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_ABTS_STATUS(sc) == FCPIO_INVALID_CODE) {
sc               2251 drivers/scsi/fnic/fnic_scsi.c 			CMD_FLAGS(sc) |= FNIC_IO_ABT_TERM_DONE;
sc               2255 drivers/scsi/fnic/fnic_scsi.c 		CMD_STATE(sc) = FNIC_IOREQ_ABTS_COMPLETE;
sc               2258 drivers/scsi/fnic/fnic_scsi.c 		if (sc != lr_sc)
sc               2259 drivers/scsi/fnic/fnic_scsi.c 			CMD_SP(sc) = NULL;
sc               2263 drivers/scsi/fnic/fnic_scsi.c 		if (sc != lr_sc) {
sc               2264 drivers/scsi/fnic/fnic_scsi.c 			fnic_release_ioreq_buf(fnic, io_req, sc);
sc               2272 drivers/scsi/fnic/fnic_scsi.c 		if (sc->scsi_done) {
sc               2274 drivers/scsi/fnic/fnic_scsi.c 			sc->result = DID_RESET << 16;
sc               2275 drivers/scsi/fnic/fnic_scsi.c 			sc->scsi_done(sc);
sc               2294 drivers/scsi/fnic/fnic_scsi.c fnic_scsi_host_start_tag(struct fnic *fnic, struct scsi_cmnd *sc)
sc               2296 drivers/scsi/fnic/fnic_scsi.c 	struct request_queue *q = sc->request->q;
sc               2303 drivers/scsi/fnic/fnic_scsi.c 	sc->tag = sc->request->tag = dummy->tag;
sc               2304 drivers/scsi/fnic/fnic_scsi.c 	sc->host_scribble = (unsigned char *)dummy;
sc               2314 drivers/scsi/fnic/fnic_scsi.c fnic_scsi_host_end_tag(struct fnic *fnic, struct scsi_cmnd *sc)
sc               2316 drivers/scsi/fnic/fnic_scsi.c 	struct request *dummy = (struct request *)sc->host_scribble;
sc               2326 drivers/scsi/fnic/fnic_scsi.c int fnic_device_reset(struct scsi_cmnd *sc)
sc               2346 drivers/scsi/fnic/fnic_scsi.c 	fc_block_scsi_eh(sc);
sc               2349 drivers/scsi/fnic/fnic_scsi.c 	lp = shost_priv(sc->device->host);
sc               2357 drivers/scsi/fnic/fnic_scsi.c 	rport = starget_to_rport(scsi_target(sc->device));
sc               2360 drivers/scsi/fnic/fnic_scsi.c 		      rport->port_id, sc->device->lun, sc);
sc               2371 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) = FNIC_DEVICE_RESET;
sc               2374 drivers/scsi/fnic/fnic_scsi.c 	tag = sc->request->tag;
sc               2380 drivers/scsi/fnic/fnic_scsi.c 		tag = fnic_scsi_host_start_tag(fnic, sc);
sc               2386 drivers/scsi/fnic/fnic_scsi.c 	io_lock = fnic_io_lock_hash(fnic, sc);
sc               2388 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2402 drivers/scsi/fnic/fnic_scsi.c 		CMD_SP(sc) = (char *)io_req;
sc               2405 drivers/scsi/fnic/fnic_scsi.c 	CMD_STATE(sc) = FNIC_IOREQ_CMD_PENDING;
sc               2406 drivers/scsi/fnic/fnic_scsi.c 	CMD_LR_STATUS(sc) = FCPIO_INVALID_CODE;
sc               2415 drivers/scsi/fnic/fnic_scsi.c 	if (fnic_queue_dr_io_req(fnic, sc, io_req)) {
sc               2417 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2423 drivers/scsi/fnic/fnic_scsi.c 	CMD_FLAGS(sc) |= FNIC_DEV_RST_ISSUED;
sc               2434 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2438 drivers/scsi/fnic/fnic_scsi.c 				"io_req is null tag 0x%x sc 0x%p\n", tag, sc);
sc               2443 drivers/scsi/fnic/fnic_scsi.c 	status = CMD_LR_STATUS(sc);
sc               2453 drivers/scsi/fnic/fnic_scsi.c 		CMD_FLAGS(sc) |= FNIC_DEV_RST_TIMED_OUT;
sc               2455 drivers/scsi/fnic/fnic_scsi.c 		int_to_scsilun(sc->device->lun, &fc_lun);
sc               2462 drivers/scsi/fnic/fnic_scsi.c 			if (CMD_FLAGS(sc) & FNIC_DEV_RST_TERM_ISSUED) {
sc               2475 drivers/scsi/fnic/fnic_scsi.c 				CMD_FLAGS(sc) |= FNIC_DEV_RST_TERM_ISSUED;
sc               2476 drivers/scsi/fnic/fnic_scsi.c 				CMD_STATE(sc) = FNIC_IOREQ_ABTS_PENDING;
sc               2481 drivers/scsi/fnic/fnic_scsi.c 				"tag 0x%x sc 0x%p\n", tag, sc);
sc               2487 drivers/scsi/fnic/fnic_scsi.c 			if (!(CMD_FLAGS(sc) & FNIC_DEV_RST_DONE)) {
sc               2493 drivers/scsi/fnic/fnic_scsi.c 				io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2508 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2519 drivers/scsi/fnic/fnic_scsi.c 	if (fnic_clean_pending_aborts(fnic, sc, new_sc)) {
sc               2521 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2530 drivers/scsi/fnic/fnic_scsi.c 	io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2537 drivers/scsi/fnic/fnic_scsi.c 		CMD_SP(sc) = NULL;
sc               2543 drivers/scsi/fnic/fnic_scsi.c 		fnic_release_ioreq_buf(fnic, io_req, sc);
sc               2548 drivers/scsi/fnic/fnic_scsi.c 	FNIC_TRACE(fnic_device_reset, sc->device->host->host_no,
sc               2549 drivers/scsi/fnic/fnic_scsi.c 		  sc->request->tag, sc,
sc               2551 drivers/scsi/fnic/fnic_scsi.c 		  0, ((u64)sc->cmnd[0] << 32 |
sc               2552 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[2] << 24 | (u64)sc->cmnd[3] << 16 |
sc               2553 drivers/scsi/fnic/fnic_scsi.c 		  (u64)sc->cmnd[4] << 8 | sc->cmnd[5]),
sc               2554 drivers/scsi/fnic/fnic_scsi.c 		  (((u64)CMD_FLAGS(sc) << 32) | CMD_STATE(sc)));
sc               2558 drivers/scsi/fnic/fnic_scsi.c 		fnic_scsi_host_end_tag(fnic, sc);
sc               2616 drivers/scsi/fnic/fnic_scsi.c int fnic_host_reset(struct scsi_cmnd *sc)
sc               2620 drivers/scsi/fnic/fnic_scsi.c 	struct Scsi_Host *shost = sc->device->host;
sc               2790 drivers/scsi/fnic/fnic_scsi.c 	struct scsi_cmnd *sc;
sc               2798 drivers/scsi/fnic/fnic_scsi.c 		sc = scsi_host_find_tag(fnic->lport->host, tag);
sc               2803 drivers/scsi/fnic/fnic_scsi.c 		if (!sc || (lr_sc && (sc->device != lun_dev || sc == lr_sc)))
sc               2806 drivers/scsi/fnic/fnic_scsi.c 		io_lock = fnic_io_lock_hash(fnic, sc);
sc               2809 drivers/scsi/fnic/fnic_scsi.c 		io_req = (struct fnic_io_req *)CMD_SP(sc);
sc               2811 drivers/scsi/fnic/fnic_scsi.c 		if (!io_req || sc->device != lun_dev) {
sc               2822 drivers/scsi/fnic/fnic_scsi.c 			      fnic_ioreq_state_to_str(CMD_STATE(sc)));
sc               2824 drivers/scsi/fnic/fnic_scsi.c 		if (CMD_STATE(sc) == FNIC_IOREQ_ABTS_PENDING)
sc                517 drivers/scsi/iscsi_tcp.c 	if (!task->sc)
sc                520 drivers/scsi/iscsi_tcp.c 		struct scsi_data_buffer *sdb = &task->sc->sdb;
sc                475 drivers/scsi/libfc/fc_fcp.c 	struct scsi_cmnd *sc = fsp->cmd;
sc                523 drivers/scsi/libfc/fc_fcp.c 	sg = scsi_sglist(sc);
sc                524 drivers/scsi/libfc/fc_fcp.c 	nents = scsi_sg_count(sc);
sc                597 drivers/scsi/libfc/fc_fcp.c 	struct scsi_cmnd *sc;
sc                641 drivers/scsi/libfc/fc_fcp.c 	sc = fsp->cmd;
sc                650 drivers/scsi/libfc/fc_fcp.c 	sg = scsi_sglist(sc);
sc                183 drivers/scsi/libiscsi.c 	struct scsi_cmnd *cmd = task->sc;
sc                249 drivers/scsi/libiscsi.c 		if (hdr_lun != task->sc->device->lun)
sc                311 drivers/scsi/libiscsi.c 	struct scsi_cmnd *sc = task->sc;
sc                341 drivers/scsi/libiscsi.c 	int_to_scsilun(sc->device->lun, &hdr->lun);
sc                344 drivers/scsi/libiscsi.c 	cmd_len = sc->cmd_len;
sc                353 drivers/scsi/libiscsi.c 	memcpy(hdr->cdb, sc->cmnd, cmd_len);
sc                356 drivers/scsi/libiscsi.c 	if (scsi_get_prot_op(sc) != SCSI_PROT_NORMAL)
sc                359 drivers/scsi/libiscsi.c 	transfer_length = scsi_transfer_length(sc);
sc                361 drivers/scsi/libiscsi.c 	if (sc->sc_data_direction == DMA_TO_DEVICE) {
sc                408 drivers/scsi/libiscsi.c 		if (sc->sc_data_direction == DMA_FROM_DEVICE)
sc                431 drivers/scsi/libiscsi.c 			  sc->sc_data_direction == DMA_TO_DEVICE ?
sc                432 drivers/scsi/libiscsi.c 			  "write" : "read", conn->id, sc, sc->cmnd[0],
sc                451 drivers/scsi/libiscsi.c 	struct scsi_cmnd *sc = task->sc;
sc                455 drivers/scsi/libiscsi.c 			  task->itt, task->state, task->sc);
sc                459 drivers/scsi/libiscsi.c 	task->sc = NULL;
sc                468 drivers/scsi/libiscsi.c 	if (sc) {
sc                470 drivers/scsi/libiscsi.c 		sc->SCp.ptr = NULL;
sc                476 drivers/scsi/libiscsi.c 			sc->scsi_done(sc);
sc                517 drivers/scsi/libiscsi.c 			  task->itt, task->state, task->sc);
sc                576 drivers/scsi/libiscsi.c 	struct scsi_cmnd *sc;
sc                584 drivers/scsi/libiscsi.c 	sc = task->sc;
sc                585 drivers/scsi/libiscsi.c 	if (!sc)
sc                601 drivers/scsi/libiscsi.c 	sc->result = err << 16;
sc                602 drivers/scsi/libiscsi.c 	scsi_set_resid(sc, scsi_bufflen(sc));
sc                711 drivers/scsi/libiscsi.c 	task->sc = NULL;
sc                796 drivers/scsi/libiscsi.c 	struct scsi_cmnd *sc = task->sc;
sc                801 drivers/scsi/libiscsi.c 	sc->result = (DID_OK << 16) | rhdr->cmd_status;
sc                816 drivers/scsi/libiscsi.c 			sc->result = DRIVER_SENSE << 24 |
sc                818 drivers/scsi/libiscsi.c 			scsi_build_sense_buffer(1, sc->sense_buffer,
sc                820 drivers/scsi/libiscsi.c 			scsi_set_sense_information(sc->sense_buffer,
sc                828 drivers/scsi/libiscsi.c 		sc->result = DID_ERROR << 16;
sc                840 drivers/scsi/libiscsi.c 			sc->result = DID_BAD_TARGET << 16;
sc                848 drivers/scsi/libiscsi.c 		memcpy(sc->sense_buffer, data + 2,
sc                857 drivers/scsi/libiscsi.c 		sc->result = (DID_BAD_TARGET << 16) | rhdr->cmd_status;
sc                866 drivers/scsi/libiscsi.c 		     res_count <= scsi_bufflen(sc)))
sc                868 drivers/scsi/libiscsi.c 			scsi_set_resid(sc, res_count);
sc                870 drivers/scsi/libiscsi.c 			sc->result = (DID_BAD_TARGET << 16) | rhdr->cmd_status;
sc                874 drivers/scsi/libiscsi.c 			  sc, sc->result, task->itt);
sc                893 drivers/scsi/libiscsi.c 	struct scsi_cmnd *sc = task->sc;
sc                899 drivers/scsi/libiscsi.c 	sc->result = (DID_OK << 16) | rhdr->cmd_status;
sc                907 drivers/scsi/libiscsi.c 		     res_count <= sc->sdb.length))
sc                908 drivers/scsi/libiscsi.c 			scsi_set_resid(sc, res_count);
sc                910 drivers/scsi/libiscsi.c 			sc->result = (DID_BAD_TARGET << 16) | rhdr->cmd_status;
sc                915 drivers/scsi/libiscsi.c 			  sc, sc->result, task->itt);
sc               1326 drivers/scsi/libiscsi.c 	if (!task || !task->sc)
sc               1329 drivers/scsi/libiscsi.c 	if (task->sc->SCp.phase != conn->session->age) {
sc               1332 drivers/scsi/libiscsi.c 				  task->sc->SCp.phase, conn->session->age);
sc               1599 drivers/scsi/libiscsi.c 						  struct scsi_cmnd *sc)
sc               1607 drivers/scsi/libiscsi.c 	sc->SCp.phase = conn->session->age;
sc               1608 drivers/scsi/libiscsi.c 	sc->SCp.ptr = (char *) task;
sc               1613 drivers/scsi/libiscsi.c 	task->sc = sc;
sc               1635 drivers/scsi/libiscsi.c int iscsi_queuecommand(struct Scsi_Host *host, struct scsi_cmnd *sc)
sc               1644 drivers/scsi/libiscsi.c 	sc->result = 0;
sc               1645 drivers/scsi/libiscsi.c 	sc->SCp.ptr = NULL;
sc               1649 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               1655 drivers/scsi/libiscsi.c 		sc->result = reason;
sc               1674 drivers/scsi/libiscsi.c 				sc->result = DID_NO_CONNECT << 16;
sc               1680 drivers/scsi/libiscsi.c 			sc->result = DID_IMM_RETRY << 16;
sc               1684 drivers/scsi/libiscsi.c 			sc->result = DID_IMM_RETRY << 16;
sc               1688 drivers/scsi/libiscsi.c 			sc->result = DID_TRANSPORT_FAILFAST << 16;
sc               1692 drivers/scsi/libiscsi.c 			sc->result = DID_NO_CONNECT << 16;
sc               1696 drivers/scsi/libiscsi.c 			sc->result = DID_NO_CONNECT << 16;
sc               1704 drivers/scsi/libiscsi.c 		sc->result = DID_NO_CONNECT << 16;
sc               1710 drivers/scsi/libiscsi.c 		sc->result = DID_REQUEUE << 16;
sc               1719 drivers/scsi/libiscsi.c 	task = iscsi_alloc_task(conn, sc);
sc               1732 drivers/scsi/libiscsi.c 				sc->result = DID_ABORT << 16;
sc               1759 drivers/scsi/libiscsi.c 			  sc->cmnd[0], reason);
sc               1769 drivers/scsi/libiscsi.c 			  sc->cmnd[0], reason);
sc               1770 drivers/scsi/libiscsi.c 	scsi_set_resid(sc, scsi_bufflen(sc));
sc               1771 drivers/scsi/libiscsi.c 	sc->scsi_done(sc);
sc               1861 drivers/scsi/libiscsi.c 		if (!task->sc || task->state == ISCSI_TASK_FREE)
sc               1864 drivers/scsi/libiscsi.c 		if (lun != -1 && lun != task->sc->device->lun)
sc               1869 drivers/scsi/libiscsi.c 				  task->sc, task->itt, task->state);
sc               1934 drivers/scsi/libiscsi.c enum blk_eh_timer_return iscsi_eh_cmd_timed_out(struct scsi_cmnd *sc)
sc               1943 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               1946 drivers/scsi/libiscsi.c 	ISCSI_DBG_EH(session, "scsi cmd %p timedout\n", sc);
sc               1949 drivers/scsi/libiscsi.c 	task = (struct iscsi_task *)sc->SCp.ptr;
sc               1968 drivers/scsi/libiscsi.c 			sc->result = DID_NO_CONNECT << 16;
sc               2018 drivers/scsi/libiscsi.c 		if (!running_task->sc || running_task == task ||
sc               2026 drivers/scsi/libiscsi.c 		if (time_after(running_task->sc->jiffies_at_alloc,
sc               2027 drivers/scsi/libiscsi.c 			       task->sc->jiffies_at_alloc))
sc               2138 drivers/scsi/libiscsi.c int iscsi_eh_abort(struct scsi_cmnd *sc)
sc               2147 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               2150 drivers/scsi/libiscsi.c 	ISCSI_DBG_EH(session, "aborting sc %p\n", sc);
sc               2158 drivers/scsi/libiscsi.c 	if (!sc->SCp.ptr) {
sc               2171 drivers/scsi/libiscsi.c 	    sc->SCp.phase != session->age) {
sc               2183 drivers/scsi/libiscsi.c 	task = (struct iscsi_task *)sc->SCp.ptr;
sc               2185 drivers/scsi/libiscsi.c 		     sc, task->itt);
sc               2188 drivers/scsi/libiscsi.c 	if (!task->sc) {
sc               2234 drivers/scsi/libiscsi.c 		if (!sc->SCp.ptr) {
sc               2252 drivers/scsi/libiscsi.c 		     sc, task->itt);
sc               2259 drivers/scsi/libiscsi.c 	ISCSI_DBG_EH(session, "abort failed [sc %p itt 0x%x]\n", sc,
sc               2266 drivers/scsi/libiscsi.c static void iscsi_prep_lun_reset_pdu(struct scsi_cmnd *sc, struct iscsi_tm *hdr)
sc               2272 drivers/scsi/libiscsi.c 	int_to_scsilun(sc->device->lun, &hdr->lun);
sc               2276 drivers/scsi/libiscsi.c int iscsi_eh_device_reset(struct scsi_cmnd *sc)
sc               2284 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               2287 drivers/scsi/libiscsi.c 	ISCSI_DBG_EH(session, "LU Reset [sc %p lun %llu]\n", sc,
sc               2288 drivers/scsi/libiscsi.c 		     sc->device->lun);
sc               2306 drivers/scsi/libiscsi.c 	iscsi_prep_lun_reset_pdu(sc, hdr);
sc               2333 drivers/scsi/libiscsi.c 	fail_scsi_tasks(conn, sc->device->lun, DID_ERROR);
sc               2371 drivers/scsi/libiscsi.c int iscsi_eh_session_reset(struct scsi_cmnd *sc)
sc               2377 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               2424 drivers/scsi/libiscsi.c static void iscsi_prep_tgt_reset_pdu(struct scsi_cmnd *sc, struct iscsi_tm *hdr)
sc               2439 drivers/scsi/libiscsi.c static int iscsi_eh_target_reset(struct scsi_cmnd *sc)
sc               2447 drivers/scsi/libiscsi.c 	cls_session = starget_to_session(scsi_target(sc->device));
sc               2450 drivers/scsi/libiscsi.c 	ISCSI_DBG_EH(session, "tgt Reset [sc %p tgt %s]\n", sc,
sc               2469 drivers/scsi/libiscsi.c 	iscsi_prep_tgt_reset_pdu(sc, hdr);
sc               2519 drivers/scsi/libiscsi.c int iscsi_eh_recover_target(struct scsi_cmnd *sc)
sc               2523 drivers/scsi/libiscsi.c 	rc = iscsi_eh_target_reset(sc);
sc               2525 drivers/scsi/libiscsi.c 		rc = iscsi_eh_session_reset(sc);
sc               3070 drivers/scsi/libiscsi.c 		if (task->sc)
sc                460 drivers/scsi/libiscsi_tcp.c 	if (!task->sc)
sc                492 drivers/scsi/libiscsi_tcp.c 	unsigned total_in_length = task->sc->sdb.length;
sc                557 drivers/scsi/libiscsi_tcp.c 	if (!task->sc || session->state != ISCSI_STATE_LOGGED_IN) {
sc                577 drivers/scsi/libiscsi_tcp.c 	if (data_offset + data_length > task->sc->sdb.length) {
sc                581 drivers/scsi/libiscsi_tcp.c 				  data_offset, task->sc->sdb.length);
sc                693 drivers/scsi/libiscsi_tcp.c 			struct scsi_data_buffer *sdb = &task->sc->sdb;
sc                740 drivers/scsi/libiscsi_tcp.c 		else if (task->sc->sc_data_direction == DMA_TO_DEVICE) {
sc                965 drivers/scsi/libiscsi_tcp.c 	struct scsi_cmnd *sc = task->sc;
sc                968 drivers/scsi/libiscsi_tcp.c 	if (!sc) {
sc               1051 drivers/scsi/libiscsi_tcp.c 	if (!task->sc) {
sc               1058 drivers/scsi/libiscsi_tcp.c 	if (task->sc->sc_data_direction != DMA_TO_DEVICE)
sc                 37 drivers/scsi/libsas/sas_scsi_host.c static void sas_end_task(struct scsi_cmnd *sc, struct sas_task *task)
sc                 56 drivers/scsi/libsas/sas_scsi_host.c 			scsi_set_resid(sc, ts->residual);
sc                 57 drivers/scsi/libsas/sas_scsi_host.c 			if (scsi_bufflen(sc) - scsi_get_resid(sc) < sc->underflow)
sc                 86 drivers/scsi/libsas/sas_scsi_host.c 			memcpy(sc->sense_buffer, ts->buf,
sc                 96 drivers/scsi/libsas/sas_scsi_host.c 	sc->result = (hs << 16) | stat;
sc                 97 drivers/scsi/libsas/sas_scsi_host.c 	ASSIGN_SAS_TASK(sc, NULL);
sc                103 drivers/scsi/libsas/sas_scsi_host.c 	struct scsi_cmnd *sc = task->uldd_task;
sc                112 drivers/scsi/libsas/sas_scsi_host.c 		ASSIGN_SAS_TASK(sc, NULL);
sc                121 drivers/scsi/libsas/sas_scsi_host.c 	if (unlikely(!sc)) {
sc                127 drivers/scsi/libsas/sas_scsi_host.c 	sas_end_task(sc, task);
sc                128 drivers/scsi/libsas/sas_scsi_host.c 	sc->scsi_done(sc);
sc                894 drivers/scsi/libsas/sas_scsi_host.c 	struct scsi_cmnd *sc = task->uldd_task;
sc                897 drivers/scsi/libsas/sas_scsi_host.c 	if (!sc) {
sc                911 drivers/scsi/libsas/sas_scsi_host.c 		blk_abort_request(sc->request);
sc                 88 drivers/scsi/lpfc/lpfc_scsi.c lpfc_prot_group_type(struct lpfc_hba *phba, struct scsi_cmnd *sc);
sc                 91 drivers/scsi/lpfc/lpfc_scsi.c lpfc_cmd_blksize(struct scsi_cmnd *sc)
sc                 93 drivers/scsi/lpfc/lpfc_scsi.c 	return sc->device->sector_size;
sc                 99 drivers/scsi/lpfc/lpfc_scsi.c lpfc_cmd_protect(struct scsi_cmnd *sc, int flag)
sc                105 drivers/scsi/lpfc/lpfc_scsi.c lpfc_cmd_guard_csum(struct scsi_cmnd *sc)
sc                107 drivers/scsi/lpfc/lpfc_scsi.c 	if (lpfc_prot_group_type(NULL, sc) == LPFC_PG_TYPE_NO_DIF)
sc                109 drivers/scsi/lpfc/lpfc_scsi.c 	if (scsi_host_get_guard(sc->device->host) == SHOST_DIX_GUARD_IP)
sc                984 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_err_inject(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc                992 drivers/scsi/lpfc/lpfc_scsi.c 	uint32_t op = scsi_get_prot_op(sc);
sc               1002 drivers/scsi/lpfc/lpfc_scsi.c 	sgpe = scsi_prot_sglist(sc);
sc               1003 drivers/scsi/lpfc/lpfc_scsi.c 	lba = scsi_get_lba(sc);
sc               1007 drivers/scsi/lpfc/lpfc_scsi.c 		blksize = lpfc_cmd_blksize(sc);
sc               1008 drivers/scsi/lpfc/lpfc_scsi.c 		numblks = (scsi_bufflen(sc) + blksize - 1) / blksize;
sc               1024 drivers/scsi/lpfc/lpfc_scsi.c 	rdata = lpfc_rport_data_from_scsi_device(sc->device);
sc               1047 drivers/scsi/lpfc/lpfc_scsi.c 		lpfc_cmd = (struct lpfc_io_buf *)sc->host_scribble;
sc               1389 drivers/scsi/lpfc/lpfc_scsi.c lpfc_sc_to_bg_opcodes(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               1394 drivers/scsi/lpfc/lpfc_scsi.c 	if (lpfc_cmd_guard_csum(sc)) {
sc               1395 drivers/scsi/lpfc/lpfc_scsi.c 		switch (scsi_get_prot_op(sc)) {
sc               1418 drivers/scsi/lpfc/lpfc_scsi.c 					scsi_get_prot_op(sc));
sc               1424 drivers/scsi/lpfc/lpfc_scsi.c 		switch (scsi_get_prot_op(sc)) {
sc               1447 drivers/scsi/lpfc/lpfc_scsi.c 					scsi_get_prot_op(sc));
sc               1469 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_err_opcodes(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               1474 drivers/scsi/lpfc/lpfc_scsi.c 	if (lpfc_cmd_guard_csum(sc)) {
sc               1475 drivers/scsi/lpfc/lpfc_scsi.c 		switch (scsi_get_prot_op(sc)) {
sc               1500 drivers/scsi/lpfc/lpfc_scsi.c 		switch (scsi_get_prot_op(sc)) {
sc               1561 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_setup_bpl(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               1569 drivers/scsi/lpfc/lpfc_scsi.c 	int datadir = sc->sc_data_direction;
sc               1577 drivers/scsi/lpfc/lpfc_scsi.c 	status  = lpfc_sc_to_bg_opcodes(phba, sc, &txop, &rxop);
sc               1582 drivers/scsi/lpfc/lpfc_scsi.c 	reftag = (uint32_t)scsi_get_lba(sc); /* Truncate LBA */
sc               1585 drivers/scsi/lpfc/lpfc_scsi.c 	rc = lpfc_bg_err_inject(phba, sc, &reftag, NULL, 1);
sc               1588 drivers/scsi/lpfc/lpfc_scsi.c 			lpfc_bg_err_opcodes(phba, sc, &txop, &rxop);
sc               1619 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_GUARD))
sc               1624 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_REF))
sc               1643 drivers/scsi/lpfc/lpfc_scsi.c 	scsi_for_each_sg(sc, sgde, datasegcnt, i) {
sc               1701 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_setup_bpl_prot(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               1716 drivers/scsi/lpfc/lpfc_scsi.c 	int datadir = sc->sc_data_direction;
sc               1727 drivers/scsi/lpfc/lpfc_scsi.c 	sgpe = scsi_prot_sglist(sc);
sc               1728 drivers/scsi/lpfc/lpfc_scsi.c 	sgde = scsi_sglist(sc);
sc               1737 drivers/scsi/lpfc/lpfc_scsi.c 	status = lpfc_sc_to_bg_opcodes(phba, sc, &txop, &rxop);
sc               1742 drivers/scsi/lpfc/lpfc_scsi.c 	blksize = lpfc_cmd_blksize(sc);
sc               1743 drivers/scsi/lpfc/lpfc_scsi.c 	reftag = (uint32_t)scsi_get_lba(sc); /* Truncate LBA */
sc               1746 drivers/scsi/lpfc/lpfc_scsi.c 	rc = lpfc_bg_err_inject(phba, sc, &reftag, NULL, 1);
sc               1749 drivers/scsi/lpfc/lpfc_scsi.c 			lpfc_bg_err_opcodes(phba, sc, &txop, &rxop);
sc               1781 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_GUARD))
sc               1786 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_REF))
sc               1946 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_setup_sgl(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               1966 drivers/scsi/lpfc/lpfc_scsi.c 	status  = lpfc_sc_to_bg_opcodes(phba, sc, &txop, &rxop);
sc               1971 drivers/scsi/lpfc/lpfc_scsi.c 	reftag = (uint32_t)scsi_get_lba(sc); /* Truncate LBA */
sc               1974 drivers/scsi/lpfc/lpfc_scsi.c 	rc = lpfc_bg_err_inject(phba, sc, &reftag, NULL, 1);
sc               1977 drivers/scsi/lpfc/lpfc_scsi.c 			lpfc_bg_err_opcodes(phba, sc, &txop, &rxop);
sc               1996 drivers/scsi/lpfc/lpfc_scsi.c 	if (sc->sc_data_direction == DMA_FROM_DEVICE) {
sc               1997 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_GUARD))
sc               2002 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_REF))
sc               2024 drivers/scsi/lpfc/lpfc_scsi.c 	sgde = scsi_sglist(sc);
sc               2126 drivers/scsi/lpfc/lpfc_scsi.c lpfc_bg_setup_sgl_prot(struct lpfc_hba *phba, struct scsi_cmnd *sc,
sc               2153 drivers/scsi/lpfc/lpfc_scsi.c 	sgpe = scsi_prot_sglist(sc);
sc               2154 drivers/scsi/lpfc/lpfc_scsi.c 	sgde = scsi_sglist(sc);
sc               2163 drivers/scsi/lpfc/lpfc_scsi.c 	status = lpfc_sc_to_bg_opcodes(phba, sc, &txop, &rxop);
sc               2168 drivers/scsi/lpfc/lpfc_scsi.c 	blksize = lpfc_cmd_blksize(sc);
sc               2169 drivers/scsi/lpfc/lpfc_scsi.c 	reftag = (uint32_t)scsi_get_lba(sc); /* Truncate LBA */
sc               2172 drivers/scsi/lpfc/lpfc_scsi.c 	rc = lpfc_bg_err_inject(phba, sc, &reftag, NULL, 1);
sc               2175 drivers/scsi/lpfc/lpfc_scsi.c 			lpfc_bg_err_opcodes(phba, sc, &txop, &rxop);
sc               2224 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_GUARD)) {
sc               2243 drivers/scsi/lpfc/lpfc_scsi.c 		if (lpfc_cmd_protect(sc, LPFC_CHECK_PROTECT_REF))
sc               2438 drivers/scsi/lpfc/lpfc_scsi.c lpfc_prot_group_type(struct lpfc_hba *phba, struct scsi_cmnd *sc)
sc               2441 drivers/scsi/lpfc/lpfc_scsi.c 	unsigned char op = scsi_get_prot_op(sc);
sc               2478 drivers/scsi/lpfc/lpfc_scsi.c 	struct scsi_cmnd *sc = lpfc_cmd->pCmd;
sc               2481 drivers/scsi/lpfc/lpfc_scsi.c 	fcpdl = scsi_bufflen(sc);
sc               2484 drivers/scsi/lpfc/lpfc_scsi.c 	if (sc->sc_data_direction == DMA_FROM_DEVICE) {
sc               2486 drivers/scsi/lpfc/lpfc_scsi.c 		if (scsi_get_prot_op(sc) ==  SCSI_PROT_READ_INSERT)
sc               2491 drivers/scsi/lpfc/lpfc_scsi.c 		if (scsi_get_prot_op(sc) ==  SCSI_PROT_WRITE_STRIP)
sc               2500 drivers/scsi/lpfc/lpfc_scsi.c 	fcpdl += (fcpdl / lpfc_cmd_blksize(sc)) * 8;
sc               1485 drivers/scsi/megaraid/megaraid_sas_base.c 	u8 sc = scp->cmnd[0];
sc               1504 drivers/scsi/megaraid/megaraid_sas_base.c 	ldio->cmd = (sc & 0x02) ? MFI_CMD_LD_WRITE : MFI_CMD_LD_READ;
sc               5406 drivers/scsi/mpt3sas/mpt3sas_base.c 	u32 s, sc;
sc               5409 drivers/scsi/mpt3sas/mpt3sas_base.c 	sc = s & MPI2_IOC_STATE_MASK;
sc               5410 drivers/scsi/mpt3sas/mpt3sas_base.c 	return cooked ? sc : s;
sc                480 drivers/scsi/qedf/qedf_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc                481 drivers/scsi/qedf/qedf_io.c 	struct Scsi_Host *host = sc->device->host;
sc                493 drivers/scsi/qedf/qedf_io.c 	sg_count = dma_map_sg(&qedf->pdev->dev, scsi_sglist(sc),
sc                494 drivers/scsi/qedf/qedf_io.c 	    scsi_sg_count(sc), sc->sc_data_direction);
sc                495 drivers/scsi/qedf/qedf_io.c 	sg = scsi_sglist(sc);
sc                502 drivers/scsi/qedf/qedf_io.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sc                528 drivers/scsi/qedf/qedf_io.c 	if (byte_count != scsi_bufflen(sc))
sc                531 drivers/scsi/qedf/qedf_io.c 			   scsi_bufflen(sc), io_req->xid);
sc                538 drivers/scsi/qedf/qedf_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc                542 drivers/scsi/qedf/qedf_io.c 	if (scsi_sg_count(sc)) {
sc               1117 drivers/scsi/qedf/qedf_io.c 	struct scsi_cmnd *sc = io_req->sc_cmd;
sc               1119 drivers/scsi/qedf/qedf_io.c 	if (io_req->bd_tbl->bd_valid && sc && scsi_sg_count(sc)) {
sc               1120 drivers/scsi/qedf/qedf_io.c 		dma_unmap_sg(&qedf->pdev->dev, scsi_sglist(sc),
sc               1121 drivers/scsi/qedf/qedf_io.c 		    scsi_sg_count(sc), sc->sc_data_direction);
sc                 22 drivers/scsi/qedi/qedi_fw.c 	struct scsi_cmnd *sc = cmd->scsi_cmd;
sc                 24 drivers/scsi/qedi/qedi_fw.c 	if (cmd->io_tbl.sge_valid && sc) {
sc                 26 drivers/scsi/qedi/qedi_fw.c 		scsi_dma_unmap(sc);
sc               1377 drivers/scsi/qedi/qedi_fw.c 	if (!ctask || !ctask->sc) {
sc               1507 drivers/scsi/qedi/qedi_fw.c 		if (!ctask || !ctask->sc) {
sc               1840 drivers/scsi/qedi/qedi_fw.c 	struct scsi_cmnd *sc = cmd->scsi_cmd;
sc               1851 drivers/scsi/qedi/qedi_fw.c 	WARN_ON(scsi_sg_count(sc) > QEDI_ISCSI_MAX_BDS_PER_CMD);
sc               1853 drivers/scsi/qedi/qedi_fw.c 	sg_count = dma_map_sg(&qedi->pdev->dev, scsi_sglist(sc),
sc               1854 drivers/scsi/qedi/qedi_fw.c 			      scsi_sg_count(sc), sc->sc_data_direction);
sc               1860 drivers/scsi/qedi/qedi_fw.c 	sg = scsi_sglist(sc);
sc               1876 drivers/scsi/qedi/qedi_fw.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sc               1920 drivers/scsi/qedi/qedi_fw.c 	if (byte_count != scsi_bufflen(sc))
sc               1923 drivers/scsi/qedi/qedi_fw.c 			 scsi_bufflen(sc));
sc               1928 drivers/scsi/qedi/qedi_fw.c 	WARN_ON(byte_count != scsi_bufflen(sc));
sc               1936 drivers/scsi/qedi/qedi_fw.c 	struct scsi_cmnd *sc = cmd->scsi_cmd;
sc               1938 drivers/scsi/qedi/qedi_fw.c 	if (scsi_sg_count(sc)) {
sc               1953 drivers/scsi/qedi/qedi_fw.c static void qedi_cpy_scsi_cdb(struct scsi_cmnd *sc, u32 *dstp)
sc               1959 drivers/scsi/qedi/qedi_fw.c 	lpcnt = sc->cmd_len / sizeof(dword);
sc               1960 drivers/scsi/qedi/qedi_fw.c 	srcp = (u8 *)sc->cmnd;
sc               1967 drivers/scsi/qedi/qedi_fw.c 	if (sc->cmd_len & 0x3) {
sc               1979 drivers/scsi/qedi/qedi_fw.c 	struct scsi_cmnd *sc_cmd = task->sc;
sc               2036 drivers/scsi/qedi/qedi_fw.c 	struct scsi_cmnd *sc = task->sc;
sc               2061 drivers/scsi/qedi/qedi_fw.c 	int_to_scsilun(sc->device->lun, (struct scsi_lun *)scsi_lun);
sc               2086 drivers/scsi/qedi/qedi_fw.c 		if (sc->sc_data_direction == DMA_TO_DEVICE) {
sc               2106 drivers/scsi/qedi/qedi_fw.c 	qedi_cpy_scsi_cdb(sc, (u32 *)cmd_pdu_header.cdb);
sc               2115 drivers/scsi/qedi/qedi_fw.c 		tx_sgl_task_params.total_buffer_size = scsi_bufflen(sc);
sc               2125 drivers/scsi/qedi/qedi_fw.c 		rx_sgl_task_params.total_buffer_size = scsi_bufflen(sc);
sc               2148 drivers/scsi/qedi/qedi_fw.c 		task_params.tx_io_size = scsi_bufflen(sc);
sc               2150 drivers/scsi/qedi/qedi_fw.c 		task_params.rx_io_size = scsi_bufflen(sc);
sc               2160 drivers/scsi/qedi/qedi_fw.c 		  (u16)cmd->io_tbl.sge_valid, scsi_bufflen(sc),
sc                783 drivers/scsi/qedi/qedi_iscsi.c 	struct scsi_cmnd *sc = task->sc;
sc                793 drivers/scsi/qedi/qedi_iscsi.c 	if (!sc)
sc                796 drivers/scsi/qedi/qedi_iscsi.c 	cmd->scsi_cmd = sc;
sc               1368 drivers/scsi/qedi/qedi_iscsi.c 	if (!task->sc || task->state == ISCSI_TASK_PENDING) {
sc                120 drivers/scsi/qla4xxx/ql4_os.c static enum blk_eh_timer_return qla4xxx_eh_cmd_timed_out(struct scsi_cmnd *sc);
sc               1845 drivers/scsi/qla4xxx/ql4_os.c static enum blk_eh_timer_return qla4xxx_eh_cmd_timed_out(struct scsi_cmnd *sc)
sc               1852 drivers/scsi/qla4xxx/ql4_os.c 	session = starget_to_session(scsi_target(sc->device));
sc               3373 drivers/scsi/qla4xxx/ql4_os.c 	if (task->sc) {
sc               3456 drivers/scsi/qla4xxx/ql4_os.c 	struct scsi_cmnd *sc = task->sc;
sc               3461 drivers/scsi/qla4xxx/ql4_os.c 	if (!sc)
sc                 87 drivers/scsi/snic/snic_io.h 	struct scsi_cmnd *sc;		/* Associated scsi cmd */
sc                 36 drivers/scsi/snic/snic_scsi.c #define snic_cmd_tag(sc)	(((struct scsi_cmnd *) sc)->request->tag)
sc                112 drivers/scsi/snic/snic_scsi.c snic_io_lock_hash(struct snic *snic, struct scsi_cmnd *sc)
sc                114 drivers/scsi/snic/snic_scsi.c 	u32 hash = snic_cmd_tag(sc) & (SNIC_IO_LOCKS - 1);
sc                129 drivers/scsi/snic/snic_scsi.c 		   struct scsi_cmnd *sc)
sc                134 drivers/scsi/snic/snic_scsi.c 	SNIC_BUG_ON(!((CMD_STATE(sc) == SNIC_IOREQ_COMPLETE) ||
sc                135 drivers/scsi/snic/snic_scsi.c 		      (CMD_STATE(sc) == SNIC_IOREQ_ABTS_COMPLETE) ||
sc                136 drivers/scsi/snic/snic_scsi.c 		      (CMD_FLAGS(sc) & SNIC_DEV_RST_NOTSUP) ||
sc                137 drivers/scsi/snic/snic_scsi.c 		      (CMD_FLAGS(sc) & SNIC_IO_INTERNAL_TERM_ISSUED) ||
sc                138 drivers/scsi/snic/snic_scsi.c 		      (CMD_FLAGS(sc) & SNIC_DEV_RST_TERM_ISSUED) ||
sc                139 drivers/scsi/snic/snic_scsi.c 		      (CMD_FLAGS(sc) & SNIC_SCSI_CLEANUP) ||
sc                140 drivers/scsi/snic/snic_scsi.c 		      (CMD_STATE(sc) == SNIC_IOREQ_LR_COMPLETE)));
sc                144 drivers/scsi/snic/snic_scsi.c 		      sc, snic_cmd_tag(sc), rqi, rqi->req, rqi->abort_req,
sc                145 drivers/scsi/snic/snic_scsi.c 		      rqi->dr_req, snic_ioreq_state_to_str(CMD_STATE(sc)),
sc                146 drivers/scsi/snic/snic_scsi.c 		      CMD_FLAGS(sc));
sc                154 drivers/scsi/snic/snic_scsi.c 	scsi_dma_unmap(sc);
sc                165 drivers/scsi/snic/snic_scsi.c 		     struct scsi_cmnd *sc,
sc                180 drivers/scsi/snic/snic_scsi.c 		for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) {
sc                189 drivers/scsi/snic/snic_scsi.c 			    sc->sense_buffer,
sc                195 drivers/scsi/snic/snic_scsi.c 			      sc->sense_buffer, snic_cmd_tag(sc));
sc                201 drivers/scsi/snic/snic_scsi.c 	int_to_scsilun(sc->device->lun, &lun);
sc                202 drivers/scsi/snic/snic_scsi.c 	if (sc->sc_data_direction == DMA_FROM_DEVICE)
sc                204 drivers/scsi/snic/snic_scsi.c 	if (sc->sc_data_direction == DMA_TO_DEVICE)
sc                209 drivers/scsi/snic/snic_scsi.c 			snic_cmd_tag(sc),
sc                215 drivers/scsi/snic/snic_scsi.c 			sc->cmnd,
sc                216 drivers/scsi/snic/snic_scsi.c 			sc->cmd_len,
sc                217 drivers/scsi/snic/snic_scsi.c 			scsi_bufflen(sc),
sc                242 drivers/scsi/snic/snic_scsi.c 		      struct scsi_cmnd *sc)
sc                247 drivers/scsi/snic/snic_scsi.c 	u32 tag = snic_cmd_tag(sc);
sc                252 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_NOT_INITED;
sc                253 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) = SNIC_NO_FLAGS;
sc                254 drivers/scsi/snic/snic_scsi.c 	sg_cnt = scsi_dma_map(sc);
sc                256 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC((u16)snic->shost->host_no, tag, (ulong) sc, 0,
sc                257 drivers/scsi/snic/snic_scsi.c 			 sc->cmnd[0], sg_cnt, CMD_STATE(sc));
sc                267 drivers/scsi/snic/snic_scsi.c 		scsi_dma_unmap(sc);
sc                274 drivers/scsi/snic/snic_scsi.c 	rqi->sc = sc;
sc                276 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_PENDING;
sc                277 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = (char *) rqi;
sc                278 drivers/scsi/snic/snic_scsi.c 	cmd_trc = SNIC_TRC_CMD(sc);
sc                279 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= (SNIC_IO_INITIALIZED | SNIC_IO_ISSUED);
sc                280 drivers/scsi/snic/snic_scsi.c 	cmd_st_flags = SNIC_TRC_CMD_STATE_FLAGS(sc);
sc                281 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc                284 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_icmnd_req(snic, rqi, sc, sg_cnt);
sc                288 drivers/scsi/snic/snic_scsi.c 			      sc, ret);
sc                291 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc                292 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = NULL;
sc                293 drivers/scsi/snic/snic_scsi.c 		CMD_STATE(sc) = SNIC_IOREQ_COMPLETE;
sc                294 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) &= ~SNIC_IO_ISSUED; /* turn off the flag */
sc                298 drivers/scsi/snic/snic_scsi.c 			snic_release_req_buf(snic, rqi, sc);
sc                300 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC(snic->shost->host_no, tag, (ulong) sc, 0, 0, 0,
sc                301 drivers/scsi/snic/snic_scsi.c 			 SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                303 drivers/scsi/snic/snic_scsi.c 		u32 io_sz = scsi_bufflen(sc) >> 9;
sc                315 drivers/scsi/snic/snic_scsi.c 			      sc, tag);
sc                317 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC(snic->shost->host_no, tag, (ulong) sc, (ulong) rqi,
sc                333 drivers/scsi/snic/snic_scsi.c snic_queuecommand(struct Scsi_Host *shost, struct scsi_cmnd *sc)
sc                339 drivers/scsi/snic/snic_scsi.c 	tgt = starget_to_tgt(scsi_target(sc->device));
sc                344 drivers/scsi/snic/snic_scsi.c 		sc->result = ret;
sc                345 drivers/scsi/snic/snic_scsi.c 		sc->scsi_done(sc);
sc                359 drivers/scsi/snic/snic_scsi.c 		      sc, snic_cmd_tag(sc), sc->cmnd[0], sc->device->lun);
sc                361 drivers/scsi/snic/snic_scsi.c 	ret = snic_issue_scsi_req(snic, tgt, sc);
sc                378 drivers/scsi/snic/snic_scsi.c 			      struct scsi_cmnd *sc,
sc                381 drivers/scsi/snic/snic_scsi.c 	int state = CMD_STATE(sc);
sc                384 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_PENDING;
sc                386 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_PENDING;
sc                392 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_DONE;
sc                396 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABORTED;
sc                411 drivers/scsi/snic/snic_scsi.c 			     struct scsi_cmnd *sc,
sc                429 drivers/scsi/snic/snic_scsi.c 		scsi_set_resid(sc, le32_to_cpu(icmnd_cmpl->resid));
sc                475 drivers/scsi/snic/snic_scsi.c 		      snic_io_status_to_str(cmpl_stat), CMD_FLAGS(sc));
sc                478 drivers/scsi/snic/snic_scsi.c 	sc->result = (res << 16) | icmnd_cmpl->scsi_status;
sc                485 drivers/scsi/snic/snic_scsi.c snic_tmreq_pending(struct scsi_cmnd *sc)
sc                487 drivers/scsi/snic/snic_scsi.c 	int state = CMD_STATE(sc);
sc                501 drivers/scsi/snic/snic_scsi.c 			       struct scsi_cmnd *sc)
sc                508 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_COMPLETE;
sc                511 drivers/scsi/snic/snic_scsi.c 		sc->result = (DID_OK << 16) | scsi_stat;
sc                513 drivers/scsi/snic/snic_scsi.c 		xfer_len = scsi_bufflen(sc);
sc                516 drivers/scsi/snic/snic_scsi.c 		scsi_set_resid(sc, le32_to_cpu(icmnd_cmpl->resid));
sc                528 drivers/scsi/snic/snic_scsi.c 		snic_process_io_failed_state(snic, icmnd_cmpl, sc, cmpl_stat);
sc                532 drivers/scsi/snic/snic_scsi.c 			      snic_io_status_to_str(cmpl_stat), CMD_FLAGS(sc));
sc                550 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc                573 drivers/scsi/snic/snic_scsi.c 	sc = scsi_host_find_tag(snic->shost, cmnd_id);
sc                574 drivers/scsi/snic/snic_scsi.c 	WARN_ON_ONCE(!sc);
sc                576 drivers/scsi/snic/snic_scsi.c 	if (!sc) {
sc                592 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc                595 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc                598 drivers/scsi/snic/snic_scsi.c 		      sc->device->lun, sc, sc->cmnd[0], snic_cmd_tag(sc),
sc                599 drivers/scsi/snic/snic_scsi.c 		      CMD_FLAGS(sc), rqi);
sc                601 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_HOST_RESET_CMD_TERM) {
sc                611 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_REQ_NULL;
sc                617 drivers/scsi/snic/snic_scsi.c 			      cmnd_id, sc, CMD_FLAGS(sc));
sc                631 drivers/scsi/snic/snic_scsi.c 	if (unlikely(snic_tmreq_pending(sc))) {
sc                632 drivers/scsi/snic/snic_scsi.c 		snic_proc_tmreq_pending_state(snic, sc, hdr_stat);
sc                643 drivers/scsi/snic/snic_scsi.c 			      snic_ioreq_state_to_str(CMD_STATE(sc)),
sc                645 drivers/scsi/snic/snic_scsi.c 			      sc, sc_stat, le32_to_cpu(icmnd_cmpl->resid),
sc                646 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                648 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC(snic->shost->host_no, cmnd_id, (ulong) sc,
sc                650 drivers/scsi/snic/snic_scsi.c 			 SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                655 drivers/scsi/snic/snic_scsi.c 	if (snic_process_icmnd_cmpl_status(snic, icmnd_cmpl, hdr_stat, sc)) {
sc                656 drivers/scsi/snic/snic_scsi.c 		scsi_print_command(sc);
sc                659 drivers/scsi/snic/snic_scsi.c 			      sc, sc->cmnd[0], cmnd_id,
sc                660 drivers/scsi/snic/snic_scsi.c 			      snic_io_status_to_str(hdr_stat), CMD_FLAGS(sc));
sc                664 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = NULL;
sc                665 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_IO_DONE;
sc                672 drivers/scsi/snic/snic_scsi.c 	snic_release_req_buf(snic, rqi, sc);
sc                674 drivers/scsi/snic/snic_scsi.c 	SNIC_TRC(snic->shost->host_no, cmnd_id, (ulong) sc,
sc                676 drivers/scsi/snic/snic_scsi.c 		 SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                679 drivers/scsi/snic/snic_scsi.c 	if (sc->scsi_done)
sc                680 drivers/scsi/snic/snic_scsi.c 		sc->scsi_done(sc);
sc                690 drivers/scsi/snic/snic_scsi.c 			 struct scsi_cmnd *sc)
sc                692 drivers/scsi/snic/snic_scsi.c 	struct snic_req_info *rqi = (struct snic_req_info *) CMD_SP(sc);
sc                695 drivers/scsi/snic/snic_scsi.c 	CMD_LR_STATUS(sc) = cmpl_stat;
sc                698 drivers/scsi/snic/snic_scsi.c 		      snic_ioreq_state_to_str(CMD_STATE(sc)));
sc                700 drivers/scsi/snic/snic_scsi.c 	if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING) {
sc                701 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_ABTS_PENDING;
sc                703 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC(snic->shost->host_no, cmnd_id, (ulong) sc,
sc                705 drivers/scsi/snic/snic_scsi.c 			 (ulong) fwreq, 0, SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                711 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                717 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_DEV_RST_TIMEDOUT) {
sc                718 drivers/scsi/snic/snic_scsi.c 		SNIC_TRC(snic->shost->host_no, cmnd_id, (ulong) sc,
sc                720 drivers/scsi/snic/snic_scsi.c 			 (ulong) fwreq, 0, SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                726 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                731 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_LR_COMPLETE;
sc                732 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_DEV_RST_DONE;
sc                738 drivers/scsi/snic/snic_scsi.c 		      CMD_FLAGS(sc));
sc                777 drivers/scsi/snic/snic_scsi.c 		       struct scsi_cmnd *sc)
sc                786 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc                788 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_HOST_RESET_CMD_TERM) {
sc                793 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc                799 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_REQ_NULL;
sc                802 drivers/scsi/snic/snic_scsi.c 			      snic_io_status_to_str(cmpl_stat), cmnd_id, sc,
sc                803 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                819 drivers/scsi/snic/snic_scsi.c 		if (CMD_STATE(sc) != SNIC_IOREQ_ABTS_PENDING) {
sc                826 drivers/scsi/snic/snic_scsi.c 		CMD_STATE(sc) = SNIC_IOREQ_ABTS_COMPLETE;
sc                827 drivers/scsi/snic/snic_scsi.c 		CMD_ABTS_STATUS(sc) = cmpl_stat;
sc                828 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_DONE;
sc                834 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                848 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = NULL;
sc                849 drivers/scsi/snic/snic_scsi.c 		sc->result = (DID_ERROR << 16);
sc                852 drivers/scsi/snic/snic_scsi.c 			      sc, CMD_FLAGS(sc));
sc                856 drivers/scsi/snic/snic_scsi.c 		snic_release_req_buf(snic, rqi, sc);
sc                858 drivers/scsi/snic/snic_scsi.c 		if (sc->scsi_done) {
sc                859 drivers/scsi/snic/snic_scsi.c 			SNIC_TRC(snic->shost->host_no, cmnd_id, (ulong) sc,
sc                861 drivers/scsi/snic/snic_scsi.c 				 (ulong) fwreq, SNIC_TRC_CMD(sc),
sc                862 drivers/scsi/snic/snic_scsi.c 				 SNIC_TRC_CMD_STATE_FLAGS(sc));
sc                864 drivers/scsi/snic/snic_scsi.c 			sc->scsi_done(sc);
sc                871 drivers/scsi/snic/snic_scsi.c 		snic_proc_dr_cmpl_locked(snic, fwreq, cmpl_stat, cmnd_id, sc);
sc                880 drivers/scsi/snic/snic_scsi.c 		CMD_STATE(sc) = SNIC_IOREQ_ABTS_COMPLETE;
sc                881 drivers/scsi/snic/snic_scsi.c 		CMD_ABTS_STATUS(sc) = cmpl_stat;
sc                882 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_DONE;
sc                887 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                903 drivers/scsi/snic/snic_scsi.c 			      snic_ioreq_state_to_str(CMD_STATE(sc)),
sc                905 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc                922 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd  *sc = NULL;
sc                944 drivers/scsi/snic/snic_scsi.c 		sc = rqi->sc;
sc                958 drivers/scsi/snic/snic_scsi.c 	sc = scsi_host_find_tag(snic->shost, cmnd_id & SNIC_TAG_MASK);
sc                959 drivers/scsi/snic/snic_scsi.c 	WARN_ON_ONCE(!sc);
sc                962 drivers/scsi/snic/snic_scsi.c 	if (!sc) {
sc                971 drivers/scsi/snic/snic_scsi.c 	snic_process_itmf_cmpl(snic, fwreq, cmnd_id, hdr_stat, sc);
sc                977 drivers/scsi/snic/snic_scsi.c snic_hba_reset_scsi_cleanup(struct snic *snic, struct scsi_cmnd *sc)
sc                983 drivers/scsi/snic/snic_scsi.c 	snic_scsi_cleanup(snic, snic_cmd_tag(sc));
sc               1009 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc               1030 drivers/scsi/snic/snic_scsi.c 		sc = rqi->sc;
sc               1044 drivers/scsi/snic/snic_scsi.c 	sc = scsi_host_find_tag(snic->shost, cmnd_id);
sc               1046 drivers/scsi/snic/snic_scsi.c 	if (!sc) {
sc               1058 drivers/scsi/snic/snic_scsi.c 		       sc, rqi, cmnd_id, CMD_FLAGS(sc));
sc               1060 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               1072 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1078 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_REQ_NULL;
sc               1081 drivers/scsi/snic/snic_scsi.c 			      snic_io_status_to_str(hdr_stat), cmnd_id, sc,
sc               1082 drivers/scsi/snic/snic_scsi.c 			      CMD_FLAGS(sc));
sc               1092 drivers/scsi/snic/snic_scsi.c 	snic_hba_reset_scsi_cleanup(snic, sc);
sc               1309 drivers/scsi/snic/snic_scsi.c 		    struct scsi_cmnd *sc,
sc               1315 drivers/scsi/snic/snic_scsi.c 	int tm_tag = snic_cmd_tag(sc) | rqi->tm_tag;
sc               1322 drivers/scsi/snic/snic_scsi.c 	int_to_scsilun(sc->device->lun, &lun);
sc               1346 drivers/scsi/snic/snic_scsi.c 			      tmf, sc, rqi, req_id, snic_cmd_tag(sc), ret);
sc               1350 drivers/scsi/snic/snic_scsi.c 			      tmf, sc, rqi, req_id, snic_cmd_tag(sc));
sc               1358 drivers/scsi/snic/snic_scsi.c 		    struct scsi_cmnd *sc,
sc               1362 drivers/scsi/snic/snic_scsi.c 	int req_id = 0, tag = snic_cmd_tag(sc);
sc               1389 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_itmf_req(snic, tmreq, sc, tmf, req_id);
sc               1399 drivers/scsi/snic/snic_scsi.c 			      tmf, sc, rqi, req_id, tag, ret);
sc               1403 drivers/scsi/snic/snic_scsi.c 			      tmf, sc, rqi, req_id, tag);
sc               1417 drivers/scsi/snic/snic_scsi.c 		     struct scsi_cmnd *sc,
sc               1421 drivers/scsi/snic/snic_scsi.c 		      sc, rqi, snic_cmd_tag(sc), tmf);
sc               1426 drivers/scsi/snic/snic_scsi.c 	return snic_issue_tm_req(snic, rqi, sc, tmf);
sc               1433 drivers/scsi/snic/snic_scsi.c snic_abort_finish(struct snic *snic, struct scsi_cmnd *sc)
sc               1438 drivers/scsi/snic/snic_scsi.c 	int ret = 0, tag = snic_cmd_tag(sc);
sc               1440 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               1442 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1445 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_REQ_NULL;
sc               1449 drivers/scsi/snic/snic_scsi.c 			      tag, sc, CMD_FLAGS(sc));
sc               1460 drivers/scsi/snic/snic_scsi.c 	switch (CMD_ABTS_STATUS(sc)) {
sc               1463 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TIMEDOUT;
sc               1467 drivers/scsi/snic/snic_scsi.c 			      sc, snic_cmd_tag(sc), CMD_FLAGS(sc));
sc               1481 drivers/scsi/snic/snic_scsi.c 		sc->result = (DID_ERROR << 16);
sc               1482 drivers/scsi/snic/snic_scsi.c 		sc->scsi_done(sc);
sc               1492 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = NULL;
sc               1495 drivers/scsi/snic/snic_scsi.c 		       tag, snic_io_status_to_str(CMD_ABTS_STATUS(sc)),
sc               1496 drivers/scsi/snic/snic_scsi.c 		       CMD_FLAGS(sc));
sc               1501 drivers/scsi/snic/snic_scsi.c 		snic_release_req_buf(snic, rqi, sc);
sc               1510 drivers/scsi/snic/snic_scsi.c snic_send_abort_and_wait(struct snic *snic, struct scsi_cmnd *sc)
sc               1518 drivers/scsi/snic/snic_scsi.c 	int ret = 0, tmf = 0, tag = snic_cmd_tag(sc);
sc               1520 drivers/scsi/snic/snic_scsi.c 	tgt = starget_to_tgt(scsi_target(sc->device));
sc               1528 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               1543 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1549 drivers/scsi/snic/snic_scsi.c 			      tag, CMD_FLAGS(sc));
sc               1557 drivers/scsi/snic/snic_scsi.c 	if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING) {
sc               1566 drivers/scsi/snic/snic_scsi.c 	sv_state = CMD_STATE(sc);
sc               1574 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_ABTS_PENDING;
sc               1575 drivers/scsi/snic/snic_scsi.c 	CMD_ABTS_STATUS(sc) = SNIC_INVALID_CODE;
sc               1582 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_abort_req(snic, rqi, sc, tmf);
sc               1587 drivers/scsi/snic/snic_scsi.c 			      tag, ret, CMD_FLAGS(sc));
sc               1591 drivers/scsi/snic/snic_scsi.c 		CMD_STATE(sc) = sv_state;
sc               1592 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1603 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_ISSUED;
sc               1607 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_TERM_ISSUED;
sc               1613 drivers/scsi/snic/snic_scsi.c 		      sc, tag, CMD_FLAGS(sc));
sc               1636 drivers/scsi/snic/snic_scsi.c snic_abort_cmd(struct scsi_cmnd *sc)
sc               1638 drivers/scsi/snic/snic_scsi.c 	struct snic *snic = shost_priv(sc->device->host);
sc               1639 drivers/scsi/snic/snic_scsi.c 	int ret = SUCCESS, tag = snic_cmd_tag(sc);
sc               1643 drivers/scsi/snic/snic_scsi.c 		       sc, sc->cmnd[0], sc->request, tag);
sc               1655 drivers/scsi/snic/snic_scsi.c 	ret = snic_send_abort_and_wait(snic, sc);
sc               1659 drivers/scsi/snic/snic_scsi.c 	ret = snic_abort_finish(snic, sc);
sc               1662 drivers/scsi/snic/snic_scsi.c 	SNIC_TRC(snic->shost->host_no, tag, (ulong) sc,
sc               1664 drivers/scsi/snic/snic_scsi.c 		 SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc               1680 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc               1694 drivers/scsi/snic/snic_scsi.c 		sc = scsi_host_find_tag(snic->shost, tag);
sc               1696 drivers/scsi/snic/snic_scsi.c 		if (!sc || (lr_sc && (sc->device != lr_sdev || sc == lr_sc))) {
sc               1702 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1714 drivers/scsi/snic/snic_scsi.c 			      snic_ioreq_state_to_str(CMD_STATE(sc)));
sc               1716 drivers/scsi/snic/snic_scsi.c 		if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING) {
sc               1735 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc               1744 drivers/scsi/snic/snic_scsi.c 	sc = scsi_host_find_tag(snic->shost, tag);
sc               1747 drivers/scsi/snic/snic_scsi.c 	if (!sc || sc->device != lr_sdev)
sc               1750 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1756 drivers/scsi/snic/snic_scsi.c 	if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING)
sc               1760 drivers/scsi/snic/snic_scsi.c 	if ((CMD_FLAGS(sc) & SNIC_DEVICE_RESET) &&
sc               1761 drivers/scsi/snic/snic_scsi.c 			(!(CMD_FLAGS(sc) & SNIC_DEV_RST_ISSUED))) {
sc               1765 drivers/scsi/snic/snic_scsi.c 			      sc);
sc               1772 drivers/scsi/snic/snic_scsi.c 		snic_ioreq_state_to_str(CMD_STATE(sc)));
sc               1775 drivers/scsi/snic/snic_scsi.c 	sv_state = CMD_STATE(sc);
sc               1785 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_ABTS_PENDING;
sc               1788 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_DEVICE_RESET) {
sc               1792 drivers/scsi/snic/snic_scsi.c 			      "clean_single_req:devrst sc 0x%p\n", sc);
sc               1795 drivers/scsi/snic/snic_scsi.c 	CMD_ABTS_STATUS(sc) = SNIC_INVALID_CODE;
sc               1799 drivers/scsi/snic/snic_scsi.c 	tgt = starget_to_tgt(scsi_target(sc->device));
sc               1806 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_abort_req(snic, rqi, sc, tmf);
sc               1810 drivers/scsi/snic/snic_scsi.c 			      sc, tag, rqi->tm_tag, CMD_FLAGS(sc));
sc               1813 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1818 drivers/scsi/snic/snic_scsi.c 		if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING)
sc               1819 drivers/scsi/snic/snic_scsi.c 			CMD_STATE(sc) = sv_state;
sc               1826 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_DEVICE_RESET)
sc               1827 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_TERM_ISSUED;
sc               1829 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_IO_INTERNAL_TERM_ISSUED;
sc               1836 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1838 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_REQ_NULL;
sc               1844 drivers/scsi/snic/snic_scsi.c 	if (CMD_ABTS_STATUS(sc) == SNIC_INVALID_CODE) {
sc               1847 drivers/scsi/snic/snic_scsi.c 			      sc, tag, rqi->tm_tag, CMD_FLAGS(sc));
sc               1849 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_ABTS_TERM_DONE;
sc               1855 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_ABTS_COMPLETE;
sc               1856 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = NULL;
sc               1859 drivers/scsi/snic/snic_scsi.c 	snic_release_req_buf(snic, rqi, sc);
sc               1861 drivers/scsi/snic/snic_scsi.c 	sc->result = (DID_ERROR << 16);
sc               1862 drivers/scsi/snic/snic_scsi.c 	sc->scsi_done(sc);
sc               1921 drivers/scsi/snic/snic_scsi.c snic_dr_finish(struct snic *snic, struct scsi_cmnd *sc)
sc               1929 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               1931 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1936 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc), sc, CMD_FLAGS(sc));
sc               1944 drivers/scsi/snic/snic_scsi.c 	lr_res = CMD_LR_STATUS(sc);
sc               1951 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc), CMD_FLAGS(sc));
sc               1953 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_TIMEDOUT;
sc               1961 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc));
sc               1968 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc),
sc               1969 drivers/scsi/snic/snic_scsi.c 			      snic_io_status_to_str(lr_res), CMD_FLAGS(sc));
sc               1983 drivers/scsi/snic/snic_scsi.c 	ret = snic_dr_clean_pending_req(snic, sc);
sc               1988 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc));
sc               1989 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               1995 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2005 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = NULL;
sc               2009 drivers/scsi/snic/snic_scsi.c 		snic_release_req_buf(snic, rqi, sc);
sc               2018 drivers/scsi/snic/snic_scsi.c 		  struct scsi_cmnd *sc)
sc               2023 drivers/scsi/snic/snic_scsi.c 	return snic_issue_tm_req(snic, rqi, sc, SNIC_ITMF_LUN_RESET);
sc               2027 drivers/scsi/snic/snic_scsi.c snic_send_dr_and_wait(struct snic *snic, struct scsi_cmnd *sc)
sc               2034 drivers/scsi/snic/snic_scsi.c 	int ret = FAILED, tag = snic_cmd_tag(sc);
sc               2036 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               2038 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_DEVICE_RESET;
sc               2039 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2043 drivers/scsi/snic/snic_scsi.c 			      tag, CMD_FLAGS(sc));
sc               2051 drivers/scsi/snic/snic_scsi.c 	sv_state = CMD_STATE(sc);
sc               2053 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_LR_PENDING;
sc               2054 drivers/scsi/snic/snic_scsi.c 	CMD_LR_STATUS(sc) = SNIC_INVALID_CODE;
sc               2069 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_dr_req(snic, rqi, sc);
sc               2073 drivers/scsi/snic/snic_scsi.c 			      tag, ret, CMD_FLAGS(sc));
sc               2077 drivers/scsi/snic/snic_scsi.c 		CMD_STATE(sc) = sv_state;
sc               2078 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2089 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_DEV_RST_ISSUED;
sc               2116 drivers/scsi/snic/snic_scsi.c snic_unlink_and_release_req(struct snic *snic, struct scsi_cmnd *sc, int flag)
sc               2123 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               2125 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2128 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = NULL;
sc               2131 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= flag;
sc               2135 drivers/scsi/snic/snic_scsi.c 		snic_release_req_buf(snic, rqi, sc);
sc               2137 drivers/scsi/snic/snic_scsi.c 	SNIC_TRC(snic->shost->host_no, snic_cmd_tag(sc), (ulong) sc,
sc               2139 drivers/scsi/snic/snic_scsi.c 		 SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc               2148 drivers/scsi/snic/snic_scsi.c snic_device_reset(struct scsi_cmnd *sc)
sc               2150 drivers/scsi/snic/snic_scsi.c 	struct Scsi_Host *shost = sc->device->host;
sc               2153 drivers/scsi/snic/snic_scsi.c 	int tag = snic_cmd_tag(sc);
sc               2159 drivers/scsi/snic/snic_scsi.c 		      sc, sc->cmnd[0], sc->request,
sc               2160 drivers/scsi/snic/snic_scsi.c 		      snic_cmd_tag(sc));
sc               2161 drivers/scsi/snic/snic_scsi.c 	dr_supp = snic_dev_reset_supported(sc->device);
sc               2165 drivers/scsi/snic/snic_scsi.c 		snic_unlink_and_release_req(snic, sc, SNIC_DEV_RST_NOTSUP);
sc               2171 drivers/scsi/snic/snic_scsi.c 		snic_unlink_and_release_req(snic, sc, 0);
sc               2186 drivers/scsi/snic/snic_scsi.c 		memset(scsi_cmd_priv(sc), 0,
sc               2188 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = (char *)rqi;
sc               2189 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) = SNIC_NO_FLAGS;
sc               2193 drivers/scsi/snic/snic_scsi.c 		rqi->sc = sc;
sc               2196 drivers/scsi/snic/snic_scsi.c 	ret = snic_send_dr_and_wait(snic, sc);
sc               2202 drivers/scsi/snic/snic_scsi.c 		snic_unlink_and_release_req(snic, sc, 0);
sc               2207 drivers/scsi/snic/snic_scsi.c 	ret = snic_dr_finish(snic, sc);
sc               2210 drivers/scsi/snic/snic_scsi.c 	SNIC_TRC(snic->shost->host_no, tag, (ulong) sc,
sc               2212 drivers/scsi/snic/snic_scsi.c 		 0, SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc               2232 drivers/scsi/snic/snic_scsi.c snic_issue_hba_reset(struct snic *snic, struct scsi_cmnd *sc)
sc               2248 drivers/scsi/snic/snic_scsi.c 	if (snic_cmd_tag(sc) == SCSI_NO_TAG) {
sc               2249 drivers/scsi/snic/snic_scsi.c 		memset(scsi_cmd_priv(sc), 0,
sc               2252 drivers/scsi/snic/snic_scsi.c 		rqi->sc = sc;
sc               2257 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               2259 drivers/scsi/snic/snic_scsi.c 	SNIC_BUG_ON(CMD_SP(sc) != NULL);
sc               2260 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_PENDING;
sc               2261 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = (char *) rqi;
sc               2262 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_IO_INITIALIZED;
sc               2267 drivers/scsi/snic/snic_scsi.c 	snic_io_hdr_enc(&req->hdr, SNIC_REQ_HBA_RESET, 0, snic_cmd_tag(sc),
sc               2282 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_HOST_RESET_ISSUED;
sc               2299 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2300 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = NULL;
sc               2313 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2314 drivers/scsi/snic/snic_scsi.c 	CMD_SP(sc) = NULL;
sc               2329 drivers/scsi/snic/snic_scsi.c snic_reset(struct Scsi_Host *shost, struct scsi_cmnd *sc)
sc               2358 drivers/scsi/snic/snic_scsi.c 	ret = snic_issue_hba_reset(snic, sc);
sc               2386 drivers/scsi/snic/snic_scsi.c snic_host_reset(struct scsi_cmnd *sc)
sc               2388 drivers/scsi/snic/snic_scsi.c 	struct Scsi_Host *shost = sc->device->host;
sc               2394 drivers/scsi/snic/snic_scsi.c 		      sc, sc->cmnd[0], sc->request,
sc               2395 drivers/scsi/snic/snic_scsi.c 		      snic_cmd_tag(sc), CMD_FLAGS(sc));
sc               2397 drivers/scsi/snic/snic_scsi.c 	ret = snic_reset(shost, sc);
sc               2399 drivers/scsi/snic/snic_scsi.c 	SNIC_TRC(shost->host_no, snic_cmd_tag(sc), (ulong) sc,
sc               2401 drivers/scsi/snic/snic_scsi.c 		 0, SNIC_TRC_CMD(sc), SNIC_TRC_CMD_STATE_FLAGS(sc));
sc               2410 drivers/scsi/snic/snic_scsi.c snic_cmpl_pending_tmreq(struct snic *snic, struct scsi_cmnd *sc)
sc               2416 drivers/scsi/snic/snic_scsi.c 		      sc, snic_io_status_to_str(CMD_STATE(sc)), CMD_FLAGS(sc));
sc               2423 drivers/scsi/snic/snic_scsi.c 	CMD_ABTS_STATUS(sc) = SNIC_STAT_IO_SUCCESS;
sc               2425 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2442 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc               2457 drivers/scsi/snic/snic_scsi.c 		sc = scsi_host_find_tag(snic->shost, tag);
sc               2458 drivers/scsi/snic/snic_scsi.c 		if (!sc) {
sc               2464 drivers/scsi/snic/snic_scsi.c 		if (unlikely(snic_tmreq_pending(sc))) {
sc               2469 drivers/scsi/snic/snic_scsi.c 			snic_cmpl_pending_tmreq(snic, sc);
sc               2475 drivers/scsi/snic/snic_scsi.c 		rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2484 drivers/scsi/snic/snic_scsi.c 			      sc, rqi, tag, CMD_FLAGS(sc));
sc               2486 drivers/scsi/snic/snic_scsi.c 		CMD_SP(sc) = NULL;
sc               2487 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_SCSI_CLEANUP;
sc               2493 drivers/scsi/snic/snic_scsi.c 			       rqi, CMD_FLAGS(sc));
sc               2495 drivers/scsi/snic/snic_scsi.c 		snic_release_req_buf(snic, rqi, sc);
sc               2498 drivers/scsi/snic/snic_scsi.c 		sc->result = DID_TRANSPORT_DISRUPTED << 16;
sc               2501 drivers/scsi/snic/snic_scsi.c 			       sc, sc->request->tag, CMD_FLAGS(sc), rqi,
sc               2507 drivers/scsi/snic/snic_scsi.c 		if (sc->scsi_done) {
sc               2508 drivers/scsi/snic/snic_scsi.c 			SNIC_TRC(snic->shost->host_no, tag, (ulong) sc,
sc               2510 drivers/scsi/snic/snic_scsi.c 				 SNIC_TRC_CMD(sc),
sc               2511 drivers/scsi/snic/snic_scsi.c 				 SNIC_TRC_CMD_STATE_FLAGS(sc));
sc               2513 drivers/scsi/snic/snic_scsi.c 			sc->scsi_done(sc);
sc               2531 drivers/scsi/snic/snic_scsi.c snic_internal_abort_io(struct snic *snic, struct scsi_cmnd *sc, int tmf)
sc               2539 drivers/scsi/snic/snic_scsi.c 	io_lock = snic_io_lock_hash(snic, sc);
sc               2541 drivers/scsi/snic/snic_scsi.c 	rqi = (struct snic_req_info *) CMD_SP(sc);
sc               2545 drivers/scsi/snic/snic_scsi.c 	if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING)
sc               2548 drivers/scsi/snic/snic_scsi.c 	if ((CMD_FLAGS(sc) & SNIC_DEVICE_RESET) &&
sc               2549 drivers/scsi/snic/snic_scsi.c 		(!(CMD_FLAGS(sc) & SNIC_DEV_RST_ISSUED))) {
sc               2553 drivers/scsi/snic/snic_scsi.c 			      sc);
sc               2559 drivers/scsi/snic/snic_scsi.c 	if (!(CMD_FLAGS(sc) & SNIC_IO_ISSUED)) {
sc               2562 drivers/scsi/snic/snic_scsi.c 			sc, snic_cmd_tag(sc), CMD_FLAGS(sc), CMD_STATE(sc));
sc               2567 drivers/scsi/snic/snic_scsi.c 	sv_state = CMD_STATE(sc);
sc               2568 drivers/scsi/snic/snic_scsi.c 	CMD_STATE(sc) = SNIC_IOREQ_ABTS_PENDING;
sc               2569 drivers/scsi/snic/snic_scsi.c 	CMD_ABTS_STATUS(sc) = SNIC_INVALID_CODE;
sc               2570 drivers/scsi/snic/snic_scsi.c 	CMD_FLAGS(sc) |= SNIC_IO_INTERNAL_TERM_PENDING;
sc               2572 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_DEVICE_RESET) {
sc               2575 drivers/scsi/snic/snic_scsi.c 		SNIC_SCSI_DBG(snic->shost, "internal_abts:dev rst sc %p\n", sc);
sc               2579 drivers/scsi/snic/snic_scsi.c 		      snic_cmd_tag(sc));
sc               2583 drivers/scsi/snic/snic_scsi.c 	ret = snic_queue_abort_req(snic, rqi, sc, tmf);
sc               2587 drivers/scsi/snic/snic_scsi.c 			      snic_cmd_tag(sc), ret);
sc               2591 drivers/scsi/snic/snic_scsi.c 		if (CMD_STATE(sc) == SNIC_IOREQ_ABTS_PENDING)
sc               2592 drivers/scsi/snic/snic_scsi.c 			CMD_STATE(sc) = sv_state;
sc               2598 drivers/scsi/snic/snic_scsi.c 	if (CMD_FLAGS(sc) & SNIC_DEVICE_RESET)
sc               2599 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_DEV_RST_TERM_ISSUED;
sc               2601 drivers/scsi/snic/snic_scsi.c 		CMD_FLAGS(sc) |= SNIC_IO_INTERNAL_TERM_ISSUED;
sc               2619 drivers/scsi/snic/snic_scsi.c 	struct scsi_cmnd *sc = NULL;
sc               2640 drivers/scsi/snic/snic_scsi.c 		sc = scsi_host_find_tag(snic->shost, tag);
sc               2641 drivers/scsi/snic/snic_scsi.c 		if (!sc) {
sc               2647 drivers/scsi/snic/snic_scsi.c 		sc_tgt = starget_to_tgt(scsi_target(sc->device));
sc               2655 drivers/scsi/snic/snic_scsi.c 		ret = snic_internal_abort_io(snic, sc, tmf);
sc                112 drivers/scsi/snic/snic_trc.h #define SNIC_TRC_CMD(sc)	\
sc                113 drivers/scsi/snic/snic_trc.h 	((u64)sc->cmnd[0] << 56 | (u64)sc->cmnd[7] << 40 |	\
sc                114 drivers/scsi/snic/snic_trc.h 	 (u64)sc->cmnd[8] << 32 | (u64)sc->cmnd[2] << 24 |	\
sc                115 drivers/scsi/snic/snic_trc.h 	 (u64)sc->cmnd[3] << 16 | (u64)sc->cmnd[4] << 8 |	\
sc                116 drivers/scsi/snic/snic_trc.h 	 (u64)sc->cmnd[5])
sc                118 drivers/scsi/snic/snic_trc.h #define SNIC_TRC_CMD_STATE_FLAGS(sc)	\
sc                119 drivers/scsi/snic/snic_trc.h 	((u64) CMD_FLAGS(sc) << 32 | CMD_STATE(sc))
sc               2677 drivers/scsi/st.c 	s32 sc;
sc               2682 drivers/scsi/st.c 	sc = cmd[2] & 0x80 ? 0xff000000 : 0;
sc               2683 drivers/scsi/st.c 	sc |= (cmd[2] << 16) | (cmd[3] << 8) | cmd[4];
sc               2685 drivers/scsi/st.c 		sc = -sc;
sc               2687 drivers/scsi/st.c 		  direction ? "backward" : "forward", sc, units);
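The st.c excerpt above rebuilds a signed 24-bit count from CDB bytes 2..4 and widens it to 32 bits by replicating the sign bit before optionally negating it. A minimal standalone sketch of the same sign-extension idea; the helper name and the test values are illustrative, not taken from the driver:

#include <stdint.h>
#include <stdio.h>

/* Sign-extend a 24-bit two's-complement value stored big-endian in b[0..2]. */
static int32_t sext24_be(const uint8_t *b)
{
	int32_t v = (b[0] & 0x80) ? (int32_t)0xff000000 : 0;	/* replicate the sign bit */
	v |= (b[0] << 16) | (b[1] << 8) | b[2];
	return v;
}

int main(void)
{
	uint8_t fwd[3]  = { 0x00, 0x01, 0x00 };	/* +256 */
	uint8_t back[3] = { 0xff, 0xff, 0x00 };	/* -256 */

	printf("%d %d\n", (int)sext24_be(fwd), (int)sext24_be(back));
	return 0;
}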
sc                307 drivers/scsi/ufs/ufshcd.c 	trace_ufshcd_upiu(dev_name(hba->dev), str, &rq->header, &rq->sc.cdb);
sc                357 drivers/scsi/ufs/ufshcd.c 				lrbp->ucd_req_ptr->sc.exp_data_transfer_len);
sc               2257 drivers/scsi/ufs/ufshcd.c 	ucd_req_ptr->sc.exp_data_transfer_len =
sc               2261 drivers/scsi/ufs/ufshcd.c 	memset(ucd_req_ptr->sc.cdb, 0, UFS_CDB_SIZE);
sc               2262 drivers/scsi/ufs/ufshcd.c 	memcpy(ucd_req_ptr->sc.cdb, lrbp->cmd->cmnd, cdb_len);
sc                 41 drivers/scsi/virtio_scsi.c 	struct scsi_cmnd *sc;
sc                 97 drivers/scsi/virtio_scsi.c static void virtscsi_compute_resid(struct scsi_cmnd *sc, u32 resid)
sc                100 drivers/scsi/virtio_scsi.c 		scsi_set_resid(sc, resid);
sc                111 drivers/scsi/virtio_scsi.c 	struct scsi_cmnd *sc = cmd->sc;
sc                114 drivers/scsi/virtio_scsi.c 	dev_dbg(&sc->device->sdev_gendev,
sc                116 drivers/scsi/virtio_scsi.c 		sc, resp->response, resp->status, resp->sense_len);
sc                118 drivers/scsi/virtio_scsi.c 	sc->result = resp->status;
sc                119 drivers/scsi/virtio_scsi.c 	virtscsi_compute_resid(sc, virtio32_to_cpu(vscsi->vdev, resp->resid));
sc                122 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_OK);
sc                125 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_ERROR);
sc                128 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_ABORT);
sc                131 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_BAD_TARGET);
sc                134 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_RESET);
sc                137 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_BUS_BUSY);
sc                140 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_TRANSPORT_DISRUPTED);
sc                143 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_TARGET_FAILURE);
sc                146 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_NEXUS_FAILURE);
sc                149 drivers/scsi/virtio_scsi.c 		scmd_printk(KERN_WARNING, sc, "Unknown response %d",
sc                153 drivers/scsi/virtio_scsi.c 		set_host_byte(sc, DID_ERROR);
sc                159 drivers/scsi/virtio_scsi.c 	if (sc->sense_buffer) {
sc                160 drivers/scsi/virtio_scsi.c 		memcpy(sc->sense_buffer, resp->sense,
sc                165 drivers/scsi/virtio_scsi.c 			set_driver_byte(sc, DRIVER_SENSE);
sc                168 drivers/scsi/virtio_scsi.c 	sc->scsi_done(sc);
sc                409 drivers/scsi/virtio_scsi.c 	struct scsi_cmnd *sc = cmd->sc;
sc                416 drivers/scsi/virtio_scsi.c 	if (sc && sc->sc_data_direction != DMA_NONE) {
sc                417 drivers/scsi/virtio_scsi.c 		if (sc->sc_data_direction != DMA_FROM_DEVICE)
sc                418 drivers/scsi/virtio_scsi.c 			out = &sc->sdb.table;
sc                419 drivers/scsi/virtio_scsi.c 		if (sc->sc_data_direction != DMA_TO_DEVICE)
sc                420 drivers/scsi/virtio_scsi.c 			in = &sc->sdb.table;
sc                430 drivers/scsi/virtio_scsi.c 		if (scsi_prot_sg_count(sc))
sc                431 drivers/scsi/virtio_scsi.c 			sgs[out_num++] = scsi_prot_sglist(sc);
sc                442 drivers/scsi/virtio_scsi.c 		if (scsi_prot_sg_count(sc))
sc                443 drivers/scsi/virtio_scsi.c 			sgs[out_num + in_num++] = scsi_prot_sglist(sc);
sc                494 drivers/scsi/virtio_scsi.c 				 struct scsi_cmnd *sc)
sc                497 drivers/scsi/virtio_scsi.c 	cmd->lun[1] = sc->device->id;
sc                498 drivers/scsi/virtio_scsi.c 	cmd->lun[2] = (sc->device->lun >> 8) | 0x40;
sc                499 drivers/scsi/virtio_scsi.c 	cmd->lun[3] = sc->device->lun & 0xff;
sc                500 drivers/scsi/virtio_scsi.c 	cmd->tag = cpu_to_virtio64(vdev, (unsigned long)sc);
sc                509 drivers/scsi/virtio_scsi.c 				    struct scsi_cmnd *sc)
sc                511 drivers/scsi/virtio_scsi.c 	struct request *rq = sc->request;
sc                514 drivers/scsi/virtio_scsi.c 	virtio_scsi_init_hdr(vdev, (struct virtio_scsi_cmd_req *)cmd_pi, sc);
sc                516 drivers/scsi/virtio_scsi.c 	if (!rq || !scsi_prot_sg_count(sc))
sc                521 drivers/scsi/virtio_scsi.c 	if (sc->sc_data_direction == DMA_TO_DEVICE)
sc                525 drivers/scsi/virtio_scsi.c 	else if (sc->sc_data_direction == DMA_FROM_DEVICE)
sc                533 drivers/scsi/virtio_scsi.c 						  struct scsi_cmnd *sc)
sc                535 drivers/scsi/virtio_scsi.c 	u32 tag = blk_mq_unique_tag(sc->request);
sc                542 drivers/scsi/virtio_scsi.c 				 struct scsi_cmnd *sc)
sc                545 drivers/scsi/virtio_scsi.c 	struct virtio_scsi_vq *req_vq = virtscsi_pick_vq_mq(vscsi, sc);
sc                546 drivers/scsi/virtio_scsi.c 	struct virtio_scsi_cmd *cmd = scsi_cmd_priv(sc);
sc                552 drivers/scsi/virtio_scsi.c 	BUG_ON(scsi_sg_count(sc) > shost->sg_tablesize);
sc                555 drivers/scsi/virtio_scsi.c 	BUG_ON(sc->sc_data_direction == DMA_BIDIRECTIONAL);
sc                557 drivers/scsi/virtio_scsi.c 	dev_dbg(&sc->device->sdev_gendev,
sc                558 drivers/scsi/virtio_scsi.c 		"cmd %p CDB: %#02x\n", sc, sc->cmnd[0]);
sc                560 drivers/scsi/virtio_scsi.c 	cmd->sc = sc;
sc                562 drivers/scsi/virtio_scsi.c 	BUG_ON(sc->cmd_len > VIRTIO_SCSI_CDB_SIZE);
sc                566 drivers/scsi/virtio_scsi.c 		virtio_scsi_init_hdr_pi(vscsi->vdev, &cmd->req.cmd_pi, sc);
sc                567 drivers/scsi/virtio_scsi.c 		memcpy(cmd->req.cmd_pi.cdb, sc->cmnd, sc->cmd_len);
sc                572 drivers/scsi/virtio_scsi.c 		virtio_scsi_init_hdr(vscsi->vdev, &cmd->req.cmd, sc);
sc                573 drivers/scsi/virtio_scsi.c 		memcpy(cmd->req.cmd.cdb, sc->cmnd, sc->cmd_len);
sc                577 drivers/scsi/virtio_scsi.c 	kick = (sc->flags & SCMD_LAST) != 0;
sc                622 drivers/scsi/virtio_scsi.c static int virtscsi_device_reset(struct scsi_cmnd *sc)
sc                624 drivers/scsi/virtio_scsi.c 	struct virtio_scsi *vscsi = shost_priv(sc->device->host);
sc                627 drivers/scsi/virtio_scsi.c 	sdev_printk(KERN_INFO, sc->device, "device reset\n");
sc                638 drivers/scsi/virtio_scsi.c 		.lun[1] = sc->device->id,
sc                639 drivers/scsi/virtio_scsi.c 		.lun[2] = (sc->device->lun >> 8) | 0x40,
sc                640 drivers/scsi/virtio_scsi.c 		.lun[3] = sc->device->lun & 0xff,
sc                680 drivers/scsi/virtio_scsi.c static int virtscsi_abort(struct scsi_cmnd *sc)
sc                682 drivers/scsi/virtio_scsi.c 	struct virtio_scsi *vscsi = shost_priv(sc->device->host);
sc                685 drivers/scsi/virtio_scsi.c 	scmd_printk(KERN_INFO, sc, "abort\n");
sc                695 drivers/scsi/virtio_scsi.c 		.lun[1] = sc->device->id,
sc                696 drivers/scsi/virtio_scsi.c 		.lun[2] = (sc->device->lun >> 8) | 0x40,
sc                697 drivers/scsi/virtio_scsi.c 		.lun[3] = sc->device->lun & 0xff,
sc                698 drivers/scsi/virtio_scsi.c 		.tag = cpu_to_virtio64(vscsi->vdev, (unsigned long)sc),
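The virtio_scsi excerpts encode the addressed device into an 8-byte LUN field: byte 1 carries the target id and bytes 2-3 the LUN with the flat-addressing bit (0x40) set; the full driver also fixes byte 0 to 1, which the excerpts do not show. A hedged sketch of that encoding, with a hypothetical helper name:

#include <stdint.h>
#include <stdio.h>

/* Pack target id + LUN the way the virtio_scsi excerpts above do. */
static void encode_virtio_lun(uint8_t lun_field[8], uint8_t target, uint16_t lun)
{
	lun_field[0] = 1;			/* assumed from the full driver source */
	lun_field[1] = target;
	lun_field[2] = (lun >> 8) | 0x40;	/* flat-space LUN addressing */
	lun_field[3] = lun & 0xff;
	lun_field[4] = lun_field[5] = lun_field[6] = lun_field[7] = 0;
}

int main(void)
{
	uint8_t buf[8];
	int i;

	encode_virtio_lun(buf, 2, 5);
	for (i = 0; i < 8; i++)
		printf("%02x ", buf[i]);
	printf("\n");
	return 0;
}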
sc                 99 drivers/scsi/xen-scsifront.c 	struct scsi_cmnd *sc;
sc                183 drivers/scsi/xen-scsifront.c 	struct scsi_cmnd *sc = shadow->sc;
sc                205 drivers/scsi/xen-scsifront.c 	ring_req->id      = sc->device->id;
sc                206 drivers/scsi/xen-scsifront.c 	ring_req->lun     = sc->device->lun;
sc                207 drivers/scsi/xen-scsifront.c 	ring_req->channel = sc->device->channel;
sc                208 drivers/scsi/xen-scsifront.c 	ring_req->cmd_len = sc->cmd_len;
sc                210 drivers/scsi/xen-scsifront.c 	BUG_ON(sc->cmd_len > VSCSIIF_MAX_COMMAND_SIZE);
sc                212 drivers/scsi/xen-scsifront.c 	memcpy(ring_req->cmnd, sc->cmnd, sc->cmd_len);
sc                214 drivers/scsi/xen-scsifront.c 	ring_req->sc_data_direction   = (uint8_t)sc->sc_data_direction;
sc                215 drivers/scsi/xen-scsifront.c 	ring_req->timeout_per_command = sc->request->timeout / HZ;
sc                232 drivers/scsi/xen-scsifront.c 	if (shadow->sc->sc_data_direction == DMA_NONE)
sc                251 drivers/scsi/xen-scsifront.c 	struct scsi_cmnd *sc;
sc                257 drivers/scsi/xen-scsifront.c 	sc = shadow->sc;
sc                259 drivers/scsi/xen-scsifront.c 	BUG_ON(sc == NULL);
sc                264 drivers/scsi/xen-scsifront.c 	sc->result = ring_rsp->rslt;
sc                265 drivers/scsi/xen-scsifront.c 	scsi_set_resid(sc, ring_rsp->residual_len);
sc                271 drivers/scsi/xen-scsifront.c 		memcpy(sc->sense_buffer, ring_rsp->sense_buffer, sense_len);
sc                273 drivers/scsi/xen-scsifront.c 	sc->scsi_done(sc);
sc                393 drivers/scsi/xen-scsifront.c 				struct scsi_cmnd *sc,
sc                399 drivers/scsi/xen-scsifront.c 	int grant_ro = (sc->sc_data_direction == DMA_TO_DEVICE);
sc                401 drivers/scsi/xen-scsifront.c 	unsigned int data_len = scsi_bufflen(sc);
sc                406 drivers/scsi/xen-scsifront.c 	if (sc->sc_data_direction == DMA_NONE || !data_len)
sc                409 drivers/scsi/xen-scsifront.c 	scsi_for_each_sg(sc, sg, scsi_sg_count(sc), i)
sc                462 drivers/scsi/xen-scsifront.c 	scsi_for_each_sg(sc, sg, scsi_sg_count(sc), i) {
sc                529 drivers/scsi/xen-scsifront.c 				  struct scsi_cmnd *sc)
sc                532 drivers/scsi/xen-scsifront.c 	struct vscsifrnt_shadow *shadow = scsi_cmd_priv(sc);
sc                536 drivers/scsi/xen-scsifront.c 	sc->result = 0;
sc                538 drivers/scsi/xen-scsifront.c 	shadow->sc  = sc;
sc                547 drivers/scsi/xen-scsifront.c 	err = map_data_for_request(info, sc, shadow);
sc                554 drivers/scsi/xen-scsifront.c 		sc->result = DID_ERROR << 16;
sc                555 drivers/scsi/xen-scsifront.c 		sc->scsi_done(sc);
sc                581 drivers/scsi/xen-scsifront.c static int scsifront_action_handler(struct scsi_cmnd *sc, uint8_t act)
sc                583 drivers/scsi/xen-scsifront.c 	struct Scsi_Host *host = sc->device->host;
sc                585 drivers/scsi/xen-scsifront.c 	struct vscsifrnt_shadow *shadow, *s = scsi_cmd_priv(sc);
sc                594 drivers/scsi/xen-scsifront.c 	shadow->sc = sc;
sc                642 drivers/scsi/xen-scsifront.c static int scsifront_eh_abort_handler(struct scsi_cmnd *sc)
sc                645 drivers/scsi/xen-scsifront.c 	return scsifront_action_handler(sc, VSCSIIF_ACT_SCSI_ABORT);
sc                648 drivers/scsi/xen-scsifront.c static int scsifront_dev_reset_handler(struct scsi_cmnd *sc)
sc                651 drivers/scsi/xen-scsifront.c 	return scsifront_action_handler(sc, VSCSIIF_ACT_SCSI_RESET);
sc                526 drivers/spi/spi-fsl-dspi.c static void ns_delay_scale(char *psc, char *sc, int delay_ns,
sc                546 drivers/spi/spi-fsl-dspi.c 					*sc = j;
sc                556 drivers/spi/spi-fsl-dspi.c 		*sc = SPI_CTAR_SCALE_BITS;
sc                465 drivers/staging/android/ashmem.c ashmem_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc                470 drivers/staging/android/ashmem.c 	if (!(sc->gfp_mask & __GFP_FS))
sc                498 drivers/staging/android/ashmem.c 		if (--sc->nr_to_scan <= 0)
sc                507 drivers/staging/android/ashmem.c ashmem_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc                842 drivers/staging/android/ashmem.c 			struct shrink_control sc = {
sc                846 drivers/staging/android/ashmem.c 			ret = ashmem_shrink_count(&ashmem_shrinker, &sc);
sc                847 drivers/staging/android/ashmem.c 			ashmem_shrink_scan(&ashmem_shrinker, &sc);
sc                544 drivers/staging/android/ion/ion.c 	struct shrink_control sc;
sc                547 drivers/staging/android/ion/ion.c 	sc.gfp_mask = GFP_HIGHUSER;
sc                548 drivers/staging/android/ion/ion.c 	sc.nr_to_scan = val;
sc                551 drivers/staging/android/ion/ion.c 		objs = heap->shrinker.count_objects(&heap->shrinker, &sc);
sc                552 drivers/staging/android/ion/ion.c 		sc.nr_to_scan = objs;
sc                555 drivers/staging/android/ion/ion.c 	heap->shrinker.scan_objects(&heap->shrinker, &sc);
sc                562 drivers/staging/android/ion/ion.c 	struct shrink_control sc;
sc                565 drivers/staging/android/ion/ion.c 	sc.gfp_mask = GFP_HIGHUSER;
sc                566 drivers/staging/android/ion/ion.c 	sc.nr_to_scan = 0;
sc                568 drivers/staging/android/ion/ion.c 	objs = heap->shrinker.count_objects(&heap->shrinker, &sc);
sc                264 drivers/staging/android/ion/ion_heap.c 					   struct shrink_control *sc)
sc                273 drivers/staging/android/ion/ion_heap.c 		total += heap->ops->shrink(heap, sc->gfp_mask, 0);
sc                279 drivers/staging/android/ion/ion_heap.c 					  struct shrink_control *sc)
sc                284 drivers/staging/android/ion/ion_heap.c 	int to_scan = sc->nr_to_scan;
sc                302 drivers/staging/android/ion/ion_heap.c 		freed += heap->ops->shrink(heap, sc->gfp_mask, to_scan);
sc                263 drivers/staging/kpc2000/kpc2000_spi.c 	union kp_spi_config sc;
sc                279 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.wl = spidev->bits_per_word - 1;
sc                280 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.cs = spidev->chip_select;
sc                281 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.spi_en = 0;
sc                282 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.trm = 0;
sc                283 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.ffen = 0;
sc                284 drivers/staging/kpc2000/kpc2000_spi.c 	kp_spi_write_reg(spidev->controller_state, KP_SPI_REG_CONFIG, sc.reg);
sc                295 drivers/staging/kpc2000/kpc2000_spi.c 	union kp_spi_config sc;
sc                337 drivers/staging/kpc2000/kpc2000_spi.c 	sc.reg = kp_spi_read_reg(cs, KP_SPI_REG_CONFIG);
sc                338 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.spi_en = 1;
sc                339 drivers/staging/kpc2000/kpc2000_spi.c 	kp_spi_write_reg(cs, KP_SPI_REG_CONFIG, sc.reg);
sc                362 drivers/staging/kpc2000/kpc2000_spi.c 			sc.reg = kp_spi_read_reg(cs, KP_SPI_REG_CONFIG);
sc                366 drivers/staging/kpc2000/kpc2000_spi.c 				sc.bitfield.trm = KP_SPI_REG_CONFIG_TRM_TX;
sc                368 drivers/staging/kpc2000/kpc2000_spi.c 				sc.bitfield.trm = KP_SPI_REG_CONFIG_TRM_RX;
sc                373 drivers/staging/kpc2000/kpc2000_spi.c 			sc.bitfield.wl = word_len - 1;
sc                376 drivers/staging/kpc2000/kpc2000_spi.c 			sc.bitfield.cs = spidev->chip_select;
sc                379 drivers/staging/kpc2000/kpc2000_spi.c 			kp_spi_write_reg(cs, KP_SPI_REG_CONFIG, sc.reg);
sc                396 drivers/staging/kpc2000/kpc2000_spi.c 	sc.reg = kp_spi_read_reg(cs, KP_SPI_REG_CONFIG);
sc                397 drivers/staging/kpc2000/kpc2000_spi.c 	sc.bitfield.spi_en = 0;
sc                398 drivers/staging/kpc2000/kpc2000_spi.c 	kp_spi_write_reg(cs, KP_SPI_REG_CONFIG, sc.reg);
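The kpc2000_spi excerpts read the controller's config register into a union, flip individual bitfields (word length, chip select, enable), and write the whole word back. A generic sketch of that read-modify-write pattern; the field layout below is illustrative only, not the real KP_SPI_REG_CONFIG layout, and bitfield ordering is compiler-defined:

#include <stdint.h>
#include <stdio.h>

/* Illustrative config-register overlay: one raw word plus named bitfields. */
union cfg_reg {
	uint32_t reg;
	struct {
		uint32_t wl     : 5;	/* word length - 1 */
		uint32_t cs     : 3;	/* chip select */
		uint32_t trm    : 2;	/* transmit/receive mode */
		uint32_t spi_en : 1;	/* controller enable */
		uint32_t        : 21;	/* unused */
	} bitfield;
};

int main(void)
{
	union cfg_reg sc = { .reg = 0 };	/* pretend this came from a register read */

	sc.bitfield.wl = 8 - 1;			/* 8-bit words */
	sc.bitfield.cs = 1;
	sc.bitfield.spi_en = 1;
	printf("raw config word: 0x%08x\n", sc.reg);	/* would be written back to hardware */
	return 0;
}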
sc               1706 drivers/staging/media/ipu3/ipu3-abi.h 		u8 sc;
sc               1688 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 	u16 sc;
sc               1692 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc               1693 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc               1694 drivers/staging/rtl8192e/rtl8192e/r8192E_dev.c 	seq = WLAN_GET_SEQ_SEQ(sc);
sc                 92 drivers/staging/rtl8192e/rtllib_rx.c 	u16 sc = le16_to_cpu(hdr->seq_ctl);
sc                 93 drivers/staging/rtl8192e/rtllib_rx.c 	unsigned int frag = WLAN_GET_SEQ_FRAG(sc);
sc                 94 drivers/staging/rtl8192e/rtllib_rx.c 	unsigned int seq = WLAN_GET_SEQ_SEQ(sc);
sc                163 drivers/staging/rtl8192e/rtllib_rx.c 	u16 sc = le16_to_cpu(hdr->seq_ctl);
sc                164 drivers/staging/rtl8192e/rtllib_rx.c 	unsigned int seq = WLAN_GET_SEQ_SEQ(sc);
sc                365 drivers/staging/rtl8192e/rtllib_rx.c 	u16 sc = le16_to_cpu(header->seq_ctl);
sc                366 drivers/staging/rtl8192e/rtllib_rx.c 	u16 seq = WLAN_GET_SEQ_SEQ(sc);
sc                367 drivers/staging/rtl8192e/rtllib_rx.c 	u16 frag = WLAN_GET_SEQ_FRAG(sc);
sc                918 drivers/staging/rtl8192e/rtllib_rx.c 	u16 fc, sc;
sc                924 drivers/staging/rtl8192e/rtllib_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc                925 drivers/staging/rtl8192e/rtllib_rx.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc                942 drivers/staging/rtl8192e/rtllib_rx.c 			    (WLAN_GET_SEQ_SEQ(sc) == pRxTS->RxLastSeqNum))
sc                945 drivers/staging/rtl8192e/rtllib_rx.c 			pRxTS->RxLastSeqNum = WLAN_GET_SEQ_SEQ(sc);
sc               1084 drivers/staging/rtl8192e/rtllib_rx.c 	u16 fc, sc;
sc               1089 drivers/staging/rtl8192e/rtllib_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc               1090 drivers/staging/rtl8192e/rtllib_rx.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc               1114 drivers/staging/rtl8192e/rtllib_rx.c 				   WLAN_GET_SEQ_SEQ(sc), frag);
sc               1301 drivers/staging/rtl8192e/rtllib_rx.c 	u16 fc, sc, SeqNum = 0;
sc               1314 drivers/staging/rtl8192e/rtllib_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc               1405 drivers/staging/rtl8192e/rtllib_rx.c 		SeqNum = WLAN_GET_SEQ_SEQ(sc);
sc                 97 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 sc = le16_to_cpu(hdr->seq_ctl);
sc                 98 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	unsigned int frag = WLAN_GET_SEQ_FRAG(sc);
sc                 99 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	unsigned int seq = WLAN_GET_SEQ_SEQ(sc);
sc                165 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 sc = le16_to_cpu(hdr->seq_ctl);
sc                166 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	unsigned int seq = WLAN_GET_SEQ_SEQ(sc);
sc                416 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 sc = le16_to_cpu(header->seq_ctl);
sc                417 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 seq = WLAN_GET_SEQ_SEQ(sc);
sc                418 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 frag = WLAN_GET_SEQ_FRAG(sc);
sc                877 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	u16 fc, type, stype, sc;
sc                915 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc                917 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 	frag = WLAN_GET_SEQ_FRAG(sc);
sc               1012 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 			    (WLAN_GET_SEQ_SEQ(sc) == pRxTS->rx_last_seq_num)) {
sc               1016 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 				pRxTS->rx_last_seq_num = WLAN_GET_SEQ_SEQ(sc);
sc               1136 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 					WLAN_GET_SEQ_SEQ(sc), frag);
sc               1239 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 		SeqNum = WLAN_GET_SEQ_SEQ(sc);
sc               3810 drivers/staging/rtl8192u/r8192U_core.c 	u16 sc;
sc               3814 drivers/staging/rtl8192u/r8192U_core.c 	sc = le16_to_cpu(hdr->seq_ctl);
sc               3815 drivers/staging/rtl8192u/r8192U_core.c 	seq = WLAN_GET_SEQ_SEQ(sc);
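The rtl8192e/rtl8192u lines all read the 802.11 sequence-control field and split it with WLAN_GET_SEQ_FRAG/WLAN_GET_SEQ_SEQ. In that 16-bit field the low 4 bits carry the fragment number and the upper 12 bits the sequence number; a self-contained sketch of the same extraction, with masks taken from the 802.11 field layout rather than copied from the driver headers:

	#include <stdint.h>
	#include <stdio.h>

	#define SCTL_FRAG 0x000f		/* fragment number, bits 0-3 */
	#define SCTL_SEQ  0xfff0		/* sequence number, bits 4-15 */

	static unsigned int get_seq_frag(uint16_t sc) { return sc & SCTL_FRAG; }
	static unsigned int get_seq_seq(uint16_t sc)  { return (sc & SCTL_SEQ) >> 4; }

	int main(void)
	{
		uint16_t sc = 0x1234;	/* host-order value, i.e. after le16_to_cpu() */

		printf("seq=%u frag=%u\n", get_seq_seq(sc), get_seq_frag(sc));
		/* prints: seq=291 frag=4 */
		return 0;
	}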
sc                 48 drivers/staging/speakup/speakup.h void spk_reset_index_count(int sc);
sc                269 drivers/staging/speakup/synth.c void spk_reset_index_count(int sc)
sc                278 drivers/staging/speakup/synth.c 	sentence_count = sc;
sc                296 drivers/staging/uwb/hwa-rc.c 	struct uwb_rc_cmd_scan *sc;
sc                298 drivers/staging/uwb/hwa-rc.c 	sc = container_of(*header, struct uwb_rc_cmd_scan, rccb);
sc                300 drivers/staging/uwb/hwa-rc.c 	if (sc->bScanState == UWB_SCAN_ONLY_STARTTIME)
sc                301 drivers/staging/uwb/hwa-rc.c 		sc->bScanState = UWB_SCAN_ONLY;
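The hwa-rc line recovers the enclosing scan command from a pointer to its embedded rccb member via container_of(). A self-contained illustration of that idiom, using a simplified version of the kernel macro and made-up structure names:

	#include <stddef.h>
	#include <stdio.h>

	/* simplified form of the kernel macro: walk back from a member to its parent */
	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct demo_header { int type; };	/* embedded header, like rccb above */
	struct demo_cmd {			/* enclosing command structure */
		int state;
		struct demo_header hdr;
	};

	int main(void)
	{
		struct demo_cmd cmd = { .state = 7 };
		struct demo_header *h = &cmd.hdr;

		struct demo_cmd *back = container_of(h, struct demo_cmd, hdr);
		printf("state = %d\n", back->state);	/* prints 7 */
		return 0;
	}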
sc                101 drivers/target/loopback/tcm_loop.c 	struct scsi_cmnd *sc = tl_cmd->sc;
sc                109 drivers/target/loopback/tcm_loop.c 	tl_hba = *(struct tcm_loop_hba **)shost_priv(sc->device->host);
sc                110 drivers/target/loopback/tcm_loop.c 	tl_tpg = &tl_hba->tl_hba_tpgs[sc->device->id];
sc                117 drivers/target/loopback/tcm_loop.c 		set_host_byte(sc, DID_NO_CONNECT);
sc                121 drivers/target/loopback/tcm_loop.c 		set_host_byte(sc, DID_TRANSPORT_DISRUPTED);
sc                126 drivers/target/loopback/tcm_loop.c 		scmd_printk(KERN_ERR, sc,
sc                128 drivers/target/loopback/tcm_loop.c 		set_host_byte(sc, DID_ERROR);
sc                132 drivers/target/loopback/tcm_loop.c 	transfer_length = scsi_transfer_length(sc);
sc                133 drivers/target/loopback/tcm_loop.c 	if (!scsi_prot_sg_count(sc) &&
sc                134 drivers/target/loopback/tcm_loop.c 	    scsi_get_prot_op(sc) != SCSI_PROT_NORMAL) {
sc                141 drivers/target/loopback/tcm_loop.c 		transfer_length = scsi_bufflen(sc);
sc                145 drivers/target/loopback/tcm_loop.c 	rc = target_submit_cmd_map_sgls(se_cmd, tl_nexus->se_sess, sc->cmnd,
sc                146 drivers/target/loopback/tcm_loop.c 			&tl_cmd->tl_sense_buf[0], tl_cmd->sc->device->lun,
sc                148 drivers/target/loopback/tcm_loop.c 			sc->sc_data_direction, 0,
sc                149 drivers/target/loopback/tcm_loop.c 			scsi_sglist(sc), scsi_sg_count(sc),
sc                151 drivers/target/loopback/tcm_loop.c 			scsi_prot_sglist(sc), scsi_prot_sg_count(sc));
sc                153 drivers/target/loopback/tcm_loop.c 		set_host_byte(sc, DID_NO_CONNECT);
sc                160 drivers/target/loopback/tcm_loop.c 	sc->scsi_done(sc);
sc                167 drivers/target/loopback/tcm_loop.c static int tcm_loop_queuecommand(struct Scsi_Host *sh, struct scsi_cmnd *sc)
sc                172 drivers/target/loopback/tcm_loop.c 		 __func__, sc->device->host->host_no, sc->device->id,
sc                173 drivers/target/loopback/tcm_loop.c 		 sc->device->channel, sc->device->lun, sc->cmnd[0],
sc                174 drivers/target/loopback/tcm_loop.c 		 scsi_bufflen(sc));
sc                178 drivers/target/loopback/tcm_loop.c 		set_host_byte(sc, DID_ERROR);
sc                179 drivers/target/loopback/tcm_loop.c 		sc->scsi_done(sc);
sc                183 drivers/target/loopback/tcm_loop.c 	tl_cmd->sc = sc;
sc                184 drivers/target/loopback/tcm_loop.c 	tl_cmd->sc_cmd_tag = sc->request->tag;
sc                238 drivers/target/loopback/tcm_loop.c static int tcm_loop_abort_task(struct scsi_cmnd *sc)
sc                247 drivers/target/loopback/tcm_loop.c 	tl_hba = *(struct tcm_loop_hba **)shost_priv(sc->device->host);
sc                248 drivers/target/loopback/tcm_loop.c 	tl_tpg = &tl_hba->tl_hba_tpgs[sc->device->id];
sc                249 drivers/target/loopback/tcm_loop.c 	ret = tcm_loop_issue_tmr(tl_tpg, sc->device->lun,
sc                250 drivers/target/loopback/tcm_loop.c 				 sc->request->tag, TMR_ABORT_TASK);
sc                258 drivers/target/loopback/tcm_loop.c static int tcm_loop_device_reset(struct scsi_cmnd *sc)
sc                267 drivers/target/loopback/tcm_loop.c 	tl_hba = *(struct tcm_loop_hba **)shost_priv(sc->device->host);
sc                268 drivers/target/loopback/tcm_loop.c 	tl_tpg = &tl_hba->tl_hba_tpgs[sc->device->id];
sc                270 drivers/target/loopback/tcm_loop.c 	ret = tcm_loop_issue_tmr(tl_tpg, sc->device->lun,
sc                275 drivers/target/loopback/tcm_loop.c static int tcm_loop_target_reset(struct scsi_cmnd *sc)
sc                283 drivers/target/loopback/tcm_loop.c 	tl_hba = *(struct tcm_loop_hba **)shost_priv(sc->device->host);
sc                291 drivers/target/loopback/tcm_loop.c 	tl_tpg = &tl_hba->tl_hba_tpgs[sc->device->id];
sc                552 drivers/target/loopback/tcm_loop.c 	struct scsi_cmnd *sc = tl_cmd->sc;
sc                555 drivers/target/loopback/tcm_loop.c 		 __func__, sc, sc->cmnd[0]);
sc                557 drivers/target/loopback/tcm_loop.c 	sc->result = SAM_STAT_GOOD;
sc                558 drivers/target/loopback/tcm_loop.c 	set_host_byte(sc, DID_OK);
sc                561 drivers/target/loopback/tcm_loop.c 		scsi_set_resid(sc, se_cmd->residual_count);
sc                562 drivers/target/loopback/tcm_loop.c 	sc->scsi_done(sc);
sc                570 drivers/target/loopback/tcm_loop.c 	struct scsi_cmnd *sc = tl_cmd->sc;
sc                573 drivers/target/loopback/tcm_loop.c 		 __func__, sc, sc->cmnd[0]);
sc                579 drivers/target/loopback/tcm_loop.c 		memcpy(sc->sense_buffer, se_cmd->sense_buffer,
sc                581 drivers/target/loopback/tcm_loop.c 		sc->result = SAM_STAT_CHECK_CONDITION;
sc                582 drivers/target/loopback/tcm_loop.c 		set_driver_byte(sc, DRIVER_SENSE);
sc                584 drivers/target/loopback/tcm_loop.c 		sc->result = se_cmd->scsi_status;
sc                586 drivers/target/loopback/tcm_loop.c 	set_host_byte(sc, DID_OK);
sc                589 drivers/target/loopback/tcm_loop.c 		scsi_set_resid(sc, se_cmd->residual_count);
sc                590 drivers/target/loopback/tcm_loop.c 	sc->scsi_done(sc);
sc                 16 drivers/target/loopback/tcm_loop.h 	struct scsi_cmnd *sc;
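The tcm_loop entries follow the usual SCSI completion pattern for this kernel: set the SAM status and host byte on the struct scsi_cmnd, copy sense data for CHECK CONDITION, and return the command through the scsi_done callback. A reduced, hypothetical helper in that style; the real callbacks above also handle residuals and the target-core plumbing:

	#include <linux/kernel.h>
	#include <linux/types.h>
	#include <linux/string.h>
	#include <scsi/scsi.h>
	#include <scsi/scsi_cmnd.h>

	/* hypothetical helper mirroring the completion pattern in the lines above */
	static void demo_complete_cmd(struct scsi_cmnd *sc, u8 scsi_status,
				      const u8 *sense, int sense_len)
	{
		if (scsi_status == SAM_STAT_CHECK_CONDITION && sense) {
			memcpy(sc->sense_buffer, sense,
			       min(sense_len, SCSI_SENSE_BUFFERSIZE));
			sc->result = SAM_STAT_CHECK_CONDITION;
			set_driver_byte(sc, DRIVER_SENSE);
		} else {
			sc->result = scsi_status;
		}

		set_host_byte(sc, DID_OK);	/* transport-level success */
		sc->scsi_done(sc);		/* hand the command back to the midlayer */
	}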
sc                253 drivers/usb/atm/ueagle-atm.c #define IS_OPERATIONAL(sc) \
sc                254 drivers/usb/atm/ueagle-atm.c 	((UEA_CHIP_VERSION(sc) != EAGLE_IV) ? \
sc                255 drivers/usb/atm/ueagle-atm.c 	(GET_STATUS(sc->stats.phy.state) == 2) : \
sc                256 drivers/usb/atm/ueagle-atm.c 	(sc->stats.phy.state == 7))
sc                547 drivers/usb/atm/ueagle-atm.c #define uea_wait(sc, cond, timeo) \
sc                549 drivers/usb/atm/ueagle-atm.c 	int _r = wait_event_interruptible_timeout(sc->sync_q, \
sc                558 drivers/usb/atm/ueagle-atm.c 		if (sc->usbatm->atm_dev) \
sc                559 drivers/usb/atm/ueagle-atm.c 			sc->usbatm->atm_dev->type = val; \
sc                564 drivers/usb/atm/ueagle-atm.c 		if (sc->usbatm->atm_dev) \
sc                565 drivers/usb/atm/ueagle-atm.c 			atm_dev_signal_change(sc->usbatm->atm_dev, val); \
sc                821 drivers/usb/atm/ueagle-atm.c static int uea_idma_write(struct uea_softc *sc, const void *data, u32 size)
sc                829 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "can't allocate xfer_buff\n");
sc                833 drivers/usb/atm/ueagle-atm.c 	ret = usb_bulk_msg(sc->usb_dev,
sc                834 drivers/usb/atm/ueagle-atm.c 			 usb_sndbulkpipe(sc->usb_dev, UEA_IDMA_PIPE),
sc                841 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "size != bytes_read %d %d\n", size,
sc                849 drivers/usb/atm/ueagle-atm.c static int request_dsp(struct uea_softc *sc)
sc                854 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV) {
sc                855 drivers/usb/atm/ueagle-atm.c 		if (IS_ISDN(sc))
sc                859 drivers/usb/atm/ueagle-atm.c 	} else if (UEA_CHIP_VERSION(sc) == ADI930) {
sc                860 drivers/usb/atm/ueagle-atm.c 		if (IS_ISDN(sc))
sc                865 drivers/usb/atm/ueagle-atm.c 		if (IS_ISDN(sc))
sc                871 drivers/usb/atm/ueagle-atm.c 	ret = request_firmware(&sc->dsp_firm, dsp_name, &sc->usb_dev->dev);
sc                873 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc                879 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV)
sc                880 drivers/usb/atm/ueagle-atm.c 		ret = check_dsp_e4(sc->dsp_firm->data, sc->dsp_firm->size);
sc                882 drivers/usb/atm/ueagle-atm.c 		ret = check_dsp_e1(sc->dsp_firm->data, sc->dsp_firm->size);
sc                885 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "firmware %s is corrupted\n",
sc                887 drivers/usb/atm/ueagle-atm.c 		release_firmware(sc->dsp_firm);
sc                888 drivers/usb/atm/ueagle-atm.c 		sc->dsp_firm = NULL;
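The request_dsp lines show the standard firmware-loading flow: request_firmware() against the USB device, validate the image, and release_firmware() plus a cleared pointer on failure. A minimal sketch of that flow; the firmware name and the size check are placeholders, not the ueagle checks:

	#include <linux/errno.h>
	#include <linux/firmware.h>
	#include <linux/device.h>

	/* hypothetical: load and validate a DSP image for 'dev' */
	static int demo_load_dsp(struct device *dev, const struct firmware **out)
	{
		const struct firmware *fw;
		int ret;

		ret = request_firmware(&fw, "demo_dsp.bin", dev);
		if (ret) {
			dev_err(dev, "firmware demo_dsp.bin not available (%d)\n", ret);
			return ret;
		}

		if (fw->size < 16) {		/* placeholder sanity check */
			dev_err(dev, "firmware demo_dsp.bin is corrupted\n");
			release_firmware(fw);
			return -EILSEQ;
		}

		*out = fw;	/* caller keeps it until release_firmware() */
		return 0;
	}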
sc                900 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = container_of(work, struct uea_softc, task);
sc                901 drivers/usb/atm/ueagle-atm.c 	u16 pageno = sc->pageno;
sc                902 drivers/usb/atm/ueagle-atm.c 	u16 ovl = sc->ovl;
sc                913 drivers/usb/atm/ueagle-atm.c 		release_firmware(sc->dsp_firm);
sc                914 drivers/usb/atm/ueagle-atm.c 		sc->dsp_firm = NULL;
sc                917 drivers/usb/atm/ueagle-atm.c 	if (sc->dsp_firm == NULL && request_dsp(sc) < 0)
sc                920 drivers/usb/atm/ueagle-atm.c 	p = sc->dsp_firm->data;
sc                933 drivers/usb/atm/ueagle-atm.c 	p = sc->dsp_firm->data + pageoffset;
sc                937 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc),
sc                956 drivers/usb/atm/ueagle-atm.c 		if (uea_idma_write(sc, &bi, E1_BLOCK_INFO_SIZE))
sc                960 drivers/usb/atm/ueagle-atm.c 		if (uea_idma_write(sc, p, blocksize))
sc                969 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "sending DSP block %u failed\n", i);
sc                972 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "invalid DSP page %u requested\n", pageno);
sc                975 drivers/usb/atm/ueagle-atm.c static void __uea_load_page_e4(struct uea_softc *sc, u8 pageno, int boot)
sc                979 drivers/usb/atm/ueagle-atm.c 	struct l1_code *p = (struct l1_code *) sc->dsp_firm->data;
sc                993 drivers/usb/atm/ueagle-atm.c 		blockoffset = sc->dsp_firm->data + le32_to_cpu(
sc                999 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc),
sc               1006 drivers/usb/atm/ueagle-atm.c 		if (uea_idma_write(sc, &bi, E4_BLOCK_INFO_SIZE))
sc               1010 drivers/usb/atm/ueagle-atm.c 		if (uea_idma_write(sc, blockoffset, blocksize))
sc               1019 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "sending DSP block %u failed\n", blockno);
sc               1025 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = container_of(work, struct uea_softc, task);
sc               1026 drivers/usb/atm/ueagle-atm.c 	u8 pageno = sc->pageno;
sc               1031 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc), "sending DSP page %u\n", pageno);
sc               1035 drivers/usb/atm/ueagle-atm.c 		release_firmware(sc->dsp_firm);
sc               1036 drivers/usb/atm/ueagle-atm.c 		sc->dsp_firm = NULL;
sc               1039 drivers/usb/atm/ueagle-atm.c 	if (sc->dsp_firm == NULL && request_dsp(sc) < 0)
sc               1042 drivers/usb/atm/ueagle-atm.c 	p = (struct l1_code *) sc->dsp_firm->data;
sc               1044 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "invalid DSP "
sc               1050 drivers/usb/atm/ueagle-atm.c 		__uea_load_page_e4(sc, pageno, 0);
sc               1054 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc),
sc               1059 drivers/usb/atm/ueagle-atm.c 			__uea_load_page_e4(sc, i, 1);
sc               1062 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc) , "sending start bi\n");
sc               1072 drivers/usb/atm/ueagle-atm.c 	if (uea_idma_write(sc, &bi, E4_BLOCK_INFO_SIZE))
sc               1073 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "sending DSP start bi failed\n");
sc               1076 drivers/usb/atm/ueagle-atm.c static inline void wake_up_cmv_ack(struct uea_softc *sc)
sc               1078 drivers/usb/atm/ueagle-atm.c 	BUG_ON(sc->cmv_ack);
sc               1079 drivers/usb/atm/ueagle-atm.c 	sc->cmv_ack = 1;
sc               1080 drivers/usb/atm/ueagle-atm.c 	wake_up(&sc->sync_q);
sc               1083 drivers/usb/atm/ueagle-atm.c static inline int wait_cmv_ack(struct uea_softc *sc)
sc               1085 drivers/usb/atm/ueagle-atm.c 	int ret = uea_wait(sc, sc->cmv_ack , ACK_TIMEOUT);
sc               1087 drivers/usb/atm/ueagle-atm.c 	sc->cmv_ack = 0;
sc               1089 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc), "wait_event_timeout : %d ms\n",
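wake_up_cmv_ack()/wait_cmv_ack() above are the usual "set a flag, wake the waiter" handshake on a wait queue. A generic sketch of that pattern with wait_event_interruptible_timeout(); the structure and names are illustrative, not the driver's own:

	#include <linux/errno.h>
	#include <linux/wait.h>
	#include <linux/jiffies.h>

	struct demo_dev {
		wait_queue_head_t sync_q;	/* init_waitqueue_head() at probe time */
		int ack;			/* set by the interrupt path */
	};

	/* completion side, e.g. called from an URB or IRQ handler */
	static void demo_post_ack(struct demo_dev *d)
	{
		d->ack = 1;
		wake_up(&d->sync_q);
	}

	/* waiting side: 0 on success, -ETIMEDOUT or -ERESTARTSYS otherwise */
	static int demo_wait_ack(struct demo_dev *d, unsigned long timeout_ms)
	{
		long ret = wait_event_interruptible_timeout(d->sync_q, d->ack,
						msecs_to_jiffies(timeout_ms));

		d->ack = 0;
		if (ret == 0)
			return -ETIMEDOUT;	/* timed out without an ack */
		return ret < 0 ? ret : 0;	/* interrupted, or got the ack */
	}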
sc               1100 drivers/usb/atm/ueagle-atm.c static int uea_request(struct uea_softc *sc,
sc               1108 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "can't allocate xfer_buff\n");
sc               1112 drivers/usb/atm/ueagle-atm.c 	ret = usb_control_msg(sc->usb_dev, usb_sndctrlpipe(sc->usb_dev, 0),
sc               1119 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "usb_control_msg error %d\n", ret);
sc               1124 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1133 drivers/usb/atm/ueagle-atm.c static int uea_cmv_e1(struct uea_softc *sc,
sc               1139 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1140 drivers/usb/atm/ueagle-atm.c 	uea_vdbg(INS_TO_USBDEV(sc), "Function : %d-%d, Address : %c%c%c%c, "
sc               1149 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e1.function = function | 0x2;
sc               1150 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e1.idx++;
sc               1151 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e1.address = address;
sc               1152 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e1.offset = offset;
sc               1157 drivers/usb/atm/ueagle-atm.c 	cmv.wIndex = cpu_to_le16(sc->cmv_dsc.e1.idx);
sc               1162 drivers/usb/atm/ueagle-atm.c 	ret = uea_request(sc, UEA_E1_SET_BLOCK, UEA_MPTX_START,
sc               1166 drivers/usb/atm/ueagle-atm.c 	ret = wait_cmv_ack(sc);
sc               1167 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               1171 drivers/usb/atm/ueagle-atm.c static int uea_cmv_e4(struct uea_softc *sc,
sc               1177 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1180 drivers/usb/atm/ueagle-atm.c 	uea_vdbg(INS_TO_USBDEV(sc), "Function : %d-%d, Group : 0x%04x, "
sc               1186 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e4.function = function | (0x1 << 4);
sc               1187 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e4.offset = offset;
sc               1188 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e4.address = address;
sc               1189 drivers/usb/atm/ueagle-atm.c 	sc->cmv_dsc.e4.group = group;
sc               1197 drivers/usb/atm/ueagle-atm.c 	ret = uea_request(sc, UEA_E4_SET_BLOCK, UEA_MPTX_START,
sc               1201 drivers/usb/atm/ueagle-atm.c 	ret = wait_cmv_ack(sc);
sc               1202 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               1206 drivers/usb/atm/ueagle-atm.c static inline int uea_read_cmv_e1(struct uea_softc *sc,
sc               1209 drivers/usb/atm/ueagle-atm.c 	int ret = uea_cmv_e1(sc, E1_MAKEFUNCTION(E1_MEMACCESS, E1_REQUESTREAD),
sc               1212 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1215 drivers/usb/atm/ueagle-atm.c 		*data = sc->data;
sc               1220 drivers/usb/atm/ueagle-atm.c static inline int uea_read_cmv_e4(struct uea_softc *sc,
sc               1223 drivers/usb/atm/ueagle-atm.c 	int ret = uea_cmv_e4(sc, E4_MAKEFUNCTION(E4_MEMACCESS,
sc               1227 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1230 drivers/usb/atm/ueagle-atm.c 		*data = sc->data;
sc               1233 drivers/usb/atm/ueagle-atm.c 			*(data + 1) = sc->data1;
sc               1238 drivers/usb/atm/ueagle-atm.c static inline int uea_write_cmv_e1(struct uea_softc *sc,
sc               1241 drivers/usb/atm/ueagle-atm.c 	int ret = uea_cmv_e1(sc, E1_MAKEFUNCTION(E1_MEMACCESS, E1_REQUESTWRITE),
sc               1244 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1250 drivers/usb/atm/ueagle-atm.c static inline int uea_write_cmv_e4(struct uea_softc *sc,
sc               1253 drivers/usb/atm/ueagle-atm.c 	int ret = uea_cmv_e4(sc, E4_MAKEFUNCTION(E4_MEMACCESS,
sc               1257 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1263 drivers/usb/atm/ueagle-atm.c static void uea_set_bulk_timeout(struct uea_softc *sc, u32 dsrate)
sc               1274 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == ADI930 ||
sc               1275 drivers/usb/atm/ueagle-atm.c 	    altsetting[sc->modem_index] > 0 ||
sc               1276 drivers/usb/atm/ueagle-atm.c 	    sc->stats.phy.dsrate == dsrate)
sc               1281 drivers/usb/atm/ueagle-atm.c 	ret = uea_request(sc, UEA_SET_TIMEOUT, timeout, 0, NULL);
sc               1282 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "setting new timeout %d%s\n",
sc               1292 drivers/usb/atm/ueagle-atm.c static int uea_stat_e1(struct uea_softc *sc)
sc               1297 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1298 drivers/usb/atm/ueagle-atm.c 	data = sc->stats.phy.state;
sc               1300 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_STAT, 0, &sc->stats.phy.state);
sc               1304 drivers/usb/atm/ueagle-atm.c 	switch (GET_STATUS(sc->stats.phy.state)) {
sc               1306 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc),
sc               1311 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc), "modem initializing\n");
sc               1315 drivers/usb/atm/ueagle-atm.c 		uea_vdbg(INS_TO_USBDEV(sc), "modem operational\n");
sc               1319 drivers/usb/atm/ueagle-atm.c 		uea_info(INS_TO_USBDEV(sc), "modem synchronization failed"
sc               1324 drivers/usb/atm/ueagle-atm.c 		uea_warn(INS_TO_USBDEV(sc),
sc               1329 drivers/usb/atm/ueagle-atm.c 		uea_info(INS_TO_USBDEV(sc), "modem in fast-retain mode\n");
sc               1332 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "modem invalid SW mode %d\n",
sc               1333 drivers/usb/atm/ueagle-atm.c 			GET_STATUS(sc->stats.phy.state));
sc               1338 drivers/usb/atm/ueagle-atm.c 		uea_request(sc, UEA_SET_MODE, UEA_LOOPBACK_OFF, 0, NULL);
sc               1339 drivers/usb/atm/ueagle-atm.c 		uea_info(INS_TO_USBDEV(sc), "modem operational\n");
sc               1344 drivers/usb/atm/ueagle-atm.c 		release_firmware(sc->dsp_firm);
sc               1345 drivers/usb/atm/ueagle-atm.c 		sc->dsp_firm = NULL;
sc               1354 drivers/usb/atm/ueagle-atm.c 	wake_up(&sc->sync_q);
sc               1356 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 2, &sc->stats.phy.flags);
sc               1359 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.mflags |= sc->stats.phy.flags;
sc               1364 drivers/usb/atm/ueagle-atm.c 	if (sc->stats.phy.flags) {
sc               1365 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc), "Stat flag = 0x%x\n",
sc               1366 drivers/usb/atm/ueagle-atm.c 		       sc->stats.phy.flags);
sc               1370 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_RATE, 0, &data);
sc               1374 drivers/usb/atm/ueagle-atm.c 	uea_set_bulk_timeout(sc, (data >> 16) * 32);
sc               1375 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.dsrate = (data >> 16) * 32;
sc               1376 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.usrate = (data & 0xffff) * 32;
sc               1377 drivers/usb/atm/ueagle-atm.c 	UPDATE_ATM_STAT(link_rate, sc->stats.phy.dsrate * 1000 / 424);
sc               1379 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 23, &data);
sc               1382 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.dsattenuation = (data & 0xff) / 2;
sc               1384 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 47, &data);
sc               1387 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.usattenuation = (data & 0xff) / 2;
sc               1389 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 25, &sc->stats.phy.dsmargin);
sc               1393 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 49, &sc->stats.phy.usmargin);
sc               1397 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 51, &sc->stats.phy.rxflow);
sc               1401 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 52, &sc->stats.phy.txflow);
sc               1405 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 54, &sc->stats.phy.dsunc);
sc               1410 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 58, &sc->stats.phy.usunc);
sc               1414 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 53, &sc->stats.phy.dscorr);
sc               1419 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_DIAG, 57, &sc->stats.phy.uscorr);
sc               1423 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_INFO, 8, &sc->stats.phy.vidco);
sc               1427 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_INFO, 13, &sc->stats.phy.vidcpe);
sc               1434 drivers/usb/atm/ueagle-atm.c static int uea_stat_e4(struct uea_softc *sc)
sc               1440 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1441 drivers/usb/atm/ueagle-atm.c 	data = sc->stats.phy.state;
sc               1444 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_STAT, 0, 0, &sc->stats.phy.state);
sc               1448 drivers/usb/atm/ueagle-atm.c 	switch (sc->stats.phy.state) {
sc               1453 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc), "modem not yet "
sc               1460 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc), "modem initializing\n");
sc               1463 drivers/usb/atm/ueagle-atm.c 		uea_info(INS_TO_USBDEV(sc), "modem synchronization "
sc               1469 drivers/usb/atm/ueagle-atm.c 		uea_warn(INS_TO_USBDEV(sc), "unknown state: %x\n",
sc               1470 drivers/usb/atm/ueagle-atm.c 						sc->stats.phy.state);
sc               1475 drivers/usb/atm/ueagle-atm.c 		uea_request(sc, UEA_SET_MODE, UEA_LOOPBACK_OFF, 0, NULL);
sc               1476 drivers/usb/atm/ueagle-atm.c 		uea_info(INS_TO_USBDEV(sc), "modem operational\n");
sc               1481 drivers/usb/atm/ueagle-atm.c 		release_firmware(sc->dsp_firm);
sc               1482 drivers/usb/atm/ueagle-atm.c 		sc->dsp_firm = NULL;
sc               1491 drivers/usb/atm/ueagle-atm.c 	wake_up(&sc->sync_q);
sc               1498 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_DIAG, 0, 0, &sc->stats.phy.flags);
sc               1501 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.mflags |= sc->stats.phy.flags;
sc               1506 drivers/usb/atm/ueagle-atm.c 	if (sc->stats.phy.flags) {
sc               1507 drivers/usb/atm/ueagle-atm.c 		uea_dbg(INS_TO_USBDEV(sc), "Stat flag = 0x%x\n",
sc               1508 drivers/usb/atm/ueagle-atm.c 		       sc->stats.phy.flags);
sc               1509 drivers/usb/atm/ueagle-atm.c 		if (sc->stats.phy.flags & 1) /* delineation LOSS */
sc               1511 drivers/usb/atm/ueagle-atm.c 		if (sc->stats.phy.flags & 0x4000) /* Reset Flag */
sc               1517 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 4, E4_SA_RATE, 0, 0, tmp_arr);
sc               1521 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.usrate = data / 1000;
sc               1523 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 4, E4_SA_RATE, 1, 0, tmp_arr);
sc               1527 drivers/usb/atm/ueagle-atm.c 	uea_set_bulk_timeout(sc, data / 1000);
sc               1528 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.dsrate = data / 1000;
sc               1529 drivers/usb/atm/ueagle-atm.c 	UPDATE_ATM_STAT(link_rate, sc->stats.phy.dsrate * 1000 / 424);
sc               1531 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_INFO, 68, 1, &data);
sc               1534 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.dsattenuation = data / 10;
sc               1536 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_INFO, 69, 1, &data);
sc               1539 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.usattenuation = data / 10;
sc               1541 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_INFO, 68, 3, &data);
sc               1544 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.dsmargin = data / 2;
sc               1546 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 1, E4_SA_INFO, 69, 3, &data);
sc               1549 drivers/usb/atm/ueagle-atm.c 	sc->stats.phy.usmargin = data / 10;
sc               1554 drivers/usb/atm/ueagle-atm.c static void cmvs_file_name(struct uea_softc *sc, char *const cmv_name, int ver)
sc               1561 drivers/usb/atm/ueagle-atm.c 	if (cmv_file[sc->modem_index] == NULL) {
sc               1562 drivers/usb/atm/ueagle-atm.c 		if (UEA_CHIP_VERSION(sc) == ADI930)
sc               1564 drivers/usb/atm/ueagle-atm.c 		else if (UEA_CHIP_VERSION(sc) == EAGLE_IV)
sc               1569 drivers/usb/atm/ueagle-atm.c 		file_arr[4] = IS_ISDN(sc) ? 'i' : 'p';
sc               1572 drivers/usb/atm/ueagle-atm.c 		file = cmv_file[sc->modem_index];
sc               1581 drivers/usb/atm/ueagle-atm.c static int request_cmvs_old(struct uea_softc *sc,
sc               1588 drivers/usb/atm/ueagle-atm.c 	cmvs_file_name(sc, cmv_name, 1);
sc               1589 drivers/usb/atm/ueagle-atm.c 	ret = request_firmware(fw, cmv_name, &sc->usb_dev->dev);
sc               1591 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1609 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "firmware %s is corrupted\n", cmv_name);
sc               1614 drivers/usb/atm/ueagle-atm.c static int request_cmvs(struct uea_softc *sc,
sc               1622 drivers/usb/atm/ueagle-atm.c 	cmvs_file_name(sc, cmv_name, 2);
sc               1623 drivers/usb/atm/ueagle-atm.c 	ret = request_firmware(fw, cmv_name, &sc->usb_dev->dev);
sc               1627 drivers/usb/atm/ueagle-atm.c 			uea_warn(INS_TO_USBDEV(sc), "requesting "
sc               1630 drivers/usb/atm/ueagle-atm.c 			return request_cmvs_old(sc, cmvs, fw);
sc               1632 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1642 drivers/usb/atm/ueagle-atm.c 			uea_warn(INS_TO_USBDEV(sc), "firmware %s is corrupted,"
sc               1645 drivers/usb/atm/ueagle-atm.c 			return request_cmvs_old(sc, cmvs, fw);
sc               1670 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "firmware %s is corrupted\n", cmv_name);
sc               1675 drivers/usb/atm/ueagle-atm.c static int uea_send_cmvs_e1(struct uea_softc *sc)
sc               1683 drivers/usb/atm/ueagle-atm.c 	ret = uea_write_cmv_e1(sc, E1_SA_CNTL, 0, 1);
sc               1688 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e1(sc, E1_SA_INFO, 10, &sc->stats.phy.firmid);
sc               1691 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "ATU-R firmware version : %x\n",
sc               1692 drivers/usb/atm/ueagle-atm.c 			sc->stats.phy.firmid);
sc               1695 drivers/usb/atm/ueagle-atm.c 	ret = len = request_cmvs(sc, &cmvs_ptr, &cmvs_fw, &ver);
sc               1703 drivers/usb/atm/ueagle-atm.c 		uea_warn(INS_TO_USBDEV(sc), "use deprecated cmvs version, "
sc               1707 drivers/usb/atm/ueagle-atm.c 			ret = uea_write_cmv_e1(sc,
sc               1718 drivers/usb/atm/ueagle-atm.c 			ret = uea_write_cmv_e1(sc,
sc               1727 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "bad cmvs version %d\n", ver);
sc               1732 drivers/usb/atm/ueagle-atm.c 	ret = uea_write_cmv_e1(sc, E1_SA_CNTL, 0, 2);
sc               1733 drivers/usb/atm/ueagle-atm.c 	uea_vdbg(INS_TO_USBDEV(sc), "Entering in R-ACT-REQ state\n");
sc               1734 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "modem started, waiting "
sc               1741 drivers/usb/atm/ueagle-atm.c static int uea_send_cmvs_e4(struct uea_softc *sc)
sc               1749 drivers/usb/atm/ueagle-atm.c 	ret = uea_write_cmv_e4(sc, 1, E4_SA_CNTL, 0, 0, 1);
sc               1755 drivers/usb/atm/ueagle-atm.c 	ret = uea_read_cmv_e4(sc, 2, E4_SA_INFO, 55, 0, &sc->stats.phy.firmid);
sc               1758 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "ATU-R firmware version : %x\n",
sc               1759 drivers/usb/atm/ueagle-atm.c 			sc->stats.phy.firmid);
sc               1763 drivers/usb/atm/ueagle-atm.c 	ret = len = request_cmvs(sc, &cmvs_ptr, &cmvs_fw, &ver);
sc               1772 drivers/usb/atm/ueagle-atm.c 			ret = uea_write_cmv_e4(sc, 1,
sc               1782 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "bad cmvs version %d\n", ver);
sc               1787 drivers/usb/atm/ueagle-atm.c 	ret = uea_write_cmv_e4(sc, 1, E4_SA_CNTL, 0, 0, 2);
sc               1788 drivers/usb/atm/ueagle-atm.c 	uea_vdbg(INS_TO_USBDEV(sc), "Entering in R-ACT-REQ state\n");
sc               1789 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "modem started, waiting "
sc               1802 drivers/usb/atm/ueagle-atm.c static int uea_start_reset(struct uea_softc *sc)
sc               1807 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1808 drivers/usb/atm/ueagle-atm.c 	uea_info(INS_TO_USBDEV(sc), "(re)booting started\n");
sc               1811 drivers/usb/atm/ueagle-atm.c 	sc->booting = 1;
sc               1816 drivers/usb/atm/ueagle-atm.c 	sc->cmv_ack = 0;
sc               1820 drivers/usb/atm/ueagle-atm.c 	memset(&sc->stats, 0, sizeof(struct uea_stats));
sc               1823 drivers/usb/atm/ueagle-atm.c 	uea_request(sc, UEA_SET_MODE, UEA_LOOPBACK_ON, 0, NULL);
sc               1824 drivers/usb/atm/ueagle-atm.c 	uea_request(sc, UEA_SET_MODE, UEA_BOOT_IDMA, 0, NULL);
sc               1827 drivers/usb/atm/ueagle-atm.c 	uea_request(sc, UEA_SET_MODE, UEA_START_RESET, 0, NULL);
sc               1830 drivers/usb/atm/ueagle-atm.c 	ret = uea_wait(sc, 0, msecs_to_jiffies(100));
sc               1835 drivers/usb/atm/ueagle-atm.c 	uea_request(sc, UEA_SET_MODE, UEA_END_RESET, 0, NULL);
sc               1837 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) != EAGLE_IV) {
sc               1839 drivers/usb/atm/ueagle-atm.c 		uea_request(sc, UEA_SET_2183_DATA, UEA_MPTX_MAILBOX, 2, &zero);
sc               1840 drivers/usb/atm/ueagle-atm.c 		uea_request(sc, UEA_SET_2183_DATA, UEA_MPRX_MAILBOX, 2, &zero);
sc               1841 drivers/usb/atm/ueagle-atm.c 		uea_request(sc, UEA_SET_2183_DATA, UEA_SWAP_MAILBOX, 2, &zero);
sc               1844 drivers/usb/atm/ueagle-atm.c 	ret = uea_wait(sc, 0, msecs_to_jiffies(1000));
sc               1848 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV)
sc               1849 drivers/usb/atm/ueagle-atm.c 		sc->cmv_dsc.e4.function = E4_MAKEFUNCTION(E4_ADSLDIRECTIVE,
sc               1852 drivers/usb/atm/ueagle-atm.c 		sc->cmv_dsc.e1.function = E1_MAKEFUNCTION(E1_ADSLDIRECTIVE,
sc               1856 drivers/usb/atm/ueagle-atm.c 	sc->booting = 0;
sc               1859 drivers/usb/atm/ueagle-atm.c 	sc->pageno = 0;
sc               1860 drivers/usb/atm/ueagle-atm.c 	sc->ovl = 0;
sc               1861 drivers/usb/atm/ueagle-atm.c 	schedule_work(&sc->task);
sc               1864 drivers/usb/atm/ueagle-atm.c 	ret = wait_cmv_ack(sc);
sc               1868 drivers/usb/atm/ueagle-atm.c 	uea_vdbg(INS_TO_USBDEV(sc), "Ready CMV received\n");
sc               1870 drivers/usb/atm/ueagle-atm.c 	ret = sc->send_cmvs(sc);
sc               1874 drivers/usb/atm/ueagle-atm.c 	sc->reset = 0;
sc               1875 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               1887 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = data;
sc               1891 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1893 drivers/usb/atm/ueagle-atm.c 		if (ret < 0 || sc->reset)
sc               1894 drivers/usb/atm/ueagle-atm.c 			ret = uea_start_reset(sc);
sc               1896 drivers/usb/atm/ueagle-atm.c 			ret = sc->stat(sc);
sc               1898 drivers/usb/atm/ueagle-atm.c 			uea_wait(sc, 0, msecs_to_jiffies(1000));
sc               1901 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               1906 drivers/usb/atm/ueagle-atm.c static int load_XILINX_firmware(struct uea_softc *sc)
sc               1914 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1916 drivers/usb/atm/ueagle-atm.c 	ret = request_firmware(&fw_entry, fw_name, &sc->usb_dev->dev);
sc               1918 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "firmware %s is not available\n",
sc               1926 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "firmware %s is corrupted\n",
sc               1933 drivers/usb/atm/ueagle-atm.c 		ret = uea_request(sc, 0xe, 0, ln, pfw + u);
sc               1935 drivers/usb/atm/ueagle-atm.c 			uea_err(INS_TO_USBDEV(sc),
sc               1942 drivers/usb/atm/ueagle-atm.c 	ret = uea_request(sc, 0xe, 1, 0, NULL);
sc               1944 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               1951 drivers/usb/atm/ueagle-atm.c 	ret = uea_send_modem_cmd(sc->usb_dev, 0xe, 1, &value);
sc               1953 drivers/usb/atm/ueagle-atm.c 		uea_err(sc->usb_dev, "elsa de-assert failed with error"
sc               1959 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               1964 drivers/usb/atm/ueagle-atm.c static void uea_dispatch_cmv_e1(struct uea_softc *sc, struct intr_pkt *intr)
sc               1966 drivers/usb/atm/ueagle-atm.c 	struct cmv_dsc_e1 *dsc = &sc->cmv_dsc.e1;
sc               1969 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               1980 drivers/usb/atm/ueagle-atm.c 		if (UEA_CHIP_VERSION(sc) == ADI930
sc               1992 drivers/usb/atm/ueagle-atm.c 		wake_up_cmv_ack(sc);
sc               1993 drivers/usb/atm/ueagle-atm.c 		uea_leaves(INS_TO_USBDEV(sc));
sc               2003 drivers/usb/atm/ueagle-atm.c 	sc->data = get_unaligned_le32(&cmv->dwData);
sc               2004 drivers/usb/atm/ueagle-atm.c 	sc->data = sc->data << 16 | sc->data >> 16;
sc               2006 drivers/usb/atm/ueagle-atm.c 	wake_up_cmv_ack(sc);
sc               2007 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2011 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "unexpected cmv received, "
sc               2015 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2019 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "invalid cmv received, "
sc               2022 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2026 drivers/usb/atm/ueagle-atm.c static void uea_dispatch_cmv_e4(struct uea_softc *sc, struct intr_pkt *intr)
sc               2028 drivers/usb/atm/ueagle-atm.c 	struct cmv_dsc_e4 *dsc = &sc->cmv_dsc.e4;
sc               2031 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               2032 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc), "cmv %x %x %x %x %x %x\n",
sc               2042 drivers/usb/atm/ueagle-atm.c 		wake_up_cmv_ack(sc);
sc               2043 drivers/usb/atm/ueagle-atm.c 		uea_leaves(INS_TO_USBDEV(sc));
sc               2053 drivers/usb/atm/ueagle-atm.c 	sc->data = be32_to_cpu(cmv->dwData[0]);
sc               2054 drivers/usb/atm/ueagle-atm.c 	sc->data1 = be32_to_cpu(cmv->dwData[1]);
sc               2055 drivers/usb/atm/ueagle-atm.c 	wake_up_cmv_ack(sc);
sc               2056 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2060 drivers/usb/atm/ueagle-atm.c 	uea_err(INS_TO_USBDEV(sc), "unexpected cmv received, "
sc               2064 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2068 drivers/usb/atm/ueagle-atm.c static void uea_schedule_load_page_e1(struct uea_softc *sc,
sc               2071 drivers/usb/atm/ueagle-atm.c 	sc->pageno = intr->e1_bSwapPageNo;
sc               2072 drivers/usb/atm/ueagle-atm.c 	sc->ovl = intr->e1_bOvl >> 4 | intr->e1_bOvl << 4;
sc               2073 drivers/usb/atm/ueagle-atm.c 	schedule_work(&sc->task);
sc               2076 drivers/usb/atm/ueagle-atm.c static void uea_schedule_load_page_e4(struct uea_softc *sc,
sc               2079 drivers/usb/atm/ueagle-atm.c 	sc->pageno = intr->e4_bSwapPageNo;
sc               2080 drivers/usb/atm/ueagle-atm.c 	schedule_work(&sc->task);
sc               2088 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = urb->context;
sc               2092 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               2095 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "uea_intr() failed with %d\n",
sc               2101 drivers/usb/atm/ueagle-atm.c 	if (intr->bType != 0x08 || sc->booting) {
sc               2102 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "wrong interrupt\n");
sc               2108 drivers/usb/atm/ueagle-atm.c 		sc->schedule_load_page(sc, intr);
sc               2112 drivers/usb/atm/ueagle-atm.c 		sc->dispatch_cmv(sc, intr);
sc               2116 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "unknown interrupt %u\n",
sc               2121 drivers/usb/atm/ueagle-atm.c 	usb_submit_urb(sc->urb_int, GFP_ATOMIC);
sc               2127 drivers/usb/atm/ueagle-atm.c static int uea_boot(struct uea_softc *sc, struct usb_interface *intf)
sc               2133 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               2135 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV) {
sc               2137 drivers/usb/atm/ueagle-atm.c 		sc->dispatch_cmv = uea_dispatch_cmv_e4;
sc               2138 drivers/usb/atm/ueagle-atm.c 		sc->schedule_load_page = uea_schedule_load_page_e4;
sc               2139 drivers/usb/atm/ueagle-atm.c 		sc->stat = uea_stat_e4;
sc               2140 drivers/usb/atm/ueagle-atm.c 		sc->send_cmvs = uea_send_cmvs_e4;
sc               2141 drivers/usb/atm/ueagle-atm.c 		INIT_WORK(&sc->task, uea_load_page_e4);
sc               2144 drivers/usb/atm/ueagle-atm.c 		sc->dispatch_cmv = uea_dispatch_cmv_e1;
sc               2145 drivers/usb/atm/ueagle-atm.c 		sc->schedule_load_page = uea_schedule_load_page_e1;
sc               2146 drivers/usb/atm/ueagle-atm.c 		sc->stat = uea_stat_e1;
sc               2147 drivers/usb/atm/ueagle-atm.c 		sc->send_cmvs = uea_send_cmvs_e1;
sc               2148 drivers/usb/atm/ueagle-atm.c 		INIT_WORK(&sc->task, uea_load_page_e1);
sc               2151 drivers/usb/atm/ueagle-atm.c 	init_waitqueue_head(&sc->sync_q);
sc               2153 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == ADI930)
sc               2154 drivers/usb/atm/ueagle-atm.c 		load_XILINX_firmware(sc);
sc               2165 drivers/usb/atm/ueagle-atm.c 	sc->urb_int = usb_alloc_urb(0, GFP_KERNEL);
sc               2166 drivers/usb/atm/ueagle-atm.c 	if (!sc->urb_int)
sc               2169 drivers/usb/atm/ueagle-atm.c 	usb_fill_int_urb(sc->urb_int, sc->usb_dev,
sc               2170 drivers/usb/atm/ueagle-atm.c 			 usb_rcvintpipe(sc->usb_dev, UEA_INTR_PIPE),
sc               2171 drivers/usb/atm/ueagle-atm.c 			 intr, size, uea_intr, sc,
sc               2174 drivers/usb/atm/ueagle-atm.c 	ret = usb_submit_urb(sc->urb_int, GFP_KERNEL);
sc               2176 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc),
sc               2184 drivers/usb/atm/ueagle-atm.c 	sc->kthread = kthread_create(uea_kthread, sc, "ueagle-atm");
sc               2185 drivers/usb/atm/ueagle-atm.c 	if (IS_ERR(sc->kthread)) {
sc               2186 drivers/usb/atm/ueagle-atm.c 		uea_err(INS_TO_USBDEV(sc), "failed to create thread\n");
sc               2187 drivers/usb/atm/ueagle-atm.c 		ret = PTR_ERR(sc->kthread);
sc               2191 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2195 drivers/usb/atm/ueagle-atm.c 	usb_kill_urb(sc->urb_int);
sc               2197 drivers/usb/atm/ueagle-atm.c 	usb_free_urb(sc->urb_int);
sc               2198 drivers/usb/atm/ueagle-atm.c 	sc->urb_int = NULL;
sc               2201 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2208 drivers/usb/atm/ueagle-atm.c static void uea_stop(struct uea_softc *sc)
sc               2211 drivers/usb/atm/ueagle-atm.c 	uea_enters(INS_TO_USBDEV(sc));
sc               2212 drivers/usb/atm/ueagle-atm.c 	ret = kthread_stop(sc->kthread);
sc               2213 drivers/usb/atm/ueagle-atm.c 	uea_dbg(INS_TO_USBDEV(sc), "kthread finish with status %d\n", ret);
sc               2215 drivers/usb/atm/ueagle-atm.c 	uea_request(sc, UEA_SET_MODE, UEA_LOOPBACK_ON, 0, NULL);
sc               2217 drivers/usb/atm/ueagle-atm.c 	usb_kill_urb(sc->urb_int);
sc               2218 drivers/usb/atm/ueagle-atm.c 	kfree(sc->urb_int->transfer_buffer);
sc               2219 drivers/usb/atm/ueagle-atm.c 	usb_free_urb(sc->urb_int);
sc               2222 drivers/usb/atm/ueagle-atm.c 	flush_work(&sc->task);
sc               2224 drivers/usb/atm/ueagle-atm.c 	release_firmware(sc->dsp_firm);
sc               2225 drivers/usb/atm/ueagle-atm.c 	uea_leaves(INS_TO_USBDEV(sc));
sc               2249 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;
sc               2252 drivers/usb/atm/ueagle-atm.c 	sc = dev_to_uea(dev);
sc               2253 drivers/usb/atm/ueagle-atm.c 	if (!sc)
sc               2255 drivers/usb/atm/ueagle-atm.c 	ret = snprintf(buf, 10, "%08x\n", sc->stats.phy.state);
sc               2265 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;
sc               2268 drivers/usb/atm/ueagle-atm.c 	sc = dev_to_uea(dev);
sc               2269 drivers/usb/atm/ueagle-atm.c 	if (!sc)
sc               2271 drivers/usb/atm/ueagle-atm.c 	sc->reset = 1;
sc               2285 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;
sc               2288 drivers/usb/atm/ueagle-atm.c 	sc = dev_to_uea(dev);
sc               2289 drivers/usb/atm/ueagle-atm.c 	if (!sc)
sc               2292 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV) {
sc               2293 drivers/usb/atm/ueagle-atm.c 		switch (sc->stats.phy.state) {
sc               2317 drivers/usb/atm/ueagle-atm.c 		modem_state = GET_STATUS(sc->stats.phy.state);
sc               2347 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;
sc               2351 drivers/usb/atm/ueagle-atm.c 	sc = dev_to_uea(dev);
sc               2352 drivers/usb/atm/ueagle-atm.c 	if (!sc)
sc               2355 drivers/usb/atm/ueagle-atm.c 	if (UEA_CHIP_VERSION(sc) == EAGLE_IV) {
sc               2356 drivers/usb/atm/ueagle-atm.c 		if (sc->stats.phy.flags & 0x4000)
sc               2358 drivers/usb/atm/ueagle-atm.c 		else if (sc->stats.phy.flags & 0x0001)
sc               2361 drivers/usb/atm/ueagle-atm.c 		if (sc->stats.phy.flags & 0x0C00)
sc               2363 drivers/usb/atm/ueagle-atm.c 		else if (sc->stats.phy.flags & 0x0030)
sc               2381 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;					\
sc               2384 drivers/usb/atm/ueagle-atm.c 	sc = dev_to_uea(dev);					\
sc               2385 drivers/usb/atm/ueagle-atm.c 	if (!sc)						\
sc               2387 drivers/usb/atm/ueagle-atm.c 	ret = snprintf(buf, 10, "%08x\n", sc->stats.phy.name);	\
sc               2389 drivers/usb/atm/ueagle-atm.c 		sc->stats.phy.name = 0;				\
sc               2415 drivers/usb/atm/ueagle-atm.c static int uea_getesi(struct uea_softc *sc, u_char *esi)
sc               2420 drivers/usb/atm/ueagle-atm.c 	    (sc->usb_dev, sc->usb_dev->descriptor.iSerialNumber, mac_str,
sc               2434 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = usbatm->driver_data;
sc               2436 drivers/usb/atm/ueagle-atm.c 	return uea_getesi(sc, atm_dev->esi);
sc               2441 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = usbatm->driver_data;
sc               2443 drivers/usb/atm/ueagle-atm.c 	wait_event_interruptible(sc->sync_q, IS_OPERATIONAL(sc));
sc               2494 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc;
sc               2519 drivers/usb/atm/ueagle-atm.c 	sc = kzalloc(sizeof(struct uea_softc), GFP_KERNEL);
sc               2520 drivers/usb/atm/ueagle-atm.c 	if (!sc)
sc               2523 drivers/usb/atm/ueagle-atm.c 	sc->usb_dev = usb;
sc               2524 drivers/usb/atm/ueagle-atm.c 	usbatm->driver_data = sc;
sc               2525 drivers/usb/atm/ueagle-atm.c 	sc->usbatm = usbatm;
sc               2526 drivers/usb/atm/ueagle-atm.c 	sc->modem_index = (modem_index < NB_MODEM) ? modem_index++ : 0;
sc               2527 drivers/usb/atm/ueagle-atm.c 	sc->driver_info = id->driver_info;
sc               2530 drivers/usb/atm/ueagle-atm.c 	if (annex[sc->modem_index] == 1)
sc               2531 drivers/usb/atm/ueagle-atm.c 		sc->annex = ANNEXA;
sc               2532 drivers/usb/atm/ueagle-atm.c 	else if (annex[sc->modem_index] == 2)
sc               2533 drivers/usb/atm/ueagle-atm.c 		sc->annex = ANNEXB;
sc               2535 drivers/usb/atm/ueagle-atm.c 	else if (sc->driver_info & AUTO_ANNEX_A)
sc               2536 drivers/usb/atm/ueagle-atm.c 		sc->annex = ANNEXA;
sc               2537 drivers/usb/atm/ueagle-atm.c 	else if (sc->driver_info & AUTO_ANNEX_B)
sc               2538 drivers/usb/atm/ueagle-atm.c 		sc->annex = ANNEXB;
sc               2540 drivers/usb/atm/ueagle-atm.c 		sc->annex = (le16_to_cpu
sc               2541 drivers/usb/atm/ueagle-atm.c 		(sc->usb_dev->descriptor.bcdDevice) & 0x80) ? ANNEXB : ANNEXA;
sc               2543 drivers/usb/atm/ueagle-atm.c 	alt = altsetting[sc->modem_index];
sc               2557 drivers/usb/atm/ueagle-atm.c 	ret = uea_boot(sc, intf);
sc               2564 drivers/usb/atm/ueagle-atm.c 	kfree(sc);
sc               2570 drivers/usb/atm/ueagle-atm.c 	struct uea_softc *sc = usbatm->driver_data;
sc               2572 drivers/usb/atm/ueagle-atm.c 	uea_stop(sc);
sc               2573 drivers/usb/atm/ueagle-atm.c 	kfree(sc);
sc               2607 drivers/usb/atm/ueagle-atm.c 		struct uea_softc *sc = usbatm->driver_data;
sc               2613 drivers/usb/atm/ueagle-atm.c 		wake_up_process(sc->kthread);
sc                113 drivers/usb/storage/usb.c #define UNUSUAL_VENDOR_INTF(idVendor, cl, sc, pr, \
sc                 30 drivers/usb/storage/usual-tables.c #define UNUSUAL_VENDOR_INTF(id_vendor, cl, sc, pr, \
sc                 38 drivers/usb/storage/usual-tables.c 	.bInterfaceSubClass = (sc), \
sc                203 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 	unsigned long reg, sc;
sc                218 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 	sc = par->refclk / (1000000 / fbi->var.pixclock) - 1;
sc                221 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 	reg |= sc << 8;
sc                223 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 	dev_dbg(par->dev, "SC 0x%lx\n", sc);
sc                444 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 		unsigned long sc = ((reg & GC_DCM01_SC) >> 8) + 1;
sc                448 drivers/video/fbdev/mb862xx/mb862xxfbdrv.c 		fbi->var.pixclock = (sc * 1000000) / par->refclk;
sc                416 drivers/video/fbdev/omap2/omapfb/dss/hdmi5_core.c 	unsigned itc, ec, q, sc;
sc                439 drivers/video/fbdev/omap2/omapfb/dss/hdmi5_core.c 	sc = (ptr[2] >> 0) & 0x3;
sc                454 drivers/video/fbdev/omap2/omapfb/dss/hdmi5_core.c 		(itc << 7) | (ec << 4) | (q << 2) | (sc << 0));
sc                817 drivers/virtio/virtio_balloon.c 						  struct shrink_control *sc)
sc                823 drivers/virtio/virtio_balloon.c 	pages_to_free = sc->nr_to_scan;
sc                837 drivers/virtio/virtio_balloon.c 						   struct shrink_control *sc)
sc                141 fs/btrfs/backref.c static inline int extent_is_shared(struct share_check *sc)
sc                143 fs/btrfs/backref.c 	return (sc && sc->share_count > 1) ? BACKREF_FOUND_SHARED : 0;
sc                206 fs/btrfs/backref.c static void update_share_count(struct share_check *sc, int oldcount,
sc                209 fs/btrfs/backref.c 	if ((!sc) || (oldcount == 0 && newcount < 1))
sc                213 fs/btrfs/backref.c 		sc->share_count--;
sc                215 fs/btrfs/backref.c 		sc->share_count++;
sc                226 fs/btrfs/backref.c 			      struct share_check *sc)
sc                265 fs/btrfs/backref.c 			update_share_count(sc, ref->count,
sc                273 fs/btrfs/backref.c 	update_share_count(sc, 0, newref->count);
sc                338 fs/btrfs/backref.c 			  struct share_check *sc, gfp_t gfp_mask)
sc                383 fs/btrfs/backref.c 	prelim_ref_insert(fs_info, preftree, ref, sc);
sc                384 fs/btrfs/backref.c 	return extent_is_shared(sc);
sc                391 fs/btrfs/backref.c 			  struct share_check *sc, gfp_t gfp_mask)
sc                394 fs/btrfs/backref.c 			      parent, wanted_disk_byte, count, sc, gfp_mask);
sc                402 fs/btrfs/backref.c 			    struct share_check *sc, gfp_t gfp_mask)
sc                409 fs/btrfs/backref.c 			      wanted_disk_byte, count, sc, gfp_mask);
sc                613 fs/btrfs/backref.c 				 struct share_check *sc, bool ignore_offset)
sc                650 fs/btrfs/backref.c 		if (sc && sc->root_objectid &&
sc                651 fs/btrfs/backref.c 		    ref->root_id != sc->root_objectid) {
sc                762 fs/btrfs/backref.c 			    struct share_check *sc)
sc                805 fs/btrfs/backref.c 					       node->bytenr, count, sc,
sc                817 fs/btrfs/backref.c 					     sc, GFP_ATOMIC);
sc                833 fs/btrfs/backref.c 			if (sc && sc->inum && ref->objectid != sc->inum) {
sc                839 fs/btrfs/backref.c 					       &key, 0, node->bytenr, count, sc,
sc                850 fs/btrfs/backref.c 					     node->bytenr, count, sc,
sc                865 fs/btrfs/backref.c 		ret = extent_is_shared(sc);
sc                879 fs/btrfs/backref.c 			   u64 *total_refs, struct share_check *sc)
sc                950 fs/btrfs/backref.c 					     bytenr, count, sc, GFP_NOFS);
sc                970 fs/btrfs/backref.c 			if (sc && sc->inum && key.objectid != sc->inum) {
sc                979 fs/btrfs/backref.c 					       sc, GFP_NOFS);
sc               1001 fs/btrfs/backref.c 			  struct share_check *sc)
sc               1046 fs/btrfs/backref.c 					     sc, GFP_NOFS);
sc               1069 fs/btrfs/backref.c 			if (sc && sc->inum && key.objectid != sc->inum) {
sc               1077 fs/btrfs/backref.c 					       sc, GFP_NOFS);
sc               1117 fs/btrfs/backref.c 			     struct share_check *sc, bool ignore_offset)
sc               1198 fs/btrfs/backref.c 					       &preftrees, &total_refs, sc);
sc               1220 fs/btrfs/backref.c 					      &total_refs, sc);
sc               1224 fs/btrfs/backref.c 					     &preftrees, sc);
sc               1239 fs/btrfs/backref.c 				    extent_item_pos, total_refs, sc, ignore_offset);
sc               1267 fs/btrfs/backref.c 			if (sc && sc->root_objectid &&
sc               1268 fs/btrfs/backref.c 			    ref->root_id != sc->root_objectid) {
sc               1194 fs/dcache.c    long prune_dcache_sb(struct super_block *sb, struct shrink_control *sc)
sc               1199 fs/dcache.c    	freed = list_lru_shrink_walk(&sb->s_dentry_lru, sc,
sc                257 fs/erofs/utils.c 					struct shrink_control *sc)
sc                263 fs/erofs/utils.c 				       struct shrink_control *sc)
sc                268 fs/erofs/utils.c 	unsigned long nr = sc->nr_to_scan;
sc               1534 fs/ext4/extents_status.c 				   struct shrink_control *sc)
sc               1541 fs/ext4/extents_status.c 	trace_ext4_es_shrink_count(sbi->s_sb, sc->nr_to_scan, nr);
sc               1546 fs/ext4/extents_status.c 				  struct shrink_control *sc)
sc               1550 fs/ext4/extents_status.c 	int nr_to_scan = sc->nr_to_scan;
sc               3522 fs/f2fs/f2fs.h 			struct shrink_control *sc);
sc               3524 fs/f2fs/f2fs.h 			struct shrink_control *sc);
sc                 40 fs/f2fs/shrinker.c 				struct shrink_control *sc)
sc                 76 fs/f2fs/shrinker.c 				struct shrink_control *sc)
sc                 78 fs/f2fs/shrinker.c 	unsigned long nr = sc->nr_to_scan;
sc               1615 fs/gfs2/glock.c 					    struct shrink_control *sc)
sc               1617 fs/gfs2/glock.c 	if (!(sc->gfp_mask & __GFP_FS))
sc               1619 fs/gfs2/glock.c 	return gfs2_scan_glock_lru(sc->nr_to_scan);
sc               1623 fs/gfs2/glock.c 					     struct shrink_control *sc)
sc                164 fs/gfs2/quota.c 					 struct shrink_control *sc)
sc                169 fs/gfs2/quota.c 	if (!(sc->gfp_mask & __GFP_FS))
sc                172 fs/gfs2/quota.c 	freed = list_lru_shrink_walk(&gfs2_qd_lru, sc,
sc                181 fs/gfs2/quota.c 					  struct shrink_control *sc)
sc                183 fs/gfs2/quota.c 	return vfs_pressure_ratio(list_lru_shrink_count(&gfs2_qd_lru, sc));
sc                212 fs/gfs2/super.c void gfs2_statfs_change_in(struct gfs2_statfs_change_host *sc, const void *buf)
sc                216 fs/gfs2/super.c 	sc->sc_total = be64_to_cpu(str->sc_total);
sc                217 fs/gfs2/super.c 	sc->sc_free = be64_to_cpu(str->sc_free);
sc                218 fs/gfs2/super.c 	sc->sc_dinodes = be64_to_cpu(str->sc_dinodes);
sc                221 fs/gfs2/super.c static void gfs2_statfs_change_out(const struct gfs2_statfs_change_host *sc, void *buf)
sc                225 fs/gfs2/super.c 	str->sc_total = cpu_to_be64(sc->sc_total);
sc                226 fs/gfs2/super.c 	str->sc_free = cpu_to_be64(sc->sc_free);
sc                227 fs/gfs2/super.c 	str->sc_dinodes = cpu_to_be64(sc->sc_dinodes);
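gfs2_statfs_change_in()/_out() above convert between the big-endian on-disk statfs change block and the host-endian in-core structure. A compact sketch of that endianness pairing with an illustrative two-field structure (not the gfs2 on-disk layout):

	#include <linux/types.h>
	#include <asm/byteorder.h>

	struct demo_counts_ondisk {	/* big-endian on-disk form (illustrative) */
		__be64 total;
		__be64 free;
	};

	struct demo_counts {		/* host-endian in-core form */
		u64 total;
		u64 free;
	};

	static void demo_counts_in(struct demo_counts *c, const void *buf)
	{
		const struct demo_counts_ondisk *str = buf;

		c->total = be64_to_cpu(str->total);
		c->free = be64_to_cpu(str->free);
	}

	static void demo_counts_out(const struct demo_counts *c, void *buf)
	{
		struct demo_counts_ondisk *str = buf;

		str->total = cpu_to_be64(c->total);
		str->free = cpu_to_be64(c->free);
	}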
sc                819 fs/gfs2/super.c 			    struct gfs2_statfs_change_host *sc)
sc                822 fs/gfs2/super.c 	sc->sc_total += rgd->rd_data;
sc                823 fs/gfs2/super.c 	sc->sc_free += rgd->rd_free;
sc                824 fs/gfs2/super.c 	sc->sc_dinodes += rgd->rd_dinodes;
sc                841 fs/gfs2/super.c static int gfs2_statfs_slow(struct gfs2_sbd *sdp, struct gfs2_statfs_change_host *sc)
sc                850 fs/gfs2/super.c 	memset(sc, 0, sizeof(struct gfs2_statfs_change_host));
sc                875 fs/gfs2/super.c 						error = statfs_slow_fill(rgd, sc);
sc                914 fs/gfs2/super.c static int gfs2_statfs_i(struct gfs2_sbd *sdp, struct gfs2_statfs_change_host *sc)
sc                921 fs/gfs2/super.c 	*sc = *m_sc;
sc                922 fs/gfs2/super.c 	sc->sc_total += l_sc->sc_total;
sc                923 fs/gfs2/super.c 	sc->sc_free += l_sc->sc_free;
sc                924 fs/gfs2/super.c 	sc->sc_dinodes += l_sc->sc_dinodes;
sc                928 fs/gfs2/super.c 	if (sc->sc_free < 0)
sc                929 fs/gfs2/super.c 		sc->sc_free = 0;
sc                930 fs/gfs2/super.c 	if (sc->sc_free > sc->sc_total)
sc                931 fs/gfs2/super.c 		sc->sc_free = sc->sc_total;
sc                932 fs/gfs2/super.c 	if (sc->sc_dinodes < 0)
sc                933 fs/gfs2/super.c 		sc->sc_dinodes = 0;
sc                950 fs/gfs2/super.c 	struct gfs2_statfs_change_host sc;
sc                958 fs/gfs2/super.c 		error = gfs2_statfs_slow(sdp, &sc);
sc                960 fs/gfs2/super.c 		error = gfs2_statfs_i(sdp, &sc);
sc                967 fs/gfs2/super.c 	buf->f_blocks = sc.sc_total;
sc                968 fs/gfs2/super.c 	buf->f_bfree = sc.sc_free;
sc                969 fs/gfs2/super.c 	buf->f_bavail = sc.sc_free;
sc                970 fs/gfs2/super.c 	buf->f_files = sc.sc_dinodes + sc.sc_free;
sc                971 fs/gfs2/super.c 	buf->f_ffree = sc.sc_free;
sc                 39 fs/gfs2/super.h extern void gfs2_statfs_change_in(struct gfs2_statfs_change_host *sc,
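gfs2_statfs_change_in()/_out() above convert each field of an on-disk record between big-endian wire format and host order, one helper per direction. A hedged sketch of that in/out pairing for a hypothetical three-field record (the demo_* struct and function names are made up, not gfs2's):

	#include <linux/types.h>
	#include <asm/byteorder.h>

	/* Hypothetical on-disk layout: fixed-width, big-endian fields. */
	struct demo_statfs_ondisk {
		__be64 total;
		__be64 free;
		__be64 dinodes;
	};

	/* Host-order counterpart used in memory. */
	struct demo_statfs_host {
		u64 total;
		u64 free;
		u64 dinodes;
	};

	static void demo_statfs_in(struct demo_statfs_host *sc, const void *buf)
	{
		const struct demo_statfs_ondisk *str = buf;

		sc->total   = be64_to_cpu(str->total);
		sc->free    = be64_to_cpu(str->free);
		sc->dinodes = be64_to_cpu(str->dinodes);
	}

	static void demo_statfs_out(const struct demo_statfs_host *sc, void *buf)
	{
		struct demo_statfs_ondisk *str = buf;

		str->total   = cpu_to_be64(sc->total);
		str->free    = cpu_to_be64(sc->free);
		str->dinodes = cpu_to_be64(sc->dinodes);
	}

Keeping the __be64 types on the on-disk struct lets sparse flag any field that is used without conversion, which is why the gfs2 code never touches str->sc_* directly.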
sc                798 fs/inode.c     long prune_icache_sb(struct super_block *sb, struct shrink_control *sc)
sc                803 fs/inode.c     	freed = list_lru_shrink_walk(&sb->s_inode_lru, sc,
sc                139 fs/internal.h  extern long prune_icache_sb(struct super_block *sb, struct shrink_control *sc);
sc                156 fs/internal.h  extern long prune_dcache_sb(struct super_block *sb, struct shrink_control *sc);
sc                271 fs/mbcache.c   				    struct shrink_control *sc)
sc                321 fs/mbcache.c   				   struct shrink_control *sc)
sc                325 fs/mbcache.c   	return mb_cache_shrink(cache, sc->nr_to_scan);
sc               2232 fs/nfs/dir.c   nfs_access_cache_scan(struct shrinker *shrink, struct shrink_control *sc)
sc               2234 fs/nfs/dir.c   	int nr_to_scan = sc->nr_to_scan;
sc               2235 fs/nfs/dir.c   	gfp_t gfp_mask = sc->gfp_mask;
sc               2244 fs/nfs/dir.c   nfs_access_cache_count(struct shrinker *shrink, struct shrink_control *sc)
sc                350 fs/nfs/internal.h 					    struct shrink_control *sc);
sc                352 fs/nfs/internal.h 					   struct shrink_control *sc);
sc               6060 fs/nfs/nfs4proc.c 	struct nfs4_setclientid *sc = calldata;
sc               6063 fs/nfs/nfs4proc.c 		sc->sc_cred = get_rpccred(task->tk_rqstp->rq_cred);
sc                467 fs/nfs/nfs4state.c nfs4_init_seqid_counter(struct nfs_seqid_counter *sc)
sc                469 fs/nfs/nfs4state.c 	sc->create_time = ktime_get();
sc                470 fs/nfs/nfs4state.c 	sc->flags = 0;
sc                471 fs/nfs/nfs4state.c 	sc->counter = 0;
sc                472 fs/nfs/nfs4state.c 	spin_lock_init(&sc->lock);
sc                473 fs/nfs/nfs4state.c 	INIT_LIST_HEAD(&sc->list);
sc                474 fs/nfs/nfs4state.c 	rpc_init_wait_queue(&sc->wait, "Seqid_waitqueue");
sc                478 fs/nfs/nfs4state.c nfs4_destroy_seqid_counter(struct nfs_seqid_counter *sc)
sc                480 fs/nfs/nfs4state.c 	rpc_destroy_wait_queue(&sc->wait);
sc               2737 fs/nfs/nfs4xdr.c 	const struct nfs4_setclientid *sc = data;
sc               2743 fs/nfs/nfs4xdr.c 	encode_setclientid(xdr, sc, &hdr);
sc                417 fs/nfsd/filecache.c nfsd_file_lru_count(struct shrinker *s, struct shrink_control *sc)
sc                423 fs/nfsd/filecache.c nfsd_file_lru_scan(struct shrinker *s, struct shrink_control *sc)
sc                428 fs/nfsd/filecache.c 	ret = list_lru_shrink_walk(&nfsd_file_lru, sc, nfsd_file_lru_cb, &head);
sc                 41 fs/nfsd/nfscache.c 					    struct shrink_control *sc);
sc                 43 fs/nfsd/nfscache.c 					   struct shrink_control *sc);
sc                272 fs/nfsd/nfscache.c nfsd_reply_cache_count(struct shrinker *shrink, struct shrink_control *sc)
sc                281 fs/nfsd/nfscache.c nfsd_reply_cache_scan(struct shrinker *shrink, struct shrink_control *sc)
sc               1884 fs/ntfs/file.c 			size_t sc;
sc               1894 fs/ntfs/file.c 			sc = iov_iter_single_seg_count(i);
sc               1895 fs/ntfs/file.c 			if (bytes > sc)
sc               1896 fs/ntfs/file.c 				bytes = sc;
sc                194 fs/ocfs2/cluster/netdebug.c void o2net_debug_add_sc(struct o2net_sock_container *sc)
sc                197 fs/ocfs2/cluster/netdebug.c 	list_add(&sc->sc_net_debug_item, &sock_containers);
sc                201 fs/ocfs2/cluster/netdebug.c void o2net_debug_del_sc(struct o2net_sock_container *sc)
sc                204 fs/ocfs2/cluster/netdebug.c 	list_del_init(&sc->sc_net_debug_item);
sc                216 fs/ocfs2/cluster/netdebug.c 	struct o2net_sock_container *sc, *ret = NULL;
sc                220 fs/ocfs2/cluster/netdebug.c 	list_for_each_entry(sc, &sc_start->sc_net_debug_item,
sc                223 fs/ocfs2/cluster/netdebug.c 		if (&sc->sc_net_debug_item == &sock_containers)
sc                227 fs/ocfs2/cluster/netdebug.c 		if (sc->sc_page != NULL) {
sc                228 fs/ocfs2/cluster/netdebug.c 			ret = sc;
sc                239 fs/ocfs2/cluster/netdebug.c 	struct o2net_sock_container *sc, *dummy_sc = sd->dbg_sock;
sc                242 fs/ocfs2/cluster/netdebug.c 	sc = next_sc(dummy_sc);
sc                245 fs/ocfs2/cluster/netdebug.c 	return sc;
sc                251 fs/ocfs2/cluster/netdebug.c 	struct o2net_sock_container *sc, *dummy_sc = sd->dbg_sock;
sc                254 fs/ocfs2/cluster/netdebug.c 	sc = next_sc(dummy_sc);
sc                256 fs/ocfs2/cluster/netdebug.c 	if (sc)
sc                257 fs/ocfs2/cluster/netdebug.c 		list_add(&dummy_sc->sc_net_debug_item, &sc->sc_net_debug_item);
sc                260 fs/ocfs2/cluster/netdebug.c 	return sc; /* unused, just needs to be null when done */
sc                282 fs/ocfs2/cluster/netdebug.c 			       struct o2net_sock_container *sc)
sc                284 fs/ocfs2/cluster/netdebug.c 	if (!sc)
sc                288 fs/ocfs2/cluster/netdebug.c 		   sc->sc_node->nd_num, (unsigned long)sc_send_count(sc),
sc                289 fs/ocfs2/cluster/netdebug.c 		   (long long)sc_tv_acquiry_total_ns(sc),
sc                290 fs/ocfs2/cluster/netdebug.c 		   (long long)sc_tv_send_total_ns(sc),
sc                291 fs/ocfs2/cluster/netdebug.c 		   (long long)sc_tv_status_total_ns(sc),
sc                292 fs/ocfs2/cluster/netdebug.c 		   (unsigned long)sc_recv_count(sc),
sc                293 fs/ocfs2/cluster/netdebug.c 		   (long long)sc_tv_process_total_ns(sc));
sc                297 fs/ocfs2/cluster/netdebug.c 				   struct o2net_sock_container *sc)
sc                303 fs/ocfs2/cluster/netdebug.c 	if (!sc)
sc                306 fs/ocfs2/cluster/netdebug.c 	if (sc->sc_sock) {
sc                307 fs/ocfs2/cluster/netdebug.c 		inet = inet_sk(sc->sc_sock->sk);
sc                332 fs/ocfs2/cluster/netdebug.c 		   sc,
sc                333 fs/ocfs2/cluster/netdebug.c 		   kref_read(&sc->sc_kref),
sc                336 fs/ocfs2/cluster/netdebug.c 		   sc->sc_node->nd_name,
sc                337 fs/ocfs2/cluster/netdebug.c 		   sc->sc_page_off,
sc                338 fs/ocfs2/cluster/netdebug.c 		   sc->sc_handshake_ok,
sc                339 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_timer),
sc                340 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_data_ready),
sc                341 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_advance_start),
sc                342 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_advance_stop),
sc                343 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_func_start),
sc                344 fs/ocfs2/cluster/netdebug.c 		   (long long)ktime_to_us(sc->sc_tv_func_stop),
sc                345 fs/ocfs2/cluster/netdebug.c 		   sc->sc_msg_key,
sc                346 fs/ocfs2/cluster/netdebug.c 		   sc->sc_msg_type);
sc                352 fs/ocfs2/cluster/netdebug.c 	struct o2net_sock_container *sc, *dummy_sc = sd->dbg_sock;
sc                355 fs/ocfs2/cluster/netdebug.c 	sc = next_sc(dummy_sc);
sc                357 fs/ocfs2/cluster/netdebug.c 	if (sc) {
sc                359 fs/ocfs2/cluster/netdebug.c 			sc_show_sock_container(seq, sc);
sc                361 fs/ocfs2/cluster/netdebug.c 			sc_show_sock_stats(seq, sc);
sc                 64 fs/ocfs2/cluster/tcp.c #define SC_NODEF_ARGS(sc) sc->sc_node->nd_name, sc->sc_node->nd_num,	\
sc                 65 fs/ocfs2/cluster/tcp.c 			  &sc->sc_node->nd_ipv4_address,		\
sc                 66 fs/ocfs2/cluster/tcp.c 			  ntohs(sc->sc_node->nd_ipv4_port)
sc                 83 fs/ocfs2/cluster/tcp.c #define sclog(sc, fmt, args...) do {					\
sc                 84 fs/ocfs2/cluster/tcp.c 	typeof(sc) __sc = (sc);						\
sc                130 fs/ocfs2/cluster/tcp.c static void o2net_sc_postpone_idle(struct o2net_sock_container *sc);
sc                131 fs/ocfs2/cluster/tcp.c static void o2net_sc_reset_idle_timer(struct o2net_sock_container *sc);
sc                160 fs/ocfs2/cluster/tcp.c 						struct o2net_sock_container *sc)
sc                162 fs/ocfs2/cluster/tcp.c 	nst->st_sc = sc;
sc                171 fs/ocfs2/cluster/tcp.c static inline void o2net_set_sock_timer(struct o2net_sock_container *sc)
sc                173 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_timer = ktime_get();
sc                176 fs/ocfs2/cluster/tcp.c static inline void o2net_set_data_ready_time(struct o2net_sock_container *sc)
sc                178 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_data_ready = ktime_get();
sc                181 fs/ocfs2/cluster/tcp.c static inline void o2net_set_advance_start_time(struct o2net_sock_container *sc)
sc                183 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_advance_start = ktime_get();
sc                186 fs/ocfs2/cluster/tcp.c static inline void o2net_set_advance_stop_time(struct o2net_sock_container *sc)
sc                188 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_advance_stop = ktime_get();
sc                191 fs/ocfs2/cluster/tcp.c static inline void o2net_set_func_start_time(struct o2net_sock_container *sc)
sc                193 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_func_start = ktime_get();
sc                196 fs/ocfs2/cluster/tcp.c static inline void o2net_set_func_stop_time(struct o2net_sock_container *sc)
sc                198 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_func_stop = ktime_get();
sc                217 fs/ocfs2/cluster/tcp.c static ktime_t o2net_get_func_run_time(struct o2net_sock_container *sc)
sc                219 fs/ocfs2/cluster/tcp.c 	return ktime_sub(sc->sc_tv_func_stop, sc->sc_tv_func_start);
sc                223 fs/ocfs2/cluster/tcp.c 				    struct o2net_sock_container *sc)
sc                225 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_status_total = ktime_add(sc->sc_tv_status_total,
sc                228 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_send_total = ktime_add(sc->sc_tv_send_total,
sc                231 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_acquiry_total = ktime_add(sc->sc_tv_acquiry_total,
sc                234 fs/ocfs2/cluster/tcp.c 	sc->sc_send_count++;
sc                237 fs/ocfs2/cluster/tcp.c static void o2net_update_recv_stats(struct o2net_sock_container *sc)
sc                239 fs/ocfs2/cluster/tcp.c 	sc->sc_tv_process_total = ktime_add(sc->sc_tv_process_total,
sc                240 fs/ocfs2/cluster/tcp.c 					    o2net_get_func_run_time(sc));
sc                241 fs/ocfs2/cluster/tcp.c 	sc->sc_recv_count++;
sc                248 fs/ocfs2/cluster/tcp.c # define o2net_update_recv_stats(sc)
sc                380 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc = container_of(kref,
sc                382 fs/ocfs2/cluster/tcp.c 	BUG_ON(timer_pending(&sc->sc_idle_timeout));
sc                384 fs/ocfs2/cluster/tcp.c 	sclog(sc, "releasing\n");
sc                386 fs/ocfs2/cluster/tcp.c 	if (sc->sc_sock) {
sc                387 fs/ocfs2/cluster/tcp.c 		sock_release(sc->sc_sock);
sc                388 fs/ocfs2/cluster/tcp.c 		sc->sc_sock = NULL;
sc                391 fs/ocfs2/cluster/tcp.c 	o2nm_undepend_item(&sc->sc_node->nd_item);
sc                392 fs/ocfs2/cluster/tcp.c 	o2nm_node_put(sc->sc_node);
sc                393 fs/ocfs2/cluster/tcp.c 	sc->sc_node = NULL;
sc                395 fs/ocfs2/cluster/tcp.c 	o2net_debug_del_sc(sc);
sc                397 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page)
sc                398 fs/ocfs2/cluster/tcp.c 		__free_page(sc->sc_page);
sc                399 fs/ocfs2/cluster/tcp.c 	kfree(sc);
sc                402 fs/ocfs2/cluster/tcp.c static void sc_put(struct o2net_sock_container *sc)
sc                404 fs/ocfs2/cluster/tcp.c 	sclog(sc, "put\n");
sc                405 fs/ocfs2/cluster/tcp.c 	kref_put(&sc->sc_kref, sc_kref_release);
sc                407 fs/ocfs2/cluster/tcp.c static void sc_get(struct o2net_sock_container *sc)
sc                409 fs/ocfs2/cluster/tcp.c 	sclog(sc, "get\n");
sc                410 fs/ocfs2/cluster/tcp.c 	kref_get(&sc->sc_kref);
sc                414 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc, *ret = NULL;
sc                419 fs/ocfs2/cluster/tcp.c 	sc = kzalloc(sizeof(*sc), GFP_NOFS);
sc                420 fs/ocfs2/cluster/tcp.c 	if (sc == NULL || page == NULL)
sc                423 fs/ocfs2/cluster/tcp.c 	kref_init(&sc->sc_kref);
sc                425 fs/ocfs2/cluster/tcp.c 	sc->sc_node = node;
sc                434 fs/ocfs2/cluster/tcp.c 	INIT_WORK(&sc->sc_connect_work, o2net_sc_connect_completed);
sc                435 fs/ocfs2/cluster/tcp.c 	INIT_WORK(&sc->sc_rx_work, o2net_rx_until_empty);
sc                436 fs/ocfs2/cluster/tcp.c 	INIT_WORK(&sc->sc_shutdown_work, o2net_shutdown_sc);
sc                437 fs/ocfs2/cluster/tcp.c 	INIT_DELAYED_WORK(&sc->sc_keepalive_work, o2net_sc_send_keep_req);
sc                439 fs/ocfs2/cluster/tcp.c 	timer_setup(&sc->sc_idle_timeout, o2net_idle_timer, 0);
sc                441 fs/ocfs2/cluster/tcp.c 	sclog(sc, "alloced\n");
sc                443 fs/ocfs2/cluster/tcp.c 	ret = sc;
sc                444 fs/ocfs2/cluster/tcp.c 	sc->sc_page = page;
sc                445 fs/ocfs2/cluster/tcp.c 	o2net_debug_add_sc(sc);
sc                446 fs/ocfs2/cluster/tcp.c 	sc = NULL;
sc                452 fs/ocfs2/cluster/tcp.c 	kfree(sc);
sc                459 fs/ocfs2/cluster/tcp.c static void o2net_sc_queue_work(struct o2net_sock_container *sc,
sc                462 fs/ocfs2/cluster/tcp.c 	sc_get(sc);
sc                464 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc                466 fs/ocfs2/cluster/tcp.c static void o2net_sc_queue_delayed_work(struct o2net_sock_container *sc,
sc                470 fs/ocfs2/cluster/tcp.c 	sc_get(sc);
sc                472 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc                474 fs/ocfs2/cluster/tcp.c static void o2net_sc_cancel_delayed_work(struct o2net_sock_container *sc,
sc                478 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
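sc_get()/sc_put() above wrap kref_get()/kref_put() around the socket container, with sc_kref_release() tearing everything down once the last reference drops, and o2net_sc_queue_work() taking a reference before handing `sc` to asynchronous work. A minimal sketch of that reference-counting shape, with hypothetical demo_conn_* names:

	#include <linux/kernel.h>
	#include <linux/kref.h>
	#include <linux/slab.h>
	#include <linux/mm.h>

	struct demo_conn {
		struct kref	ref;
		struct page	*page;		/* example resource owned by the object */
	};

	/* Called by kref_put() when the last reference goes away. */
	static void demo_conn_release(struct kref *kref)
	{
		struct demo_conn *dc = container_of(kref, struct demo_conn, ref);

		if (dc->page)
			__free_page(dc->page);
		kfree(dc);
	}

	static struct demo_conn *demo_conn_alloc(void)
	{
		struct demo_conn *dc = kzalloc(sizeof(*dc), GFP_NOFS);

		if (!dc)
			return NULL;
		kref_init(&dc->ref);		/* caller starts out holding one reference */
		dc->page = alloc_page(GFP_NOFS);
		if (!dc->page) {
			kfree(dc);
			return NULL;
		}
		return dc;
	}

	static void demo_conn_get(struct demo_conn *dc)
	{
		kref_get(&dc->ref);
	}

	static void demo_conn_put(struct demo_conn *dc)
	{
		kref_put(&dc->ref, demo_conn_release);
	}

The rule visible in the queue helpers above is: take a reference before the pointer escapes to a work item or timer, and drop it on every path where that work will not run.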
sc                489 fs/ocfs2/cluster/tcp.c 			       struct o2net_sock_container *sc,
sc                498 fs/ocfs2/cluster/tcp.c 	if (old_sc && !sc)
sc                500 fs/ocfs2/cluster/tcp.c 	else if (!old_sc && sc)
sc                505 fs/ocfs2/cluster/tcp.c 	BUG_ON(sc && nn->nn_sc && nn->nn_sc != sc);
sc                507 fs/ocfs2/cluster/tcp.c 	mlog_bug_on_msg(valid && !sc, "valid %u sc %p\n", valid, sc);
sc                513 fs/ocfs2/cluster/tcp.c 	     o2net_num_from_nn(nn), nn->nn_sc, sc, nn->nn_sc_valid, valid,
sc                516 fs/ocfs2/cluster/tcp.c 	nn->nn_sc = sc;
sc                541 fs/ocfs2/cluster/tcp.c 		       o2nm_this_node() > sc->sc_node->nd_num ?
sc                543 fs/ocfs2/cluster/tcp.c 		       SC_NODEF_ARGS(sc));
sc                576 fs/ocfs2/cluster/tcp.c 	if ((old_sc == NULL) && sc)
sc                577 fs/ocfs2/cluster/tcp.c 		sc_get(sc);
sc                578 fs/ocfs2/cluster/tcp.c 	if (old_sc && (old_sc != sc)) {
sc                588 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc;
sc                591 fs/ocfs2/cluster/tcp.c 	sc = sk->sk_user_data;
sc                592 fs/ocfs2/cluster/tcp.c 	if (sc) {
sc                593 fs/ocfs2/cluster/tcp.c 		sclog(sc, "data_ready hit\n");
sc                594 fs/ocfs2/cluster/tcp.c 		o2net_set_data_ready_time(sc);
sc                595 fs/ocfs2/cluster/tcp.c 		o2net_sc_queue_work(sc, &sc->sc_rx_work);
sc                596 fs/ocfs2/cluster/tcp.c 		ready = sc->sc_data_ready;
sc                609 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc;
sc                612 fs/ocfs2/cluster/tcp.c 	sc = sk->sk_user_data;
sc                613 fs/ocfs2/cluster/tcp.c 	if (sc == NULL) {
sc                618 fs/ocfs2/cluster/tcp.c 	sclog(sc, "state_change to %d\n", sk->sk_state);
sc                620 fs/ocfs2/cluster/tcp.c 	state_change = sc->sc_state_change;
sc                628 fs/ocfs2/cluster/tcp.c 		o2net_sc_queue_work(sc, &sc->sc_connect_work);
sc                633 fs/ocfs2/cluster/tcp.c 			SC_NODEF_ARGS(sc), sk->sk_state);
sc                634 fs/ocfs2/cluster/tcp.c 		o2net_sc_queue_work(sc, &sc->sc_shutdown_work);
sc                648 fs/ocfs2/cluster/tcp.c 				     struct o2net_sock_container *sc)
sc                659 fs/ocfs2/cluster/tcp.c 	sk->sk_user_data = sc;
sc                660 fs/ocfs2/cluster/tcp.c 	sc_get(sc);
sc                662 fs/ocfs2/cluster/tcp.c 	sc->sc_data_ready = sk->sk_data_ready;
sc                663 fs/ocfs2/cluster/tcp.c 	sc->sc_state_change = sk->sk_state_change;
sc                667 fs/ocfs2/cluster/tcp.c 	mutex_init(&sc->sc_send_lock);
sc                673 fs/ocfs2/cluster/tcp.c 			           struct o2net_sock_container *sc)
sc                678 fs/ocfs2/cluster/tcp.c 	if (sk->sk_user_data == sc) {
sc                681 fs/ocfs2/cluster/tcp.c 		sk->sk_data_ready = sc->sc_data_ready;
sc                682 fs/ocfs2/cluster/tcp.c 		sk->sk_state_change = sc->sc_state_change;
sc                696 fs/ocfs2/cluster/tcp.c 			           struct o2net_sock_container *sc,
sc                700 fs/ocfs2/cluster/tcp.c 	if (nn->nn_sc == sc)
sc                715 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc =
sc                718 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc                720 fs/ocfs2/cluster/tcp.c 	sclog(sc, "shutting down\n");
sc                723 fs/ocfs2/cluster/tcp.c 	if (o2net_unregister_callbacks(sc->sc_sock->sk, sc)) {
sc                726 fs/ocfs2/cluster/tcp.c 		del_timer_sync(&sc->sc_idle_timeout);
sc                727 fs/ocfs2/cluster/tcp.c 		o2net_sc_cancel_delayed_work(sc, &sc->sc_keepalive_work);
sc                728 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc                729 fs/ocfs2/cluster/tcp.c 		kernel_sock_shutdown(sc->sc_sock, SHUT_RDWR);
sc                734 fs/ocfs2/cluster/tcp.c 	o2net_ensure_shutdown(nn, sc, 0);
sc                735 fs/ocfs2/cluster/tcp.c 	sc_put(sc);
sc                931 fs/ocfs2/cluster/tcp.c static void o2net_sendpage(struct o2net_sock_container *sc,
sc                935 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc                939 fs/ocfs2/cluster/tcp.c 		mutex_lock(&sc->sc_send_lock);
sc                940 fs/ocfs2/cluster/tcp.c 		ret = sc->sc_sock->ops->sendpage(sc->sc_sock,
sc                944 fs/ocfs2/cluster/tcp.c 		mutex_unlock(&sc->sc_send_lock);
sc                949 fs/ocfs2/cluster/tcp.c 			     " returned EAGAIN\n", size, SC_NODEF_ARGS(sc));
sc                954 fs/ocfs2/cluster/tcp.c 		     " failed with %zd\n", size, SC_NODEF_ARGS(sc), ret);
sc                955 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, 0);
sc                997 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc;
sc               1004 fs/ocfs2/cluster/tcp.c 		if (!o2net_tx_can_proceed(o2net_nn_from_num(node), &sc, &ret))
sc               1008 fs/ocfs2/cluster/tcp.c 			sc_put(sc);
sc               1021 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc = NULL;
sc               1058 fs/ocfs2/cluster/tcp.c 	wait_event(nn->nn_sc_wq, o2net_tx_can_proceed(nn, &sc, &ret));
sc               1062 fs/ocfs2/cluster/tcp.c 	o2net_set_nst_sock_container(&nst, sc);
sc               1096 fs/ocfs2/cluster/tcp.c 	mutex_lock(&sc->sc_send_lock);
sc               1097 fs/ocfs2/cluster/tcp.c 	ret = o2net_send_tcp_msg(sc->sc_sock, vec, veclen,
sc               1099 fs/ocfs2/cluster/tcp.c 	mutex_unlock(&sc->sc_send_lock);
sc               1110 fs/ocfs2/cluster/tcp.c 	o2net_update_send_stats(&nst, sc);
sc               1123 fs/ocfs2/cluster/tcp.c 	if (sc)
sc               1124 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc               1168 fs/ocfs2/cluster/tcp.c static int o2net_process_message(struct o2net_sock_container *sc,
sc               1171 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc               1179 fs/ocfs2/cluster/tcp.c 	o2net_sc_postpone_idle(sc);
sc               1190 fs/ocfs2/cluster/tcp.c 			o2net_sendpage(sc, o2net_keep_resp,
sc               1223 fs/ocfs2/cluster/tcp.c 	o2net_set_func_start_time(sc);
sc               1224 fs/ocfs2/cluster/tcp.c 	sc->sc_msg_key = be32_to_cpu(hdr->key);
sc               1225 fs/ocfs2/cluster/tcp.c 	sc->sc_msg_type = be16_to_cpu(hdr->msg_type);
sc               1229 fs/ocfs2/cluster/tcp.c 	o2net_set_func_stop_time(sc);
sc               1231 fs/ocfs2/cluster/tcp.c 	o2net_update_recv_stats(sc);
sc               1235 fs/ocfs2/cluster/tcp.c 	mutex_lock(&sc->sc_send_lock);
sc               1236 fs/ocfs2/cluster/tcp.c 	ret = o2net_send_status_magic(sc->sc_sock, hdr, syserr,
sc               1238 fs/ocfs2/cluster/tcp.c 	mutex_unlock(&sc->sc_send_lock);
sc               1256 fs/ocfs2/cluster/tcp.c static int o2net_check_handshake(struct o2net_sock_container *sc)
sc               1258 fs/ocfs2/cluster/tcp.c 	struct o2net_handshake *hand = page_address(sc->sc_page);
sc               1259 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc               1264 fs/ocfs2/cluster/tcp.c 		       "Disconnecting.\n", SC_NODEF_ARGS(sc),
sc               1269 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, -ENOTCONN);
sc               1282 fs/ocfs2/cluster/tcp.c 		       "Disconnecting.\n", SC_NODEF_ARGS(sc),
sc               1285 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, -ENOTCONN);
sc               1293 fs/ocfs2/cluster/tcp.c 		       "Disconnecting.\n", SC_NODEF_ARGS(sc),
sc               1296 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, -ENOTCONN);
sc               1304 fs/ocfs2/cluster/tcp.c 		       "Disconnecting.\n", SC_NODEF_ARGS(sc),
sc               1307 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, -ENOTCONN);
sc               1311 fs/ocfs2/cluster/tcp.c 	sc->sc_handshake_ok = 1;
sc               1316 fs/ocfs2/cluster/tcp.c 	if (nn->nn_sc == sc) {
sc               1317 fs/ocfs2/cluster/tcp.c 		o2net_sc_reset_idle_timer(sc);
sc               1319 fs/ocfs2/cluster/tcp.c 		o2net_set_nn_state(nn, sc, 1, 0);
sc               1324 fs/ocfs2/cluster/tcp.c 	sc->sc_page_off -= sizeof(struct o2net_handshake);
sc               1325 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page_off)
sc               1326 fs/ocfs2/cluster/tcp.c 		memmove(hand, hand + 1, sc->sc_page_off);
sc               1334 fs/ocfs2/cluster/tcp.c static int o2net_advance_rx(struct o2net_sock_container *sc)
sc               1341 fs/ocfs2/cluster/tcp.c 	sclog(sc, "receiving\n");
sc               1342 fs/ocfs2/cluster/tcp.c 	o2net_set_advance_start_time(sc);
sc               1344 fs/ocfs2/cluster/tcp.c 	if (unlikely(sc->sc_handshake_ok == 0)) {
sc               1345 fs/ocfs2/cluster/tcp.c 		if(sc->sc_page_off < sizeof(struct o2net_handshake)) {
sc               1346 fs/ocfs2/cluster/tcp.c 			data = page_address(sc->sc_page) + sc->sc_page_off;
sc               1347 fs/ocfs2/cluster/tcp.c 			datalen = sizeof(struct o2net_handshake) - sc->sc_page_off;
sc               1348 fs/ocfs2/cluster/tcp.c 			ret = o2net_recv_tcp_msg(sc->sc_sock, data, datalen);
sc               1350 fs/ocfs2/cluster/tcp.c 				sc->sc_page_off += ret;
sc               1353 fs/ocfs2/cluster/tcp.c 		if (sc->sc_page_off == sizeof(struct o2net_handshake)) {
sc               1354 fs/ocfs2/cluster/tcp.c 			o2net_check_handshake(sc);
sc               1355 fs/ocfs2/cluster/tcp.c 			if (unlikely(sc->sc_handshake_ok == 0))
sc               1362 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page_off < sizeof(struct o2net_msg)) {
sc               1363 fs/ocfs2/cluster/tcp.c 		data = page_address(sc->sc_page) + sc->sc_page_off;
sc               1364 fs/ocfs2/cluster/tcp.c 		datalen = sizeof(struct o2net_msg) - sc->sc_page_off;
sc               1365 fs/ocfs2/cluster/tcp.c 		ret = o2net_recv_tcp_msg(sc->sc_sock, data, datalen);
sc               1367 fs/ocfs2/cluster/tcp.c 			sc->sc_page_off += ret;
sc               1371 fs/ocfs2/cluster/tcp.c 			if (sc->sc_page_off == sizeof(struct o2net_msg)) {
sc               1372 fs/ocfs2/cluster/tcp.c 				hdr = page_address(sc->sc_page);
sc               1382 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page_off < sizeof(struct o2net_msg)) {
sc               1388 fs/ocfs2/cluster/tcp.c 	hdr = page_address(sc->sc_page);
sc               1390 fs/ocfs2/cluster/tcp.c 	msglog(hdr, "at page_off %zu\n", sc->sc_page_off);
sc               1393 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page_off - sizeof(struct o2net_msg) < be16_to_cpu(hdr->data_len)) {
sc               1395 fs/ocfs2/cluster/tcp.c 		data = page_address(sc->sc_page) + sc->sc_page_off;
sc               1397 fs/ocfs2/cluster/tcp.c 			  sc->sc_page_off;
sc               1398 fs/ocfs2/cluster/tcp.c 		ret = o2net_recv_tcp_msg(sc->sc_sock, data, datalen);
sc               1400 fs/ocfs2/cluster/tcp.c 			sc->sc_page_off += ret;
sc               1405 fs/ocfs2/cluster/tcp.c 	if (sc->sc_page_off - sizeof(struct o2net_msg) == be16_to_cpu(hdr->data_len)) {
sc               1409 fs/ocfs2/cluster/tcp.c 		ret = o2net_process_message(sc, hdr);
sc               1412 fs/ocfs2/cluster/tcp.c 		sc->sc_page_off = 0;
sc               1416 fs/ocfs2/cluster/tcp.c 	sclog(sc, "ret = %d\n", ret);
sc               1417 fs/ocfs2/cluster/tcp.c 	o2net_set_advance_stop_time(sc);
sc               1426 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc =
sc               1431 fs/ocfs2/cluster/tcp.c 		ret = o2net_advance_rx(sc);
sc               1435 fs/ocfs2/cluster/tcp.c 		struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc               1436 fs/ocfs2/cluster/tcp.c 		sclog(sc, "saw error %d, closing\n", ret);
sc               1438 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, 0);
sc               1441 fs/ocfs2/cluster/tcp.c 	sc_put(sc);
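o2net_advance_rx() above accumulates bytes into sc->sc_page at offset sc->sc_page_off, first until a fixed-size header is complete and then until the payload length announced by that header has arrived, with o2net_rx_until_empty() looping until the socket runs dry. A hedged sketch of that incremental-receive accumulator; demo_hdr/demo_rx/demo_rx_feed are hypothetical names, and a real implementation would validate data_len up front the way o2net_process_message() bounds its payload:

	#include <linux/kernel.h>
	#include <linux/types.h>
	#include <linux/string.h>
	#include <asm/byteorder.h>

	/* Hypothetical fixed-size wire header followed by data_len payload bytes. */
	struct demo_hdr {
		__be16 magic;
		__be16 data_len;
	};

	struct demo_rx {
		u8	buf[4096];	/* stand-in for the sc->sc_page receive page */
		size_t	off;		/* stand-in for sc->sc_page_off */
	};

	/*
	 * Copy as many incoming bytes as are wanted and fit; return true once a
	 * complete header-plus-payload message sits at the start of rx->buf.
	 * The caller processes it and resets rx->off to 0, then feeds any
	 * leftover bytes again.
	 */
	static bool demo_rx_feed(struct demo_rx *rx, const u8 *data, size_t *len)
	{
		struct demo_hdr *hdr = (struct demo_hdr *)rx->buf;
		size_t want, take;

		if (rx->off < sizeof(*hdr))
			/* Phase 1: accumulate the fixed-size header. */
			want = sizeof(*hdr) - rx->off;
		else
			/* Phase 2: accumulate the payload the header announced. */
			want = sizeof(*hdr) + be16_to_cpu(hdr->data_len) - rx->off;

		take = min3(want, *len, sizeof(rx->buf) - rx->off);
		memcpy(rx->buf + rx->off, data, take);
		rx->off += take;
		*len -= take;

		return rx->off >= sizeof(*hdr) &&
		       rx->off == sizeof(*hdr) + be16_to_cpu(hdr->data_len);
	}

The two-phase structure is why sc_page_off is compared against sizeof(struct o2net_msg) twice in the lines above: once to decide whether more header bytes are needed, and once to decide whether the payload is complete.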
sc               1477 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc =
sc               1486 fs/ocfs2/cluster/tcp.c 	o2net_sendpage(sc, o2net_hand, sizeof(*o2net_hand));
sc               1487 fs/ocfs2/cluster/tcp.c 	sc_put(sc);
sc               1493 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc =
sc               1497 fs/ocfs2/cluster/tcp.c 	o2net_sendpage(sc, o2net_keep_req, sizeof(*o2net_keep_req));
sc               1498 fs/ocfs2/cluster/tcp.c 	sc_put(sc);
sc               1506 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc = from_timer(sc, t, sc_idle_timeout);
sc               1507 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc               1510 fs/ocfs2/cluster/tcp.c 		ktime_to_ms(sc->sc_tv_timer);
sc               1517 fs/ocfs2/cluster/tcp.c 	       SC_NODEF_ARGS(sc), msecs / 1000, msecs % 1000);
sc               1528 fs/ocfs2/cluster/tcp.c 	o2net_sc_reset_idle_timer(sc);
sc               1532 fs/ocfs2/cluster/tcp.c static void o2net_sc_reset_idle_timer(struct o2net_sock_container *sc)
sc               1534 fs/ocfs2/cluster/tcp.c 	o2net_sc_cancel_delayed_work(sc, &sc->sc_keepalive_work);
sc               1535 fs/ocfs2/cluster/tcp.c 	o2net_sc_queue_delayed_work(sc, &sc->sc_keepalive_work,
sc               1537 fs/ocfs2/cluster/tcp.c 	o2net_set_sock_timer(sc);
sc               1538 fs/ocfs2/cluster/tcp.c 	mod_timer(&sc->sc_idle_timeout,
sc               1542 fs/ocfs2/cluster/tcp.c static void o2net_sc_postpone_idle(struct o2net_sock_container *sc)
sc               1544 fs/ocfs2/cluster/tcp.c 	struct o2net_node *nn = o2net_nn_from_num(sc->sc_node->nd_num);
sc               1554 fs/ocfs2/cluster/tcp.c 	if (timer_pending(&sc->sc_idle_timeout))
sc               1555 fs/ocfs2/cluster/tcp.c 		o2net_sc_reset_idle_timer(sc);
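The sc_idle_timeout entries above show the standard kernel timer idiom: timer_setup() at allocation time, from_timer() to recover the container in the callback, mod_timer() to push the deadline out while traffic flows, and timer_pending()/del_timer_sync() around teardown. A small sketch of the same shape with hypothetical demo_conn2_* names:

	#include <linux/timer.h>
	#include <linux/jiffies.h>
	#include <linux/workqueue.h>

	struct demo_conn2 {
		struct timer_list	idle_timer;
		struct work_struct	shutdown_work;
	};

	/* Fires only if nothing pushed the deadline out in time. */
	static void demo_idle_timer(struct timer_list *t)
	{
		struct demo_conn2 *dc = from_timer(dc, t, idle_timer);

		schedule_work(&dc->shutdown_work);	/* tear the connection down */
	}

	static void demo_conn2_arm(struct demo_conn2 *dc, unsigned long timeout_ms)
	{
		mod_timer(&dc->idle_timer, jiffies + msecs_to_jiffies(timeout_ms));
	}

	static void demo_conn2_touch(struct demo_conn2 *dc, unsigned long timeout_ms)
	{
		/* Mirror o2net_sc_postpone_idle(): only push out a timer that is armed. */
		if (timer_pending(&dc->idle_timer))
			demo_conn2_arm(dc, timeout_ms);
	}

	static void demo_conn2_init(struct demo_conn2 *dc, work_func_t shutdown_fn)
	{
		timer_setup(&dc->idle_timer, demo_idle_timer, 0);
		INIT_WORK(&dc->shutdown_work, shutdown_fn);
	}

	static void demo_conn2_stop(struct demo_conn2 *dc)
	{
		del_timer_sync(&dc->idle_timer);	/* must not race with demo_idle_timer() */
		cancel_work_sync(&dc->shutdown_work);
	}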
sc               1567 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc = NULL;
sc               1613 fs/ocfs2/cluster/tcp.c 	sc = sc_alloc(node);
sc               1614 fs/ocfs2/cluster/tcp.c 	if (sc == NULL) {
sc               1625 fs/ocfs2/cluster/tcp.c 	sc->sc_sock = sock; /* freed by sc_kref_release */
sc               1641 fs/ocfs2/cluster/tcp.c 	ret = o2net_set_nodelay(sc->sc_sock);
sc               1653 fs/ocfs2/cluster/tcp.c 	o2net_register_callbacks(sc->sc_sock->sk, sc);
sc               1657 fs/ocfs2/cluster/tcp.c 	o2net_set_nn_state(nn, sc, 0, 0);
sc               1664 fs/ocfs2/cluster/tcp.c 	ret = sc->sc_sock->ops->connect(sc->sc_sock,
sc               1672 fs/ocfs2/cluster/tcp.c 	if (ret && sc) {
sc               1674 fs/ocfs2/cluster/tcp.c 		       " failed with errno %d\n", SC_NODEF_ARGS(sc), ret);
sc               1677 fs/ocfs2/cluster/tcp.c 		o2net_ensure_shutdown(nn, sc, 0);
sc               1679 fs/ocfs2/cluster/tcp.c 	if (sc)
sc               1680 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc               1811 fs/ocfs2/cluster/tcp.c 	struct o2net_sock_container *sc = NULL;
sc               1907 fs/ocfs2/cluster/tcp.c 	sc = sc_alloc(node);
sc               1908 fs/ocfs2/cluster/tcp.c 	if (sc == NULL) {
sc               1913 fs/ocfs2/cluster/tcp.c 	sc->sc_sock = new_sock;
sc               1918 fs/ocfs2/cluster/tcp.c 	o2net_set_nn_state(nn, sc, 0, 0);
sc               1921 fs/ocfs2/cluster/tcp.c 	o2net_register_callbacks(sc->sc_sock->sk, sc);
sc               1922 fs/ocfs2/cluster/tcp.c 	o2net_sc_queue_work(sc, &sc->sc_rx_work);
sc               1925 fs/ocfs2/cluster/tcp.c 	o2net_sendpage(sc, o2net_hand, sizeof(*o2net_hand));
sc               1934 fs/ocfs2/cluster/tcp.c 	if (sc)
sc               1935 fs/ocfs2/cluster/tcp.c 		sc_put(sc);
sc                116 fs/ocfs2/cluster/tcp.h void o2net_debug_add_sc(struct o2net_sock_container *sc);
sc                117 fs/ocfs2/cluster/tcp.h void o2net_debug_del_sc(struct o2net_sock_container *sc);
sc                131 fs/ocfs2/cluster/tcp.h static inline void o2net_debug_add_sc(struct o2net_sock_container *sc)
sc                134 fs/ocfs2/cluster/tcp.h static inline void o2net_debug_del_sc(struct o2net_sock_container *sc)
sc               1214 fs/ocfs2/dlm/dlmmaster.c 	enum dlm_node_state_change sc;
sc               1224 fs/ocfs2/dlm/dlmmaster.c 	node = dlm_bitmap_diff_iter_next(&bdi, &sc);
sc               1226 fs/ocfs2/dlm/dlmmaster.c 		if (sc == NODE_UP) {
sc               1292 fs/ocfs2/dlm/dlmmaster.c 		node = dlm_bitmap_diff_iter_next(&bdi, &sc);
sc                719 fs/quota/dquot.c dqcache_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc                725 fs/quota/dquot.c 	while (!list_empty(&free_dquots) && sc->nr_to_scan) {
sc                731 fs/quota/dquot.c 		sc->nr_to_scan--;
sc                739 fs/quota/dquot.c dqcache_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc                 62 fs/super.c     				      struct shrink_control *sc)
sc                 77 fs/super.c     	if (!(sc->gfp_mask & __GFP_FS))
sc                 84 fs/super.c     		fs_objects = sb->s_op->nr_cached_objects(sb, sc);
sc                 86 fs/super.c     	inodes = list_lru_shrink_count(&sb->s_inode_lru, sc);
sc                 87 fs/super.c     	dentries = list_lru_shrink_count(&sb->s_dentry_lru, sc);
sc                 93 fs/super.c     	dentries = mult_frac(sc->nr_to_scan, dentries, total_objects);
sc                 94 fs/super.c     	inodes = mult_frac(sc->nr_to_scan, inodes, total_objects);
sc                 95 fs/super.c     	fs_objects = mult_frac(sc->nr_to_scan, fs_objects, total_objects);
sc                104 fs/super.c     	sc->nr_to_scan = dentries + 1;
sc                105 fs/super.c     	freed = prune_dcache_sb(sb, sc);
sc                106 fs/super.c     	sc->nr_to_scan = inodes + 1;
sc                107 fs/super.c     	freed += prune_icache_sb(sb, sc);
sc                110 fs/super.c     		sc->nr_to_scan = fs_objects + 1;
sc                111 fs/super.c     		freed += sb->s_op->free_cached_objects(sb, sc);
sc                119 fs/super.c     				       struct shrink_control *sc)
sc                145 fs/super.c     		total_objects = sb->s_op->nr_cached_objects(sb, sc);
sc                147 fs/super.c     	total_objects += list_lru_shrink_count(&sb->s_dentry_lru, sc);
sc                148 fs/super.c     	total_objects += list_lru_shrink_count(&sb->s_inode_lru, sc);
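super_cache_scan() above divides the requested scan count among the dentry LRU, the inode LRU and any filesystem-private cache in proportion to how many objects each holds; mult_frac(x, n, d) computes x*n/d while avoiding intermediate overflow for large counts. A hedged sketch of that apportioning with made-up numbers (demo_split_scan is not a kernel function):

	#include <linux/kernel.h>

	/*
	 * Split @nr_to_scan between two caches in proportion to their
	 * populations, the way super_cache_scan() does for dentries and inodes.
	 * mult_frac() expands to (x/d)*n + ((x%d)*n)/d, which keeps the
	 * intermediate products from overflowing.
	 */
	static void demo_split_scan(unsigned long nr_to_scan,
				    unsigned long dentries, unsigned long inodes,
				    unsigned long *scan_dentries,
				    unsigned long *scan_inodes)
	{
		unsigned long total = dentries + inodes;

		if (!total)
			total = 1;	/* avoid a divide-by-zero on empty caches */

		*scan_dentries = mult_frac(nr_to_scan, dentries, total);
		*scan_inodes   = mult_frac(nr_to_scan, inodes, total);
	}

	/*
	 * e.g. nr_to_scan = 128, dentries = 3000, inodes = 1000
	 *   -> scan 96 dentries and 32 inodes, preserving the 3:1 ratio.
	 */

The "+ 1" visible in the fs/super.c lines above simply guarantees each prune pass gets at least one object to look at even when a cache's proportional share rounds down to zero.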
sc                268 fs/ubifs/shrinker.c 				 struct shrink_control *sc)
sc                280 fs/ubifs/shrinker.c 				struct shrink_control *sc)
sc                282 fs/ubifs/shrinker.c 	unsigned long nr = sc->nr_to_scan;
sc               1881 fs/ubifs/ubifs.h 				struct shrink_control *sc);
sc               1883 fs/ubifs/ubifs.h 				 struct shrink_control *sc);
sc                 25 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc,
sc                 28 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                 29 fs/xfs/scrub/agheader.c 	xfs_agnumber_t		agno = sc->sm->sm_agno;
sc                 33 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                 38 fs/xfs/scrub/agheader.c 	error = xchk_ag_init(sc, agno, &sc->sa);
sc                 39 fs/xfs/scrub/agheader.c 	if (!xchk_xref_process_error(sc, agno, agbno, &error))
sc                 42 fs/xfs/scrub/agheader.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                 43 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_inode_chunk(sc, agbno, 1);
sc                 44 fs/xfs/scrub/agheader.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_FS);
sc                 45 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                 60 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                 62 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                 71 fs/xfs/scrub/agheader.c 	agno = sc->sm->sm_agno;
sc                 75 fs/xfs/scrub/agheader.c 	error = xfs_sb_read_secondary(mp, sc->tp, agno, &bp);
sc                 92 fs/xfs/scrub/agheader.c 	if (!xchk_process_error(sc, agno, XFS_SB_BLOCK(mp), &error))
sc                104 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                107 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                110 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                113 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                116 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                119 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                122 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                125 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                128 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                131 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                134 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                137 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                140 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                143 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                157 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                165 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                168 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                171 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                174 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                177 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                180 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                183 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                186 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                189 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                192 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                195 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                198 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                206 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                209 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                217 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                220 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                223 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                226 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                229 fs/xfs/scrub/agheader.c 		xchk_block_set_preen(sc, bp);
sc                232 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                235 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                238 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                241 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                246 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                253 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                256 fs/xfs/scrub/agheader.c 			xchk_block_set_preen(sc, bp);
sc                266 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                272 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                279 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                285 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                295 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                305 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                312 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                317 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                320 fs/xfs/scrub/agheader.c 			xchk_block_set_preen(sc, bp);
sc                328 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, bp);
sc                334 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, bp);
sc                336 fs/xfs/scrub/agheader.c 	xchk_superblock_xref(sc, bp);
sc                359 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                361 fs/xfs/scrub/agheader.c 	struct xfs_agf		*agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                365 fs/xfs/scrub/agheader.c 	if (!sc->sa.bno_cur)
sc                368 fs/xfs/scrub/agheader.c 	error = xfs_alloc_query_all(sc->sa.bno_cur,
sc                370 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
sc                373 fs/xfs/scrub/agheader.c 		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                379 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                381 fs/xfs/scrub/agheader.c 	struct xfs_agf		*agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                387 fs/xfs/scrub/agheader.c 	if (!sc->sa.cnt_cur)
sc                391 fs/xfs/scrub/agheader.c 	error = xfs_alloc_lookup_le(sc->sa.cnt_cur, 0, -1U, &have);
sc                392 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
sc                396 fs/xfs/scrub/agheader.c 			xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                401 fs/xfs/scrub/agheader.c 	error = xfs_alloc_get_rec(sc->sa.cnt_cur, &agbno, &blocks, &have);
sc                402 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
sc                405 fs/xfs/scrub/agheader.c 		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                411 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                413 fs/xfs/scrub/agheader.c 	struct xfs_agf		*agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                414 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                420 fs/xfs/scrub/agheader.c 	if (sc->sa.rmap_cur) {
sc                421 fs/xfs/scrub/agheader.c 		error = xfs_btree_count_blocks(sc->sa.rmap_cur, &blocks);
sc                422 fs/xfs/scrub/agheader.c 		if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                426 fs/xfs/scrub/agheader.c 			xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                435 fs/xfs/scrub/agheader.c 	if ((xfs_sb_version_hasrmapbt(&mp->m_sb) && !sc->sa.rmap_cur) ||
sc                436 fs/xfs/scrub/agheader.c 	    !sc->sa.bno_cur || !sc->sa.cnt_cur)
sc                440 fs/xfs/scrub/agheader.c 	error = xfs_btree_count_blocks(sc->sa.bno_cur, &blocks);
sc                441 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
sc                445 fs/xfs/scrub/agheader.c 	error = xfs_btree_count_blocks(sc->sa.cnt_cur, &blocks);
sc                446 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.cnt_cur))
sc                451 fs/xfs/scrub/agheader.c 		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                457 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                459 fs/xfs/scrub/agheader.c 	struct xfs_agf		*agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                463 fs/xfs/scrub/agheader.c 	if (!sc->sa.refc_cur)
sc                466 fs/xfs/scrub/agheader.c 	error = xfs_btree_count_blocks(sc->sa.refc_cur, &blocks);
sc                467 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
sc                470 fs/xfs/scrub/agheader.c 		xchk_block_xref_set_corrupt(sc, sc->sa.agf_bp);
sc                476 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                478 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                482 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                487 fs/xfs/scrub/agheader.c 	error = xchk_ag_btcur_init(sc, &sc->sa);
sc                491 fs/xfs/scrub/agheader.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                492 fs/xfs/scrub/agheader.c 	xchk_agf_xref_freeblks(sc);
sc                493 fs/xfs/scrub/agheader.c 	xchk_agf_xref_cntbt(sc);
sc                494 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_inode_chunk(sc, agbno, 1);
sc                495 fs/xfs/scrub/agheader.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_FS);
sc                496 fs/xfs/scrub/agheader.c 	xchk_agf_xref_btreeblks(sc);
sc                497 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                498 fs/xfs/scrub/agheader.c 	xchk_agf_xref_refcblks(sc);
sc                506 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                508 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                521 fs/xfs/scrub/agheader.c 	agno = sc->sa.agno = sc->sm->sm_agno;
sc                522 fs/xfs/scrub/agheader.c 	error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
sc                523 fs/xfs/scrub/agheader.c 			&sc->sa.agf_bp, &sc->sa.agfl_bp);
sc                524 fs/xfs/scrub/agheader.c 	if (!xchk_process_error(sc, agno, XFS_AGF_BLOCK(sc->mp), &error))
sc                526 fs/xfs/scrub/agheader.c 	xchk_buffer_recheck(sc, sc->sa.agf_bp);
sc                528 fs/xfs/scrub/agheader.c 	agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                533 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                538 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                542 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                546 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                550 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                555 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                559 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                565 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                569 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                581 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                586 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                588 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                590 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                593 fs/xfs/scrub/agheader.c 	xchk_agf_xref(sc);
sc                604 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc;
sc                610 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc,
sc                613 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                616 fs/xfs/scrub/agheader.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                617 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_inode_chunk(sc, agbno, 1);
sc                618 fs/xfs/scrub/agheader.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_AG);
sc                619 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                630 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc = sai->sc;
sc                631 fs/xfs/scrub/agheader.c 	xfs_agnumber_t		agno = sc->sa.agno;
sc                637 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agfl_bp);
sc                639 fs/xfs/scrub/agheader.c 	xchk_agfl_block_xref(sc, agbno);
sc                641 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                661 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                663 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                667 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                672 fs/xfs/scrub/agheader.c 	error = xchk_ag_btcur_init(sc, &sc->sa);
sc                676 fs/xfs/scrub/agheader.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                677 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_inode_chunk(sc, agbno, 1);
sc                678 fs/xfs/scrub/agheader.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_FS);
sc                679 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                690 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                699 fs/xfs/scrub/agheader.c 	agno = sc->sa.agno = sc->sm->sm_agno;
sc                700 fs/xfs/scrub/agheader.c 	error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
sc                701 fs/xfs/scrub/agheader.c 			&sc->sa.agf_bp, &sc->sa.agfl_bp);
sc                702 fs/xfs/scrub/agheader.c 	if (!xchk_process_error(sc, agno, XFS_AGFL_BLOCK(sc->mp), &error))
sc                704 fs/xfs/scrub/agheader.c 	if (!sc->sa.agf_bp)
sc                706 fs/xfs/scrub/agheader.c 	xchk_buffer_recheck(sc, sc->sa.agfl_bp);
sc                708 fs/xfs/scrub/agheader.c 	xchk_agfl_xref(sc);
sc                710 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                714 fs/xfs/scrub/agheader.c 	agf = XFS_BUF_TO_AGF(sc->sa.agf_bp);
sc                716 fs/xfs/scrub/agheader.c 	if (agflcount > xfs_agfl_size(sc->mp)) {
sc                717 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                721 fs/xfs/scrub/agheader.c 	sai.sc = sc;
sc                731 fs/xfs/scrub/agheader.c 	error = xfs_agfl_walk(sc->mp, XFS_BUF_TO_AGF(sc->sa.agf_bp),
sc                732 fs/xfs/scrub/agheader.c 			sc->sa.agfl_bp, xchk_agfl_block, &sai);
sc                741 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                750 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agf_bp);
sc                766 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                768 fs/xfs/scrub/agheader.c 	struct xfs_agi		*agi = XFS_BUF_TO_AGI(sc->sa.agi_bp);
sc                773 fs/xfs/scrub/agheader.c 	if (!sc->sa.ino_cur)
sc                776 fs/xfs/scrub/agheader.c 	error = xfs_ialloc_count_inodes(sc->sa.ino_cur, &icount, &freecount);
sc                777 fs/xfs/scrub/agheader.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.ino_cur))
sc                781 fs/xfs/scrub/agheader.c 		xchk_block_xref_set_corrupt(sc, sc->sa.agi_bp);
sc                787 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                789 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                793 fs/xfs/scrub/agheader.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                798 fs/xfs/scrub/agheader.c 	error = xchk_ag_btcur_init(sc, &sc->sa);
sc                802 fs/xfs/scrub/agheader.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                803 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_inode_chunk(sc, agbno, 1);
sc                804 fs/xfs/scrub/agheader.c 	xchk_agi_xref_icounts(sc);
sc                805 fs/xfs/scrub/agheader.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_FS);
sc                806 fs/xfs/scrub/agheader.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                814 fs/xfs/scrub/agheader.c 	struct xfs_scrub	*sc)
sc                816 fs/xfs/scrub/agheader.c 	struct xfs_mount	*mp = sc->mp;
sc                830 fs/xfs/scrub/agheader.c 	agno = sc->sa.agno = sc->sm->sm_agno;
sc                831 fs/xfs/scrub/agheader.c 	error = xchk_ag_read_headers(sc, agno, &sc->sa.agi_bp,
sc                832 fs/xfs/scrub/agheader.c 			&sc->sa.agf_bp, &sc->sa.agfl_bp);
sc                833 fs/xfs/scrub/agheader.c 	if (!xchk_process_error(sc, agno, XFS_AGI_BLOCK(sc->mp), &error))
sc                835 fs/xfs/scrub/agheader.c 	xchk_buffer_recheck(sc, sc->sa.agi_bp);
sc                837 fs/xfs/scrub/agheader.c 	agi = XFS_BUF_TO_AGI(sc->sa.agi_bp);
sc                842 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                847 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                851 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                856 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                860 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                868 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                873 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                877 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                883 fs/xfs/scrub/agheader.c 			xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                887 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                892 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                894 fs/xfs/scrub/agheader.c 		xchk_block_set_corrupt(sc, sc->sa.agi_bp);
sc                897 fs/xfs/scrub/agheader.c 	xchk_agi_xref(sc);
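The agheader.c entries above all follow the online-scrub pattern: read the on-disk header, compare every field against an independently derived value, and record findings with xchk_block_set_corrupt() or xchk_block_set_preen() instead of bailing out at the first mismatch, so one pass reports everything and cross-referencing only runs if nothing was already marked corrupt. A hedged sketch of that "record, don't bail" shape; the demo_* names and flag bits are hypothetical, not XFS's:

	#include <linux/types.h>
	#include <asm/byteorder.h>

	#define DEMO_FLAG_CORRUPT	(1u << 0)	/* metadata is wrong */
	#define DEMO_FLAG_PREEN		(1u << 1)	/* harmless, worth rewriting */

	struct demo_check_ctx {
		unsigned int	flags;		/* accumulated findings, like sc->sm->sm_flags */
	};

	struct demo_agh_ondisk {
		__be32 magic;
		__be32 seqno;
		__be32 length;
	};

	static void demo_set_corrupt(struct demo_check_ctx *ctx)
	{
		ctx->flags |= DEMO_FLAG_CORRUPT;
	}

	static void demo_set_preen(struct demo_check_ctx *ctx)
	{
		ctx->flags |= DEMO_FLAG_PREEN;
	}

	/*
	 * Check every field and keep going; the caller inspects ctx->flags at
	 * the end and skips cross-referencing if the corrupt bit is already set,
	 * the way xchk_agf_xref() bails when XFS_SCRUB_OFLAG_CORRUPT is present.
	 */
	static void demo_check_header(struct demo_check_ctx *ctx,
				      const struct demo_agh_ondisk *hdr,
				      u32 expect_magic, u32 expect_seqno,
				      u32 expect_length)
	{
		if (be32_to_cpu(hdr->magic) != expect_magic)
			demo_set_corrupt(ctx);

		/* A merely unset field is recoverable: preen, not corrupt. */
		if (hdr->seqno == 0)
			demo_set_preen(ctx);
		else if (be32_to_cpu(hdr->seqno) != expect_seqno)
			demo_set_corrupt(ctx);

		if (be32_to_cpu(hdr->length) != expect_length)
			demo_set_corrupt(ctx);
	}

The agheader_repair.c entries that follow are the other half of the same design: once scrub has flagged a header, repair rebuilds it from the surviving btrees rather than trusting any of the recorded fields.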
sc                 34 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc)
sc                 36 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                 42 fs/xfs/scrub/agheader_repair.c 	agno = sc->sm->sm_agno;
sc                 46 fs/xfs/scrub/agheader_repair.c 	error = xfs_sb_get_secondary(mp, sc->tp, agno, &bp);
sc                 55 fs/xfs/scrub/agheader_repair.c 	xfs_trans_buf_set_type(sc->tp, bp, XFS_BLFT_SB_BUF);
sc                 56 fs/xfs/scrub/agheader_repair.c 	xfs_trans_log_buf(sc->tp, bp, 0, BBTOB(bp->b_length) - 1);
sc                 63 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc;
sc                 78 fs/xfs/scrub/agheader_repair.c 	if (xchk_should_terminate(raa->sc, &error))
sc                 94 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc = priv;
sc                 96 fs/xfs/scrub/agheader_repair.c 	if (!xfs_verify_agbno(mp, sc->sa.agno, agbno))
sc                117 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc,
sc                120 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount		*mp = sc->mp;
sc                121 fs/xfs/scrub/agheader_repair.c 	xfs_agnumber_t			agno = sc->sm->sm_agno;
sc                138 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc,
sc                147 fs/xfs/scrub/agheader_repair.c 	error = xrep_find_ag_btree_roots(sc, agf_bp, fab, agfl_bp);
sc                152 fs/xfs/scrub/agheader_repair.c 	if (!xrep_check_btree_root(sc, &fab[XREP_AGF_BNOBT]) ||
sc                153 fs/xfs/scrub/agheader_repair.c 	    !xrep_check_btree_root(sc, &fab[XREP_AGF_CNTBT]) ||
sc                154 fs/xfs/scrub/agheader_repair.c 	    !xrep_check_btree_root(sc, &fab[XREP_AGF_RMAPBT]))
sc                166 fs/xfs/scrub/agheader_repair.c 	if (xfs_sb_version_hasreflink(&sc->mp->m_sb) &&
sc                167 fs/xfs/scrub/agheader_repair.c 	    !xrep_check_btree_root(sc, &fab[XREP_AGF_REFCOUNTBT]))
sc                179 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                183 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                190 fs/xfs/scrub/agheader_repair.c 	agf->agf_seqno = cpu_to_be32(sc->sa.agno);
sc                191 fs/xfs/scrub/agheader_repair.c 	agf->agf_length = cpu_to_be32(xfs_ag_block_count(mp, sc->sa.agno));
sc                199 fs/xfs/scrub/agheader_repair.c 	ASSERT(sc->sa.pag->pagf_init);
sc                200 fs/xfs/scrub/agheader_repair.c 	sc->sa.pag->pagf_init = 0;
sc                206 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc,
sc                225 fs/xfs/scrub/agheader_repair.c 	if (xfs_sb_version_hasreflink(&sc->mp->m_sb)) {
sc                236 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                239 fs/xfs/scrub/agheader_repair.c 	struct xrep_agf_allocbt	raa = { .sc = sc };
sc                242 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                248 fs/xfs/scrub/agheader_repair.c 	cur = xfs_allocbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno,
sc                262 fs/xfs/scrub/agheader_repair.c 	cur = xfs_allocbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno,
sc                271 fs/xfs/scrub/agheader_repair.c 	cur = xfs_rmapbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno);
sc                283 fs/xfs/scrub/agheader_repair.c 		cur = xfs_refcountbt_init_cursor(mp, sc->tp, agf_bp,
sc                284 fs/xfs/scrub/agheader_repair.c 				sc->sa.agno);
sc                301 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                308 fs/xfs/scrub/agheader_repair.c 	xfs_force_summary_recalc(sc->mp);
sc                311 fs/xfs/scrub/agheader_repair.c 	xfs_trans_buf_set_type(sc->tp, agf_bp, XFS_BLFT_AGF_BUF);
sc                312 fs/xfs/scrub/agheader_repair.c 	xfs_trans_log_buf(sc->tp, agf_bp, 0, BBTOB(agf_bp->b_length) - 1);
sc                315 fs/xfs/scrub/agheader_repair.c 	pag = sc->sa.pag;
sc                334 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc)
sc                358 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount		*mp = sc->mp;
sc                368 fs/xfs/scrub/agheader_repair.c 	xchk_perag_get(sc->mp, &sc->sa);
sc                373 fs/xfs/scrub/agheader_repair.c 	error = xfs_trans_read_buf(mp, sc->tp, mp->m_ddev_targp,
sc                374 fs/xfs/scrub/agheader_repair.c 			XFS_AG_DADDR(mp, sc->sa.agno, XFS_AGF_DADDR(mp)),
sc                390 fs/xfs/scrub/agheader_repair.c 	error = xfs_alloc_read_agfl(mp, sc->tp, sc->sa.agno, &agfl_bp);
sc                398 fs/xfs/scrub/agheader_repair.c 	error = xfs_agfl_walk(sc->mp, XFS_BUF_TO_AGF(agf_bp), agfl_bp,
sc                399 fs/xfs/scrub/agheader_repair.c 			xrep_agf_check_agfl_block, sc);
sc                407 fs/xfs/scrub/agheader_repair.c 	error = xrep_agf_find_btrees(sc, agf_bp, fab, agfl_bp);
sc                412 fs/xfs/scrub/agheader_repair.c 	xrep_agf_init_header(sc, agf_bp, &old_agf);
sc                413 fs/xfs/scrub/agheader_repair.c 	xrep_agf_set_roots(sc, agf, fab);
sc                414 fs/xfs/scrub/agheader_repair.c 	error = xrep_agf_calc_from_btrees(sc, agf_bp);
sc                419 fs/xfs/scrub/agheader_repair.c 	return xrep_agf_commit_new(sc, agf_bp);
sc                423 fs/xfs/scrub/agheader_repair.c 	sc->sa.pag->pagf_init = 0;
sc                437 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc;
sc                451 fs/xfs/scrub/agheader_repair.c 	if (xchk_should_terminate(ra->sc, &error))
sc                477 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                483 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                489 fs/xfs/scrub/agheader_repair.c 	ra.sc = sc;
sc                494 fs/xfs/scrub/agheader_repair.c 	cur = xfs_rmapbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno);
sc                501 fs/xfs/scrub/agheader_repair.c 	cur = xfs_allocbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno,
sc                509 fs/xfs/scrub/agheader_repair.c 	cur = xfs_allocbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno,
sc                549 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                555 fs/xfs/scrub/agheader_repair.c 	ASSERT(flcount <= xfs_agfl_size(sc->mp));
sc                558 fs/xfs/scrub/agheader_repair.c 	xfs_force_summary_recalc(sc->mp);
sc                561 fs/xfs/scrub/agheader_repair.c 	if (sc->sa.pag->pagf_init)
sc                562 fs/xfs/scrub/agheader_repair.c 		sc->sa.pag->pagf_flcount = flcount;
sc                567 fs/xfs/scrub/agheader_repair.c 	xfs_alloc_log_agf(sc->tp, agf_bp,
sc                574 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                579 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                596 fs/xfs/scrub/agheader_repair.c 	agfl->agfl_seqno = cpu_to_be32(sc->sa.agno);
sc                609 fs/xfs/scrub/agheader_repair.c 		trace_xrep_agfl_insert(mp, sc->sa.agno, agbno, br->len);
sc                631 fs/xfs/scrub/agheader_repair.c 	xfs_trans_buf_set_type(sc->tp, agfl_bp, XFS_BLFT_AGFL_BUF);
sc                632 fs/xfs/scrub/agheader_repair.c 	xfs_trans_log_buf(sc->tp, agfl_bp, 0, BBTOB(agfl_bp->b_length) - 1);
sc                638 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc)
sc                641 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                651 fs/xfs/scrub/agheader_repair.c 	xchk_perag_get(sc->mp, &sc->sa);
sc                659 fs/xfs/scrub/agheader_repair.c 	error = xfs_alloc_read_agf(mp, sc->tp, sc->sa.agno, 0, &agf_bp);
sc                669 fs/xfs/scrub/agheader_repair.c 	error = xfs_trans_read_buf(mp, sc->tp, mp->m_ddev_targp,
sc                670 fs/xfs/scrub/agheader_repair.c 			XFS_AG_DADDR(mp, sc->sa.agno, XFS_AGFL_DADDR(mp)),
sc                677 fs/xfs/scrub/agheader_repair.c 	error = xrep_agfl_collect_blocks(sc, agf_bp, &agfl_extents, &flcount);
sc                686 fs/xfs/scrub/agheader_repair.c 	xrep_agfl_update_agf(sc, agf_bp, flcount);
sc                687 fs/xfs/scrub/agheader_repair.c 	xrep_agfl_init_header(sc, agfl_bp, &agfl_extents, flcount);
sc                694 fs/xfs/scrub/agheader_repair.c 	sc->sa.agf_bp = agf_bp;
sc                695 fs/xfs/scrub/agheader_repair.c 	sc->sa.agfl_bp = agfl_bp;
sc                696 fs/xfs/scrub/agheader_repair.c 	error = xrep_roll_ag_trans(sc);
sc                701 fs/xfs/scrub/agheader_repair.c 	return xrep_reap_extents(sc, &agfl_extents, &XFS_RMAP_OINFO_AG,
sc                727 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc,
sc                731 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount		*mp = sc->mp;
sc                735 fs/xfs/scrub/agheader_repair.c 	error = xfs_alloc_read_agf(mp, sc->tp, sc->sa.agno, 0, &agf_bp);
sc                742 fs/xfs/scrub/agheader_repair.c 	error = xrep_find_ag_btree_roots(sc, agf_bp, fab, NULL);
sc                747 fs/xfs/scrub/agheader_repair.c 	if (!xrep_check_btree_root(sc, &fab[XREP_AGI_INOBT]))
sc                752 fs/xfs/scrub/agheader_repair.c 	    !xrep_check_btree_root(sc, &fab[XREP_AGI_FINOBT]))
sc                764 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                769 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                775 fs/xfs/scrub/agheader_repair.c 	agi->agi_seqno = cpu_to_be32(sc->sa.agno);
sc                776 fs/xfs/scrub/agheader_repair.c 	agi->agi_length = cpu_to_be32(xfs_ag_block_count(mp, sc->sa.agno));
sc                787 fs/xfs/scrub/agheader_repair.c 	ASSERT(sc->sa.pag->pagi_init);
sc                788 fs/xfs/scrub/agheader_repair.c 	sc->sa.pag->pagi_init = 0;
sc                794 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc,
sc                801 fs/xfs/scrub/agheader_repair.c 	if (xfs_sb_version_hasfinobt(&sc->mp->m_sb)) {
sc                810 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                815 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount	*mp = sc->mp;
sc                820 fs/xfs/scrub/agheader_repair.c 	cur = xfs_inobt_init_cursor(mp, sc->tp, agi_bp, sc->sa.agno,
sc                838 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub	*sc,
sc                845 fs/xfs/scrub/agheader_repair.c 	xfs_force_summary_recalc(sc->mp);
sc                848 fs/xfs/scrub/agheader_repair.c 	xfs_trans_buf_set_type(sc->tp, agi_bp, XFS_BLFT_AGI_BUF);
sc                849 fs/xfs/scrub/agheader_repair.c 	xfs_trans_log_buf(sc->tp, agi_bp, 0, BBTOB(agi_bp->b_length) - 1);
sc                852 fs/xfs/scrub/agheader_repair.c 	pag = sc->sa.pag;
sc                863 fs/xfs/scrub/agheader_repair.c 	struct xfs_scrub		*sc)
sc                879 fs/xfs/scrub/agheader_repair.c 	struct xfs_mount		*mp = sc->mp;
sc                888 fs/xfs/scrub/agheader_repair.c 	xchk_perag_get(sc->mp, &sc->sa);
sc                893 fs/xfs/scrub/agheader_repair.c 	error = xfs_trans_read_buf(mp, sc->tp, mp->m_ddev_targp,
sc                894 fs/xfs/scrub/agheader_repair.c 			XFS_AG_DADDR(mp, sc->sa.agno, XFS_AGI_DADDR(mp)),
sc                902 fs/xfs/scrub/agheader_repair.c 	error = xrep_agi_find_btrees(sc, fab);
sc                907 fs/xfs/scrub/agheader_repair.c 	xrep_agi_init_header(sc, agi_bp, &old_agi);
sc                908 fs/xfs/scrub/agheader_repair.c 	xrep_agi_set_roots(sc, agi, fab);
sc                909 fs/xfs/scrub/agheader_repair.c 	error = xrep_agi_calc_from_btrees(sc, agi_bp);
sc                914 fs/xfs/scrub/agheader_repair.c 	return xrep_agi_commit_new(sc, agi_bp);
sc                918 fs/xfs/scrub/agheader_repair.c 	sc->sa.pag->pagi_init = 0;
sc                 24 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc,
sc                 27 fs/xfs/scrub/alloc.c 	return xchk_setup_ag_btree(sc, ip, false);
sc                 37 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc,
sc                 47 fs/xfs/scrub/alloc.c 	if (sc->sm->sm_type == XFS_SCRUB_TYPE_BNOBT)
sc                 48 fs/xfs/scrub/alloc.c 		pcur = &sc->sa.cnt_cur;
sc                 50 fs/xfs/scrub/alloc.c 		pcur = &sc->sa.bno_cur;
sc                 51 fs/xfs/scrub/alloc.c 	if (!*pcur || xchk_skip_xref(sc->sm))
sc                 55 fs/xfs/scrub/alloc.c 	if (!xchk_should_check_xref(sc, &error, pcur))
sc                 58 fs/xfs/scrub/alloc.c 		xchk_btree_xref_set_corrupt(sc, *pcur, 0);
sc                 63 fs/xfs/scrub/alloc.c 	if (!xchk_should_check_xref(sc, &error, pcur))
sc                 66 fs/xfs/scrub/alloc.c 		xchk_btree_xref_set_corrupt(sc, *pcur, 0);
sc                 71 fs/xfs/scrub/alloc.c 		xchk_btree_xref_set_corrupt(sc, *pcur, 0);
sc                 77 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc,
sc                 81 fs/xfs/scrub/alloc.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                 84 fs/xfs/scrub/alloc.c 	xchk_allocbt_xref_other(sc, agbno, len);
sc                 85 fs/xfs/scrub/alloc.c 	xchk_xref_is_not_inode_chunk(sc, agbno, len);
sc                 86 fs/xfs/scrub/alloc.c 	xchk_xref_has_no_owner(sc, agbno, len);
sc                 87 fs/xfs/scrub/alloc.c 	xchk_xref_is_not_shared(sc, agbno, len);
sc                107 fs/xfs/scrub/alloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                109 fs/xfs/scrub/alloc.c 	xchk_allocbt_xref(bs->sc, bno, len);
sc                117 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc,
sc                122 fs/xfs/scrub/alloc.c 	cur = which == XFS_BTNUM_BNO ? sc->sa.bno_cur : sc->sa.cnt_cur;
sc                123 fs/xfs/scrub/alloc.c 	return xchk_btree(sc, cur, xchk_allocbt_rec, &XFS_RMAP_OINFO_AG, NULL);
sc                128 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc)
sc                130 fs/xfs/scrub/alloc.c 	return xchk_allocbt(sc, XFS_BTNUM_BNO);
sc                135 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc)
sc                137 fs/xfs/scrub/alloc.c 	return xchk_allocbt(sc, XFS_BTNUM_CNT);
sc                143 fs/xfs/scrub/alloc.c 	struct xfs_scrub	*sc,
sc                150 fs/xfs/scrub/alloc.c 	if (!sc->sa.bno_cur || xchk_skip_xref(sc->sm))
sc                153 fs/xfs/scrub/alloc.c 	error = xfs_alloc_has_record(sc->sa.bno_cur, agbno, len, &is_freesp);
sc                154 fs/xfs/scrub/alloc.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.bno_cur))
sc                157 fs/xfs/scrub/alloc.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.bno_cur, 0);
sc                 30 fs/xfs/scrub/attr.c 	struct xfs_scrub	*sc,
sc                 35 fs/xfs/scrub/attr.c 	struct xchk_xattr_buf	*ab = sc->buf;
sc                 42 fs/xfs/scrub/attr.c 	sz = 3 * sizeof(long) * BITS_TO_LONGS(sc->mp->m_attr_geo->blksize);
sc                 53 fs/xfs/scrub/attr.c 		sc->buf = NULL;
sc                 65 fs/xfs/scrub/attr.c 	sc->buf = ab;
sc                 72 fs/xfs/scrub/attr.c 	struct xfs_scrub	*sc,
sc                 82 fs/xfs/scrub/attr.c 	if (sc->flags & XCHK_TRY_HARDER) {
sc                 83 fs/xfs/scrub/attr.c 		error = xchk_setup_xattr_buf(sc, XATTR_SIZE_MAX, 0);
sc                 88 fs/xfs/scrub/attr.c 	return xchk_setup_inode_contents(sc, ip, 0);
sc                 95 fs/xfs/scrub/attr.c 	struct xfs_scrub		*sc;
sc                120 fs/xfs/scrub/attr.c 	if (xchk_should_terminate(sx->sc, &error)) {
sc                127 fs/xfs/scrub/attr.c 		xchk_ino_set_preen(sx->sc, context->dp->i_ino);
sc                133 fs/xfs/scrub/attr.c 		xchk_fblock_set_corrupt(sx->sc, XFS_ATTR_FORK, args.blkno);
sc                142 fs/xfs/scrub/attr.c 	error = xchk_setup_xattr_buf(sx->sc, valuelen, KM_MAYFAIL);
sc                162 fs/xfs/scrub/attr.c 	args.value = xchk_xattr_valuebuf(sx->sc);
sc                166 fs/xfs/scrub/attr.c 	if (!xchk_fblock_process_error(sx->sc, XFS_ATTR_FORK, args.blkno,
sc                170 fs/xfs/scrub/attr.c 		xchk_fblock_set_corrupt(sx->sc, XFS_ATTR_FORK,
sc                173 fs/xfs/scrub/attr.c 	if (sx->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                187 fs/xfs/scrub/attr.c 	struct xfs_scrub	*sc,
sc                192 fs/xfs/scrub/attr.c 	unsigned int		mapsize = sc->mp->m_attr_geo->blksize;
sc                215 fs/xfs/scrub/attr.c 	struct xfs_scrub		*sc,
sc                219 fs/xfs/scrub/attr.c 	unsigned long			*freemap = xchk_xattr_freemap(sc);
sc                220 fs/xfs/scrub/attr.c 	unsigned long			*dstmap = xchk_xattr_dstmap(sc);
sc                221 fs/xfs/scrub/attr.c 	unsigned int			mapsize = sc->mp->m_attr_geo->blksize;
sc                227 fs/xfs/scrub/attr.c 		if (!xchk_xattr_set_map(sc, freemap,
sc                254 fs/xfs/scrub/attr.c 	unsigned long			*usedmap = xchk_xattr_usedmap(ds->sc);
sc                294 fs/xfs/scrub/attr.c 	if (!xchk_xattr_set_map(ds->sc, usedmap, nameidx, namesize))
sc                296 fs/xfs/scrub/attr.c 	if (!(ds->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                327 fs/xfs/scrub/attr.c 	error = xchk_setup_xattr_buf(ds->sc, 0, KM_MAYFAIL);
sc                332 fs/xfs/scrub/attr.c 	usedmap = xchk_xattr_usedmap(ds->sc);
sc                338 fs/xfs/scrub/attr.c 	if (xfs_sb_version_hascrc(&ds->sc->mp->m_sb)) {
sc                359 fs/xfs/scrub/attr.c 	if (!xchk_xattr_set_map(ds->sc, usedmap, 0, hdrsize))
sc                362 fs/xfs/scrub/attr.c 	if (ds->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                373 fs/xfs/scrub/attr.c 		if (!xchk_xattr_set_map(ds->sc, usedmap, off,
sc                383 fs/xfs/scrub/attr.c 		if (ds->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                387 fs/xfs/scrub/attr.c 	if (!xchk_xattr_check_freemap(ds->sc, usedmap, &leafhdr))
sc                423 fs/xfs/scrub/attr.c 	if (ds->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                473 fs/xfs/scrub/attr.c 	struct xfs_scrub		*sc)
sc                480 fs/xfs/scrub/attr.c 	if (!xfs_inode_hasattr(sc->ip))
sc                485 fs/xfs/scrub/attr.c 	error = xchk_da_btree(sc, XFS_ATTR_FORK, xchk_xattr_rec,
sc                490 fs/xfs/scrub/attr.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                494 fs/xfs/scrub/attr.c 	sx.context.dp = sc->ip;
sc                498 fs/xfs/scrub/attr.c 	sx.context.tp = sc->tp;
sc                500 fs/xfs/scrub/attr.c 	sx.sc = sc;
sc                519 fs/xfs/scrub/attr.c 	if (!xchk_fblock_process_error(sc, XFS_ATTR_FORK, 0, &error))
sc                 33 fs/xfs/scrub/attr.h 	struct xfs_scrub	*sc)
sc                 35 fs/xfs/scrub/attr.h 	struct xchk_xattr_buf	*ab = sc->buf;
sc                 43 fs/xfs/scrub/attr.h 	struct xfs_scrub	*sc)
sc                 45 fs/xfs/scrub/attr.h 	struct xchk_xattr_buf	*ab = sc->buf;
sc                 53 fs/xfs/scrub/attr.h 	struct xfs_scrub	*sc)
sc                 55 fs/xfs/scrub/attr.h 	return xchk_xattr_usedmap(sc) +
sc                 56 fs/xfs/scrub/attr.h 			BITS_TO_LONGS(sc->mp->m_attr_geo->blksize);
sc                 62 fs/xfs/scrub/attr.h 	struct xfs_scrub	*sc)
sc                 64 fs/xfs/scrub/attr.h 	return xchk_xattr_freemap(sc) +
sc                 65 fs/xfs/scrub/attr.h 			BITS_TO_LONGS(sc->mp->m_attr_geo->blksize);
sc                 68 fs/xfs/scrub/attr.h int xchk_setup_xattr_buf(struct xfs_scrub *sc, size_t value_size,
sc                 29 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc,
sc                 34 fs/xfs/scrub/bmap.c 	error = xchk_get_inode(sc, ip);
sc                 38 fs/xfs/scrub/bmap.c 	sc->ilock_flags = XFS_IOLOCK_EXCL | XFS_MMAPLOCK_EXCL;
sc                 39 fs/xfs/scrub/bmap.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                 46 fs/xfs/scrub/bmap.c 	if (S_ISREG(VFS_I(sc->ip)->i_mode) &&
sc                 47 fs/xfs/scrub/bmap.c 	    sc->sm->sm_type == XFS_SCRUB_TYPE_BMBTD) {
sc                 48 fs/xfs/scrub/bmap.c 		inode_dio_wait(VFS_I(sc->ip));
sc                 49 fs/xfs/scrub/bmap.c 		error = filemap_write_and_wait(VFS_I(sc->ip)->i_mapping);
sc                 55 fs/xfs/scrub/bmap.c 	error = xchk_trans_alloc(sc, 0);
sc                 58 fs/xfs/scrub/bmap.c 	sc->ilock_flags |= XFS_ILOCK_EXCL;
sc                 59 fs/xfs/scrub/bmap.c 	xfs_ilock(sc->ip, XFS_ILOCK_EXCL);
sc                 74 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc;
sc                114 fs/xfs/scrub/bmap.c 		error = xfs_rmap_lookup_le_range(info->sc->sa.rmap_cur, agbno,
sc                116 fs/xfs/scrub/bmap.c 		if (!xchk_should_check_xref(info->sc, &error,
sc                117 fs/xfs/scrub/bmap.c 				&info->sc->sa.rmap_cur))
sc                125 fs/xfs/scrub/bmap.c 	error = xfs_rmap_lookup_le(info->sc->sa.rmap_cur, agbno, 0, owner,
sc                127 fs/xfs/scrub/bmap.c 	if (!xchk_should_check_xref(info->sc, &error,
sc                128 fs/xfs/scrub/bmap.c 			&info->sc->sa.rmap_cur))
sc                133 fs/xfs/scrub/bmap.c 	error = xfs_rmap_get_rec(info->sc->sa.rmap_cur, rmap, &has_rmap);
sc                134 fs/xfs/scrub/bmap.c 	if (!xchk_should_check_xref(info->sc, &error,
sc                135 fs/xfs/scrub/bmap.c 			&info->sc->sa.rmap_cur))
sc                140 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                156 fs/xfs/scrub/bmap.c 	if (!info->sc->sa.rmap_cur || xchk_skip_xref(info->sc->sm))
sc                162 fs/xfs/scrub/bmap.c 		owner = info->sc->ip->i_ino;
sc                172 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                185 fs/xfs/scrub/bmap.c 			xchk_fblock_xref_set_corrupt(info->sc,
sc                190 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                203 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                208 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                211 fs/xfs/scrub/bmap.c 		xchk_fblock_xref_set_corrupt(info->sc, info->whichfork,
sc                222 fs/xfs/scrub/bmap.c 	xchk_xref_is_used_rt_space(info->sc, irec->br_startblock,
sc                233 fs/xfs/scrub/bmap.c 	struct xfs_mount	*mp = info->sc->mp;
sc                243 fs/xfs/scrub/bmap.c 	error = xchk_ag_init(info->sc, agno, &info->sc->sa);
sc                244 fs/xfs/scrub/bmap.c 	if (!xchk_fblock_process_error(info->sc, info->whichfork,
sc                248 fs/xfs/scrub/bmap.c 	xchk_xref_is_used_space(info->sc, agbno, len);
sc                249 fs/xfs/scrub/bmap.c 	xchk_xref_is_not_inode_chunk(info->sc, agbno, len);
sc                253 fs/xfs/scrub/bmap.c 		if (xfs_is_reflink_inode(info->sc->ip))
sc                257 fs/xfs/scrub/bmap.c 		xchk_xref_is_not_shared(info->sc, agbno,
sc                261 fs/xfs/scrub/bmap.c 		xchk_xref_is_cow_staging(info->sc, agbno,
sc                266 fs/xfs/scrub/bmap.c 	xchk_ag_free(info->sc, &info->sc->sa);
sc                286 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                291 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork, off);
sc                301 fs/xfs/scrub/bmap.c 	struct xfs_mount	*mp = info->sc->mp;
sc                310 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                317 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                325 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                330 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                333 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                339 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                346 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                352 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(info->sc, info->whichfork,
sc                355 fs/xfs/scrub/bmap.c 	if (info->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                394 fs/xfs/scrub/bmap.c 				xchk_fblock_set_corrupt(bs->sc,
sc                416 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(bs->sc, info->whichfork,
sc                424 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc,
sc                429 fs/xfs/scrub/bmap.c 	struct xfs_ifork	*ifp = XFS_IFORK_PTR(sc->ip, whichfork);
sc                430 fs/xfs/scrub/bmap.c 	struct xfs_mount	*mp = sc->mp;
sc                431 fs/xfs/scrub/bmap.c 	struct xfs_inode	*ip = sc->ip;
sc                438 fs/xfs/scrub/bmap.c 		error = xfs_iread_extents(sc->tp, ip, whichfork);
sc                439 fs/xfs/scrub/bmap.c 		if (!xchk_fblock_process_error(sc, whichfork, 0, &error))
sc                444 fs/xfs/scrub/bmap.c 	cur = xfs_bmbt_init_cursor(mp, sc->tp, ip, whichfork);
sc                446 fs/xfs/scrub/bmap.c 	error = xchk_btree(sc, cur, xchk_bmapbt_rec, &oinfo, info);
sc                453 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc;
sc                468 fs/xfs/scrub/bmap.c 	struct xfs_scrub		*sc = sbcri->sc;
sc                472 fs/xfs/scrub/bmap.c 	if (rec->rm_owner != sc->ip->i_ino)
sc                481 fs/xfs/scrub/bmap.c 	ifp = XFS_IFORK_PTR(sc->ip, sbcri->whichfork);
sc                483 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                487 fs/xfs/scrub/bmap.c 	have_map = xfs_iext_lookup_extent(sc->ip, ifp, rec->rm_offset,
sc                490 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                501 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                503 fs/xfs/scrub/bmap.c 		if (irec.br_startblock != XFS_AGB_TO_FSB(sc->mp,
sc                505 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                508 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                510 fs/xfs/scrub/bmap.c 		if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                519 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, sbcri->whichfork,
sc                524 fs/xfs/scrub/bmap.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                532 fs/xfs/scrub/bmap.c 	struct xfs_scrub		*sc,
sc                541 fs/xfs/scrub/bmap.c 	error = xfs_alloc_read_agf(sc->mp, sc->tp, agno, 0, &agf);
sc                545 fs/xfs/scrub/bmap.c 	cur = xfs_rmapbt_init_cursor(sc->mp, sc->tp, agf, agno);
sc                551 fs/xfs/scrub/bmap.c 	sbcri.sc = sc;
sc                559 fs/xfs/scrub/bmap.c 	xfs_trans_brelse(sc->tp, agf);
sc                566 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc,
sc                573 fs/xfs/scrub/bmap.c 	if (!xfs_sb_version_hasrmapbt(&sc->mp->m_sb) ||
sc                575 fs/xfs/scrub/bmap.c 	    (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                579 fs/xfs/scrub/bmap.c 	if (XFS_IS_REALTIME_INODE(sc->ip) && whichfork == XFS_DATA_FORK)
sc                591 fs/xfs/scrub/bmap.c 		size = i_size_read(VFS_I(sc->ip));
sc                594 fs/xfs/scrub/bmap.c 		size = XFS_IFORK_Q(sc->ip);
sc                600 fs/xfs/scrub/bmap.c 	if (XFS_IFORK_FORMAT(sc->ip, whichfork) != XFS_DINODE_FMT_BTREE &&
sc                601 fs/xfs/scrub/bmap.c 	    (size == 0 || XFS_IFORK_NEXTENTS(sc->ip, whichfork) > 0))
sc                604 fs/xfs/scrub/bmap.c 	for (agno = 0; agno < sc->mp->m_sb.sb_agcount; agno++) {
sc                605 fs/xfs/scrub/bmap.c 		error = xchk_bmap_check_ag_rmaps(sc, whichfork, agno);
sc                608 fs/xfs/scrub/bmap.c 		if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                623 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc,
sc                628 fs/xfs/scrub/bmap.c 	struct xfs_mount	*mp = sc->mp;
sc                629 fs/xfs/scrub/bmap.c 	struct xfs_inode	*ip = sc->ip;
sc                640 fs/xfs/scrub/bmap.c 	info.sc = sc;
sc                649 fs/xfs/scrub/bmap.c 			xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                658 fs/xfs/scrub/bmap.c 			xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                674 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, whichfork, 0);
sc                680 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, whichfork, 0);
sc                684 fs/xfs/scrub/bmap.c 		error = xchk_bmap_btree(sc, whichfork, &info);
sc                689 fs/xfs/scrub/bmap.c 		xchk_fblock_set_corrupt(sc, whichfork, 0);
sc                693 fs/xfs/scrub/bmap.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                698 fs/xfs/scrub/bmap.c 	if (!xchk_fblock_process_error(sc, whichfork, 0, &error))
sc                705 fs/xfs/scrub/bmap.c 		if (xchk_should_terminate(sc, &error) ||
sc                706 fs/xfs/scrub/bmap.c 		    (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                711 fs/xfs/scrub/bmap.c 			xchk_fblock_set_corrupt(sc, whichfork,
sc                721 fs/xfs/scrub/bmap.c 	error = xchk_bmap_check_rmaps(sc, whichfork);
sc                722 fs/xfs/scrub/bmap.c 	if (!xchk_fblock_xref_process_error(sc, whichfork, 0, &error))
sc                731 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc)
sc                733 fs/xfs/scrub/bmap.c 	return xchk_bmap(sc, XFS_DATA_FORK);
sc                739 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc)
sc                741 fs/xfs/scrub/bmap.c 	return xchk_bmap(sc, XFS_ATTR_FORK);
sc                747 fs/xfs/scrub/bmap.c 	struct xfs_scrub	*sc)
sc                749 fs/xfs/scrub/bmap.c 	if (!xfs_is_reflink_inode(sc->ip))
sc                752 fs/xfs/scrub/bmap.c 	return xchk_bmap(sc, XFS_COW_FORK);
sc                 26 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                 39 fs/xfs/scrub/btree.c 		trace_xchk_deadlock_retry(sc->ip, sc->sm, *error);
sc                 44 fs/xfs/scrub/btree.c 		sc->sm->sm_flags |= errflag;
sc                 49 fs/xfs/scrub/btree.c 			trace_xchk_ifork_btree_op_error(sc, cur, level,
sc                 52 fs/xfs/scrub/btree.c 			trace_xchk_btree_op_error(sc, cur, level,
sc                 61 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                 66 fs/xfs/scrub/btree.c 	return __xchk_btree_process_error(sc, cur, level, error,
sc                 72 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                 77 fs/xfs/scrub/btree.c 	return __xchk_btree_process_error(sc, cur, level, error,
sc                 84 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                 90 fs/xfs/scrub/btree.c 	sc->sm->sm_flags |= errflag;
sc                 93 fs/xfs/scrub/btree.c 		trace_xchk_ifork_btree_error(sc, cur, level,
sc                 96 fs/xfs/scrub/btree.c 		trace_xchk_btree_error(sc, cur, level,
sc                102 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                106 fs/xfs/scrub/btree.c 	__xchk_btree_set_corrupt(sc, cur, level, XFS_SCRUB_OFLAG_CORRUPT,
sc                112 fs/xfs/scrub/btree.c 	struct xfs_scrub	*sc,
sc                116 fs/xfs/scrub/btree.c 	__xchk_btree_set_corrupt(sc, cur, level, XFS_SCRUB_OFLAG_XCORRUPT,
sc                140 fs/xfs/scrub/btree.c 	trace_xchk_btree_rec(bs->sc, cur, 0);
sc                144 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, 0);
sc                156 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, 1);
sc                165 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, 1);
sc                187 fs/xfs/scrub/btree.c 	trace_xchk_btree_key(bs->sc, cur, level);
sc                192 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, level);
sc                203 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, level);
sc                212 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, level);
sc                238 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, level);
sc                260 fs/xfs/scrub/btree.c 	if (!xchk_btree_process_error(bs->sc, cur, level + 1, &error) ||
sc                274 fs/xfs/scrub/btree.c 			xchk_btree_set_corrupt(bs->sc, cur, level);
sc                284 fs/xfs/scrub/btree.c 	if (!xchk_btree_process_error(bs->sc, cur, level + 1, &error))
sc                287 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, level + 1);
sc                297 fs/xfs/scrub/btree.c 		xchk_buffer_recheck(bs->sc, pbp);
sc                300 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, level);
sc                326 fs/xfs/scrub/btree.c 			xchk_btree_set_corrupt(bs->sc, cur, level);
sc                376 fs/xfs/scrub/btree.c 		error = xchk_ag_init(bs->sc, agno, &bs->sc->sa);
sc                377 fs/xfs/scrub/btree.c 		if (!xchk_btree_xref_process_error(bs->sc, bs->cur,
sc                382 fs/xfs/scrub/btree.c 	xchk_xref_is_used_space(bs->sc, agbno, 1);
sc                388 fs/xfs/scrub/btree.c 	if (!bs->sc->sa.bno_cur && btnum == XFS_BTNUM_BNO)
sc                391 fs/xfs/scrub/btree.c 	xchk_xref_is_owned_by(bs->sc, agbno, 1, bs->oinfo);
sc                392 fs/xfs/scrub/btree.c 	if (!bs->sc->sa.rmap_cur && btnum == XFS_BTNUM_RMAP)
sc                396 fs/xfs/scrub/btree.c 		xchk_ag_free(bs->sc, &bs->sc->sa);
sc                419 fs/xfs/scrub/btree.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, level);
sc                480 fs/xfs/scrub/btree.c 	xchk_btree_set_corrupt(bs->sc, bs->cur, level);
sc                502 fs/xfs/scrub/btree.c 	if (!xchk_btree_process_error(bs->sc, bs->cur, level, &error) ||
sc                514 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, level);
sc                518 fs/xfs/scrub/btree.c 		xchk_buffer_recheck(bs->sc, *pbp);
sc                567 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, 1);
sc                578 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(bs->sc, cur, 1);
sc                588 fs/xfs/scrub/btree.c 	struct xfs_scrub		*sc,
sc                600 fs/xfs/scrub/btree.c 		.sc			= sc,
sc                620 fs/xfs/scrub/btree.c 		xchk_btree_set_corrupt(sc, cur, 0);
sc                660 fs/xfs/scrub/btree.c 			if (xchk_should_terminate(sc, &error) ||
sc                661 fs/xfs/scrub/btree.c 			    (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                 12 fs/xfs/scrub/btree.h bool xchk_btree_process_error(struct xfs_scrub *sc,
sc                 16 fs/xfs/scrub/btree.h bool xchk_btree_xref_process_error(struct xfs_scrub *sc,
sc                 20 fs/xfs/scrub/btree.h void xchk_btree_set_corrupt(struct xfs_scrub *sc,
sc                 24 fs/xfs/scrub/btree.h void xchk_btree_xref_set_corrupt(struct xfs_scrub *sc,
sc                 34 fs/xfs/scrub/btree.h 	struct xfs_scrub		*sc;
sc                 47 fs/xfs/scrub/btree.h int xchk_btree(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                 65 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                 77 fs/xfs/scrub/common.c 		trace_xchk_deadlock_retry(sc->ip, sc->sm, *error);
sc                 82 fs/xfs/scrub/common.c 		sc->sm->sm_flags |= errflag;
sc                 86 fs/xfs/scrub/common.c 		trace_xchk_op_error(sc, agno, bno, *error,
sc                 95 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                100 fs/xfs/scrub/common.c 	return __xchk_process_error(sc, agno, bno, error,
sc                106 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                111 fs/xfs/scrub/common.c 	return __xchk_process_error(sc, agno, bno, error,
sc                118 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                130 fs/xfs/scrub/common.c 		trace_xchk_deadlock_retry(sc->ip, sc->sm, *error);
sc                135 fs/xfs/scrub/common.c 		sc->sm->sm_flags |= errflag;
sc                139 fs/xfs/scrub/common.c 		trace_xchk_file_op_error(sc, whichfork, offset, *error,
sc                148 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                153 fs/xfs/scrub/common.c 	return __xchk_fblock_process_error(sc, whichfork, offset, error,
sc                159 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                164 fs/xfs/scrub/common.c 	return __xchk_fblock_process_error(sc, whichfork, offset, error,
sc                183 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                186 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_PREEN;
sc                187 fs/xfs/scrub/common.c 	trace_xchk_block_preen(sc, bp->b_bn, __return_address);
sc                197 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                200 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_PREEN;
sc                201 fs/xfs/scrub/common.c 	trace_xchk_ino_preen(sc, ino, __return_address);
sc                207 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc)
sc                209 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                210 fs/xfs/scrub/common.c 	trace_xchk_fs_error(sc, 0, __return_address);
sc                216 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                219 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                220 fs/xfs/scrub/common.c 	trace_xchk_block_error(sc, bp->b_bn, __return_address);
sc                226 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                229 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_XCORRUPT;
sc                230 fs/xfs/scrub/common.c 	trace_xchk_block_error(sc, bp->b_bn, __return_address);
sc                240 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                243 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                244 fs/xfs/scrub/common.c 	trace_xchk_ino_error(sc, ino, __return_address);
sc                250 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                253 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_XCORRUPT;
sc                254 fs/xfs/scrub/common.c 	trace_xchk_ino_error(sc, ino, __return_address);
sc                260 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                264 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                265 fs/xfs/scrub/common.c 	trace_xchk_fblock_error(sc, whichfork, offset, __return_address);
sc                271 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                275 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_XCORRUPT;
sc                276 fs/xfs/scrub/common.c 	trace_xchk_fblock_error(sc, whichfork, offset, __return_address);
sc                285 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                288 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_WARNING;
sc                289 fs/xfs/scrub/common.c 	trace_xchk_ino_warning(sc, ino, __return_address);
sc                295 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                299 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_WARNING;
sc                300 fs/xfs/scrub/common.c 	trace_xchk_fblock_warning(sc, whichfork, offset, __return_address);
sc                306 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc)
sc                308 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_INCOMPLETE;
sc                309 fs/xfs/scrub/common.c 	trace_xchk_incomplete(sc, __return_address);
sc                350 fs/xfs/scrub/common.c 	struct xfs_scrub		*sc,
sc                376 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                380 fs/xfs/scrub/common.c 	if (sc->sm->sm_type != XFS_SCRUB_TYPE_AGF &&
sc                381 fs/xfs/scrub/common.c 	    sc->sm->sm_type != XFS_SCRUB_TYPE_AGFL &&
sc                382 fs/xfs/scrub/common.c 	    sc->sm->sm_type != XFS_SCRUB_TYPE_AGI)
sc                389 fs/xfs/scrub/common.c 	if (sc->sm->sm_type == type)
sc                403 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                409 fs/xfs/scrub/common.c 	struct xfs_mount	*mp = sc->mp;
sc                412 fs/xfs/scrub/common.c 	error = xfs_ialloc_read_agi(mp, sc->tp, agno, agi);
sc                413 fs/xfs/scrub/common.c 	if (error && want_ag_read_header_failure(sc, XFS_SCRUB_TYPE_AGI))
sc                416 fs/xfs/scrub/common.c 	error = xfs_alloc_read_agf(mp, sc->tp, agno, 0, agf);
sc                417 fs/xfs/scrub/common.c 	if (error && want_ag_read_header_failure(sc, XFS_SCRUB_TYPE_AGF))
sc                420 fs/xfs/scrub/common.c 	error = xfs_alloc_read_agfl(mp, sc->tp, agno, agfl);
sc                421 fs/xfs/scrub/common.c 	if (error && want_ag_read_header_failure(sc, XFS_SCRUB_TYPE_AGFL))
sc                457 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                460 fs/xfs/scrub/common.c 	struct xfs_mount	*mp = sc->mp;
sc                463 fs/xfs/scrub/common.c 	xchk_perag_get(sc->mp, sa);
sc                465 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_BNO)) {
sc                467 fs/xfs/scrub/common.c 		sa->bno_cur = xfs_allocbt_init_cursor(mp, sc->tp, sa->agf_bp,
sc                474 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_CNT)) {
sc                476 fs/xfs/scrub/common.c 		sa->cnt_cur = xfs_allocbt_init_cursor(mp, sc->tp, sa->agf_bp,
sc                484 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_INO)) {
sc                485 fs/xfs/scrub/common.c 		sa->ino_cur = xfs_inobt_init_cursor(mp, sc->tp, sa->agi_bp,
sc                493 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_FINO)) {
sc                494 fs/xfs/scrub/common.c 		sa->fino_cur = xfs_inobt_init_cursor(mp, sc->tp, sa->agi_bp,
sc                502 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_RMAP)) {
sc                503 fs/xfs/scrub/common.c 		sa->rmap_cur = xfs_rmapbt_init_cursor(mp, sc->tp, sa->agf_bp,
sc                511 fs/xfs/scrub/common.c 	    xchk_ag_btree_healthy_enough(sc, sa->pag, XFS_BTNUM_REFC)) {
sc                512 fs/xfs/scrub/common.c 		sa->refc_cur = xfs_refcountbt_init_cursor(mp, sc->tp,
sc                526 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                531 fs/xfs/scrub/common.c 		xfs_trans_brelse(sc->tp, sa->agfl_bp);
sc                535 fs/xfs/scrub/common.c 		xfs_trans_brelse(sc->tp, sa->agf_bp);
sc                539 fs/xfs/scrub/common.c 		xfs_trans_brelse(sc->tp, sa->agi_bp);
sc                558 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                565 fs/xfs/scrub/common.c 	error = xchk_ag_read_headers(sc, agno, &sa->agi_bp,
sc                570 fs/xfs/scrub/common.c 	return xchk_ag_btcur_init(sc, sa);
sc                600 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                603 fs/xfs/scrub/common.c 	if (sc->sm->sm_flags & XFS_SCRUB_IFLAG_REPAIR)
sc                604 fs/xfs/scrub/common.c 		return xfs_trans_alloc(sc->mp, &M_RES(sc->mp)->tr_itruncate,
sc                605 fs/xfs/scrub/common.c 				resblks, 0, 0, &sc->tp);
sc                607 fs/xfs/scrub/common.c 	return xfs_trans_alloc_empty(sc->mp, &sc->tp);
sc                613 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                618 fs/xfs/scrub/common.c 	resblks = xrep_calc_ag_resblks(sc);
sc                619 fs/xfs/scrub/common.c 	return xchk_trans_alloc(sc, resblks);
sc                625 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                629 fs/xfs/scrub/common.c 	struct xfs_mount	*mp = sc->mp;
sc                644 fs/xfs/scrub/common.c 	error = xchk_setup_fs(sc, ip);
sc                648 fs/xfs/scrub/common.c 	return xchk_ag_init(sc, sc->sm->sm_agno, &sc->sa);
sc                672 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                676 fs/xfs/scrub/common.c 	struct xfs_mount	*mp = sc->mp;
sc                681 fs/xfs/scrub/common.c 	if (sc->sm->sm_ino == 0 || sc->sm->sm_ino == ip_in->i_ino) {
sc                682 fs/xfs/scrub/common.c 		sc->ip = ip_in;
sc                687 fs/xfs/scrub/common.c 	if (xfs_internal_inum(mp, sc->sm->sm_ino))
sc                689 fs/xfs/scrub/common.c 	error = xfs_iget(mp, NULL, sc->sm->sm_ino,
sc                711 fs/xfs/scrub/common.c 		error = xfs_imap(sc->mp, sc->tp, sc->sm->sm_ino, &imap,
sc                718 fs/xfs/scrub/common.c 		trace_xchk_op_error(sc,
sc                719 fs/xfs/scrub/common.c 				XFS_INO_TO_AGNO(mp, sc->sm->sm_ino),
sc                720 fs/xfs/scrub/common.c 				XFS_INO_TO_AGBNO(mp, sc->sm->sm_ino),
sc                724 fs/xfs/scrub/common.c 	if (VFS_I(ip)->i_generation != sc->sm->sm_gen) {
sc                729 fs/xfs/scrub/common.c 	sc->ip = ip;
sc                736 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                742 fs/xfs/scrub/common.c 	error = xchk_get_inode(sc, ip);
sc                747 fs/xfs/scrub/common.c 	sc->ilock_flags = XFS_IOLOCK_EXCL | XFS_MMAPLOCK_EXCL;
sc                748 fs/xfs/scrub/common.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                749 fs/xfs/scrub/common.c 	error = xchk_trans_alloc(sc, resblks);
sc                752 fs/xfs/scrub/common.c 	sc->ilock_flags |= XFS_ILOCK_EXCL;
sc                753 fs/xfs/scrub/common.c 	xfs_ilock(sc->ip, XFS_ILOCK_EXCL);
sc                767 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                772 fs/xfs/scrub/common.c 	if (xchk_skip_xref(sc->sm))
sc                788 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_XFAIL;
sc                789 fs/xfs/scrub/common.c 	trace_xchk_xref_error(sc, *error, __return_address);
sc                802 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc,
sc                808 fs/xfs/scrub/common.c 		xchk_block_set_corrupt(sc, bp);
sc                812 fs/xfs/scrub/common.c 		xchk_set_incomplete(sc);
sc                818 fs/xfs/scrub/common.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                819 fs/xfs/scrub/common.c 	trace_xchk_block_error(sc, bp->b_bn, fa);
sc                828 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc)
sc                834 fs/xfs/scrub/common.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                838 fs/xfs/scrub/common.c 	if (sc->ip->i_d.di_flags & XFS_DIFLAG_REALTIME) {
sc                839 fs/xfs/scrub/common.c 		xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                844 fs/xfs/scrub/common.c 	if (xfs_is_reflink_inode(sc->ip)) {
sc                845 fs/xfs/scrub/common.c 		xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                850 fs/xfs/scrub/common.c 	if (xfs_inode_hasattr(sc->ip)) {
sc                851 fs/xfs/scrub/common.c 		xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                856 fs/xfs/scrub/common.c 	smtype = sc->sm->sm_type;
sc                857 fs/xfs/scrub/common.c 	sc->sm->sm_type = XFS_SCRUB_TYPE_BMBTD;
sc                858 fs/xfs/scrub/common.c 	error = xchk_bmap_data(sc);
sc                859 fs/xfs/scrub/common.c 	sc->sm->sm_type = smtype;
sc                860 fs/xfs/scrub/common.c 	if (error || (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                864 fs/xfs/scrub/common.c 	if (xfs_sb_version_hasreflink(&sc->mp->m_sb)) {
sc                865 fs/xfs/scrub/common.c 		error = xfs_reflink_inode_has_shared_extents(sc->tp, sc->ip,
sc                867 fs/xfs/scrub/common.c 		if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0,
sc                871 fs/xfs/scrub/common.c 			xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                902 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc)
sc                904 fs/xfs/scrub/common.c 	sc->flags |= XCHK_REAPING_DISABLED;
sc                905 fs/xfs/scrub/common.c 	xfs_stop_block_reaping(sc->mp);
sc                911 fs/xfs/scrub/common.c 	struct xfs_scrub	*sc)
sc                913 fs/xfs/scrub/common.c 	xfs_start_block_reaping(sc->mp);
sc                914 fs/xfs/scrub/common.c 	sc->flags &= ~XCHK_REAPING_DISABLED;
sc                 16 fs/xfs/scrub/common.h 	struct xfs_scrub	*sc,
sc                 34 fs/xfs/scrub/common.h int xchk_trans_alloc(struct xfs_scrub *sc, uint resblks);
sc                 35 fs/xfs/scrub/common.h bool xchk_process_error(struct xfs_scrub *sc, xfs_agnumber_t agno,
sc                 37 fs/xfs/scrub/common.h bool xchk_fblock_process_error(struct xfs_scrub *sc, int whichfork,
sc                 40 fs/xfs/scrub/common.h bool xchk_xref_process_error(struct xfs_scrub *sc,
sc                 42 fs/xfs/scrub/common.h bool xchk_fblock_xref_process_error(struct xfs_scrub *sc,
sc                 45 fs/xfs/scrub/common.h void xchk_block_set_preen(struct xfs_scrub *sc,
sc                 47 fs/xfs/scrub/common.h void xchk_ino_set_preen(struct xfs_scrub *sc, xfs_ino_t ino);
sc                 49 fs/xfs/scrub/common.h void xchk_set_corrupt(struct xfs_scrub *sc);
sc                 50 fs/xfs/scrub/common.h void xchk_block_set_corrupt(struct xfs_scrub *sc,
sc                 52 fs/xfs/scrub/common.h void xchk_ino_set_corrupt(struct xfs_scrub *sc, xfs_ino_t ino);
sc                 53 fs/xfs/scrub/common.h void xchk_fblock_set_corrupt(struct xfs_scrub *sc, int whichfork,
sc                 56 fs/xfs/scrub/common.h void xchk_block_xref_set_corrupt(struct xfs_scrub *sc,
sc                 58 fs/xfs/scrub/common.h void xchk_ino_xref_set_corrupt(struct xfs_scrub *sc,
sc                 60 fs/xfs/scrub/common.h void xchk_fblock_xref_set_corrupt(struct xfs_scrub *sc,
sc                 63 fs/xfs/scrub/common.h void xchk_ino_set_warning(struct xfs_scrub *sc, xfs_ino_t ino);
sc                 64 fs/xfs/scrub/common.h void xchk_fblock_set_warning(struct xfs_scrub *sc, int whichfork,
sc                 67 fs/xfs/scrub/common.h void xchk_set_incomplete(struct xfs_scrub *sc);
sc                 71 fs/xfs/scrub/common.h bool xchk_should_check_xref(struct xfs_scrub *sc, int *error,
sc                 75 fs/xfs/scrub/common.h int xchk_setup_fs(struct xfs_scrub *sc, struct xfs_inode *ip);
sc                 76 fs/xfs/scrub/common.h int xchk_setup_ag_allocbt(struct xfs_scrub *sc,
sc                 78 fs/xfs/scrub/common.h int xchk_setup_ag_iallocbt(struct xfs_scrub *sc,
sc                 80 fs/xfs/scrub/common.h int xchk_setup_ag_rmapbt(struct xfs_scrub *sc,
sc                 82 fs/xfs/scrub/common.h int xchk_setup_ag_refcountbt(struct xfs_scrub *sc,
sc                 84 fs/xfs/scrub/common.h int xchk_setup_inode(struct xfs_scrub *sc,
sc                 86 fs/xfs/scrub/common.h int xchk_setup_inode_bmap(struct xfs_scrub *sc,
sc                 88 fs/xfs/scrub/common.h int xchk_setup_inode_bmap_data(struct xfs_scrub *sc,
sc                 90 fs/xfs/scrub/common.h int xchk_setup_directory(struct xfs_scrub *sc,
sc                 92 fs/xfs/scrub/common.h int xchk_setup_xattr(struct xfs_scrub *sc,
sc                 94 fs/xfs/scrub/common.h int xchk_setup_symlink(struct xfs_scrub *sc,
sc                 96 fs/xfs/scrub/common.h int xchk_setup_parent(struct xfs_scrub *sc,
sc                 99 fs/xfs/scrub/common.h int xchk_setup_rt(struct xfs_scrub *sc, struct xfs_inode *ip);
sc                102 fs/xfs/scrub/common.h xchk_setup_rt(struct xfs_scrub *sc, struct xfs_inode *ip)
sc                108 fs/xfs/scrub/common.h int xchk_setup_quota(struct xfs_scrub *sc, struct xfs_inode *ip);
sc                111 fs/xfs/scrub/common.h xchk_setup_quota(struct xfs_scrub *sc, struct xfs_inode *ip)
sc                116 fs/xfs/scrub/common.h int xchk_setup_fscounters(struct xfs_scrub *sc, struct xfs_inode *ip);
sc                118 fs/xfs/scrub/common.h void xchk_ag_free(struct xfs_scrub *sc, struct xchk_ag *sa);
sc                119 fs/xfs/scrub/common.h int xchk_ag_init(struct xfs_scrub *sc, xfs_agnumber_t agno,
sc                122 fs/xfs/scrub/common.h int xchk_ag_read_headers(struct xfs_scrub *sc, xfs_agnumber_t agno,
sc                126 fs/xfs/scrub/common.h int xchk_ag_btcur_init(struct xfs_scrub *sc, struct xchk_ag *sa);
sc                127 fs/xfs/scrub/common.h int xchk_count_rmap_ownedby_ag(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                130 fs/xfs/scrub/common.h int xchk_setup_ag_btree(struct xfs_scrub *sc, struct xfs_inode *ip,
sc                132 fs/xfs/scrub/common.h int xchk_get_inode(struct xfs_scrub *sc, struct xfs_inode *ip_in);
sc                133 fs/xfs/scrub/common.h int xchk_setup_inode_contents(struct xfs_scrub *sc, struct xfs_inode *ip,
sc                135 fs/xfs/scrub/common.h void xchk_buffer_recheck(struct xfs_scrub *sc, struct xfs_buf *bp);
sc                147 fs/xfs/scrub/common.h int xchk_metadata_inode_forks(struct xfs_scrub *sc);
sc                149 fs/xfs/scrub/common.h void xchk_stop_reaping(struct xfs_scrub *sc);
sc                150 fs/xfs/scrub/common.h void xchk_start_reaping(struct xfs_scrub *sc);
sc                 35 fs/xfs/scrub/dabtree.c 	struct xfs_scrub	*sc = ds->sc;
sc                 43 fs/xfs/scrub/dabtree.c 		trace_xchk_deadlock_retry(sc->ip, sc->sm, *error);
sc                 48 fs/xfs/scrub/dabtree.c 		sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                 52 fs/xfs/scrub/dabtree.c 		trace_xchk_file_op_error(sc, ds->dargs.whichfork,
sc                 70 fs/xfs/scrub/dabtree.c 	struct xfs_scrub	*sc = ds->sc;
sc                 72 fs/xfs/scrub/dabtree.c 	sc->sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                 74 fs/xfs/scrub/dabtree.c 	trace_xchk_fblock_error(sc, ds->dargs.whichfork,
sc                275 fs/xfs/scrub/dabtree.c 		xchk_buffer_recheck(ds->sc,
sc                364 fs/xfs/scrub/dabtree.c 		xchk_buffer_recheck(ds->sc, blk->bp);
sc                386 fs/xfs/scrub/dabtree.c 	if (xfs_sb_version_hascrc(&ds->sc->mp->m_sb) && hdr3->hdr.pad)
sc                473 fs/xfs/scrub/dabtree.c 	struct xfs_scrub		*sc,
sc                479 fs/xfs/scrub/dabtree.c 	struct xfs_mount		*mp = sc->mp;
sc                488 fs/xfs/scrub/dabtree.c 	if (XFS_IFORK_FORMAT(sc->ip, whichfork) != XFS_DINODE_FMT_EXTENTS &&
sc                489 fs/xfs/scrub/dabtree.c 	    XFS_IFORK_FORMAT(sc->ip, whichfork) != XFS_DINODE_FMT_BTREE)
sc                493 fs/xfs/scrub/dabtree.c 	ds.dargs.dp = sc->ip;
sc                495 fs/xfs/scrub/dabtree.c 	ds.dargs.trans = sc->tp;
sc                500 fs/xfs/scrub/dabtree.c 	ds.sc = sc;
sc                546 fs/xfs/scrub/dabtree.c 			if (xchk_should_terminate(sc, &error) ||
sc                547 fs/xfs/scrub/dabtree.c 			    (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                593 fs/xfs/scrub/dabtree.c 		xfs_trans_brelse(sc->tp, blks[level].bp);
sc                 16 fs/xfs/scrub/dabtree.h 	struct xfs_scrub	*sc;
sc                 41 fs/xfs/scrub/dabtree.h int xchk_da_btree(struct xfs_scrub *sc, int whichfork,
sc                 25 fs/xfs/scrub/dir.c 	struct xfs_scrub	*sc,
sc                 28 fs/xfs/scrub/dir.c 	return xchk_setup_inode_contents(sc, ip, 0);
sc                 39 fs/xfs/scrub/dir.c 	struct xfs_scrub	*sc;
sc                 50 fs/xfs/scrub/dir.c 	struct xfs_mount	*mp = sdc->sc->mp;
sc                 57 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK,
sc                 70 fs/xfs/scrub/dir.c 	error = xfs_iget(mp, sdc->sc->tp, inum, 0, 0, &ip);
sc                 71 fs/xfs/scrub/dir.c 	if (!xchk_fblock_xref_process_error(sdc->sc, XFS_DATA_FORK, offset,
sc                 79 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK, offset);
sc                111 fs/xfs/scrub/dir.c 	ip = sdc->sc->ip;
sc                118 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK, offset);
sc                124 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK, offset);
sc                131 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK,
sc                134 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK,
sc                142 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK,
sc                145 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK,
sc                154 fs/xfs/scrub/dir.c 	error = xfs_dir_lookup(sdc->sc->tp, ip, &xname, &lookup_ino, NULL);
sc                155 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(sdc->sc, XFS_DATA_FORK, offset,
sc                159 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sdc->sc, XFS_DATA_FORK, offset);
sc                173 fs/xfs/scrub/dir.c 	if (error == 0 && sdc->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                221 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(ds->sc, XFS_DATA_FORK, rec_bno,
sc                225 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                228 fs/xfs/scrub/dir.c 	xchk_buffer_recheck(ds->sc, bp);
sc                230 fs/xfs/scrub/dir.c 	if (ds->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                239 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                257 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                266 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                268 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                273 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(ds->sc, XFS_DATA_FORK, rec_bno);
sc                288 fs/xfs/scrub/dir.c 	struct xfs_scrub		*sc,
sc                307 fs/xfs/scrub/dir.c 	xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                313 fs/xfs/scrub/dir.c 	struct xfs_scrub		*sc,
sc                321 fs/xfs/scrub/dir.c 	struct xfs_mount		*mp = sc->mp;
sc                333 fs/xfs/scrub/dir.c 	d_ops = sc->ip->d_ops;
sc                338 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                339 fs/xfs/scrub/dir.c 		error = xfs_dir3_block_read(sc->tp, sc->ip, &bp);
sc                342 fs/xfs/scrub/dir.c 		error = xfs_dir3_data_read(sc->tp, sc->ip, lblk, -1, &bp);
sc                344 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk, &error))
sc                346 fs/xfs/scrub/dir.c 	xchk_buffer_recheck(sc, bp);
sc                350 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                361 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                371 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                377 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                399 fs/xfs/scrub/dir.c 				xchk_fblock_set_corrupt(sc, XFS_DATA_FORK,
sc                410 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                418 fs/xfs/scrub/dir.c 		xchk_directory_check_free_entry(sc, lblk, bf, dup);
sc                419 fs/xfs/scrub/dir.c 		if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                425 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                435 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                439 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                441 fs/xfs/scrub/dir.c 	xfs_trans_brelse(sc->tp, bp);
sc                454 fs/xfs/scrub/dir.c 	struct xfs_scrub		*sc,
sc                461 fs/xfs/scrub/dir.c 	dfp = sc->ip->d_ops->data_bestfree_p(dbp->b_addr);
sc                464 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                467 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                473 fs/xfs/scrub/dir.c 	struct xfs_scrub		*sc,
sc                483 fs/xfs/scrub/dir.c 	const struct xfs_dir_ops	*d_ops = sc->ip->d_ops;
sc                484 fs/xfs/scrub/dir.c 	struct xfs_da_geometry		*geo = sc->mp->m_dir_geo;
sc                495 fs/xfs/scrub/dir.c 	error = xfs_dir3_leaf_read(sc->tp, sc->ip, lblk, -1, &bp);
sc                496 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk, &error))
sc                498 fs/xfs/scrub/dir.c 	xchk_buffer_recheck(sc, bp);
sc                507 fs/xfs/scrub/dir.c 	if (xfs_sb_version_hascrc(&sc->mp->m_sb)) {
sc                511 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                518 fs/xfs/scrub/dir.c 	if (bestcount != xfs_dir2_byte_to_db(geo, sc->ip->i_d.di_size)) {
sc                519 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                525 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                531 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                539 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                545 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                546 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                554 fs/xfs/scrub/dir.c 		error = xfs_dir3_data_read(sc->tp, sc->ip,
sc                556 fs/xfs/scrub/dir.c 		if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk,
sc                559 fs/xfs/scrub/dir.c 		xchk_directory_check_freesp(sc, lblk, dbp, best);
sc                560 fs/xfs/scrub/dir.c 		xfs_trans_brelse(sc->tp, dbp);
sc                561 fs/xfs/scrub/dir.c 		if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                571 fs/xfs/scrub/dir.c 	struct xfs_scrub		*sc,
sc                585 fs/xfs/scrub/dir.c 	error = xfs_dir2_free_read(sc->tp, sc->ip, lblk, &bp);
sc                586 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk, &error))
sc                588 fs/xfs/scrub/dir.c 	xchk_buffer_recheck(sc, bp);
sc                590 fs/xfs/scrub/dir.c 	if (xfs_sb_version_hascrc(&sc->mp->m_sb)) {
sc                594 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                598 fs/xfs/scrub/dir.c 	sc->ip->d_ops->free_hdr_from_disk(&freehdr, bp->b_addr);
sc                599 fs/xfs/scrub/dir.c 	bestp = sc->ip->d_ops->free_bests_p(bp->b_addr);
sc                606 fs/xfs/scrub/dir.c 		error = xfs_dir3_data_read(sc->tp, sc->ip,
sc                609 fs/xfs/scrub/dir.c 		if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk,
sc                612 fs/xfs/scrub/dir.c 		xchk_directory_check_freesp(sc, lblk, dbp, best);
sc                613 fs/xfs/scrub/dir.c 		xfs_trans_brelse(sc->tp, dbp);
sc                617 fs/xfs/scrub/dir.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                625 fs/xfs/scrub/dir.c 	struct xfs_scrub	*sc)
sc                630 fs/xfs/scrub/dir.c 	struct xfs_mount	*mp = sc->mp;
sc                641 fs/xfs/scrub/dir.c 	if (sc->ip->i_d.di_format != XFS_DINODE_FMT_EXTENTS &&
sc                642 fs/xfs/scrub/dir.c 	    sc->ip->i_d.di_format != XFS_DINODE_FMT_BTREE)
sc                645 fs/xfs/scrub/dir.c 	ifp = XFS_IFORK_PTR(sc->ip, XFS_DATA_FORK);
sc                651 fs/xfs/scrub/dir.c 	args.dp = sc->ip;
sc                653 fs/xfs/scrub/dir.c 	args.trans = sc->tp;
sc                655 fs/xfs/scrub/dir.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, lblk, &error))
sc                659 fs/xfs/scrub/dir.c 	found = xfs_iext_lookup_extent(sc->ip, ifp, lblk, &icur, &got);
sc                660 fs/xfs/scrub/dir.c 	while (found && !(sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)) {
sc                665 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK,
sc                689 fs/xfs/scrub/dir.c 			error = xchk_directory_data_bestfree(sc, lblk,
sc                696 fs/xfs/scrub/dir.c 		found = xfs_iext_lookup_extent(sc->ip, ifp, lblk, &icur, &got);
sc                699 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                703 fs/xfs/scrub/dir.c 	if (xfs_iext_lookup_extent(sc->ip, ifp, leaf_lblk, &icur, &got) &&
sc                708 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                711 fs/xfs/scrub/dir.c 		error = xchk_directory_leaf1_bestfree(sc, &args,
sc                717 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                722 fs/xfs/scrub/dir.c 	found = xfs_iext_lookup_extent(sc->ip, ifp, lblk, &icur, &got);
sc                723 fs/xfs/scrub/dir.c 	while (found && !(sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)) {
sc                730 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                734 fs/xfs/scrub/dir.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, lblk);
sc                753 fs/xfs/scrub/dir.c 			error = xchk_directory_free_bestfree(sc, &args,
sc                760 fs/xfs/scrub/dir.c 		found = xfs_iext_lookup_extent(sc->ip, ifp, lblk, &icur, &got);
sc                769 fs/xfs/scrub/dir.c 	struct xfs_scrub	*sc)
sc                774 fs/xfs/scrub/dir.c 		.sc = sc,
sc                780 fs/xfs/scrub/dir.c 	if (!S_ISDIR(VFS_I(sc->ip)->i_mode))
sc                784 fs/xfs/scrub/dir.c 	if (sc->ip->i_d.di_size < xfs_dir2_sf_hdr_size(0)) {
sc                785 fs/xfs/scrub/dir.c 		xchk_ino_set_corrupt(sc, sc->ip->i_ino);
sc                790 fs/xfs/scrub/dir.c 	error = xchk_da_btree(sc, XFS_DATA_FORK, xchk_dir_rec, NULL);
sc                794 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                798 fs/xfs/scrub/dir.c 	error = xchk_directory_blocks(sc);
sc                802 fs/xfs/scrub/dir.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                810 fs/xfs/scrub/dir.c 			sc->ip->i_d.di_size);
sc                829 fs/xfs/scrub/dir.c 	sc->ilock_flags &= ~XFS_ILOCK_EXCL;
sc                830 fs/xfs/scrub/dir.c 	xfs_iunlock(sc->ip, XFS_ILOCK_EXCL);
sc                832 fs/xfs/scrub/dir.c 		error = xfs_readdir(sc->tp, sc->ip, &sdc.dir_iter, bufsize);
sc                833 fs/xfs/scrub/dir.c 		if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0,
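The fs/xfs/scrub/dir.c references above all follow one shape: each check records a problem against a (fork, logical block) pair in the scrub context rather than returning an error, and the caller bails out once the CORRUPT output flag is observed, releasing any buffer through the transaction. Below is a minimal, self-contained sketch of that record-and-continue pattern; every type and helper name in it is an illustrative stand-in, not the kernel's.

/*
 * Hypothetical sketch only: corruption is noted against (fork, logical
 * block) in a scrub context, and the scan stops once the CORRUPT flag
 * is set. None of these names are the kernel's.
 */
#include <stdio.h>

#define SCRUB_OFLAG_CORRUPT	(1u << 0)

struct scrub_ctx {
	unsigned int	flags;			/* accumulated outcome flags */
};

static void fblock_set_corrupt(struct scrub_ctx *sc, int fork, long lblk)
{
	sc->flags |= SCRUB_OFLAG_CORRUPT;
	fprintf(stderr, "corrupt: fork %d, block %ld\n", fork, lblk);
}

/* Pretend check: the recorded best-free space must match what we computed. */
static void check_dir_block(struct scrub_ctx *sc, long lblk,
			    unsigned int recorded, unsigned int computed)
{
	if (recorded != computed)
		fblock_set_corrupt(sc, 0, lblk);
}

int main(void)
{
	struct scrub_ctx sc = { 0 };
	long lblk;

	for (lblk = 0; lblk < 4; lblk++) {
		check_dir_block(&sc, lblk, lblk == 2 ? 7 : 8, 8);
		if (sc.flags & SCRUB_OFLAG_CORRUPT)
			break;		/* mirrors the early-out tests above */
	}
	return (sc.flags & SCRUB_OFLAG_CORRUPT) ? 1 : 0;
}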
sc                 64 fs/xfs/scrub/fscounters.c 	struct xfs_scrub	*sc)
sc                 66 fs/xfs/scrub/fscounters.c 	struct xfs_mount	*mp = sc->mp;
sc                 80 fs/xfs/scrub/fscounters.c 		error = xfs_ialloc_read_agi(mp, sc->tp, agno, &agi_bp);
sc                 83 fs/xfs/scrub/fscounters.c 		error = xfs_alloc_read_agf(mp, sc->tp, agno, 0, &agf_bp);
sc                122 fs/xfs/scrub/fscounters.c 	struct xfs_scrub	*sc,
sc                128 fs/xfs/scrub/fscounters.c 	sc->buf = kmem_zalloc(sizeof(struct xchk_fscounters), 0);
sc                129 fs/xfs/scrub/fscounters.c 	if (!sc->buf)
sc                131 fs/xfs/scrub/fscounters.c 	fsc = sc->buf;
sc                133 fs/xfs/scrub/fscounters.c 	xfs_icount_range(sc->mp, &fsc->icount_min, &fsc->icount_max);
sc                136 fs/xfs/scrub/fscounters.c 	error = xchk_fscount_warmup(sc);
sc                145 fs/xfs/scrub/fscounters.c 	xchk_stop_reaping(sc);
sc                147 fs/xfs/scrub/fscounters.c 	return xchk_trans_alloc(sc, 0);
sc                158 fs/xfs/scrub/fscounters.c 	struct xfs_scrub	*sc,
sc                161 fs/xfs/scrub/fscounters.c 	struct xfs_mount	*mp = sc->mp;
sc                235 fs/xfs/scrub/fscounters.c 		xchk_set_incomplete(sc);
sc                257 fs/xfs/scrub/fscounters.c 	struct xfs_scrub	*sc,
sc                265 fs/xfs/scrub/fscounters.c 	trace_xchk_fscounters_within_range(sc->mp, expected, curr_value,
sc                294 fs/xfs/scrub/fscounters.c 		xchk_set_incomplete(sc);
sc                304 fs/xfs/scrub/fscounters.c 	struct xfs_scrub	*sc)
sc                306 fs/xfs/scrub/fscounters.c 	struct xfs_mount	*mp = sc->mp;
sc                307 fs/xfs/scrub/fscounters.c 	struct xchk_fscounters	*fsc = sc->buf;
sc                318 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                322 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                326 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                333 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                336 fs/xfs/scrub/fscounters.c 	error = xchk_fscount_aggregate_agcounts(sc, fsc);
sc                337 fs/xfs/scrub/fscounters.c 	if (!xchk_process_error(sc, 0, XFS_SB_BLOCK(mp), &error))
sc                339 fs/xfs/scrub/fscounters.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_INCOMPLETE)
sc                343 fs/xfs/scrub/fscounters.c 	if (!xchk_fscount_within_range(sc, icount, &mp->m_icount, fsc->icount))
sc                344 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                346 fs/xfs/scrub/fscounters.c 	if (!xchk_fscount_within_range(sc, ifree, &mp->m_ifree, fsc->ifree))
sc                347 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
sc                349 fs/xfs/scrub/fscounters.c 	if (!xchk_fscount_within_range(sc, fdblocks, &mp->m_fdblocks,
sc                351 fs/xfs/scrub/fscounters.c 		xchk_set_corrupt(sc);
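The fscounters.c references show summary counters being aggregated from per-AG headers and then compared against the live in-core counters; because those counters keep moving while the scan runs, the comparison tolerates some slack rather than demanding exact equality. The following sketch illustrates only that general idea; the names and the tolerance policy are invented, not copied from the kernel.

/*
 * Illustrative "within range" comparison for a live counter. The slack
 * handling here is a guess at the general idea, not the kernel's exact
 * policy.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool counter_within_range(uint64_t curr, uint64_t expected,
				 uint64_t slack)
{
	uint64_t lo = expected > slack ? expected - slack : 0;
	uint64_t hi = expected + slack;

	return curr >= lo && curr <= hi;
}

int main(void)
{
	uint64_t incore_icount = 10240;	/* live in-core counter */
	uint64_t summed_icount = 10238;	/* aggregated from per-AG headers */

	printf("icount %s\n",
	       counter_within_range(incore_icount, summed_icount, 64) ?
	       "ok" : "corrupt");
	return 0;
}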
sc                127 fs/xfs/scrub/health.c 	struct xfs_scrub	*sc)
sc                132 fs/xfs/scrub/health.c 	if (!sc->sick_mask)
sc                135 fs/xfs/scrub/health.c 	bad = (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT);
sc                136 fs/xfs/scrub/health.c 	switch (type_to_health_flag[sc->sm->sm_type].group) {
sc                138 fs/xfs/scrub/health.c 		pag = xfs_perag_get(sc->mp, sc->sm->sm_agno);
sc                140 fs/xfs/scrub/health.c 			xfs_ag_mark_sick(pag, sc->sick_mask);
sc                142 fs/xfs/scrub/health.c 			xfs_ag_mark_healthy(pag, sc->sick_mask);
sc                146 fs/xfs/scrub/health.c 		if (!sc->ip)
sc                149 fs/xfs/scrub/health.c 			xfs_inode_mark_sick(sc->ip, sc->sick_mask);
sc                151 fs/xfs/scrub/health.c 			xfs_inode_mark_healthy(sc->ip, sc->sick_mask);
sc                155 fs/xfs/scrub/health.c 			xfs_fs_mark_sick(sc->mp, sc->sick_mask);
sc                157 fs/xfs/scrub/health.c 			xfs_fs_mark_healthy(sc->mp, sc->sick_mask);
sc                161 fs/xfs/scrub/health.c 			xfs_rt_mark_sick(sc->mp, sc->sick_mask);
sc                163 fs/xfs/scrub/health.c 			xfs_rt_mark_healthy(sc->mp, sc->sick_mask);
sc                174 fs/xfs/scrub/health.c 	struct xfs_scrub	*sc,
sc                189 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_BNOBT)
sc                194 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_CNTBT)
sc                199 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_INOBT)
sc                204 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT)
sc                209 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_RMAPBT)
sc                214 fs/xfs/scrub/health.c 		if (sc->sm->sm_type == XFS_SCRUB_TYPE_REFCNTBT)
sc                224 fs/xfs/scrub/health.c 		sc->sm->sm_flags |= XFS_SCRUB_OFLAG_XFAIL;
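The health.c references map the scrub type to a health-reporting group and then mark the corresponding object sick or healthy depending on whether the CORRUPT flag ended up set. A small sketch of that dispatch follows; the groups and marker functions are invented for illustration.

/*
 * Hypothetical dispatch: translate a scrub outcome into per-object health
 * state. Groups and marker functions are placeholders.
 */
#include <stdbool.h>
#include <stdio.h>

enum health_group { HG_AG, HG_INODE, HG_FS };

static void mark(const char *what, bool sick)
{
	printf("%s: %s\n", what, sick ? "sick" : "healthy");
}

static void update_health(enum health_group group, bool corrupt)
{
	switch (group) {
	case HG_AG:	mark("allocation group", corrupt); break;
	case HG_INODE:	mark("inode", corrupt); break;
	case HG_FS:	mark("filesystem", corrupt); break;
	}
}

int main(void)
{
	update_health(HG_AG, false);
	update_health(HG_INODE, true);
	return 0;
}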
sc                 10 fs/xfs/scrub/health.h void xchk_update_health(struct xfs_scrub *sc);
sc                 11 fs/xfs/scrub/health.h bool xchk_ag_btree_healthy_enough(struct xfs_scrub *sc, struct xfs_perag *pag,
sc                 32 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                 35 fs/xfs/scrub/ialloc.c 	return xchk_setup_ag_btree(sc, ip, sc->flags & XCHK_TRY_HARDER);
sc                 58 fs/xfs/scrub/ialloc.c 	struct xfs_scrub		*sc,
sc                 66 fs/xfs/scrub/ialloc.c 	if (sc->sm->sm_type == XFS_SCRUB_TYPE_FINOBT)
sc                 67 fs/xfs/scrub/ialloc.c 		pcur = &sc->sa.ino_cur;
sc                 69 fs/xfs/scrub/ialloc.c 		pcur = &sc->sa.fino_cur;
sc                 73 fs/xfs/scrub/ialloc.c 	if (!xchk_should_check_xref(sc, &error, pcur))
sc                 77 fs/xfs/scrub/ialloc.c 		xchk_btree_xref_set_corrupt(sc, *pcur, 0);
sc                 83 fs/xfs/scrub/ialloc.c 	struct xfs_scrub		*sc,
sc                 89 fs/xfs/scrub/ialloc.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                 92 fs/xfs/scrub/ialloc.c 	xchk_xref_is_used_space(sc, agbno, len);
sc                 93 fs/xfs/scrub/ialloc.c 	xchk_iallocbt_chunk_xref_other(sc, irec, agino);
sc                 94 fs/xfs/scrub/ialloc.c 	xchk_xref_is_owned_by(sc, agbno, len, &XFS_RMAP_OINFO_INODES);
sc                 95 fs/xfs/scrub/ialloc.c 	xchk_xref_is_not_shared(sc, agbno, len);
sc                114 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                116 fs/xfs/scrub/ialloc.c 	xchk_iallocbt_chunk_xref(bs->sc, irec, agino, bno, len);
sc                159 fs/xfs/scrub/ialloc.c 	if (xchk_should_terminate(bs->sc, &error))
sc                172 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                181 fs/xfs/scrub/ialloc.c 		if (!(bs->sc->flags & XCHK_TRY_HARDER) && !freemask_ok)
sc                195 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                253 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                265 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                271 fs/xfs/scrub/ialloc.c 		xchk_xref_is_not_owned_by(bs->sc, agbno,
sc                277 fs/xfs/scrub/ialloc.c 	xchk_xref_is_owned_by(bs->sc, agbno, M_IGEO(mp)->blocks_per_cluster,
sc                283 fs/xfs/scrub/ialloc.c 	if (!xchk_btree_xref_process_error(bs->sc, bs->cur, 0, &error))
sc                291 fs/xfs/scrub/ialloc.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                329 fs/xfs/scrub/ialloc.c 	     cluster_base += M_IGEO(bs->sc->mp)->inodes_per_cluster) {
sc                349 fs/xfs/scrub/ialloc.c 	struct xfs_mount		*mp = bs->sc->mp;
sc                371 fs/xfs/scrub/ialloc.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                382 fs/xfs/scrub/ialloc.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                398 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                403 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                442 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                447 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                453 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                458 fs/xfs/scrub/ialloc.c 	if (bs->sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                468 fs/xfs/scrub/ialloc.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                483 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                496 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                513 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                521 fs/xfs/scrub/ialloc.c 	if (!sc->sa.ino_cur || !sc->sa.rmap_cur ||
sc                522 fs/xfs/scrub/ialloc.c 	    (xfs_sb_version_hasfinobt(&sc->mp->m_sb) && !sc->sa.fino_cur) ||
sc                523 fs/xfs/scrub/ialloc.c 	    xchk_skip_xref(sc->sm))
sc                527 fs/xfs/scrub/ialloc.c 	error = xfs_btree_count_blocks(sc->sa.ino_cur, &inobt_blocks);
sc                528 fs/xfs/scrub/ialloc.c 	if (!xchk_process_error(sc, 0, 0, &error))
sc                531 fs/xfs/scrub/ialloc.c 	if (sc->sa.fino_cur) {
sc                532 fs/xfs/scrub/ialloc.c 		error = xfs_btree_count_blocks(sc->sa.fino_cur, &finobt_blocks);
sc                533 fs/xfs/scrub/ialloc.c 		if (!xchk_process_error(sc, 0, 0, &error))
sc                537 fs/xfs/scrub/ialloc.c 	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
sc                539 fs/xfs/scrub/ialloc.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                542 fs/xfs/scrub/ialloc.c 		xchk_btree_set_corrupt(sc, sc->sa.ino_cur, 0);
sc                551 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                559 fs/xfs/scrub/ialloc.c 	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
sc                563 fs/xfs/scrub/ialloc.c 	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
sc                565 fs/xfs/scrub/ialloc.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                567 fs/xfs/scrub/ialloc.c 	inode_blocks = XFS_B_TO_FSB(sc->mp, inodes * sc->mp->m_sb.sb_inodesize);
sc                569 fs/xfs/scrub/ialloc.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                575 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                586 fs/xfs/scrub/ialloc.c 	cur = which == XFS_BTNUM_INO ? sc->sa.ino_cur : sc->sa.fino_cur;
sc                587 fs/xfs/scrub/ialloc.c 	error = xchk_btree(sc, cur, xchk_iallocbt_rec, &XFS_RMAP_OINFO_INOBT,
sc                592 fs/xfs/scrub/ialloc.c 	xchk_iallocbt_xref_rmap_btreeblks(sc, which);
sc                602 fs/xfs/scrub/ialloc.c 		xchk_iallocbt_xref_rmap_inodes(sc, which, iabt.inodes);
sc                609 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc)
sc                611 fs/xfs/scrub/ialloc.c 	return xchk_iallocbt(sc, XFS_BTNUM_INO);
sc                616 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc)
sc                618 fs/xfs/scrub/ialloc.c 	return xchk_iallocbt(sc, XFS_BTNUM_FINO);
sc                624 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                633 fs/xfs/scrub/ialloc.c 	if (!(*icur) || xchk_skip_xref(sc->sm))
sc                637 fs/xfs/scrub/ialloc.c 	if (!xchk_should_check_xref(sc, &error, icur))
sc                640 fs/xfs/scrub/ialloc.c 		xchk_btree_xref_set_corrupt(sc, *icur, 0);
sc                646 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                650 fs/xfs/scrub/ialloc.c 	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, false);
sc                651 fs/xfs/scrub/ialloc.c 	xchk_xref_inode_check(sc, agbno, len, &sc->sa.fino_cur, false);
sc                657 fs/xfs/scrub/ialloc.c 	struct xfs_scrub	*sc,
sc                661 fs/xfs/scrub/ialloc.c 	xchk_xref_inode_check(sc, agbno, len, &sc->sa.ino_cur, true);
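Many of the ialloc.c references share a cross-reference guard: if the needed btree cursor was never set up, or the caller asked to skip cross-referencing, the helper returns quietly; otherwise it queries the other structure and flags cross-reference corruption on disagreement. Here is a self-contained sketch of that guard with placeholder types and a stubbed query; only the control flow is meant to match the pattern visible above.

/*
 * Sketch of the cross-reference guard pattern; cursor, query, and flagging
 * are all stand-ins. The point is the early return when the cursor is
 * missing or xref checking is disabled.
 */
#include <stdbool.h>
#include <stdio.h>

struct xref_cursor { bool valid; };

struct scrub_ctx {
	bool			skip_xref;
	struct xref_cursor	ino_cur;
	unsigned int		flags;
};

static bool query_has_inodes(struct xref_cursor *cur, long agbno, long len)
{
	(void)cur; (void)agbno; (void)len;
	return true;			/* pretend the other btree agrees */
}

static void xref_inode_check(struct scrub_ctx *sc, long agbno, long len,
			     struct xref_cursor *cur, bool should_have)
{
	if (!cur->valid || sc->skip_xref)
		return;			/* nothing to cross-reference against */
	if (query_has_inodes(cur, agbno, len) != should_have)
		sc->flags |= 1;		/* note xref corruption */
}

int main(void)
{
	struct scrub_ctx sc = { .ino_cur = { .valid = true } };

	xref_inode_check(&sc, 128, 8, &sc.ino_cur, true);
	printf("flags=%u\n", sc.flags);
	return 0;
}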
sc                 31 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                 40 fs/xfs/scrub/inode.c 	error = xchk_get_inode(sc, ip);
sc                 46 fs/xfs/scrub/inode.c 		return xchk_trans_alloc(sc, 0);
sc                 52 fs/xfs/scrub/inode.c 	sc->ilock_flags = XFS_IOLOCK_EXCL | XFS_MMAPLOCK_EXCL;
sc                 53 fs/xfs/scrub/inode.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                 54 fs/xfs/scrub/inode.c 	error = xchk_trans_alloc(sc, 0);
sc                 57 fs/xfs/scrub/inode.c 	sc->ilock_flags |= XFS_ILOCK_EXCL;
sc                 58 fs/xfs/scrub/inode.c 	xfs_ilock(sc->ip, XFS_ILOCK_EXCL);
sc                 70 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                 78 fs/xfs/scrub/inode.c 	fa = xfs_inode_validate_extsize(sc->mp, be32_to_cpu(dip->di_extsize),
sc                 81 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                 92 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                101 fs/xfs/scrub/inode.c 	fa = xfs_inode_validate_cowextsize(sc->mp,
sc                105 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                111 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                117 fs/xfs/scrub/inode.c 	struct xfs_mount	*mp = sc->mp;
sc                151 fs/xfs/scrub/inode.c 	xchk_ino_set_corrupt(sc, ino);
sc                157 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                164 fs/xfs/scrub/inode.c 	struct xfs_mount	*mp = sc->mp;
sc                168 fs/xfs/scrub/inode.c 		xchk_ino_set_warning(sc, ino);
sc                195 fs/xfs/scrub/inode.c 	xchk_ino_set_corrupt(sc, ino);
sc                201 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                205 fs/xfs/scrub/inode.c 	struct xfs_mount	*mp = sc->mp;
sc                232 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                243 fs/xfs/scrub/inode.c 		xchk_ino_set_preen(sc, ino);
sc                248 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                250 fs/xfs/scrub/inode.c 		if (dip->di_mode == 0 && sc->ip)
sc                251 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                255 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                258 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                268 fs/xfs/scrub/inode.c 		xchk_ino_set_warning(sc, ino);
sc                275 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                279 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                283 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                287 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                291 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                297 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                299 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                301 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                310 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                314 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                318 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                322 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                331 fs/xfs/scrub/inode.c 		xchk_ino_set_warning(sc, ino);
sc                346 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                349 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                352 fs/xfs/scrub/inode.c 	xchk_inode_flags(sc, dip, ino, mode, flags);
sc                354 fs/xfs/scrub/inode.c 	xchk_inode_extsize(sc, dip, ino, mode, flags);
sc                362 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                366 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                370 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                376 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                378 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                380 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                386 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                394 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                398 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                402 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                407 fs/xfs/scrub/inode.c 			xchk_ino_set_corrupt(sc, ino);
sc                408 fs/xfs/scrub/inode.c 		xchk_inode_flags2(sc, dip, ino, mode, flags, flags2);
sc                409 fs/xfs/scrub/inode.c 		xchk_inode_cowextsize(sc, dip, ino, mode, flags,
sc                421 fs/xfs/scrub/inode.c 	struct xfs_scrub		*sc,
sc                429 fs/xfs/scrub/inode.c 	if (!sc->sa.fino_cur || xchk_skip_xref(sc->sm))
sc                432 fs/xfs/scrub/inode.c 	agino = XFS_INO_TO_AGINO(sc->mp, ino);
sc                438 fs/xfs/scrub/inode.c 	error = xfs_inobt_lookup(sc->sa.fino_cur, agino, XFS_LOOKUP_LE,
sc                440 fs/xfs/scrub/inode.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur) ||
sc                444 fs/xfs/scrub/inode.c 	error = xfs_inobt_get_rec(sc->sa.fino_cur, &rec, &has_record);
sc                445 fs/xfs/scrub/inode.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.fino_cur) ||
sc                458 fs/xfs/scrub/inode.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.fino_cur, 0);
sc                464 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                472 fs/xfs/scrub/inode.c 	if (xchk_skip_xref(sc->sm))
sc                476 fs/xfs/scrub/inode.c 	error = xfs_bmap_count_blocks(sc->tp, sc->ip, XFS_DATA_FORK,
sc                478 fs/xfs/scrub/inode.c 	if (!xchk_should_check_xref(sc, &error, NULL))
sc                481 fs/xfs/scrub/inode.c 		xchk_ino_xref_set_corrupt(sc, sc->ip->i_ino);
sc                483 fs/xfs/scrub/inode.c 	error = xfs_bmap_count_blocks(sc->tp, sc->ip, XFS_ATTR_FORK,
sc                485 fs/xfs/scrub/inode.c 	if (!xchk_should_check_xref(sc, &error, NULL))
sc                488 fs/xfs/scrub/inode.c 		xchk_ino_xref_set_corrupt(sc, sc->ip->i_ino);
sc                492 fs/xfs/scrub/inode.c 		xchk_ino_xref_set_corrupt(sc, sc->ip->i_ino);
sc                498 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                506 fs/xfs/scrub/inode.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                509 fs/xfs/scrub/inode.c 	agno = XFS_INO_TO_AGNO(sc->mp, ino);
sc                510 fs/xfs/scrub/inode.c 	agbno = XFS_INO_TO_AGBNO(sc->mp, ino);
sc                512 fs/xfs/scrub/inode.c 	error = xchk_ag_init(sc, agno, &sc->sa);
sc                513 fs/xfs/scrub/inode.c 	if (!xchk_xref_process_error(sc, agno, agbno, &error))
sc                516 fs/xfs/scrub/inode.c 	xchk_xref_is_used_space(sc, agbno, 1);
sc                517 fs/xfs/scrub/inode.c 	xchk_inode_xref_finobt(sc, ino);
sc                518 fs/xfs/scrub/inode.c 	xchk_xref_is_owned_by(sc, agbno, 1, &XFS_RMAP_OINFO_INODES);
sc                519 fs/xfs/scrub/inode.c 	xchk_xref_is_not_shared(sc, agbno, 1);
sc                520 fs/xfs/scrub/inode.c 	xchk_inode_xref_bmap(sc, dip);
sc                522 fs/xfs/scrub/inode.c 	xchk_ag_free(sc, &sc->sa);
sc                533 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc,
sc                536 fs/xfs/scrub/inode.c 	struct xfs_mount	*mp = sc->mp;
sc                543 fs/xfs/scrub/inode.c 	error = xfs_reflink_inode_has_shared_extents(sc->tp, sc->ip,
sc                545 fs/xfs/scrub/inode.c 	if (!xchk_xref_process_error(sc, XFS_INO_TO_AGNO(mp, ino),
sc                548 fs/xfs/scrub/inode.c 	if (xfs_is_reflink_inode(sc->ip) && !has_shared)
sc                549 fs/xfs/scrub/inode.c 		xchk_ino_set_preen(sc, ino);
sc                550 fs/xfs/scrub/inode.c 	else if (!xfs_is_reflink_inode(sc->ip) && has_shared)
sc                551 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, ino);
sc                557 fs/xfs/scrub/inode.c 	struct xfs_scrub	*sc)
sc                567 fs/xfs/scrub/inode.c 	if (!sc->ip) {
sc                568 fs/xfs/scrub/inode.c 		xchk_ino_set_corrupt(sc, sc->sm->sm_ino);
sc                573 fs/xfs/scrub/inode.c 	xfs_inode_to_disk(sc->ip, &di, 0);
sc                574 fs/xfs/scrub/inode.c 	xchk_dinode(sc, &di, sc->ip->i_ino);
sc                575 fs/xfs/scrub/inode.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                583 fs/xfs/scrub/inode.c 	if (S_ISREG(VFS_I(sc->ip)->i_mode))
sc                584 fs/xfs/scrub/inode.c 		xchk_inode_check_reflink_iflag(sc, sc->ip->i_ino);
sc                586 fs/xfs/scrub/inode.c 	xchk_inode_xref(sc, sc->ip->i_ino, &di);
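The inode.c references walk an on-disk inode field by field, recording a severity (preen, warning, or corrupt) per finding and continuing rather than stopping at the first problem. The sketch below models that field-by-field validation; the fields, severities, and rules are placeholders chosen for illustration.

/*
 * Illustrative per-field inode validation: each questionable field raises
 * a severity on the scrub context and checking continues. Field names and
 * rules are invented.
 */
#include <stdio.h>

enum severity { SEV_NONE, SEV_PREEN, SEV_WARN, SEV_CORRUPT };

struct scrub_ctx { enum severity worst; };

static void note(struct scrub_ctx *sc, enum severity sev, const char *why)
{
	if (sev > sc->worst)
		sc->worst = sev;
	printf("%s\n", why);
}

struct fake_dinode {
	unsigned int	version;
	unsigned int	mode;
	unsigned long	size;
};

static void check_dinode(struct scrub_ctx *sc, const struct fake_dinode *dip)
{
	if (dip->version < 2)
		note(sc, SEV_PREEN, "old inode version, could be upgraded");
	if (dip->mode == 0 && dip->size != 0)
		note(sc, SEV_CORRUPT, "free inode with nonzero size");
}

int main(void)
{
	struct scrub_ctx sc = { SEV_NONE };
	struct fake_dinode dip = { .version = 1, .mode = 0, .size = 4096 };

	check_dinode(&sc, &dip);
	return sc.worst == SEV_CORRUPT;
}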
sc                 23 fs/xfs/scrub/parent.c 	struct xfs_scrub	*sc,
sc                 26 fs/xfs/scrub/parent.c 	return xchk_setup_inode_contents(sc, ip, 0);
sc                 60 fs/xfs/scrub/parent.c 	struct xfs_scrub	*sc,
sc                 67 fs/xfs/scrub/parent.c 		.ino = sc->ip->i_ino,
sc                 97 fs/xfs/scrub/parent.c 		error = xfs_readdir(sc->tp, parent, &spc.dc, bufsize);
sc                116 fs/xfs/scrub/parent.c 	struct xfs_scrub	*sc,
sc                120 fs/xfs/scrub/parent.c 	struct xfs_mount	*mp = sc->mp;
sc                128 fs/xfs/scrub/parent.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                132 fs/xfs/scrub/parent.c 	if (sc->ip->i_ino == dnum) {
sc                133 fs/xfs/scrub/parent.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                141 fs/xfs/scrub/parent.c 	expected_nlink = VFS_I(sc->ip)->i_nlink == 0 ? 0 : 1;
sc                157 fs/xfs/scrub/parent.c 	error = xfs_iget(mp, sc->tp, dnum, XFS_IGET_UNTRUSTED, 0, &dp);
sc                160 fs/xfs/scrub/parent.c 		xchk_fblock_process_error(sc, XFS_DATA_FORK, 0, &error);
sc                163 fs/xfs/scrub/parent.c 	if (!xchk_fblock_xref_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                165 fs/xfs/scrub/parent.c 	if (dp == sc->ip || !S_ISDIR(VFS_I(dp)->i_mode)) {
sc                166 fs/xfs/scrub/parent.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                178 fs/xfs/scrub/parent.c 		error = xchk_parent_count_parent_dentries(sc, dp, &nlink);
sc                179 fs/xfs/scrub/parent.c 		if (!xchk_fblock_xref_process_error(sc, XFS_DATA_FORK, 0,
sc                183 fs/xfs/scrub/parent.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                193 fs/xfs/scrub/parent.c 	xfs_iunlock(sc->ip, sc->ilock_flags);
sc                194 fs/xfs/scrub/parent.c 	sc->ilock_flags = 0;
sc                200 fs/xfs/scrub/parent.c 	error = xchk_parent_count_parent_dentries(sc, dp, &nlink);
sc                201 fs/xfs/scrub/parent.c 	if (!xchk_fblock_xref_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                206 fs/xfs/scrub/parent.c 	error = xchk_ilock_inverted(sc->ip, XFS_IOLOCK_EXCL);
sc                209 fs/xfs/scrub/parent.c 	sc->ilock_flags = XFS_IOLOCK_EXCL;
sc                216 fs/xfs/scrub/parent.c 	expected_nlink = VFS_I(sc->ip)->i_nlink == 0 ? 0 : 1;
sc                219 fs/xfs/scrub/parent.c 	error = xfs_dir_lookup(sc->tp, sc->ip, &xfs_name_dotdot, &dnum, NULL);
sc                220 fs/xfs/scrub/parent.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                236 fs/xfs/scrub/parent.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                250 fs/xfs/scrub/parent.c 	struct xfs_scrub	*sc)
sc                252 fs/xfs/scrub/parent.c 	struct xfs_mount	*mp = sc->mp;
sc                262 fs/xfs/scrub/parent.c 	if (!S_ISDIR(VFS_I(sc->ip)->i_mode))
sc                266 fs/xfs/scrub/parent.c 	if (!xfs_verify_dir_ino(mp, sc->ip->i_ino)) {
sc                267 fs/xfs/scrub/parent.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                278 fs/xfs/scrub/parent.c 	sc->ilock_flags &= ~(XFS_ILOCK_EXCL | XFS_MMAPLOCK_EXCL);
sc                279 fs/xfs/scrub/parent.c 	xfs_iunlock(sc->ip, XFS_ILOCK_EXCL | XFS_MMAPLOCK_EXCL);
sc                282 fs/xfs/scrub/parent.c 	error = xfs_dir_lookup(sc->tp, sc->ip, &xfs_name_dotdot, &dnum, NULL);
sc                283 fs/xfs/scrub/parent.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                286 fs/xfs/scrub/parent.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                291 fs/xfs/scrub/parent.c 	if (sc->ip == mp->m_rootip) {
sc                292 fs/xfs/scrub/parent.c 		if (sc->ip->i_ino != mp->m_sb.sb_rootino ||
sc                293 fs/xfs/scrub/parent.c 		    sc->ip->i_ino != dnum)
sc                294 fs/xfs/scrub/parent.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                299 fs/xfs/scrub/parent.c 		error = xchk_parent_validate(sc, dnum, &try_again);
sc                309 fs/xfs/scrub/parent.c 		xchk_set_incomplete(sc);
sc                315 fs/xfs/scrub/parent.c 	if ((sc->flags & XCHK_TRY_HARDER) && error == -EDEADLOCK) {
sc                317 fs/xfs/scrub/parent.c 		xchk_set_incomplete(sc);
sc                 23 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc)
sc                 25 fs/xfs/scrub/quota.c 	switch (sc->sm->sm_type) {
sc                 40 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc,
sc                 46 fs/xfs/scrub/quota.c 	if (!XFS_IS_QUOTA_RUNNING(sc->mp) || !XFS_IS_QUOTA_ON(sc->mp))
sc                 49 fs/xfs/scrub/quota.c 	dqtype = xchk_quota_to_dqtype(sc);
sc                 52 fs/xfs/scrub/quota.c 	sc->flags |= XCHK_HAS_QUOTAOFFLOCK;
sc                 53 fs/xfs/scrub/quota.c 	mutex_lock(&sc->mp->m_quotainfo->qi_quotaofflock);
sc                 54 fs/xfs/scrub/quota.c 	if (!xfs_this_quota_on(sc->mp, dqtype))
sc                 56 fs/xfs/scrub/quota.c 	error = xchk_setup_fs(sc, ip);
sc                 59 fs/xfs/scrub/quota.c 	sc->ip = xfs_quota_inode(sc->mp, dqtype);
sc                 60 fs/xfs/scrub/quota.c 	xfs_ilock(sc->ip, XFS_ILOCK_EXCL);
sc                 61 fs/xfs/scrub/quota.c 	sc->ilock_flags = XFS_ILOCK_EXCL;
sc                 68 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc;
sc                 80 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc = sqi->sc;
sc                 81 fs/xfs/scrub/quota.c 	struct xfs_mount	*mp = sc->mp;
sc                103 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                109 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                112 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                132 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                134 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                137 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                139 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                142 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                144 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                159 fs/xfs/scrub/quota.c 			xchk_fblock_set_warning(sc, XFS_DATA_FORK,
sc                163 fs/xfs/scrub/quota.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK,
sc                167 fs/xfs/scrub/quota.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, offset);
sc                175 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                177 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                179 fs/xfs/scrub/quota.c 		xchk_fblock_set_warning(sc, XFS_DATA_FORK, offset);
sc                187 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc)
sc                191 fs/xfs/scrub/quota.c 	struct xfs_quotainfo	*qi = sc->mp->m_quotainfo;
sc                197 fs/xfs/scrub/quota.c 	error = xchk_metadata_inode_forks(sc);
sc                198 fs/xfs/scrub/quota.c 	if (error || (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                203 fs/xfs/scrub/quota.c 	ifp = XFS_IFORK_PTR(sc->ip, XFS_DATA_FORK);
sc                205 fs/xfs/scrub/quota.c 		if (xchk_should_terminate(sc, &error))
sc                214 fs/xfs/scrub/quota.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK,
sc                226 fs/xfs/scrub/quota.c 	struct xfs_scrub	*sc)
sc                229 fs/xfs/scrub/quota.c 	struct xfs_mount	*mp = sc->mp;
sc                234 fs/xfs/scrub/quota.c 	dqtype = xchk_quota_to_dqtype(sc);
sc                237 fs/xfs/scrub/quota.c 	error = xchk_quota_data_fork(sc);
sc                240 fs/xfs/scrub/quota.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                248 fs/xfs/scrub/quota.c 	xfs_iunlock(sc->ip, sc->ilock_flags);
sc                249 fs/xfs/scrub/quota.c 	sc->ilock_flags = 0;
sc                250 fs/xfs/scrub/quota.c 	sqi.sc = sc;
sc                253 fs/xfs/scrub/quota.c 	sc->ilock_flags = XFS_ILOCK_EXCL;
sc                254 fs/xfs/scrub/quota.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                255 fs/xfs/scrub/quota.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK,
sc                 22 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc,
sc                 25 fs/xfs/scrub/refcount.c 	return xchk_setup_ag_btree(sc, ip, false);
sc                 72 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc;
sc                103 fs/xfs/scrub/refcount.c 	if (xchk_should_terminate(refchk->sc, &error))
sc                111 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(refchk->sc, cur, 0);
sc                271 fs/xfs/scrub/refcount.c 	struct xfs_scrub		*sc,
sc                277 fs/xfs/scrub/refcount.c 		.sc = sc,
sc                289 fs/xfs/scrub/refcount.c 	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
sc                299 fs/xfs/scrub/refcount.c 	error = xfs_rmap_query_range(sc->sa.rmap_cur, &low, &high,
sc                301 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                306 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                318 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc,
sc                323 fs/xfs/scrub/refcount.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                326 fs/xfs/scrub/refcount.c 	xchk_xref_is_used_space(sc, agbno, len);
sc                327 fs/xfs/scrub/refcount.c 	xchk_xref_is_not_inode_chunk(sc, agbno, len);
sc                328 fs/xfs/scrub/refcount.c 	xchk_refcountbt_xref_rmap(sc, agbno, len, refcount);
sc                352 fs/xfs/scrub/refcount.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                361 fs/xfs/scrub/refcount.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                364 fs/xfs/scrub/refcount.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                366 fs/xfs/scrub/refcount.c 	xchk_refcountbt_xref(bs->sc, bno, len, refcount);
sc                374 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc,
sc                381 fs/xfs/scrub/refcount.c 	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
sc                385 fs/xfs/scrub/refcount.c 	error = xfs_btree_count_blocks(sc->sa.refc_cur, &refcbt_blocks);
sc                386 fs/xfs/scrub/refcount.c 	if (!xchk_btree_process_error(sc, sc->sa.refc_cur, 0, &error))
sc                388 fs/xfs/scrub/refcount.c 	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
sc                390 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                393 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                396 fs/xfs/scrub/refcount.c 	error = xchk_count_rmap_ownedby_ag(sc, sc->sa.rmap_cur,
sc                398 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                401 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                407 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc)
sc                412 fs/xfs/scrub/refcount.c 	error = xchk_btree(sc, sc->sa.refc_cur, xchk_refcountbt_rec,
sc                417 fs/xfs/scrub/refcount.c 	xchk_refcount_xref_rmap(sc, cow_blocks);
sc                425 fs/xfs/scrub/refcount.c 	struct xfs_scrub		*sc,
sc                434 fs/xfs/scrub/refcount.c 	if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm))
sc                438 fs/xfs/scrub/refcount.c 	error = xfs_refcount_lookup_le(sc->sa.refc_cur,
sc                440 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
sc                443 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                447 fs/xfs/scrub/refcount.c 	error = xfs_refcount_get_rec(sc->sa.refc_cur, &rc, &has_refcount);
sc                448 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
sc                451 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                458 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                462 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                471 fs/xfs/scrub/refcount.c 	struct xfs_scrub	*sc,
sc                478 fs/xfs/scrub/refcount.c 	if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm))
sc                481 fs/xfs/scrub/refcount.c 	error = xfs_refcount_has_record(sc->sa.refc_cur, agbno, len, &shared);
sc                482 fs/xfs/scrub/refcount.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
sc                485 fs/xfs/scrub/refcount.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                 41 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc)
sc                 45 fs/xfs/scrub/repair.c 	trace_xrep_attempt(ip, sc->sm, error);
sc                 47 fs/xfs/scrub/repair.c 	xchk_ag_btcur_free(&sc->sa);
sc                 50 fs/xfs/scrub/repair.c 	ASSERT(sc->ops->repair);
sc                 51 fs/xfs/scrub/repair.c 	error = sc->ops->repair(sc);
sc                 52 fs/xfs/scrub/repair.c 	trace_xrep_done(ip, sc->sm, error);
sc                 59 fs/xfs/scrub/repair.c 		sc->sm->sm_flags &= ~XFS_SCRUB_FLAGS_OUT;
sc                 60 fs/xfs/scrub/repair.c 		sc->flags |= XREP_ALREADY_FIXED;
sc                 65 fs/xfs/scrub/repair.c 		if (!(sc->flags & XCHK_TRY_HARDER)) {
sc                 66 fs/xfs/scrub/repair.c 			sc->flags |= XCHK_TRY_HARDER;
sc                103 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc)
sc                107 fs/xfs/scrub/repair.c 	if (xchk_should_terminate(sc, &error))
sc                119 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc)
sc                124 fs/xfs/scrub/repair.c 	if (sc->sa.agi_bp)
sc                125 fs/xfs/scrub/repair.c 		xfs_trans_bhold(sc->tp, sc->sa.agi_bp);
sc                126 fs/xfs/scrub/repair.c 	if (sc->sa.agf_bp)
sc                127 fs/xfs/scrub/repair.c 		xfs_trans_bhold(sc->tp, sc->sa.agf_bp);
sc                128 fs/xfs/scrub/repair.c 	if (sc->sa.agfl_bp)
sc                129 fs/xfs/scrub/repair.c 		xfs_trans_bhold(sc->tp, sc->sa.agfl_bp);
sc                138 fs/xfs/scrub/repair.c 	error = xfs_trans_roll(&sc->tp);
sc                143 fs/xfs/scrub/repair.c 	if (sc->sa.agi_bp)
sc                144 fs/xfs/scrub/repair.c 		xfs_trans_bjoin(sc->tp, sc->sa.agi_bp);
sc                145 fs/xfs/scrub/repair.c 	if (sc->sa.agf_bp)
sc                146 fs/xfs/scrub/repair.c 		xfs_trans_bjoin(sc->tp, sc->sa.agf_bp);
sc                147 fs/xfs/scrub/repair.c 	if (sc->sa.agfl_bp)
sc                148 fs/xfs/scrub/repair.c 		xfs_trans_bjoin(sc->tp, sc->sa.agfl_bp);
sc                176 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc)
sc                178 fs/xfs/scrub/repair.c 	struct xfs_mount		*mp = sc->mp;
sc                179 fs/xfs/scrub/repair.c 	struct xfs_scrub_metadata	*sm = sc->sm;
sc                284 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc,
sc                296 fs/xfs/scrub/repair.c 		error = xfs_alloc_get_freelist(sc->tp, sc->sa.agf_bp, &bno, 1);
sc                301 fs/xfs/scrub/repair.c 		xfs_extent_busy_reuse(sc->mp, sc->sa.agno, bno,
sc                303 fs/xfs/scrub/repair.c 		*fsbno = XFS_AGB_TO_FSB(sc->mp, sc->sa.agno, bno);
sc                305 fs/xfs/scrub/repair.c 			xfs_ag_resv_rmapbt_alloc(sc->mp, sc->sa.agno);
sc                311 fs/xfs/scrub/repair.c 	args.tp = sc->tp;
sc                312 fs/xfs/scrub/repair.c 	args.mp = sc->mp;
sc                314 fs/xfs/scrub/repair.c 	args.fsbno = XFS_AGB_TO_FSB(args.mp, sc->sa.agno, 0);
sc                335 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc,
sc                341 fs/xfs/scrub/repair.c 	struct xfs_trans		*tp = sc->tp;
sc                342 fs/xfs/scrub/repair.c 	struct xfs_mount		*mp = sc->mp;
sc                348 fs/xfs/scrub/repair.c 	ASSERT(XFS_FSB_TO_AGNO(mp, fsb) == sc->sa.agno);
sc                352 fs/xfs/scrub/repair.c 	xfs_btree_init_block(mp, bp, btnum, 0, 0, sc->sa.agno);
sc                432 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc,
sc                450 fs/xfs/scrub/repair.c 		if (!xfs_verify_fsbno(sc->mp, fsbno))
sc                452 fs/xfs/scrub/repair.c 		bp = xfs_buf_incore(sc->mp->m_ddev_targp,
sc                453 fs/xfs/scrub/repair.c 				XFS_FSB_TO_DADDR(sc->mp, fsbno),
sc                454 fs/xfs/scrub/repair.c 				XFS_FSB_TO_BB(sc->mp, 1), XBF_TRYLOCK);
sc                456 fs/xfs/scrub/repair.c 			xfs_trans_bjoin(sc->tp, bp);
sc                457 fs/xfs/scrub/repair.c 			xfs_trans_binval(sc->tp, bp);
sc                467 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc,
sc                472 fs/xfs/scrub/repair.c 	args.mp = sc->mp;
sc                473 fs/xfs/scrub/repair.c 	args.tp = sc->tp;
sc                474 fs/xfs/scrub/repair.c 	args.agno = sc->sa.agno;
sc                476 fs/xfs/scrub/repair.c 	args.pag = sc->sa.pag;
sc                487 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc,
sc                493 fs/xfs/scrub/repair.c 	error = xrep_fix_freelist(sc, true);
sc                502 fs/xfs/scrub/repair.c 	error = xfs_rmap_alloc(sc->tp, sc->sa.agf_bp, sc->sa.agno, agbno, 1,
sc                508 fs/xfs/scrub/repair.c 	error = xfs_alloc_put_freelist(sc->tp, sc->sa.agf_bp, sc->sa.agfl_bp,
sc                512 fs/xfs/scrub/repair.c 	xfs_extent_busy_insert(sc->tp, sc->sa.agno, agbno, 1,
sc                521 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc,
sc                533 fs/xfs/scrub/repair.c 	agno = XFS_FSB_TO_AGNO(sc->mp, fsbno);
sc                534 fs/xfs/scrub/repair.c 	agbno = XFS_FSB_TO_AGBNO(sc->mp, fsbno);
sc                541 fs/xfs/scrub/repair.c 	if (sc->ip) {
sc                542 fs/xfs/scrub/repair.c 		error = xfs_alloc_read_agf(sc->mp, sc->tp, agno, 0, &agf_bp);
sc                548 fs/xfs/scrub/repair.c 		agf_bp = sc->sa.agf_bp;
sc                550 fs/xfs/scrub/repair.c 	cur = xfs_rmapbt_init_cursor(sc->mp, sc->tp, agf_bp, agno);
sc                572 fs/xfs/scrub/repair.c 		error = xfs_rmap_free(sc->tp, agf_bp, agno, agbno, 1, oinfo);
sc                574 fs/xfs/scrub/repair.c 		error = xrep_put_freelist(sc, agbno);
sc                576 fs/xfs/scrub/repair.c 		error = xfs_free_extent(sc->tp, fsbno, 1, oinfo, resv);
sc                577 fs/xfs/scrub/repair.c 	if (agf_bp != sc->sa.agf_bp)
sc                578 fs/xfs/scrub/repair.c 		xfs_trans_brelse(sc->tp, agf_bp);
sc                582 fs/xfs/scrub/repair.c 	if (sc->ip)
sc                583 fs/xfs/scrub/repair.c 		return xfs_trans_roll_inode(&sc->tp, sc->ip);
sc                584 fs/xfs/scrub/repair.c 	return xrep_roll_ag_trans(sc);
sc                587 fs/xfs/scrub/repair.c 	if (agf_bp != sc->sa.agf_bp)
sc                588 fs/xfs/scrub/repair.c 		xfs_trans_brelse(sc->tp, agf_bp);
sc                595 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc,
sc                605 fs/xfs/scrub/repair.c 	ASSERT(xfs_sb_version_hasrmapbt(&sc->mp->m_sb));
sc                608 fs/xfs/scrub/repair.c 		ASSERT(sc->ip != NULL ||
sc                609 fs/xfs/scrub/repair.c 		       XFS_FSB_TO_AGNO(sc->mp, fsbno) == sc->sa.agno);
sc                610 fs/xfs/scrub/repair.c 		trace_xrep_dispose_btree_extent(sc->mp,
sc                611 fs/xfs/scrub/repair.c 				XFS_FSB_TO_AGNO(sc->mp, fsbno),
sc                612 fs/xfs/scrub/repair.c 				XFS_FSB_TO_AGBNO(sc->mp, fsbno), 1);
sc                614 fs/xfs/scrub/repair.c 		error = xrep_reap_block(sc, fsbno, oinfo, type);
sc                652 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc;
sc                679 fs/xfs/scrub/repair.c 	struct xfs_mount		*mp = ri->sc->mp;
sc                686 fs/xfs/scrub/repair.c 	daddr = XFS_AGB_TO_DADDR(mp, ri->sc->sa.agno, agbno);
sc                721 fs/xfs/scrub/repair.c 	error = xfs_trans_read_buf(mp, ri->sc->tp, mp->m_ddev_targp, daddr,
sc                815 fs/xfs/scrub/repair.c 	trace_xrep_findroot_block(mp, ri->sc->sa.agno, agbno,
sc                818 fs/xfs/scrub/repair.c 	xfs_trans_brelse(ri->sc->tp, bp);
sc                864 fs/xfs/scrub/repair.c 	struct xfs_scrub		*sc,
sc                869 fs/xfs/scrub/repair.c 	struct xfs_mount		*mp = sc->mp;
sc                878 fs/xfs/scrub/repair.c 	ri.sc = sc;
sc                889 fs/xfs/scrub/repair.c 	cur = xfs_rmapbt_init_cursor(mp, sc->tp, agf_bp, sc->sa.agno);
sc                899 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc,
sc                905 fs/xfs/scrub/repair.c 	if (!(flag & sc->mp->m_qflags))
sc                908 fs/xfs/scrub/repair.c 	sc->mp->m_qflags &= ~flag;
sc                909 fs/xfs/scrub/repair.c 	spin_lock(&sc->mp->m_sb_lock);
sc                910 fs/xfs/scrub/repair.c 	sc->mp->m_sb.sb_qflags &= ~flag;
sc                911 fs/xfs/scrub/repair.c 	spin_unlock(&sc->mp->m_sb_lock);
sc                912 fs/xfs/scrub/repair.c 	xfs_log_sb(sc->tp);
sc                927 fs/xfs/scrub/repair.c 	struct xfs_scrub	*sc)
sc                931 fs/xfs/scrub/repair.c 	error = xfs_qm_dqattach_locked(sc->ip, false);
sc                936 fs/xfs/scrub/repair.c 		xfs_err_ratelimited(sc->mp,
sc                938 fs/xfs/scrub/repair.c 				(unsigned long long)sc->ip->i_ino, error);
sc                939 fs/xfs/scrub/repair.c 		if (XFS_IS_UQUOTA_ON(sc->mp) && !sc->ip->i_udquot)
sc                940 fs/xfs/scrub/repair.c 			xrep_force_quotacheck(sc, XFS_DQ_USER);
sc                941 fs/xfs/scrub/repair.c 		if (XFS_IS_GQUOTA_ON(sc->mp) && !sc->ip->i_gdquot)
sc                942 fs/xfs/scrub/repair.c 			xrep_force_quotacheck(sc, XFS_DQ_GROUP);
sc                943 fs/xfs/scrub/repair.c 		if (XFS_IS_PQUOTA_ON(sc->mp) && !sc->ip->i_pdquot)
sc                944 fs/xfs/scrub/repair.c 			xrep_force_quotacheck(sc, XFS_DQ_PROJ);
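The repair.c references around the transaction roll show a hold/roll/rejoin sequence: each AG header buffer is held before the roll so the commit does not unlock it, then joined to the new transaction afterwards. The following is a placeholder model of that sequencing only, with fake transaction and buffer types; it is not real transaction code.

/*
 * Sketch of "roll a transaction while keeping buffers locked": hold each
 * buffer before the roll, then join it to the new transaction. All types
 * and helpers below are stand-ins.
 */
#include <stdio.h>

struct fake_buf   { const char *name; int joined; };
struct fake_trans { int seq; };

static void bhold(struct fake_trans *tp, struct fake_buf *bp)
{
	(void)tp;
	bp->joined = 0;			/* stays locked across the commit */
}

static void bjoin(struct fake_trans *tp, struct fake_buf *bp)
{
	bp->joined = tp->seq;		/* attach to the new transaction */
}

static int roll(struct fake_trans *tp)
{
	tp->seq++;			/* commit old, start new */
	return 0;
}

int main(void)
{
	struct fake_trans tp = { .seq = 1 };
	struct fake_buf agi = { "agi" }, agf = { "agf" };
	int error;

	bhold(&tp, &agi);
	bhold(&tp, &agf);
	error = roll(&tp);
	if (error)
		return error;
	bjoin(&tp, &agi);
	bjoin(&tp, &agf);
	printf("buffers rejoined to transaction %d\n", tp.seq);
	return 0;
}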
sc                  9 fs/xfs/scrub/repair.h static inline int xrep_notsupported(struct xfs_scrub *sc)
sc                 18 fs/xfs/scrub/repair.h int xrep_attempt(struct xfs_inode *ip, struct xfs_scrub *sc);
sc                 20 fs/xfs/scrub/repair.h int xrep_roll_ag_trans(struct xfs_scrub *sc);
sc                 23 fs/xfs/scrub/repair.h xfs_extlen_t xrep_calc_ag_resblks(struct xfs_scrub *sc);
sc                 24 fs/xfs/scrub/repair.h int xrep_alloc_ag_block(struct xfs_scrub *sc,
sc                 27 fs/xfs/scrub/repair.h int xrep_init_btblock(struct xfs_scrub *sc, xfs_fsblock_t fsb,
sc                 33 fs/xfs/scrub/repair.h int xrep_fix_freelist(struct xfs_scrub *sc, bool can_shrink);
sc                 34 fs/xfs/scrub/repair.h int xrep_invalidate_blocks(struct xfs_scrub *sc, struct xfs_bitmap *btlist);
sc                 35 fs/xfs/scrub/repair.h int xrep_reap_extents(struct xfs_scrub *sc, struct xfs_bitmap *exlist,
sc                 50 fs/xfs/scrub/repair.h int xrep_find_ag_btree_roots(struct xfs_scrub *sc, struct xfs_buf *agf_bp,
sc                 52 fs/xfs/scrub/repair.h void xrep_force_quotacheck(struct xfs_scrub *sc, uint dqtype);
sc                 53 fs/xfs/scrub/repair.h int xrep_ino_dqattach(struct xfs_scrub *sc);
sc                 57 fs/xfs/scrub/repair.h int xrep_probe(struct xfs_scrub *sc);
sc                 58 fs/xfs/scrub/repair.h int xrep_superblock(struct xfs_scrub *sc);
sc                 59 fs/xfs/scrub/repair.h int xrep_agf(struct xfs_scrub *sc);
sc                 60 fs/xfs/scrub/repair.h int xrep_agfl(struct xfs_scrub *sc);
sc                 61 fs/xfs/scrub/repair.h int xrep_agi(struct xfs_scrub *sc);
sc                 67 fs/xfs/scrub/repair.h 	struct xfs_scrub	*sc)
sc                 76 fs/xfs/scrub/repair.h 	struct xfs_scrub	*sc)
sc                 78 fs/xfs/scrub/repair.h 	ASSERT(!(sc->sm->sm_flags & XFS_SCRUB_IFLAG_REPAIR));
sc                 24 fs/xfs/scrub/rmap.c 	struct xfs_scrub	*sc,
sc                 27 fs/xfs/scrub/rmap.c 	return xchk_setup_ag_btree(sc, ip, false);
sc                 35 fs/xfs/scrub/rmap.c 	struct xfs_scrub	*sc,
sc                 46 fs/xfs/scrub/rmap.c 	if (!sc->sa.refc_cur || xchk_skip_xref(sc->sm))
sc                 55 fs/xfs/scrub/rmap.c 	error = xfs_refcount_find_shared(sc->sa.refc_cur, irec->rm_startblock,
sc                 57 fs/xfs/scrub/rmap.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.refc_cur))
sc                 60 fs/xfs/scrub/rmap.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.refc_cur, 0);
sc                 66 fs/xfs/scrub/rmap.c 	struct xfs_scrub	*sc,
sc                 72 fs/xfs/scrub/rmap.c 	if (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT)
sc                 75 fs/xfs/scrub/rmap.c 	xchk_xref_is_used_space(sc, agbno, len);
sc                 77 fs/xfs/scrub/rmap.c 		xchk_xref_is_inode_chunk(sc, agbno, len);
sc                 79 fs/xfs/scrub/rmap.c 		xchk_xref_is_not_inode_chunk(sc, agbno, len);
sc                 81 fs/xfs/scrub/rmap.c 		xchk_xref_is_cow_staging(sc, irec->rm_startblock,
sc                 84 fs/xfs/scrub/rmap.c 		xchk_rmapbt_xref_refc(sc, irec);
sc                103 fs/xfs/scrub/rmap.c 	if (!xchk_btree_process_error(bs->sc, bs->cur, 0, &error))
sc                108 fs/xfs/scrub/rmap.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                118 fs/xfs/scrub/rmap.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                127 fs/xfs/scrub/rmap.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                137 fs/xfs/scrub/rmap.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                140 fs/xfs/scrub/rmap.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                143 fs/xfs/scrub/rmap.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                146 fs/xfs/scrub/rmap.c 		xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                150 fs/xfs/scrub/rmap.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                155 fs/xfs/scrub/rmap.c 			xchk_btree_set_corrupt(bs->sc, bs->cur, 0);
sc                158 fs/xfs/scrub/rmap.c 	xchk_rmapbt_xref(bs->sc, &irec);
sc                166 fs/xfs/scrub/rmap.c 	struct xfs_scrub	*sc)
sc                168 fs/xfs/scrub/rmap.c 	return xchk_btree(sc, sc->sa.rmap_cur, xchk_rmapbt_rec,
sc                175 fs/xfs/scrub/rmap.c 	struct xfs_scrub		*sc,
sc                184 fs/xfs/scrub/rmap.c 	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
sc                187 fs/xfs/scrub/rmap.c 	error = xfs_rmap_record_exists(sc->sa.rmap_cur, bno, len, oinfo,
sc                189 fs/xfs/scrub/rmap.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                192 fs/xfs/scrub/rmap.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                198 fs/xfs/scrub/rmap.c 	struct xfs_scrub		*sc,
sc                203 fs/xfs/scrub/rmap.c 	xchk_xref_check_owner(sc, bno, len, oinfo, true);
sc                209 fs/xfs/scrub/rmap.c 	struct xfs_scrub		*sc,
sc                214 fs/xfs/scrub/rmap.c 	xchk_xref_check_owner(sc, bno, len, oinfo, false);
sc                220 fs/xfs/scrub/rmap.c 	struct xfs_scrub	*sc,
sc                227 fs/xfs/scrub/rmap.c 	if (!sc->sa.rmap_cur || xchk_skip_xref(sc->sm))
sc                230 fs/xfs/scrub/rmap.c 	error = xfs_rmap_has_record(sc->sa.rmap_cur, bno, len, &has_rmap);
sc                231 fs/xfs/scrub/rmap.c 	if (!xchk_should_check_xref(sc, &error, &sc->sa.rmap_cur))
sc                234 fs/xfs/scrub/rmap.c 		xchk_btree_xref_set_corrupt(sc, sc->sa.rmap_cur, 0);
sc                 22 fs/xfs/scrub/rtbitmap.c 	struct xfs_scrub	*sc,
sc                 27 fs/xfs/scrub/rtbitmap.c 	error = xchk_setup_fs(sc, ip);
sc                 31 fs/xfs/scrub/rtbitmap.c 	sc->ilock_flags = XFS_ILOCK_EXCL | XFS_ILOCK_RTBITMAP;
sc                 32 fs/xfs/scrub/rtbitmap.c 	sc->ip = sc->mp->m_rbmip;
sc                 33 fs/xfs/scrub/rtbitmap.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                 47 fs/xfs/scrub/rtbitmap.c 	struct xfs_scrub	*sc = priv;
sc                 55 fs/xfs/scrub/rtbitmap.c 	    !xfs_verify_rtbno(sc->mp, startblock) ||
sc                 56 fs/xfs/scrub/rtbitmap.c 	    !xfs_verify_rtbno(sc->mp, startblock + blockcount - 1))
sc                 57 fs/xfs/scrub/rtbitmap.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                 64 fs/xfs/scrub/rtbitmap.c 	struct xfs_scrub	*sc)
sc                 69 fs/xfs/scrub/rtbitmap.c 	error = xchk_metadata_inode_forks(sc);
sc                 70 fs/xfs/scrub/rtbitmap.c 	if (error || (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                 73 fs/xfs/scrub/rtbitmap.c 	error = xfs_rtalloc_query_all(sc->tp, xchk_rtbitmap_rec, sc);
sc                 74 fs/xfs/scrub/rtbitmap.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                 84 fs/xfs/scrub/rtbitmap.c 	struct xfs_scrub	*sc)
sc                 86 fs/xfs/scrub/rtbitmap.c 	struct xfs_inode	*rsumip = sc->mp->m_rsumip;
sc                 87 fs/xfs/scrub/rtbitmap.c 	struct xfs_inode	*old_ip = sc->ip;
sc                 88 fs/xfs/scrub/rtbitmap.c 	uint			old_ilock_flags = sc->ilock_flags;
sc                 98 fs/xfs/scrub/rtbitmap.c 	sc->ip = rsumip;
sc                 99 fs/xfs/scrub/rtbitmap.c 	sc->ilock_flags = XFS_ILOCK_EXCL | XFS_ILOCK_RTSUM;
sc                100 fs/xfs/scrub/rtbitmap.c 	xfs_ilock(sc->ip, sc->ilock_flags);
sc                103 fs/xfs/scrub/rtbitmap.c 	error = xchk_metadata_inode_forks(sc);
sc                104 fs/xfs/scrub/rtbitmap.c 	if (error || (sc->sm->sm_flags & XFS_SCRUB_OFLAG_CORRUPT))
sc                108 fs/xfs/scrub/rtbitmap.c 	xchk_set_incomplete(sc);
sc                111 fs/xfs/scrub/rtbitmap.c 	xfs_iunlock(sc->ip, sc->ilock_flags);
sc                112 fs/xfs/scrub/rtbitmap.c 	sc->ilock_flags = old_ilock_flags;
sc                113 fs/xfs/scrub/rtbitmap.c 	sc->ip = old_ip;
sc                121 fs/xfs/scrub/rtbitmap.c 	struct xfs_scrub	*sc,
sc                131 fs/xfs/scrub/rtbitmap.c 	if (xchk_skip_xref(sc->sm))
sc                136 fs/xfs/scrub/rtbitmap.c 	do_div(startext, sc->mp->m_sb.sb_rextsize);
sc                137 fs/xfs/scrub/rtbitmap.c 	do_div(endext, sc->mp->m_sb.sb_rextsize);
sc                139 fs/xfs/scrub/rtbitmap.c 	xfs_ilock(sc->mp->m_rbmip, XFS_ILOCK_SHARED | XFS_ILOCK_RTBITMAP);
sc                140 fs/xfs/scrub/rtbitmap.c 	error = xfs_rtalloc_extent_is_free(sc->mp, sc->tp, startext, extcount,
sc                142 fs/xfs/scrub/rtbitmap.c 	if (!xchk_should_check_xref(sc, &error, NULL))
sc                145 fs/xfs/scrub/rtbitmap.c 		xchk_ino_xref_set_corrupt(sc, sc->mp->m_rbmip->i_ino);
sc                147 fs/xfs/scrub/rtbitmap.c 	xfs_iunlock(sc->mp->m_rbmip, XFS_ILOCK_SHARED | XFS_ILOCK_RTBITMAP);
sc                135 fs/xfs/scrub/scrub.c 	struct xfs_scrub	*sc)
sc                139 fs/xfs/scrub/scrub.c 	if (xchk_should_terminate(sc, &error))
sc                150 fs/xfs/scrub/scrub.c 	struct xfs_scrub	*sc,
sc                154 fs/xfs/scrub/scrub.c 	xchk_ag_free(sc, &sc->sa);
sc                155 fs/xfs/scrub/scrub.c 	if (sc->tp) {
sc                156 fs/xfs/scrub/scrub.c 		if (error == 0 && (sc->sm->sm_flags & XFS_SCRUB_IFLAG_REPAIR))
sc                157 fs/xfs/scrub/scrub.c 			error = xfs_trans_commit(sc->tp);
sc                159 fs/xfs/scrub/scrub.c 			xfs_trans_cancel(sc->tp);
sc                160 fs/xfs/scrub/scrub.c 		sc->tp = NULL;
sc                162 fs/xfs/scrub/scrub.c 	if (sc->ip) {
sc                163 fs/xfs/scrub/scrub.c 		if (sc->ilock_flags)
sc                164 fs/xfs/scrub/scrub.c 			xfs_iunlock(sc->ip, sc->ilock_flags);
sc                165 fs/xfs/scrub/scrub.c 		if (sc->ip != ip_in &&
sc                166 fs/xfs/scrub/scrub.c 		    !xfs_internal_inum(sc->mp, sc->ip->i_ino))
sc                167 fs/xfs/scrub/scrub.c 			xfs_irele(sc->ip);
sc                168 fs/xfs/scrub/scrub.c 		sc->ip = NULL;
sc                170 fs/xfs/scrub/scrub.c 	if (sc->flags & XCHK_REAPING_DISABLED)
sc                171 fs/xfs/scrub/scrub.c 		xchk_start_reaping(sc);
sc                172 fs/xfs/scrub/scrub.c 	if (sc->flags & XCHK_HAS_QUOTAOFFLOCK) {
sc                173 fs/xfs/scrub/scrub.c 		mutex_unlock(&sc->mp->m_quotainfo->qi_quotaofflock);
sc                174 fs/xfs/scrub/scrub.c 		sc->flags &= ~XCHK_HAS_QUOTAOFFLOCK;
sc                176 fs/xfs/scrub/scrub.c 	if (sc->buf) {
sc                177 fs/xfs/scrub/scrub.c 		kmem_free(sc->buf);
sc                178 fs/xfs/scrub/scrub.c 		sc->buf = NULL;
sc                428 fs/xfs/scrub/scrub.c static inline void xchk_postmortem(struct xfs_scrub *sc)
sc                435 fs/xfs/scrub/scrub.c 	if ((sc->sm->sm_flags & XFS_SCRUB_IFLAG_REPAIR) &&
sc                436 fs/xfs/scrub/scrub.c 	    (sc->sm->sm_flags & (XFS_SCRUB_OFLAG_CORRUPT |
sc                438 fs/xfs/scrub/scrub.c 		xrep_failure(sc->mp);
sc                441 fs/xfs/scrub/scrub.c static inline void xchk_postmortem(struct xfs_scrub *sc)
sc                447 fs/xfs/scrub/scrub.c 	if (sc->sm->sm_flags & (XFS_SCRUB_OFLAG_CORRUPT |
sc                449 fs/xfs/scrub/scrub.c 		xfs_alert_ratelimited(sc->mp,
sc                460 fs/xfs/scrub/scrub.c 	struct xfs_scrub		sc = {
sc                489 fs/xfs/scrub/scrub.c 	sc.ops = &meta_scrub_ops[sm->sm_type];
sc                490 fs/xfs/scrub/scrub.c 	sc.sick_mask = xchk_health_mask_for_scrub_type(sm->sm_type);
sc                493 fs/xfs/scrub/scrub.c 	error = sc.ops->setup(&sc, ip);
sc                498 fs/xfs/scrub/scrub.c 	error = sc.ops->scrub(&sc);
sc                499 fs/xfs/scrub/scrub.c 	if (!(sc.flags & XCHK_TRY_HARDER) && error == -EDEADLOCK) {
sc                505 fs/xfs/scrub/scrub.c 		error = xchk_teardown(&sc, ip, 0);
sc                508 fs/xfs/scrub/scrub.c 		sc.flags |= XCHK_TRY_HARDER;
sc                513 fs/xfs/scrub/scrub.c 	xchk_update_health(&sc);
sc                515 fs/xfs/scrub/scrub.c 	if ((sc.sm->sm_flags & XFS_SCRUB_IFLAG_REPAIR) &&
sc                516 fs/xfs/scrub/scrub.c 	    !(sc.flags & XREP_ALREADY_FIXED)) {
sc                521 fs/xfs/scrub/scrub.c 			sc.sm->sm_flags |= XFS_SCRUB_OFLAG_CORRUPT;
sc                523 fs/xfs/scrub/scrub.c 		needs_fix = (sc.sm->sm_flags & (XFS_SCRUB_OFLAG_CORRUPT |
sc                531 fs/xfs/scrub/scrub.c 			sc.sm->sm_flags |= XFS_SCRUB_OFLAG_NO_REPAIR_NEEDED;
sc                539 fs/xfs/scrub/scrub.c 		error = xrep_attempt(ip, &sc);
sc                546 fs/xfs/scrub/scrub.c 			error = xchk_teardown(&sc, ip, 0);
sc                556 fs/xfs/scrub/scrub.c 	xchk_postmortem(&sc);
sc                558 fs/xfs/scrub/scrub.c 	error = xchk_teardown(&sc, ip, error);
sc                 87 fs/xfs/scrub/scrub.h int xchk_tester(struct xfs_scrub *sc);
sc                 88 fs/xfs/scrub/scrub.h int xchk_superblock(struct xfs_scrub *sc);
sc                 89 fs/xfs/scrub/scrub.h int xchk_agf(struct xfs_scrub *sc);
sc                 90 fs/xfs/scrub/scrub.h int xchk_agfl(struct xfs_scrub *sc);
sc                 91 fs/xfs/scrub/scrub.h int xchk_agi(struct xfs_scrub *sc);
sc                 92 fs/xfs/scrub/scrub.h int xchk_bnobt(struct xfs_scrub *sc);
sc                 93 fs/xfs/scrub/scrub.h int xchk_cntbt(struct xfs_scrub *sc);
sc                 94 fs/xfs/scrub/scrub.h int xchk_inobt(struct xfs_scrub *sc);
sc                 95 fs/xfs/scrub/scrub.h int xchk_finobt(struct xfs_scrub *sc);
sc                 96 fs/xfs/scrub/scrub.h int xchk_rmapbt(struct xfs_scrub *sc);
sc                 97 fs/xfs/scrub/scrub.h int xchk_refcountbt(struct xfs_scrub *sc);
sc                 98 fs/xfs/scrub/scrub.h int xchk_inode(struct xfs_scrub *sc);
sc                 99 fs/xfs/scrub/scrub.h int xchk_bmap_data(struct xfs_scrub *sc);
sc                100 fs/xfs/scrub/scrub.h int xchk_bmap_attr(struct xfs_scrub *sc);
sc                101 fs/xfs/scrub/scrub.h int xchk_bmap_cow(struct xfs_scrub *sc);
sc                102 fs/xfs/scrub/scrub.h int xchk_directory(struct xfs_scrub *sc);
sc                103 fs/xfs/scrub/scrub.h int xchk_xattr(struct xfs_scrub *sc);
sc                104 fs/xfs/scrub/scrub.h int xchk_symlink(struct xfs_scrub *sc);
sc                105 fs/xfs/scrub/scrub.h int xchk_parent(struct xfs_scrub *sc);
sc                107 fs/xfs/scrub/scrub.h int xchk_rtbitmap(struct xfs_scrub *sc);
sc                108 fs/xfs/scrub/scrub.h int xchk_rtsummary(struct xfs_scrub *sc);
sc                111 fs/xfs/scrub/scrub.h xchk_rtbitmap(struct xfs_scrub *sc)
sc                116 fs/xfs/scrub/scrub.h xchk_rtsummary(struct xfs_scrub *sc)
sc                122 fs/xfs/scrub/scrub.h int xchk_quota(struct xfs_scrub *sc);
sc                125 fs/xfs/scrub/scrub.h xchk_quota(struct xfs_scrub *sc)
sc                130 fs/xfs/scrub/scrub.h int xchk_fscounters(struct xfs_scrub *sc);
sc                133 fs/xfs/scrub/scrub.h void xchk_xref_is_used_space(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                135 fs/xfs/scrub/scrub.h void xchk_xref_is_not_inode_chunk(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                137 fs/xfs/scrub/scrub.h void xchk_xref_is_inode_chunk(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                139 fs/xfs/scrub/scrub.h void xchk_xref_is_owned_by(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                141 fs/xfs/scrub/scrub.h void xchk_xref_is_not_owned_by(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                143 fs/xfs/scrub/scrub.h void xchk_xref_has_no_owner(struct xfs_scrub *sc, xfs_agblock_t agbno,
sc                145 fs/xfs/scrub/scrub.h void xchk_xref_is_cow_staging(struct xfs_scrub *sc, xfs_agblock_t bno,
sc                147 fs/xfs/scrub/scrub.h void xchk_xref_is_not_shared(struct xfs_scrub *sc, xfs_agblock_t bno,
sc                150 fs/xfs/scrub/scrub.h void xchk_xref_is_used_rt_space(struct xfs_scrub *sc, xfs_rtblock_t rtbno,
sc                153 fs/xfs/scrub/scrub.h # define xchk_xref_is_used_rt_space(sc, rtbno, len) do { } while (0)
sc                 21 fs/xfs/scrub/symlink.c 	struct xfs_scrub	*sc,
sc                 25 fs/xfs/scrub/symlink.c 	sc->buf = kmem_zalloc_large(XFS_SYMLINK_MAXLEN + 1, 0);
sc                 26 fs/xfs/scrub/symlink.c 	if (!sc->buf)
sc                 29 fs/xfs/scrub/symlink.c 	return xchk_setup_inode_contents(sc, ip, 0);
sc                 36 fs/xfs/scrub/symlink.c 	struct xfs_scrub	*sc)
sc                 38 fs/xfs/scrub/symlink.c 	struct xfs_inode	*ip = sc->ip;
sc                 50 fs/xfs/scrub/symlink.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                 58 fs/xfs/scrub/symlink.c 			xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                 63 fs/xfs/scrub/symlink.c 	error = xfs_readlink_bmap_ilocked(sc->ip, sc->buf);
sc                 64 fs/xfs/scrub/symlink.c 	if (!xchk_fblock_process_error(sc, XFS_DATA_FORK, 0, &error))
sc                 66 fs/xfs/scrub/symlink.c 	if (strnlen(sc->buf, XFS_SYMLINK_MAXLEN) < len)
sc                 67 fs/xfs/scrub/symlink.c 		xchk_fblock_set_corrupt(sc, XFS_DATA_FORK, 0);
sc                129 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, xfs_agnumber_t agno,
sc                131 fs/xfs/scrub/trace.h 	TP_ARGS(sc, agno, bno, error, ret_ip),
sc                141 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                142 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                158 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, int whichfork,
sc                160 fs/xfs/scrub/trace.h 	TP_ARGS(sc, whichfork, offset, error, ret_ip),
sc                171 fs/xfs/scrub/trace.h 		__entry->dev = sc->ip->i_mount->m_super->s_dev;
sc                172 fs/xfs/scrub/trace.h 		__entry->ino = sc->ip->i_ino;
sc                174 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                190 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, xfs_daddr_t daddr, void *ret_ip),
sc                191 fs/xfs/scrub/trace.h 	TP_ARGS(sc, daddr, ret_ip),
sc                204 fs/xfs/scrub/trace.h 		fsbno = XFS_DADDR_TO_FSB(sc->mp, daddr);
sc                205 fs/xfs/scrub/trace.h 		agno = XFS_FSB_TO_AGNO(sc->mp, fsbno);
sc                206 fs/xfs/scrub/trace.h 		bno = XFS_FSB_TO_AGBNO(sc->mp, fsbno);
sc                208 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                209 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                224 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, xfs_daddr_t daddr, \
sc                226 fs/xfs/scrub/trace.h 	TP_ARGS(sc, daddr, ret_ip))
sc                233 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, xfs_ino_t ino, void *ret_ip),
sc                234 fs/xfs/scrub/trace.h 	TP_ARGS(sc, ino, ret_ip),
sc                242 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                244 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                256 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, xfs_ino_t ino, \
sc                258 fs/xfs/scrub/trace.h 	TP_ARGS(sc, ino, ret_ip))
sc                265 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, int whichfork,
sc                267 fs/xfs/scrub/trace.h 	TP_ARGS(sc, whichfork, offset, ret_ip),
sc                277 fs/xfs/scrub/trace.h 		__entry->dev = sc->ip->i_mount->m_super->s_dev;
sc                278 fs/xfs/scrub/trace.h 		__entry->ino = sc->ip->i_ino;
sc                280 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                295 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, int whichfork, \
sc                297 fs/xfs/scrub/trace.h 	TP_ARGS(sc, whichfork, offset, ret_ip))
sc                303 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, void *ret_ip),
sc                304 fs/xfs/scrub/trace.h 	TP_ARGS(sc, ret_ip),
sc                311 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                312 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                322 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                324 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level, error, ret_ip),
sc                339 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                340 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                362 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                364 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level, error, ret_ip),
sc                380 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                381 fs/xfs/scrub/trace.h 		__entry->ino = sc->ip->i_ino;
sc                383 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                407 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                409 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level, ret_ip),
sc                422 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                423 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                443 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                445 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level, ret_ip),
sc                460 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                461 fs/xfs/scrub/trace.h 		__entry->ino = sc->ip->i_ino;
sc                463 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                485 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur,
sc                487 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level),
sc                501 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                502 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc                522 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, struct xfs_btree_cur *cur, \
sc                524 fs/xfs/scrub/trace.h 	TP_ARGS(sc, cur, level))
sc                530 fs/xfs/scrub/trace.h 	TP_PROTO(struct xfs_scrub *sc, int error, void *ret_ip),
sc                531 fs/xfs/scrub/trace.h 	TP_ARGS(sc, error, ret_ip),
sc                539 fs/xfs/scrub/trace.h 		__entry->dev = sc->mp->m_super->s_dev;
sc                540 fs/xfs/scrub/trace.h 		__entry->type = sc->sm->sm_type;
sc               1667 fs/xfs/xfs_buf.c 	struct shrink_control	*sc)
sc               1674 fs/xfs/xfs_buf.c 	freed = list_lru_shrink_walk(&btp->bt_lru, sc,
sc               1690 fs/xfs/xfs_buf.c 	struct shrink_control	*sc)
sc               1694 fs/xfs/xfs_buf.c 	return list_lru_shrink_count(&btp->bt_lru, sc);
sc                497 fs/xfs/xfs_qm.c 	struct shrink_control	*sc)
sc                505 fs/xfs/xfs_qm.c 	if ((sc->gfp_mask & (__GFP_FS|__GFP_DIRECT_RECLAIM)) != (__GFP_FS|__GFP_DIRECT_RECLAIM))
sc                511 fs/xfs/xfs_qm.c 	freed = list_lru_shrink_walk(&qi->qi_lru, sc,
sc                532 fs/xfs/xfs_qm.c 	struct shrink_control	*sc)
sc                537 fs/xfs/xfs_qm.c 	return list_lru_shrink_count(&qi->qi_lru, sc);
sc               1812 fs/xfs/xfs_super.c 	struct shrink_control	*sc)
sc               1823 fs/xfs/xfs_super.c 	struct shrink_control	*sc)
sc               1825 fs/xfs/xfs_super.c 	return xfs_reclaim_inodes_nr(XFS_M(sb), sc->nr_to_scan);
sc                173 include/linux/ceph/libceph.h 					struct ceph_snap_context *sc);
sc                174 include/linux/ceph/libceph.h extern void ceph_put_snap_context(struct ceph_snap_context *sc);
sc                523 include/linux/ceph/osd_client.h 				struct ceph_snap_context *sc,
sc                447 include/linux/firewire.h 		fw_iso_callback_t sc;
sc                121 include/linux/list_lru.h 						  struct shrink_control *sc)
sc                123 include/linux/list_lru.h 	return list_lru_count_one(lru, sc->nid, sc->memcg);
sc                195 include/linux/list_lru.h 	return list_lru_walk_one(lru, sc->nid, sc->memcg, isolate, cb_arg,
sc                196 include/linux/list_lru.h 				 &sc->nr_to_scan);
sc                203 include/linux/list_lru.h 	return list_lru_walk_one_irq(lru, sc->nid, sc->memcg, isolate, cb_arg,
sc                204 include/linux/list_lru.h 				     &sc->nr_to_scan);
sc                 62 include/linux/shrinker.h 				       struct shrink_control *sc);
sc                 64 include/linux/shrinker.h 				      struct shrink_control *sc);
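
The list_lru and shrinker declarations above define the two-callback contract that the xfs, sunrpc, workingset, and zsmalloc hits in this listing all implement: count_objects() reports how much could be reclaimed and scan_objects() frees up to sc->nr_to_scan of it. A minimal sketch of that contract, using a hypothetical demo_cache counter in place of a real object cache, might look like this:

#include <linux/atomic.h>
#include <linux/shrinker.h>

/* Hypothetical cache size; a real driver would track its own objects. */
static atomic_long_t demo_cache_objects = ATOMIC_LONG_INIT(0);

static unsigned long demo_cache_count(struct shrinker *shrink,
				      struct shrink_control *sc)
{
	/* Report how many objects could be freed for this request. */
	return atomic_long_read(&demo_cache_objects);
}

static unsigned long demo_cache_scan(struct shrinker *shrink,
				     struct shrink_control *sc)
{
	unsigned long freed = 0;

	/* Free at most the budget passed down in sc->nr_to_scan. */
	while (freed < sc->nr_to_scan &&
	       atomic_long_add_unless(&demo_cache_objects, -1, 0))
		freed++;

	return freed ? freed : SHRINK_STOP;
}

static struct shrinker demo_cache_shrinker = {
	.count_objects	= demo_cache_count,
	.scan_objects	= demo_cache_scan,
	.seeks		= DEFAULT_SEEKS,
};

register_shrinker(&demo_cache_shrinker) would arm the callbacks and unregister_shrinker() would remove them; per-node and memcg awareness come from shrinker flags (SHRINKER_NUMA_AWARE, SHRINKER_MEMCG_AWARE) not set in this sketch.
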
sc               1022 include/linux/usb.h #define USB_DEVICE_INFO(cl, sc, pr) \
sc               1025 include/linux/usb.h 	.bDeviceSubClass = (sc), \
sc               1037 include/linux/usb.h #define USB_INTERFACE_INFO(cl, sc, pr) \
sc               1040 include/linux/usb.h 	.bInterfaceSubClass = (sc), \
sc               1057 include/linux/usb.h #define USB_DEVICE_AND_INTERFACE_INFO(vend, prod, cl, sc, pr) \
sc               1063 include/linux/usb.h 	.bInterfaceSubClass = (sc), \
sc               1079 include/linux/usb.h #define USB_VENDOR_AND_INTERFACE_INFO(vend, cl, sc, pr) \
sc               1084 include/linux/usb.h 	.bInterfaceSubClass = (sc), \
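
In the usb.h macros above, the sc parameter is the device or interface subclass byte; drivers feed these macros into their match tables. A minimal, hypothetical table (the subclass and protocol values are placeholders) would be:

#include <linux/module.h>
#include <linux/usb.h>

/* Hypothetical IDs: match any vendor-specific interface with
 * subclass 0x01 and protocol 0x02. */
static const struct usb_device_id demo_id_table[] = {
	{ USB_INTERFACE_INFO(USB_CLASS_VENDOR_SPEC, 0x01, 0x02) },
	{ }	/* terminating entry */
};
MODULE_DEVICE_TABLE(usb, demo_id_table);

USB_DEVICE_AND_INTERFACE_INFO() and USB_VENDOR_AND_INTERFACE_INFO() follow the same pattern but additionally constrain the vendor (and product) IDs.
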
sc                128 include/media/drv-intf/msp3400.h #define MSP_INPUT(sc, t, main_aux_src, sc_i2s_src) \
sc                129 include/media/drv-intf/msp3400.h 	(MSP_SCART_TO_DSP(sc) | \
sc                138 include/media/drv-intf/msp3400.h #define MSP_OUTPUT(sc) \
sc                139 include/media/drv-intf/msp3400.h 	(MSP_SC_TO_SCART1(sc) | \
sc                140 include/media/drv-intf/msp3400.h 	 MSP_SC_TO_SCART2(sc))
sc                 72 include/net/nfc/digital.h 	u16 sc;
sc                117 include/scsi/libiscsi.h 	struct scsi_cmnd	*sc;		/* associated SCSI cmd*/
sc                370 include/scsi/libiscsi.h extern int iscsi_eh_abort(struct scsi_cmnd *sc);
sc                371 include/scsi/libiscsi.h extern int iscsi_eh_recover_target(struct scsi_cmnd *sc);
sc                372 include/scsi/libiscsi.h extern int iscsi_eh_session_reset(struct scsi_cmnd *sc);
sc                373 include/scsi/libiscsi.h extern int iscsi_eh_device_reset(struct scsi_cmnd *sc);
sc                374 include/scsi/libiscsi.h extern int iscsi_queuecommand(struct Scsi_Host *host, struct scsi_cmnd *sc);
sc                375 include/scsi/libiscsi.h extern enum blk_eh_timer_return iscsi_eh_cmd_timed_out(struct scsi_cmnd *sc);
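
The libiscsi prototypes above are the shared error-handling and queueing entry points that iSCSI low-level drivers reuse rather than reimplement; they are typically wired straight into the driver's scsi_host_template, roughly as in this abbreviated, hypothetical sketch:

#include <scsi/scsi_host.h>
#include <scsi/libiscsi.h>

/* Hypothetical, abbreviated host template; a real one also sets
 * .this_id, .sg_tablesize, .cmd_per_lun, and so on. */
static struct scsi_host_template demo_iscsi_sht = {
	.name			 = "demo-iscsi",
	.queuecommand		 = iscsi_queuecommand,
	.eh_timed_out		 = iscsi_eh_cmd_timed_out,
	.eh_abort_handler	 = iscsi_eh_abort,
	.eh_device_reset_handler = iscsi_eh_device_reset,
	.eh_target_reset_handler = iscsi_eh_recover_target,
};
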
sc                769 include/trace/events/rpcrdma.h 		const struct rpcrdma_sendctx *sc,
sc                773 include/trace/events/rpcrdma.h 	TP_ARGS(sc, wc),
sc                783 include/trace/events/rpcrdma.h 		__entry->req = sc->sc_req;
sc                784 include/trace/events/rpcrdma.h 		__entry->unmap_count = sc->sc_unmap_count;
sc                186 include/trace/events/vmscan.h 	TP_PROTO(struct shrinker *shr, struct shrink_control *sc,
sc                191 include/trace/events/vmscan.h 	TP_ARGS(shr, sc, nr_objects_to_shrink, cache_items, delta, total_scan,
sc                209 include/trace/events/vmscan.h 		__entry->nid = sc->nid;
sc                211 include/trace/events/vmscan.h 		__entry->gfp_flags = sc->gfp_mask;
sc                 23 include/uapi/misc/fastrpc.h 	__u32 sc;
sc                 75 include/uapi/scsi/scsi_bsg_ufs.h 		struct utp_upiu_cmd		sc;
sc                525 lib/string.c   	const char *sc;
sc                527 lib/string.c   	for (sc = s; *sc != '\0'; ++sc)
sc                529 lib/string.c   	return sc - s;
sc                542 lib/string.c   	const char *sc;
sc                544 lib/string.c   	for (sc = s; count-- && *sc != '\0'; ++sc)
sc                546 lib/string.c   	return sc - s;
sc                137 mm/huge_memory.c 					struct shrink_control *sc)
sc                144 mm/huge_memory.c 				       struct shrink_control *sc)
sc               2889 mm/huge_memory.c 		struct shrink_control *sc)
sc               2891 mm/huge_memory.c 	struct pglist_data *pgdata = NODE_DATA(sc->nid);
sc               2895 mm/huge_memory.c 	if (sc->memcg)
sc               2896 mm/huge_memory.c 		ds_queue = &sc->memcg->deferred_split_queue;
sc               2902 mm/huge_memory.c 		struct shrink_control *sc)
sc               2904 mm/huge_memory.c 	struct pglist_data *pgdata = NODE_DATA(sc->nid);
sc               2912 mm/huge_memory.c 	if (sc->memcg)
sc               2913 mm/huge_memory.c 		ds_queue = &sc->memcg->deferred_split_queue;
sc               2928 mm/huge_memory.c 		if (!--sc->nr_to_scan)
sc                856 mm/memory-failure.c 	{ sc|dirty,	sc|dirty,	MF_MSG_DIRTY_SWAPCACHE,	me_swapcache_dirty },
sc                857 mm/memory-failure.c 	{ sc|dirty,	sc,		MF_MSG_CLEAN_SWAPCACHE,	me_swapcache_clean },
sc                461 mm/shmem.c     		struct shrink_control *sc, unsigned long nr_to_split)
sc                468 mm/shmem.c     	unsigned long batch = sc ? sc->nr_to_scan : 128;
sc                567 mm/shmem.c     		struct shrink_control *sc)
sc                574 mm/shmem.c     	return shmem_unused_huge_shrink(sbinfo, sc, 0);
sc                578 mm/shmem.c     		struct shrink_control *sc)
sc                588 mm/shmem.c     		struct shrink_control *sc, unsigned long nr_to_split)
sc                 69 mm/swap_cgroup.c 	struct swap_cgroup *sc;
sc                 72 mm/swap_cgroup.c 	sc = page_address(mappage);
sc                 73 mm/swap_cgroup.c 	return sc + offset % SC_PER_PAGE;
sc                101 mm/swap_cgroup.c 	struct swap_cgroup *sc;
sc                105 mm/swap_cgroup.c 	sc = lookup_swap_cgroup(ent, &ctrl);
sc                108 mm/swap_cgroup.c 	retval = sc->id;
sc                110 mm/swap_cgroup.c 		sc->id = new;
sc                130 mm/swap_cgroup.c 	struct swap_cgroup *sc;
sc                136 mm/swap_cgroup.c 	sc = lookup_swap_cgroup(ent, &ctrl);
sc                139 mm/swap_cgroup.c 	old = sc->id;
sc                141 mm/swap_cgroup.c 		VM_BUG_ON(sc->id != old);
sc                142 mm/swap_cgroup.c 		sc->id = id;
sc                147 mm/swap_cgroup.c 			sc++;
sc                149 mm/swap_cgroup.c 			sc = __lookup_swap_cgroup(ctrl, offset);
sc                242 mm/vmscan.c    static bool global_reclaim(struct scan_control *sc)
sc                244 mm/vmscan.c    	return !sc->target_mem_cgroup;
sc                260 mm/vmscan.c    static bool sane_reclaim(struct scan_control *sc)
sc                262 mm/vmscan.c    	struct mem_cgroup *memcg = sc->target_mem_cgroup;
sc                305 mm/vmscan.c    static bool global_reclaim(struct scan_control *sc)
sc                310 mm/vmscan.c    static bool sane_reclaim(struct scan_control *sc)
sc                612 mm/vmscan.c    		struct shrink_control sc = {
sc                631 mm/vmscan.c    		ret = do_shrink_slab(&sc, shrinker, priority);
sc                650 mm/vmscan.c    			ret = do_shrink_slab(&sc, shrinker, priority);
sc                716 mm/vmscan.c    		struct shrink_control sc = {
sc                722 mm/vmscan.c    		ret = do_shrink_slab(&sc, shrinker, priority);
sc                778 mm/vmscan.c    static int may_write_to_inode(struct inode *inode, struct scan_control *sc)
sc                827 mm/vmscan.c    			 struct scan_control *sc)
sc                863 mm/vmscan.c    	if (!may_write_to_inode(mapping->host, sc))
sc               1032 mm/vmscan.c    						  struct scan_control *sc)
sc               1037 mm/vmscan.c    	referenced_ptes = page_referenced(page, 1, sc->target_mem_cgroup,
sc               1121 mm/vmscan.c    				      struct scan_control *sc,
sc               1155 mm/vmscan.c    		sc->nr_scanned += nr_pages;
sc               1160 mm/vmscan.c    		if (!sc->may_unmap && page_mapped(page))
sc               1163 mm/vmscan.c    		may_enter_fs = (sc->gfp_mask & __GFP_FS) ||
sc               1164 mm/vmscan.c    			(PageSwapCache(page) && (sc->gfp_mask & __GFP_IO));
sc               1242 mm/vmscan.c    			} else if (sane_reclaim(sc) ||
sc               1270 mm/vmscan.c    			references = page_check_references(page, sc);
sc               1290 mm/vmscan.c    				if (!(sc->gfp_mask & __GFP_IO))
sc               1339 mm/vmscan.c    			sc->nr_scanned -= (nr_pages - 1);
sc               1388 mm/vmscan.c    			if (!sc->may_writepage)
sc               1397 mm/vmscan.c    			switch (pageout(page, mapping, sc)) {
sc               1444 mm/vmscan.c    			if (!try_to_release_page(page, sc->gfp_mask))
sc               1502 mm/vmscan.c    			sc->nr_scanned -= (nr_pages - 1);
sc               1539 mm/vmscan.c    	struct scan_control sc = {
sc               1557 mm/vmscan.c    	ret = shrink_page_list(&clean_pages, zone->zone_pgdat, &sc,
sc               1685 mm/vmscan.c    		unsigned long *nr_scanned, struct scan_control *sc,
sc               1695 mm/vmscan.c    	isolate_mode_t mode = (sc->may_unmap ? 0 : ISOLATE_UNMAPPED);
sc               1710 mm/vmscan.c    		if (page_zonenum(page) > sc->reclaim_idx) {
sc               1764 mm/vmscan.c    	trace_mm_vmscan_lru_isolate(sc->reclaim_idx, sc->order, nr_to_scan,
sc               1829 mm/vmscan.c    		struct scan_control *sc)
sc               1836 mm/vmscan.c    	if (!sane_reclaim(sc))
sc               1852 mm/vmscan.c    	if ((sc->gfp_mask & (__GFP_IO | __GFP_FS)) == (__GFP_IO | __GFP_FS))
sc               1949 mm/vmscan.c    		     struct scan_control *sc, enum lru_list lru)
sc               1962 mm/vmscan.c    	while (unlikely(too_many_isolated(pgdat, file, sc))) {
sc               1980 mm/vmscan.c    				     &nr_scanned, sc, lru);
sc               1986 mm/vmscan.c    	if (global_reclaim(sc))
sc               1994 mm/vmscan.c    	nr_reclaimed = shrink_page_list(&page_list, pgdat, sc, 0,
sc               2000 mm/vmscan.c    	if (global_reclaim(sc))
sc               2029 mm/vmscan.c    	sc->nr.dirty += stat.nr_dirty;
sc               2030 mm/vmscan.c    	sc->nr.congested += stat.nr_congested;
sc               2031 mm/vmscan.c    	sc->nr.unqueued_dirty += stat.nr_unqueued_dirty;
sc               2032 mm/vmscan.c    	sc->nr.writeback += stat.nr_writeback;
sc               2033 mm/vmscan.c    	sc->nr.immediate += stat.nr_immediate;
sc               2034 mm/vmscan.c    	sc->nr.taken += nr_taken;
sc               2036 mm/vmscan.c    		sc->nr.file_taken += nr_taken;
sc               2039 mm/vmscan.c    			nr_scanned, nr_reclaimed, &stat, sc->priority, file);
sc               2045 mm/vmscan.c    			       struct scan_control *sc,
sc               2066 mm/vmscan.c    				     &nr_scanned, sc, lru);
sc               2094 mm/vmscan.c    		if (page_referenced(page, 0, sc->target_mem_cgroup,
sc               2143 mm/vmscan.c    			nr_deactivate, nr_rotated, sc->priority, file);
sc               2153 mm/vmscan.c    	struct scan_control sc = {
sc               2176 mm/vmscan.c    						&sc, 0,
sc               2190 mm/vmscan.c    						&sc, 0,
sc               2231 mm/vmscan.c    				 struct scan_control *sc, bool trace)
sc               2248 mm/vmscan.c    	inactive = lruvec_lru_size(lruvec, inactive_lru, sc->reclaim_idx);
sc               2249 mm/vmscan.c    	active = lruvec_lru_size(lruvec, active_lru, sc->reclaim_idx);
sc               2268 mm/vmscan.c    		trace_mm_vmscan_inactive_list_is_low(pgdat->node_id, sc->reclaim_idx,
sc               2277 mm/vmscan.c    				 struct lruvec *lruvec, struct scan_control *sc)
sc               2280 mm/vmscan.c    		if (inactive_list_is_low(lruvec, is_file_lru(lru), sc, true))
sc               2281 mm/vmscan.c    			shrink_active_list(nr_to_scan, lruvec, sc, lru);
sc               2285 mm/vmscan.c    	return shrink_inactive_list(nr_to_scan, lruvec, sc, lru);
sc               2305 mm/vmscan.c    			   struct scan_control *sc, unsigned long *nr,
sc               2320 mm/vmscan.c    	if (!sc->may_swap || mem_cgroup_get_nr_swap_pages(memcg) <= 0) {
sc               2332 mm/vmscan.c    	if (!global_reclaim(sc) && !swappiness) {
sc               2342 mm/vmscan.c    	if (!sc->priority && swappiness) {
sc               2356 mm/vmscan.c    	if (global_reclaim(sc)) {
sc               2380 mm/vmscan.c    			if (!inactive_list_is_low(lruvec, false, sc, false) &&
sc               2381 mm/vmscan.c    			    lruvec_lru_size(lruvec, LRU_INACTIVE_ANON, sc->reclaim_idx)
sc               2382 mm/vmscan.c    					>> sc->priority) {
sc               2398 mm/vmscan.c    	if (!inactive_list_is_low(lruvec, true, sc, false) &&
sc               2399 mm/vmscan.c    	    lruvec_lru_size(lruvec, LRU_INACTIVE_FILE, sc->reclaim_idx) >> sc->priority) {
sc               2464 mm/vmscan.c    		lruvec_size = lruvec_lru_size(lruvec, lru, sc->reclaim_idx);
sc               2466 mm/vmscan.c    						   sc->memcg_low_reclaim);
sc               2516 mm/vmscan.c    		scan >>= sc->priority;
sc               2564 mm/vmscan.c    			      struct scan_control *sc, unsigned long *lru_pages)
sc               2572 mm/vmscan.c    	unsigned long nr_to_reclaim = sc->nr_to_reclaim;
sc               2576 mm/vmscan.c    	get_scan_count(lruvec, memcg, sc, nr, lru_pages);
sc               2592 mm/vmscan.c    	scan_adjusted = (global_reclaim(sc) && !current_is_kswapd() &&
sc               2593 mm/vmscan.c    			 sc->priority == DEF_PRIORITY);
sc               2607 mm/vmscan.c    							    lruvec, sc);
sc               2668 mm/vmscan.c    	sc->nr_reclaimed += nr_reclaimed;
sc               2674 mm/vmscan.c    	if (inactive_list_is_low(lruvec, false, sc, true))
sc               2676 mm/vmscan.c    				   sc, LRU_ACTIVE_ANON);
sc               2680 mm/vmscan.c    static bool in_reclaim_compaction(struct scan_control *sc)
sc               2682 mm/vmscan.c    	if (IS_ENABLED(CONFIG_COMPACTION) && sc->order &&
sc               2683 mm/vmscan.c    			(sc->order > PAGE_ALLOC_COSTLY_ORDER ||
sc               2684 mm/vmscan.c    			 sc->priority < DEF_PRIORITY - 2))
sc               2699 mm/vmscan.c    					struct scan_control *sc)
sc               2706 mm/vmscan.c    	if (!in_reclaim_compaction(sc))
sc               2723 mm/vmscan.c    	for (z = 0; z <= sc->reclaim_idx; z++) {
sc               2728 mm/vmscan.c    		switch (compaction_suitable(zone, sc->order, 0, sc->reclaim_idx)) {
sc               2742 mm/vmscan.c    	pages_for_compaction = compact_gap(sc->order);
sc               2756 mm/vmscan.c    static bool shrink_node(pg_data_t *pgdat, struct scan_control *sc)
sc               2763 mm/vmscan.c    		struct mem_cgroup *root = sc->target_mem_cgroup;
sc               2767 mm/vmscan.c    		memset(&sc->nr, 0, sizeof(sc->nr));
sc               2769 mm/vmscan.c    		nr_reclaimed = sc->nr_reclaimed;
sc               2770 mm/vmscan.c    		nr_scanned = sc->nr_scanned;
sc               2792 mm/vmscan.c    				if (!sc->memcg_low_reclaim) {
sc               2793 mm/vmscan.c    					sc->memcg_low_skipped = 1;
sc               2809 mm/vmscan.c    			reclaimed = sc->nr_reclaimed;
sc               2810 mm/vmscan.c    			scanned = sc->nr_scanned;
sc               2811 mm/vmscan.c    			shrink_node_memcg(pgdat, memcg, sc, &lru_pages);
sc               2814 mm/vmscan.c    			shrink_slab(sc->gfp_mask, pgdat->node_id, memcg,
sc               2815 mm/vmscan.c    					sc->priority);
sc               2818 mm/vmscan.c    			vmpressure(sc->gfp_mask, memcg, false,
sc               2819 mm/vmscan.c    				   sc->nr_scanned - scanned,
sc               2820 mm/vmscan.c    				   sc->nr_reclaimed - reclaimed);
sc               2825 mm/vmscan.c    			sc->nr_reclaimed += reclaim_state->reclaimed_slab;
sc               2830 mm/vmscan.c    		vmpressure(sc->gfp_mask, sc->target_mem_cgroup, true,
sc               2831 mm/vmscan.c    			   sc->nr_scanned - nr_scanned,
sc               2832 mm/vmscan.c    			   sc->nr_reclaimed - nr_reclaimed);
sc               2834 mm/vmscan.c    		if (sc->nr_reclaimed - nr_reclaimed)
sc               2855 mm/vmscan.c    			if (sc->nr.writeback && sc->nr.writeback == sc->nr.taken)
sc               2863 mm/vmscan.c    			if (sc->nr.dirty && sc->nr.dirty == sc->nr.congested)
sc               2867 mm/vmscan.c    			if (sc->nr.unqueued_dirty == sc->nr.file_taken)
sc               2876 mm/vmscan.c    			if (sc->nr.immediate)
sc               2884 mm/vmscan.c    		if (!global_reclaim(sc) && sane_reclaim(sc) &&
sc               2885 mm/vmscan.c    		    sc->nr.dirty && sc->nr.dirty == sc->nr.congested)
sc               2894 mm/vmscan.c    		if (!sc->hibernation_mode && !current_is_kswapd() &&
sc               2898 mm/vmscan.c    	} while (should_continue_reclaim(pgdat, sc->nr_reclaimed - nr_reclaimed,
sc               2899 mm/vmscan.c    					 sc));
sc               2918 mm/vmscan.c    static inline bool compaction_ready(struct zone *zone, struct scan_control *sc)
sc               2923 mm/vmscan.c    	suitable = compaction_suitable(zone, sc->order, 0, sc->reclaim_idx);
sc               2940 mm/vmscan.c    	watermark = high_wmark_pages(zone) + compact_gap(sc->order);
sc               2942 mm/vmscan.c    	return zone_watermark_ok_safe(zone, 0, watermark, sc->reclaim_idx);
sc               2953 mm/vmscan.c    static void shrink_zones(struct zonelist *zonelist, struct scan_control *sc)
sc               2967 mm/vmscan.c    	orig_mask = sc->gfp_mask;
sc               2969 mm/vmscan.c    		sc->gfp_mask |= __GFP_HIGHMEM;
sc               2970 mm/vmscan.c    		sc->reclaim_idx = gfp_zone(sc->gfp_mask);
sc               2974 mm/vmscan.c    					sc->reclaim_idx, sc->nodemask) {
sc               2979 mm/vmscan.c    		if (global_reclaim(sc)) {
sc               2994 mm/vmscan.c    			    sc->order > PAGE_ALLOC_COSTLY_ORDER &&
sc               2995 mm/vmscan.c    			    compaction_ready(zone, sc)) {
sc               2996 mm/vmscan.c    				sc->compaction_ready = true;
sc               3017 mm/vmscan.c    						sc->order, sc->gfp_mask,
sc               3019 mm/vmscan.c    			sc->nr_reclaimed += nr_soft_reclaimed;
sc               3020 mm/vmscan.c    			sc->nr_scanned += nr_soft_scanned;
sc               3028 mm/vmscan.c    		shrink_node(zone->zone_pgdat, sc);
sc               3035 mm/vmscan.c    	sc->gfp_mask = orig_mask;
sc               3070 mm/vmscan.c    					  struct scan_control *sc)
sc               3072 mm/vmscan.c    	int initial_priority = sc->priority;
sc               3079 mm/vmscan.c    	if (global_reclaim(sc))
sc               3080 mm/vmscan.c    		__count_zid_vm_events(ALLOCSTALL, sc->reclaim_idx, 1);
sc               3083 mm/vmscan.c    		vmpressure_prio(sc->gfp_mask, sc->target_mem_cgroup,
sc               3084 mm/vmscan.c    				sc->priority);
sc               3085 mm/vmscan.c    		sc->nr_scanned = 0;
sc               3086 mm/vmscan.c    		shrink_zones(zonelist, sc);
sc               3088 mm/vmscan.c    		if (sc->nr_reclaimed >= sc->nr_to_reclaim)
sc               3091 mm/vmscan.c    		if (sc->compaction_ready)
sc               3098 mm/vmscan.c    		if (sc->priority < DEF_PRIORITY - 2)
sc               3099 mm/vmscan.c    			sc->may_writepage = 1;
sc               3100 mm/vmscan.c    	} while (--sc->priority >= 0);
sc               3103 mm/vmscan.c    	for_each_zone_zonelist_nodemask(zone, z, zonelist, sc->reclaim_idx,
sc               3104 mm/vmscan.c    					sc->nodemask) {
sc               3108 mm/vmscan.c    		snapshot_refaults(sc->target_mem_cgroup, zone->zone_pgdat);
sc               3109 mm/vmscan.c    		set_memcg_congestion(last_pgdat, sc->target_mem_cgroup, false);
sc               3114 mm/vmscan.c    	if (sc->nr_reclaimed)
sc               3115 mm/vmscan.c    		return sc->nr_reclaimed;
sc               3118 mm/vmscan.c    	if (sc->compaction_ready)
sc               3122 mm/vmscan.c    	if (sc->memcg_low_skipped) {
sc               3123 mm/vmscan.c    		sc->priority = initial_priority;
sc               3124 mm/vmscan.c    		sc->memcg_low_reclaim = 1;
sc               3125 mm/vmscan.c    		sc->memcg_low_skipped = 0;
sc               3268 mm/vmscan.c    	struct scan_control sc = {
sc               3293 mm/vmscan.c    	if (throttle_direct_reclaim(sc.gfp_mask, zonelist, nodemask))
sc               3296 mm/vmscan.c    	set_task_reclaim_state(current, &sc.reclaim_state);
sc               3297 mm/vmscan.c    	trace_mm_vmscan_direct_reclaim_begin(order, sc.gfp_mask);
sc               3299 mm/vmscan.c    	nr_reclaimed = do_try_to_free_pages(zonelist, &sc);
sc               3315 mm/vmscan.c    	struct scan_control sc = {
sc               3327 mm/vmscan.c    	sc.gfp_mask = (gfp_mask & GFP_RECLAIM_MASK) |
sc               3330 mm/vmscan.c    	trace_mm_vmscan_memcg_softlimit_reclaim_begin(sc.order,
sc               3331 mm/vmscan.c    						      sc.gfp_mask);
sc               3340 mm/vmscan.c    	shrink_node_memcg(pgdat, memcg, &sc, &lru_pages);
sc               3342 mm/vmscan.c    	trace_mm_vmscan_memcg_softlimit_reclaim_end(sc.nr_reclaimed);
sc               3344 mm/vmscan.c    	*nr_scanned = sc.nr_scanned;
sc               3346 mm/vmscan.c    	return sc.nr_reclaimed;
sc               3359 mm/vmscan.c    	struct scan_control sc = {
sc               3371 mm/vmscan.c    	set_task_reclaim_state(current, &sc.reclaim_state);
sc               3381 mm/vmscan.c    	trace_mm_vmscan_memcg_reclaim_begin(0, sc.gfp_mask);
sc               3386 mm/vmscan.c    	nr_reclaimed = do_try_to_free_pages(zonelist, &sc);
sc               3399 mm/vmscan.c    				struct scan_control *sc)
sc               3410 mm/vmscan.c    		if (inactive_list_is_low(lruvec, false, sc, true))
sc               3412 mm/vmscan.c    					   sc, LRU_ACTIVE_ANON);
sc               3531 mm/vmscan.c    			       struct scan_control *sc)
sc               3537 mm/vmscan.c    	sc->nr_to_reclaim = 0;
sc               3538 mm/vmscan.c    	for (z = 0; z <= sc->reclaim_idx; z++) {
sc               3543 mm/vmscan.c    		sc->nr_to_reclaim += max(high_wmark_pages(zone), SWAP_CLUSTER_MAX);
sc               3550 mm/vmscan.c    	shrink_node(pgdat, sc);
sc               3559 mm/vmscan.c    	if (sc->order && sc->nr_reclaimed >= compact_gap(sc->order))
sc               3560 mm/vmscan.c    		sc->order = 0;
sc               3562 mm/vmscan.c    	return sc->nr_scanned >= sc->nr_to_reclaim;
sc               3588 mm/vmscan.c    	struct scan_control sc = {
sc               3594 mm/vmscan.c    	set_task_reclaim_state(current, &sc.reclaim_state);
sc               3617 mm/vmscan.c    	sc.priority = DEF_PRIORITY;
sc               3619 mm/vmscan.c    		unsigned long nr_reclaimed = sc.nr_reclaimed;
sc               3624 mm/vmscan.c    		sc.reclaim_idx = classzone_idx;
sc               3642 mm/vmscan.c    				sc.reclaim_idx = i;
sc               3654 mm/vmscan.c    		balanced = pgdat_balanced(pgdat, sc.order, classzone_idx);
sc               3669 mm/vmscan.c    		if (nr_boost_reclaim && sc.priority == DEF_PRIORITY - 2)
sc               3678 mm/vmscan.c    		sc.may_writepage = !laptop_mode && !nr_boost_reclaim;
sc               3679 mm/vmscan.c    		sc.may_swap = !nr_boost_reclaim;
sc               3687 mm/vmscan.c    		age_active_anon(pgdat, &sc);
sc               3693 mm/vmscan.c    		if (sc.priority < DEF_PRIORITY - 2)
sc               3694 mm/vmscan.c    			sc.may_writepage = 1;
sc               3697 mm/vmscan.c    		sc.nr_scanned = 0;
sc               3699 mm/vmscan.c    		nr_soft_reclaimed = mem_cgroup_soft_limit_reclaim(pgdat, sc.order,
sc               3700 mm/vmscan.c    						sc.gfp_mask, &nr_soft_scanned);
sc               3701 mm/vmscan.c    		sc.nr_reclaimed += nr_soft_reclaimed;
sc               3708 mm/vmscan.c    		if (kswapd_shrink_node(pgdat, &sc))
sc               3731 mm/vmscan.c    		nr_reclaimed = sc.nr_reclaimed - nr_reclaimed;
sc               3743 mm/vmscan.c    			sc.priority--;
sc               3744 mm/vmscan.c    	} while (sc.priority >= 1);
sc               3746 mm/vmscan.c    	if (!sc.nr_reclaimed)
sc               3783 mm/vmscan.c    	return sc.order;
sc               4029 mm/vmscan.c    	struct scan_control sc = {
sc               4039 mm/vmscan.c    	struct zonelist *zonelist = node_zonelist(numa_node_id(), sc.gfp_mask);
sc               4043 mm/vmscan.c    	fs_reclaim_acquire(sc.gfp_mask);
sc               4045 mm/vmscan.c    	set_task_reclaim_state(current, &sc.reclaim_state);
sc               4047 mm/vmscan.c    	nr_reclaimed = do_try_to_free_pages(zonelist, &sc);
sc               4051 mm/vmscan.c    	fs_reclaim_release(sc.gfp_mask);
sc               4215 mm/vmscan.c    	struct scan_control sc = {
sc               4227 mm/vmscan.c    					   sc.gfp_mask);
sc               4230 mm/vmscan.c    	fs_reclaim_acquire(sc.gfp_mask);
sc               4238 mm/vmscan.c    	set_task_reclaim_state(p, &sc.reclaim_state);
sc               4246 mm/vmscan.c    			shrink_node(pgdat, &sc);
sc               4247 mm/vmscan.c    		} while (sc.nr_reclaimed < nr_pages && --sc.priority >= 0);
sc               4253 mm/vmscan.c    	fs_reclaim_release(sc.gfp_mask);
sc               4255 mm/vmscan.c    	trace_mm_vmscan_node_reclaim_end(sc.nr_reclaimed);
sc               4257 mm/vmscan.c    	return sc.nr_reclaimed >= nr_pages;
sc                394 mm/workingset.c 					struct shrink_control *sc)
sc                400 mm/workingset.c 	nodes = list_lru_shrink_count(&shadow_nodes, sc);
sc                425 mm/workingset.c 	if (sc->memcg) {
sc                429 mm/workingset.c 		lruvec = mem_cgroup_lruvec(NODE_DATA(sc->nid), sc->memcg);
sc                437 mm/workingset.c 		pages = node_present_pages(sc->nid);
sc                516 mm/workingset.c 				       struct shrink_control *sc)
sc                519 mm/workingset.c 	return list_lru_shrink_walk_irq(&shadow_nodes, sc, shadow_lru_isolate,
sc               2337 mm/zsmalloc.c  		struct shrink_control *sc)
sc               2355 mm/zsmalloc.c  		struct shrink_control *sc)
sc                578 net/ceph/auth_x.c 		struct ceph_x_server_challenge *sc = buf;
sc                580 net/ceph/auth_x.c 		if (len != sizeof(*sc))
sc                582 net/ceph/auth_x.c 		xi->server_challenge = le64_to_cpu(sc->server_challenge);
sc                 14 net/ceph/ceph_fs.c 	__u32 sc = layout->stripe_count;
sc                 26 net/ceph/ceph_fs.c 	if (!sc)
sc                 46 net/ceph/snapshot.c struct ceph_snap_context *ceph_get_snap_context(struct ceph_snap_context *sc)
sc                 48 net/ceph/snapshot.c 	if (sc)
sc                 49 net/ceph/snapshot.c 		refcount_inc(&sc->nref);
sc                 50 net/ceph/snapshot.c 	return sc;
sc                 54 net/ceph/snapshot.c void ceph_put_snap_context(struct ceph_snap_context *sc)
sc                 56 net/ceph/snapshot.c 	if (!sc)
sc                 58 net/ceph/snapshot.c 	if (refcount_dec_and_test(&sc->nref)) {
sc                 60 net/ceph/snapshot.c 		kfree(sc);
sc                198 net/ieee802154/header_ops.c static int ieee802154_hdr_sechdr_len(u8 sc)
sc                200 net/ieee802154/header_ops.c 	return ieee802154_sechdr_lengths[IEEE802154_SCF_KEY_ID_MODE(sc)];
sc               1266 net/mac80211/rx.c 	u16 sc = le16_to_cpu(hdr->seq_ctrl);
sc               1267 net/mac80211/rx.c 	u16 mpdu_seq_num = (sc & IEEE80211_SCTL_SEQ) >> 4;
sc               1367 net/mac80211/rx.c 	u16 sc;
sc               1413 net/mac80211/rx.c 	sc = le16_to_cpu(hdr->seq_ctrl);
sc               1414 net/mac80211/rx.c 	if (sc & IEEE80211_SCTL_FRAG) {
sc               2160 net/mac80211/rx.c 	u16 sc;
sc               2172 net/mac80211/rx.c 	sc = le16_to_cpu(hdr->seq_ctrl);
sc               2173 net/mac80211/rx.c 	frag = sc & IEEE80211_SCTL_FRAG;
sc               2194 net/mac80211/rx.c 	seq = (sc & IEEE80211_SCTL_SEQ) >> 4;
sc                792 net/netfilter/ipvs/ip_vs_sync.c ip_vs_conn_fill_param_sync(struct netns_ipvs *ipvs, int af, union ip_vs_sync_conn *sc,
sc                799 net/netfilter/ipvs/ip_vs_sync.c 		ip_vs_conn_fill_param(ipvs, af, sc->v6.protocol,
sc                800 net/netfilter/ipvs/ip_vs_sync.c 				      (const union nf_inet_addr *)&sc->v6.caddr,
sc                801 net/netfilter/ipvs/ip_vs_sync.c 				      sc->v6.cport,
sc                802 net/netfilter/ipvs/ip_vs_sync.c 				      (const union nf_inet_addr *)&sc->v6.vaddr,
sc                803 net/netfilter/ipvs/ip_vs_sync.c 				      sc->v6.vport, p);
sc                806 net/netfilter/ipvs/ip_vs_sync.c 		ip_vs_conn_fill_param(ipvs, af, sc->v4.protocol,
sc                807 net/netfilter/ipvs/ip_vs_sync.c 				      (const union nf_inet_addr *)&sc->v4.caddr,
sc                808 net/netfilter/ipvs/ip_vs_sync.c 				      sc->v4.cport,
sc                809 net/netfilter/ipvs/ip_vs_sync.c 				      (const union nf_inet_addr *)&sc->v4.vaddr,
sc                810 net/netfilter/ipvs/ip_vs_sync.c 				      sc->v4.vport, p);
sc                292 net/nfc/digital_core.c 	params->sc = DIGITAL_SENSF_FELICA_SC;
sc                474 net/sched/sch_hfsc.c sc2isc(struct tc_service_curve *sc, struct internal_sc *isc)
sc                476 net/sched/sch_hfsc.c 	isc->sm1  = m2sm(sc->m1);
sc                477 net/sched/sch_hfsc.c 	isc->ism1 = m2ism(sc->m1);
sc                478 net/sched/sch_hfsc.c 	isc->dx   = d2dx(sc->d);
sc                480 net/sched/sch_hfsc.c 	isc->sm2  = m2sm(sc->m2);
sc                481 net/sched/sch_hfsc.c 	isc->ism2 = m2ism(sc->m2);
sc               1254 net/sched/sch_hfsc.c hfsc_dump_sc(struct sk_buff *skb, int attr, struct internal_sc *sc)
sc               1258 net/sched/sch_hfsc.c 	tsc.m1 = sm2m(sc->sm1);
sc               1259 net/sched/sch_hfsc.c 	tsc.d  = dx2d(sc->dx);
sc               1260 net/sched/sch_hfsc.c 	tsc.m2 = sm2m(sc->sm2);
sc                527 net/sunrpc/auth.c rpcauth_cache_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
sc                530 net/sunrpc/auth.c 	if ((sc->gfp_mask & GFP_KERNEL) != GFP_KERNEL)
sc                537 net/sunrpc/auth.c 	return rpcauth_cache_do_shrink(sc->nr_to_scan);
sc                541 net/sunrpc/auth.c rpcauth_cache_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
sc                566 net/sunrpc/xprtrdma/rpc_rdma.c void rpcrdma_sendctx_unmap(struct rpcrdma_sendctx *sc)
sc                570 net/sunrpc/xprtrdma/rpc_rdma.c 	if (!sc->sc_unmap_count)
sc                577 net/sunrpc/xprtrdma/rpc_rdma.c 	for (sge = &sc->sc_sges[2]; sc->sc_unmap_count;
sc                578 net/sunrpc/xprtrdma/rpc_rdma.c 	     ++sge, --sc->sc_unmap_count)
sc                579 net/sunrpc/xprtrdma/rpc_rdma.c 		ib_dma_unmap_page(sc->sc_device, sge->addr, sge->length,
sc                582 net/sunrpc/xprtrdma/rpc_rdma.c 	kref_put(&sc->sc_req->rl_kref, rpcrdma_sendctx_done);
sc                590 net/sunrpc/xprtrdma/rpc_rdma.c 	struct rpcrdma_sendctx *sc = req->rl_sendctx;
sc                592 net/sunrpc/xprtrdma/rpc_rdma.c 	struct ib_sge *sge = sc->sc_sges;
sc                602 net/sunrpc/xprtrdma/rpc_rdma.c 	sc->sc_wr.num_sge++;
sc                618 net/sunrpc/xprtrdma/rpc_rdma.c 	struct rpcrdma_sendctx *sc = req->rl_sendctx;
sc                621 net/sunrpc/xprtrdma/rpc_rdma.c 	struct ib_sge *sge = sc->sc_sges;
sc                629 net/sunrpc/xprtrdma/rpc_rdma.c 	sc->sc_device = rdmab_device(rb);
sc                685 net/sunrpc/xprtrdma/rpc_rdma.c 			sc->sc_unmap_count++;
sc                711 net/sunrpc/xprtrdma/rpc_rdma.c 		sc->sc_unmap_count++;
sc                715 net/sunrpc/xprtrdma/rpc_rdma.c 	sc->sc_wr.num_sge += sge_no;
sc                716 net/sunrpc/xprtrdma/rpc_rdma.c 	if (sc->sc_unmap_count)
sc                725 net/sunrpc/xprtrdma/rpc_rdma.c 	rpcrdma_sendctx_unmap(sc);
sc                730 net/sunrpc/xprtrdma/rpc_rdma.c 	rpcrdma_sendctx_unmap(sc);
sc                 77 net/sunrpc/xprtrdma/verbs.c static void rpcrdma_sendctx_put_locked(struct rpcrdma_sendctx *sc);
sc                135 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_sendctx *sc =
sc                139 net/sunrpc/xprtrdma/verbs.c 	trace_xprtrdma_wc_send(sc, wc);
sc                140 net/sunrpc/xprtrdma/verbs.c 	rpcrdma_sendctx_put_locked(sc);
sc                814 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_sendctx *sc;
sc                816 net/sunrpc/xprtrdma/verbs.c 	sc = kzalloc(struct_size(sc, sc_sges, ia->ri_max_send_sges),
sc                818 net/sunrpc/xprtrdma/verbs.c 	if (!sc)
sc                821 net/sunrpc/xprtrdma/verbs.c 	sc->sc_wr.wr_cqe = &sc->sc_cqe;
sc                822 net/sunrpc/xprtrdma/verbs.c 	sc->sc_wr.sg_list = sc->sc_sges;
sc                823 net/sunrpc/xprtrdma/verbs.c 	sc->sc_wr.opcode = IB_WR_SEND;
sc                824 net/sunrpc/xprtrdma/verbs.c 	sc->sc_cqe.done = rpcrdma_wc_send;
sc                825 net/sunrpc/xprtrdma/verbs.c 	return sc;
sc                831 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_sendctx *sc;
sc                841 net/sunrpc/xprtrdma/verbs.c 	buf->rb_sc_ctxs = kcalloc(i, sizeof(sc), GFP_KERNEL);
sc                847 net/sunrpc/xprtrdma/verbs.c 		sc = rpcrdma_sendctx_create(&r_xprt->rx_ia);
sc                848 net/sunrpc/xprtrdma/verbs.c 		if (!sc)
sc                851 net/sunrpc/xprtrdma/verbs.c 		sc->sc_xprt = r_xprt;
sc                852 net/sunrpc/xprtrdma/verbs.c 		buf->rb_sc_ctxs[i] = sc;
sc                884 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_sendctx *sc;
sc                893 net/sunrpc/xprtrdma/verbs.c 	sc = buf->rb_sc_ctxs[next_head];
sc                900 net/sunrpc/xprtrdma/verbs.c 	return sc;
sc                922 net/sunrpc/xprtrdma/verbs.c rpcrdma_sendctx_put_locked(struct rpcrdma_sendctx *sc)
sc                924 net/sunrpc/xprtrdma/verbs.c 	struct rpcrdma_buffer *buf = &sc->sc_xprt->rx_buf;
sc                937 net/sunrpc/xprtrdma/verbs.c 	} while (buf->rb_sc_ctxs[next_tail] != sc);
sc                942 net/sunrpc/xprtrdma/verbs.c 	xprt_write_space(&sc->sc_xprt->rx_xprt);
sc                579 net/sunrpc/xprtrdma/xprt_rdma.h void rpcrdma_sendctx_unmap(struct rpcrdma_sendctx *sc);
sc                 75 net/tipc/addr.h static inline int tipc_scope2node(struct net *net, int sc)
sc                 77 net/tipc/addr.h 	return sc != TIPC_NODE_SCOPE ? 0 : tipc_own_addr(net);
sc                143 net/tipc/name_table.c static struct service_range *tipc_service_first_range(struct tipc_service *sc,
sc                146 net/tipc/name_table.c 	struct rb_node *n = sc->ranges.rb_node;
sc                163 net/tipc/name_table.c static struct service_range *tipc_service_find_range(struct tipc_service *sc,
sc                166 net/tipc/name_table.c 	struct rb_node *n = sc->ranges.rb_node;
sc                169 net/tipc/name_table.c 	sr = tipc_service_first_range(sc, lower);
sc                185 net/tipc/name_table.c static struct service_range *tipc_service_create_range(struct tipc_service *sc,
sc                191 net/tipc/name_table.c 	n = &sc->ranges.rb_node;
sc                215 net/tipc/name_table.c 	rb_insert_color(&sr->tree_node, &sc->ranges);
sc                220 net/tipc/name_table.c 						    struct tipc_service *sc,
sc                231 net/tipc/name_table.c 	sr = tipc_service_create_range(sc, lower, upper);
sc                252 net/tipc/name_table.c 	list_for_each_entry_safe(sub, tmp, &sc->subscriptions, service_list) {
sc                342 net/tipc/name_table.c 	struct tipc_service *sc;
sc                350 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                351 net/tipc/name_table.c 	if (!sc)
sc                352 net/tipc/name_table.c 		sc = tipc_service_create(type, &nt->services[hash(type)]);
sc                353 net/tipc/name_table.c 	if (!sc)
sc                356 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                357 net/tipc/name_table.c 	p = tipc_service_insert_publ(net, sc, type, lower, upper,
sc                359 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                367 net/tipc/name_table.c 	struct tipc_service *sc = tipc_service_find(net, type);
sc                373 net/tipc/name_table.c 	if (!sc)
sc                376 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                377 net/tipc/name_table.c 	sr = tipc_service_find_range(sc, lower, upper);
sc                386 net/tipc/name_table.c 	list_for_each_entry_safe(sub, tmp, &sc->subscriptions, service_list) {
sc                393 net/tipc/name_table.c 		rb_erase(&sr->tree_node, &sc->ranges);
sc                398 net/tipc/name_table.c 	if (RB_EMPTY_ROOT(&sc->ranges) && list_empty(&sc->subscriptions)) {
sc                399 net/tipc/name_table.c 		hlist_del_init_rcu(&sc->service_list);
sc                400 net/tipc/name_table.c 		kfree_rcu(sc, rcu);
sc                403 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                429 net/tipc/name_table.c 	struct tipc_service *sc;
sc                439 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                440 net/tipc/name_table.c 	if (unlikely(!sc))
sc                443 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                444 net/tipc/name_table.c 	sr = tipc_service_first_range(sc, instance);
sc                467 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                480 net/tipc/name_table.c 	struct tipc_service *sc;
sc                485 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                486 net/tipc/name_table.c 	if (unlikely(!sc))
sc                489 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                491 net/tipc/name_table.c 	sr = tipc_service_first_range(sc, instance);
sc                508 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                518 net/tipc/name_table.c 	struct tipc_service *sc;
sc                523 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                524 net/tipc/name_table.c 	if (!sc)
sc                527 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                529 net/tipc/name_table.c 	for (n = rb_first(&sc->ranges); n; n = rb_next(n)) {
sc                540 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                553 net/tipc/name_table.c 	struct tipc_service *sc;
sc                558 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                559 net/tipc/name_table.c 	if (!sc)
sc                562 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                564 net/tipc/name_table.c 	for (n = rb_first(&sc->ranges); n; n = rb_next(n)) {
sc                574 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                585 net/tipc/name_table.c 	struct tipc_service *sc;
sc                590 net/tipc/name_table.c 	sc = tipc_service_find(net, type);
sc                591 net/tipc/name_table.c 	if (!sc)
sc                594 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                595 net/tipc/name_table.c 	for (n = rb_first(&sc->ranges); n; n = rb_next(n)) {
sc                603 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                682 net/tipc/name_table.c 	struct tipc_service *sc;
sc                686 net/tipc/name_table.c 	sc = tipc_service_find(sub->net, type);
sc                687 net/tipc/name_table.c 	if (!sc)
sc                688 net/tipc/name_table.c 		sc = tipc_service_create(type, &nt->services[hash(type)]);
sc                689 net/tipc/name_table.c 	if (sc) {
sc                690 net/tipc/name_table.c 		spin_lock_bh(&sc->lock);
sc                691 net/tipc/name_table.c 		tipc_service_subscribe(sc, sub);
sc                692 net/tipc/name_table.c 		spin_unlock_bh(&sc->lock);
sc                711 net/tipc/name_table.c 	struct tipc_service *sc;
sc                714 net/tipc/name_table.c 	sc = tipc_service_find(sub->net, type);
sc                715 net/tipc/name_table.c 	if (!sc)
sc                718 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                723 net/tipc/name_table.c 	if (RB_EMPTY_ROOT(&sc->ranges) && list_empty(&sc->subscriptions)) {
sc                724 net/tipc/name_table.c 		hlist_del_init_rcu(&sc->service_list);
sc                725 net/tipc/name_table.c 		kfree_rcu(sc, rcu);
sc                727 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                756 net/tipc/name_table.c static void tipc_service_delete(struct net *net, struct tipc_service *sc)
sc                761 net/tipc/name_table.c 	spin_lock_bh(&sc->lock);
sc                762 net/tipc/name_table.c 	rbtree_postorder_for_each_entry_safe(sr, tmpr, &sc->ranges, tree_node) {
sc                767 net/tipc/name_table.c 		rb_erase(&sr->tree_node, &sc->ranges);
sc                770 net/tipc/name_table.c 	hlist_del_init_rcu(&sc->service_list);
sc                771 net/tipc/name_table.c 	spin_unlock_bh(&sc->lock);
sc                772 net/tipc/name_table.c 	kfree_rcu(sc, rcu);
sc                874 net/tipc/name_table.c 					struct tipc_service *sc,
sc                881 net/tipc/name_table.c 	for (n = rb_first(&sc->ranges); n; n = rb_next(n)) {
sc                885 net/tipc/name_table.c 		err = __tipc_nl_add_nametable_publ(msg, sc, sr, last_key);
sc                875 sound/firewire/amdtp-stream.c 		context->callback.sc = in_stream_callback;
sc                879 sound/firewire/amdtp-stream.c 		context->callback.sc = out_stream_callback;
sc                884 sound/firewire/amdtp-stream.c 	context->callback.sc(context, tstamp, header_length, header, s);
sc                182 sound/pci/lola/lola_clock.c 				struct lola_sample_clock *sc;
sc                183 sound/pci/lola/lola_clock.c 				sc = &chip->clock.sample_clock[idx_list];
sc                184 sound/pci/lola/lola_clock.c 				sc->type = type;
sc                185 sound/pci/lola/lola_clock.c 				sc->format = format;
sc                186 sound/pci/lola/lola_clock.c 				sc->freq = freq;
sc                393 sound/soc/img/img-i2s-in.c 	struct snd_pcm_hw_params *params, struct dma_slave_config *sc)
sc                402 sound/soc/img/img-i2s-in.c 	ret = snd_hwparams_to_dma_slave_config(st, params, sc);
sc                406 sound/soc/img/img-i2s-in.c 	sc->src_addr = dma_data->addr;
sc                407 sound/soc/img/img-i2s-in.c 	sc->src_addr_width = dma_data->addr_width;
sc                408 sound/soc/img/img-i2s-in.c 	sc->src_maxburst = 4 * i2s_channels;
sc                399 sound/soc/img/img-i2s-out.c 	struct snd_pcm_hw_params *params, struct dma_slave_config *sc)
sc                408 sound/soc/img/img-i2s-out.c 	ret = snd_hwparams_to_dma_slave_config(st, params, sc);
sc                412 sound/soc/img/img-i2s-out.c 	sc->dst_addr = dma_data->addr;
sc                413 sound/soc/img/img-i2s-out.c 	sc->dst_addr_width = dma_data->addr_width;
sc                414 sound/soc/img/img-i2s-out.c 	sc->dst_maxburst = 4 * i2s_channels;
sc                270 tools/perf/builtin-trace.c 	({ struct syscall_tp *sc = evsel->priv;\
sc                271 tools/perf/builtin-trace.c 	   perf_evsel__init_tp_uint_field(evsel, &sc->name, #name); })
sc                286 tools/perf/builtin-trace.c 	({ struct syscall_tp *sc = evsel->priv;\
sc                287 tools/perf/builtin-trace.c 	   perf_evsel__init_tp_ptr_field(evsel, &sc->name, #name); })
sc                297 tools/perf/builtin-trace.c 	struct syscall_tp *sc = evsel->priv = malloc(sizeof(struct syscall_tp));
sc                300 tools/perf/builtin-trace.c 		if (perf_evsel__init_tp_uint_field(evsel, &sc->id, "__syscall_nr") &&
sc                301 tools/perf/builtin-trace.c 		    perf_evsel__init_tp_uint_field(evsel, &sc->id, "nr"))
sc                314 tools/perf/builtin-trace.c 	struct syscall_tp *sc = evsel->priv = malloc(sizeof(struct syscall_tp));
sc                322 tools/perf/builtin-trace.c 		if (__tp_field__init_uint(&sc->id, syscall_id->size, syscall_id->offset, evsel->needs_swap))
sc                336 tools/perf/builtin-trace.c 	struct syscall_tp *sc = evsel->priv;
sc                338 tools/perf/builtin-trace.c 	return __tp_field__init_ptr(&sc->args, sc->id.offset + sizeof(u64));
sc                343 tools/perf/builtin-trace.c 	struct syscall_tp *sc = evsel->priv;
sc                345 tools/perf/builtin-trace.c 	return __tp_field__init_uint(&sc->ret, sizeof(u64), sc->id.offset + sizeof(u64), evsel->needs_swap);
sc               1436 tools/perf/builtin-trace.c static int syscall__alloc_arg_fmts(struct syscall *sc, int nr_args)
sc               1440 tools/perf/builtin-trace.c 	if (nr_args == 6 && sc->fmt && sc->fmt->nr_args != 0)
sc               1441 tools/perf/builtin-trace.c 		nr_args = sc->fmt->nr_args;
sc               1443 tools/perf/builtin-trace.c 	sc->arg_fmt = calloc(nr_args, sizeof(*sc->arg_fmt));
sc               1444 tools/perf/builtin-trace.c 	if (sc->arg_fmt == NULL)
sc               1448 tools/perf/builtin-trace.c 		if (sc->fmt)
sc               1449 tools/perf/builtin-trace.c 			sc->arg_fmt[idx] = sc->fmt->arg[idx];
sc               1452 tools/perf/builtin-trace.c 	sc->nr_args = nr_args;
sc               1456 tools/perf/builtin-trace.c static int syscall__set_arg_fmts(struct syscall *sc)
sc               1461 tools/perf/builtin-trace.c 	for (field = sc->args; field; field = field->next, ++idx) {
sc               1464 tools/perf/builtin-trace.c 		if (sc->fmt && sc->fmt->arg[idx].scnprintf)
sc               1472 tools/perf/builtin-trace.c 			sc->arg_fmt[idx].scnprintf = SCA_FILENAME;
sc               1474 tools/perf/builtin-trace.c 			sc->arg_fmt[idx].scnprintf = SCA_PTR;
sc               1476 tools/perf/builtin-trace.c 			sc->arg_fmt[idx].scnprintf = SCA_PID;
sc               1478 tools/perf/builtin-trace.c 			sc->arg_fmt[idx].scnprintf = SCA_MODE_T;
sc               1490 tools/perf/builtin-trace.c 			sc->arg_fmt[idx].scnprintf = SCA_FD;
sc               1495 tools/perf/builtin-trace.c 		sc->args_size = last_field->offset + last_field->size;
sc               1503 tools/perf/builtin-trace.c 	struct syscall *sc;
sc               1507 tools/perf/builtin-trace.c 		trace->syscalls.table = calloc(trace->sctbl->syscalls.max_id + 1, sizeof(*sc));
sc               1512 tools/perf/builtin-trace.c 	sc = trace->syscalls.table + id;
sc               1513 tools/perf/builtin-trace.c 	if (sc->nonexistent)
sc               1517 tools/perf/builtin-trace.c 		sc->nonexistent = true;
sc               1521 tools/perf/builtin-trace.c 	sc->name = name;
sc               1522 tools/perf/builtin-trace.c 	sc->fmt  = syscall_fmt__find(sc->name);
sc               1524 tools/perf/builtin-trace.c 	snprintf(tp_name, sizeof(tp_name), "sys_enter_%s", sc->name);
sc               1525 tools/perf/builtin-trace.c 	sc->tp_format = trace_event__tp_format("syscalls", tp_name);
sc               1527 tools/perf/builtin-trace.c 	if (IS_ERR(sc->tp_format) && sc->fmt && sc->fmt->alias) {
sc               1528 tools/perf/builtin-trace.c 		snprintf(tp_name, sizeof(tp_name), "sys_enter_%s", sc->fmt->alias);
sc               1529 tools/perf/builtin-trace.c 		sc->tp_format = trace_event__tp_format("syscalls", tp_name);
sc               1532 tools/perf/builtin-trace.c 	if (syscall__alloc_arg_fmts(sc, IS_ERR(sc->tp_format) ? 6 : sc->tp_format->format.nr_fields))
sc               1535 tools/perf/builtin-trace.c 	if (IS_ERR(sc->tp_format))
sc               1536 tools/perf/builtin-trace.c 		return PTR_ERR(sc->tp_format);
sc               1538 tools/perf/builtin-trace.c 	sc->args = sc->tp_format->format.fields;
sc               1544 tools/perf/builtin-trace.c 	if (sc->args && (!strcmp(sc->args->name, "__syscall_nr") || !strcmp(sc->args->name, "nr"))) {
sc               1545 tools/perf/builtin-trace.c 		sc->args = sc->args->next;
sc               1546 tools/perf/builtin-trace.c 		--sc->nr_args;
sc               1549 tools/perf/builtin-trace.c 	sc->is_exit = !strcmp(name, "exit_group") || !strcmp(name, "exit");
sc               1550 tools/perf/builtin-trace.c 	sc->is_open = !strcmp(name, "open") || !strcmp(name, "openat");
sc               1552 tools/perf/builtin-trace.c 	return syscall__set_arg_fmts(sc);
sc               1580 tools/perf/builtin-trace.c 		const char *sc = pos->s;
sc               1581 tools/perf/builtin-trace.c 		int id = syscalltbl__id(trace->sctbl, sc), match_next = -1;
sc               1584 tools/perf/builtin-trace.c 			id = syscalltbl__strglobmatch_first(trace->sctbl, sc, &match_next);
sc               1595 tools/perf/builtin-trace.c 			pr_debug("%s", sc);
sc               1604 tools/perf/builtin-trace.c 			id = syscalltbl__strglobmatch_next(trace->sctbl, sc, &match_next);
sc               1669 tools/perf/builtin-trace.c static size_t syscall__scnprintf_name(struct syscall *sc, char *bf, size_t size,
sc               1672 tools/perf/builtin-trace.c 	if (sc->arg_fmt && sc->arg_fmt[arg->idx].name)
sc               1673 tools/perf/builtin-trace.c 		return scnprintf(bf, size, "%s: ", sc->arg_fmt[arg->idx].name);
sc               1683 tools/perf/builtin-trace.c static unsigned long syscall__mask_val(struct syscall *sc, struct syscall_arg *arg, unsigned long val)
sc               1685 tools/perf/builtin-trace.c 	if (sc->arg_fmt && sc->arg_fmt[arg->idx].mask_val)
sc               1686 tools/perf/builtin-trace.c 		return sc->arg_fmt[arg->idx].mask_val(arg, val);
sc               1691 tools/perf/builtin-trace.c static size_t syscall__scnprintf_val(struct syscall *sc, char *bf, size_t size,
sc               1694 tools/perf/builtin-trace.c 	if (sc->arg_fmt && sc->arg_fmt[arg->idx].scnprintf) {
sc               1696 tools/perf/builtin-trace.c 		if (sc->arg_fmt[arg->idx].parm)
sc               1697 tools/perf/builtin-trace.c 			arg->parm = sc->arg_fmt[arg->idx].parm;
sc               1698 tools/perf/builtin-trace.c 		return sc->arg_fmt[arg->idx].scnprintf(bf, size, arg);
sc               1703 tools/perf/builtin-trace.c static size_t syscall__scnprintf_args(struct syscall *sc, char *bf, size_t size,
sc               1731 tools/perf/builtin-trace.c 	if (sc->args != NULL) {
sc               1734 tools/perf/builtin-trace.c 		for (field = sc->args; field;
sc               1744 tools/perf/builtin-trace.c 			val = syscall__mask_val(sc, &arg, val);
sc               1753 tools/perf/builtin-trace.c 			    !(sc->arg_fmt &&
sc               1754 tools/perf/builtin-trace.c 			      (sc->arg_fmt[arg.idx].show_zero ||
sc               1755 tools/perf/builtin-trace.c 			       sc->arg_fmt[arg.idx].scnprintf == SCA_STRARRAY ||
sc               1756 tools/perf/builtin-trace.c 			       sc->arg_fmt[arg.idx].scnprintf == SCA_STRARRAYS) &&
sc               1757 tools/perf/builtin-trace.c 			      sc->arg_fmt[arg.idx].parm))
sc               1765 tools/perf/builtin-trace.c 			printed += syscall__scnprintf_val(sc, bf + printed, size - printed, &arg, val);
sc               1767 tools/perf/builtin-trace.c 	} else if (IS_ERR(sc->tp_format)) {
sc               1773 tools/perf/builtin-trace.c 		while (arg.idx < sc->nr_args) {
sc               1779 tools/perf/builtin-trace.c 			printed += syscall__scnprintf_name(sc, bf + printed, size - printed, &arg);
sc               1780 tools/perf/builtin-trace.c 			printed += syscall__scnprintf_val(sc, bf + printed, size - printed, &arg, val);
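
Throughout the syscall__scnprintf_* hits, output is built with the bounded-append idiom printed += scnprintf(bf + printed, size - printed, ...). The sketch below illustrates that idiom; since perf's scnprintf() is not a standard libc function, a small clamped wrapper over vsnprintf() stands in for it, and demo_format_args() is hypothetical.

/* Self-contained illustration of the bounded-append formatting idiom. */
#include <stdarg.h>
#include <stdio.h>

static size_t demo_scnprintf(char *buf, size_t size, const char *fmt, ...)
{
	va_list ap;
	int n;

	if (!size)
		return 0;
	va_start(ap, fmt);
	n = vsnprintf(buf, size, fmt, ap);
	va_end(ap);
	/* Clamp: report what landed in the buffer, not what would have fit. */
	return n < 0 ? 0 : ((size_t)n >= size ? size - 1 : (size_t)n);
}

static size_t demo_format_args(char *bf, size_t size, int nr_args,
			       const unsigned long *vals)
{
	size_t printed = 0;
	int i;

	for (i = 0; i < nr_args; i++)
		printed += demo_scnprintf(bf + printed, size - printed,
					  "%sarg%d: %#lx",
					  i ? ", " : "", i, vals[i]);
	return printed;
}
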
sc               1918 tools/perf/builtin-trace.c static void *syscall__augmented_args(struct syscall *sc, struct perf_sample *sample, int *augmented_args_size, int raw_augmented_args_size)
sc               1935 tools/perf/builtin-trace.c 	int args_size = raw_augmented_args_size ?: sc->args_size;
sc               1955 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, evsel, id);
sc               1958 tools/perf/builtin-trace.c 	if (sc == NULL)
sc               1989 tools/perf/builtin-trace.c 		augmented_args = syscall__augmented_args(sc, sample, &augmented_args_size, trace->raw_augmented_syscalls_args_size);
sc               1992 tools/perf/builtin-trace.c 	printed += scnprintf(msg + printed, trace__entry_str_size - printed, "%s(", sc->name);
sc               1994 tools/perf/builtin-trace.c 	printed += syscall__scnprintf_args(sc, msg + printed, trace__entry_str_size - printed,
sc               1997 tools/perf/builtin-trace.c 	if (sc->is_exit) {
sc               2029 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, evsel, id);
sc               2034 tools/perf/builtin-trace.c 	if (sc == NULL)
sc               2047 tools/perf/builtin-trace.c 	augmented_args = syscall__augmented_args(sc, sample, &augmented_args_size, trace->raw_augmented_syscalls_args_size);
sc               2048 tools/perf/builtin-trace.c 	syscall__scnprintf_args(sc, msg, sizeof(msg), args, augmented_args, augmented_args_size, trace, thread);
sc               2102 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, evsel, id);
sc               2105 tools/perf/builtin-trace.c 	if (sc == NULL)
sc               2120 tools/perf/builtin-trace.c 	if (!trace->fd_path_disabled && sc->is_open && ret >= 0 && ttrace->filename.pending_open) {
sc               2154 tools/perf/builtin-trace.c 		printed += fprintf(trace->output, "]: %s()", sc->name);
sc               2166 tools/perf/builtin-trace.c 	if (sc->fmt == NULL) {
sc               2179 tools/perf/builtin-trace.c 	} else if (ret == 0 && sc->fmt->timeout)
sc               2191 tools/perf/builtin-trace.c 	} else if (sc->fmt->hexret)
sc               2193 tools/perf/builtin-trace.c 	else if (sc->fmt->errpid) {
sc               2386 tools/perf/builtin-trace.c 		struct syscall *sc = trace__syscall_info(trace, evsel, id);
sc               2388 tools/perf/builtin-trace.c 		if (sc) {
sc               2389 tools/perf/builtin-trace.c 			fprintf(trace->output, "%s(", sc->name);
sc               2797 tools/perf/builtin-trace.c static struct bpf_program *trace__find_syscall_bpf_prog(struct trace *trace, struct syscall *sc,
sc               2804 tools/perf/builtin-trace.c 		scnprintf(default_prog_name, sizeof(default_prog_name), "!syscalls:sys_%s_%s", type, sc->name);
sc               2808 tools/perf/builtin-trace.c 		if (sc->fmt && sc->fmt->alias) {
sc               2809 tools/perf/builtin-trace.c 			scnprintf(default_prog_name, sizeof(default_prog_name), "!syscalls:sys_%s_%s", type, sc->fmt->alias);
sc               2825 tools/perf/builtin-trace.c 		 prog_name, type, sc->name);
sc               2832 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, NULL, id);
sc               2834 tools/perf/builtin-trace.c 	if (sc == NULL)
sc               2837 tools/perf/builtin-trace.c 	sc->bpf_prog.sys_enter = trace__find_syscall_bpf_prog(trace, sc, sc->fmt ? sc->fmt->bpf_prog_name.sys_enter : NULL, "enter");
sc               2838 tools/perf/builtin-trace.c 	sc->bpf_prog.sys_exit  = trace__find_syscall_bpf_prog(trace, sc, sc->fmt ? sc->fmt->bpf_prog_name.sys_exit  : NULL,  "exit");
sc               2843 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, NULL, id);
sc               2844 tools/perf/builtin-trace.c 	return sc ? bpf_program__fd(sc->bpf_prog.sys_enter) : bpf_program__fd(trace->syscalls.unaugmented_prog);
sc               2849 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, NULL, id);
sc               2850 tools/perf/builtin-trace.c 	return sc ? bpf_program__fd(sc->bpf_prog.sys_exit) : bpf_program__fd(trace->syscalls.unaugmented_prog);
sc               2855 tools/perf/builtin-trace.c 	struct syscall *sc = trace__syscall_info(trace, NULL, id);
sc               2858 tools/perf/builtin-trace.c 	if (sc == NULL)
sc               2861 tools/perf/builtin-trace.c 	for (; arg < sc->nr_args; ++arg) {
sc               2863 tools/perf/builtin-trace.c 		if (sc->arg_fmt[arg].scnprintf == SCA_FILENAME) {
sc               2927 tools/perf/builtin-trace.c static struct bpf_program *trace__find_usable_bpf_prog_entry(struct trace *trace, struct syscall *sc)
sc               2935 tools/perf/builtin-trace.c 	for (field = sc->args; field; field = field->next) {
sc               2948 tools/perf/builtin-trace.c 		if (pair == NULL || pair == sc ||
sc               2952 tools/perf/builtin-trace.c 		for (field = sc->args, candidate_field = pair->args;
sc               3006 tools/perf/builtin-trace.c 		pr_debug("Reusing \"%s\" BPF sys_enter augmenter for \"%s\"\n", pair->name, sc->name);
sc               3069 tools/perf/builtin-trace.c 		struct syscall *sc = trace__syscall_info(trace, NULL, key);
sc               3073 tools/perf/builtin-trace.c 		if (sc == NULL || sc->bpf_prog.sys_enter == NULL)
sc               3080 tools/perf/builtin-trace.c 		if (sc->bpf_prog.sys_enter != trace->syscalls.unaugmented_prog)
sc               3087 tools/perf/builtin-trace.c 		pair_prog = trace__find_usable_bpf_prog_entry(trace, sc);
sc               3091 tools/perf/builtin-trace.c 		sc->bpf_prog.sys_enter = pair_prog;
sc               3097 tools/perf/builtin-trace.c 		prog_fd = bpf_program__fd(sc->bpf_prog.sys_enter);
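
The BPF-related builtin-trace hits above resolve a per-syscall bpf_program to a file descriptor and install it keyed by syscall id, falling back to an unaugmented program when no specific one exists. Below is a hedged sketch of the map-update half only, assuming a PROG_ARRAY-style map with 32-bit integer keys; demo_set_syscall_prog() is not perf's function.

/* Assumed: map_fd refers to a prog-array map with int keys and prog-fd values. */
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static int demo_set_syscall_prog(int map_fd, int syscall_id,
				 struct bpf_program *prog)
{
	int prog_fd = bpf_program__fd(prog);

	if (prog_fd < 0)
		return prog_fd;

	/* key: syscall id, value: fd of the program to run for that syscall */
	return bpf_map_update_elem(map_fd, &syscall_id, &prog_fd, BPF_ANY);
}
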
sc               3685 tools/perf/builtin-trace.c 	struct syscall *sc;
sc               3710 tools/perf/builtin-trace.c 			sc = &trace->syscalls.table[syscall_stats_entry->syscall];
sc               3711 tools/perf/builtin-trace.c 			printed += fprintf(fp, "   %-15s", sc->name);
sc               3884 tools/perf/builtin-trace.c 			struct syscall_tp *sc = evsel->priv;
sc               3886 tools/perf/builtin-trace.c 			if (__tp_field__init_ptr(&sc->args, sc->id.offset + sizeof(u64)))
sc               3889 tools/perf/builtin-trace.c 			struct syscall_tp *sc = evsel->priv;
sc               3891 tools/perf/builtin-trace.c 			if (__tp_field__init_uint(&sc->ret, sizeof(u64), sc->id.offset + sizeof(u64), evsel->needs_swap))
sc               4347 tools/perf/builtin-trace.c 				struct syscall_tp *sc;
sc               4351 tools/perf/builtin-trace.c 				sc = evsel->priv;
sc               4372 tools/perf/builtin-trace.c 					trace.raw_augmented_syscalls_args_size = (6 + 1) * sizeof(long) + sc->id.offset;
sc                341 tools/perf/util/stat-display.c 	double sc =  evsel->scale;
sc                345 tools/perf/util/stat-display.c 		fmt = floor(sc) != sc ?  "%.2f%s" : "%.0f%s";
sc                348 tools/perf/util/stat-display.c 			fmt = floor(sc) != sc ? "%'18.2f%s" : "%'18.0f%s";
sc                350 tools/perf/util/stat-display.c 			fmt = floor(sc) != sc ? "%18.2f%s" : "%18.0f%s";
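
The stat-display.c hits pick a printf format based on whether the event's scale factor is integral, so whole-number scales print without decimals. A minimal sketch of that choice, with a made-up demo_print_counter():

/* Print with two decimals only when the scale factor is fractional. */
#include <math.h>
#include <stdio.h>

static void demo_print_counter(double avg, double sc, const char *unit)
{
	const char *fmt = floor(sc) != sc ? "%18.2f%s\n" : "%18.0f%s\n";

	printf(fmt, avg * sc, unit);
}
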
sc                451 tools/perf/util/stat.c 	struct perf_stat_config sc;
sc                454 tools/perf/util/stat.c 	perf_event__read_stat_config(&sc, &event->stat_config);
sc                457 tools/perf/util/stat.c 	ret += fprintf(fp, "... aggr_mode %d\n", sc.aggr_mode);
sc                458 tools/perf/util/stat.c 	ret += fprintf(fp, "... scale     %d\n", sc.scale);
sc                459 tools/perf/util/stat.c 	ret += fprintf(fp, "... interval  %u\n", sc.interval);
sc                111 tools/perf/util/syscalltbl.c 	struct syscall *sc = bsearch(name, tbl->syscalls.entries,
sc                112 tools/perf/util/syscalltbl.c 				     tbl->syscalls.nr_entries, sizeof(*sc),
sc                115 tools/perf/util/syscalltbl.c 	return sc ? sc->id : -1;
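
The syscalltbl.c hits look a syscall name up with bsearch() over a table sorted by name and return the stored id, or -1 when absent. A self-contained sketch, assuming an illustrative entry layout rather than perf's real one:

/* The table must already be sorted by name for bsearch() to be valid. */
#include <stdlib.h>
#include <string.h>

struct demo_sc_entry {
	const char *name;
	int id;
};

static int demo_sc_cmp(const void *vkey, const void *ventry)
{
	const struct demo_sc_entry *entry = ventry;

	return strcmp(vkey, entry->name);
}

static int demo_sc_id(const struct demo_sc_entry *tbl, size_t nr, const char *name)
{
	const struct demo_sc_entry *sc =
		bsearch(name, tbl, nr, sizeof(*tbl), demo_sc_cmp);

	return sc ? sc->id : -1;
}
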
sc                658 tools/testing/selftests/bpf/test_maps.c 	int one = 1, s, sc, rc;
sc                967 tools/testing/selftests/bpf/test_maps.c 		sc = send(sfd[2], buf, 20, 0);
sc                968 tools/testing/selftests/bpf/test_maps.c 		if (sc < 0) {
sc               1001 tools/testing/selftests/bpf/test_maps.c 	sc = send(sfd[2], buf, 20, 0);
sc               1002 tools/testing/selftests/bpf/test_maps.c 	if (sc < 0) {
sc                714 tools/testing/selftests/bpf/test_sockmap.c 	int sc;
sc                720 tools/testing/selftests/bpf/test_sockmap.c 	sc = send(c1, buf, sizeof(buf), 0);
sc                721 tools/testing/selftests/bpf/test_sockmap.c 	if (sc < 0) {
sc                723 tools/testing/selftests/bpf/test_sockmap.c 		return sc;
sc                765 tools/testing/selftests/bpf/test_sockmap.c 			sc = send(i, buf, rc, 0);
sc                766 tools/testing/selftests/bpf/test_sockmap.c 			if (sc < 0) {
sc                768 tools/testing/selftests/bpf/test_sockmap.c 				return sc;
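
The test_maps/test_sockmap hits repeat the same send()-and-check step: treat a negative return as failure, otherwise use the byte count. A tiny illustration with placeholder fd and buffer:

/* Placeholder fd and payload; returns bytes queued or -1 on error. */
#include <stdio.h>
#include <sys/types.h>
#include <sys/socket.h>

static int demo_send_once(int fd, const void *buf, size_t len)
{
	ssize_t sc = send(fd, buf, len, 0);

	if (sc < 0) {
		perror("send");
		return -1;
	}
	return (int)sc;
}
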
sc                 16 tools/testing/selftests/powerpc/stringloops/string.c 	const char *sc;
sc                 18 tools/testing/selftests/powerpc/stringloops/string.c 	for (sc = s; *sc != '\0'; ++sc)
sc                 20 tools/testing/selftests/powerpc/stringloops/string.c 	return sc - s;
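
The stringloops hits are the textbook strlen(): walk to the terminating NUL and return the distance. The complete form, for reference (demo_strlen() is just a local name):

#include <stddef.h>

static size_t demo_strlen(const char *s)
{
	const char *sc;

	/* Advance until the NUL terminator; the pointer difference is the length. */
	for (sc = s; *sc != '\0'; ++sc)
		/* nothing */;
	return sc - s;
}
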
sc                 28 tools/testing/selftests/sigaltstack/sas.c static ucontext_t uc, sc;
sc                 71 tools/testing/selftests/sigaltstack/sas.c 	swapcontext(&sc, &uc);
sc                102 tools/testing/selftests/sigaltstack/sas.c 	setcontext(&sc);
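
The sigaltstack selftest hits pair swapcontext(&sc, &uc) with a later setcontext(&sc): the first call parks the caller in sc and runs uc, the second jumps back. A runnable sketch of that round trip, assuming an arbitrary 64 KiB stack and made-up names:

/* Minimal ucontext round trip: main -> demo_worker -> back to main. */
#include <stdio.h>
#include <ucontext.h>

static ucontext_t uc, sc;

static void demo_worker(void)
{
	puts("in worker context");
	setcontext(&sc);	/* jump back to the point swapcontext() saved */
}

int main(void)
{
	static char stack[64 * 1024];	/* arbitrary stack for the new context */

	if (getcontext(&uc))
		return 1;
	uc.uc_stack.ss_sp = stack;
	uc.uc_stack.ss_size = sizeof(stack);
	uc.uc_link = NULL;
	makecontext(&uc, demo_worker, 0);

	swapcontext(&sc, &uc);	/* save current point in sc, run demo_worker */
	puts("back after swapcontext");
	return 0;
}
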